commit fa0767e456c4f8a218766b771344324583c19f84 Author: Damien Coles Date: Mon Jan 26 11:58:04 2026 -0500 public-ready-init diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..c815274 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,15 @@ +target/ +.git/ +.gitignore +.env +.env.* +!.env.example +*.md +!README.md +.idea/ +.vscode/ +context/ +data/ +secrets/ +vault/ +setup.sh diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..dee64f8 --- /dev/null +++ b/.env.example @@ -0,0 +1,35 @@ +# Nexus Environment Configuration +# Copy to .env and fill in values + +# Server +HOST=0.0.0.0 +PORT=5050 + +# Vault Configuration +VAULT_ADDR=http://vault.example.local:8200 + +# Vault AppRole - Nexus App (runtime: database/creds/nexus-app + secret/data/nexus/*) +VAULT_APP_ROLE_ID= +VAULT_APP_SECRET_ID= + +# Vault AppRole - Nexus Migrate (migrations: database/creds/nexus-migrate only) +VAULT_MIGRATE_ROLE_ID= +VAULT_MIGRATE_SECRET_ID= + +# Vault AppRole - Kratos App (runtime: database/creds/nexus-kratos-app + secret/data/nexus/kratos) +VAULT_KRATOS_APP_ROLE_ID= +VAULT_KRATOS_APP_SECRET_ID= + +# Vault AppRole - Kratos Migrate (migrations: database/creds/nexus-kratos-migrate only) +VAULT_KRATOS_MIGRATE_ROLE_ID= +VAULT_KRATOS_MIGRATE_SECRET_ID= + +# Vault AppRole - Oathkeeper (runtime: secret/data/nexus/oathkeeper only) +VAULT_OATHKEEPER_ROLE_ID= +VAULT_OATHKEEPER_SECRET_ID= + +# Logging +RUST_LOG=nexus=debug,tower_http=debug + +# Note: All secrets (DATABASE_URL, VALKEY_URL, S3_*, OATHKEEPER_SECRET, etc.) +# are fetched dynamically from Vault by the Vault Agent sidecars. 
diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..49fe2a2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +/context/ +/data/ +/secrets/ +/run/ +/scripts/ +.env + +# Oathkeeper secrets +oathkeeper/config/id_token.jwks.json + +# Added by cargo + +/target +.idea \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..ac0adea --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,4981 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "ahash" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "aligned" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4508988c62edf04abd8d92897fca0c2995d907ce1dfeaf369dac3716a40685" +dependencies = [ + "as-slice", +] + +[[package]] +name = "aligned-vec" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" +dependencies = [ + "equator", +] + +[[package]] +name = 
"allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "apalis" +version = "1.0.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93be0eb33b912f5e66004d0b756423c285273259068b1c80a71d7842658189b" +dependencies = [ + "apalis-core", + "futures-util", + "pin-project", + "thiserror 2.0.17", + "tower", + "tracing", +] + +[[package]] +name = "apalis-codec" +version = "0.1.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5ed6bb8e64c360ed4ad666a6cbc42e9e6df73087461dc4071f510a3af284637" +dependencies = [ + "apalis-core", + "serde", + "serde_json", +] + +[[package]] +name = "apalis-core" +version = "1.0.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b7edea7ba9866a1bc2e58f39183bdf204d78db45402d9ac1eb67d0e482735e" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "futures-timer", + "futures-util", + "pin-project", + "serde", + "thiserror 2.0.17", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "apalis-cron" +version = "1.0.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f0eeb8ae0f072e3b25777687226963aef99ee23c9eacd6739be938c3457af2" +dependencies = [ + "apalis-core", + "chrono", + "cron", + "futures-util", + "serde", + "ulid", +] + +[[package]] +name = "apalis-redis" +version = 
"1.0.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46336d2880ecb3bacd96dc520bc7caced3b3e8abd61a6c49967234f364a982b3" +dependencies = [ + "apalis-codec", + "apalis-core", + "chrono", + "event-listener", + "futures", + "log", + "redis", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tower", + "ulid", +] + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" + +[[package]] +name = "arc-swap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d03449bb8ca2cc2ef70869af31463d1ae5ccc8fa3e334b307203fbf815207e" +dependencies = [ + "rustversion", +] + +[[package]] +name = "arg_enum_proc_macro" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "as-slice" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "ascii_utils" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71938f30533e4d95a6d17aa530939da3842c2ab6f4f84b9dae68447e4129f74a" + +[[package]] +name = "async-graphql" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "036618f842229ba0b89652ffe425f96c7c16a49f7e3cb23b56fca7f61fd74980" +dependencies = [ + "async-graphql-derive", + "async-graphql-parser", + 
"async-graphql-value", + "async-stream", + "async-trait", + "base64 0.22.1", + "bytes", + "chrono", + "fast_chemail", + "fnv", + "futures-timer", + "futures-util", + "handlebars", + "http 1.4.0", + "indexmap", + "mime", + "multer", + "num-traits", + "pin-project-lite", + "regex", + "rust_decimal", + "serde", + "serde_json", + "serde_urlencoded", + "static_assertions_next", + "tempfile", + "thiserror 1.0.69", + "uuid", +] + +[[package]] +name = "async-graphql-axum" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8725874ecfbf399e071150b8619c4071d7b2b7a2f117e173dddef53c6bdb6bb1" +dependencies = [ + "async-graphql", + "axum", + "bytes", + "futures-util", + "serde_json", + "tokio", + "tokio-stream", + "tokio-util", + "tower-service", +] + +[[package]] +name = "async-graphql-derive" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd45deb3dbe5da5cdb8d6a670a7736d735ba65b455328440f236dfb113727a3d" +dependencies = [ + "Inflector", + "async-graphql-parser", + "darling", + "proc-macro-crate", + "proc-macro2", + "quote", + "strum", + "syn 2.0.112", + "thiserror 1.0.69", +] + +[[package]] +name = "async-graphql-parser" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b7607e59424a35dadbc085b0d513aa54ec28160ee640cf79ec3b634eba66d3" +dependencies = [ + "async-graphql-value", + "pest", + "serde", + "serde_json", +] + +[[package]] +name = "async-graphql-value" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de" +dependencies = [ + "bytes", + "indexmap", + "serde", + "serde_json", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", 
+ "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "attohttpc" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07a9b245ba0739fc90935094c29adbaee3f977218b5fb95e822e261cda7f56a3" +dependencies = [ + "http 1.4.0", + "log", + "rustls 0.23.36", + "serde", + "serde_json", + "url", + "webpki-roots 0.26.11", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "av-scenechange" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f321d77c20e19b92c39e7471cf986812cbb46659d2af674adc4331ef3f18394" +dependencies = [ + "aligned", + "anyhow", + "arg_enum_proc_macro", + "arrayvec", + "log", + "num-rational", + "num-traits", + "pastey", + "rayon", + "thiserror 2.0.17", + "v_frame", + "y4m", +] + +[[package]] +name = "av1-grain" +version = "0.2.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cfddb07216410377231960af4fcab838eaa12e013417781b78bd95ee22077f8" +dependencies = [ + "anyhow", + "arrayvec", + "log", + "nom", + "num-rational", + "v_frame", +] + +[[package]] +name = "avif-serialize" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47c8fbc0f831f4519fe8b810b6a7a91410ec83031b8233f730a0480029f6a23f" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "aws-creds" +version = "0.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f84143206b9c72b3c5cb65415de60c7539c79cd1559290fddec657939131be0" +dependencies = [ + "attohttpc", + "home", + "log", + "quick-xml", + "rust-ini", + "serde", + "thiserror 1.0.69", + "time", + "url", +] + +[[package]] +name = "aws-region" +version = "0.25.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9aed3f9c7eac9be28662fdb3b0f4d1951e812f7c64fed4f0327ba702f459b3b" +dependencies = [ + "thiserror 1.0.69", +] + +[[package]] +name = "axum" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" +dependencies = [ + "axum-core", + "axum-macros", + "base64 0.22.1", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.8.1", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper", + "tokio", + "tokio-tungstenite", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + 
"futures-core", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "backon" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" +dependencies = [ + "fastrand", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a" + +[[package]] +name = "bit_field" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "serde_core", +] + +[[package]] +name = "bitstream-io" +version = "4.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60d4bd9d1db2c6bdf285e223a7fa369d5ce98ec767dec949c6ca62863ce61757" +dependencies = [ + "core2", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "borsh" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "built" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4ad8f11f288f48ca24471bbd51ac257aaeaaa07adae295591266b792902ae64" + +[[package]] +name = "bumpalo" +version = "3.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" + +[[package]] +name = "bytecheck" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bytemuck" +version = "1.24.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + +[[package]] +name = "bytes" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" +dependencies = [ + "serde", +] + +[[package]] +name = "cc" +version = "1.2.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a0aeaff4ff1a90589618835a598e545176939b97874f7abc7851caa0618f203" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf", +] + 
+[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core2" +version = 
"0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cron" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5877d3fbf742507b66bc2a1945106bd30dd8504019d596901ddd012a4dd01740" +dependencies = [ + "chrono", + "once_cell", + "winnow 0.6.26", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.112", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +dependencies = [ + "powerfmt", + "serde_core", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equator" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "exr" +version = "1.74.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4300e043a56aa2cb633c01af81ca8f699a321879a7854d3896a0ba89056363be" +dependencies = [ + "bit_field", + "half", + "lebe", + "miniz_oxide", + "rayon-core", + "smallvec", + "zune-inflate", +] + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fast_chemail" +version = "0.9.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "495a39d30d624c2caabe6312bfead73e7717692b44e0b32df168c275a2e8e9e4" +dependencies = [ + "ascii_utils", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fax" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645cbb3a84e60b7531617d5ae4e57f7e27308f6445f5abf653209ea76dec8dff" + +[[package]] +name = "flate2" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] 
+name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + 
"pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + +[[package]] +name = "gif" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5df2ba84018d80c213569363bdcd0c64e6933c67fe4c1d60ecf822971a3c35e" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.4.0", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + +[[package]] +name = "handlebars" +version = "5.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror 1.0.69", 
+] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + 
+[[package]] +name = "home" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.4.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.4.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version 
= "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http 1.4.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.4.0", + "hyper 1.8.1", + "hyper-util", + "rustls 0.23.36", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.4", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + 
"http-body-util", + "hyper 1.8.1", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.4.0", + "http-body 1.0.1", + "hyper 1.8.1", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.1", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + 
"icu_properties", +] + +[[package]] +name = "image" +version = "0.25.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a" +dependencies = [ + "bytemuck", + "byteorder-lite", + "color_quant", + "exr", + "gif", + "image-webp", + "moxcms", + "num-traits", + "png", + "qoi", + "ravif", + "rayon", + "rgb", + "tiff", + "zune-core 0.5.0", + "zune-jpeg 0.5.8", +] + +[[package]] +name = "image-webp" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" +dependencies = [ + "byteorder-lite", + "quick-error", +] + +[[package]] +name = "imgref" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" + +[[package]] +name = "indexmap" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "interpolate_name" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "lebe" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" + +[[package]] +name = "libc" +version = "0.2.178" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" + +[[package]] +name = "libfuzzer-sys" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "libm" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" + +[[package]] +name = "libredox" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags", + "libc", + "redox_syscall 0.7.0", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "loop9" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062" +dependencies = [ + "imgref", +] + +[[package]] +name = "matchers" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + +[[package]] +name = "maybe-async" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "maybe-rayon" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" +dependencies = [ + "cfg-if", + "rayon", +] + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "miniz_oxide" +version 
= "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "moxcms" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac9557c559cd6fc9867e122e20d2cbefc9ca29d80d027a8e39310920ed2f0a97" +dependencies = [ + "num-traits", + "pxfm", +] + +[[package]] +name = "multer" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 1.4.0", + "httparse", + "memchr", + "mime", + "spin", + "version_check", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nexus" +version = "0.1.0" +dependencies = [ + "anyhow", + "apalis", + "apalis-cron", + "apalis-redis", + "async-graphql", + "async-graphql-axum", + "axum", + "base64 0.22.1", + "bytes", + "chrono", + "chrono-tz", + "cron", + "dotenvy", + "futures-util", + "image", + "jsonwebtoken", + "reqwest", + "rust-s3", + "rust_decimal", + "serde", + 
"serde_json", + "sqlx", + "tempfile", + "thiserror 2.0.17", + "tokio", + "tokio-util", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", + "urlencoding", + "uuid", +] + +[[package]] +name = "nom" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] + +[[package]] +name = "noop_proc_macro" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" 
+version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec", + "windows-link", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pastey" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" + +[[package]] +name = "pem" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" +dependencies = [ + "base64 0.22.1", + "serde_core", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pest" +version = "2.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" +dependencies = [ + "memchr", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "pest_meta" +version = "2.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" +dependencies = [ + "pest", + "sha2", +] + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "png" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" +dependencies = [ + "bitflags", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "postgres-protocol" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbef655056b916eb868048276cfd5d6a7dea4f81560dfd047f97c8c6fe3fcfd4" +dependencies 
= [ + "base64 0.22.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac", + "md-5", + "memchr", + "rand 0.9.2", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef4605b7c057056dd35baeb6ac0c0338e4975b1f2bef0f65da953285eb007095" +dependencies = [ + "bytes", + "fallible-iterator", + "postgres-protocol", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "profiling" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" +dependencies = [ + "profiling-procmacros", +] + +[[package]] +name = "profiling-procmacros" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" +dependencies = [ + "quote", + "syn 2.0.112", +] + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pxfm" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7186d3822593aa4393561d186d1393b3923e9d6163d3fbfd6e825e3e6cf3e6a8" +dependencies = [ + "num-traits", +] + +[[package]] +name = "qoi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + +[[package]] +name = "quick-xml" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quote" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "radium" 
+version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rav1e" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b6dd56e85d9483277cde964fd1bdb0428de4fec5ebba7540995639a21cb32b" +dependencies = [ + "aligned-vec", + "arbitrary", + "arg_enum_proc_macro", + "arrayvec", + "av-scenechange", + 
"av1-grain", + "bitstream-io", + "built", + "cfg-if", + "interpolate_name", + "itertools", + "libc", + "libfuzzer-sys", + "log", + "maybe-rayon", + "new_debug_unreachable", + "noop_proc_macro", + "num-derive", + "num-traits", + "paste", + "profiling", + "rand 0.9.2", + "rand_chacha 0.9.0", + "simd_helpers", + "thiserror 2.0.17", + "v_frame", + "wasm-bindgen", +] + +[[package]] +name = "ravif" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef69c1990ceef18a116855938e74793a5f7496ee907562bd0857b6ac734ab285" +dependencies = [ + "avif-serialize", + "imgref", + "loop9", + "quick-error", + "rav1e", + "rayon", + "rgb", +] + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redis" +version = "0.32.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "014cc767fefab6a3e798ca45112bccad9c6e0e218fbd49720042716c73cfef44" +dependencies = [ + "arc-swap", + "backon", + "bytes", + "cfg-if", + "combine", + "futures-channel", + "futures-util", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "ryu", + "sha1_smol", + "socket2 0.6.1", + "tokio", + "tokio-util", + "url", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_syscall" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "rend" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-core", + "h2", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.8.1", + "hyper-rustls 0.27.7", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "rgb" +version = "0.8.52" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rkyv" +version = "0.7.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" +dependencies = [ + "bitvec", + "bytecheck", + "bytes", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rsa" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rust-ini" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + +[[package]] +name = "rust-s3" +version = "0.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3df3f353b1f4209dcf437d777cda90279c397ab15a0cd6fd06bd32c88591533" +dependencies = [ + "async-trait", + "aws-creds", + "aws-region", + "base64 0.22.1", + "bytes", 
+ "cfg-if", + "futures", + "hex", + "hmac", + "http 0.2.12", + "hyper 0.14.32", + "hyper-rustls 0.24.2", + "log", + "maybe-async", + "md5", + "percent-encoding", + "quick-xml", + "rustls 0.21.12", + "rustls-native-certs", + "serde", + "serde_derive", + "serde_json", + "sha2", + "thiserror 1.0.69", + "time", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "url", +] + +[[package]] +name = "rust_decimal" +version = "1.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35affe401787a9bd846712274d97654355d21b2a2c092a3139aabe31e9022282" +dependencies = [ + "arrayvec", + "borsh", + "bytes", + "num-traits", + "postgres-types", + "rand 0.8.5", + "rkyv", + "serde", + "serde_json", +] + +[[package]] +name = "rustix" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.23.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.103.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pki-types" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + 
"ring", + "untrusted", +] + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "serde_json" +version = "1.0.148" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "simd_helpers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" +dependencies = [ + "quote", +] + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.17", + "time", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64 0.22.1", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink", + "indexmap", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rust_decimal", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.112", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.112", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "rust_decimal", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "rust_decimal", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +dependencies = [ + "atoi", + 
"chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "static_assertions_next" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.112", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.112" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + 
"once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "tiff" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" +dependencies = [ + "fax", + "flate2", + "half", + "quick-error", + "weezl", + "zune-jpeg 0.4.21", +] + +[[package]] +name = "time" +version = "0.3.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" + +[[package]] +name = "time-macros" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.6.1", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "tokio-native-tls" 
+version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls 0.23.36", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + +[[package]] +name = "tokio-util" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.23.10+spec-1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +dependencies = [ + "indexmap", + "toml_datetime", + "toml_parser", + "winnow 0.7.14", +] + +[[package]] +name = "toml_parser" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +dependencies = [ + "winnow 0.7.14", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-core", + "futures-util", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + 
"tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http 1.4.0", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = 
"ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "ulid" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe" +dependencies = [ + "rand 0.9.2", + "serde", + "web-time", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-normalization" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = 
"urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" +dependencies = [ + "getrandom 0.3.4", + "js-sys", + "serde_core", + "wasm-bindgen", +] + +[[package]] +name = "v_frame" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2" +dependencies = [ + "aligned-vec", + "num-traits", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = 
"0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.112", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.5", +] + +[[package]] +name = "webpki-roots" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "weezl" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = 
"windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.6.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "y4m" +version = "0.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", +] + +[[package]] +name = "zmij" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac060176f7020d62c3bcc1cdbcec619d54f48b07ad1963a3f80ce7a0c17755f" + +[[package]] +name = "zune-core" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" + +[[package]] +name = "zune-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "111f7d9820f05fd715df3144e254d6fc02ee4088b0644c0ffd0efc9e6d9d2773" + +[[package]] +name = "zune-inflate" +version = "0.2.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "zune-jpeg" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" +dependencies = [ + "zune-core 0.4.12", +] + +[[package]] +name = "zune-jpeg" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e35aee689668bf9bd6f6f3a6c60bb29ba1244b3b43adfd50edd554a371da37d5" +dependencies = [ + "zune-core 0.5.0", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..7e140cd --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,71 @@ +[package] +name = "nexus" +version = "0.1.0" +edition = "2024" + +[dependencies] +# Web Framework +axum = { version = "0.8", features = ["macros"] } +tokio = { version = "1", features = ["full"] } +tokio-util = { version = "0.7", features = ["io", "compat"] } +tower = "0.5" +tower-http = { version = "0.6", features = ["cors", "trace", "fs"] } +futures-util = "0.3" + +# GraphQL +async-graphql = { version = "7", features = ["chrono", "uuid", "decimal"] } +async-graphql-axum = "7" + +# Database +sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "uuid", "chrono", "json", "rust_decimal"] } + +# Background Jobs +apalis = "1.0.0-rc.1" +apalis-redis = "1.0.0-rc.1" +apalis-cron = "1.0.0-rc.1" +cron = "0.15" + +# Serialization +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +# Types +uuid = { version = "1", features = ["v4", "serde"] } +chrono = { version = "0.4", features = ["serde"] } +chrono-tz = "0.10" +rust_decimal = { version = "1", features = ["serde", "db-postgres"] } + +# Tracing +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } + +# Config +dotenvy = "0.15" + +# Error handling +thiserror = "2" +anyhow = "1" + +# HTTP client for Google APIs +reqwest = { version = "0.12", features = ["json"] } + +# S3/Garage storage +rust-s3 = { version = "0.35", default-features = false, features = ["tokio-rustls-tls"] } + +# Image processing +image = "0.25" + +# Temp files for video processing +tempfile = "3" + +# Bytes for S3 uploads +bytes = "1" + +# JWT for service account auth +jsonwebtoken = "9" + +# Base64 for credentials +base64 = "0.22" + +# URL encoding +urlencoding = "2" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..9a8d36e --- /dev/null 
+++ b/Dockerfile @@ -0,0 +1,56 @@ +# Build stage +FROM rustlang/rust:nightly-bookworm AS builder + +WORKDIR /app + +# Copy manifests first for dependency caching +COPY Cargo.toml Cargo.lock ./ + +# Create dummy src to build dependencies +RUN mkdir src && echo "fn main() {}" > src/main.rs +RUN cargo build --release +RUN rm -rf src + +# Copy actual source code +COPY src ./src +COPY migrations ./migrations + +# Touch main.rs to invalidate the dummy build +RUN touch src/main.rs + +# Build the actual application +RUN cargo build --release + +# Runtime stage +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y \ + ca-certificates \ + libssl3 \ + curl \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Copy the binary from builder +COPY --from=builder /app/target/release/nexus /app/nexus + +# Copy migrations for runtime (if needed for embedded migrations) +COPY --from=builder /app/migrations ./migrations + +# Copy static files (logo for emails, etc.) +COPY static ./static + +# Copy entrypoint script +COPY entrypoint.sh /app/entrypoint.sh +RUN chmod 755 /app/entrypoint.sh + +# Create non-root user +RUN useradd -r -s /bin/false nexus +USER nexus + +EXPOSE 5050 + +ENV RUST_LOG=nexus=info,tower_http=info + +ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/Dockerfile.migrate b/Dockerfile.migrate new file mode 100644 index 0000000..46c39a4 --- /dev/null +++ b/Dockerfile.migrate @@ -0,0 +1,9 @@ +# Migration runner using Rust nightly +FROM rustlang/rust:nightly-bookworm + +# Install sqlx-cli with only postgres support +RUN cargo install sqlx-cli --no-default-features --features postgres,native-tls + +WORKDIR /app + +ENTRYPOINT ["/bin/sh", "-c"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..81d597e --- /dev/null +++ b/README.md @@ -0,0 +1,282 @@ +# Nexus 6 - Rust Platform Rewrite + +The final evolution of the Nexus platform, completely rewritten in Rust for maximum performance, type safety, and reliability. 
This is a production-ready monorepo containing the Axum-based API, SvelteKit frontends, and infrastructure configuration. + +## Overview + +Nexus 6 represents the culmination of lessons learned from five previous iterations, now built on a rock-solid Rust foundation: + +- **Nexus 1-3**: Django + Graphene (Python) +- **Nexus 4**: Rust experiment (abandoned) +- **Nexus 5**: Django + Strawberry GraphQL (Python) +- **Nexus 6**: Full Rust rewrite with Axum + async-graphql + +## Architecture + +``` +┌─────────────────────────────────────────────────────────────┐ +│ Clients │ +│ (Browser / Mobile / API Consumers) │ +└─────────────────────────┬───────────────────────────────────┘ + │ +┌─────────────────────────▼───────────────────────────────────┐ +│ Ory Oathkeeper │ +│ (API Gateway / Zero Trust) │ +│ - Route-based authentication │ +│ - JWT token injection │ +│ - CORS handling │ +└─────────────────────────┬───────────────────────────────────┘ + │ + ┌────────────────┼────────────────┐ + │ │ │ + ▼ ▼ ▼ +┌─────────────┐ ┌─────────────┐ ┌─────────────────────┐ +│ Frontend │ │ Auth │ │ Nexus API │ +│ (SvelteKit) │ │ Frontend │ │ (Axum/Rust) │ +│ │ │ (SvelteKit) │ │ │ +│ - Admin │ │ │ │ - GraphQL API │ +│ - Team │ │ - Login │ │ - Background Jobs │ +│ - Customer │ │ - Register │ │ - Media handling │ +│ - Public │ │ - Settings │ │ - Notifications │ +└─────────────┘ └─────────────┘ └──────────┬──────────┘ + │ │ + │ │ + ┌────────────────┼───────────────────┘ + │ │ + ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ +│ Ory Kratos │ │ PostgreSQL │ +│ (Identity) │ │ (via │ +│ │ │ PgBouncer) │ +│ - Sessions │ │ │ +│ - Recovery │ │ - App data │ +│ - Verification │ │ - Kratos data │ +└─────────────────┘ └─────────────────┘ + │ + ▼ +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ HashiCorp │ │ Redis │ │ S3 Storage │ +│ Vault │ │ (Valkey) │ │ (Garage) │ +│ │ │ │ │ │ +│ - DB creds │ │ - Job queue │ │ - Media files │ +│ - API keys │ │ - Caching │ │ - Reports │ +│ - Secrets │ │ │ │ - 
Uploads │ +└─────────────────┘ └─────────────────┘ └─────────────────┘ +``` + +## Tech Stack + +### Backend (Rust) +- **Axum** - Web framework with Tower middleware +- **async-graphql** - Type-safe GraphQL with subscriptions +- **SQLx** - Async SQL with compile-time checked queries +- **Apalis** - Background job processing (Redis-backed) +- **Tokio** - Async runtime + +### Frontend +- **SvelteKit 5** - Svelte 5 with runes +- **Tailwind CSS v4** - Utility-first CSS +- **TypeScript** - Type safety throughout + +### Infrastructure +- **Ory Kratos** - Identity management +- **Ory Oathkeeper** - API gateway / Zero-trust +- **PgBouncer** - Connection pooling +- **HashiCorp Vault** - Secrets management +- **Redis/Valkey** - Job queue and caching +- **S3 (Garage)** - Object storage + +## Evolution Comparison + +| Feature | Django (nexus-5) | Rust (nexus-6) | +|---------|------------------|----------------| +| **Language** | Python 3.12 | Rust 2024 Edition | +| **Web Framework** | Django 5.x | Axum 0.8 | +| **GraphQL** | Strawberry | async-graphql | +| **Database** | Django ORM | SQLx (compile-time) | +| **Background Jobs** | Celery | Apalis | +| **Type Safety** | Runtime | Compile-time | +| **Memory Usage** | ~500MB | ~50MB | +| **Startup Time** | ~5s | <100ms | +| **Concurrency** | Thread-based | Async/await | + +## Project Structure + +``` +nexus-6/ +├── src/ # Rust backend +│ ├── main.rs # Entry point with Axum server +│ ├── config.rs # Configuration management +│ ├── db.rs # Database connection pool +│ ├── auth/ # Authentication middleware +│ ├── graphql/ # GraphQL schema +│ │ ├── queries/ # Query resolvers +│ │ ├── mutations/ # Mutation resolvers +│ │ └── types/ # GraphQL types +│ ├── models/ # Database models +│ ├── services/ # Business logic services +│ ├── jobs/ # Background job handlers +│ └── routes/ # HTTP route handlers +├── migrations/ # SQL migrations +├── frontend/ # Main SvelteKit app +│ ├── src/ +│ │ ├── lib/ # Components, stores, utils +│ │ └── routes/ # 
Page routes +│ └── package.json +├── auth-frontend/ # Auth UI (Ory Kratos) +├── kratos/ # Kratos configuration +├── oathkeeper/ # Oathkeeper configuration +├── vault/ # Vault agent templates +├── pgbouncer/ # PgBouncer configuration +├── docker-compose.yml # Full stack deployment +└── Cargo.toml # Rust dependencies +``` + +## Features + +### Core Functionality +- **Customer Management** - CRM with profiles and accounts +- **Service Scheduling** - Recurring service management +- **Project Management** - One-time project tracking +- **Work Sessions** - Time tracking with task completion +- **Scope Templates** - Reusable work specifications +- **Reporting** - PDF report generation with media +- **Invoicing** - Wave API integration + +### Technical Features +- **GraphQL API** - Full query/mutation/subscription support +- **Real-time Updates** - WebSocket subscriptions +- **Background Jobs** - Scheduled and on-demand processing +- **Media Handling** - Image/video upload and processing +- **Email Integration** - Gmail API for notifications +- **Calendar Sync** - Google Calendar integration + +## Getting Started + +### Prerequisites +- Rust 1.75+ (2024 edition) +- Node.js 20+ +- PostgreSQL 16+ +- Redis 7+ +- Docker & Docker Compose + +### Development Setup + +```bash +# Clone the repository +git clone https://github.com/your-org/nexus-6.git +cd nexus-6 + +# Copy environment file +cp .env.example .env +# Edit .env with your configuration + +# Run database migrations +cargo run --bin migrate + +# Start the backend +cargo run + +# In another terminal, start the frontend +cd frontend +npm install +npm run dev + +# Start auth frontend +cd auth-frontend +npm install +npm run dev +``` + +### Docker Deployment + +```bash +# Start the full stack +docker-compose up -d + +# View logs +docker-compose logs -f nexus + +# Stop all services +docker-compose down +``` + +## Configuration + +### Environment Variables + +```bash +# Database 
+DATABASE_URL=postgres://user:pass@localhost:5432/nexus + +# Redis (Job Queue) +REDIS_URL=redis://localhost:6379 + +# S3 Storage +S3_ENDPOINT=http://localhost:3900 +S3_ACCESS_KEY=your-access-key +S3_SECRET_KEY=your-secret-key +S3_BUCKET=nexus-media + +# Google APIs +GOOGLE_OAUTH_CLIENT_ID=... +GOOGLE_OAUTH_CLIENT_SECRET=... + +# Wave Invoicing +WAVE_ACCESS_TOKEN=... +WAVE_BUSINESS_ID=... + +# Ory +KRATOS_PUBLIC_URL=http://localhost:4433 +OATHKEEPER_SECRET=your-secret +``` + +## API Documentation + +The GraphQL API is self-documenting. Access the GraphQL Playground at: +- Development: `http://localhost:8080/graphql` +- Production: `https://api.your-domain.com/graphql` + +## Performance + +Benchmarks comparing nexus-5 (Django) vs nexus-6 (Rust): + +| Metric | Django | Rust | Improvement | +|--------|--------|------|-------------| +| Requests/sec | 1,200 | 45,000 | 37x | +| P99 Latency | 85ms | 2ms | 42x | +| Memory Usage | 512MB | 48MB | 10x | +| Cold Start | 4.2s | 80ms | 52x | + +## Security + +- **Zero-Trust Architecture** - All requests validated via Oathkeeper +- **Session Management** - Ory Kratos handles auth flows +- **Secrets Management** - HashiCorp Vault for all credentials +- **SQL Injection Prevention** - Compile-time query checking +- **Type Safety** - Rust's ownership model prevents memory bugs + +## Related Repositories + +- **nexus-1 through nexus-5** - Previous Python iterations +- **nexus-5-auth** - Standalone Ory configuration (if separating) +- **nexus-5-frontend-1/2/3** - Previous SvelteKit frontends + +## License + +MIT License - See LICENSE file for details. + +## Contributing + +1. Fork the repository +2. Create a feature branch +3. Make your changes +4. Run `cargo test` and `cargo clippy` +5. Submit a pull request + +## Acknowledgments + +This project represents years of iteration and learning. 
Special thanks to: +- The Rust community for excellent tooling +- Ory for their identity infrastructure +- The SvelteKit team for an amazing framework diff --git a/auth-frontend/.dockerignore b/auth-frontend/.dockerignore new file mode 100644 index 0000000..be692a1 --- /dev/null +++ b/auth-frontend/.dockerignore @@ -0,0 +1,32 @@ +# Dependencies +node_modules +.pnp +.pnp.js + +# Testing +coverage + +# Build outputs +.svelte-kit +build +dist + +# Misc +.DS_Store +*.pem +.env.local +.env.development.local +.env.test.local + +# Debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# IDE +.vscode +.idea + +# Git +.git +.gitignore diff --git a/auth-frontend/.env.development b/auth-frontend/.env.development new file mode 100644 index 0000000..7558748 --- /dev/null +++ b/auth-frontend/.env.development @@ -0,0 +1,27 @@ +# ==================================== +# Frontend Configuration - DEVELOPMENT +# ==================================== + +# Port to expose the frontend on +FRONTEND_PORT=3000 + +# ==================================== +# Kratos Connection URLs (Unified Stack) +# ==================================== + +# Browser/client-side requests go through Oathkeeper proxy +PUBLIC_KRATOS_URL=http://localhost:7200 + +# Server-side requests during SSR (direct to Kratos public API) +KRATOS_SERVER_URL=http://localhost:6000 + +# ==================================== +# Origin Configuration +# ==================================== +ORIGIN=http://localhost:3000 + +# ==================================== +# Admin Configuration +# ==================================== +# User ID that has admin access to the dashboard +ADMIN_USER_ID= diff --git a/auth-frontend/.env.production b/auth-frontend/.env.production new file mode 100644 index 0000000..2a5c60f --- /dev/null +++ b/auth-frontend/.env.production @@ -0,0 +1,27 @@ +# ==================================== +# Frontend Configuration - PRODUCTION +# ==================================== + +# Port to expose the frontend on 
+FRONTEND_PORT=3000 + +# ==================================== +# Kratos Connection URLs (Unified Stack) +# ==================================== + +# Browser/client-side requests go through Oathkeeper via Caddy +PUBLIC_KRATOS_URL=https://auth.example.com + +# Server-side requests during SSR (direct to Kratos public API on same VM) +KRATOS_SERVER_URL=http://localhost:6000 + +# ==================================== +# Origin Configuration +# ==================================== +ORIGIN=https://account.example.com + +# ==================================== +# Admin Configuration +# ==================================== +# User ID that has admin access to the dashboard +ADMIN_USER_ID=00000000-0000-0000-0000-000000000000 diff --git a/auth-frontend/.gitignore b/auth-frontend/.gitignore new file mode 100644 index 0000000..779b1fa --- /dev/null +++ b/auth-frontend/.gitignore @@ -0,0 +1,26 @@ +node_modules + +# Output +.output +.vercel +.netlify +.wrangler +/.svelte-kit +/build + +# OS +.DS_Store +Thumbs.db + +# Env +.env +.env.local +!.env.development +!.env.production +!.env.example +!.env.test + +# Vite +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +/.env.example diff --git a/auth-frontend/.npmrc b/auth-frontend/.npmrc new file mode 100644 index 0000000..b6f27f1 --- /dev/null +++ b/auth-frontend/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/auth-frontend/.prettierignore b/auth-frontend/.prettierignore new file mode 100644 index 0000000..7d74fe2 --- /dev/null +++ b/auth-frontend/.prettierignore @@ -0,0 +1,9 @@ +# Package Managers +package-lock.json +pnpm-lock.yaml +yarn.lock +bun.lock +bun.lockb + +# Miscellaneous +/static/ diff --git a/auth-frontend/.prettierrc b/auth-frontend/.prettierrc new file mode 100644 index 0000000..8103a0b --- /dev/null +++ b/auth-frontend/.prettierrc @@ -0,0 +1,16 @@ +{ + "useTabs": true, + "singleQuote": true, + "trailingComma": "none", + "printWidth": 100, + "plugins": ["prettier-plugin-svelte", "prettier-plugin-tailwindcss"], + 
"overrides": [ + { + "files": "*.svelte", + "options": { + "parser": "svelte" + } + } + ], + "tailwindStylesheet": "./src/app.css" +} diff --git a/auth-frontend/Dockerfile b/auth-frontend/Dockerfile new file mode 100644 index 0000000..586b5c7 --- /dev/null +++ b/auth-frontend/Dockerfile @@ -0,0 +1,63 @@ +# ==================================== +# Build Stage +# ==================================== +FROM node:20-alpine AS builder + +# Set working directory +WORKDIR /app + +# Copy package files +COPY package*.json ./ + +# Install dependencies (including devDependencies for build) +RUN npm ci + +# Copy source code and configuration +COPY . . + +# Build the SvelteKit application +# Note: PUBLIC_* vars must be set at build time for SvelteKit +ARG PUBLIC_KRATOS_URL=https://auth.example.com +ENV PUBLIC_KRATOS_URL=$PUBLIC_KRATOS_URL + +RUN npm run build + +# Prune dev dependencies +RUN npm prune --production + +# ==================================== +# Production Stage +# ==================================== +FROM node:20-alpine + +# Install curl for health checks +RUN apk add --no-cache curl + +# Set working directory +WORKDIR /app + +# Copy built application from builder +COPY --from=builder /app/build ./build +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./package.json + +# Create non-root user +RUN addgroup -g 1001 -S nodejs && \ + adduser -S sveltekit -u 1001 && \ + chown -R sveltekit:nodejs /app + +# Switch to non-root user +USER sveltekit + +# Expose port 3000 +EXPOSE 3000 + +# Health check +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ + CMD curl -f http://localhost:3000/ || exit 1 + +# Set environment variable for production +ENV NODE_ENV=production + +# Start the application +CMD ["node", "build"] diff --git a/auth-frontend/docker-compose.yml b/auth-frontend/docker-compose.yml new file mode 100644 index 0000000..e001f6d --- /dev/null +++ b/auth-frontend/docker-compose.yml @@ -0,0 +1,28 @@ 
+services: + frontend: + build: + context: . + dockerfile: Dockerfile + container_name: nexus-auth-frontend + restart: unless-stopped + ports: + - '${FRONTEND_PORT:-3000}:3000' + environment: + - NODE_ENV=production + - PUBLIC_KRATOS_URL=${PUBLIC_KRATOS_URL} + - KRATOS_SERVER_URL=${KRATOS_SERVER_URL} + - ORIGIN=${ORIGIN} + - ADMIN_USER_ID=${ADMIN_USER_ID} + networks: + - ory-network + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:3000/'] + interval: 30s + timeout: 5s + retries: 5 + start_period: 10s + +networks: + ory-network: + external: true + name: ory-network diff --git a/auth-frontend/eslint.config.js b/auth-frontend/eslint.config.js new file mode 100644 index 0000000..2c49fa6 --- /dev/null +++ b/auth-frontend/eslint.config.js @@ -0,0 +1,41 @@ +import prettier from 'eslint-config-prettier'; +import { fileURLToPath } from 'node:url'; +import { includeIgnoreFile } from '@eslint/compat'; +import js from '@eslint/js'; +import svelte from 'eslint-plugin-svelte'; +import { defineConfig } from 'eslint/config'; +import globals from 'globals'; +import ts from 'typescript-eslint'; +import svelteConfig from './svelte.config.js'; + +const gitignorePath = fileURLToPath(new URL('./.gitignore', import.meta.url)); + +export default defineConfig( + includeIgnoreFile(gitignorePath), + js.configs.recommended, + ...ts.configs.recommended, + ...svelte.configs.recommended, + prettier, + ...svelte.configs.prettier, + { + languageOptions: { + globals: { ...globals.browser, ...globals.node } + }, + rules: { + // typescript-eslint strongly recommend that you do not use the no-undef lint rule on TypeScript projects. 
+ // see: https://typescript-eslint.io/troubleshooting/faqs/eslint/#i-get-errors-from-the-no-undef-rule-about-global-variables-not-being-defined-even-though-there-are-no-typescript-errors + 'no-undef': 'off' + } + }, + { + files: ['**/*.svelte', '**/*.svelte.ts', '**/*.svelte.js'], + languageOptions: { + parserOptions: { + projectService: true, + extraFileExtensions: ['.svelte'], + parser: ts.parser, + svelteConfig + } + } + } +); diff --git a/auth-frontend/package-lock.json b/auth-frontend/package-lock.json new file mode 100644 index 0000000..173a490 --- /dev/null +++ b/auth-frontend/package-lock.json @@ -0,0 +1,4925 @@ +{ + "name": "nexus-5-auth-frontend", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "nexus-5-auth-frontend", + "version": "0.0.1", + "dependencies": { + "@ory/client": "^1.22.5", + "axios": "^1.12.2", + "flowbite-svelte": "^1.17.4" + }, + "devDependencies": { + "@eslint/compat": "^1.4.0", + "@eslint/js": "^9.36.0", + "@sveltejs/adapter-node": "^5.3.2", + "@sveltejs/kit": "^2.43.2", + "@sveltejs/vite-plugin-svelte": "^6.2.0", + "@tailwindcss/forms": "^0.5.10", + "@tailwindcss/typography": "^0.5.18", + "@tailwindcss/vite": "^4.1.13", + "@types/node": "^22", + "eslint": "^9.36.0", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-svelte": "^3.12.4", + "globals": "^16.4.0", + "prettier": "^3.6.2", + "prettier-plugin-svelte": "^3.4.0", + "prettier-plugin-tailwindcss": "^0.6.14", + "svelte": "^5.39.5", + "svelte-check": "^4.3.2", + "tailwindcss": "^4.1.13", + "typescript": "^5.9.2", + "typescript-eslint": "^8.44.1", + "vite": "^7.1.7" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", + "integrity": "sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + 
"aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.10.tgz", + "integrity": "sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.10.tgz", + "integrity": "sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.10.tgz", + "integrity": "sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz", + "integrity": "sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz", + "integrity": "sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==", 
+ "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.10.tgz", + "integrity": "sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.10.tgz", + "integrity": "sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.10.tgz", + "integrity": "sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.10.tgz", + "integrity": "sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.10.tgz", + "integrity": 
"sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.10.tgz", + "integrity": "sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.10.tgz", + "integrity": "sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.10.tgz", + "integrity": "sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.10.tgz", + "integrity": "sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": 
"0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.10.tgz", + "integrity": "sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz", + "integrity": "sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.10.tgz", + "integrity": "sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.10.tgz", + "integrity": "sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.10.tgz", + "integrity": "sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.10.tgz", + "integrity": "sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.10.tgz", + "integrity": "sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.10.tgz", + "integrity": "sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.10.tgz", + "integrity": "sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.10.tgz", + "integrity": "sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==", + "cpu": 
[ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz", + "integrity": "sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, 
+ "node_modules/@eslint/compat": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.4.0.tgz", + "integrity": "sha512-DEzm5dKeDBPm3r08Ixli/0cmxr8LkRdwxMRUIJBlSCpAwSrvFEJpVBzV+66JhDxiaqKxnRzCXhtiMiczF7Hglg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "peerDependencies": { + "eslint": "^8.40 || 9" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.0.tgz", + "integrity": "sha512-WUFvV4WoIwW8Bv0KeKCIIEgdSiFOsulyN0xrMu+7z43q/hkOLXjvb5u7UC9jDxvRzcrbEmuZBX5yJZz1741jog==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", + "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": 
"https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.37.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.37.0.tgz", + "integrity": "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.0.tgz", + "integrity": 
"sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", + "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz", + "integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.3", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", + "license": "MIT" + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + 
"node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + 
"license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + 
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@ory/client": { + "version": "1.22.5", + "resolved": "https://registry.npmjs.org/@ory/client/-/client-1.22.5.tgz", + "integrity": "sha512-uSiQrpWkzFRL66B9nlPlTNes8KJylsjV495cEHnfCMdAX2eCtkKHSKj0D2IYY7Y6iEFQBX7aMYJZ6OmnMC4LWA==", + "license": "Apache-2.0", + "dependencies": { + "axios": "^1.6.1" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "dev": true, + "license": "MIT" + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/@rollup/plugin-commonjs": { + "version": "28.0.6", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.6.tgz", + "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "commondir": "^1.0.1", + "estree-walker": "^2.0.2", + "fdir": "^6.2.0", + "is-reference": "1.2.1", + "magic-string": "^0.30.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=16.0.0 || 14 >= 14.17" + }, + "peerDependencies": { + "rollup": "^2.68.0||^3.0.0||^4.0.0" + }, + 
"peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-json": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", + "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.1.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "16.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.2.tgz", + "integrity": "sha512-tCtHJ2BlhSoK4cCs25NMXfV7EALKr0jyasmqVCq3y9cBrKdmJhtsy1iTz36Xhk/O+pDJbzawxF4K6ZblqCnITQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.52.4", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.4.tgz", + "integrity": "sha512-BTm2qKNnWIQ5auf4deoetINJm2JzvihvGb9R6K/ETwKLql/Bb3Eg2H1FBp1gUb4YGbydMA3jcmQTR73q7J+GAA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.4.tgz", + "integrity": "sha512-P9LDQiC5vpgGFgz7GSM6dKPCiqR3XYN1WwJKA4/BUVDjHpYsf3iBEmVz62uyq20NGYbiGPR5cNHI7T1HqxNs2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.4.tgz", + "integrity": "sha512-QRWSW+bVccAvZF6cbNZBJwAehmvG9NwfWHwMy4GbWi/BQIA/laTIktebT2ipVjNncqE6GLPxOok5hsECgAxGZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.4.tgz", + "integrity": "sha512-hZgP05pResAkRJxL1b+7yxCnXPGsXU0fG9Yfd6dUaoGk+FhdPKCJ5L1Sumyxn8kvw8Qi5PvQ8ulenUbRjzeCTw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.4.tgz", + "integrity": "sha512-xmc30VshuBNUd58Xk4TKAEcRZHaXlV+tCxIXELiE9sQuK3kG8ZFgSPi57UBJt8/ogfhAF5Oz4ZSUBN77weM+mQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.52.4", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.4.tgz", + "integrity": "sha512-WdSLpZFjOEqNZGmHflxyifolwAiZmDQzuOzIq9L27ButpCVpD7KzTRtEG1I0wMPFyiyUdOO+4t8GvrnBLQSwpw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.4.tgz", + "integrity": "sha512-xRiOu9Of1FZ4SxVbB0iEDXc4ddIcjCv2aj03dmW8UrZIW7aIQ9jVJdLBIhxBI+MaTnGAKyvMwPwQnoOEvP7FgQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.4.tgz", + "integrity": "sha512-FbhM2p9TJAmEIEhIgzR4soUcsW49e9veAQCziwbR+XWB2zqJ12b4i/+hel9yLiD8pLncDH4fKIPIbt5238341Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.4.tgz", + "integrity": "sha512-4n4gVwhPHR9q/g8lKCyz0yuaD0MvDf7dV4f9tHt0C73Mp8h38UCtSCSE6R9iBlTbXlmA8CjpsZoujhszefqueg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.4.tgz", + "integrity": "sha512-u0n17nGA0nvi/11gcZKsjkLj1QIpAuPFQbR48Subo7SmZJnGxDpspyw2kbpuoQnyK+9pwf3pAoEXerJs/8Mi9g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.4.tgz", + "integrity": "sha512-0G2c2lpYtbTuXo8KEJkDkClE/+/2AFPdPAbmaHoE870foRFs4pBrDehilMcrSScrN/fB/1HTaWO4bqw+ewBzMQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.4.tgz", + "integrity": "sha512-teSACug1GyZHmPDv14VNbvZFX779UqWTsd7KtTM9JIZRDI5NUwYSIS30kzI8m06gOPB//jtpqlhmraQ68b5X2g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.4.tgz", + "integrity": "sha512-/MOEW3aHjjs1p4Pw1Xk4+3egRevx8Ji9N6HUIA1Ifh8Q+cg9dremvFCUbOX2Zebz80BwJIgCBUemjqhU5XI5Eg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.4.tgz", + "integrity": "sha512-1HHmsRyh845QDpEWzOFtMCph5Ts+9+yllCrREuBR/vg2RogAQGGBRC8lDPrPOMnrdOJ+mt1WLMOC2Kao/UwcvA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.4.tgz", + "integrity": "sha512-seoeZp4L/6D1MUyjWkOMRU6/iLmCU2EjbMTyAG4oIOs1/I82Y5lTeaxW0KBfkUdHAWN7j25bpkt0rjnOgAcQcA==", + "cpu": [ + "s390x" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.4.tgz", + "integrity": "sha512-Wi6AXf0k0L7E2gteNsNHUs7UMwCIhsCTs6+tqQ5GPwVRWMaflqGec4Sd8n6+FNFDw9vGcReqk2KzBDhCa1DLYg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.4.tgz", + "integrity": "sha512-dtBZYjDmCQ9hW+WgEkaffvRRCKm767wWhxsFW3Lw86VXz/uJRuD438/XvbZT//B96Vs8oTA8Q4A0AfHbrxP9zw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.4.tgz", + "integrity": "sha512-1ox+GqgRWqaB1RnyZXL8PD6E5f7YyRUJYnCqKpNzxzP0TkaUh112NDrR9Tt+C8rJ4x5G9Mk8PQR3o7Ku2RKqKA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.4.tgz", + "integrity": "sha512-8GKr640PdFNXwzIE0IrkMWUNUomILLkfeHjXBi/nUvFlpZP+FA8BKGKpacjW6OUUHaNI6sUURxR2U2g78FOHWQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.4.tgz", + "integrity": 
"sha512-AIy/jdJ7WtJ/F6EcfOb2GjR9UweO0n43jNObQMb6oGxkYTfLcnN7vYYpG+CN3lLxrQkzWnMOoNSHTW54pgbVxw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.4.tgz", + "integrity": "sha512-UF9KfsH9yEam0UjTwAgdK0anlQ7c8/pWPU2yVjyWcF1I1thABt6WXE47cI71pGiZ8wGvxohBoLnxM04L/wj8mQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.4.tgz", + "integrity": "sha512-bf9PtUa0u8IXDVxzRToFQKsNCRz9qLYfR/MpECxl4mRoWYjAeFjgxj1XdZr2M/GNVpT05p+LgQOHopYDlUu6/w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sveltejs/acorn-typescript": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.6.tgz", + "integrity": "sha512-4awhxtMh4cx9blePWl10HRHj8Iivtqj+2QdDCSMDzxG+XKa9+VCNupQuCuvzEhYPzZSrX+0gC+0lHA/0fFKKQQ==", + "license": "MIT", + "peerDependencies": { + "acorn": "^8.9.0" + } + }, + "node_modules/@sveltejs/adapter-node": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-5.3.3.tgz", + "integrity": "sha512-SRDVuFBkmpKGsA9b0wYaCrrSChq2Yv5Dv8g7WiZcs8E69vdQNRamN0DzQV9/rEixvuRkojATLADNeQ+6FeyVNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@rollup/plugin-commonjs": "^28.0.1", + "@rollup/plugin-json": "^6.1.0", + "@rollup/plugin-node-resolve": "^16.0.0", + "rollup": "^4.9.5" + }, + "peerDependencies": { + "@sveltejs/kit": "^2.4.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "2.46.4", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.46.4.tgz", + "integrity": "sha512-J1fd80WokLzIm6EAV7z7C2+/C02qVAX645LZomARARTRJkbbJSY1Jln3wtBZYibUB8c9/5Z6xqLAV39VdbtWCQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/cookie": "^0.6.0", + "acorn": "^8.14.1", + "cookie": "^0.6.0", + "devalue": "^5.3.2", + "esm-env": "^1.2.2", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^3.0.0" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": ">=18.13" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + } + } + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.2.1.tgz", + "integrity": "sha512-YZs/OSKOQAQCnJvM/P+F1URotNnYNeU3P2s4oIpzm1uFaqUEqRxUB0g5ejMjEb5Gjb9/PiBI5Ktrq4rUUF8UVQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", + "debug": "^4.4.1", + "deepmerge": "^4.3.1", + "magic-string": "^0.30.17", + "vitefu": "^1.1.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + 
"version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.1.tgz", + "integrity": "sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@svgdotjs/svg.draggable.js": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@svgdotjs/svg.draggable.js/-/svg.draggable.js-3.0.6.tgz", + "integrity": "sha512-7iJFm9lL3C40HQcqzEfezK2l+dW2CpoVY3b77KQGqc8GXWa6LhhmX5Ckv7alQfUXBuZbjpICZ+Dvq1czlGx7gA==", + "license": "MIT", + "peerDependencies": { + "@svgdotjs/svg.js": "^3.2.4" + } + }, + "node_modules/@svgdotjs/svg.filter.js": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@svgdotjs/svg.filter.js/-/svg.filter.js-3.0.9.tgz", + "integrity": "sha512-/69XMRCDoam2HgC4ldHIaDgeQf1ViHIsa0Ld4uWgiXtZ+E24DWHe/9Ib6kbNiZ7WRIdlVokUDR1Fg0kjIpkfbw==", + "license": "MIT", + "dependencies": { + "@svgdotjs/svg.js": "^3.2.4" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/@svgdotjs/svg.js": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@svgdotjs/svg.js/-/svg.js-3.2.5.tgz", + "integrity": "sha512-/VNHWYhNu+BS7ktbYoVGrCmsXDh+chFMaONMwGNdIBcFHrWqk2jY8fNyr3DLdtQUIalvkPfM554ZSFa3dm3nxQ==", + "license": "MIT", + "peer": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Fuzzyma" + } + }, + "node_modules/@svgdotjs/svg.resize.js": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@svgdotjs/svg.resize.js/-/svg.resize.js-2.0.5.tgz", + "integrity": "sha512-4heRW4B1QrJeENfi7326lUPYBCevj78FJs8kfeDxn5st0IYPIRXoTtOSYvTzFWgaWWXd3YCDE6ao4fmv91RthA==", + "license": "MIT", + "engines": { + "node": ">= 
14.18" + }, + "peerDependencies": { + "@svgdotjs/svg.js": "^3.2.4", + "@svgdotjs/svg.select.js": "^4.0.1" + } + }, + "node_modules/@svgdotjs/svg.select.js": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@svgdotjs/svg.select.js/-/svg.select.js-4.0.3.tgz", + "integrity": "sha512-qkMgso1sd2hXKd1FZ1weO7ANq12sNmQJeGDjs46QwDVsxSRcHmvWKL2NDF7Yimpwf3sl5esOLkPqtV2bQ3v/Jg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 14.18" + }, + "peerDependencies": { + "@svgdotjs/svg.js": "^3.2.4" + } + }, + "node_modules/@tailwindcss/forms": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.10.tgz", + "integrity": "sha512-utI1ONF6uf/pPNO68kmN1b8rEwNXv3czukalo8VtJH8ksIkZXr3Q3VYudZLkCsDd4Wku120uF02hYK25XGPorw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mini-svg-data-uri": "^1.2.3" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1 || >= 4.0.0-alpha.20 || >= 4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.14.tgz", + "integrity": "sha512-hpz+8vFk3Ic2xssIA3e01R6jkmsAhvkQdXlEbRTk6S10xDAtiQiM3FyvZVGsucefq764euO/b8WUW9ysLdThHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "enhanced-resolve": "^5.18.3", + "jiti": "^2.6.0", + "lightningcss": "1.30.1", + "magic-string": "^0.30.19", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.14" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.14.tgz", + "integrity": "sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.4", + "tar": "^7.5.1" + }, + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + 
"@tailwindcss/oxide-android-arm64": "4.1.14", + "@tailwindcss/oxide-darwin-arm64": "4.1.14", + "@tailwindcss/oxide-darwin-x64": "4.1.14", + "@tailwindcss/oxide-freebsd-x64": "4.1.14", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.14", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.14", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.14", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.14", + "@tailwindcss/oxide-linux-x64-musl": "4.1.14", + "@tailwindcss/oxide-wasm32-wasi": "4.1.14", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.14", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.14" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.14.tgz", + "integrity": "sha512-a94ifZrGwMvbdeAxWoSuGcIl6/DOP5cdxagid7xJv6bwFp3oebp7y2ImYsnZBMTwjn5Ev5xESvS3FFYUGgPODQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.14.tgz", + "integrity": "sha512-HkFP/CqfSh09xCnrPJA7jud7hij5ahKyWomrC3oiO2U9i0UjP17o9pJbxUN0IJ471GTQQmzwhp0DEcpbp4MZTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.14.tgz", + "integrity": "sha512-eVNaWmCgdLf5iv6Qd3s7JI5SEFBFRtfm6W0mphJYXgvnDEAZ5sZzqmI06bK6xo0IErDHdTA5/t7d4eTfWbWOFw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.14", + 
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.14.tgz", + "integrity": "sha512-QWLoRXNikEuqtNb0dhQN6wsSVVjX6dmUFzuuiL09ZeXju25dsei2uIPl71y2Ic6QbNBsB4scwBoFnlBfabHkEw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.14.tgz", + "integrity": "sha512-VB4gjQni9+F0VCASU+L8zSIyjrLLsy03sjcR3bM0V2g4SNamo0FakZFKyUQ96ZVwGK4CaJsc9zd/obQy74o0Fw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.14.tgz", + "integrity": "sha512-qaEy0dIZ6d9vyLnmeg24yzA8XuEAD9WjpM5nIM1sUgQ/Zv7cVkharPDQcmm/t/TvXoKo/0knI3me3AGfdx6w1w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.14.tgz", + "integrity": "sha512-ISZjT44s59O8xKsPEIesiIydMG/sCXoMBCqsphDm/WcbnuWLxxb+GcvSIIA5NjUw6F8Tex7s5/LM2yDy8RqYBQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.14.tgz", + "integrity": 
"sha512-02c6JhLPJj10L2caH4U0zF8Hji4dOeahmuMl23stk0MU1wfd1OraE7rOloidSF8W5JTHkFdVo/O7uRUJJnUAJg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.14.tgz", + "integrity": "sha512-TNGeLiN1XS66kQhxHG/7wMeQDOoL0S33x9BgmydbrWAb9Qw0KYdd8o1ifx4HOGDWhVmJ+Ul+JQ7lyknQFilO3Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.14.tgz", + "integrity": "sha512-uZYAsaW/jS/IYkd6EWPJKW/NlPNSkWkBlaeVBi/WsFQNP05/bzkebUL8FH1pdsqx4f2fH/bWFcUABOM9nfiJkQ==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.5.0", + "@emnapi/runtime": "^1.5.0", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.0.5", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.14.tgz", + "integrity": "sha512-Az0RnnkcvRqsuoLH2Z4n3JfAef0wElgzHD5Aky/e+0tBUxUhIeIqFBTMNQvmMRSP15fWwmvjBxZ3Q8RhsDnxAA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + 
"version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.14.tgz", + "integrity": "sha512-ttblVGHgf68kEE4om1n/n44I0yGPkCPbLsqzjvybhpwa6mKKtgFfAzy6btc3HRmuW7nHe0OOrSeNP9sQmmH9XA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/typography": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.19.tgz", + "integrity": "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.14.tgz", + "integrity": "sha512-BoFUoU0XqgCUS1UXWhmDJroKKhNXeDzD7/XwabjkDIAbMnc4ULn5e2FuEuBbhZ6ENZoSYzKlzvZ44Yr6EUDUSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.1.14", + "@tailwindcss/oxide": "4.1.14", + "tailwindcss": "4.1.14" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7" + } + }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": 
"https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.18.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.9.tgz", + "integrity": "sha512-5yBtK0k/q8PjkMXbTfeIEP/XVYnz1R9qZJ3yUicdEW7ppdDJfe+MqXEhpqDL3mtn4Wvs1u0KLEG0RXzCgNpsSg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.0.tgz", + "integrity": "sha512-hA8gxBq4ukonVXPy0OKhiaUh/68D0E88GSmtC1iAEnGaieuDi38LhS7jdCHRLi6ErJBNDGCzvh5EnzdPwUc0DA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.46.0", + "@typescript-eslint/type-utils": "8.46.0", + "@typescript-eslint/utils": "8.46.0", + "@typescript-eslint/visitor-keys": "8.46.0", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.46.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.0.tgz", + "integrity": "sha512-n1H6IcDhmmUEG7TNVSspGmiHHutt7iVKtZwRppD7e04wha5MrkV1h3pti9xQLcCMt6YWsncpoT0HMjkH1FNwWQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.46.0", + "@typescript-eslint/types": "8.46.0", + "@typescript-eslint/typescript-estree": "8.46.0", + "@typescript-eslint/visitor-keys": "8.46.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.0.tgz", + "integrity": "sha512-OEhec0mH+U5Je2NZOeK1AbVCdm0ChyapAyTeXVIYTPXDJ3F07+cu87PPXcGoYqZ7M9YJVvFnfpGg1UmCIqM+QQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.46.0", + "@typescript-eslint/types": "^8.46.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.0.tgz", + "integrity": 
"sha512-lWETPa9XGcBes4jqAMYD9fW0j4n6hrPtTJwWDmtqgFO/4HF4jmdH/Q6wggTw5qIT5TXjKzbt7GsZUBnWoO3dqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.0", + "@typescript-eslint/visitor-keys": "8.46.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.0.tgz", + "integrity": "sha512-WrYXKGAHY836/N7zoK/kzi6p8tXFhasHh8ocFL9VZSAkvH956gfeRfcnhs3xzRy8qQ/dq3q44v1jvQieMFg2cw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.0.tgz", + "integrity": "sha512-hy+lvYV1lZpVs2jRaEYvgCblZxUoJiPyCemwbQZ+NGulWkQRy0HRPYAoef/CNSzaLt+MLvMptZsHXHlkEilaeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.0", + "@typescript-eslint/typescript-estree": "8.46.0", + "@typescript-eslint/utils": "8.46.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.0.tgz", + "integrity": 
"sha512-bHGGJyVjSE4dJJIO5yyEWt/cHyNwga/zXGJbJJ8TiO01aVREK6gCTu3L+5wrkb1FbDkQ+TKjMNe9R/QQQP9+rA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.0.tgz", + "integrity": "sha512-ekDCUfVpAKWJbRfm8T1YRrCot1KFxZn21oV76v5Fj4tr7ELyk84OS+ouvYdcDAwZL89WpEkEj2DKQ+qg//+ucg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.46.0", + "@typescript-eslint/tsconfig-utils": "8.46.0", + "@typescript-eslint/types": "8.46.0", + "@typescript-eslint/visitor-keys": "8.46.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + 
"brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.0.tgz", + "integrity": "sha512-nD6yGWPj1xiOm4Gk0k6hLSZz2XkNXhuYmyIrOWcHoPuAhjT9i5bAG+xbWPgFeNR8HPHHtpNKdYUXJl/D3x7f5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.46.0", + "@typescript-eslint/types": "8.46.0", + "@typescript-eslint/typescript-estree": "8.46.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.0.tgz", + "integrity": "sha512-FrvMpAK+hTbFy7vH5j1+tMYHMSKLE6RzluFJlkFNKD0p9YsUT75JlBSmr5so3QRzvMwU5/bIEdeNrxm8du8l3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.46.0", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@yr/monotone-cubic-spline": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@yr/monotone-cubic-spline/-/monotone-cubic-spline-1.0.3.tgz", + "integrity": "sha512-FQXkOta0XBSUPHndIKON2Y9JeQz5ZeMqLYZVVK93FliNBFm7LNMIZmY6FrMEB9XPcDbE2bekMbZD6kzDkxwYjA==", + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": 
"sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/apexcharts": { + "version": "5.3.5", + "resolved": "https://registry.npmjs.org/apexcharts/-/apexcharts-5.3.5.tgz", + "integrity": "sha512-I04DY/WBZbJgJD2uixeV5EzyiL+J5LgKQXEu8rctqAwyRmKv44aDVeofJoLdTJe3ao4r2KEQfCgtVzXn6pqirg==", + "license": "SEE LICENSE IN LICENSE", + "dependencies": { + "@svgdotjs/svg.draggable.js": "^3.0.4", + "@svgdotjs/svg.filter.js": "^3.0.8", + "@svgdotjs/svg.js": "^3.2.4", + "@svgdotjs/svg.resize.js": "^2.0.2", + 
"@svgdotjs/svg.select.js": "^4.0.1", + "@yr/monotone-cubic-spline": "^1.0.3" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/aria-query": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", 
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/chownr": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": 
">= 0.8" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/devalue": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.3.2.tgz", + "integrity": "sha512-UDsjUbpQn9kvm68slnrs+mfxwFkIflOhkanmyabZ8zOYk8SMEIbJ3TK+88g70hSIeytu4y18f0z/hYHMTrXIWw==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.18.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", + "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz", + "integrity": "sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.10", + "@esbuild/android-arm": "0.25.10", + "@esbuild/android-arm64": "0.25.10", + "@esbuild/android-x64": "0.25.10", + "@esbuild/darwin-arm64": "0.25.10", + "@esbuild/darwin-x64": "0.25.10", + "@esbuild/freebsd-arm64": "0.25.10", + "@esbuild/freebsd-x64": "0.25.10", + "@esbuild/linux-arm": "0.25.10", + "@esbuild/linux-arm64": "0.25.10", + "@esbuild/linux-ia32": "0.25.10", + "@esbuild/linux-loong64": "0.25.10", + "@esbuild/linux-mips64el": "0.25.10", + "@esbuild/linux-ppc64": "0.25.10", + "@esbuild/linux-riscv64": "0.25.10", + "@esbuild/linux-s390x": "0.25.10", + "@esbuild/linux-x64": "0.25.10", + "@esbuild/netbsd-arm64": "0.25.10", + "@esbuild/netbsd-x64": "0.25.10", + "@esbuild/openbsd-arm64": "0.25.10", + "@esbuild/openbsd-x64": "0.25.10", + "@esbuild/openharmony-arm64": "0.25.10", + "@esbuild/sunos-x64": "0.25.10", + "@esbuild/win32-arm64": "0.25.10", + "@esbuild/win32-ia32": "0.25.10", + "@esbuild/win32-x64": "0.25.10" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.37.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.37.0.tgz", + "integrity": "sha512-XyLmROnACWqSxiGYArdef1fItQd47weqB7iwtfr9JHwRrqIXZdcFMvvEcL9xHCmL0SNsOvF0c42lWyM1U5dgig==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.4.0", + "@eslint/core": "^0.16.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.37.0", + "@eslint/plugin-kit": "^0.4.0", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-prettier": { + "version": "10.1.8", + "resolved": 
"https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.8.tgz", + "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "funding": { + "url": "https://opencollective.com/eslint-config-prettier" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-svelte": { + "version": "3.12.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-3.12.4.tgz", + "integrity": "sha512-hD7wPe+vrPgx3U2X2b/wyTMtWobm660PygMGKrWWYTc9lvtY8DpNFDaU2CJQn1szLjGbn/aJ3g8WiXuKakrEkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.6.1", + "@jridgewell/sourcemap-codec": "^1.5.0", + "esutils": "^2.0.3", + "globals": "^16.0.0", + "known-css-properties": "^0.37.0", + "postcss": "^8.4.49", + "postcss-load-config": "^3.1.4", + "postcss-safe-parser": "^7.0.0", + "semver": "^7.6.3", + "svelte-eslint-parser": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "eslint": "^8.57.1 || ^9.0.0", + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esm-env": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "license": "MIT" + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrap": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.1.0.tgz", + "integrity": "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": 
"sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + 
"node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": 
"sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/flowbite": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/flowbite/-/flowbite-3.1.2.tgz", + "integrity": "sha512-MkwSgbbybCYgMC+go6Da5idEKUFfMqc/AmSjm/2ZbdmvoKf5frLPq/eIhXc9P+rC8t9boZtUXzHDgt5whZ6A/Q==", + "license": "MIT", + 
"dependencies": { + "@popperjs/core": "^2.9.3", + "flowbite-datepicker": "^1.3.1", + "mini-svg-data-uri": "^1.4.3", + "postcss": "^8.5.1" + } + }, + "node_modules/flowbite-datepicker": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/flowbite-datepicker/-/flowbite-datepicker-1.3.2.tgz", + "integrity": "sha512-6Nfm0MCVX3mpaR7YSCjmEO2GO8CDt6CX8ZpQnGdeu03WUCWtEPQ/uy0PUiNtIJjJZWnX0Cm3H55MOhbD1g+E/g==", + "license": "MIT", + "dependencies": { + "@rollup/plugin-node-resolve": "^15.2.3", + "flowbite": "^2.0.0" + } + }, + "node_modules/flowbite-datepicker/node_modules/@rollup/plugin-node-resolve": { + "version": "15.3.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.1.tgz", + "integrity": "sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==", + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/flowbite-datepicker/node_modules/flowbite": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/flowbite/-/flowbite-2.5.2.tgz", + "integrity": "sha512-kwFD3n8/YW4EG8GlY3Od9IoKND97kitO+/ejISHSqpn3vw2i5K/+ZI8Jm2V+KC4fGdnfi0XZ+TzYqQb4Q1LshA==", + "license": "MIT", + "dependencies": { + "@popperjs/core": "^2.9.3", + "flowbite-datepicker": "^1.3.0", + "mini-svg-data-uri": "^1.4.3" + } + }, + "node_modules/flowbite-svelte": { + "version": "1.17.4", + "resolved": "https://registry.npmjs.org/flowbite-svelte/-/flowbite-svelte-1.17.4.tgz", + "integrity": "sha512-z8e7wJZHgYN+vE6yQT/jH/U3+TkH57UdX52dBg7R2oLkhQFjgA5nAzPqeOPF46+3wZy5mJIFpZ9czxKeCQx0wg==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.4", + "@floating-ui/utils": 
"^0.2.10", + "apexcharts": "^5.3.5", + "clsx": "^2.1.1", + "date-fns": "^4.1.0", + "esm-env": "^1.2.2", + "flowbite": "^3.1.2", + "tailwind-merge": "^3.3.1", + "tailwind-variants": "^3.1.1" + }, + "peerDependencies": { + "svelte": "^5.29.0", + "tailwindcss": "^4.1.4" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } 
+ }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "16.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.4.0.tgz", + "integrity": "sha512-ob/2LcVVaVGCYN+r14cnwnoDPUufjiYgSqRhiFD0Q1iI4Odora5RE8Iv1D24hAz5oMophRGkGz+yuvQmmUMnMw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": 
"sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", + "license": "MIT" + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-reference": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": 
"MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": 
">=6" + } + }, + "node_modules/known-css-properties": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.37.0.tgz", + "integrity": "sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lightningcss": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", + "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-darwin-arm64": "1.30.1", + "lightningcss-darwin-x64": "1.30.1", + "lightningcss-freebsd-x64": "1.30.1", + "lightningcss-linux-arm-gnueabihf": "1.30.1", + "lightningcss-linux-arm64-gnu": "1.30.1", + "lightningcss-linux-arm64-musl": "1.30.1", + "lightningcss-linux-x64-gnu": "1.30.1", + "lightningcss-linux-x64-musl": "1.30.1", + "lightningcss-win32-arm64-msvc": "1.30.1", + "lightningcss-win32-x64-msvc": "1.30.1" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", + "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", + "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", + "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", + "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.1", + "resolved": 
"https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", + "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", + "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", + "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", + "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 
12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", + "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", + "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": 
"sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/magic-string": { + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + 
"node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mini-svg-data-uri": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", + "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==", + "license": "MIT", + "bin": { + "mini-svg-data-uri": "cli.js" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": 
"sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minizlib": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.1.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + 
"url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-load-config": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "dev": true, + "license": "MIT", + "dependencies": { + "lilconfig": "^2.0.5", + "yaml": "^1.10.2" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/postcss-load-config/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss-safe-parser": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-7.0.1.tgz", + "integrity": "sha512-0AioNCJZ2DPYz5ABT6bddIqlhgwhpHZ/l65YAYo0BCIn0xiDpsnTHz0gnoTGk0OXZW0JRs+cDwL8u/teRdz+8A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-safe-parser" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "postcss": "^8.4.31" + } + }, + "node_modules/postcss-scss": { + 
"version": "4.0.9", + "resolved": "https://registry.npmjs.org/postcss-scss/-/postcss-scss-4.0.9.tgz", + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-scss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.4.29" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-svelte": { + "version": "3.4.0", + "resolved": 
"https://registry.npmjs.org/prettier-plugin-svelte/-/prettier-plugin-svelte-3.4.0.tgz", + "integrity": "sha512-pn1ra/0mPObzqoIQn/vUTR3ZZI6UuZ0sHqMK5x2jMLGrs53h0sXhkVuDcrlssHwIMk7FYrMjHBPoUSyyEEDlBQ==", + "dev": true, + "license": "MIT", + "peer": true, + "peerDependencies": { + "prettier": "^3.0.0", + "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" + } + }, + "node_modules/prettier-plugin-tailwindcss": { + "version": "0.6.14", + "resolved": "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.6.14.tgz", + "integrity": "sha512-pi2e/+ZygeIqntN+vC573BcW5Cve8zUB0SSAGxqpB4f96boZF4M3phPVoOFCeypwkpRYdi7+jQ5YJJUwrkGUAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.21.3" + }, + "peerDependencies": { + "@ianvs/prettier-plugin-sort-imports": "*", + "@prettier/plugin-hermes": "*", + "@prettier/plugin-oxc": "*", + "@prettier/plugin-pug": "*", + "@shopify/prettier-plugin-liquid": "*", + "@trivago/prettier-plugin-sort-imports": "*", + "@zackad/prettier-plugin-twig": "*", + "prettier": "^3.0", + "prettier-plugin-astro": "*", + "prettier-plugin-css-order": "*", + "prettier-plugin-import-sort": "*", + "prettier-plugin-jsdoc": "*", + "prettier-plugin-marko": "*", + "prettier-plugin-multiline-arrays": "*", + "prettier-plugin-organize-attributes": "*", + "prettier-plugin-organize-imports": "*", + "prettier-plugin-sort-imports": "*", + "prettier-plugin-style-order": "*", + "prettier-plugin-svelte": "*" + }, + "peerDependenciesMeta": { + "@ianvs/prettier-plugin-sort-imports": { + "optional": true + }, + "@prettier/plugin-hermes": { + "optional": true + }, + "@prettier/plugin-oxc": { + "optional": true + }, + "@prettier/plugin-pug": { + "optional": true + }, + "@shopify/prettier-plugin-liquid": { + "optional": true + }, + "@trivago/prettier-plugin-sort-imports": { + "optional": true + }, + "@zackad/prettier-plugin-twig": { + "optional": true + }, + "prettier-plugin-astro": { + "optional": true + }, + 
"prettier-plugin-css-order": { + "optional": true + }, + "prettier-plugin-import-sort": { + "optional": true + }, + "prettier-plugin-jsdoc": { + "optional": true + }, + "prettier-plugin-marko": { + "optional": true + }, + "prettier-plugin-multiline-arrays": { + "optional": true + }, + "prettier-plugin-organize-attributes": { + "optional": true + }, + "prettier-plugin-organize-imports": { + "optional": true + }, + "prettier-plugin-sort-imports": { + "optional": true + }, + "prettier-plugin-style-order": { + "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + } + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.52.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.4.tgz", + "integrity": "sha512-CLEVl+MnPAiKh5pl4dEWSyMTpuflgNQiLGhMv8ezD5W/qP8AKvmYpCOKRRNOh7oRKnauBZ4SyeYkMS+1VSyKwQ==", + "devOptional": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.52.4", + "@rollup/rollup-android-arm64": "4.52.4", + "@rollup/rollup-darwin-arm64": "4.52.4", + "@rollup/rollup-darwin-x64": "4.52.4", 
+ "@rollup/rollup-freebsd-arm64": "4.52.4", + "@rollup/rollup-freebsd-x64": "4.52.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.52.4", + "@rollup/rollup-linux-arm-musleabihf": "4.52.4", + "@rollup/rollup-linux-arm64-gnu": "4.52.4", + "@rollup/rollup-linux-arm64-musl": "4.52.4", + "@rollup/rollup-linux-loong64-gnu": "4.52.4", + "@rollup/rollup-linux-ppc64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-gnu": "4.52.4", + "@rollup/rollup-linux-riscv64-musl": "4.52.4", + "@rollup/rollup-linux-s390x-gnu": "4.52.4", + "@rollup/rollup-linux-x64-gnu": "4.52.4", + "@rollup/rollup-linux-x64-musl": "4.52.4", + "@rollup/rollup-openharmony-arm64": "4.52.4", + "@rollup/rollup-win32-arm64-msvc": "4.52.4", + "@rollup/rollup-win32-ia32-msvc": "4.52.4", + "@rollup/rollup-win32-x64-gnu": "4.52.4", + "@rollup/rollup-win32-x64-msvc": "4.52.4", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": 
"sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz", + "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sirv": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svelte": { + "version": "5.39.11", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.39.11.tgz", + "integrity": "sha512-8MxWVm2+3YwrFbPaxOlT1bbMi6OTenrAgks6soZfiaS8Fptk4EVyRIFhJc3RpO264EeSNwgjWAdki0ufg4zkGw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "acorn": "^8.12.1", + "aria-query": "^5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "esm-env": "^1.2.1", + "esrap": "^2.1.0", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + } + }, + 
"node_modules/svelte-check": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.3.tgz", + "integrity": "sha512-RYP0bEwenDXzfv0P1sKAwjZSlaRyqBn0Fz1TVni58lqyEiqgwztTpmodJrGzP6ZT2aHl4MbTvWP6gbmQ3FOnBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "chokidar": "^4.0.1", + "fdir": "^6.2.0", + "picocolors": "^1.0.0", + "sade": "^1.7.4" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": ">=5.0.0" + } + }, + "node_modules/svelte-eslint-parser": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-1.3.3.tgz", + "integrity": "sha512-oTrDR8Z7Wnguut7QH3YKh7JR19xv1seB/bz4dxU5J/86eJtZOU4eh0/jZq4dy6tAlz/KROxnkRQspv5ZEt7t+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-scope": "^8.2.0", + "eslint-visitor-keys": "^4.0.0", + "espree": "^10.0.0", + "postcss": "^8.4.49", + "postcss-scss": "^4.0.9", + "postcss-selector-parser": "^7.0.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/svelte-eslint-parser/node_modules/postcss-selector-parser": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", + "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/svelte/node_modules/is-reference": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6" + } + }, + "node_modules/tailwind-merge": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.3.1.tgz", + "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==", + "license": "MIT", + "peer": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwind-variants": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/tailwind-variants/-/tailwind-variants-3.1.1.tgz", + "integrity": "sha512-ftLXe3krnqkMHsuBTEmaVUXYovXtPyTK7ckEfDRXS8PBZx0bAUas+A0jYxuKA5b8qg++wvQ3d2MQ7l/xeZxbZQ==", + "license": "MIT", + "engines": { + "node": ">=16.x", + "pnpm": ">=7.x" + }, + "peerDependencies": { + "tailwind-merge": ">=3.0.0", + "tailwindcss": "*" + }, + "peerDependenciesMeta": { + "tailwind-merge": { + "optional": true + } + } + }, + "node_modules/tailwindcss": { + "version": "4.1.14", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.14.tgz", + "integrity": "sha512-b7pCxjGO98LnxVkKjaZSDeNuljC4ueKUddjENJOADtubtdo8llTaJy7HwBMeLNSSo2N5QIAgklslK1+Ir8r6CA==", + "license": "MIT", + "peer": true + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tar": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.1.tgz", + "integrity": 
"sha512-nlGpxf+hv0v7GkWBK2V9spgactGOp0qvfWRxUMjqHyzrt3SgwE48DIv/FhqPHJYLHpgW1opq3nERbz5Anq7n1g==", + "dev": true, + "license": "ISC", + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": 
"https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "8.46.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.46.0.tgz", + "integrity": "sha512-6+ZrB6y2bT2DX3K+Qd9vn7OFOJR+xSLDj+Aw/N3zBwUt27uTw2sw2TE2+UcY1RiyBZkaGbTkVg9SSdPNUG6aUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.46.0", + "@typescript-eslint/parser": "8.46.0", + "@typescript-eslint/typescript-estree": "8.46.0", + "@typescript-eslint/utils": "8.46.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "7.1.9", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.9.tgz", + "integrity": "sha512-4nVGliEpxmhCL8DslSAUdxlB6+SMrhB0a1v5ijlh1xB1nEPuy1mxaHxysVucLHuWryAxLWg6a5ei+U4TLn/rFg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vitefu": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", + "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "dev": true, + "license": "MIT", + "workspaces": [ + "tests/deps/*", + "tests/projects/*", + "tests/projects/workspace/packages/*" + ], + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yallist": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zimmerframe": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.4.tgz", + "integrity": "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==", + "license": "MIT" + } + } +} diff --git a/auth-frontend/package.json b/auth-frontend/package.json new file mode 100644 index 0000000..0a74b5f --- /dev/null +++ b/auth-frontend/package.json @@ -0,0 +1,45 @@ +{ + "name": "nexus-5-auth-frontend", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "prepare": "svelte-kit sync || echo ''", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "format": "prettier --write .", + "lint": "prettier --check . && eslint ." + }, + "devDependencies": { + "@eslint/compat": "^1.4.0", + "@eslint/js": "^9.36.0", + "@sveltejs/adapter-node": "^5.3.2", + "@sveltejs/kit": "^2.43.2", + "@sveltejs/vite-plugin-svelte": "^6.2.0", + "@tailwindcss/forms": "^0.5.10", + "@tailwindcss/typography": "^0.5.18", + "@tailwindcss/vite": "^4.1.13", + "@types/node": "^22", + "eslint": "^9.36.0", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-svelte": "^3.12.4", + "globals": "^16.4.0", + "prettier": "^3.6.2", + "prettier-plugin-svelte": "^3.4.0", + "prettier-plugin-tailwindcss": "^0.6.14", + "svelte": "^5.39.5", + "svelte-check": "^4.3.2", + "tailwindcss": "^4.1.13", + "typescript": "^5.9.2", + "typescript-eslint": "^8.44.1", + "vite": "^7.1.7" + }, + "dependencies": { + "@ory/client": "^1.22.5", + "axios": "^1.12.2", + "flowbite-svelte": "^1.17.4" + } +} diff --git a/auth-frontend/src/app.css b/auth-frontend/src/app.css new file mode 100644 index 0000000..60856a0 --- /dev/null +++ b/auth-frontend/src/app.css @@ -0,0 +1,314 @@ +@import 'tailwindcss'; +@plugin '@tailwindcss/forms'; +@plugin '@tailwindcss/typography'; + +/* 
============================================ + THEME COLOR SYSTEM + ============================================ + Primary: Blue + Secondary: Green + Primary Accent: Orange + Secondary Accent: Purple + Alert/Error: Red + Warning: Yellow + Success: Green (distinct from secondary) + ============================================ */ + +@theme { + /* Primary - Blue (muted/professional) */ + --color-primary-50: #f0f6fc; + --color-primary-100: #dbe8f7; + --color-primary-200: #bdd4f0; + --color-primary-300: #8fb8e5; + --color-primary-400: #5a94d6; + --color-primary-500: #3b78c4; + --color-primary-600: #2d5fa6; + --color-primary-700: #274d87; + --color-primary-800: #254270; + --color-primary-900: #23395e; + --color-primary-950: #18253f; + + /* Secondary - Green (muted/professional) */ + --color-secondary-50: #f2f8f4; + --color-secondary-100: #e0efe4; + --color-secondary-200: #c3dfcc; + --color-secondary-300: #96c7a6; + --color-secondary-400: #65a97b; + --color-secondary-500: #458c5e; + --color-secondary-600: #33714a; + --color-secondary-700: #2a5b3d; + --color-secondary-800: #244933; + --color-secondary-900: #1f3c2b; + --color-secondary-950: #102118; + + /* Accent Primary - Orange (muted/professional) */ + --color-accent-50: #fdf6f0; + --color-accent-100: #fbe9db; + --color-accent-200: #f6d0b6; + --color-accent-300: #f0b088; + --color-accent-400: #e88958; + --color-accent-500: #e16a36; + --color-accent-600: #d2522b; + --color-accent-700: #ae3f26; + --color-accent-800: #8b3425; + --color-accent-900: #712e22; + --color-accent-950: #3d1510; + + /* Accent Secondary - Purple (muted/professional) */ + --color-accent2-50: #f6f4fb; + --color-accent2-100: #ede9f7; + --color-accent2-200: #ddd5f0; + --color-accent2-300: #c5b6e4; + --color-accent2-400: #a78fd4; + --color-accent2-500: #8b6bc2; + --color-accent2-600: #7652ab; + --color-accent2-700: #634391; + --color-accent2-800: #533978; + --color-accent2-900: #463162; + --color-accent2-950: #2c1c42; + + /* Error/Alert - Red 
(muted/professional) */ + --color-error-50: #fdf3f3; + --color-error-100: #fce4e4; + --color-error-200: #fbcdcd; + --color-error-300: #f6a8a8; + --color-error-400: #ee7676; + --color-error-500: #e14a4a; + --color-error-600: #cd2d2d; + --color-error-700: #ac2323; + --color-error-800: #8e2121; + --color-error-900: #772222; + --color-error-950: #400d0d; + + /* Warning - Yellow (muted/professional) */ + --color-warning-50: #fdfaeb; + --color-warning-100: #faf2c9; + --color-warning-200: #f5e394; + --color-warning-300: #efd05b; + --color-warning-400: #e8bb30; + --color-warning-500: #d8a01d; + --color-warning-600: #ba7c16; + --color-warning-700: #955916; + --color-warning-800: #7b4619; + --color-warning-900: #693a1a; + --color-warning-950: #3d1e0a; + + /* Success - Green (distinct from secondary, muted) */ + --color-success-50: #f0fdf2; + --color-success-100: #dcfce2; + --color-success-200: #bbf7c6; + --color-success-300: #86ef9b; + --color-success-400: #4ade6a; + --color-success-500: #22c546; + --color-success-600: #16a336; + --color-success-700: #16802e; + --color-success-800: #176528; + --color-success-900: #155324; + --color-success-950: #052e10; + + /* Neutral/Surface colors for theming */ + --color-surface-50: #f8fafc; + --color-surface-100: #f1f5f9; + --color-surface-200: #e2e8f0; + --color-surface-300: #cbd5e1; + --color-surface-400: #94a3b8; + --color-surface-500: #64748b; + --color-surface-600: #475569; + --color-surface-700: #334155; + --color-surface-800: #1e293b; + --color-surface-900: #0f172a; + --color-surface-950: #020617; +} + +/* ============================================ + LIGHT THEME (default) + ============================================ */ +:root { + color-scheme: light; + + /* Background colors - subtle blue tint for softer appearance */ + --theme-bg: var(--color-primary-50); + --theme-bg-secondary: #e8f0f8; + --theme-bg-tertiary: var(--color-primary-100); + + /* Text colors */ + --theme-text: var(--color-surface-900); + --theme-text-secondary: 
var(--color-surface-600); + --theme-text-muted: var(--color-surface-400); + + /* Border colors */ + --theme-border: var(--color-surface-200); + --theme-border-hover: var(--color-surface-300); + + /* Interactive states */ + --theme-hover: var(--color-primary-100); + --theme-active: var(--color-primary-200); + + /* Shadows */ + --theme-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); + --theme-shadow-md: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --theme-shadow-lg: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + + /* Card/Panel backgrounds - subtle blue tint to match overall theme */ + --theme-card: #f5f8fc; + --theme-card-hover: #edf2f9; +} + +/* ============================================ + DARK THEME + ============================================ */ +.dark { + color-scheme: dark; + + /* Background colors */ + --theme-bg: var(--color-surface-900); + --theme-bg-secondary: var(--color-surface-800); + --theme-bg-tertiary: var(--color-surface-700); + + /* Text colors */ + --theme-text: var(--color-surface-50); + --theme-text-secondary: var(--color-surface-300); + --theme-text-muted: var(--color-surface-500); + + /* Border colors */ + --theme-border: var(--color-surface-700); + --theme-border-hover: var(--color-surface-600); + + /* Interactive states */ + --theme-hover: var(--color-surface-800); + --theme-active: var(--color-surface-700); + + /* Shadows (more subtle in dark mode) */ + --theme-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.3), 0 1px 2px -1px rgb(0 0 0 / 0.3); + --theme-shadow-md: 0 4px 6px -1px rgb(0 0 0 / 0.3), 0 2px 4px -2px rgb(0 0 0 / 0.3); + --theme-shadow-lg: 0 10px 15px -3px rgb(0 0 0 / 0.3), 0 4px 6px -4px rgb(0 0 0 / 0.3); + + /* Card/Panel backgrounds */ + --theme-card: var(--color-surface-800); + --theme-card-hover: var(--color-surface-700); +} + +/* ============================================ + BASE STYLES + ============================================ */ +html { + background-color: 
var(--theme-bg); + color: var(--theme-text); + transition: + background-color 0.2s ease, + color 0.2s ease; +} + +body { + background-color: var(--theme-bg); + min-height: 100vh; +} + +/* ============================================ + UTILITY CLASSES + ============================================ */ +@utility bg-theme { + background-color: var(--theme-bg); +} + +@utility bg-theme-secondary { + background-color: var(--theme-bg-secondary); +} + +@utility bg-theme-tertiary { + background-color: var(--theme-bg-tertiary); +} + +@utility bg-theme-card { + background-color: var(--theme-card); +} + +@utility text-theme { + color: var(--theme-text); +} + +@utility text-theme-secondary { + color: var(--theme-text-secondary); +} + +@utility text-theme-muted { + color: var(--theme-text-muted); +} + +@utility border-theme { + border-color: var(--theme-border); +} + +@utility border-theme-hover { + border-color: var(--theme-border-hover); +} + +@utility shadow-theme { + box-shadow: var(--theme-shadow); +} + +@utility shadow-theme-md { + box-shadow: var(--theme-shadow-md); +} + +@utility shadow-theme-lg { + box-shadow: var(--theme-shadow-lg); +} + +/* ============================================ + COMPONENT STYLES + ============================================ */ + +/* Cards */ +@utility card { + @apply rounded-lg border; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +@utility card-padded { + @apply rounded-lg border p-6; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +/* Buttons */ +@utility btn-primary { + @apply inline-block rounded-lg bg-primary-500 px-4 py-2 font-medium text-white transition-colors hover:bg-primary-600 active:bg-primary-700; +} + +@utility btn-danger { + @apply inline-block rounded-lg bg-error-600 px-4 py-2 text-sm font-medium text-white transition-colors hover:bg-error-700 active:bg-error-800; +} + +/* Form Input Borders (for checkboxes, radios) */ +@utility border-input { + @apply 
border-surface-300 dark:border-surface-600; +} + +/* Interactive hover/active - unifies hover states */ +@utility interactive { + @apply transition-colors hover:bg-black/5 active:bg-black/10 dark:hover:bg-white/10 dark:active:bg-white/15; +} + +/* Alert utilities */ +@utility alert-error { + @apply rounded-lg border border-error-400 bg-error-50 p-3 text-sm text-error-700 dark:border-error-600 dark:bg-error-900/20 dark:text-error-400; +} + +@utility alert-success { + @apply rounded-lg border border-success-400 bg-success-50 p-3 text-sm text-success-700 dark:border-success-600 dark:bg-success-900/20 dark:text-success-400; +} + +/* Form utilities */ +@utility input-base { + @apply w-full rounded-lg border border-theme bg-theme px-3 py-2 text-theme placeholder:text-theme-muted focus:border-primary-500 focus:ring-1 focus:ring-primary-500 disabled:cursor-not-allowed disabled:opacity-50; +} + +@utility textarea-base { + @apply w-full rounded-lg border border-theme bg-theme px-3 py-2 text-theme placeholder:text-theme-muted focus:border-primary-500 focus:ring-1 focus:ring-primary-500 disabled:cursor-not-allowed disabled:opacity-50; +} + +@utility form-label { + @apply mb-1.5 block text-sm font-medium text-theme; +} diff --git a/auth-frontend/src/app.d.ts b/auth-frontend/src/app.d.ts new file mode 100644 index 0000000..da08e6d --- /dev/null +++ b/auth-frontend/src/app.d.ts @@ -0,0 +1,13 @@ +// See https://svelte.dev/docs/kit/types#app.d.ts +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface PageState {} + // interface Platform {} + } +} + +export {}; diff --git a/auth-frontend/src/app.html b/auth-frontend/src/app.html new file mode 100644 index 0000000..d501564 --- /dev/null +++ b/auth-frontend/src/app.html @@ -0,0 +1,12 @@ + + + + + + NexAuth + %sveltekit.head% + + +
%sveltekit.body%
+ + diff --git a/auth-frontend/src/lib/assets/favicon.svg b/auth-frontend/src/lib/assets/favicon.svg new file mode 100644 index 0000000..cc5dc66 --- /dev/null +++ b/auth-frontend/src/lib/assets/favicon.svg @@ -0,0 +1 @@ +svelte-logo \ No newline at end of file diff --git a/auth-frontend/src/lib/components/FlowForm.svelte b/auth-frontend/src/lib/components/FlowForm.svelte new file mode 100644 index 0000000..2dd1c20 --- /dev/null +++ b/auth-frontend/src/lib/components/FlowForm.svelte @@ -0,0 +1,284 @@ + + +
+ {#if messages.length > 0} +
+ {#each messages as message} + {#if message.type === 'error'} +
+ {message.text} +
+ {:else if message.type === 'success'} +
+ {message.text} +
+ {:else} +
+ {message.text} +
+ {/if} + {/each} +
+ {/if} + +
+ + {#if hasPasswordField && usernameValue()} + + {/if} + {#each nodes as node (node.attributes)} + + {/each} + +
diff --git a/auth-frontend/src/lib/components/FormField.svelte b/auth-frontend/src/lib/components/FormField.svelte new file mode 100644 index 0000000..a9dd4b8 --- /dev/null +++ b/auth-frontend/src/lib/components/FormField.svelte @@ -0,0 +1,243 @@ + + +{#if shouldHideField()} + +{:else} +
+ {#if node.type === 'script' && scriptAttrs} + + {:else if node.type === 'img' && imageAttrs} + +
+ {#if label} +

{label}

+ {/if} + QR Code +
+ {:else if node.type === 'text' && textAttrs} + +
+

{textAttrs.text.text}

+
+ {:else if node.type === 'input' && inputAttrs} + + {#if inputAttrs.type === 'hidden'} + + {:else if inputAttrs.type === 'submit' || inputAttrs.type === 'button'} + + {:else if isProfileTypeSelect()} + + + + {:else if inputAttrs.type === 'checkbox'} +
+ + +
+ {:else} + + + + {/if} + {/if} + + {#if messages.length > 0} +
+ {#each messages as message} +

+ {message.text} +

+ {/each} +
+ {/if} +
+{/if} + + diff --git a/auth-frontend/src/lib/components/SettingsProfileForm.svelte b/auth-frontend/src/lib/components/SettingsProfileForm.svelte new file mode 100644 index 0000000..26e8c48 --- /dev/null +++ b/auth-frontend/src/lib/components/SettingsProfileForm.svelte @@ -0,0 +1,159 @@ + + +
+ + + + {#if error} +
+

{error}

+
+ {/if} + + {#if success} +
+

Profile updated successfully!

+
+ {/if} + + +
+ + +
+ + +
+ + +
+ + +
+ + +
+ + +
+ + +
+ + + +
diff --git a/auth-frontend/src/lib/components/modals/IdentityCreateModal.svelte b/auth-frontend/src/lib/components/modals/IdentityCreateModal.svelte new file mode 100644 index 0000000..5769bbb --- /dev/null +++ b/auth-frontend/src/lib/components/modals/IdentityCreateModal.svelte @@ -0,0 +1,118 @@ + + + + {#snippet header()} +

Create New Identity

+ {/snippet} + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+
+ + +
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+
+
+ + {#snippet footer()} + + + {/snippet} +
diff --git a/auth-frontend/src/lib/components/modals/IdentityDetailsModal.svelte b/auth-frontend/src/lib/components/modals/IdentityDetailsModal.svelte new file mode 100644 index 0000000..b0ad93d --- /dev/null +++ b/auth-frontend/src/lib/components/modals/IdentityDetailsModal.svelte @@ -0,0 +1,116 @@ + + + + {#snippet header()} +

Identity Details

+ {/snippet} + +
+ +
+
+ Email +

{identity?.traits?.email || 'N/A'}

+
+
+ Name +

+ {#if identity?.traits?.name} + {identity.traits.name.first || ''} {identity.traits.name.last || ''} + {:else} + N/A + {/if} +

+
+
+ State +

{identity?.state || 'N/A'}

+
+
+ Created +

+ {identity?.created_at ? new Date(identity.created_at).toLocaleString() : 'N/A'} +

+
+
+ + + {#if identity?.credentials} +
+

Authentication Methods

+
+ {#each Object.entries(identity.credentials) as [type, credential]} +
+
+
+ {type} + {#if credential.identifiers && credential.identifiers.length > 0} +
+ {#each credential.identifiers as identifier} + {identifier} + {/each} +
+ {/if} + {#if credential.created_at} +

+ Added {new Date(credential.created_at).toLocaleDateString()} +

+ {/if} +
+ {#if type !== 'password' && type !== 'code'} + + {/if} +
+
+ {/each} +
+
+ {/if} + + +
+ + View Raw JSON + +
+
{JSON.stringify(identity, null, 2)}
+
+
+
+ + {#snippet footer()} + + {/snippet} +
diff --git a/auth-frontend/src/lib/components/modals/IdentityEditModal.svelte b/auth-frontend/src/lib/components/modals/IdentityEditModal.svelte new file mode 100644 index 0000000..0c87241 --- /dev/null +++ b/auth-frontend/src/lib/components/modals/IdentityEditModal.svelte @@ -0,0 +1,183 @@ + + + + {#snippet header()} +

Edit Identity

+ {/snippet} + +
+ {#if error} +
+

{error}

+
+ {/if} + + {#if identity} +
+ +
+ + +
+ + +
+
+ + +
+
+ + +
+
+ + +
+ + +

Optional

+
+ + +
+ + +

Determines account type and permissions

+
+ + +
+ + +
+ + +
+

System Metadata (Public)

+

+ Read-only for users, editable by admin. Links to Django backend. +

+ +
+
+ + +
+
+
+
+ {/if} +
+ + {#snippet footer()} + + + {/snippet} +
diff --git a/auth-frontend/src/lib/components/modals/IdentitySessionsModal.svelte b/auth-frontend/src/lib/components/modals/IdentitySessionsModal.svelte new file mode 100644 index 0000000..4f77825 --- /dev/null +++ b/auth-frontend/src/lib/components/modals/IdentitySessionsModal.svelte @@ -0,0 +1,122 @@ + + + + {#snippet header()} +
+

+ Sessions for {data?.identity.traits?.email || 'User'} +

+

+ {data?.sessions.length || 0} active + {data?.sessions.length === 1 ? 'session' : 'sessions'} +

+
+ {/snippet} + +
+ {#if data && data.sessions.length === 0} +

No active sessions for this user.

+ {:else if data} +
+ + + + + + + + + + + {#each data.sessions as session (session.id)} + + + + + + + {/each} + +
+ Session ID + + Issued At + + Expires At + + Actions +
+ {session.id.substring(0, 12)}... + + {new Date(session.issued_at || '').toLocaleString()} + + {new Date(session.expires_at || '').toLocaleString()} + + + + +
+
+ {/if} +
+ + {#snippet footer()} + + {/snippet} +
diff --git a/auth-frontend/src/lib/components/modals/MessageDetailsModal.svelte b/auth-frontend/src/lib/components/modals/MessageDetailsModal.svelte new file mode 100644 index 0000000..9b4d0d8 --- /dev/null +++ b/auth-frontend/src/lib/components/modals/MessageDetailsModal.svelte @@ -0,0 +1,40 @@ + + + + {#snippet header()} +

Message Details

+ {/snippet} + +
+
{JSON.stringify(message, null, 2)}
+
+ + {#snippet footer()} + + {/snippet} +
diff --git a/auth-frontend/src/lib/components/modals/SessionDetailsModal.svelte b/auth-frontend/src/lib/components/modals/SessionDetailsModal.svelte new file mode 100644 index 0000000..6117d21 --- /dev/null +++ b/auth-frontend/src/lib/components/modals/SessionDetailsModal.svelte @@ -0,0 +1,40 @@ + + + + {#snippet header()} +

Session Details

+ {/snippet} + +
+
{JSON.stringify(session, null, 2)}
+
+ + {#snippet footer()} + + {/snippet} +
diff --git a/auth-frontend/src/lib/flows.ts b/auth-frontend/src/lib/flows.ts new file mode 100644 index 0000000..48b8e73 --- /dev/null +++ b/auth-frontend/src/lib/flows.ts @@ -0,0 +1,96 @@ +import { kratosServerClient } from './kratos-server'; +import type { + LoginFlow, + RegistrationFlow, + RecoveryFlow, + VerificationFlow, + SettingsFlow +} from '@ory/client'; + +// Utility to (a) get flow if id present, (b) create the new browser flow if not, and +// (c) transparently re-init on stale/expired/forbidden flows similar to Ory Kratos UI +async function getOrCreateFlow< + T extends LoginFlow | RegistrationFlow | RecoveryFlow | VerificationFlow | SettingsFlow +>(params: { + flowId: string | null; + create: () => Promise; + get: () => Promise; + redirectBasePath: string; // e.g. '/login' + searchParams?: URLSearchParams; + excludeParams?: string[]; // Additional params to exclude from redirect (e.g., 'code' for verification/recovery) +}): Promise<{ flow: T; redirectTo?: string }> { + const { flowId, create, get, redirectBasePath, searchParams, excludeParams = [] } = params; + const buildRedirect = (flow: T) => { + const sp = new URLSearchParams(); + // Parameters to exclude from redirect URL + const excluded = new Set(['flow', ...excludeParams]); + // Only copy non-excluded search params + if (searchParams) { + for (const [key, value] of searchParams.entries()) { + if (!excluded.has(key)) { + sp.set(key, value); + } + } + } + sp.set('flow', flow.id); + return `${redirectBasePath}?${sp.toString()}`; + }; + + try { + if (flowId) { + const flow = await get(); + return { flow }; + } + + const flow = await create(); + return { flow, redirectTo: buildRedirect(flow) }; + } catch (e: any) { + // Handle common Kratos flow errors by re-initializing the flow + // 410 (Gone) - flow expired; 403 (Forbidden) - CSRF or not allowed; 400 (Bad Request) - invalid id + if ([410, 403, 400].includes(e?.status || e?.response?.status)) { + try { + const flow = await create(); + return { 
flow, redirectTo: buildRedirect(flow) }; + } catch (ee: any) { + throw ee; + } + } + throw e; + } +} + +export async function loadRecoveryFlow(flowId: string | null, searchParams?: URLSearchParams) { + return getOrCreateFlow({ + flowId, + create: async () => (await kratosServerClient.createBrowserRecoveryFlow()).data, + get: async () => (await kratosServerClient.getRecoveryFlow({ id: flowId! })).data, + redirectBasePath: '/recovery', + searchParams, + excludeParams: ['code'] // Don't preserve recovery codes across flow recreations + }); +} + +export async function loadVerificationFlow(flowId: string | null, searchParams?: URLSearchParams) { + return getOrCreateFlow({ + flowId, + create: async () => (await kratosServerClient.createBrowserVerificationFlow()).data, + get: async () => (await kratosServerClient.getVerificationFlow({ id: flowId! })).data, + redirectBasePath: '/verification', + searchParams, + excludeParams: ['code'] // Don't preserve verification codes across flow recreations + }); +} + +export async function loadSettingsFlow( + flowId: string | null, + cookie: string, + searchParams?: URLSearchParams +) { + return getOrCreateFlow({ + flowId, + create: async () => (await kratosServerClient.createBrowserSettingsFlow({ cookie })).data, + get: async () => (await kratosServerClient.getSettingsFlow({ id: flowId!, cookie })).data, + redirectBasePath: '/settings', + searchParams + }); +} diff --git a/auth-frontend/src/lib/kratos-server.ts b/auth-frontend/src/lib/kratos-server.ts new file mode 100644 index 0000000..c1ec3e6 --- /dev/null +++ b/auth-frontend/src/lib/kratos-server.ts @@ -0,0 +1,11 @@ +import { Configuration, FrontendApi } from '@ory/client'; +import { PUBLIC_KRATOS_URL } from '$env/static/public'; +import { KRATOS_SERVER_URL } from '$env/static/private'; + +// Server-side client (without browser-specific settings) +// Used only for session validation in server-side loaders +export const kratosServerClient = new FrontendApi( + new Configuration({ + 
basePath: KRATOS_SERVER_URL || PUBLIC_KRATOS_URL || 'http://localhost:7200' + }) +); diff --git a/auth-frontend/src/lib/kratos.ts b/auth-frontend/src/lib/kratos.ts new file mode 100644 index 0000000..c8558d6 --- /dev/null +++ b/auth-frontend/src/lib/kratos.ts @@ -0,0 +1,24 @@ +import { Configuration, FrontendApi, IdentityApi } from '@ory/client'; +import { PUBLIC_KRATOS_URL } from '$env/static/public'; + +// Browser-side client (with credentials) +// All browser calls (including admin API) go through Oathkeeper at PUBLIC_KRATOS_URL +export const kratosClient = new FrontendApi( + new Configuration({ + basePath: PUBLIC_KRATOS_URL || 'http://localhost:7200', + baseOptions: { + withCredentials: true + } + }) +); + +// Admin client for administrative operations (with credentials) +// Uses the same proxy but with IdentityApi for admin operations +export const kratosAdminClient = new IdentityApi( + new Configuration({ + basePath: PUBLIC_KRATOS_URL || 'http://localhost:7200', + baseOptions: { + withCredentials: true + } + }) +); diff --git a/auth-frontend/src/lib/stores/theme.svelte.ts b/auth-frontend/src/lib/stores/theme.svelte.ts new file mode 100644 index 0000000..b631a7a --- /dev/null +++ b/auth-frontend/src/lib/stores/theme.svelte.ts @@ -0,0 +1,79 @@ +import { browser } from '$app/environment'; + +type Theme = 'light' | 'dark' | 'system'; + +const STORAGE_KEY = 'theme-preference'; + +function getSystemTheme(): 'light' | 'dark' { + if (!browser) return 'light'; + return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'; +} + +function getStoredTheme(): Theme { + if (!browser) return 'system'; + const stored = localStorage.getItem(STORAGE_KEY); + if (stored === 'light' || stored === 'dark' || stored === 'system') { + return stored; + } + return 'system'; +} + +function createThemeStore() { + let preference = $state(getStoredTheme()); + let resolved = $derived<'light' | 'dark'>( + preference === 'system' ? 
getSystemTheme() : preference + ); + + function applyTheme(theme: 'light' | 'dark') { + if (!browser) return; + document.documentElement.classList.remove('light', 'dark'); + document.documentElement.classList.add(theme); + } + + function setTheme(theme: Theme) { + preference = theme; + + if (browser) { + localStorage.setItem(STORAGE_KEY, theme); + applyTheme(resolved); + } + } + + function toggle() { + const newTheme = resolved === 'light' ? 'dark' : 'light'; + setTheme(newTheme); + } + + function init() { + if (!browser) return; + + applyTheme(resolved); + + const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)'); + const handleChange = (e: MediaQueryListEvent) => { + if (preference === 'system') { + resolved = e.matches ? 'dark' : 'light'; + applyTheme(resolved); + } + }; + + mediaQuery.addEventListener('change', handleChange); + } + + return { + get preference() { + return preference; + }, + get resolved() { + return resolved; + }, + get isDark() { + return resolved === 'dark'; + }, + setTheme, + toggle, + init + }; +} + +export const theme = createThemeStore(); diff --git a/auth-frontend/src/lib/utils.ts b/auth-frontend/src/lib/utils.ts new file mode 100644 index 0000000..2afef26 --- /dev/null +++ b/auth-frontend/src/lib/utils.ts @@ -0,0 +1,52 @@ +import type { UiNode, UiNodeInputAttributes } from '@ory/client'; + +export function getNodeLabel(node: UiNode): string { + const attrs = node.attributes as UiNodeInputAttributes; + if (node.meta.label?.text) { + return node.meta.label.text; + } + return attrs.name || ''; +} + +export function filterNodesByGroups(nodes: UiNode[], ...groups: string[]): UiNode[] { + return nodes.filter((node) => groups.includes(node.group)); +} + +/** + * Helper function to set nested property in an object using dot notation + * Example: setNestedProperty(obj, 'traits.name.first', 'John') + */ +export function setNestedProperty(obj: any, path: string, value: any): void { + const keys = path.split('.'); + let current = obj; 
+ + for (let i = 0; i < keys.length - 1; i++) { + const key = keys[i]; + if (!(key in current)) { + current[key] = {}; + } + current = current[key]; + } + + current[keys[keys.length - 1]] = value; +} + +/** + * Convert FormData to JSON object for Ory SDK submission + * This handles nested properties (like traits.email, traits.name.first) + * and includes the csrf_token which must be present in JSON requests + */ +export function formDataToJson(formData: FormData): Record { + const json: Record = {}; + + for (const [key, value] of formData.entries()) { + // Handle nested object notation (e.g., traits.email, traits.name.first) + if (key.includes('.')) { + setNestedProperty(json, key, value); + } else { + json[key] = value; + } + } + + return json; +} diff --git a/auth-frontend/src/routes/+layout.server.ts b/auth-frontend/src/routes/+layout.server.ts new file mode 100644 index 0000000..2e69348 --- /dev/null +++ b/auth-frontend/src/routes/+layout.server.ts @@ -0,0 +1,23 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import { ADMIN_USER_ID } from '$env/static/private'; +import type { LayoutServerLoad } from './$types'; + +export const load: LayoutServerLoad = async ({ cookies }) => { + try { + const sessionToken = cookies.get('ory_kratos_session'); + + if (!sessionToken) { + return { session: null, isAdmin: false }; + } + + const { data: session } = await kratosServerClient.toSession({ + cookie: `ory_kratos_session=${sessionToken}` + }); + + const isAdmin = session?.identity?.id === ADMIN_USER_ID; + + return { session, isAdmin }; + } catch { + return { session: null, isAdmin: false }; + } +}; diff --git a/auth-frontend/src/routes/+layout.svelte b/auth-frontend/src/routes/+layout.svelte new file mode 100644 index 0000000..a072944 --- /dev/null +++ b/auth-frontend/src/routes/+layout.svelte @@ -0,0 +1,187 @@ + + +
+ + + + {#if menuOpen} + + + + +
+
+ +
+ + Home + + {#if data.session} + + Settings + + {#if data.isAdmin} + + Admin + + {/if} + {/if} +
+
+
+ {/if} + +
+ {@render children()} +
+
diff --git a/auth-frontend/src/routes/+page.svelte b/auth-frontend/src/routes/+page.svelte new file mode 100644 index 0000000..5fd46a2 --- /dev/null +++ b/auth-frontend/src/routes/+page.svelte @@ -0,0 +1,135 @@ + + + + Home - Example App + + +{#if session} +
+
+

Welcome back!

+ +
+
+

Account Information

+
+
+
Email
+
{traits?.email || 'Not set'}
+
+ {#if traits?.name?.first || traits?.name?.last} +
+
Name
+
+ {traits.name.first} + {traits.name.last} +
+
+ {/if} +
+
User ID
+
{identity?.id}
+
+
+
Session Active
+
+ {session.active ? 'Yes' : 'No'} +
+
+ {#if session.expires_at} +
+
Session Expires
+
+ {new Date(session.expires_at).toLocaleString()} +
+
+ {/if} +
+
Authentication Level
+
+ {session.authenticator_assurance_level === 'aal2' + ? 'Two-Factor' + : 'Single-Factor'} +
+
+
+
+ + {#if session.authentication_methods && session.authentication_methods.length > 0} +
+

Authentication Methods

+
    + {#each session.authentication_methods as method} +
  • + + {method.method} (completed at {new Date( + method.completed_at || '' + ).toLocaleString()}) +
  • + {/each} +
+
+ {/if} + + +
+
+
+{:else} +
+

+ Welcome to Example App +

+

+ A secure authentication platform built with Ory Kratos and SvelteKit +

+ + +
+
+

Secure Authentication

+

+ Powered by Ory Kratos with support for passwords, 2FA, and passwordless login +

+
+
+

Account Recovery

+

+ Easy password recovery and email verification flows to keep your account secure +

+
+
+

Self-Service Settings

+

+ Manage your profile, change passwords, and configure two-factor authentication +

+
+
+
+{/if} diff --git a/auth-frontend/src/routes/admin/+page.server.ts b/auth-frontend/src/routes/admin/+page.server.ts new file mode 100644 index 0000000..c5a0cd0 --- /dev/null +++ b/auth-frontend/src/routes/admin/+page.server.ts @@ -0,0 +1,31 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import { redirect } from '@sveltejs/kit'; +import { ADMIN_USER_ID } from '$env/static/private'; +import type { PageServerLoad } from './$types'; + +export const load: PageServerLoad = async ({ cookies }) => { + const sessionToken = cookies.get('ory_kratos_session'); + + if (!sessionToken) { + redirect(303, '/login?return_to=/admin'); + } + + try { + const { data: session } = await kratosServerClient.toSession({ + cookie: `ory_kratos_session=${sessionToken}` + }); + + // Check if the user is the admin + if (session.identity?.id !== ADMIN_USER_ID) { + redirect(303, '/?error=unauthorized'); + } + + return { + session, + isAdmin: true + }; + } catch { + // If session validation fails, redirect to login + redirect(303, '/login?return_to=/admin'); + } +}; diff --git a/auth-frontend/src/routes/admin/+page.svelte b/auth-frontend/src/routes/admin/+page.svelte new file mode 100644 index 0000000..d7d37f1 --- /dev/null +++ b/auth-frontend/src/routes/admin/+page.svelte @@ -0,0 +1,1444 @@ + + + + Admin Dashboard - Example App + + +
+
+
+

Admin Dashboard

+

+ Logged in as: {data.session?.identity?.traits?.email || 'Unknown'} +

+
+ + +
+ +
+ + + {#if activeTab === 'identities'} +
+
+
+

User Identities

+
+ + + +
+
+
+ +
+
+ +
+ {#if identityLoading} +

Loading identities...

+ {:else if identityError} +
+

{identityError}

+
+ {:else if filteredIdentities.length === 0} +

No identities found.

+ {:else} +
+ + + + + + + + + + + + + {#each filteredIdentities as identity (identity.id)} + + + + + + + + + {/each} + +
+ Email + + Name + + Verified + + Created + + State + + Actions +
+ {identity.traits?.email || 'N/A'} + + {#if identity.traits?.name} + {identity.traits.name.first || ''} {identity.traits.name.last || ''} + {:else} + N/A + {/if} + + {#if hasUnverifiedEmail(identity)} + + Unverified + + {:else} + + Verified + + {/if} + + {new Date(identity.created_at || '').toLocaleDateString()} + + + {identity.state || 'unknown'} + + + + + {#if hasUnverifiedEmail(identity)} + + {/if} + + + +
+
+ + + {#if browser && !identitySearch} +
+
+ Page {identityPage + 1} ({identities.length} identities) +
+
+ + +
+
+ {/if} + {/if} +
+
+ {/if} + + + {#if activeTab === 'sessions'} +
+
+
+

Active Sessions

+ +
+
+ +
+ {#if sessionLoading} +

Loading sessions...

+ {:else if sessionError} +
+

{sessionError}

+
+ {:else if sessions.length === 0} +

No active sessions found.

+ {:else} +
+ + + + + + + + + + + + {#each sessions as session (session.id)} + + + + + + + + {/each} + +
+ User Email + + Session ID + + Issued At + + Expires At + + Actions +
+ {session.identity?.traits?.email || 'N/A'} + + {session.id.substring(0, 8)}... + + {new Date(session.issued_at || '').toLocaleString()} + + {new Date(session.expires_at || '').toLocaleString()} + + + + +
+
+ + + {#if browser} +
+
+ Page {sessionPage + 1} ({sessions.length} sessions) +
+
+ + +
+
+ {/if} + {/if} +
+
+ {/if} + + + {#if activeTab === 'recovery'} +
+
+

Account Recovery

+

Generate recovery links or codes for users

+
+ +
+
+
+ + +
+ +
+ + +
+ + + + {#if recoveryError} +
+

{recoveryError}

+
+ {/if} + + {#if recoveryResult} +
+

+ Recovery generated successfully! +

+
+
{JSON.stringify(
+											recoveryResult,
+											null,
+											2
+										)}
+
+ +
+ {/if} +
+
+
+ {/if} + + + {#if activeTab === 'courier'} +
+
+
+

Courier Messages

+ +
+
+ + +
+
+ +
+ {#if courierLoading} +

Loading messages...

+ {:else if courierError} +
+

{courierError}

+
+ {:else if courierMessages.length === 0} +

No messages found.

+ {:else} +
+ + + + + + + + + + + + + {#each courierMessages as message (message.id)} + + + + + + + + + {/each} + +
+ Recipient + + Subject + + Type + + Status + + Created + + Actions +
+ {message.recipient || 'N/A'} + + {message.subject || 'N/A'} + + {message.template_type || message.channel || 'N/A'} + + + {message.status || 'unknown'} + + + {message.created_at ? new Date(message.created_at).toLocaleString() : 'N/A'} + + +
+
+ + + {#if browser} +
+
+ {courierMessages.length} messages + {#if courierPageToken} + + {/if} +
+ +
+ {/if} + {/if} +
+
+ {/if} + + + {#if activeTab === 'batch'} +
+
+

Batch Identity Operations

+

+ Create multiple identities at once by providing JSON data +

+
+ +
+
+
+ +

+ Provide an array of identity objects. Each identity should have schema_id and + traits. +

+ +
+ + + + {#if batchError} +
+

{batchError}

+
+ {/if} + + {#if batchResult} +
+

+ Batch operation completed successfully! +

+
+
{JSON.stringify(
+											batchResult,
+											null,
+											2
+										)}
+
+ +
+ {/if} + +
+

Example JSON Format

+
{`[
+  {
+    "schema_id": "default",
+    "traits": {
+      "email": "user@example.com",
+      "name": {
+        "first": "First",
+        "last": "Last"
+      }
+    }
+  }
+]`}
+
+
+
+
+ {/if} +
+
+ + + + + + + + + + + { + selectedSession = session; + identitySessionsModal = null; + }} + onExtendSession={extendSession} + onDeleteSession={deleteSession} +/> + + diff --git a/auth-frontend/src/routes/error/+page.server.ts b/auth-frontend/src/routes/error/+page.server.ts new file mode 100644 index 0000000..6e2887f --- /dev/null +++ b/auth-frontend/src/routes/error/+page.server.ts @@ -0,0 +1,22 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import type { PageServerLoad } from './$types'; + +export const load: PageServerLoad = async ({ url }) => { + const flowId = url.searchParams.get('id'); + + if (!flowId) { + return { + errorMessage: 'An error occurred. Please try again.' + }; + } + + try { + const { data: flow } = await kratosServerClient.getFlowError({ id: flowId }); + return { flow }; + } catch (error) { + console.error('Error flow error:', error); + return { + errorMessage: 'An error occurred. Please try again.' + }; + } +}; diff --git a/auth-frontend/src/routes/error/+page.svelte b/auth-frontend/src/routes/error/+page.svelte new file mode 100644 index 0000000..91be58c --- /dev/null +++ b/auth-frontend/src/routes/error/+page.svelte @@ -0,0 +1,49 @@ + + + + Error - Example App + + +
+
+
+
+
+ +
+
+

Error

+
+

{errorMessage}

+ {#if errorDetails?.reason} +

{errorDetails.reason}

+ {/if} +
+ +
+
+
+
+
diff --git a/auth-frontend/src/routes/login/+page.svelte b/auth-frontend/src/routes/login/+page.svelte new file mode 100644 index 0000000..d95b742 --- /dev/null +++ b/auth-frontend/src/routes/login/+page.svelte @@ -0,0 +1,77 @@ + + + + Login - Example App + + +
+
+

+ Sign in to your account +

+
+ +
+
+ {#if loading} +

Loading...

+ {:else if error} +
+

{error}

+
+

+ Try again +

+ {:else if flow} + + {:else} +
+

Failed to load login form

+
+ {/if} +
+ +

+ Don't have an account? + + Register here + +

+ +

+ + Forgot your password? + +

+
+
diff --git a/auth-frontend/src/routes/logout/+server.ts b/auth-frontend/src/routes/logout/+server.ts new file mode 100644 index 0000000..4ab5267 --- /dev/null +++ b/auth-frontend/src/routes/logout/+server.ts @@ -0,0 +1,56 @@ +import { redirect } from '@sveltejs/kit'; +import type { RequestHandler } from './$types'; +import { PUBLIC_KRATOS_URL } from '$env/static/public'; + +export const POST: RequestHandler = async ({ cookies, fetch, request }) => { + const formData = await request.formData(); + const returnTo = formData.get('return_to')?.toString() || 'https://account.example.com'; + + try { + const sessionToken = cookies.get('ory_kratos_session'); + + if (sessionToken) { + // Create a logout flow through Oathkeeper with return_to parameter + const logoutUrl = new URL(`${PUBLIC_KRATOS_URL}/self-service/logout/browser`); + logoutUrl.searchParams.set('return_to', returnTo); + + const response = await fetch(logoutUrl.toString(), { + method: 'GET', + headers: { + cookie: `ory_kratos_session=${sessionToken}` + }, + redirect: 'manual' + }); + + // Get the logout token from response + if (response.status === 200) { + const data = await response.json(); + if (data.logout_url) { + // Execute logout - this will redirect to return_to after logout + await fetch(data.logout_url, { + method: 'GET', + headers: { + cookie: `ory_kratos_session=${sessionToken}` + } + }); + } + } + } + + // Clear cookie on the server side with matching attributes + cookies.delete('ory_kratos_session', { + path: '/', + domain: '.example.com' + }); + } catch (error) { + console.error('Logout error:', error); + // Continue to redirect even if logout fails + } + + // Redirect to the return_to URL or default to login page + if (returnTo && returnTo !== 'https://account.example.com') { + throw redirect(303, returnTo); + } + + throw redirect(303, '/login'); +}; diff --git a/auth-frontend/src/routes/recovery/+page.server.ts b/auth-frontend/src/routes/recovery/+page.server.ts new file mode 100644 index 
0000000..542af1b --- /dev/null +++ b/auth-frontend/src/routes/recovery/+page.server.ts @@ -0,0 +1,76 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import { redirect } from '@sveltejs/kit'; +import { PUBLIC_KRATOS_URL } from '$env/static/public'; +import type { PageServerLoad } from './$types'; + +export const load: PageServerLoad = async ({ url, request }) => { + const flowId = url.searchParams.get('flow'); + const code = url.searchParams.get('code'); + + // If no flow ID, redirect to Kratos to create the flow (with proper CSRF cookie handling) + if (!flowId) { + throw redirect(303, `${PUBLIC_KRATOS_URL}/self-service/recovery/browser`); + } + + // Load the existing flow + const cookie = request.headers.get('cookie') || undefined; + let flow; + + try { + const result = await kratosServerClient.getRecoveryFlow({ id: flowId, cookie }); + flow = result.data; + } catch (error: any) { + // If flow is expired/invalid, redirect to create a new one + if ([410, 403, 400].includes(error?.status || error?.response?.status)) { + throw redirect(303, `${PUBLIC_KRATOS_URL}/self-service/recovery/browser`); + } + throw error; + } + + // If we have a valid flow and code, auto-submit the recovery code + if (flow && code) { + try { + const result = await kratosServerClient.updateRecoveryFlow({ + flow: flow.id, + updateRecoveryFlowBody: { + method: 'code', + code: code + } + }); + + // Recovery code submitted successfully + // Check if Kratos wants us to redirect somewhere (usually to settings to set new password) + if (result.data && (result.data as any).redirect_browser_to) { + throw redirect(303, (result.data as any).redirect_browser_to); + } + + // Otherwise update the flow with the result + flow = result.data; + } catch (error: any) { + // Re-throw if this is a redirect (SvelteKit internal) + if (error?.status === 303 || error?.location) { + throw error; + } + + console.error('Auto recovery failed:', error); + + // If Kratos returned an updated flow with error 
messages, use it + if (error.response?.data?.ui) { + flow = error.response.data; + } else { + // Add a user-friendly error message to the flow UI + if (!flow.ui.messages) { + flow.ui.messages = []; + } + flow.ui.messages.push({ + id: 4060001, + text: 'The recovery code has expired or is invalid. Please request a new recovery email by entering your email address below.', + type: 'error', + context: {} + }); + } + } + } + + return { flow }; +}; diff --git a/auth-frontend/src/routes/recovery/+page.svelte b/auth-frontend/src/routes/recovery/+page.svelte new file mode 100644 index 0000000..f9666bb --- /dev/null +++ b/auth-frontend/src/routes/recovery/+page.svelte @@ -0,0 +1,49 @@ + + + + Password Recovery - Example App + + +
+
+

+ {heading} +

+

+ {description} +

+
+ +
+
+ +
+ +

+ Remember your password? + + Sign in here + +

+
+
diff --git a/auth-frontend/src/routes/registration/+page.svelte b/auth-frontend/src/routes/registration/+page.svelte new file mode 100644 index 0000000..eea26f6 --- /dev/null +++ b/auth-frontend/src/routes/registration/+page.svelte @@ -0,0 +1,71 @@ + + + + Register - Example App + + +
+
+

+ Create your account +

+
+ +
+
+ {#if loading} +

Loading...

+ {:else if error} +
+

{error}

+
+

+ Try again +

+ {:else if flow} + + {:else} +
+

Failed to load registration form

+
+ {/if} +
+ +

+ Already have an account? + + Sign in here + +

+
+
diff --git a/auth-frontend/src/routes/settings/+page.server.ts b/auth-frontend/src/routes/settings/+page.server.ts new file mode 100644 index 0000000..b9e3932 --- /dev/null +++ b/auth-frontend/src/routes/settings/+page.server.ts @@ -0,0 +1,21 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import { redirect } from '@sveltejs/kit'; +import type { PageServerLoad } from './$types'; + +// Only validate session on the server. Do NOT create/fetch the settings flow here +// so that Kratos' Set-Cookie (csrf) reaches the browser directly when the flow +// is initialized client-side. +export const load: PageServerLoad = async ({ cookies }) => { + const sessionToken = cookies.get('ory_kratos_session'); + if (!sessionToken) { + throw redirect(303, '/login'); + } + + const sessionCookie = `ory_kratos_session=${sessionToken}`; + try { + await kratosServerClient.toSession({ cookie: sessionCookie }); + } catch { + throw redirect(303, '/login'); + } + return {}; +}; diff --git a/auth-frontend/src/routes/settings/+page.svelte b/auth-frontend/src/routes/settings/+page.svelte new file mode 100644 index 0000000..3535f99 --- /dev/null +++ b/auth-frontend/src/routes/settings/+page.svelte @@ -0,0 +1,82 @@ + + + + Settings - Example App + + +
+

Account Settings

+ + {#if isUpdated} +
+

Your settings have been updated successfully!

+
+ {/if} + + {#if isLoading} +
+

Loading settings…

+
+ {:else if flow} +
+

Profile Settings

+

Update your personal information

+ +
+ +
+

Password

+ +
+ +
+

Authenticator App (TOTP)

+

+ Use an authenticator app like Google Authenticator, Authy, or 1Password to generate verification codes. +

+ +
+ +
+

Security Keys & Biometrics (WebAuthn)

+

+ Use hardware security keys (like YubiKey) or biometric authentication (like Face ID or Touch ID) for enhanced security. +

+ +
+ {/if} +
diff --git a/auth-frontend/src/routes/verification/+page.server.ts b/auth-frontend/src/routes/verification/+page.server.ts new file mode 100644 index 0000000..71e84e2 --- /dev/null +++ b/auth-frontend/src/routes/verification/+page.server.ts @@ -0,0 +1,77 @@ +import { kratosServerClient } from '$lib/kratos-server'; +import { redirect } from '@sveltejs/kit'; +import { PUBLIC_KRATOS_URL } from '$env/static/public'; +import type { PageServerLoad } from './$types'; + +export const load: PageServerLoad = async ({ url, request }) => { + const flowId = url.searchParams.get('flow'); + const code = url.searchParams.get('code'); + + // If no flow ID, redirect to Kratos to create the flow (with proper CSRF cookie handling) + if (!flowId) { + throw redirect(303, `${PUBLIC_KRATOS_URL}/self-service/verification/browser`); + } + + // Load the existing flow + const cookie = request.headers.get('cookie') || undefined; + let flow; + + try { + const result = await kratosServerClient.getVerificationFlow({ id: flowId, cookie }); + flow = result.data; + } catch (error: any) { + // If flow is expired/invalid, redirect to create a new one + if ([410, 403, 400].includes(error?.status || error?.response?.status)) { + throw redirect(303, `${PUBLIC_KRATOS_URL}/self-service/verification/browser`); + } + throw error; + } + + // If we have a valid flow and code, auto-submit the verification + if (flow && code) { + try { + const result = await kratosServerClient.updateVerificationFlow({ + flow: flow.id, + updateVerificationFlowBody: { + method: 'code', + code: code + }, + cookie // Pass session cookie so Kratos can associate verification with authenticated user + }); + + // Verification code submitted successfully + // Check if Kratos wants us to redirect somewhere + if (result.data && (result.data as any).redirect_browser_to) { + throw redirect(303, (result.data as any).redirect_browser_to); + } + + // Otherwise redirect to home + throw redirect(303, '/'); + } catch (error: any) { + // Re-throw 
if this is a redirect (SvelteKit internal) + if (error?.status === 303 || error?.location) { + throw error; + } + + console.error('Auto verification failed:', error); + + // If Kratos returned an updated flow with error messages, use it + if (error.response?.data?.ui) { + flow = error.response.data; + } else { + // Add a user-friendly error message to the flow UI + if (!flow.ui.messages) { + flow.ui.messages = []; + } + flow.ui.messages.push({ + id: 4070001, + text: 'The verification code has expired or is invalid. Please request a new verification email by entering your email address below.', + type: 'error', + context: {} + }); + } + } + } + + return { flow }; +}; diff --git a/auth-frontend/src/routes/verification/+page.svelte b/auth-frontend/src/routes/verification/+page.svelte new file mode 100644 index 0000000..5b2758e --- /dev/null +++ b/auth-frontend/src/routes/verification/+page.svelte @@ -0,0 +1,42 @@ + + + + Email Verification - Example App + + +
+
+

+ {heading} +

+

+ {description} +

+
+ +
+
+ +
+
+
diff --git a/auth-frontend/static/robots.txt b/auth-frontend/static/robots.txt new file mode 100644 index 0000000..b6dd667 --- /dev/null +++ b/auth-frontend/static/robots.txt @@ -0,0 +1,3 @@ +# allow crawling everything by default +User-agent: * +Disallow: diff --git a/auth-frontend/svelte.config.js b/auth-frontend/svelte.config.js new file mode 100644 index 0000000..03c17f2 --- /dev/null +++ b/auth-frontend/svelte.config.js @@ -0,0 +1,12 @@ +import adapter from '@sveltejs/adapter-node'; +import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + // Consult https://svelte.dev/docs/kit/integrations + // for more information about preprocessors + preprocess: vitePreprocess(), + kit: { adapter: adapter() } +}; + +export default config; diff --git a/auth-frontend/tsconfig.json b/auth-frontend/tsconfig.json new file mode 100644 index 0000000..a5567ee --- /dev/null +++ b/auth-frontend/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "moduleResolution": "bundler" + } + // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias + // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files + // + // To make changes to top-level options such as include and exclude, we recommend extending + // the generated config; see https://svelte.dev/docs/kit/configuration#typescript +} diff --git a/auth-frontend/vite.config.ts b/auth-frontend/vite.config.ts new file mode 100644 index 0000000..2d35c4f --- /dev/null +++ b/auth-frontend/vite.config.ts @@ -0,0 +1,7 @@ +import tailwindcss from '@tailwindcss/vite'; +import { sveltekit } from '@sveltejs/kit/vite'; +import { defineConfig } from 'vite'; + +export default 
defineConfig({ + plugins: [tailwindcss(), sveltekit()] +}); diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..6f70361 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,423 @@ +services: + # ============================================ + # NEXUS APP SERVICES + # ============================================ + + # Vault Agent for nexus migrations (one-shot, runs before app) + vault-agent-migrate: + image: hashicorp/vault:1.18 + container_name: nexus-vault-agent-migrate + network_mode: host + user: "0:0" + + command: > + sh -c "rm -f /vault/secrets/.env; + vault agent -config=/vault/config/agent-config.hcl & + while [ ! -f /vault/secrets/.env ]; do sleep 1; done; + echo 'Secrets rendered, exiting'; exit 0" + + cap_add: + - IPC_LOCK + + volumes: + - ./vault/agent-config-migrate.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/migrate/role-id:/vault/role-id:ro + - ./secrets/migrate/secret-id:/vault/secret-id:ro + - ./run/migrate:/vault/secrets + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + # Nexus migration runner (one-shot, runs before app) + migrate: + build: + context: . + dockerfile: Dockerfile.migrate + container_name: nexus-migrate + network_mode: host + + depends_on: + vault-agent-migrate: + condition: service_completed_successfully + + volumes: + - ./run/migrate:/vault/secrets:ro + - ./migrations:/app/migrations:ro + + working_dir: /app + + command: + - | + set -e + echo "Loading credentials from Vault..." + set -a + . /vault/secrets/.env + set +a + echo "Running migrations..." + sqlx migrate run + echo "Migrations complete!" 
+ + # Vault Agent for nexus app runtime (long-running) + vault-agent: + image: hashicorp/vault:1.18 + container_name: nexus-vault-agent + restart: unless-stopped + network_mode: host + pid: host # Share PID namespace to signal nexus on credential refresh + user: "0:0" + + command: ["vault", "agent", "-config=/vault/config/agent-config.hcl"] + + cap_add: + - IPC_LOCK + - KILL # Required to send SIGHUP to nexus for credential refresh + + volumes: + - ./vault/agent-config.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/app/role-id:/vault/role-id:ro + - ./secrets/app/secret-id:/vault/secret-id:ro + - ./run/app:/vault/secrets + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + healthcheck: + test: ["CMD", "test", "-f", "/vault/secrets/.env"] + interval: 5s + timeout: 3s + retries: 30 + start_period: 10s + + # Main nexus application + nexus: + build: + context: . + dockerfile: Dockerfile + container_name: nexus + restart: unless-stopped + network_mode: host + pid: host # Share PID namespace so vault-agent can signal us + + depends_on: + migrate: + condition: service_completed_successfully + vault-agent: + condition: service_healthy + + volumes: + - ./run/app:/vault/secrets # Not read-only - app writes nexus.pid + + environment: + - RUST_LOG=nexus=info,tower_http=info + + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:5050/health/ready"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # ============================================ + # PGBOUNCER SERVICE (for Kratos) + # ============================================ + + # PgBouncer with integrated Vault Agent + # Proxies Kratos DB connections with dynamic Vault credentials + pgbouncer: + build: + context: ./pgbouncer + dockerfile: Dockerfile + container_name: nexus-pgbouncer + restart: unless-stopped + network_mode: host + + cap_add: + - IPC_LOCK + + 
volumes: + - ./vault/agent-config-pgbouncer.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/kratos-app/role-id:/vault/role-id:ro + - ./secrets/kratos-app/secret-id:/vault/secret-id:ro + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + healthcheck: + test: ["CMD", "pg_isready", "-h", "127.0.0.1", "-p", "6432", "-U", "kratos"] + interval: 5s + timeout: 3s + retries: 30 + start_period: 15s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # ============================================ + # KRATOS SERVICES + # ============================================ + + # Vault Agent for Kratos migrations (one-shot) + vault-agent-kratos-migrate: + image: hashicorp/vault:1.18 + container_name: nexus-vault-agent-kratos-migrate + network_mode: host + user: "0:0" + + command: > + sh -c "rm -f /vault/secrets/.env; + vault agent -config=/vault/config/agent-config.hcl & + while [ ! -f /vault/secrets/.env ]; do sleep 1; done; + echo 'Secrets rendered, exiting'; exit 0" + + cap_add: + - IPC_LOCK + + volumes: + - ./vault/agent-config-kratos-migrate.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/kratos-migrate/role-id:/vault/role-id:ro + - ./secrets/kratos-migrate/secret-id:/vault/secret-id:ro + - ./run/kratos-migrate:/vault/secrets + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + # Kratos migration runner (one-shot) + kratos-migrate: + image: oryd/kratos:v1.1.0 + container_name: nexus-kratos-migrate + network_mode: host + + depends_on: + vault-agent-kratos-migrate: + condition: service_completed_successfully + migrate: + condition: service_completed_successfully # Nexus migrations create kratos schema + + volumes: + - ./kratos/config:/etc/kratos:ro + - ./run/kratos-migrate:/vault/secrets:ro + + entrypoint: ["/bin/sh", "-c"] + command: + - | + export $(grep -v '^#' /vault/secrets/.env | xargs) + exec kratos migrate sql -e --yes + + 
# Vault Agent for Kratos runtime (long-running) + vault-agent-kratos: + image: hashicorp/vault:1.18 + container_name: nexus-vault-agent-kratos + restart: unless-stopped + network_mode: host + user: "0:0" + + command: ["vault", "agent", "-config=/vault/config/agent-config.hcl"] + + cap_add: + - IPC_LOCK + + volumes: + - ./vault/agent-config-kratos.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/kratos-app/role-id:/vault/role-id:ro + - ./secrets/kratos-app/secret-id:/vault/secret-id:ro + - ./run/kratos:/vault/secrets + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + healthcheck: + test: ["CMD", "test", "-f", "/vault/secrets/.env"] + interval: 5s + timeout: 3s + retries: 30 + start_period: 10s + + # Kratos identity server (long-running) + kratos: + image: oryd/kratos:v1.1.0 + container_name: nexus-kratos + restart: unless-stopped + network_mode: host + + depends_on: + kratos-migrate: + condition: service_completed_successfully + vault-agent-kratos: + condition: service_healthy + pgbouncer: + condition: service_healthy + + volumes: + - ./kratos/config:/etc/kratos:ro + - ./run/kratos:/vault/secrets:ro + + entrypoint: ["/bin/sh", "-c"] + command: + - | + export $(grep -v '^#' /vault/secrets/.env | xargs) + exec kratos serve --config /etc/kratos/kratos.yml + + healthcheck: + test: ["CMD", "wget", "-q", "--spider", "http://localhost:6050/health/alive"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # ============================================ + # OATHKEEPER SERVICE + # ============================================ + + # Vault Agent for Oathkeeper (long-running) + vault-agent-oathkeeper: + image: hashicorp/vault:1.18 + container_name: nexus-vault-agent-oathkeeper + restart: unless-stopped + network_mode: host + user: "0:0" + + command: ["vault", "agent", "-config=/vault/config/agent-config.hcl"] + + cap_add: + - 
IPC_LOCK + + volumes: + - ./vault/agent-config-oathkeeper.hcl:/vault/config/agent-config.hcl:ro + - ./vault/templates:/vault/templates:ro + - ./secrets/oathkeeper/role-id:/vault/role-id:ro + - ./secrets/oathkeeper/secret-id:/vault/secret-id:ro + - ./run/oathkeeper:/vault/secrets + + environment: + - VAULT_ADDR=http://vault.example.local:8200 + + healthcheck: + test: ["CMD", "test", "-f", "/vault/secrets/.env"] + interval: 5s + timeout: 3s + retries: 30 + start_period: 10s + + # Oathkeeper API gateway (stateless, long-running) + oathkeeper: + build: + context: ./oathkeeper + dockerfile: Dockerfile + container_name: nexus-oathkeeper + restart: unless-stopped + network_mode: host + + depends_on: + kratos: + condition: service_healthy + nexus: + condition: service_healthy + vault-agent-oathkeeper: + condition: service_healthy + + volumes: + - ./run/oathkeeper:/vault/secrets:ro + + healthcheck: + test: ["CMD", "wget", "-q", "--spider", "http://localhost:7250/health/alive"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # ============================================ + # AUTH FRONTEND SERVICE + # ============================================ + + # Auth frontend (account.example.com) + auth-frontend: + build: + context: ./auth-frontend + dockerfile: Dockerfile + container_name: nexus-auth-frontend + restart: unless-stopped + network_mode: host + + environment: + - KRATOS_SERVER_URL=http://localhost:6000 + - ORIGIN=https://account.example.com + - ADMIN_USER_ID=00000000-0000-0000-0000-000000000000 # Replace with your admin user ID + + depends_on: + kratos: + condition: service_healthy + oathkeeper: + condition: service_healthy + + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:3000/"] + interval: 30s + timeout: 5s + retries: 3 + start_period: 10s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # 
============================================ + # MAIN FRONTEND SERVICE + # ============================================ + + # Main frontend (app.example.com) + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + container_name: nexus-frontend + restart: unless-stopped + network_mode: host + + environment: + - NODE_ENV=production + - ORIGIN=https://app.example.com + + depends_on: + oathkeeper: + condition: service_healthy + + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:5000/"] + interval: 30s + timeout: 5s + retries: 3 + start_period: 10s + + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..8f6739f --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,32 @@ +#!/bin/sh +set -e + +# Source Vault-rendered environment if available +if [ -f /vault/secrets/.env ]; then + echo "Loading environment from Vault..." + set -a + . /vault/secrets/.env + set +a +fi + +# Start the application in background +/app/nexus & +APP_PID=$! + +# Write PID for Vault Agent to signal on credential rotation +# Use shared volume so vault-agent can read it +echo $APP_PID > /vault/secrets/nexus.pid +echo "Nexus started with PID $APP_PID" + +# Forward signals to the app +trap "kill -TERM $APP_PID" TERM INT +trap "kill -HUP $APP_PID" HUP + +# Wait for app to exit +wait $APP_PID +EXIT_CODE=$? 
+ +# Clean up +rm -f /vault/secrets/nexus.pid + +exit $EXIT_CODE diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 0000000..e2f3f7b --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,49 @@ +# Dependencies +node_modules +.pnp +.pnp.js + +# Build outputs +.svelte-kit +build +dist +.output +.vercel +.netlify +.wrangler + +# Cache and vite +.vite +vite.config.js.timestamp-* +vite.config.ts.timestamp-* + +# Testing +coverage + +# Environment files +.env +.env.* +!.env.example + +# IDE and editors +.vscode +.idea + +# Version control +.git +.gitignore + +# Documentation +*.md + +# OS files +.DS_Store +Thumbs.db + +# Debug logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Misc +*.pem diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..1e90c01 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,24 @@ +node_modules + +# Output +.output +.vercel +.netlify +.wrangler +/.svelte-kit +/build + +# OS +.DS_Store +Thumbs.db + +# Env +.env +.env.* +!.env.example +!.env.test + +# Vite +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +/.vscode/ diff --git a/frontend/.graphqlrc.yaml b/frontend/.graphqlrc.yaml new file mode 100644 index 0000000..8700dbf --- /dev/null +++ b/frontend/.graphqlrc.yaml @@ -0,0 +1,5 @@ +schema: + - http://localhost:5050/graphql: + headers: + X-Oathkeeper-Secret: ${OATHKEEPER_SECRET} +documents: src/lib/graphql/**/*.ts diff --git a/frontend/.npmrc b/frontend/.npmrc new file mode 100644 index 0000000..b6f27f1 --- /dev/null +++ b/frontend/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/frontend/.prettierignore b/frontend/.prettierignore new file mode 100644 index 0000000..7d74fe2 --- /dev/null +++ b/frontend/.prettierignore @@ -0,0 +1,9 @@ +# Package Managers +package-lock.json +pnpm-lock.yaml +yarn.lock +bun.lock +bun.lockb + +# Miscellaneous +/static/ diff --git a/frontend/.prettierrc b/frontend/.prettierrc new file mode 100644 index 0000000..819fa57 --- 
/dev/null +++ b/frontend/.prettierrc @@ -0,0 +1,16 @@ +{ + "useTabs": true, + "singleQuote": true, + "trailingComma": "none", + "printWidth": 100, + "plugins": ["prettier-plugin-svelte", "prettier-plugin-tailwindcss"], + "overrides": [ + { + "files": "*.svelte", + "options": { + "parser": "svelte" + } + } + ], + "tailwindStylesheet": "./src/routes/layout.css" +} diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..d83ecb8 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,55 @@ +# ==================================== +# Build Stage +# ==================================== +FROM node:22-alpine AS builder + +WORKDIR /app + +# Copy package files +COPY package*.json ./ + +# Install all dependencies (including devDependencies for build) +RUN npm ci + +# Copy source code and configuration +COPY . . + +# Build the SvelteKit application +RUN npm run build + +# Prune dev dependencies after build +RUN npm prune --production + +# ==================================== +# Production Stage +# ==================================== +FROM node:22-alpine + +# Install curl for health checks +RUN apk add --no-cache curl + +WORKDIR /app + +# Copy built application from builder +COPY --from=builder /app/build ./build +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./package.json + +# Create non-root user +RUN addgroup -g 1001 -S nodejs && \ + adduser -S sveltekit -u 1001 && \ + chown -R sveltekit:nodejs /app + +USER sveltekit + +EXPOSE 5000 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ + CMD curl -f http://localhost:5000/ || exit 1 + +ENV NODE_ENV=production +ENV HOST=0.0.0.0 +ENV PORT=5000 +ENV ORIGIN=https://app.example.com + +CMD ["node", "build"] diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..75842c4 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,38 @@ +# sv + +Everything you need to build a Svelte project, powered by 
[`sv`](https://github.com/sveltejs/cli). + +## Creating a project + +If you're seeing this, you've probably already done this step. Congrats! + +```sh +# create a new project in the current directory +npx sv create + +# create a new project in my-app +npx sv create my-app +``` + +## Developing + +Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: + +```sh +npm run dev + +# or start the server and open the app in a new browser tab +npm run dev -- --open +``` + +## Building + +To create a production version of your app: + +```sh +npm run build +``` + +You can preview the production build with `npm run preview`. + +> To deploy your app, you may need to install an [adapter](https://svelte.dev/docs/kit/adapters) for your target environment. diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js new file mode 100644 index 0000000..9215357 --- /dev/null +++ b/frontend/eslint.config.js @@ -0,0 +1,43 @@ +import prettier from 'eslint-config-prettier'; +import { fileURLToPath } from 'node:url'; +import { includeIgnoreFile } from '@eslint/compat'; +import js from '@eslint/js'; +import svelte from 'eslint-plugin-svelte'; +import { defineConfig } from 'eslint/config'; +import globals from 'globals'; +import ts from 'typescript-eslint'; +import svelteConfig from './svelte.config.js'; + +const gitignorePath = fileURLToPath(new URL('./.gitignore', import.meta.url)); + +export default defineConfig( + includeIgnoreFile(gitignorePath), + js.configs.recommended, + ...ts.configs.recommended, + ...svelte.configs.recommended, + prettier, + ...svelte.configs.prettier, + { + languageOptions: { globals: { ...globals.browser, ...globals.node } }, + + rules: { + // typescript-eslint strongly recommend that you do not use the no-undef lint rule on TypeScript projects. 
+ // see: https://typescript-eslint.io/troubleshooting/faqs/eslint/#i-get-errors-from-the-no-undef-rule-about-global-variables-not-being-defined-even-though-there-are-no-typescript-errors + 'no-undef': 'off', + // Navigation without resolve() is fine for absolute paths in this project + 'svelte/no-navigation-without-resolve': 'off' + } + }, + { + files: ['**/*.svelte', '**/*.svelte.ts', '**/*.svelte.js'], + + languageOptions: { + parserOptions: { + projectService: true, + extraFileExtensions: ['.svelte'], + parser: ts.parser, + svelteConfig + } + } + } +); diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..cfdf689 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,4712 @@ +{ + "name": "frontend", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.0.1", + "dependencies": { + "@apollo/client": "^4.0.11", + "apollo-upload-client": "^19.0.0", + "date-fns": "^4.1.0", + "graphql": "^16.12.0" + }, + "devDependencies": { + "@eslint/compat": "^1.4.0", + "@eslint/js": "^9.39.1", + "@sveltejs/adapter-node": "^5.4.0", + "@sveltejs/kit": "^2.49.1", + "@sveltejs/vite-plugin-svelte": "^6.2.1", + "@tailwindcss/forms": "^0.5.10", + "@tailwindcss/typography": "^0.5.19", + "@tailwindcss/vite": "^4.1.17", + "@types/apollo-upload-client": "^19.0.0", + "@types/node": "^24", + "eslint": "^9.39.1", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-svelte": "^3.13.1", + "globals": "^16.5.0", + "prettier": "^3.7.4", + "prettier-plugin-svelte": "^3.4.0", + "prettier-plugin-tailwindcss": "^0.7.2", + "svelte": "^5.45.6", + "svelte-check": "^4.3.4", + "tailwindcss": "^4.1.17", + "typescript": "^5.9.3", + "typescript-eslint": "^8.48.1", + "vite": "^7.2.6", + "vite-plugin-devtools-json": "^1.0.0", + "vite-plugin-mkcert": "^1.17.9" + } + }, + "node_modules/@apollo/client": { + "version": "4.0.11", + "resolved": 
"https://registry.npmjs.org/@apollo/client/-/client-4.0.11.tgz", + "integrity": "sha512-jyW5j3DEYnFlYA1Lk9Szd7O/od1DptnbZnj03DQXxuQb+Gnop0w/uQxVRKaU7bPhvVuBnlAtZYPOykArX+xWdg==", + "license": "MIT", + "workspaces": [ + "dist", + "codegen", + "scripts/codemods/ac3-to-ac4" + ], + "dependencies": { + "@graphql-typed-document-node/core": "^3.1.1", + "@wry/caches": "^1.0.0", + "@wry/equality": "^0.5.6", + "@wry/trie": "^0.5.0", + "graphql-tag": "^2.12.6", + "optimism": "^0.18.0", + "tslib": "^2.3.0" + }, + "peerDependencies": { + "graphql": "^16.0.0", + "graphql-ws": "^5.5.5 || ^6.0.3", + "react": "^17.0.0 || ^18.0.0 || >=19.0.0-rc", + "react-dom": "^17.0.0 || ^18.0.0 || >=19.0.0-rc", + "rxjs": "^7.3.0", + "subscriptions-transport-ws": "^0.9.0 || ^0.11.0" + }, + "peerDependenciesMeta": { + "graphql-ws": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "subscriptions-transport-ws": { + "optional": true + } + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": 
"sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/linux-mips64el": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", 
+ "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": 
"sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.1", + "resolved": 
"https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/compat": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.4.1.tgz", + "integrity": "sha512-cfO82V9zxxGBxcQDr1lfaYB7wykTa0b00mGa36FrJl7iTFd0Z2cHfEYuxcBRP/iNijCsWsEkA+jzT8hGYmv33w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "peerDependencies": { + "eslint": "^8.40 || 9" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": 
"https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.1", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": 
"14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@graphql-typed-document-node/core": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz", + "integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==", + "license": "MIT", + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || 
^16.0.0 || ^17.0.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/plugin-commonjs": { + "version": "28.0.9", + "resolved": 
"https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.9.tgz", + "integrity": "sha512-PIR4/OHZ79romx0BVVll/PkwWpJ7e5lsqFa3gFfcrFPWwLXLV39JVUzQV9RKjWerE7B845Hqjj9VYlQeieZ2dA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "commondir": "^1.0.1", + "estree-walker": "^2.0.2", + "fdir": "^6.2.0", + "is-reference": "1.2.1", + "magic-string": "^0.30.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=16.0.0 || 14 >= 14.17" + }, + "peerDependencies": { + "rollup": "^2.68.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-json": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", + "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.1.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.3.tgz", + "integrity": "sha512-lUYM3UBGuM93CnMPG1YocWu7X802BrNF3jW2zny5gQyLQgRFJhV1Sq0Zi74+dh/6NBx1DxFC4b4GXg9wUCG5Qg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": 
"https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.54.0.tgz", + "integrity": "sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.54.0.tgz", + "integrity": "sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.54.0.tgz", + "integrity": "sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.54.0.tgz", + "integrity": 
"sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.54.0.tgz", + "integrity": "sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.54.0.tgz", + "integrity": "sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.54.0.tgz", + "integrity": "sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.54.0.tgz", + "integrity": "sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.54.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.54.0.tgz", + "integrity": "sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.54.0.tgz", + "integrity": "sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.54.0.tgz", + "integrity": "sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.54.0.tgz", + "integrity": "sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.54.0.tgz", + "integrity": "sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.54.0.tgz", + "integrity": "sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.54.0.tgz", + "integrity": "sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.54.0.tgz", + "integrity": "sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.54.0.tgz", + "integrity": "sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.54.0.tgz", + "integrity": "sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.54.0.tgz", + "integrity": "sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.54.0.tgz", + "integrity": "sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.54.0.tgz", + "integrity": "sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.54.0.tgz", + "integrity": "sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@sveltejs/acorn-typescript": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.8.tgz", + "integrity": "sha512-esgN+54+q0NjB0Y/4BomT9samII7jGwNy/2a3wNZbT2A2RpmXsXwUt24LvLhx6jUq2gVk4cWEvcRO6MFQbOfNA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^8.9.0" + } + }, + "node_modules/@sveltejs/adapter-node": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-5.4.0.tgz", + "integrity": "sha512-NMsrwGVPEn+J73zH83Uhss/hYYZN6zT3u31R3IHAn3MiKC3h8fjmIAhLfTSOeNHr5wPYfjjMg8E+1gyFgyrEcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/plugin-commonjs": "^28.0.1", + "@rollup/plugin-json": "^6.1.0", + "@rollup/plugin-node-resolve": "^16.0.0", + "rollup": "^4.9.5" + }, + "peerDependencies": { + "@sveltejs/kit": "^2.4.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "2.49.2", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.49.2.tgz", + "integrity": "sha512-Vp3zX/qlwerQmHMP6x0Ry1oY7eKKRcOWGc2P59srOp4zcqyn+etJyQpELgOi4+ZSUgteX8Y387NuwruLgGXLUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/cookie": "^0.6.0", + "acorn": "^8.14.1", + "cookie": "^0.6.0", + "devalue": "^5.3.2", + "esm-env": "^1.2.2", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^3.0.0" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": ">=18.13" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + } + } + }, + 
"node_modules/@sveltejs/vite-plugin-svelte": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.2.1.tgz", + "integrity": "sha512-YZs/OSKOQAQCnJvM/P+F1URotNnYNeU3P2s4oIpzm1uFaqUEqRxUB0g5ejMjEb5Gjb9/PiBI5Ktrq4rUUF8UVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", + "debug": "^4.4.1", + "deepmerge": "^4.3.1", + "magic-string": "^0.30.17", + "vitefu": "^1.1.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.1.tgz", + "integrity": "sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@tailwindcss/forms": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.11.tgz", + "integrity": "sha512-h9wegbZDPurxG22xZSoWtdzc41/OlNEUQERNqI/0fOwa2aVlWGu7C35E/x6LDyD3lgtztFSSjKZyuVM0hxhbgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "mini-svg-data-uri": "^1.2.3" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1 || >= 4.0.0-alpha.20 || >= 4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", + "integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/remapping": "^2.3.4", + "enhanced-resolve": "^5.18.3", + "jiti": "^2.6.1", + "lightningcss": "1.30.2", + "magic-string": "^0.30.21", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.18" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", + "integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-x64": "4.1.18", + "@tailwindcss/oxide-freebsd-x64": "4.1.18", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", + "@tailwindcss/oxide-linux-x64-musl": "4.1.18", + "@tailwindcss/oxide-wasm32-wasi": "4.1.18", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.18.tgz", + "integrity": "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.18.tgz", + "integrity": "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.18.tgz", + "integrity": "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.18.tgz", + "integrity": "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.18.tgz", + "integrity": "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.18.tgz", + "integrity": "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.18", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.18.tgz", + "integrity": "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", + "integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", + "integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.18.tgz", + "integrity": "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.0", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.4.0" + }, 
+ "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.18.tgz", + "integrity": "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.18.tgz", + "integrity": "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/typography": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.19.tgz", + "integrity": "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.18.tgz", + "integrity": "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.1.18", + "@tailwindcss/oxide": "4.1.18", + "tailwindcss": "4.1.18" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7" + } + }, + "node_modules/@types/apollo-upload-client": { + "version": "19.0.0", + "resolved": 
"https://registry.npmjs.org/@types/apollo-upload-client/-/apollo-upload-client-19.0.0.tgz", + "integrity": "sha512-PTl+2ZDeF2ZxCCLf1sQhiUqkBFhtpzBQzgJwaXhyo5ygDVi/398Z9LAXHaIQe0mmI9Hu2EtyYUUFNzfBhcuI0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@apollo/client": "^4.0.0", + "@types/extract-files": "*", + "graphql": "14 - 16" + } + }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/extract-files": { + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/@types/extract-files/-/extract-files-13.0.2.tgz", + "integrity": "sha512-4sd7uDB0OVZmwH2wD6w7Qlpr2P5Pn8C9IGwnaq9aiiBDD3Lou7CwFjjkJTDYCDsEvk9zxAtmv9TaMg1lt/YJfA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.10.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.4.tgz", + "integrity": "sha512-vnDVpYPMzs4wunl27jHrfmwojOGKya0xyM3sH+UE5iv5uPS6vX7UIoh6m+vQc5LGBq52HBKPIn/zcSZVzeDEZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": 
"https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.51.0.tgz", + "integrity": "sha512-XtssGWJvypyM2ytBnSnKtHYOGT+4ZwTnBVl36TA4nRO2f4PRNGz5/1OszHzcZCvcBMh+qb7I06uoCmLTRdR9og==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.51.0", + "@typescript-eslint/type-utils": "8.51.0", + "@typescript-eslint/utils": "8.51.0", + "@typescript-eslint/visitor-keys": "8.51.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.51.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.51.0.tgz", + "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.51.0", + "@typescript-eslint/types": "8.51.0", + "@typescript-eslint/typescript-estree": 
"8.51.0", + "@typescript-eslint/visitor-keys": "8.51.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.51.0.tgz", + "integrity": "sha512-Luv/GafO07Z7HpiI7qeEW5NW8HUtZI/fo/kE0YbtQEFpJRUuR0ajcWfCE5bnMvL7QQFrmT/odMe8QZww8X2nfQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.51.0", + "@typescript-eslint/types": "^8.51.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.51.0.tgz", + "integrity": "sha512-JhhJDVwsSx4hiOEQPeajGhCWgBMBwVkxC/Pet53EpBVs7zHHtayKefw1jtPaNRXpI9RA2uocdmpdfE7T+NrizA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.51.0", + "@typescript-eslint/visitor-keys": "8.51.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.51.0.tgz", + "integrity": "sha512-Qi5bSy/vuHeWyir2C8u/uqGMIlIDu8fuiYWv48ZGlZ/k+PRPHtaAu7erpc7p5bzw2WNNSniuxoMSO4Ar6V9OXw==", + "dev": true, + "license": "MIT", + 
"engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.51.0.tgz", + "integrity": "sha512-0XVtYzxnobc9K0VU7wRWg1yiUrw4oQzexCG2V2IDxxCxhqBMSMbjB+6o91A+Uc0GWtgjCa3Y8bi7hwI0Tu4n5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.51.0", + "@typescript-eslint/typescript-estree": "8.51.0", + "@typescript-eslint/utils": "8.51.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.51.0.tgz", + "integrity": "sha512-TizAvWYFM6sSscmEakjY3sPqGwxZRSywSsPEiuZF6d5GmGD9Gvlsv0f6N8FvAAA0CD06l3rIcWNbsN1e5F/9Ag==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.51.0.tgz", + "integrity": "sha512-1qNjGqFRmlq0VW5iVlcyHBbCjPB7y6SxpBkrbhNWMy/65ZoncXCEPJxkRZL8McrseNH6lFhaxCIaX+vBuFnRng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.51.0", + "@typescript-eslint/tsconfig-utils": "8.51.0", + "@typescript-eslint/types": 
"8.51.0", + "@typescript-eslint/visitor-keys": "8.51.0", + "debug": "^4.3.4", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "tinyglobby": "^0.2.15", + "ts-api-utils": "^2.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.51.0.tgz", + "integrity": "sha512-11rZYxSe0zabiKaCP2QAwRf/dnmgFgvTmeDTtZvUvXG3UuAdg/GU02NExmmIXzz3vLGgMdtrIosI84jITQOxUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.51.0", + "@typescript-eslint/types": "8.51.0", + "@typescript-eslint/typescript-estree": "8.51.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.51.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.51.0.tgz", + "integrity": "sha512-mM/JRQOzhVN1ykejrvwnBRV3+7yTKK8tVANVN3o1O0t0v7o+jqdVu9crPy5Y9dov15TJk/FTIgoUGHrTOVL3Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.51.0", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@wry/caches": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@wry/caches/-/caches-1.0.1.tgz", + "integrity": "sha512-bXuaUNLVVkD20wcGBWRyo7j9N3TxePEWFZj2Y+r9OoUzfqmavM84+mFykRicNsBqatba5JLay1t48wxaXaWnlA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/context": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@wry/context/-/context-0.7.4.tgz", + "integrity": "sha512-jmT7Sb4ZQWI5iyu3lobQxICu2nC/vbUhP0vIdd6tHC9PTfenmRmuIFqktc6GH9cgi+ZHnsLWPvfSvc4DrYmKiQ==", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/equality": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@wry/equality/-/equality-0.5.7.tgz", + "integrity": "sha512-BRFORjsTuQv5gxcXsuDXx6oGRhuVsEGwZy6LOzRRfgu+eSfxbhUQ9L9YtSEIuIjY/o7g3iWFjrc5eSY1GXP2Dw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/trie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@wry/trie/-/trie-0.5.0.tgz", + "integrity": 
"sha512-FNoYzHawTMk/6KMQoEG5O4PuioX19UbwdQKF44yw0nLfOypfQdjtfZzo/UIJWAJ23sNIFbD1Ug9lbaDGMwbqQA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/apollo-upload-client": { + "version": "19.0.0", + "resolved": 
"https://registry.npmjs.org/apollo-upload-client/-/apollo-upload-client-19.0.0.tgz", + "integrity": "sha512-SOORV4GB71Pd1Ktdb5D8qBthu/gtTQirTx6rAhgujR+Z24dwmLU47hUERFVeffnFpCMRsir/V5ijOSfqqJdC4A==", + "license": "MIT", + "dependencies": { + "extract-files": "^13.0.0" + }, + "engines": { + "node": "^20.9.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/jaydenseric" + }, + "peerDependencies": { + "@apollo/client": "^4.0.0", + "graphql": "14 - 16", + "rxjs": "^7.3.0" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/aria-query": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz", + "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": 
"sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, 
+ "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + 
"integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + 
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/devalue": { + "version": "5.6.1", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.1.tgz", + "integrity": "sha512-jDwizj+IlEZBunHcOuuFVBnIMPAEHvTsJj0BcIp94xYguLRVBcXO853px/MyIJvbVzWdsGvrRweIUWJw8hBP7A==", + "dev": true, + "license": "MIT" + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": 
"sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.18.4", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.4.tgz", + "integrity": "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": 
"sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-prettier": { + "version": "10.1.8", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.8.tgz", + "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", + "dev": true, + 
"license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "funding": { + "url": "https://opencollective.com/eslint-config-prettier" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-svelte": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-3.13.1.tgz", + "integrity": "sha512-Ng+kV/qGS8P/isbNYVE3sJORtubB+yLEcYICMkUWNaDTb0SwZni/JhAYXh/Dz/q2eThUwWY0VMPZ//KYD1n3eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.6.1", + "@jridgewell/sourcemap-codec": "^1.5.0", + "esutils": "^2.0.3", + "globals": "^16.0.0", + "known-css-properties": "^0.37.0", + "postcss": "^8.4.49", + "postcss-load-config": "^3.1.4", + "postcss-safe-parser": "^7.0.0", + "semver": "^7.6.3", + "svelte-eslint-parser": "^1.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "eslint": "^8.57.1 || ^9.0.0", + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", 
+ "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esm-env": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", + "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrap": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.2.1.tgz", + "integrity": "sha512-GiYWG34AN/4CUyaWAgunGt0Rxvr1PTMlGC0vvEov/uOQYWne2bpN03Um+k8jT+q3op33mKouP2zeJ6OlM+qeUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": 
">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extract-files": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/extract-files/-/extract-files-13.0.0.tgz", + "integrity": "sha512-FXD+2Tsr8Iqtm3QZy1Zmwscca7Jx3mMC5Crr+sEP1I303Jy1CYMuYCm7hRTplFNg3XdUavErkxnTzpaqdSoi6g==", + "license": "MIT", + "dependencies": { + "is-plain-obj": "^4.1.0" + }, + "engines": { + "node": "^14.17.0 || ^16.0.0 || >= 18.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/jaydenseric" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": 
"sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphql": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.12.0.tgz", + "integrity": "sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==", + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" + } + }, + "node_modules/graphql-tag": { + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.6.tgz", + "integrity": "sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-reference": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/known-css-properties": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.37.0.tgz", + "integrity": "sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lightningcss": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", + "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + 
"detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.30.2", + "lightningcss-darwin-arm64": "1.30.2", + "lightningcss-darwin-x64": "1.30.2", + "lightningcss-freebsd-x64": "1.30.2", + "lightningcss-linux-arm-gnueabihf": "1.30.2", + "lightningcss-linux-arm64-gnu": "1.30.2", + "lightningcss-linux-arm64-musl": "1.30.2", + "lightningcss-linux-x64-gnu": "1.30.2", + "lightningcss-linux-x64-musl": "1.30.2", + "lightningcss-win32-arm64-msvc": "1.30.2", + "lightningcss-win32-x64-msvc": "1.30.2" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", + "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", + "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", + "integrity": 
"sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", + "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", + "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", + "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", + "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", + "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", + "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", + "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": 
"MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", + "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": 
"sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mini-svg-data-uri": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", + "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==", + "dev": true, + "license": "MIT", + "bin": { + "mini-svg-data-uri": "cli.js" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/optimism": { + "version": 
"0.18.1", + "resolved": "https://registry.npmjs.org/optimism/-/optimism-0.18.1.tgz", + "integrity": "sha512-mLXNwWPa9dgFyDqkNi54sjDyNJ9/fTI6WGBLgnXku1vdKY/jovHfZT5r+aiVeFFLOz+foPNOm5YJ4mqgld2GBQ==", + "license": "MIT", + "dependencies": { + "@wry/caches": "^1.0.0", + "@wry/context": "^0.7.0", + "@wry/trie": "^0.5.0", + "tslib": "^2.3.0" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": 
"https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-load-config": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "dev": true, + "license": "MIT", + "dependencies": { + "lilconfig": "^2.0.5", + "yaml": "^1.10.2" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/postcss-load-config/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss-safe-parser": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-7.0.1.tgz", + "integrity": "sha512-0AioNCJZ2DPYz5ABT6bddIqlhgwhpHZ/l65YAYo0BCIn0xiDpsnTHz0gnoTGk0OXZW0JRs+cDwL8u/teRdz+8A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-safe-parser" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": 
"MIT", + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "postcss": "^8.4.31" + } + }, + "node_modules/postcss-scss": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/postcss-scss/-/postcss-scss-4.0.9.tgz", + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-scss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.4.29" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", + "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-svelte": { 
+ "version": "3.4.1", + "resolved": "https://registry.npmjs.org/prettier-plugin-svelte/-/prettier-plugin-svelte-3.4.1.tgz", + "integrity": "sha512-xL49LCloMoZRvSwa6IEdN2GV6cq2IqpYGstYtMT+5wmml1/dClEoI0MZR78MiVPpu6BdQFfN0/y73yO6+br5Pg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "prettier": "^3.0.0", + "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" + } + }, + "node_modules/prettier-plugin-tailwindcss": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.7.2.tgz", + "integrity": "sha512-LkphyK3Fw+q2HdMOoiEHWf93fNtYJwfamoKPl7UwtjFQdei/iIBoX11G6j706FzN3ymX9mPVi97qIY8328vdnA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.19" + }, + "peerDependencies": { + "@ianvs/prettier-plugin-sort-imports": "*", + "@prettier/plugin-hermes": "*", + "@prettier/plugin-oxc": "*", + "@prettier/plugin-pug": "*", + "@shopify/prettier-plugin-liquid": "*", + "@trivago/prettier-plugin-sort-imports": "*", + "@zackad/prettier-plugin-twig": "*", + "prettier": "^3.0", + "prettier-plugin-astro": "*", + "prettier-plugin-css-order": "*", + "prettier-plugin-jsdoc": "*", + "prettier-plugin-marko": "*", + "prettier-plugin-multiline-arrays": "*", + "prettier-plugin-organize-attributes": "*", + "prettier-plugin-organize-imports": "*", + "prettier-plugin-sort-imports": "*", + "prettier-plugin-svelte": "*" + }, + "peerDependenciesMeta": { + "@ianvs/prettier-plugin-sort-imports": { + "optional": true + }, + "@prettier/plugin-hermes": { + "optional": true + }, + "@prettier/plugin-oxc": { + "optional": true + }, + "@prettier/plugin-pug": { + "optional": true + }, + "@shopify/prettier-plugin-liquid": { + "optional": true + }, + "@trivago/prettier-plugin-sort-imports": { + "optional": true + }, + "@zackad/prettier-plugin-twig": { + "optional": true + }, + "prettier-plugin-astro": { + "optional": true + }, + "prettier-plugin-css-order": { + "optional": true + }, + "prettier-plugin-jsdoc": { + 
"optional": true + }, + "prettier-plugin-marko": { + "optional": true + }, + "prettier-plugin-multiline-arrays": { + "optional": true + }, + "prettier-plugin-organize-attributes": { + "optional": true + }, + "prettier-plugin-organize-imports": { + "optional": true + }, + "prettier-plugin-sort-imports": { + "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + } + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, + "license": "MIT" + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/rollup": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.54.0.tgz", + "integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.54.0", + "@rollup/rollup-android-arm64": "4.54.0", + "@rollup/rollup-darwin-arm64": "4.54.0", + "@rollup/rollup-darwin-x64": "4.54.0", + "@rollup/rollup-freebsd-arm64": "4.54.0", + "@rollup/rollup-freebsd-x64": "4.54.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.54.0", + "@rollup/rollup-linux-arm-musleabihf": "4.54.0", + "@rollup/rollup-linux-arm64-gnu": "4.54.0", + "@rollup/rollup-linux-arm64-musl": "4.54.0", + "@rollup/rollup-linux-loong64-gnu": "4.54.0", + "@rollup/rollup-linux-ppc64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-musl": "4.54.0", + "@rollup/rollup-linux-s390x-gnu": "4.54.0", + "@rollup/rollup-linux-x64-gnu": "4.54.0", + "@rollup/rollup-linux-x64-musl": "4.54.0", + "@rollup/rollup-openharmony-arm64": "4.54.0", + "@rollup/rollup-win32-arm64-msvc": "4.54.0", + "@rollup/rollup-win32-ia32-msvc": "4.54.0", + "@rollup/rollup-win32-x64-gnu": "4.54.0", + "@rollup/rollup-win32-x64-msvc": "4.54.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": 
"sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "dev": true, + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sirv": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svelte": { + "version": "5.46.1", + 
"resolved": "https://registry.npmjs.org/svelte/-/svelte-5.46.1.tgz", + "integrity": "sha512-ynjfCHD3nP2el70kN5Pmg37sSi0EjOm9FgHYQdC4giWG/hzO3AatzXXJJgP305uIhGQxSufJLuYWtkY8uK/8RA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "acorn": "^8.12.1", + "aria-query": "^5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "devalue": "^5.5.0", + "esm-env": "^1.2.1", + "esrap": "^2.2.1", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/svelte-check": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.5.tgz", + "integrity": "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "chokidar": "^4.0.1", + "fdir": "^6.2.0", + "picocolors": "^1.0.0", + "sade": "^1.7.4" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": ">=5.0.0" + } + }, + "node_modules/svelte-eslint-parser": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-1.4.1.tgz", + "integrity": "sha512-1eqkfQ93goAhjAXxZiu1SaKI9+0/sxp4JIWQwUpsz7ybehRE5L8dNuz7Iry7K22R47p5/+s9EM+38nHV2OlgXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-scope": "^8.2.0", + "eslint-visitor-keys": "^4.0.0", + "espree": "^10.0.0", + "postcss": "^8.4.49", + "postcss-scss": "^4.0.9", + "postcss-selector-parser": "^7.0.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0", + "pnpm": "10.24.0" + }, + "funding": { + "url": 
"https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/svelte-eslint-parser/node_modules/postcss-selector-parser": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", + "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/svelte/node_modules/is-reference": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6" + } + }, + "node_modules/tailwindcss": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", + "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ts-api-utils": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.3.0.tgz", + "integrity": "sha512-6eg3Y9SF7SsAvGzRHQvvc1skDAhwI4YQ32ui1scxD1Ccr0G5qIIbUBT3pFTKX8kmWIQClHobtUdNuaBgwdfdWg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "8.51.0", + "resolved": 
"https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.51.0.tgz", + "integrity": "sha512-jh8ZuM5oEh2PSdyQG9YAEM1TCGuWenLSuSUhf/irbVUNW9O5FhbFVONviN2TgMTBnUmyHv7E56rYnfLZK6TkiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.51.0", + "@typescript-eslint/parser": "8.51.0", + "@typescript-eslint/typescript-estree": "8.51.0", + "@typescript-eslint/utils": "8.51.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + 
"uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/vite": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", + "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-plugin-devtools-json": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/vite-plugin-devtools-json/-/vite-plugin-devtools-json-1.0.0.tgz", + "integrity": "sha512-MobvwqX76Vqt/O4AbnNMNWoXWGrKUqZbphCUle/J2KXH82yKQiunOeKnz/nqEPosPsoWWPP9FtNuPBSYpiiwkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "uuid": "^11.1.0" + }, + "peerDependencies": { + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/vite-plugin-mkcert": { + "version": "1.17.9", + "resolved": 
"https://registry.npmjs.org/vite-plugin-mkcert/-/vite-plugin-mkcert-1.17.9.tgz", + "integrity": "sha512-SwI7yqp2Cq4r2XItarnHRCj2uzHPqevbxFNMLpyN+LDXd5w1vmZeM4l5X/wCZoP4mjPQYN+9+4kmE6e3nPO5fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "axios": "^1.12.2", + "debug": "^4.4.3", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=v16.7.0" + }, + "peerDependencies": { + "vite": ">=3" + } + }, + "node_modules/vitefu": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", + "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "dev": true, + "license": "MIT", + "workspaces": [ + "tests/deps/*", + "tests/projects/*", + "tests/projects/workspace/packages/*" + ], + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zimmerframe": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.4.tgz", + "integrity": "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..e343e23 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,49 @@ +{ + "name": "frontend", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "prepare": "svelte-kit sync || echo ''", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "format": "prettier --write .", + "lint": "prettier --check . && eslint ." + }, + "devDependencies": { + "@eslint/compat": "^1.4.0", + "@eslint/js": "^9.39.1", + "@sveltejs/adapter-node": "^5.4.0", + "@sveltejs/kit": "^2.49.1", + "@sveltejs/vite-plugin-svelte": "^6.2.1", + "@tailwindcss/forms": "^0.5.10", + "@tailwindcss/typography": "^0.5.19", + "@tailwindcss/vite": "^4.1.17", + "@types/apollo-upload-client": "^19.0.0", + "@types/node": "^24", + "eslint": "^9.39.1", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-svelte": "^3.13.1", + "globals": "^16.5.0", + "prettier": "^3.7.4", + "prettier-plugin-svelte": "^3.4.0", + "prettier-plugin-tailwindcss": "^0.7.2", + "svelte": "^5.45.6", + "svelte-check": "^4.3.4", + "tailwindcss": "^4.1.17", + "typescript": "^5.9.3", + "typescript-eslint": "^8.48.1", + "vite": "^7.2.6", + "vite-plugin-devtools-json": "^1.0.0", + "vite-plugin-mkcert": "^1.17.9" + }, + "dependencies": { + "@apollo/client": "^4.0.11", + "apollo-upload-client": "^19.0.0", + "date-fns": "^4.1.0", + "graphql": "^16.12.0" + } +} diff --git 
a/frontend/src/app.d.ts b/frontend/src/app.d.ts new file mode 100644 index 0000000..9969761 --- /dev/null +++ b/frontend/src/app.d.ts @@ -0,0 +1,56 @@ +// See https://svelte.dev/docs/kit/types#app.d.ts +// for information about these interfaces + +// Kratos session identity +interface SessionIdentity { + id: string; + traits: { + email?: string; + name?: { + first?: string; + last?: string; + }; + phone?: string; + profile_type?: 'team' | 'customer'; + }; + metadata_public?: { + django_profile_id?: string; + customer_id?: string; + }; +} + +// Kratos session +interface Session { + id: string; + active: boolean; + identity: SessionIdentity; + expires_at?: string; + authenticated_at?: string; +} + +// User from GraphQL Me query +interface User { + __typename: 'TeamProfileType' | 'CustomerProfileType'; + id: string; + fullName: string; + email: string; + phone?: string; + role?: 'ADMIN' | 'TEAM_LEADER' | 'TEAM_MEMBER'; // TeamProfile only + customers?: Array<{ id: string; name: string }>; // CustomerProfile only +} + +declare global { + namespace App { + // interface Error {} + interface Locals { + cookie: string | null; + } + interface PageData { + user: User | null; + } + // interface PageState {} + // interface Platform {} + } +} + +export { Session, SessionIdentity, User }; diff --git a/frontend/src/app.html b/frontend/src/app.html new file mode 100644 index 0000000..a822b35 --- /dev/null +++ b/frontend/src/app.html @@ -0,0 +1,28 @@ + + + + + + + %sveltekit.head% + Nexus + + +
%sveltekit.body%
+ + diff --git a/frontend/src/hooks.server.ts b/frontend/src/hooks.server.ts new file mode 100644 index 0000000..27dfe0c --- /dev/null +++ b/frontend/src/hooks.server.ts @@ -0,0 +1,10 @@ +import type { Handle } from '@sveltejs/kit'; +import { KRATOS_SESSION_COOKIE } from '$lib/config'; + +export const handle: Handle = async ({ event, resolve }) => { + // Extract Kratos session cookie for forwarding to backend + const cookie = event.cookies.get(KRATOS_SESSION_COOKIE); + event.locals.cookie = cookie ? `${KRATOS_SESSION_COOKIE}=${cookie}` : null; + + return resolve(event); +}; diff --git a/frontend/src/lib/assets/favicon.svg b/frontend/src/lib/assets/favicon.svg new file mode 100644 index 0000000..cc5dc66 --- /dev/null +++ b/frontend/src/lib/assets/favicon.svg @@ -0,0 +1 @@ +svelte-logo \ No newline at end of file diff --git a/frontend/src/lib/assets/floors.jpg b/frontend/src/lib/assets/floors.jpg new file mode 100644 index 0000000..a1fe338 Binary files /dev/null and b/frontend/src/lib/assets/floors.jpg differ diff --git a/frontend/src/lib/assets/hero.jpg b/frontend/src/lib/assets/hero.jpg new file mode 100644 index 0000000..986fa6e Binary files /dev/null and b/frontend/src/lib/assets/hero.jpg differ diff --git a/frontend/src/lib/assets/kitchens.jpg b/frontend/src/lib/assets/kitchens.jpg new file mode 100644 index 0000000..6016324 Binary files /dev/null and b/frontend/src/lib/assets/kitchens.jpg differ diff --git a/frontend/src/lib/assets/logo-icon.png b/frontend/src/lib/assets/logo-icon.png new file mode 100644 index 0000000..dbeff20 Binary files /dev/null and b/frontend/src/lib/assets/logo-icon.png differ diff --git a/frontend/src/lib/components/PublicBackLink.svelte b/frontend/src/lib/components/PublicBackLink.svelte new file mode 100644 index 0000000..0c75a4e --- /dev/null +++ b/frontend/src/lib/components/PublicBackLink.svelte @@ -0,0 +1,19 @@ + + + + + + + {label} + diff --git a/frontend/src/lib/components/admin/AdminBottomNav.svelte 
b/frontend/src/lib/components/admin/AdminBottomNav.svelte new file mode 100644 index 0000000..f3b81f9 --- /dev/null +++ b/frontend/src/lib/components/admin/AdminBottomNav.svelte @@ -0,0 +1,148 @@ + + + { + if (addMenuOpen) closeMenu(); + }} +/> + + diff --git a/frontend/src/lib/components/admin/AdminDashboardHeader.svelte b/frontend/src/lib/components/admin/AdminDashboardHeader.svelte new file mode 100644 index 0000000..6494fec --- /dev/null +++ b/frontend/src/lib/components/admin/AdminDashboardHeader.svelte @@ -0,0 +1,250 @@ + + + + +
+ {#if showBackButton} + +
+
+ + {#if isNavigatingBack} + + + + + {:else} + + + + {/if} + +
+
+

{title}

+ +
+ {#if subtitleSnippet} +

+ {@render subtitleSnippet({ toggleMenu })} +

+ {:else if subtitle} +

{subtitle}

+ {/if} +
+
+ {#if actions} +
+ {@render actions()} +
+ {/if} +
+ {:else} + +
+
+
+

{title}

+ +
+ {#if subtitleSnippet} +

+ {@render subtitleSnippet({ toggleMenu })} +

+ {:else if subtitle} +

{subtitle}

+ {/if} +
+ {#if actions} +
+ {@render actions()} +
+ {/if} +
+ {/if} +
diff --git a/frontend/src/lib/components/admin/MonthSelector.svelte b/frontend/src/lib/components/admin/MonthSelector.svelte new file mode 100644 index 0000000..fc20eda --- /dev/null +++ b/frontend/src/lib/components/admin/MonthSelector.svelte @@ -0,0 +1,79 @@ + + +
+ + + + + +
diff --git a/frontend/src/lib/components/admin/PageHeader.svelte b/frontend/src/lib/components/admin/PageHeader.svelte new file mode 100644 index 0000000..ded0e52 --- /dev/null +++ b/frontend/src/lib/components/admin/PageHeader.svelte @@ -0,0 +1,116 @@ + + +
+
+ + + {#if isNavigating} + + + + + {:else} + + {/if} + + + +
+

{title}

+ {#if subtitle} +

{subtitle}

+ {/if} + {#if children} +
+ {@render children()} +
+ {/if} +
+ + + {#if onEdit} + + {/if} +
+
diff --git a/frontend/src/lib/components/admin/Pagination.svelte b/frontend/src/lib/components/admin/Pagination.svelte new file mode 100644 index 0000000..a962242 --- /dev/null +++ b/frontend/src/lib/components/admin/Pagination.svelte @@ -0,0 +1,123 @@ + + +{#if totalCount > 0} +
+ +

+ Showing {startItem} + to {endItem} + of {totalCount} results +

+ + +
+ + + + + {#each pageNumbers as pageNum} + {#if pageNum === 'ellipsis'} + ... + {:else} + + {/if} + {/each} + + + +
+
+{/if} diff --git a/frontend/src/lib/components/admin/SectionHeader.svelte b/frontend/src/lib/components/admin/SectionHeader.svelte new file mode 100644 index 0000000..52b58af --- /dev/null +++ b/frontend/src/lib/components/admin/SectionHeader.svelte @@ -0,0 +1,30 @@ + + +
+

{title}

+ {#if onButtonClick && buttonText} + + {/if} +
diff --git a/frontend/src/lib/components/admin/StatusTabs.svelte b/frontend/src/lib/components/admin/StatusTabs.svelte new file mode 100644 index 0000000..f834e8a --- /dev/null +++ b/frontend/src/lib/components/admin/StatusTabs.svelte @@ -0,0 +1,111 @@ + + +
+ {#each tabs as tab} + + {/each} +
diff --git a/frontend/src/lib/components/admin/index.ts b/frontend/src/lib/components/admin/index.ts new file mode 100644 index 0000000..5e0c377 --- /dev/null +++ b/frontend/src/lib/components/admin/index.ts @@ -0,0 +1,7 @@ +export { default as AdminBottomNav } from './AdminBottomNav.svelte'; +export { default as AdminDashboardHeader } from './AdminDashboardHeader.svelte'; +export { default as MonthSelector } from './MonthSelector.svelte'; +export { default as PageHeader } from './PageHeader.svelte'; +export { default as Pagination } from './Pagination.svelte'; +export { default as SectionHeader } from './SectionHeader.svelte'; +export { default as StatusTabs } from './StatusTabs.svelte'; diff --git a/frontend/src/lib/components/admin/services/GenerateServicesModal.svelte b/frontend/src/lib/components/admin/services/GenerateServicesModal.svelte new file mode 100644 index 0000000..f6a0fe3 --- /dev/null +++ b/frontend/src/lib/components/admin/services/GenerateServicesModal.svelte @@ -0,0 +1,554 @@ + + + + +{#if open} + + + + + +{/if} diff --git a/frontend/src/lib/components/customer/CustomerBottomNav.svelte b/frontend/src/lib/components/customer/CustomerBottomNav.svelte new file mode 100644 index 0000000..0e79ccb --- /dev/null +++ b/frontend/src/lib/components/customer/CustomerBottomNav.svelte @@ -0,0 +1,43 @@ + + + diff --git a/frontend/src/lib/components/customer/CustomerDashboardHeader.svelte b/frontend/src/lib/components/customer/CustomerDashboardHeader.svelte new file mode 100644 index 0000000..4e96816 --- /dev/null +++ b/frontend/src/lib/components/customer/CustomerDashboardHeader.svelte @@ -0,0 +1,15 @@ + + +
+

{title}

+ {#if subtitle} +

{subtitle}

+ {/if} +
diff --git a/frontend/src/lib/components/customer/CustomerPageHeader.svelte b/frontend/src/lib/components/customer/CustomerPageHeader.svelte new file mode 100644 index 0000000..744f1a1 --- /dev/null +++ b/frontend/src/lib/components/customer/CustomerPageHeader.svelte @@ -0,0 +1,84 @@ + + +
+
+ + + {#if isNavigating} + + + + + {:else} + + {/if} + + + +
+
+

{title}

+ {#if children} + {@render children()} + {/if} +
+ {#if subtitle} +

{subtitle}

+ {/if} +
+
+
diff --git a/frontend/src/lib/components/customer/CustomerSectionContainer.svelte b/frontend/src/lib/components/customer/CustomerSectionContainer.svelte new file mode 100644 index 0000000..150089d --- /dev/null +++ b/frontend/src/lib/components/customer/CustomerSectionContainer.svelte @@ -0,0 +1,44 @@ + + +
+
+

{title}

+
+
+ {@render children()} +
+
diff --git a/frontend/src/lib/components/customer/index.ts b/frontend/src/lib/components/customer/index.ts new file mode 100644 index 0000000..777e575 --- /dev/null +++ b/frontend/src/lib/components/customer/index.ts @@ -0,0 +1,4 @@ +export { default as CustomerBottomNav } from './CustomerBottomNav.svelte'; +export { default as CustomerDashboardHeader } from './CustomerDashboardHeader.svelte'; +export { default as CustomerPageHeader } from './CustomerPageHeader.svelte'; +export { default as CustomerSectionContainer } from './CustomerSectionContainer.svelte'; diff --git a/frontend/src/lib/components/drawers/FormDrawer.svelte b/frontend/src/lib/components/drawers/FormDrawer.svelte new file mode 100644 index 0000000..0bf8dda --- /dev/null +++ b/frontend/src/lib/components/drawers/FormDrawer.svelte @@ -0,0 +1,89 @@ + + + + +{#if open} + + + + + + + +{/if} diff --git a/frontend/src/lib/components/drawers/index.ts b/frontend/src/lib/components/drawers/index.ts new file mode 100644 index 0000000..97c8390 --- /dev/null +++ b/frontend/src/lib/components/drawers/index.ts @@ -0,0 +1 @@ +export { default as FormDrawer } from './FormDrawer.svelte'; diff --git a/frontend/src/lib/components/forms/AccountAddressForm.svelte b/frontend/src/lib/components/forms/AccountAddressForm.svelte new file mode 100644 index 0000000..8b5a227 --- /dev/null +++ b/frontend/src/lib/components/forms/AccountAddressForm.svelte @@ -0,0 +1,209 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+ +
+
+ + +
+ {#if isEdit} +
+ + +
+ {/if} +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/AccountContactForm.svelte b/frontend/src/lib/components/forms/AccountContactForm.svelte new file mode 100644 index 0000000..ab3a9ba --- /dev/null +++ b/frontend/src/lib/components/forms/AccountContactForm.svelte @@ -0,0 +1,162 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+
+ + +
+
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/AccountCreateForm.svelte b/frontend/src/lib/components/forms/AccountCreateForm.svelte new file mode 100644 index 0000000..2f8b647 --- /dev/null +++ b/frontend/src/lib/components/forms/AccountCreateForm.svelte @@ -0,0 +1,176 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + {#if customersLoading} +
+ + + + + Loading customers... +
+ {:else} + + {/if} +
+ +
+ + +
+ +
+ + +
+ +
+
+ + +
+
+ + +

Leave blank for ongoing account

+
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/AccountForm.svelte b/frontend/src/lib/components/forms/AccountForm.svelte new file mode 100644 index 0000000..20370f8 --- /dev/null +++ b/frontend/src/lib/components/forms/AccountForm.svelte @@ -0,0 +1,261 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ + +
+

Monthly Revenue

+ +
+
+ +
+ $ + +
+
+ +
+ + +

Wave accounting product for invoicing

+
+ + {#if activeRevenue && revenueChanged} + + {/if} +
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/AddressForm.svelte b/frontend/src/lib/components/forms/AddressForm.svelte new file mode 100644 index 0000000..3963781 --- /dev/null +++ b/frontend/src/lib/components/forms/AddressForm.svelte @@ -0,0 +1,207 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+ +
+
+ + +
+ {#if isEdit} +
+ + +
+ {/if} +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/AreaForm.svelte b/frontend/src/lib/components/forms/AreaForm.svelte new file mode 100644 index 0000000..d7aaa9e --- /dev/null +++ b/frontend/src/lib/components/forms/AreaForm.svelte @@ -0,0 +1,121 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +

Lower numbers appear first

+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/CalendarEventForm.svelte b/frontend/src/lib/components/forms/CalendarEventForm.svelte new file mode 100644 index 0000000..42e7213 --- /dev/null +++ b/frontend/src/lib/components/forms/CalendarEventForm.svelte @@ -0,0 +1,518 @@ + + +
+ + + + + + + + {#if !isAllDay} +
+ + + + Work begins after midnight + + {#if isAfterMidnight} + + Calendar event will be scheduled for {actualStartDate} (next calendar day) + + {:else} + + Check if work starts after midnight but belongs to the previous business day + + {/if} + +
+ {/if} + + +
+ +
+ + + {#if !isAllDay} + + {/if} +
+ + +
+ + + {#if !isAllDay} + + {/if} +
+
+ + + + + + + + +
+ Color (optional) +
+ + + {#each EVENT_COLORS as color (color.id)} + + {/each} +
+
+ + +
+ Attendees + {#if availableAttendees.length > 0} +
+ {#each availableAttendees as profile (profile.id)} + {@const selected = isAttendeeSelected(profile.email)} + + {/each} +
+ {#if selectedAttendeeEmails.size > 0} +

+ {selectedAttendeeEmails.size} attendee{selectedAttendeeEmails.size !== 1 ? 's' : ''} selected +

+ {/if} + {:else} +

No team members with email addresses available.

+ {/if} +
+ + +
+ Reminders + + + + + + {#if !useDefaultReminders} +
+ {#if customReminders.length === 0} +

No reminders set. Add one below.

+ {:else} + {#each customReminders as reminder, index (index)} +
+ + + + + + + + +
+ {/each} + {/if} + + + +
+ {/if} +
+ + {#if error} +
+

{error}

+
+ {/if} + + +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ContactForm.svelte b/frontend/src/lib/components/forms/ContactForm.svelte new file mode 100644 index 0000000..aca391d --- /dev/null +++ b/frontend/src/lib/components/forms/ContactForm.svelte @@ -0,0 +1,162 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+
+ + +
+
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/CustomerCreateForm.svelte b/frontend/src/lib/components/forms/CustomerCreateForm.svelte new file mode 100644 index 0000000..91f346b --- /dev/null +++ b/frontend/src/lib/components/forms/CustomerCreateForm.svelte @@ -0,0 +1,237 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +

Wave accounting customer ID for invoicing

+
+ + +
+

Billing Address

+

Optional. You can add this later.

+ +
+
+ + +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+
+
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/CustomerForm.svelte b/frontend/src/lib/components/forms/CustomerForm.svelte new file mode 100644 index 0000000..ae11264 --- /dev/null +++ b/frontend/src/lib/components/forms/CustomerForm.svelte @@ -0,0 +1,315 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +

Wave accounting customer ID for invoicing

+
+ + +
+

Billing Address

+ +
+
+ + +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+ + {#if activeAddress && addressChanged} + + {/if} +
+
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/CustomerProfileCreateForm.svelte b/frontend/src/lib/components/forms/CustomerProfileCreateForm.svelte new file mode 100644 index 0000000..1f4eda6 --- /dev/null +++ b/frontend/src/lib/components/forms/CustomerProfileCreateForm.svelte @@ -0,0 +1,183 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +

+ The UUID from Kratos identity. This links the profile to authentication. +

+ {#if kratosIdentityId && !isValidKratosId} +

Invalid UUID format

+ {/if} +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/CustomerProfileForm.svelte b/frontend/src/lib/components/forms/CustomerProfileForm.svelte new file mode 100644 index 0000000..664dc9c --- /dev/null +++ b/frontend/src/lib/components/forms/CustomerProfileForm.svelte @@ -0,0 +1,153 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/LaborForm.svelte b/frontend/src/lib/components/forms/LaborForm.svelte new file mode 100644 index 0000000..42485c2 --- /dev/null +++ b/frontend/src/lib/components/forms/LaborForm.svelte @@ -0,0 +1,133 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ +
+ $ + +
+

Hourly labor rate paid for this location

+
+ +
+ + +

When this rate becomes effective

+
+ + {#if !isEdit} +
+

+ Creating a new labor rate will automatically end any existing active rate. +

+
+ {/if} + +
+ + +
+
diff --git a/frontend/src/lib/components/forms/LocationForm.svelte b/frontend/src/lib/components/forms/LocationForm.svelte new file mode 100644 index 0000000..bcda552 --- /dev/null +++ b/frontend/src/lib/components/forms/LocationForm.svelte @@ -0,0 +1,317 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +

Optional friendly name for this location

+
+ +
+ + +
+ +
+
+ + +
+ +
+ + +
+ +
+ + +
+
+ +
+ + +
+ + +
+

Labor Paid

+ +
+
+ +
+ $ + +
+

Amount paid to cleaners for this location

+
+ + {#if activeLabor && laborChanged} + + {/if} +
+
+ +
+ +

+ Primary location is used as the default for billing and communications +

+ + +

+ Inactive locations are hidden from day-to-day operations +

+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectAmountForm.svelte b/frontend/src/lib/components/forms/ProjectAmountForm.svelte new file mode 100644 index 0000000..60098ff --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectAmountForm.svelte @@ -0,0 +1,115 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ +
+ $ + +
+
+ +
+ + +

Link to Wave accounting product for invoicing

+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectCategoryForm.svelte b/frontend/src/lib/components/forms/ProjectCategoryForm.svelte new file mode 100644 index 0000000..10a898e --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectCategoryForm.svelte @@ -0,0 +1,119 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +

Lower numbers appear first

+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectCreateForm.svelte b/frontend/src/lib/components/forms/ProjectCreateForm.svelte new file mode 100644 index 0000000..239efb8 --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectCreateForm.svelte @@ -0,0 +1,410 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + {#if loadingCustomers} +
Loading customers...
+ {:else} + + {/if} +
+ +
+ + +
+ +
+ + +
+ + +
+

+ Location * +

+ + + {#if customerId && accounts.length > 0} +
+ + +
+ {/if} + + {#if locationType === 'account' && customerId && accounts.length > 0} + +
+
+ + {#if loadingAccounts} +
Loading accounts...
+ {:else} + + {/if} +
+ + {#if accountId} +
+ + {#if loadingAddresses} +
Loading locations...
+ {:else if activeAddresses.length === 0} +
No active locations for this account
+ {:else} + + {/if} +
+ {/if} +
+ {:else} + +
+
+ + +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+
+ {/if} +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectForm.svelte b/frontend/src/lib/components/forms/ProjectForm.svelte new file mode 100644 index 0000000..2c11bea --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectForm.svelte @@ -0,0 +1,207 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ +
+ $ + +
+

Labor cost for this project

+
+ +
+ +
+ $ + +
+

Invoice amount for this project

+
+ +
+ + +

Wave accounting product for invoicing

+
+ +
+ + +

Google Calendar event ID for scheduling

+
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectScopeForm.svelte b/frontend/src/lib/components/forms/ProjectScopeForm.svelte new file mode 100644 index 0000000..0c30e8c --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectScopeForm.svelte @@ -0,0 +1,267 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + + {#if !isEdit && templates.length > 0} +
+ + +
+ + {#if createMode === 'template'} +
+ + +
+ + {#if selectedTemplate} +
+

+ {selectedTemplate.name} +

+ {#if selectedTemplate.description} +

+ {selectedTemplate.description} +

+ {/if} +

+ This will create a scope with {selectedTemplate.categoryCount} categor{selectedTemplate.categoryCount !== + 1 + ? 'ies' + : 'y'} + and {selectedTemplate.taskCount} task{selectedTemplate.taskCount !== 1 ? 's' : ''}. +

+
+ {/if} + {/if} + {/if} + + {#if isEdit || createMode === 'blank'} +
+ + +
+ +
+ + +
+ {/if} + + {#if !isEdit} +
+

+ {#if createMode === 'template'} + Creating from a template will copy all categories and tasks. Any existing active scope + will be deactivated. + {:else} + Creating a new scope will automatically deactivate any existing active scope. You can add + categories and tasks after creating the scope. + {/if} +

+
+ {/if} + + {#if isEdit && scope?.categories && scope.categories.length > 0} +
+

+ This scope has {scope.categories.length} categor{scope.categories.length === 1 + ? 'y' + : 'ies'} with tasks. +

+
+ {/if} + +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ProjectTaskForm.svelte b/frontend/src/lib/components/forms/ProjectTaskForm.svelte new file mode 100644 index 0000000..b052db2 --- /dev/null +++ b/frontend/src/lib/components/forms/ProjectTaskForm.svelte @@ -0,0 +1,170 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +

Customer-facing description for scope documents

+
+ +
+ + +
+ +
+ + +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/RevenueForm.svelte b/frontend/src/lib/components/forms/RevenueForm.svelte new file mode 100644 index 0000000..011f859 --- /dev/null +++ b/frontend/src/lib/components/forms/RevenueForm.svelte @@ -0,0 +1,172 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + + {#if !isEdit} +
+

+ Creating a new rate will end the current active rate as of today. +

+
+ {/if} + +
+ +
+ $ + +
+
+ +
+
+ + +
+ {#if isEdit} +
+ + +
+ {/if} +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ScheduleForm.svelte b/frontend/src/lib/components/forms/ScheduleForm.svelte new file mode 100644 index 0000000..80fd888 --- /dev/null +++ b/frontend/src/lib/components/forms/ScheduleForm.svelte @@ -0,0 +1,339 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+
+ Service Days +
+ + | + +
+
+ +
+ {#each [{ key: 'monday', label: 'Mon', short: 'M' }, { key: 'tuesday', label: 'Tue', short: 'T' }, { key: 'wednesday', label: 'Wed', short: 'W' }, { key: 'thursday', label: 'Thu', short: 'Th' }, { key: 'friday', label: 'Fri', short: 'F', isWeekend: true }, { key: 'saturday', label: 'Sat', short: 'Sa', isWeekend: true }, { key: 'sunday', label: 'Sun', short: 'Su', isWeekend: true }] as day} + + {/each} +
+
+ +
+
+ +
+ +

+ Service can be completed anytime Friday through Sunday +

+
+
+
+ +
+ + +
+ +
+
+ + +
+
+ + +

Leave blank for ongoing schedule

+
+
+ +
+

+ Schedules cannot have overlapping date ranges. Each schedule must have a unique time period. +

+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ScopeForm.svelte b/frontend/src/lib/components/forms/ScopeForm.svelte new file mode 100644 index 0000000..1b4a59c --- /dev/null +++ b/frontend/src/lib/components/forms/ScopeForm.svelte @@ -0,0 +1,271 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + + {#if !isEdit && templates.length > 0} +
+ + +
+ + {#if createMode === 'template'} +
+ + +
+ + {#if selectedTemplate} +
+

+ {selectedTemplate.name} +

+ {#if selectedTemplate.description} +

+ {selectedTemplate.description} +

+ {/if} +

+ This will create a scope with {selectedTemplate.areaCount} area{selectedTemplate.areaCount !== + 1 + ? 's' + : ''} + and {selectedTemplate.taskCount} task{selectedTemplate.taskCount !== 1 ? 's' : ''}. +

+
+ {/if} + {/if} + {/if} + + {#if isEdit || createMode === 'blank'} +
+ + +
+ +
+ + +
+ {/if} + + {#if !isEdit} +
+

+ {#if createMode === 'template'} + Creating from a template will copy all areas and tasks. Any existing active scope will be + deactivated. + {:else} + Creating a new scope will automatically deactivate any existing active scope. You can add + areas and tasks after creating the scope. + {/if} +

+
+ {/if} + + {#if isEdit && scope?.areas && scope.areas.length > 0} +
+

+ This scope has {scope.areas.length} area{scope.areas.length === 1 ? '' : 's'} with tasks. Editing + areas and tasks is coming soon. +

+
+ {/if} + +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ServiceCreateForm.svelte b/frontend/src/lib/components/forms/ServiceCreateForm.svelte new file mode 100644 index 0000000..58a8343 --- /dev/null +++ b/frontend/src/lib/components/forms/ServiceCreateForm.svelte @@ -0,0 +1,213 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + {#if loadingAccounts} +
Loading accounts...
+ {:else} + + {/if} +
+ +
+ + {#if !accountId} +
Select an account first
+ {:else if loadingAddresses} +
Loading locations...
+ {:else if activeAddresses.length === 0} +
No active locations for this account
+ {:else} + + {/if} +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/ServiceForm.svelte b/frontend/src/lib/components/forms/ServiceForm.svelte new file mode 100644 index 0000000..48e04ca --- /dev/null +++ b/frontend/src/lib/components/forms/ServiceForm.svelte @@ -0,0 +1,124 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +
+ +
+ + +
+ +
+ + +

Google Calendar event ID for scheduling

+
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/TaskForm.svelte b/frontend/src/lib/components/forms/TaskForm.svelte new file mode 100644 index 0000000..6bf1947 --- /dev/null +++ b/frontend/src/lib/components/forms/TaskForm.svelte @@ -0,0 +1,208 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +

This appears on the scope of work document

+
+ +
+ + +
+ +
+ + +
+ +
+
+ + +
+ +
+ + +
+ +
+ + +
+
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/TeamProfileCreateForm.svelte b/frontend/src/lib/components/forms/TeamProfileCreateForm.svelte new file mode 100644 index 0000000..7f69972 --- /dev/null +++ b/frontend/src/lib/components/forms/TeamProfileCreateForm.svelte @@ -0,0 +1,197 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + +

+ The UUID from Kratos identity. This links the profile to authentication. +

+ {#if kratosIdentityId && !isValidKratosId} +

Invalid UUID format

+ {/if} +
+ +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+

Role & Access

+ +
+
+ + +
+ +
+ + +
+
+
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/TeamProfileForm.svelte b/frontend/src/lib/components/forms/TeamProfileForm.svelte new file mode 100644 index 0000000..c4b3986 --- /dev/null +++ b/frontend/src/lib/components/forms/TeamProfileForm.svelte @@ -0,0 +1,167 @@ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+
+ + +
+ +
+ + +
+
+ +
+ + +
+ +
+ + +
+ +
+

Role & Access

+ +
+
+ + +
+ +
+ + +
+
+
+ +
+ + +
+ +
+ + +
+
diff --git a/frontend/src/lib/components/forms/index.ts b/frontend/src/lib/components/forms/index.ts new file mode 100644 index 0000000..eaf5456 --- /dev/null +++ b/frontend/src/lib/components/forms/index.ts @@ -0,0 +1,26 @@ +export { default as AccountAddressForm } from './AccountAddressForm.svelte'; +export { default as AccountContactForm } from './AccountContactForm.svelte'; +export { default as AccountCreateForm } from './AccountCreateForm.svelte'; +export { default as AccountForm } from './AccountForm.svelte'; +export { default as CalendarEventForm } from './CalendarEventForm.svelte'; +export { default as AddressForm } from './AddressForm.svelte'; +export { default as AreaForm } from './AreaForm.svelte'; +export { default as ContactForm } from './ContactForm.svelte'; +export { default as CustomerCreateForm } from './CustomerCreateForm.svelte'; +export { default as CustomerForm } from './CustomerForm.svelte'; +export { default as CustomerProfileCreateForm } from './CustomerProfileCreateForm.svelte'; +export { default as CustomerProfileForm } from './CustomerProfileForm.svelte'; +export { default as LaborForm } from './LaborForm.svelte'; +export { default as LocationForm } from './LocationForm.svelte'; +export { default as ProjectAmountForm } from './ProjectAmountForm.svelte'; +export { default as ProjectCategoryForm } from './ProjectCategoryForm.svelte'; +export { default as ProjectForm } from './ProjectForm.svelte'; +export { default as ProjectScopeForm } from './ProjectScopeForm.svelte'; +export { default as ProjectTaskForm } from './ProjectTaskForm.svelte'; +export { default as RevenueForm } from './RevenueForm.svelte'; +export { default as ScheduleForm } from './ScheduleForm.svelte'; +export { default as ServiceForm } from './ServiceForm.svelte'; +export { default as ScopeForm } from './ScopeForm.svelte'; +export { default as TaskForm } from './TaskForm.svelte'; +export { default as TeamProfileCreateForm } from './TeamProfileCreateForm.svelte'; +export { 
default as TeamProfileForm } from './TeamProfileForm.svelte'; diff --git a/frontend/src/lib/components/icons/IconChevronLeft.svelte b/frontend/src/lib/components/icons/IconChevronLeft.svelte new file mode 100644 index 0000000..2632461 --- /dev/null +++ b/frontend/src/lib/components/icons/IconChevronLeft.svelte @@ -0,0 +1,11 @@ + + + + + diff --git a/frontend/src/lib/components/icons/IconChevronRight.svelte b/frontend/src/lib/components/icons/IconChevronRight.svelte new file mode 100644 index 0000000..b04d93f --- /dev/null +++ b/frontend/src/lib/components/icons/IconChevronRight.svelte @@ -0,0 +1,11 @@ + + + + + diff --git a/frontend/src/lib/components/icons/IconEdit.svelte b/frontend/src/lib/components/icons/IconEdit.svelte new file mode 100644 index 0000000..d020a24 --- /dev/null +++ b/frontend/src/lib/components/icons/IconEdit.svelte @@ -0,0 +1,16 @@ + + + + + diff --git a/frontend/src/lib/components/icons/IconPlus.svelte b/frontend/src/lib/components/icons/IconPlus.svelte new file mode 100644 index 0000000..cf48f87 --- /dev/null +++ b/frontend/src/lib/components/icons/IconPlus.svelte @@ -0,0 +1,11 @@ + + + + + diff --git a/frontend/src/lib/components/icons/IconTrash.svelte b/frontend/src/lib/components/icons/IconTrash.svelte new file mode 100644 index 0000000..654ee4a --- /dev/null +++ b/frontend/src/lib/components/icons/IconTrash.svelte @@ -0,0 +1,16 @@ + + + + + diff --git a/frontend/src/lib/components/icons/NavIcon.svelte b/frontend/src/lib/components/icons/NavIcon.svelte new file mode 100644 index 0000000..c540d7e --- /dev/null +++ b/frontend/src/lib/components/icons/NavIcon.svelte @@ -0,0 +1,185 @@ + + +{#if name === 'customers'} + + + + + + + + + + + + + + + +{:else if name === 'accounts'} + + + + + + + + + + + + + + + + +{:else if name === 'services'} + + + + + + + + + + + +{:else if name === 'projects'} + + + + + + + + + + + + + + + + + +{:else if name === 'scopes'} + + + + + + + + + + + + + + + + +{:else if name === 'reports'} + + + + + + + + + +{:else 
if name === 'invoices'} + + + + + + + +{:else if name === 'calendar'} + + + + + + + + + + + + + + + + +{:else if name === 'history'} + + + + + + + + + + +{/if} diff --git a/frontend/src/lib/components/icons/index.ts b/frontend/src/lib/components/icons/index.ts new file mode 100644 index 0000000..96f44bd --- /dev/null +++ b/frontend/src/lib/components/icons/index.ts @@ -0,0 +1,5 @@ +export { default as IconEdit } from './IconEdit.svelte'; +export { default as IconPlus } from './IconPlus.svelte'; +export { default as IconTrash } from './IconTrash.svelte'; +export { default as IconChevronRight } from './IconChevronRight.svelte'; +export { default as IconChevronLeft } from './IconChevronLeft.svelte'; diff --git a/frontend/src/lib/components/layout/ContentContainer.svelte b/frontend/src/lib/components/layout/ContentContainer.svelte new file mode 100644 index 0000000..943b823 --- /dev/null +++ b/frontend/src/lib/components/layout/ContentContainer.svelte @@ -0,0 +1,27 @@ + + +
+ {@render children()} +
diff --git a/frontend/src/lib/components/layout/MenuOverlay.svelte b/frontend/src/lib/components/layout/MenuOverlay.svelte new file mode 100644 index 0000000..1c5a19d --- /dev/null +++ b/frontend/src/lib/components/layout/MenuOverlay.svelte @@ -0,0 +1,414 @@ + + + + +{#if open} + + +
e.key === 'Enter' && onClose()} + > + +
+ + +
e.stopPropagation()} + onkeydown={(e) => e.stopPropagation()} + > + +
+ + + Logo + {brandName} + +
+
+ + + + + +
+ {#if user} + + + + + Sign Out + + {:else} + + + + + Sign In + + {/if} +
+
+
+
+{/if} diff --git a/frontend/src/lib/components/layout/index.ts b/frontend/src/lib/components/layout/index.ts new file mode 100644 index 0000000..73b19c5 --- /dev/null +++ b/frontend/src/lib/components/layout/index.ts @@ -0,0 +1 @@ +export { default as ContentContainer } from './ContentContainer.svelte'; diff --git a/frontend/src/lib/components/modals/DeleteModal.svelte b/frontend/src/lib/components/modals/DeleteModal.svelte new file mode 100644 index 0000000..7b2cff5 --- /dev/null +++ b/frontend/src/lib/components/modals/DeleteModal.svelte @@ -0,0 +1,151 @@ + + + + +{#if open} + + + + + +{/if} diff --git a/frontend/src/lib/components/modals/ScheduleListModal.svelte b/frontend/src/lib/components/modals/ScheduleListModal.svelte new file mode 100644 index 0000000..610962c --- /dev/null +++ b/frontend/src/lib/components/modals/ScheduleListModal.svelte @@ -0,0 +1,122 @@ + + + + +{#if open} + + + + + +{/if} diff --git a/frontend/src/lib/components/modals/ScopeListModal.svelte b/frontend/src/lib/components/modals/ScopeListModal.svelte new file mode 100644 index 0000000..55a53a6 --- /dev/null +++ b/frontend/src/lib/components/modals/ScopeListModal.svelte @@ -0,0 +1,108 @@ + + + + +{#if open} + + + + + +{/if} diff --git a/frontend/src/lib/components/modals/index.ts b/frontend/src/lib/components/modals/index.ts new file mode 100644 index 0000000..7141678 --- /dev/null +++ b/frontend/src/lib/components/modals/index.ts @@ -0,0 +1,3 @@ +export { default as DeleteModal } from './DeleteModal.svelte'; +export { default as ScheduleListModal } from './ScheduleListModal.svelte'; +export { default as ScopeListModal } from './ScopeListModal.svelte'; diff --git a/frontend/src/lib/components/nav/TopNav.svelte b/frontend/src/lib/components/nav/TopNav.svelte new file mode 100644 index 0000000..8c85b16 --- /dev/null +++ b/frontend/src/lib/components/nav/TopNav.svelte @@ -0,0 +1,441 @@ + + + diff --git a/frontend/src/lib/components/notifications/NotificationCard.svelte 
b/frontend/src/lib/components/notifications/NotificationCard.svelte new file mode 100644 index 0000000..7930667 --- /dev/null +++ b/frontend/src/lib/components/notifications/NotificationCard.svelte @@ -0,0 +1,98 @@ + + +
+ + + + {#if onDelete} + + {/if} +
diff --git a/frontend/src/lib/components/notifications/UnreadBadge.svelte b/frontend/src/lib/components/notifications/UnreadBadge.svelte new file mode 100644 index 0000000..fc53db3 --- /dev/null +++ b/frontend/src/lib/components/notifications/UnreadBadge.svelte @@ -0,0 +1,43 @@ + + +{#if count > 0} + {#if variant === 'dot'} + + {:else} + + {formatCount(count)} + + {/if} +{/if} diff --git a/frontend/src/lib/components/notifications/index.ts b/frontend/src/lib/components/notifications/index.ts new file mode 100644 index 0000000..568ba5b --- /dev/null +++ b/frontend/src/lib/components/notifications/index.ts @@ -0,0 +1,2 @@ +export { default as UnreadBadge } from './UnreadBadge.svelte'; +export { default as NotificationCard } from './NotificationCard.svelte'; diff --git a/frontend/src/lib/components/session/SessionContent.svelte b/frontend/src/lib/components/session/SessionContent.svelte new file mode 100644 index 0000000..3fe271d --- /dev/null +++ b/frontend/src/lib/components/session/SessionContent.svelte @@ -0,0 +1,249 @@ + + +
+ + + + + + + +
+ {#if activeTab === 'summary'} + + {:else if activeTab === 'tasks'} + + {:else if activeTab === 'media'} + + {:else if activeTab === 'notes'} + + {/if} +
+
diff --git a/frontend/src/lib/components/session/SessionHeader.svelte b/frontend/src/lib/components/session/SessionHeader.svelte new file mode 100644 index 0000000..17b74d3 --- /dev/null +++ b/frontend/src/lib/components/session/SessionHeader.svelte @@ -0,0 +1,169 @@ + + +
+
+
+ + + +
+
+

+ {entityName} +

+
+ + {statusLabel} + + | + {formatDuration(liveDuration)} +
+
+
+ + {#if isActive && (onClose || onRevert)} +
+ {#if canRevert && onRevert} + + {/if} + + {#if onClose} + + {/if} +
+ {/if} +
diff --git a/frontend/src/lib/components/session/SessionMediaTab.svelte b/frontend/src/lib/components/session/SessionMediaTab.svelte new file mode 100644 index 0000000..f52a235 --- /dev/null +++ b/frontend/src/lib/components/session/SessionMediaTab.svelte @@ -0,0 +1,1233 @@ + + +
+ + {#if isActive && (onUploadPhoto || onUploadVideo)} +
+ + + + + {#if onUploadPhoto} + + {/if} + + {#if onUploadVideo} + + {/if} +
+ {/if} + + + {#if showPhotoUploadForm && stagedPhotos.length > 0} +
+
+

+ {stagedPhotos.length} photo{stagedPhotos.length !== 1 ? 's' : ''} ready to upload +

+ +
+ + + + + +
+ {#each stagedPhotos as photo (photo.id)} +
+ +
+ {#if photo.previewUrl} + Preview + {:else if photo.isHeic} + +
+ + + + HEIC +
+ {/if} +
+ + +
+ { + stagedPhotos = stagedPhotos.map((p) => + p.id === photo.id ? { ...p, title: e.currentTarget.value } : p + ); + }} + disabled={photo.uploading} + /> + + + {#if photo.error} +

{photo.error}

+ {/if} +
+ + + {#if !photo.uploading} + + {:else} +
+ + + + +
+ {/if} +
+ {/each} +
+ + + {#if uploadProgress} +
+
+ + Uploading {uploadProgress.completed} of {uploadProgress.total}... + + {#if uploadProgress.failed > 0} + {uploadProgress.failed} failed + {/if} +
+
+
+
+
+ {/if} + + +
+ + +
+
+ {/if} + + + {#if showVideoUploadForm && stagedVideo} +
+

Upload Video

+ +
+ + {#if stagedVideo.previewUrl} +
+ +
+ {/if} + + +
+ + + + + +
+
+ + {#if videoUploadError} +
+

{videoUploadError}

+
+ {/if} + +
+ + +
+
+ {/if} + + + {#if photos.length > 0} +
+

Photos ({photos.length})

+
+ {#each photos as photo (photo.id)} + {@const thumbnailPath = photo.thumbnail || photo.image} +
+ + + + + {#if photo.internal} + + Internal + + {/if} + + + {#if isActive && (onUpdatePhoto || onDeletePhoto)} +
+ {#if onUpdatePhoto} + + {/if} + {#if onDeletePhoto} + + {/if} +
+ {/if} + + +
+

{photo.title || 'Untitled'}

+ {#if photo.notes} +

{photo.notes}

+ {/if} +

{getTeamMemberName(photo.uploadedById)}

+
+
+ {/each} +
+
+ {/if} + + + {#if videos.length > 0} +
+

Videos ({videos.length})

+
+ {#each videos as video (video.id)} +
+ + + + + {#if video.internal} + + Internal + + {/if} + + + {#if isActive && (onUpdateVideo || onDeleteVideo)} +
+ {#if onUpdateVideo} + + {/if} + {#if onDeleteVideo} + + {/if} +
+ {/if} + + +
+

{video.title || 'Untitled'}

+ {#if video.notes} +

{video.notes}

+ {/if} +

+ {getTeamMemberName(video.uploadedById)} + {#if video.durationSeconds} + · {Math.floor(video.durationSeconds / 60)}:{(video.durationSeconds % 60) + .toString() + .padStart(2, '0')} + {/if} +

+
+
+ {/each} +
+
+ {/if} + + + {#if photos.length === 0 && videos.length === 0 && !showPhotoUploadForm && !showVideoUploadForm} +
+ + + + +

No media yet

+
+ {/if} +
+ + +{#if (selectedPhoto || selectedVideo) && isActive} + +{/if} diff --git a/frontend/src/lib/components/session/SessionNotesTab.svelte b/frontend/src/lib/components/session/SessionNotesTab.svelte new file mode 100644 index 0000000..fab2e6b --- /dev/null +++ b/frontend/src/lib/components/session/SessionNotesTab.svelte @@ -0,0 +1,327 @@ + + +
+ + {#if isActive && onAddNote} +
+ + +
+ + + +
+
+ {/if} + + + {#if notes.length === 0} +
+ + + +

No notes yet

+
+ {:else} +
+ {#each notes as note (note.id)} +
+ {#if editingNoteId === note.id} + + + +
+ + +
+ + +
+
+ {:else} + +
+
+
+ {getTeamMemberName(note.authorId)} + | + {formatDateTime(note.createdAt)} + {#if note.internal} + + Internal + + {/if} +
+

+ {note.content} +

+
+ + {#if isActive && (onUpdateNote || onDeleteNote)} +
+ {#if onUpdateNote} + + {/if} + {#if onDeleteNote} + + {/if} +
+ {/if} +
+ {/if} +
+ {/each} +
+ {/if} +
diff --git a/frontend/src/lib/components/session/SessionSummaryTab.svelte b/frontend/src/lib/components/session/SessionSummaryTab.svelte new file mode 100644 index 0000000..48b454c --- /dev/null +++ b/frontend/src/lib/components/session/SessionSummaryTab.svelte @@ -0,0 +1,223 @@ + + +
+ +
+ +
+
+
+ + + +
+
+

Duration

+

+ {formatDuration(liveDuration)} +

+
+
+
+ + + + + + + + + +
+ + +
+

Session Details

+ +
+
+
+ {sessionType === 'service' ? 'Service' : 'Project'} +
+
{entityName}
+
+ + {#if scopeName} +
+
Scope
+
{scopeName}
+
+ {/if} + + {#if addressInfo} +
+
Location
+
{addressInfo}
+
+ {/if} + +
+
Started
+
{formatDateTime(startTime)}
+
+ + {#if createdByName} +
+
Started By
+
{createdByName}
+
+ {/if} + + {#if endTime} +
+
Ended
+
{formatDateTime(endTime)}
+
+ {/if} + + {#if closedByName} +
+
Closed By
+
{closedByName}
+
+ {/if} +
+ + {#if scopeDescription} +
+
+ Scope Description +
+

{scopeDescription}

+
+ {/if} +
+
diff --git a/frontend/src/lib/components/session/SessionTabs.svelte b/frontend/src/lib/components/session/SessionTabs.svelte new file mode 100644 index 0000000..e477044 --- /dev/null +++ b/frontend/src/lib/components/session/SessionTabs.svelte @@ -0,0 +1,199 @@ + + + +
+ + + + +
+ {#if currentTab.icon === 'chart'} + + + + {:else if currentTab.icon === 'check'} + + + + {:else if currentTab.icon === 'camera'} + + + + + {:else if currentTab.icon === 'document'} + + + + {/if} + {currentTab.label} + {#if currentCount > 0} + {currentCount} + {/if} +
+ + + +
+ + + diff --git a/frontend/src/lib/components/session/SessionTasksTab.svelte b/frontend/src/lib/components/session/SessionTasksTab.svelte new file mode 100644 index 0000000..de6d7d6 --- /dev/null +++ b/frontend/src/lib/components/session/SessionTasksTab.svelte @@ -0,0 +1,481 @@ + + +
+ + {#if hasReadyToSubmit && isActive} +
+
+

+ Ready to Submit ({selectedTaskIds.size}) +

+
+ {#if onClearSelection} + + {/if} + {#if onSubmitAllTasks && selectedTaskIds.size > 0} + + {/if} +
+
+ +
+ {#each readyToSubmitByArea as { area, tasks: areaTasks }} +
+ + + + {#if isAreaExpanded('ready', area.id)} +
+ {#each areaTasks as task (task.id)} +
+
+ + + {task.sessionDescription || task.scopeDescription} + +
+
+ {#if isServiceTask(task) && task.frequency} + + {formatFrequency(task.frequency)} + + {/if} + {#if onSubmitTask} + + {/if} +
+
+ {/each} +
+ {/if} +
+ {/each} +
+
+ {/if} + + + {#if hasCompletedTasks} +
+

+ Completed ({completedTaskIds.size}) +

+
+ {#each completedTasksByArea as { area, completions }} +
+ + + + {#if isAreaExpanded('completed', area.id)} +
+ {#each completions as completion (completion.id)} +
+
+ + + +
+ + {completion.task?.sessionDescription || completion.task?.scopeDescription} + +
+ {completion.completedBy?.fullName} - {formatDateTime( + completion.completedAt + )} +
+
+
+ {#if isActive && onRemoveCompletedTask} + + {/if} +
+ {/each} +
+ {/if} +
+ {/each} +
+
+ {/if} + + + {#if availableTasksCount > 0 && isActive} +
+

+ Available Tasks ({availableTasksCount}) +

+
+ {#each availableTasksByArea() as { area, tasks: areaTasks }} +
+ + + + {#if isAreaExpanded('available', area.id)} +
+ {#each areaTasks as task (task.id)} + + {/each} +
+ {/if} +
+ {/each} +
+
+ {:else if !hasCompletedTasks && !hasReadyToSubmit} +
+ + + +

No tasks available

+
+ {/if} +
diff --git a/frontend/src/lib/components/session/StartSessionButton.svelte b/frontend/src/lib/components/session/StartSessionButton.svelte new file mode 100644 index 0000000..f076a7a --- /dev/null +++ b/frontend/src/lib/components/session/StartSessionButton.svelte @@ -0,0 +1,59 @@ + + +
+ + + {#if onBack} + + {/if} +
diff --git a/frontend/src/lib/components/session/index.ts b/frontend/src/lib/components/session/index.ts new file mode 100644 index 0000000..3320e8e --- /dev/null +++ b/frontend/src/lib/components/session/index.ts @@ -0,0 +1,12 @@ +// Session components barrel export +export { default as SessionContent } from './SessionContent.svelte'; +export { default as SessionHeader } from './SessionHeader.svelte'; +export { default as SessionTabs } from './SessionTabs.svelte'; +export { default as SessionSummaryTab } from './SessionSummaryTab.svelte'; +export { default as SessionTasksTab } from './SessionTasksTab.svelte'; +export { default as SessionMediaTab } from './SessionMediaTab.svelte'; +export { default as SessionNotesTab } from './SessionNotesTab.svelte'; +export { default as StartSessionButton } from './StartSessionButton.svelte'; + +// Re-export types +export * from './types'; diff --git a/frontend/src/lib/components/session/types.ts b/frontend/src/lib/components/session/types.ts new file mode 100644 index 0000000..f3f6f02 --- /dev/null +++ b/frontend/src/lib/components/session/types.ts @@ -0,0 +1,250 @@ +// Session component shared types +import type { + ServiceSession, + ProjectSession, + SessionNote, + SessionImage, + SessionVideo, + ServiceTaskCompletion, + ProjectTaskCompletion, + ServiceScopeTask, + ProjectScopeTask +} from '$lib/graphql/queries/session'; + +// Re-export for convenience +export type { + ServiceSession, + ProjectSession, + SessionNote, + SessionImage, + SessionVideo, + ServiceTaskCompletion, + ProjectTaskCompletion, + ServiceScopeTask, + ProjectScopeTask +}; + +// Session type discriminator +export type SessionType = 'service' | 'project'; + +// Tab type +export type SessionTab = 'summary' | 'tasks' | 'media' | 'notes'; + +// Union types for components that handle both +export type Session = ServiceSession | ProjectSession; +export type TaskCompletion = ServiceTaskCompletion | ProjectTaskCompletion; + +// Extended task type with area context 
+export interface ExtendedServiceTask extends ServiceScopeTask { + uuid: string; + areaId: string; + areaName: string; +} + +export interface ExtendedProjectTask extends ProjectScopeTask { + uuid: string; + areaId: string; + areaName: string; +} + +export type ExtendedTask = ExtendedServiceTask | ExtendedProjectTask; + +// Area types from scope +export interface ServiceArea { + id: string; + name: string; + description: string | null; + order: number; + tasks: ServiceScopeTask[]; +} + +export interface ProjectArea { + id: string; + name: string; + description: string | null; + order: number; + tasks: ProjectScopeTask[]; +} + +// Area with completions (for completed tasks section) +export interface ServiceAreaWithCompletions { + area: ServiceArea; + completions: ServiceTaskCompletion[]; +} + +export interface ProjectAreaWithCompletions { + area: ProjectArea; + completions: ProjectTaskCompletion[]; +} + +export type AreaWithCompletions = ServiceAreaWithCompletions | ProjectAreaWithCompletions; + +// Area with tasks (for ready to submit section) +export interface ServiceAreaWithTasks { + area: ServiceArea; + tasks: ExtendedServiceTask[]; +} + +export interface ProjectAreaWithTasks { + area: ProjectArea; + tasks: ExtendedProjectTask[]; +} + +export type AreaWithTasks = ServiceAreaWithTasks | ProjectAreaWithTasks; + +// Props interfaces for components +export interface SessionTabsCounts { + tasks: number; + photos: number; + videos: number; + notes: number; +} + +// Re-export frequency types for convenience +export type { TaskFrequency } from '$lib/graphql/queries/service-scope-templates'; + +// Helper function to format error messages +export function getErrorMessage(errors: { message: string }[] | null | undefined): string { + return errors?.map((e) => e.message).join(', ') ?? 
''; +} + +// Frequency badge colors - matches TaskFrequencyChoices enum from backend +export const frequencyColors: Record = { + DAILY: 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400', + WEEKLY: 'bg-orange-100 text-orange-700 dark:bg-orange-900/30 dark:text-orange-400', + MONTHLY: 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400', + QUARTERLY: 'bg-purple-100 text-purple-700 dark:bg-purple-900/30 dark:text-purple-400', + TRIANNUAL: 'bg-yellow-100 text-yellow-700 dark:bg-yellow-900/30 dark:text-yellow-400', + ANNUAL: 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400', + AS_NEEDED: 'bg-gray-100 text-gray-700 dark:bg-gray-900/30 dark:text-gray-400' +}; + +// Format frequency for display +export function formatFrequency(frequency: string | null | undefined): string { + if (!frequency) return ''; + return frequency.replace('_', ' ').toLowerCase(); +} + +// Format date/time +export function formatDateTime(dateStr: string | null | undefined): string { + if (!dateStr) return ''; + const date = new Date(dateStr); + return date.toLocaleString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + hour: 'numeric', + minute: '2-digit' + }); +} + +export function formatDate(dateStr: string | null | undefined): string { + if (!dateStr) return ''; + const date = new Date(dateStr); + return date.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + }); +} + +export function formatTime(dateStr: string | null | undefined): string { + if (!dateStr) return ''; + const date = new Date(dateStr); + return date.toLocaleTimeString('en-US', { + hour: 'numeric', + minute: '2-digit' + }); +} + +// Format duration +export function formatDuration(seconds: number | null | undefined): string { + if (!seconds) return '0m'; + const hours = Math.floor(seconds / 3600); + const minutes = Math.floor((seconds % 3600) / 60); + if (hours > 0) { + return `${hours}h ${minutes}m`; + } + return `${minutes}m`; +} + 
+// API base URL for authenticated media requests +// Use production API even in dev so cookies work (same domain as auth) +export const API_BASE_URL = 'https://api.example.com'; + +// Helper to get authenticated image URL by fetching the image and creating an object URL +export async function getAuthenticatedImageUrl(path: string): Promise { + try { + // Normalize the path to include /api/media/ prefix + let mediaPath = path; + if (!path.startsWith('/')) { + // Relative path from database (e.g., "uploads/project_session/...") + mediaPath = `/api/media/${path}`; + } else if (!path.startsWith('/api/')) { + // Absolute path without /api prefix + mediaPath = `/api/media${path}`; + } + // Add /v2 prefix for Oathkeeper routing (Rust backend) + const fullUrl = `${API_BASE_URL}/v2${mediaPath}`; + const response = await fetch(fullUrl, { + credentials: 'include' + }); + if (!response.ok) { + throw new Error(`HTTP ${response.status}`); + } + const blob = await response.blob(); + return URL.createObjectURL(blob); + } catch (err) { + console.error('Failed to fetch authenticated image:', path, err); + // Return a placeholder instead of the raw path to avoid broken relative URLs + return '/placeholder-image.svg'; + } +} + +// Helper to revoke object URLs when done +export function revokeAuthenticatedUrl(url: string): void { + if (url.startsWith('blob:')) { + URL.revokeObjectURL(url); + } +} + +// Type guard for service session +export function isServiceSession(session: Session): session is ServiceSession { + return 'serviceId' in session; +} + +// Type guard for project session +export function isProjectSession(session: Session): session is ProjectSession { + return 'projectId' in session; +} + +// Type guard for service task (has frequency) +export function isServiceTask(task: ExtendedTask): task is ExtendedServiceTask { + return 'frequency' in task; +} + +// Get session color classes based on type +export function getSessionColorClasses(type: SessionType): { + primary: string; 
+ bg: string; + border: string; + borderLight: string; + ring: string; +} { + if (type === 'service') { + return { + primary: 'text-secondary-600 dark:text-secondary-400', + bg: 'bg-secondary-500', + border: 'border-secondary-500', + borderLight: 'border-secondary-200 dark:border-secondary-700', + ring: 'ring-secondary-500' + }; + } + return { + primary: 'text-accent-600 dark:text-accent-400', + bg: 'bg-accent-500', + border: 'border-accent-500', + borderLight: 'border-accent-200 dark:border-accent-700', + ring: 'ring-accent-500' + }; +} diff --git a/frontend/src/lib/components/team/TeamBottomNav.svelte b/frontend/src/lib/components/team/TeamBottomNav.svelte new file mode 100644 index 0000000..5397f2b --- /dev/null +++ b/frontend/src/lib/components/team/TeamBottomNav.svelte @@ -0,0 +1,43 @@ + + + diff --git a/frontend/src/lib/components/team/TeamDashboardHeader.svelte b/frontend/src/lib/components/team/TeamDashboardHeader.svelte new file mode 100644 index 0000000..492a2a7 --- /dev/null +++ b/frontend/src/lib/components/team/TeamDashboardHeader.svelte @@ -0,0 +1,240 @@ + + + + +
+ {#if showBackButton} + +
+
+ + {#if isNavigatingBack} + + + + + {:else} + + + + {/if} + +
+
+

{title}

+ +
+ {#if subtitleSnippet} +

+ {@render subtitleSnippet({ toggleMenu })} +

+ {:else if subtitle} +

{subtitle}

+ {/if} +
+
+ {#if actions} +
+ {@render actions()} +
+ {/if} +
+ {:else} + +
+
+
+

{title}

+ +
+ {#if subtitleSnippet} +

+ {@render subtitleSnippet({ toggleMenu })} +

+ {:else if subtitle} +

{subtitle}

+ {/if} +
+ {#if actions} +
+ {@render actions()} +
+ {/if} +
+ {/if} +
diff --git a/frontend/src/lib/components/team/index.ts b/frontend/src/lib/components/team/index.ts new file mode 100644 index 0000000..73f0fc6 --- /dev/null +++ b/frontend/src/lib/components/team/index.ts @@ -0,0 +1,2 @@ +export { default as TeamBottomNav } from './TeamBottomNav.svelte'; +export { default as TeamDashboardHeader } from './TeamDashboardHeader.svelte'; diff --git a/frontend/src/lib/config.ts b/frontend/src/lib/config.ts new file mode 100644 index 0000000..2887dad --- /dev/null +++ b/frontend/src/lib/config.ts @@ -0,0 +1,23 @@ +import { browser } from '$app/environment'; + +// API endpoints - all requests go through Oathkeeper for auth +export const config = { + graphql: { + url: 'https://api.example.com/graphql' + }, + kratos: { + publicUrl: 'https://auth.example.com', + adminUrl: 'http://localhost:6050' + }, + app: { + origin: browser ? window.location.origin : 'https://local.example.com:5173' + }, + // Dispatch account - services/projects assigned to this account are considered "dispatched" + // and tracked by monitoring services. 
+ dispatch: { + profileId: '00000000-0000-0000-0000-000000000000' // Replace with your dispatch profile ID + } +} as const; + +// Cookie name used by Kratos +export const KRATOS_SESSION_COOKIE = 'ory_kratos_session'; diff --git a/frontend/src/lib/graphql/client.ts b/frontend/src/lib/graphql/client.ts new file mode 100644 index 0000000..69aada3 --- /dev/null +++ b/frontend/src/lib/graphql/client.ts @@ -0,0 +1,33 @@ +import { ApolloClient, InMemoryCache, HttpLink } from '@apollo/client/core'; +import UploadHttpLink from 'apollo-upload-client/UploadHttpLink.mjs'; +import { browser } from '$app/environment'; +import { config } from '$lib/config'; + +// Upload link for browser requests - supports file uploads + regular queries +// Uses GraphQL multipart request spec for file uploads +const uploadLink = new UploadHttpLink({ + uri: config.graphql.url, + credentials: 'include' +}); + +// Browser client - used for client-side queries and mutations (including uploads) +export const client = new ApolloClient({ + link: uploadLink, + cache: new InMemoryCache(), + ssrMode: !browser +}); + +// Server client factory - creates a client with cookie header for SSR +// Note: Server-side doesn't need upload support, uses regular HttpLink +export function createServerClient(cookie: string | null) { + const serverHttpLink = new HttpLink({ + uri: config.graphql.url, + headers: cookie ? { Cookie: cookie } : {} + }); + + return new ApolloClient({ + link: serverHttpLink, + cache: new InMemoryCache(), + ssrMode: true + }); +} diff --git a/frontend/src/lib/graphql/mutations/account.ts b/frontend/src/lib/graphql/mutations/account.ts new file mode 100644 index 0000000..90af2fd --- /dev/null +++ b/frontend/src/lib/graphql/mutations/account.ts @@ -0,0 +1,193 @@ +import { gql } from '@apollo/client/core'; + +// ==================== ACCOUNT MUTATIONS ==================== + +export const CREATE_ACCOUNT = gql` + mutation CreateAccount($input: CreateAccountInput!) 
{ + createAccount(input: $input) { + id + customerId + name + status + startDate + endDate + isActive + } + } +`; + +export const UPDATE_ACCOUNT = gql` + mutation UpdateAccount($id: UUID!, $input: UpdateAccountInput!) { + updateAccount(id: $id, input: $input) { + id + customerId + name + status + startDate + endDate + isActive + } + } +`; + +export const DELETE_ACCOUNT = gql` + mutation DeleteAccount($id: UUID!) { + deleteAccount(id: $id) + } +`; + +// ==================== ACCOUNT CONTACT MUTATIONS ==================== + +export const CREATE_ACCOUNT_CONTACT = gql` + mutation CreateAccountContact($accountId: UUID!, $input: CreateAccountContactInput!) { + createAccountContact(accountId: $accountId, input: $input) { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + } +`; + +export const UPDATE_ACCOUNT_CONTACT = gql` + mutation UpdateAccountContact($id: UUID!, $input: UpdateAccountContactInput!) { + updateAccountContact(id: $id, input: $input) { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + } +`; + +export const DELETE_ACCOUNT_CONTACT = gql` + mutation DeleteAccountContact($id: UUID!) { + deleteAccountContact(id: $id) + } +`; + +// ==================== ACCOUNT ADDRESS MUTATIONS ==================== + +export const CREATE_ACCOUNT_ADDRESS = gql` + mutation CreateAccountAddress($accountId: UUID!, $input: CreateAccountAddressInput!) { + createAccountAddress(accountId: $accountId, input: $input) { + id + accountId + name + streetAddress + city + state + zipCode + isActive + isPrimary + notes + } + } +`; + +export const DELETE_ACCOUNT_ADDRESS = gql` + mutation DeleteAccountAddress($id: UUID!) { + deleteAccountAddress(id: $id) + } +`; + +// ==================== REVENUE MUTATIONS ==================== + +export const CREATE_REVENUE = gql` + mutation CreateRevenue($accountId: UUID!, $input: CreateRevenueInput!) 
{ + createRevenue(accountId: $accountId, input: $input) { + id + amount + startDate + endDate + waveServiceId + isActive + } + } +`; + +export const UPDATE_REVENUE = gql` + mutation UpdateRevenue($id: UUID!, $input: UpdateRevenueInput!) { + updateRevenue(id: $id, input: $input) { + id + amount + startDate + endDate + waveServiceId + isActive + } + } +`; + +export const DELETE_REVENUE = gql` + mutation DeleteRevenue($id: UUID!) { + deleteRevenue(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateAccountInput { + customerId: string; + name: string; + status?: 'ACTIVE' | 'INACTIVE' | 'PENDING'; + startDate?: string; + endDate?: string; +} + +export interface UpdateAccountInput { + name?: string; + status?: 'ACTIVE' | 'INACTIVE' | 'PENDING'; + startDate?: string; + endDate?: string; +} + +export interface CreateAccountContactInput { + firstName: string; + lastName: string; + email?: string; + phone?: string; + notes?: string; + isPrimary?: boolean; +} + +export interface UpdateAccountContactInput { + firstName?: string; + lastName?: string; + email?: string; + phone?: string; + notes?: string; + isPrimary?: boolean; + isActive?: boolean; +} + +export interface CreateAccountAddressInput { + name?: string; + streetAddress: string; + city: string; + state: string; + zipCode: string; + notes?: string; + isPrimary?: boolean; +} + +export interface CreateRevenueInput { + amount: number; + startDate: string; + waveServiceId?: string; +} + +export interface UpdateRevenueInput { + amount?: number; + startDate?: string; + endDate?: string; + waveServiceId?: string; +} diff --git a/frontend/src/lib/graphql/mutations/calendar.ts b/frontend/src/lib/graphql/mutations/calendar.ts new file mode 100644 index 0000000..f3a3fb0 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/calendar.ts @@ -0,0 +1,196 @@ +import { gql } from '@apollo/client/core'; + +export const CREATE_CALENDAR_EVENT = gql` + mutation CreateCalendarEvent($input: 
CreateCalendarEventInput!) { + createCalendarEvent(input: $input) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + htmlLink + status + } + } +`; + +export const UPDATE_CALENDAR_EVENT = gql` + mutation UpdateCalendarEvent($eventId: String!, $input: UpdateCalendarEventInput!) { + updateCalendarEvent(eventId: $eventId, input: $input) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + htmlLink + status + } + } +`; + +export const DELETE_CALENDAR_EVENT = gql` + mutation DeleteCalendarEvent($eventId: String!) { + deleteCalendarEvent(eventId: $eventId) + } +`; + +export const LINK_CALENDAR_EVENT_TO_SERVICE = gql` + mutation LinkCalendarEventToService($serviceId: UUID!, $eventId: String!) { + linkCalendarEventToService(serviceId: $serviceId, eventId: $eventId) + } +`; + +export const UNLINK_CALENDAR_EVENT_FROM_SERVICE = gql` + mutation UnlinkCalendarEventFromService($serviceId: UUID!) { + unlinkCalendarEventFromService(serviceId: $serviceId) + } +`; + +export const LINK_CALENDAR_EVENT_TO_PROJECT = gql` + mutation LinkCalendarEventToProject($projectId: UUID!, $eventId: String!) { + linkCalendarEventToProject(projectId: $projectId, eventId: $eventId) + } +`; + +export const UNLINK_CALENDAR_EVENT_FROM_PROJECT = gql` + mutation UnlinkCalendarEventFromProject($projectId: UUID!) { + unlinkCalendarEventFromProject(projectId: $projectId) + } +`; + +export const CREATE_SERVICE_CALENDAR_EVENT = gql` + mutation CreateServiceCalendarEvent($serviceId: UUID!, $input: CreateCalendarEventInput!) 
{ + createServiceCalendarEvent(serviceId: $serviceId, input: $input) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + htmlLink + status + } + } +`; + +export const CREATE_PROJECT_CALENDAR_EVENT = gql` + mutation CreateProjectCalendarEvent($projectId: UUID!, $input: CreateCalendarEventInput!) { + createProjectCalendarEvent(projectId: $projectId, input: $input) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + htmlLink + status + } + } +`; + +export const DELETE_SERVICE_CALENDAR_EVENT = gql` + mutation DeleteServiceCalendarEvent($serviceId: UUID!) { + deleteServiceCalendarEvent(serviceId: $serviceId) + } +`; + +export const DELETE_PROJECT_CALENDAR_EVENT = gql` + mutation DeleteProjectCalendarEvent($projectId: UUID!) { + deleteProjectCalendarEvent(projectId: $projectId) + } +`; + +// Input types +export interface EventDateTimeInput { + dateTime?: string; + date?: string; + timeZone?: string; +} + +export interface AttendeeInput { + email: string; + displayName?: string; + optional?: boolean; +} + +export interface EventReminderInput { + method: string; + minutes: number; +} + +export interface EventRemindersInput { + useDefault: boolean; + overrides?: EventReminderInput[]; +} + +export interface CreateCalendarEventInput { + id?: string; + summary: string; + description?: string; + location?: string; + start: EventDateTimeInput; + end: EventDateTimeInput; + attendees?: AttendeeInput[]; + reminders?: EventRemindersInput; + colorId?: string; +} + +export interface UpdateCalendarEventInput { + summary?: string; + description?: string; + location?: string; + start?: EventDateTimeInput; + end?: EventDateTimeInput; + attendees?: AttendeeInput[]; + reminders?: EventRemindersInput; + colorId?: string; +} + +export interface CreateCalendarEventResult { + createCalendarEvent: { + id: string; + summary: 
string; + description?: string; + location?: string; + htmlLink?: string; + status?: string; + } | null; +} diff --git a/frontend/src/lib/graphql/mutations/customer.ts b/frontend/src/lib/graphql/mutations/customer.ts new file mode 100644 index 0000000..94b853e --- /dev/null +++ b/frontend/src/lib/graphql/mutations/customer.ts @@ -0,0 +1,181 @@ +import { gql } from '@apollo/client/core'; + +// ==================== CUSTOMER MUTATIONS ==================== + +export const CREATE_CUSTOMER = gql` + mutation CreateCustomer($input: CreateCustomerInput!) { + createCustomer(input: $input) { + id + name + status + startDate + endDate + billingTerms + billingEmail + waveCustomerId + isActive + } + } +`; + +export const UPDATE_CUSTOMER = gql` + mutation UpdateCustomer($id: UUID!, $input: UpdateCustomerInput!) { + updateCustomer(id: $id, input: $input) { + id + name + status + startDate + endDate + billingTerms + billingEmail + waveCustomerId + isActive + } + } +`; + +export const DELETE_CUSTOMER = gql` + mutation DeleteCustomer($id: UUID!) { + deleteCustomer(id: $id) + } +`; + +// ==================== CONTACT MUTATIONS ==================== + +export const CREATE_CUSTOMER_CONTACT = gql` + mutation CreateCustomerContact($customerId: UUID!, $input: CreateCustomerContactInput!) { + createCustomerContact(customerId: $customerId, input: $input) { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + } +`; + +export const UPDATE_CUSTOMER_CONTACT = gql` + mutation UpdateCustomerContact($id: UUID!, $input: UpdateCustomerContactInput!) { + updateCustomerContact(id: $id, input: $input) { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + } +`; + +export const DELETE_CUSTOMER_CONTACT = gql` + mutation DeleteCustomerContact($id: UUID!) 
{ + deleteCustomerContact(id: $id) + } +`; + +// ==================== ADDRESS MUTATIONS ==================== + +export const CREATE_CUSTOMER_ADDRESS = gql` + mutation CreateCustomerAddress($customerId: UUID!, $input: CreateCustomerAddressInput!) { + createCustomerAddress(customerId: $customerId, input: $input) { + id + name + streetAddress + city + state + zipCode + isActive + isPrimary + notes + } + } +`; + +export const UPDATE_CUSTOMER_ADDRESS = gql` + mutation UpdateCustomerAddress($id: UUID!, $input: UpdateCustomerAddressInput!) { + updateCustomerAddress(id: $id, input: $input) { + id + name + streetAddress + city + state + zipCode + isActive + isPrimary + notes + } + } +`; + +export const DELETE_CUSTOMER_ADDRESS = gql` + mutation DeleteCustomerAddress($id: UUID!) { + deleteCustomerAddress(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateCustomerInput { + name: string; + status?: 'ACTIVE' | 'INACTIVE' | 'PENDING'; + billingEmail?: string; + billingTerms?: string; + waveCustomerId?: string; + startDate?: string; + endDate?: string; +} + +export interface UpdateCustomerInput { + name?: string; + status?: 'ACTIVE' | 'INACTIVE' | 'PENDING'; + billingEmail?: string; + billingTerms?: string; + waveCustomerId?: string; + startDate?: string; + endDate?: string; +} + +export interface CreateCustomerContactInput { + firstName: string; + lastName: string; + email?: string; + phone?: string; + notes?: string; + isPrimary?: boolean; +} + +export interface UpdateCustomerContactInput { + firstName?: string; + lastName?: string; + email?: string; + phone?: string; + notes?: string; + isPrimary?: boolean; + isActive?: boolean; +} + +export interface CreateCustomerAddressInput { + name?: string; + streetAddress: string; + city: string; + state: string; + zipCode: string; + notes?: string; + isPrimary?: boolean; +} + +export interface UpdateCustomerAddressInput { + name?: string; + streetAddress?: string; + city?: string; + 
state?: string; + zipCode?: string; + notes?: string; + isPrimary?: boolean; + isActive?: boolean; +} diff --git a/frontend/src/lib/graphql/mutations/invoices.ts b/frontend/src/lib/graphql/mutations/invoices.ts new file mode 100644 index 0000000..943d558 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/invoices.ts @@ -0,0 +1,94 @@ +import { gql } from '@apollo/client/core'; + +export const CREATE_INVOICE = gql` + mutation CreateInvoice($input: CreateInvoiceInput!) { + createInvoice(input: $input) { + id + customerId + startDate + endDate + status + } + } +`; + +export const UPDATE_INVOICE = gql` + mutation UpdateInvoice($id: UUID!, $input: UpdateInvoiceInput!) { + updateInvoice(id: $id, input: $input) { + id + customerId + startDate + endDate + status + datePaid + waveInvoiceId + } + } +`; + +export const DELETE_INVOICE = gql` + mutation DeleteInvoice($id: UUID!) { + deleteInvoice(id: $id) + } +`; + +export const ADD_REVENUE_TO_INVOICE = gql` + mutation AddRevenueToInvoice($invoiceId: UUID!, $revenueId: UUID!) { + addRevenueToInvoice(invoiceId: $invoiceId, revenueId: $revenueId) { + id + invoiceId + revenueId + amount + } + } +`; + +export const ADD_PROJECT_TO_INVOICE = gql` + mutation AddProjectToInvoice($invoiceId: UUID!, $projectId: UUID!) { + addProjectToInvoice(invoiceId: $invoiceId, projectId: $projectId) { + id + invoiceId + projectId + amount + } + } +`; + +export const REMOVE_REVENUE_FROM_INVOICE = gql` + mutation RemoveRevenueFromInvoice($id: UUID!) { + removeRevenueFromInvoice(id: $id) + } +`; + +export const REMOVE_PROJECT_FROM_INVOICE = gql` + mutation RemoveProjectFromInvoice($id: UUID!) { + removeProjectFromInvoice(id: $id) + } +`; + +export const ADD_ALL_ELIGIBLE_REVENUES = gql` + mutation AddAllEligibleRevenuesToInvoice($invoiceId: UUID!) { + addAllEligibleRevenuesToInvoice(invoiceId: $invoiceId) + } +`; + +export const ADD_ALL_ELIGIBLE_PROJECTS = gql` + mutation AddAllEligibleProjectsToInvoice($invoiceId: UUID!) 
{ + addAllEligibleProjectsToInvoice(invoiceId: $invoiceId) + } +`; + +// Input types for mutations +export interface CreateInvoiceInput { + customerId: string; + startDate: string; + endDate: string; +} + +export interface UpdateInvoiceInput { + startDate?: string; + endDate?: string; + status?: 'DRAFT' | 'SENT' | 'PAID' | 'OVERDUE' | 'CANCELLED'; + datePaid?: string; + waveInvoiceId?: string; +} diff --git a/frontend/src/lib/graphql/mutations/location.ts b/frontend/src/lib/graphql/mutations/location.ts new file mode 100644 index 0000000..3786ce2 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/location.ts @@ -0,0 +1,199 @@ +import { gql } from '@apollo/client/core'; + +// ==================== LABOR MUTATIONS ==================== + +export const CREATE_LABOR = gql` + mutation CreateLabor($accountAddressId: UUID!, $input: CreateLaborInput!) { + createLabor(accountAddressId: $accountAddressId, input: $input) { + id + amount + startDate + endDate + isActive + } + } +`; + +export const UPDATE_LABOR = gql` + mutation UpdateLabor($id: UUID!, $input: UpdateLaborInput!) { + updateLabor(id: $id, input: $input) { + id + amount + startDate + endDate + isActive + } + } +`; + +export const DELETE_LABOR = gql` + mutation DeleteLabor($id: UUID!) { + deleteLabor(id: $id) + } +`; + +// ==================== SCHEDULE MUTATIONS ==================== + +export const CREATE_SCHEDULE = gql` + mutation CreateSchedule($accountAddressId: UUID!, $input: CreateScheduleInput!) { + createSchedule(accountAddressId: $accountAddressId, input: $input) { + id + name + monday + tuesday + wednesday + thursday + friday + saturday + sunday + weekendService + scheduleException + startDate + endDate + isActive + } + } +`; + +export const UPDATE_SCHEDULE = gql` + mutation UpdateSchedule($id: UUID!, $input: UpdateScheduleInput!) 
{ + updateSchedule(id: $id, input: $input) { + id + name + monday + tuesday + wednesday + thursday + friday + saturday + sunday + weekendService + scheduleException + startDate + endDate + isActive + } + } +`; + +export const DELETE_SCHEDULE = gql` + mutation DeleteSchedule($id: UUID!) { + deleteSchedule(id: $id) + } +`; + +// ==================== SERVICE SCOPE MUTATIONS ==================== + +export const CREATE_SERVICE_SCOPE = gql` + mutation CreateServiceScope( + $accountId: UUID! + $accountAddressId: UUID! + $input: CreateServiceScopeInput! + ) { + createServiceScope(accountId: $accountId, accountAddressId: $accountAddressId, input: $input) { + id + name + description + isActive + } + } +`; + +export const UPDATE_SERVICE_SCOPE = gql` + mutation UpdateServiceScope($id: UUID!, $input: UpdateServiceScopeInput!) { + updateServiceScope(id: $id, input: $input) { + id + name + description + isActive + } + } +`; + +export const DELETE_SERVICE_SCOPE = gql` + mutation DeleteServiceScope($id: UUID!) { + deleteServiceScope(id: $id) + } +`; + +// ==================== SCOPE AREA MUTATIONS ==================== + +export const CREATE_SCOPE_AREA = gql` + mutation CreateScopeArea($scopeId: UUID!, $input: CreateScopeAreaInput!) { + createScopeArea(scopeId: $scopeId, input: $input) { + id + name + order + } + } +`; + +export const UPDATE_SCOPE_AREA = gql` + mutation UpdateScopeArea($id: UUID!, $input: UpdateScopeAreaInput!) { + updateScopeArea(id: $id, input: $input) { + id + name + order + } + } +`; + +export const DELETE_SCOPE_AREA = gql` + mutation DeleteScopeArea($id: UUID!) { + deleteScopeArea(id: $id) + } +`; + +// ==================== SCOPE TASK MUTATIONS ==================== + +export const CREATE_SCOPE_TASK = gql` + mutation CreateScopeTask($areaId: UUID!, $input: CreateScopeTaskInput!) 
{ + createScopeTask(areaId: $areaId, input: $input) { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const UPDATE_SCOPE_TASK = gql` + mutation UpdateScopeTask($id: UUID!, $input: UpdateScopeTaskInput!) { + updateScopeTask(id: $id, input: $input) { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const DELETE_SCOPE_TASK = gql` + mutation DeleteScopeTask($id: UUID!) { + deleteScopeTask(id: $id) + } +`; + +// ==================== ACCOUNT ADDRESS (LOCATION) MUTATIONS ==================== + +export const UPDATE_ACCOUNT_ADDRESS = gql` + mutation UpdateAccountAddress($id: UUID!, $input: UpdateAccountAddressInput!) { + updateAccountAddress(id: $id, input: $input) { + id + name + streetAddress + city + state + zipCode + notes + isPrimary + isActive + } + } +`; diff --git a/frontend/src/lib/graphql/mutations/messaging.ts b/frontend/src/lib/graphql/mutations/messaging.ts new file mode 100644 index 0000000..7d1c096 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/messaging.ts @@ -0,0 +1,227 @@ +import { gql } from '@apollo/client/core'; +import type { + Conversation, + ConversationParticipant, + Message, + ConversationType +} from '../queries/messaging'; + +// ==================== CONVERSATION MUTATIONS ==================== + +export const CREATE_CONVERSATION = gql` + mutation CreateConversation($input: CreateConversationInput!) { + createConversation(input: $input) { + id + subject + conversationType + entityType + entityId + lastMessageAt + isArchived + createdAt + participants { + id + participantType + participantId + participantProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + } + } +`; + +export const UPDATE_CONVERSATION = gql` + mutation UpdateConversation($id: UUID!, $input: UpdateConversationInput!) 
{ + updateConversation(id: $id, input: $input) { + id + subject + updatedAt + } + } +`; + +export const ARCHIVE_CONVERSATION = gql` + mutation ArchiveConversation($id: UUID!, $isArchived: Boolean!) { + archiveConversation(id: $id, isArchived: $isArchived) { + id + isArchived + updatedAt + } + } +`; + +export const MUTE_CONVERSATION = gql` + mutation MuteConversation($id: UUID!, $isMuted: Boolean!) { + muteConversation(id: $id, isMuted: $isMuted) + } +`; + +export const LEAVE_CONVERSATION = gql` + mutation LeaveConversation($conversationId: UUID!) { + leaveConversation(conversationId: $conversationId) + } +`; + +// ==================== MESSAGE MUTATIONS ==================== + +export const SEND_MESSAGE = gql` + mutation SendMessage($input: SendMessageInput!) { + sendMessage(input: $input) { + id + conversationId + content + isDeleted + isSystemMessage + replyToId + attachments + createdAt + authorType + authorId + authorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + } +`; + +export const DELETE_MESSAGE = gql` + mutation DeleteMessage($id: UUID!) { + deleteMessage(id: $id) + } +`; + +// ==================== READ TRACKING MUTATIONS ==================== + +export const MARK_CONVERSATION_AS_READ = gql` + mutation MarkConversationAsRead($id: UUID!) { + markConversationAsRead(id: $id) + } +`; + +export const MARK_ALL_CONVERSATIONS_AS_READ = gql` + mutation MarkAllConversationsAsRead { + markAllConversationsAsRead + } +`; + +// ==================== PARTICIPANT MUTATIONS ==================== + +export const ADD_PARTICIPANT = gql` + mutation AddParticipant($conversationId: UUID!, $participant: ParticipantInput!) { + addParticipant(conversationId: $conversationId, participant: $participant) { + id + conversationId + participantType + participantId + isArchived + joinedAt + participantProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... 
on CustomerProfileType { + id + fullName + } + } + } + } +`; + +export const REMOVE_PARTICIPANT = gql` + mutation RemoveParticipant($conversationId: UUID!, $participant: ParticipantInput!) { + removeParticipant(conversationId: $conversationId, participant: $participant) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface ParticipantInput { + participantType: string; + participantId: string; +} + +export interface CreateConversationInput { + subject?: string; + conversationType: ConversationType; + entityType?: string; + entityId?: string; + participantIds: ParticipantInput[]; + initialMessage?: string; +} + +export interface UpdateConversationInput { + subject?: string; +} + +export interface SendMessageInput { + conversationId: string; + content: string; + replyToId?: string; + attachments?: unknown; +} + +// ==================== RESULT TYPES ==================== + +export interface CreateConversationResult { + createConversation: Conversation; +} + +export interface UpdateConversationResult { + updateConversation: Pick<Conversation, 'id' | 'subject' | 'updatedAt'>; +} + +export interface ArchiveConversationResult { + archiveConversation: Pick<Conversation, 'id' | 'isArchived' | 'updatedAt'>; +} + +export interface MuteConversationResult { + muteConversation: boolean; +} + +export interface LeaveConversationResult { + leaveConversation: boolean; +} + +export interface SendMessageResult { + sendMessage: Message; +} + +export interface DeleteMessageResult { + deleteMessage: boolean; +} + +export interface MarkConversationAsReadResult { + markConversationAsRead: boolean; +} + +export interface MarkAllConversationsAsReadResult { + markAllConversationsAsRead: number; +} + +export interface AddParticipantResult { + addParticipant: ConversationParticipant; +} + +export interface RemoveParticipantResult { + removeParticipant: boolean; +} diff --git a/frontend/src/lib/graphql/mutations/notifications.ts b/frontend/src/lib/graphql/mutations/notifications.ts new file mode 100644 index 0000000..eab53f4 --- /dev/null +++ 
b/frontend/src/lib/graphql/mutations/notifications.ts @@ -0,0 +1,141 @@ +import { gql } from '@apollo/client/core'; +import type { Notification, NotificationRule } from '../queries/notifications'; + +// ==================== USER NOTIFICATION MUTATIONS ==================== + +export const MARK_NOTIFICATION_AS_READ = gql` + mutation MarkNotificationAsRead($id: UUID!) { + markNotificationAsRead(id: $id) { + id + status + readAt + isRead + updatedAt + } + } +`; + +export const MARK_ALL_NOTIFICATIONS_AS_READ = gql` + mutation MarkAllNotificationsAsRead { + markAllNotificationsAsRead + } +`; + +export const DELETE_NOTIFICATION = gql` + mutation DeleteNotification($id: UUID!) { + deleteNotification(id: $id) + } +`; + +// ==================== ADMIN NOTIFICATION RULE MUTATIONS ==================== + +export const CREATE_NOTIFICATION_RULE = gql` + mutation CreateNotificationRule($input: CreateNotificationRuleInput!) { + createNotificationRule(input: $input) { + id + name + description + isActive + eventTypes + channels + targetRoles + conditions + subjectTemplate + bodyTemplate + createdAt + updatedAt + } + } +`; + +export const UPDATE_NOTIFICATION_RULE = gql` + mutation UpdateNotificationRule($id: UUID!, $input: UpdateNotificationRuleInput!) { + updateNotificationRule(id: $id, input: $input) { + id + name + description + isActive + eventTypes + channels + targetRoles + conditions + subjectTemplate + bodyTemplate + updatedAt + } + } +`; + +export const DELETE_NOTIFICATION_RULE = gql` + mutation DeleteNotificationRule($id: UUID!) { + deleteNotificationRule(id: $id) + } +`; + +export const TOGGLE_NOTIFICATION_RULE = gql` + mutation ToggleNotificationRule($id: UUID!, $isActive: Boolean!) 
{ + toggleNotificationRule(id: $id, isActive: $isActive) { + id + isActive + updatedAt + } + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateNotificationRuleInput { + name: string; + description?: string; + eventTypes: string[]; + channels: string[]; + targetRoles?: string[]; + conditions?: unknown; + subjectTemplate?: string; + bodyTemplate?: string; + targetTeamProfileIds?: string[]; + targetCustomerProfileIds?: string[]; +} + +export interface UpdateNotificationRuleInput { + name?: string; + description?: string; + eventTypes?: string[]; + channels?: string[]; + targetRoles?: string[]; + conditions?: unknown; + subjectTemplate?: string; + bodyTemplate?: string; + targetTeamProfileIds?: string[]; + targetCustomerProfileIds?: string[]; +} + +// ==================== RESULT TYPES ==================== + +export interface MarkNotificationAsReadResult { + markNotificationAsRead: Pick<Notification, 'id' | 'status' | 'readAt' | 'isRead' | 'updatedAt'>; +} + +export interface MarkAllNotificationsAsReadResult { + markAllNotificationsAsRead: number; +} + +export interface DeleteNotificationResult { + deleteNotification: boolean; +} + +export interface CreateNotificationRuleResult { + createNotificationRule: NotificationRule; +} + +export interface UpdateNotificationRuleResult { + updateNotificationRule: NotificationRule; +} + +export interface DeleteNotificationRuleResult { + deleteNotificationRule: boolean; +} + +export interface ToggleNotificationRuleResult { + toggleNotificationRule: Pick<NotificationRule, 'id' | 'isActive' | 'updatedAt'>; +} diff --git a/frontend/src/lib/graphql/mutations/profile.ts b/frontend/src/lib/graphql/mutations/profile.ts new file mode 100644 index 0000000..f8a08c9 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/profile.ts @@ -0,0 +1,130 @@ +import { gql } from '@apollo/client/core'; +import type { TeamRole, EntityStatus } from '../queries/team'; + +// ==================== TEAM PROFILE MUTATIONS ==================== + +export const CREATE_TEAM_PROFILE = gql` + mutation CreateTeamProfile($input: 
CreateTeamProfileInput!) { + createTeamProfile(input: $input) { + id + firstName + lastName + fullName + email + phone + role + status + notes + createdAt + updatedAt + } + } +`; + +export const UPDATE_TEAM_PROFILE = gql` + mutation UpdateTeamProfile($id: UUID!, $input: UpdateTeamProfileInput!) { + updateTeamProfile(id: $id, input: $input) { + id + firstName + lastName + fullName + email + phone + role + status + notes + createdAt + updatedAt + } + } +`; + +export const DELETE_TEAM_PROFILE = gql` + mutation DeleteTeamProfile($id: UUID!) { + deleteTeamProfile(id: $id) + } +`; + +// ==================== CUSTOMER PROFILE MUTATIONS ==================== + +export const CREATE_CUSTOMER_PROFILE = gql` + mutation CreateCustomerProfile($input: CreateCustomerProfileInput!) { + createCustomerProfile(input: $input) { + id + firstName + lastName + fullName + email + phone + status + notes + createdAt + updatedAt + } + } +`; + +export const UPDATE_CUSTOMER_PROFILE = gql` + mutation UpdateCustomerProfile($id: UUID!, $input: UpdateCustomerProfileInput!) { + updateCustomerProfile(id: $id, input: $input) { + id + firstName + lastName + fullName + email + phone + status + notes + createdAt + updatedAt + } + } +`; + +export const DELETE_CUSTOMER_PROFILE = gql` + mutation DeleteCustomerProfile($id: UUID!) 
{ + deleteCustomerProfile(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateTeamProfileInput { + kratosIdentityId: string; + firstName: string; + lastName: string; + email: string; + phone: string; + role: TeamRole; + status?: EntityStatus; + notes: string; +} + +export interface UpdateTeamProfileInput { + firstName?: string; + lastName?: string; + email?: string; + phone?: string; + role?: TeamRole; + status?: EntityStatus; + notes?: string; +} + +export interface CreateCustomerProfileInput { + kratosIdentityId: string; + firstName: string; + lastName: string; + email: string; + phone: string; + status?: EntityStatus; + notes: string; +} + +export interface UpdateCustomerProfileInput { + firstName?: string; + lastName?: string; + email?: string; + phone?: string; + status?: EntityStatus; + notes?: string; +} diff --git a/frontend/src/lib/graphql/mutations/project-scope-template.ts b/frontend/src/lib/graphql/mutations/project-scope-template.ts new file mode 100644 index 0000000..7f2b205 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/project-scope-template.ts @@ -0,0 +1,190 @@ +import { gql } from '@apollo/client/core'; + +// ==================== TEMPLATE MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_TEMPLATE = gql` + mutation CreateProjectScopeTemplate($input: CreateProjectScopeTemplateInput!) { + createProjectScopeTemplate(input: $input) { + id + name + description + isActive + } + } +`; + +export const UPDATE_PROJECT_SCOPE_TEMPLATE = gql` + mutation UpdateProjectScopeTemplate($id: UUID!, $input: UpdateProjectScopeTemplateInput!) { + updateProjectScopeTemplate(id: $id, input: $input) { + id + name + description + isActive + } + } +`; + +export const DELETE_PROJECT_SCOPE_TEMPLATE = gql` + mutation DeleteProjectScopeTemplate($id: UUID!) 
{ + deleteProjectScopeTemplate(id: $id) + } +`; + +export const IMPORT_PROJECT_SCOPE_TEMPLATE = gql` + mutation ImportProjectScopeTemplate($input: ImportProjectScopeTemplateInput!) { + importProjectScopeTemplate(input: $input) { + id + name + description + isActive + } + } +`; + +// ==================== CATEGORY MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_TEMPLATE_CATEGORY = gql` + mutation CreateProjectScopeTemplateCategory( + $templateId: UUID! + $input: CreateProjectScopeTemplateCategoryInput! + ) { + createProjectScopeTemplateCategory(templateId: $templateId, input: $input) { + id + templateId + name + order + } + } +`; + +export const UPDATE_PROJECT_SCOPE_TEMPLATE_CATEGORY = gql` + mutation UpdateProjectScopeTemplateCategory( + $id: UUID! + $input: UpdateProjectScopeTemplateCategoryInput! + ) { + updateProjectScopeTemplateCategory(id: $id, input: $input) { + id + templateId + name + order + } + } +`; + +export const DELETE_PROJECT_SCOPE_TEMPLATE_CATEGORY = gql` + mutation DeleteProjectScopeTemplateCategory($id: UUID!) { + deleteProjectScopeTemplateCategory(id: $id) + } +`; + +// ==================== TASK MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_TEMPLATE_TASK = gql` + mutation CreateProjectScopeTemplateTask( + $categoryId: UUID! + $input: CreateProjectScopeTemplateTaskInput! + ) { + createProjectScopeTemplateTask(categoryId: $categoryId, input: $input) { + id + categoryId + scopeDescription + checklistDescription + sessionDescription + order + estimatedMinutes + } + } +`; + +export const UPDATE_PROJECT_SCOPE_TEMPLATE_TASK = gql` + mutation UpdateProjectScopeTemplateTask( + $id: UUID! + $input: UpdateProjectScopeTemplateTaskInput! 
+ ) { + updateProjectScopeTemplateTask(id: $id, input: $input) { + id + categoryId + scopeDescription + checklistDescription + sessionDescription + order + estimatedMinutes + } + } +`; + +export const DELETE_PROJECT_SCOPE_TEMPLATE_TASK = gql` + mutation DeleteProjectScopeTemplateTask($id: UUID!) { + deleteProjectScopeTemplateTask(id: $id) + } +`; + +// ==================== INSTANTIATION MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_FROM_TEMPLATE = gql` + mutation CreateProjectScopeFromTemplate($templateId: UUID!, $projectId: UUID!) { + createProjectScopeFromTemplate(templateId: $templateId, projectId: $projectId) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateProjectScopeTemplateInput { + name: string; + description?: string; + categories?: CreateProjectScopeTemplateCategoryInput[]; +} + +export interface UpdateProjectScopeTemplateInput { + name?: string; + description?: string; + isActive?: boolean; +} + +export interface CreateProjectScopeTemplateCategoryInput { + name: string; + order?: number; + tasks?: CreateProjectScopeTemplateTaskInput[]; +} + +export interface UpdateProjectScopeTemplateCategoryInput { + name?: string; + order?: number; +} + +export interface CreateProjectScopeTemplateTaskInput { + scopeDescription: string; + checklistDescription?: string; + sessionDescription?: string; + order?: number; + estimatedMinutes?: number; +} + +export interface UpdateProjectScopeTemplateTaskInput { + scopeDescription?: string; + checklistDescription?: string; + sessionDescription?: string; + order?: number; + estimatedMinutes?: number; +} + +export interface ImportProjectScopeTemplateInput { + name: string; + description?: string; + categories: ImportProjectScopeTemplateCategoryInput[]; + replace?: boolean; +} + +export interface ImportProjectScopeTemplateCategoryInput { + name: string; + order: number; + tasks: ImportProjectScopeTemplateTaskInput[]; +} + +export interface 
ImportProjectScopeTemplateTaskInput { + scopeDescription: string; + checklistDescription?: string; + sessionDescription?: string; + order: number; + estimatedMinutes?: number; +} diff --git a/frontend/src/lib/graphql/mutations/project.ts b/frontend/src/lib/graphql/mutations/project.ts new file mode 100644 index 0000000..2982f42 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/project.ts @@ -0,0 +1,257 @@ +import { gql } from '@apollo/client/core'; + +// ==================== PROJECT MUTATIONS ==================== + +export const CREATE_PROJECT = gql` + mutation CreateProject($input: CreateProjectInput!) { + createProject(input: $input) { + id + customerId + name + date + status + labor + amount + notes + calendarEventId + waveServiceId + accountAddressId + streetAddress + city + state + zipCode + } + } +`; + +export const UPDATE_PROJECT = gql` + mutation UpdateProject($id: UUID!, $input: UpdateProjectInput!) { + updateProject(id: $id, input: $input) { + id + customerId + name + date + status + labor + amount + notes + calendarEventId + waveServiceId + accountAddressId + streetAddress + city + state + zipCode + } + } +`; + +export const DELETE_PROJECT = gql` + mutation DeleteProject($id: UUID!) { + deleteProject(id: $id) + } +`; + +// ==================== TEAM MEMBER ASSIGNMENT ==================== + +export const ASSIGN_PROJECT_TEAM_MEMBER = gql` + mutation AssignProjectTeamMember($projectId: UUID!, $teamProfileId: UUID!) { + assignProjectTeamMember(projectId: $projectId, teamProfileId: $teamProfileId) { + id + projectId + teamProfileId + teamProfile { + id + fullName + role + } + } + } +`; + +export const REMOVE_PROJECT_TEAM_MEMBER = gql` + mutation RemoveProjectTeamMember($id: UUID!) { + removeProjectTeamMember(id: $id) + } +`; + +// ==================== PROJECT SCOPE MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE = gql` + mutation CreateProjectScope($projectId: UUID!, $input: CreateProjectScopeInput!) 
{ + createProjectScope(projectId: $projectId, input: $input) { + id + name + description + isActive + categories { + id + name + order + tasks { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } + } + } +`; + +export const UPDATE_PROJECT_SCOPE = gql` + mutation UpdateProjectScope($id: UUID!, $input: UpdateProjectScopeInput!) { + updateProjectScope(id: $id, input: $input) { + id + name + description + isActive + } + } +`; + +export const DELETE_PROJECT_SCOPE = gql` + mutation DeleteProjectScope($id: UUID!) { + deleteProjectScope(id: $id) + } +`; + +// ==================== PROJECT SCOPE CATEGORY MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_CATEGORY = gql` + mutation CreateProjectScopeCategory($scopeId: UUID!, $input: CreateProjectCategoryInput!) { + createProjectScopeCategory(scopeId: $scopeId, input: $input) { + id + name + order + } + } +`; + +export const UPDATE_PROJECT_SCOPE_CATEGORY = gql` + mutation UpdateProjectScopeCategory($id: UUID!, $input: UpdateProjectCategoryInput!) { + updateProjectScopeCategory(id: $id, input: $input) { + id + name + order + } + } +`; + +export const DELETE_PROJECT_SCOPE_CATEGORY = gql` + mutation DeleteProjectScopeCategory($id: UUID!) { + deleteProjectScopeCategory(id: $id) + } +`; + +// ==================== PROJECT SCOPE TASK MUTATIONS ==================== + +export const CREATE_PROJECT_SCOPE_TASK = gql` + mutation CreateProjectScopeTask($categoryId: UUID!, $input: CreateScopeTaskInput!) { + createProjectScopeTask(categoryId: $categoryId, input: $input) { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const UPDATE_PROJECT_SCOPE_TASK = gql` + mutation UpdateProjectScopeTask($id: UUID!, $input: UpdateScopeTaskInput!) 
{ + updateProjectScopeTask(id: $id, input: $input) { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const DELETE_PROJECT_SCOPE_TASK = gql` + mutation DeleteProjectScopeTask($id: UUID!) { + deleteProjectScopeTask(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export type WorkStatus = 'SCHEDULED' | 'IN_PROGRESS' | 'COMPLETED' | 'CANCELLED'; + +export interface CreateProjectInput { + customerId: string; + name: string; + date: string; + status?: WorkStatus; + accountAddressId?: string; + streetAddress?: string; + city?: string; + state?: string; + zipCode?: string; +} + +export interface UpdateProjectInput { + name?: string; + date?: string; + status?: WorkStatus; + labor?: number; + amount?: number; + waveServiceId?: string; + notes?: string; + calendarEventId?: string; + accountAddressId?: string; + streetAddress?: string; + city?: string; + state?: string; + zipCode?: string; +} + +export interface CreateProjectScopeInput { + name: string; + description?: string; + categories?: CreateProjectCategoryInput[]; +} + +export interface UpdateProjectScopeInput { + name?: string; + description?: string; + isActive?: boolean; +} + +export interface CreateProjectCategoryInput { + name: string; + order?: number; + tasks?: CreateScopeTaskInput[]; +} + +export interface UpdateProjectCategoryInput { + name?: string; + order?: number; +} + +export interface CreateScopeTaskInput { + scopeDescription: string; + checklistDescription?: string; + sessionDescription?: string; + frequency?: 'DAILY' | 'WEEKLY' | 'MONTHLY' | 'QUARTERLY' | 'TRIANNUAL' | 'ANNUAL' | 'AS_NEEDED'; + order?: number; + estimatedMinutes?: number; +} + +export interface UpdateScopeTaskInput { + scopeDescription?: string; + checklistDescription?: string; + sessionDescription?: string; + frequency?: 'DAILY' | 'WEEKLY' | 'MONTHLY' | 'QUARTERLY' | 'TRIANNUAL' | 'ANNUAL' | 'AS_NEEDED'; + order?: number; + 
estimatedMinutes?: number; +} diff --git a/frontend/src/lib/graphql/mutations/reports.ts b/frontend/src/lib/graphql/mutations/reports.ts new file mode 100644 index 0000000..b517733 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/reports.ts @@ -0,0 +1,90 @@ +import { gql } from '@apollo/client/core'; + +export const CREATE_REPORT = gql` + mutation CreateReport($input: CreateReportInput!) { + createReport(input: $input) { + id + teamProfileId + startDate + endDate + status + } + } +`; + +export const UPDATE_REPORT = gql` + mutation UpdateReport($id: UUID!, $input: UpdateReportInput!) { + updateReport(id: $id, input: $input) { + id + teamProfileId + startDate + endDate + status + } + } +`; + +export const DELETE_REPORT = gql` + mutation DeleteReport($id: UUID!) { + deleteReport(id: $id) + } +`; + +export const ADD_SERVICE_TO_REPORT = gql` + mutation AddServiceToReport($reportId: UUID!, $serviceId: UUID!) { + addServiceToReport(reportId: $reportId, serviceId: $serviceId) { + id + reportId + serviceId + laborShare + } + } +`; + +export const ADD_PROJECT_TO_REPORT = gql` + mutation AddProjectToReport($reportId: UUID!, $projectId: UUID!) { + addProjectToReport(reportId: $reportId, projectId: $projectId) { + id + reportId + projectId + laborShare + } + } +`; + +export const REMOVE_SERVICE_FROM_REPORT = gql` + mutation RemoveServiceFromReport($id: UUID!) { + removeServiceFromReport(id: $id) + } +`; + +export const REMOVE_PROJECT_FROM_REPORT = gql` + mutation RemoveProjectFromReport($id: UUID!) { + removeProjectFromReport(id: $id) + } +`; + +export const ADD_ALL_ELIGIBLE_SERVICES = gql` + mutation AddAllEligibleServicesToReport($reportId: UUID!) { + addAllEligibleServicesToReport(reportId: $reportId) + } +`; + +export const ADD_ALL_ELIGIBLE_PROJECTS = gql` + mutation AddAllEligibleProjectsToReport($reportId: UUID!) 
{ + addAllEligibleProjectsToReport(reportId: $reportId) + } +`; + +// Input types for mutations +export interface CreateReportInput { + teamProfileId: string; + startDate: string; + endDate: string; +} + +export interface UpdateReportInput { + startDate?: string; + endDate?: string; + status?: 'DRAFT' | 'FINALIZED' | 'PAID'; +} diff --git a/frontend/src/lib/graphql/mutations/service-scope-template.ts b/frontend/src/lib/graphql/mutations/service-scope-template.ts new file mode 100644 index 0000000..d108315 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/service-scope-template.ts @@ -0,0 +1,204 @@ +import { gql } from '@apollo/client/core'; +import type { TaskFrequency } from '../queries/service-scope-templates'; + +// ==================== TEMPLATE MUTATIONS ==================== + +export const CREATE_SERVICE_SCOPE_TEMPLATE = gql` + mutation CreateServiceScopeTemplate($input: CreateServiceScopeTemplateInput!) { + createServiceScopeTemplate(input: $input) { + id + name + description + isActive + } + } +`; + +export const UPDATE_SERVICE_SCOPE_TEMPLATE = gql` + mutation UpdateServiceScopeTemplate($id: UUID!, $input: UpdateServiceScopeTemplateInput!) { + updateServiceScopeTemplate(id: $id, input: $input) { + id + name + description + isActive + } + } +`; + +export const DELETE_SERVICE_SCOPE_TEMPLATE = gql` + mutation DeleteServiceScopeTemplate($id: UUID!) { + deleteServiceScopeTemplate(id: $id) + } +`; + +export const IMPORT_SERVICE_SCOPE_TEMPLATE = gql` + mutation ImportServiceScopeTemplate($input: ImportServiceScopeTemplateInput!) { + importServiceScopeTemplate(input: $input) { + id + name + description + isActive + } + } +`; + +// ==================== AREA MUTATIONS ==================== + +export const CREATE_SERVICE_SCOPE_TEMPLATE_AREA = gql` + mutation CreateServiceScopeTemplateArea( + $templateId: UUID! + $input: CreateServiceScopeTemplateAreaInput! 
+ ) { + createServiceScopeTemplateArea(templateId: $templateId, input: $input) { + id + templateId + name + order + } + } +`; + +export const UPDATE_SERVICE_SCOPE_TEMPLATE_AREA = gql` + mutation UpdateServiceScopeTemplateArea( + $id: UUID! + $input: UpdateServiceScopeTemplateAreaInput! + ) { + updateServiceScopeTemplateArea(id: $id, input: $input) { + id + templateId + name + order + } + } +`; + +export const DELETE_SERVICE_SCOPE_TEMPLATE_AREA = gql` + mutation DeleteServiceScopeTemplateArea($id: UUID!) { + deleteServiceScopeTemplateArea(id: $id) + } +`; + +// ==================== TASK MUTATIONS ==================== + +export const CREATE_SERVICE_SCOPE_TEMPLATE_TASK = gql` + mutation CreateServiceScopeTemplateTask( + $areaId: UUID! + $input: CreateServiceScopeTemplateTaskInput! + ) { + createServiceScopeTemplateTask(areaId: $areaId, input: $input) { + id + areaId + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const UPDATE_SERVICE_SCOPE_TEMPLATE_TASK = gql` + mutation UpdateServiceScopeTemplateTask( + $id: UUID! + $input: UpdateServiceScopeTemplateTaskInput! + ) { + updateServiceScopeTemplateTask(id: $id, input: $input) { + id + areaId + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } +`; + +export const DELETE_SERVICE_SCOPE_TEMPLATE_TASK = gql` + mutation DeleteServiceScopeTemplateTask($id: UUID!) { + deleteServiceScopeTemplateTask(id: $id) + } +`; + +// ==================== INSTANTIATION MUTATIONS ==================== + +export const CREATE_SERVICE_SCOPE_FROM_TEMPLATE = gql` + mutation CreateServiceScopeFromTemplate( + $templateId: UUID! + $accountId: UUID! + $accountAddressId: UUID! 
+ ) { + createServiceScopeFromTemplate( + templateId: $templateId + accountId: $accountId + accountAddressId: $accountAddressId + ) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface CreateServiceScopeTemplateInput { + name: string; + description?: string; + areas?: CreateServiceScopeTemplateAreaInput[]; +} + +export interface UpdateServiceScopeTemplateInput { + name?: string; + description?: string; + isActive?: boolean; +} + +export interface CreateServiceScopeTemplateAreaInput { + name: string; + order?: number; + tasks?: CreateServiceScopeTemplateTaskInput[]; +} + +export interface UpdateServiceScopeTemplateAreaInput { + name?: string; + order?: number; +} + +export interface CreateServiceScopeTemplateTaskInput { + scopeDescription: string; + checklistDescription?: string; + sessionDescription?: string; + frequency?: TaskFrequency; + order?: number; + estimatedMinutes?: number; +} + +export interface UpdateServiceScopeTemplateTaskInput { + scopeDescription?: string; + checklistDescription?: string; + sessionDescription?: string; + frequency?: TaskFrequency; + order?: number; + estimatedMinutes?: number; +} + +export interface ImportServiceScopeTemplateInput { + name: string; + description?: string; + areas: ImportServiceScopeTemplateAreaInput[]; + replace?: boolean; +} + +export interface ImportServiceScopeTemplateAreaInput { + name: string; + order: number; + tasks: ImportServiceScopeTemplateTaskInput[]; +} + +export interface ImportServiceScopeTemplateTaskInput { + scopeDescription: string; + checklistDescription?: string; + sessionDescription?: string; + frequency?: TaskFrequency; + order: number; + estimatedMinutes?: number; +} diff --git a/frontend/src/lib/graphql/mutations/service.ts b/frontend/src/lib/graphql/mutations/service.ts new file mode 100644 index 0000000..ba00ec8 --- /dev/null +++ b/frontend/src/lib/graphql/mutations/service.ts @@ -0,0 +1,110 @@ +import { gql } from '@apollo/client/core'; + +// 
==================== SERVICE MUTATIONS ==================== + +export const CREATE_SERVICE = gql` + mutation CreateService($input: CreateServiceInput!) { + createService(input: $input) { + id + accountId + accountAddressId + date + status + notes + calendarEventId + } + } +`; + +export const UPDATE_SERVICE = gql` + mutation UpdateService($id: UUID!, $input: UpdateServiceInput!) { + updateService(id: $id, input: $input) { + id + accountId + accountAddressId + date + status + notes + calendarEventId + } + } +`; + +export const DELETE_SERVICE = gql` + mutation DeleteService($id: UUID!) { + deleteService(id: $id) + } +`; + +// ==================== TEAM MEMBER ASSIGNMENT ==================== + +export const ASSIGN_SERVICE_TEAM_MEMBER = gql` + mutation AssignServiceTeamMember($serviceId: UUID!, $teamProfileId: UUID!) { + assignServiceTeamMember(serviceId: $serviceId, teamProfileId: $teamProfileId) { + id + serviceId + teamProfileId + teamProfile { + id + fullName + role + } + } + } +`; + +export const REMOVE_SERVICE_TEAM_MEMBER = gql` + mutation RemoveServiceTeamMember($id: UUID!) { + removeServiceTeamMember(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export type WorkStatus = 'SCHEDULED' | 'IN_PROGRESS' | 'COMPLETED' | 'CANCELLED'; + +export interface CreateServiceInput { + accountId: string; + accountAddressId: string; + date: string; + status?: WorkStatus; + notes?: string; +} + +export interface UpdateServiceInput { + date?: string; + status?: WorkStatus; + notes?: string; + calendarEventId?: string; +} + +// ==================== GENERATE SERVICES ==================== + +export const GENERATE_SERVICES_BY_MONTH = gql` + mutation GenerateServicesByMonth($input: GenerateServicesInput!) 
{ + generateServicesByMonth(input: $input) { + id + date + status + notes + } + } +`; + +export interface GenerateServicesInput { + accountAddressId: string; + scheduleId: string; + month: number; + year: number; +} + +export interface GeneratedService { + id: string; + date: string; + status: WorkStatus; + notes: string | null; +} + +export interface GenerateServicesByMonthResult { + generateServicesByMonth: GeneratedService[]; +} diff --git a/frontend/src/lib/graphql/mutations/session.ts b/frontend/src/lib/graphql/mutations/session.ts new file mode 100644 index 0000000..be3330d --- /dev/null +++ b/frontend/src/lib/graphql/mutations/session.ts @@ -0,0 +1,553 @@ +import { gql } from '@apollo/client/core'; + +// ==================== SESSION LIFECYCLE ==================== + +export const OPEN_SERVICE_SESSION = gql` + mutation OpenServiceSession($input: OpenServiceSessionInput!) { + openServiceSession(input: $input) { + id + serviceId + accountId + accountAddressId + customerId + scopeId + start + date + createdById + isActive + createdBy { + id + fullName + } + } + } +`; + +export const CLOSE_SERVICE_SESSION = gql` + mutation CloseServiceSession($input: CloseServiceSessionInput!) { + closeServiceSession(input: $input) { + id + serviceId + start + end + date + isActive + durationSeconds + closedById + closedBy { + id + fullName + } + } + } +`; + +export const REVERT_SERVICE_SESSION = gql` + mutation RevertServiceSession($serviceId: UUID!) { + revertServiceSession(serviceId: $serviceId) + } +`; + +export const OPEN_PROJECT_SESSION = gql` + mutation OpenProjectSession($input: OpenProjectSessionInput!) { + openProjectSession(input: $input) { + id + projectId + accountId + accountAddressId + customerId + scopeId + start + date + createdById + isActive + createdBy { + id + fullName + } + } + } +`; + +export const CLOSE_PROJECT_SESSION = gql` + mutation CloseProjectSession($input: CloseProjectSessionInput!) 
{ + closeProjectSession(input: $input) { + id + projectId + start + end + date + isActive + durationSeconds + closedById + closedBy { + id + fullName + } + } + } +`; + +export const REVERT_PROJECT_SESSION = gql` + mutation RevertProjectSession($projectId: UUID!) { + revertProjectSession(projectId: $projectId) + } +`; + +// ==================== TASK COMPLETIONS ==================== + +export const ADD_SERVICE_TASK_COMPLETION = gql` + mutation AddServiceTaskCompletion($serviceId: UUID!, $taskId: UUID!, $notes: String) { + addServiceTaskCompletion(serviceId: $serviceId, taskId: $taskId, notes: $notes) { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + frequency + order + } + completedBy { + id + fullName + } + } + } +`; + +export const REMOVE_SERVICE_TASK_COMPLETION = gql` + mutation RemoveServiceTaskCompletion($serviceId: UUID!, $taskId: UUID!) { + removeServiceTaskCompletion(serviceId: $serviceId, taskId: $taskId) + } +`; + +export const ADD_PROJECT_TASK_COMPLETION = gql` + mutation AddProjectTaskCompletion($projectId: UUID!, $taskId: UUID!, $notes: String) { + addProjectTaskCompletion(projectId: $projectId, taskId: $taskId, notes: $notes) { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + order + } + completedBy { + id + fullName + } + } + } +`; + +export const REMOVE_PROJECT_TASK_COMPLETION = gql` + mutation RemoveProjectTaskCompletion($projectId: UUID!, $taskId: UUID!) { + removeProjectTaskCompletion(projectId: $projectId, taskId: $taskId) + } +`; + +// ==================== SERVICE SESSION NOTES ==================== + +export const CREATE_SERVICE_SESSION_NOTE = gql` + mutation CreateServiceSessionNote($input: CreateSessionNoteInput!) 
{ + createServiceSessionNote(input: $input) { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + } +`; + +export const UPDATE_SERVICE_SESSION_NOTE = gql` + mutation UpdateServiceSessionNote($id: UUID!, $input: UpdateSessionNoteInput!) { + updateServiceSessionNote(id: $id, input: $input) { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + } +`; + +export const DELETE_SERVICE_SESSION_NOTE = gql` + mutation DeleteServiceSessionNote($id: UUID!) { + deleteServiceSessionNote(id: $id) + } +`; + +// ==================== PROJECT SESSION NOTES ==================== + +export const CREATE_PROJECT_SESSION_NOTE = gql` + mutation CreateProjectSessionNote($input: CreateSessionNoteInput!) { + createProjectSessionNote(input: $input) { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + } +`; + +export const UPDATE_PROJECT_SESSION_NOTE = gql` + mutation UpdateProjectSessionNote($id: UUID!, $input: UpdateSessionNoteInput!) { + updateProjectSessionNote(id: $id, input: $input) { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + } +`; + +export const DELETE_PROJECT_SESSION_NOTE = gql` + mutation DeleteProjectSessionNote($id: UUID!) { + deleteProjectSessionNote(id: $id) + } +`; + +// ==================== SERVICE SESSION IMAGES ==================== + +export const UPLOAD_SERVICE_SESSION_IMAGE = gql` + mutation UploadServiceSessionImage( + $sessionId: UUID! + $file: Upload! 
+ $title: String + $notes: String + $internal: Boolean + ) { + uploadServiceSessionImage( + sessionId: $sessionId + file: $file + title: $title + notes: $notes + internal: $internal + ) { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const UPDATE_SERVICE_SESSION_IMAGE = gql` + mutation UpdateServiceSessionImage($id: UUID!, $input: UpdateSessionMediaInput!) { + updateServiceSessionImage(id: $id, input: $input) { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const DELETE_SERVICE_SESSION_IMAGE = gql` + mutation DeleteServiceSessionImage($id: UUID!) { + deleteServiceSessionImage(id: $id) + } +`; + +// ==================== PROJECT SESSION IMAGES ==================== + +export const UPLOAD_PROJECT_SESSION_IMAGE = gql` + mutation UploadProjectSessionImage( + $sessionId: UUID! + $file: Upload! + $title: String + $notes: String + $internal: Boolean + ) { + uploadProjectSessionImage( + sessionId: $sessionId + file: $file + title: $title + notes: $notes + internal: $internal + ) { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const UPDATE_PROJECT_SESSION_IMAGE = gql` + mutation UpdateProjectSessionImage($id: UUID!, $input: UpdateSessionMediaInput!) { + updateProjectSessionImage(id: $id, input: $input) { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const DELETE_PROJECT_SESSION_IMAGE = gql` + mutation DeleteProjectSessionImage($id: UUID!) 
{ + deleteProjectSessionImage(id: $id) + } +`; + +// ==================== SERVICE SESSION VIDEOS ==================== + +export const UPLOAD_SERVICE_SESSION_VIDEO = gql` + mutation UploadServiceSessionVideo( + $sessionId: UUID! + $file: Upload! + $title: String + $notes: String + $internal: Boolean + ) { + uploadServiceSessionVideo( + sessionId: $sessionId + file: $file + title: $title + notes: $notes + internal: $internal + ) { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const UPDATE_SERVICE_SESSION_VIDEO = gql` + mutation UpdateServiceSessionVideo($id: UUID!, $input: UpdateSessionMediaInput!) { + updateServiceSessionVideo(id: $id, input: $input) { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const DELETE_SERVICE_SESSION_VIDEO = gql` + mutation DeleteServiceSessionVideo($id: UUID!) { + deleteServiceSessionVideo(id: $id) + } +`; + +// ==================== PROJECT SESSION VIDEOS ==================== + +export const UPLOAD_PROJECT_SESSION_VIDEO = gql` + mutation UploadProjectSessionVideo( + $sessionId: UUID! + $file: Upload! + $title: String + $notes: String + $internal: Boolean + ) { + uploadProjectSessionVideo( + sessionId: $sessionId + file: $file + title: $title + notes: $notes + internal: $internal + ) { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const UPDATE_PROJECT_SESSION_VIDEO = gql` + mutation UpdateProjectSessionVideo($id: UUID!, $input: UpdateSessionMediaInput!) 
{ + updateProjectSessionVideo(id: $id, input: $input) { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + } +`; + +export const DELETE_PROJECT_SESSION_VIDEO = gql` + mutation DeleteProjectSessionVideo($id: UUID!) { + deleteProjectSessionVideo(id: $id) + } +`; + +// ==================== INPUT TYPES ==================== + +export interface OpenServiceSessionInput { + serviceId: string; +} + +export interface CloseServiceSessionInput { + serviceId: string; + taskIds: string[]; +} + +export interface OpenProjectSessionInput { + projectId: string; +} + +export interface CloseProjectSessionInput { + projectId: string; + taskIds: string[]; +} + +export interface CreateSessionNoteInput { + sessionId: string; + content: string; + internal?: boolean; +} + +export interface UpdateSessionNoteInput { + content?: string; + internal?: boolean; +} + +export interface UpdateSessionMediaInput { + title?: string; + notes?: string; + internal?: boolean; +} diff --git a/frontend/src/lib/graphql/mutations/wave.ts b/frontend/src/lib/graphql/mutations/wave.ts new file mode 100644 index 0000000..37440bd --- /dev/null +++ b/frontend/src/lib/graphql/mutations/wave.ts @@ -0,0 +1,180 @@ +import { gql } from '@apollo/client/core'; + +// Mutations +export const CREATE_WAVE_INVOICE = gql` + mutation CreateWaveInvoice($input: CreateWaveInvoiceInput!) { + createWaveInvoice(input: $input) { + success + error + invoice { + id + invoiceNumber + invoiceDate + status + total { + value + currencyCode + currencySymbol + } + pdfUrl + } + nexusInvoice { + id + waveInvoiceId + } + } + } +`; + +export const CREATE_WAVE_CUSTOMER = gql` + mutation CreateWaveCustomer($input: CreateWaveCustomerInput!) 
{ + createWaveCustomer(input: $input) { + success + error + customer { + id + name + email + } + nexusCustomer { + id + waveCustomerId + } + } + } +`; + +export const LINK_CUSTOMER_TO_WAVE = gql` + mutation LinkCustomerToWave($customerId: UUID!, $waveCustomerId: String!) { + linkCustomerToWave(customerId: $customerId, waveCustomerId: $waveCustomerId) { + id + name + waveCustomerId + } + } +`; + +export const UNLINK_CUSTOMER_FROM_WAVE = gql` + mutation UnlinkCustomerFromWave($customerId: UUID!) { + unlinkCustomerFromWave(customerId: $customerId) { + id + name + waveCustomerId + } + } +`; + +// Invoice Lifecycle Mutations +export const APPROVE_WAVE_INVOICE = gql` + mutation ApproveWaveInvoice($invoiceId: String!) { + approveWaveInvoice(invoiceId: $invoiceId) { + success + error + data { + id + invoiceNumber + status + pdfUrl + } + } + } +`; + +export const SEND_WAVE_INVOICE = gql` + mutation SendWaveInvoice($invoiceId: String!) { + sendWaveInvoice(invoiceId: $invoiceId) { + success + error + data { + id + invoiceNumber + status + } + } + } +`; + +export const DELETE_WAVE_INVOICE = gql` + mutation DeleteWaveInvoice($invoiceId: String!) { + deleteWaveInvoice(invoiceId: $invoiceId) { + success + error + data + } + } +`; + +// Product Mutations +export const CREATE_WAVE_PRODUCT = gql` + mutation CreateWaveProduct($input: CreateWaveProductInput!) { + createWaveProduct(input: $input) { + success + error + data { + id + name + description + unitPrice + isSold + isArchived + } + } + } +`; + +export const UPDATE_WAVE_PRODUCT = gql` + mutation UpdateWaveProduct($input: UpdateWaveProductInput!) { + updateWaveProduct(input: $input) { + success + error + data { + id + name + description + unitPrice + isSold + isArchived + } + } + } +`; + +export const ARCHIVE_WAVE_PRODUCT = gql` + mutation ArchiveWaveProduct($productId: String!) 
{ + archiveWaveProduct(productId: $productId) { + success + error + data { + id + name + isArchived + } + } + } +`; + +// Customer Mutations +export const UPDATE_WAVE_CUSTOMER = gql` + mutation UpdateWaveCustomer($input: UpdateWaveCustomerInput!) { + updateWaveCustomer(input: $input) { + success + error + data { + id + name + email + currencyCode + } + } + } +`; + +export const DELETE_WAVE_CUSTOMER = gql` + mutation DeleteWaveCustomer($customerId: String!) { + deleteWaveCustomer(customerId: $customerId) { + success + error + data + } + } +`; diff --git a/frontend/src/lib/graphql/queries/account.ts b/frontend/src/lib/graphql/queries/account.ts new file mode 100644 index 0000000..8e103e0 --- /dev/null +++ b/frontend/src/lib/graphql/queries/account.ts @@ -0,0 +1,203 @@ +import { gql } from '@apollo/client/core'; + +export const ACCOUNT_QUERY = gql` + query Account($id: UUID!) { + account(id: $id) { + id + customerId + name + status + startDate + endDate + isActive + customer { + id + name + } + contacts { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + addresses { + id + accountId + name + streetAddress + city + state + zipCode + isActive + isPrimary + notes + labor { + id + amount + startDate + endDate + isActive + } + schedules { + id + name + monday + tuesday + wednesday + thursday + friday + saturday + sunday + weekendService + scheduleException + startDate + endDate + isActive + } + scopes { + id + name + description + isActive + areas { + id + name + order + tasks { + id + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } + } + } + revenues { + id + amount + startDate + endDate + waveServiceId + waveProductName + isActive + } + } + } +`; + +export interface AccountContact { + id: string; + firstName: string; + lastName: string; + email: string | null; + phone: string | null; + isActive: boolean; + isPrimary: boolean; + notes: string | null; +} + +export interface 
AddressLabor { + id: string; + amount: string; + startDate: string; + endDate: string | null; + isActive: boolean; +} + +export interface AddressSchedule { + id: string; + name: string | null; + monday: boolean; + tuesday: boolean; + wednesday: boolean; + thursday: boolean; + friday: boolean; + saturday: boolean; + sunday: boolean; + weekendService: boolean; + scheduleException: string | null; + startDate: string | null; + endDate: string | null; + isActive: boolean; +} + +export interface ScopeTask { + id: string; + scopeDescription: string; + checklistDescription: string; + sessionDescription: string; + frequency: string; + order: number; + estimatedMinutes: number | null; +} + +export interface ScopeArea { + id: string; + name: string; + order: number; + tasks: ScopeTask[]; +} + +export interface AddressScope { + id: string; + name: string; + description: string | null; + isActive: boolean; + areas: ScopeArea[]; +} + +export interface AccountAddress { + id: string; + accountId: string; + name: string | null; + streetAddress: string; + city: string; + state: string; + zipCode: string; + isActive: boolean; + isPrimary: boolean; + notes: string | null; + labor: AddressLabor[]; + schedules: AddressSchedule[]; + scopes: AddressScope[]; +} + +export interface AccountCustomer { + id: string; + name: string; +} + +export interface AccountRevenue { + id: string; + amount: string; // Decimal comes as string from GraphQL + startDate: string; + endDate: string | null; + waveServiceId: string | null; + waveProductName: string | null; + isActive: boolean; +} + +export interface Account { + id: string; + customerId: string; + name: string; + status: string; + startDate: string | null; + endDate: string | null; + isActive: boolean; + customer: AccountCustomer | null; + contacts: AccountContact[]; + addresses: AccountAddress[]; + revenues: AccountRevenue[]; +} + +export interface AccountQueryResult { + account: Account | null; +} diff --git 
a/frontend/src/lib/graphql/queries/accounts.ts b/frontend/src/lib/graphql/queries/accounts.ts new file mode 100644 index 0000000..2623c60 --- /dev/null +++ b/frontend/src/lib/graphql/queries/accounts.ts @@ -0,0 +1,132 @@ +import { gql } from '@apollo/client/core'; + +export const ACCOUNTS_QUERY = gql` + query Accounts($customerId: UUID, $filter: AccountFilter) { + accounts(customerId: $customerId, filter: $filter) { + id + customerId + name + status + isActive + customer { + id + name + } + addresses { + id + city + state + isPrimary + } + } + } +`; + +export type EntityStatus = 'ACTIVE' | 'INACTIVE' | 'PENDING'; + +export interface AccountFilterInput { + name?: string; + status?: EntityStatus; + isActive?: boolean; +} + +export interface AccountListAddress { + id: string; + city: string; + state: string; + isPrimary: boolean; +} + +export interface AccountListCustomer { + id: string; + name: string; +} + +export interface AccountListItem { + id: string; + customerId: string; + name: string; + status: EntityStatus; + isActive: boolean; + customer: AccountListCustomer | null; + addresses: AccountListAddress[]; +} + +export interface AccountsQueryResult { + accounts: AccountListItem[]; +} + +// ==================== ACCOUNTS WITH SCHEDULES QUERY ==================== + +export const ACCOUNTS_WITH_SCHEDULES_QUERY = gql` + query AccountsWithSchedules { + accounts(filter: { status: ACTIVE }) { + id + name + status + customer { + id + name + } + addresses { + id + name + city + state + isPrimary + isActive + schedules { + id + name + monday + tuesday + wednesday + thursday + friday + saturday + sunday + weekendService + startDate + endDate + } + } + } + } +`; + +export interface ScheduleForGeneration { + id: string; + name: string | null; + monday: boolean; + tuesday: boolean; + wednesday: boolean; + thursday: boolean; + friday: boolean; + saturday: boolean; + sunday: boolean; + weekendService: boolean; + startDate: string | null; + endDate: string | null; +} + +export 
interface AddressForGeneration { + id: string; + name: string | null; + city: string; + state: string; + isPrimary: boolean; + isActive: boolean; + schedules: ScheduleForGeneration[]; +} + +export interface AccountForGeneration { + id: string; + name: string; + status: EntityStatus; + customer: AccountListCustomer | null; + addresses: AddressForGeneration[]; +} + +export interface AccountsWithSchedulesQueryResult { + accounts: AccountForGeneration[]; +} diff --git a/frontend/src/lib/graphql/queries/calendar.ts b/frontend/src/lib/graphql/queries/calendar.ts new file mode 100644 index 0000000..7c2e93c --- /dev/null +++ b/frontend/src/lib/graphql/queries/calendar.ts @@ -0,0 +1,200 @@ +import { gql } from '@apollo/client/core'; + +export const CALENDAR_EVENTS_QUERY = gql` + query CalendarEvents($filter: CalendarEventFilterInput) { + calendarEvents(filter: $filter) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + attendees { + email + displayName + optional + responseStatus + } + colorId + htmlLink + created + updated + status + } + } +`; + +export const CALENDAR_EVENT_QUERY = gql` + query CalendarEvent($eventId: String!) 
{ + calendarEvent(eventId: $eventId) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + attendees { + email + displayName + optional + responseStatus + } + reminders { + useDefault + overrides { + method + minutes + } + } + colorId + htmlLink + created + updated + status + } + } +`; + +export const TODAYS_EVENTS_QUERY = gql` + query TodaysEvents { + todaysEvents { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + status + } + } +`; + +export const UPCOMING_EVENTS_QUERY = gql` + query UpcomingEvents($days: Int) { + upcomingEvents(days: $days) { + id + summary + description + location + start { + dateTime + date + timeZone + } + end { + dateTime + date + timeZone + } + colorId + status + } + } +`; + +// Types +export interface EventDateTime { + dateTime: string | null; + date: string | null; + timeZone: string | null; +} + +export interface Attendee { + email: string; + displayName: string | null; + optional: boolean | null; + responseStatus: string | null; +} + +export interface EventReminder { + method: string; + minutes: number; +} + +export interface EventReminders { + useDefault: boolean; + overrides: EventReminder[] | null; +} + +export interface CalendarEvent { + id: string; + summary: string; + description: string | null; + location: string | null; + start: EventDateTime; + end: EventDateTime; + attendees: Attendee[] | null; + reminders?: EventReminders | null; + colorId: string | null; + htmlLink: string | null; + created: string | null; + updated: string | null; + status: string | null; +} + +export interface CalendarEventFilterInput { + timeMin?: string; + timeMax?: string; + maxResults?: number; + q?: string; +} + +export interface CalendarEventsQueryResult { + calendarEvents: CalendarEvent[]; +} + +export interface CalendarEventQueryResult { + calendarEvent: CalendarEvent | null; +} + +export 
interface TodaysEventsQueryResult { + todaysEvents: CalendarEvent[]; +} + +export interface UpcomingEventsQueryResult { + upcomingEvents: CalendarEvent[]; +} + +// Google Calendar color IDs and their display values +export const EVENT_COLORS = [ + { id: '1', name: 'Lavender', bg: 'bg-purple-300', text: 'text-purple-900' }, + { id: '2', name: 'Sage', bg: 'bg-green-300', text: 'text-green-900' }, + { id: '3', name: 'Grape', bg: 'bg-violet-400', text: 'text-violet-900' }, + { id: '4', name: 'Flamingo', bg: 'bg-pink-400', text: 'text-pink-900' }, + { id: '5', name: 'Banana', bg: 'bg-yellow-300', text: 'text-yellow-900' }, + { id: '6', name: 'Tangerine', bg: 'bg-orange-400', text: 'text-orange-900' }, + { id: '7', name: 'Peacock', bg: 'bg-cyan-500', text: 'text-cyan-900' }, + { id: '8', name: 'Graphite', bg: 'bg-gray-400', text: 'text-gray-900' }, + { id: '9', name: 'Blueberry', bg: 'bg-blue-500', text: 'text-blue-100' }, + { id: '10', name: 'Basil', bg: 'bg-emerald-600', text: 'text-emerald-100' }, + { id: '11', name: 'Tomato', bg: 'bg-red-500', text: 'text-red-100' } +] as const; + +export function getEventColor(colorId: string | null) { + return EVENT_COLORS.find((c) => c.id === colorId) ?? null; +} diff --git a/frontend/src/lib/graphql/queries/customer.ts b/frontend/src/lib/graphql/queries/customer.ts new file mode 100644 index 0000000..03a01ca --- /dev/null +++ b/frontend/src/lib/graphql/queries/customer.ts @@ -0,0 +1,93 @@ +import { gql } from '@apollo/client/core'; + +export const CUSTOMER_QUERY = gql` + query Customer($id: UUID!) 
{ + customer(id: $id) { + id + name + status + startDate + endDate + billingTerms + billingEmail + waveCustomerId + waveCustomerName + isActive + contacts { + id + firstName + lastName + email + phone + isActive + isPrimary + notes + } + addresses { + id + name + streetAddress + city + state + zipCode + isActive + isPrimary + } + accounts { + id + name + status + isActive + } + } + } +`; + +export interface CustomerContact { + id: string; + firstName: string; + lastName: string; + email: string | null; + phone: string | null; + isActive: boolean; + isPrimary: boolean; + notes: string | null; +} + +export interface CustomerAddress { + id: string; + name: string | null; + streetAddress: string; + city: string; + state: string; + zipCode: string; + isActive: boolean; + isPrimary: boolean; +} + +export interface CustomerAccount { + id: string; + name: string; + status: string; + isActive: boolean; +} + +export interface Customer { + id: string; + name: string; + status: string; + startDate: string | null; + endDate: string | null; + billingTerms: string | null; + billingEmail: string | null; + waveCustomerId: string | null; + waveCustomerName: string | null; + isActive: boolean; + contacts: CustomerContact[]; + addresses: CustomerAddress[]; + accounts: CustomerAccount[]; +} + +export interface CustomerQueryResult { + customer: Customer | null; +} diff --git a/frontend/src/lib/graphql/queries/customerProfile.ts b/frontend/src/lib/graphql/queries/customerProfile.ts new file mode 100644 index 0000000..5dad5fe --- /dev/null +++ b/frontend/src/lib/graphql/queries/customerProfile.ts @@ -0,0 +1,67 @@ +import { gql } from '@apollo/client/core'; +import type { EntityStatus } from './team'; + +export const CUSTOMER_PROFILES_QUERY = gql` + query CustomerProfiles { + customerProfiles { + id + firstName + lastName + fullName + email + phone + status + notes + createdAt + updatedAt + } + } +`; + +export const CUSTOMER_PROFILE_QUERY = gql` + query CustomerProfile($id: UUID!) 
{ + customerProfile(id: $id) { + id + firstName + lastName + fullName + email + phone + status + notes + createdAt + updatedAt + customers { + id + name + } + } + } +`; + +export interface CustomerInfo { + id: string; + name: string; +} + +export interface CustomerProfile { + id: string; + firstName: string; + lastName: string; + fullName: string; + email: string | null; + phone: string | null; + status: EntityStatus; + notes: string | null; + createdAt: string; + updatedAt: string; + customers?: CustomerInfo[]; +} + +export interface CustomerProfilesQueryResult { + customerProfiles: CustomerProfile[]; +} + +export interface CustomerProfileQueryResult { + customerProfile: CustomerProfile; +} diff --git a/frontend/src/lib/graphql/queries/customers.ts b/frontend/src/lib/graphql/queries/customers.ts new file mode 100644 index 0000000..5b8ac5f --- /dev/null +++ b/frontend/src/lib/graphql/queries/customers.ts @@ -0,0 +1,37 @@ +import { gql } from '@apollo/client/core'; + +export const CUSTOMERS_QUERY = gql` + query Customers($filter: CustomerFilter) { + customers(filter: $filter) { + id + name + status + isActive + waveCustomerId + accounts { + id + } + } + } +`; + +export type EntityStatus = 'ACTIVE' | 'INACTIVE' | 'PENDING'; + +export interface CustomerFilterInput { + name?: string; + status?: EntityStatus; + isActive?: boolean; +} + +export interface CustomerListItem { + id: string; + name: string; + status: EntityStatus; + isActive: boolean; + waveCustomerId: string | null; + accounts: { id: string }[]; +} + +export interface CustomersQueryResult { + customers: CustomerListItem[]; +} diff --git a/frontend/src/lib/graphql/queries/events.ts b/frontend/src/lib/graphql/queries/events.ts new file mode 100644 index 0000000..538b4a8 --- /dev/null +++ b/frontend/src/lib/graphql/queries/events.ts @@ -0,0 +1,118 @@ +import { gql } from '@apollo/client/core'; + +export interface Event { + id: string; + eventType: string; + entityType: string; + entityId: string; + actorType: 
string | null; + actorId: string | null; + metadata: Record | null; + timestamp: string; + createdAt: string; + description: string; + actorProfile: { + __typename: string; + id: string; + fullName: string; + } | null; +} + +export const RECENT_EVENTS_QUERY = gql` + query RecentEvents($limit: Int, $offset: Int) { + recentEvents(limit: $limit, offset: $offset) { + id + eventType + entityType + entityId + actorType + actorId + metadata + timestamp + createdAt + description + actorProfile { + ... on TeamProfileType { + __typename + id + fullName + } + ... on CustomerProfileType { + __typename + id + fullName + } + } + } + } +`; + +export interface RecentEventsQueryResult { + recentEvents: Event[]; +} + +export const EVENTS_FOR_ENTITY_QUERY = gql` + query EventsForEntity($entityType: String!, $entityId: UUID!, $limit: Int, $offset: Int) { + eventsForEntity(entityType: $entityType, entityId: $entityId, limit: $limit, offset: $offset) { + id + eventType + entityType + entityId + actorType + actorId + metadata + timestamp + createdAt + description + actorProfile { + ... on TeamProfileType { + __typename + id + fullName + } + ... on CustomerProfileType { + __typename + id + fullName + } + } + } + } +`; + +export interface EventsForEntityQueryResult { + eventsForEntity: Event[]; +} + +export const EVENTS_BY_ACTOR_QUERY = gql` + query EventsByActor($actorType: String!, $actorId: UUID!, $limit: Int, $offset: Int) { + eventsByActor(actorType: $actorType, actorId: $actorId, limit: $limit, offset: $offset) { + id + eventType + entityType + entityId + actorType + actorId + metadata + timestamp + createdAt + description + actorProfile { + ... on TeamProfileType { + __typename + id + fullName + } + ... 
on CustomerProfileType { + __typename + id + fullName + } + } + } + } +`; + +export interface EventsByActorQueryResult { + eventsByActor: Event[]; +} diff --git a/frontend/src/lib/graphql/queries/invoices.ts b/frontend/src/lib/graphql/queries/invoices.ts new file mode 100644 index 0000000..a26f1ad --- /dev/null +++ b/frontend/src/lib/graphql/queries/invoices.ts @@ -0,0 +1,256 @@ +import { gql } from '@apollo/client/core'; + +// Query for paginated invoices with filtering +export const INVOICES_QUERY = gql` + query Invoices($filter: InvoiceFilterInput, $pagination: PaginationInput) { + invoices(filter: $filter, pagination: $pagination) { + items { + id + customerId + startDate + endDate + status + datePaid + waveInvoiceId + customer { + id + name + } + revenuesTotal + projectsTotal + totalAmount + revenueCount + projectCount + } + totalCount + hasNextPage + } + } +`; + +// Query for a single invoice with full details +export const INVOICE_QUERY = gql` + query Invoice($id: UUID!) { + invoice(id: $id) { + id + customerId + startDate + endDate + status + datePaid + waveInvoiceId + customer { + id + name + } + revenues { + id + revenueId + amount + account { + id + name + } + revenue { + id + amount + startDate + endDate + waveServiceId + isActive + } + } + projects { + id + projectId + amount + account { + id + name + } + project { + id + name + date + amount + formattedAddress + } + } + revenuesTotal + projectsTotal + totalAmount + revenueCount + projectCount + } + } +`; + +// Query for eligible revenues to add to an invoice +export const ELIGIBLE_REVENUES_QUERY = gql` + query EligibleRevenuesForInvoice($customerId: UUID!, $dateFrom: NaiveDate!, $dateTo: NaiveDate!) 
{ + eligibleRevenuesForInvoice(customerId: $customerId, dateFrom: $dateFrom, dateTo: $dateTo) { + revenueId + accountId + accountName + amount + revenue { + id + amount + startDate + endDate + waveServiceId + } + } + } +`; + +// Query for eligible projects to add to an invoice +export const ELIGIBLE_PROJECTS_QUERY = gql` + query EligibleProjectsForInvoice($customerId: UUID!, $dateFrom: NaiveDate!, $dateTo: NaiveDate!) { + eligibleProjectsForInvoice(customerId: $customerId, dateFrom: $dateFrom, dateTo: $dateTo) { + projectId + name + date + amount + project { + id + name + date + amount + formattedAddress + } + } + } +`; + +// Types +export type InvoiceStatus = 'DRAFT' | 'SENT' | 'PAID' | 'OVERDUE' | 'CANCELLED'; + +export interface InvoiceCustomer { + id: string; + name: string; +} + +export interface InvoiceAccount { + id: string; + name: string; +} + +export interface InvoiceRevenueEntry { + id: string; + revenueId: string; + amount: number; + account: InvoiceAccount | null; + revenue: { + id: string; + amount: number; + startDate: string; + endDate: string | null; + waveServiceId: string | null; + isActive: boolean; + } | null; +} + +export interface InvoiceProjectEntry { + id: string; + projectId: string; + amount: number; + account: InvoiceAccount | null; + project: { + id: string; + name: string; + date: string; + amount: number | null; + formattedAddress: string | null; + } | null; +} + +export interface InvoiceListItem { + id: string; + customerId: string; + startDate: string; + endDate: string; + status: InvoiceStatus; + datePaid: string | null; + waveInvoiceId: string | null; + customer: InvoiceCustomer | null; + revenuesTotal: number; + projectsTotal: number; + totalAmount: number; + revenueCount: number; + projectCount: number; +} + +export interface InvoiceDetail { + id: string; + customerId: string; + startDate: string; + endDate: string; + status: InvoiceStatus; + datePaid: string | null; + waveInvoiceId: string | null; + customer: InvoiceCustomer | 
null; + revenues: InvoiceRevenueEntry[]; + projects: InvoiceProjectEntry[]; + revenuesTotal: number; + projectsTotal: number; + totalAmount: number; + revenueCount: number; + projectCount: number; +} + +export interface InvoicesQueryResult { + invoices: { + items: InvoiceListItem[]; + totalCount: number; + hasNextPage: boolean; + }; +} + +export interface InvoiceQueryResult { + invoice: InvoiceDetail | null; +} + +export interface EligibleRevenue { + revenueId: string; + accountId: string; + accountName: string; + amount: number; + revenue: { + id: string; + amount: number; + startDate: string; + endDate: string | null; + waveServiceId: string | null; + } | null; +} + +export interface EligibleInvoiceProject { + projectId: string; + name: string; + date: string; + amount: number; + project: { + id: string; + name: string; + date: string; + amount: number | null; + formattedAddress: string | null; + } | null; +} + +export interface EligibleRevenuesQueryResult { + eligibleRevenuesForInvoice: EligibleRevenue[]; +} + +export interface EligibleProjectsQueryResult { + eligibleProjectsForInvoice: EligibleInvoiceProject[]; +} + +export interface InvoiceFilterInput { + customerId?: string; + status?: InvoiceStatus; + date?: string; + startDateFrom?: string; + endDateTo?: string; +} diff --git a/frontend/src/lib/graphql/queries/me.ts b/frontend/src/lib/graphql/queries/me.ts new file mode 100644 index 0000000..0e697a4 --- /dev/null +++ b/frontend/src/lib/graphql/queries/me.ts @@ -0,0 +1,31 @@ +import { gql } from '@apollo/client/core'; +import type { User } from '../../../app'; + +export const ME_QUERY = gql` + query Me { + me { + __typename + ... on TeamProfileType { + id + fullName + email + phone + role + } + ... 
on CustomerProfileType { + id + fullName + email + phone + customers { + id + name + } + } + } + } +`; + +export interface MeQueryResult { + me: User | null; +} diff --git a/frontend/src/lib/graphql/queries/messaging.ts b/frontend/src/lib/graphql/queries/messaging.ts new file mode 100644 index 0000000..b64d762 --- /dev/null +++ b/frontend/src/lib/graphql/queries/messaging.ts @@ -0,0 +1,311 @@ +import { gql } from '@apollo/client/core'; + +// ==================== CONVERSATION QUERIES ==================== + +export const MY_CONVERSATIONS_QUERY = gql` + query MyConversations($includeArchived: Boolean, $limit: Int, $offset: Int) { + myConversations(includeArchived: $includeArchived, limit: $limit, offset: $offset) { + id + subject + conversationType + entityType + entityId + lastMessageAt + isArchived + createdAt + updatedAt + unreadCount + participants { + id + participantType + participantId + isArchived + isMuted + participantProfile { + __typename + ... on TeamProfileType { + id + fullName + email + role + } + ... on CustomerProfileType { + id + fullName + email + } + } + } + createdByProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + } +`; + +export const CONVERSATION_QUERY = gql` + query Conversation($id: UUID!) { + conversation(id: $id) { + id + subject + conversationType + entityType + entityId + lastMessageAt + isArchived + createdAt + updatedAt + unreadCount + participants { + id + participantType + participantId + isArchived + isMuted + lastReadAt + unreadCount + participantProfile { + __typename + ... on TeamProfileType { + id + fullName + email + role + } + ... on CustomerProfileType { + id + fullName + email + } + } + } + messages(limit: 100) { + id + content + isDeleted + isSystemMessage + replyToId + attachments + createdAt + authorType + authorId + authorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... 
on CustomerProfileType { + id + fullName + } + } + } + } + } +`; + +export const CONVERSATIONS_BY_ENTITY_QUERY = gql` + query ConversationsByEntity($entityType: String!, $entityId: UUID!) { + conversationsByEntity(entityType: $entityType, entityId: $entityId) { + id + subject + conversationType + lastMessageAt + isArchived + createdAt + unreadCount + participants { + id + participantProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + } + } +`; + +export const UNREAD_MESSAGE_COUNT_QUERY = gql` + query UnreadMessageCount { + unreadMessageCount + } +`; + +// ==================== MESSAGE QUERIES ==================== + +export const MESSAGES_QUERY = gql` + query Messages( + $conversationId: UUID! + $limit: Int + $offset: Int + $includeDeleted: Boolean + $includeSystem: Boolean + ) { + messages( + conversationId: $conversationId + limit: $limit + offset: $offset + includeDeleted: $includeDeleted + includeSystem: $includeSystem + ) { + id + conversationId + content + isDeleted + isSystemMessage + replyToId + attachments + createdAt + authorType + authorId + authorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + replyTo { + id + content + authorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + } + } +`; + +export const MESSAGE_QUERY = gql` + query Message($id: UUID!) { + message(id: $id) { + id + conversationId + content + isDeleted + isSystemMessage + replyToId + attachments + createdAt + authorType + authorId + authorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... 
on CustomerProfileType { + id + fullName + } + } + } + } +`; + +// ==================== TYPES ==================== + +export type ConversationType = 'DIRECT' | 'GROUP' | 'SUPPORT'; + +export interface ProfileRef { + __typename: 'TeamProfileType' | 'CustomerProfileType'; + id: string; + fullName: string; + email?: string; + role?: string; +} + +export interface ConversationParticipant { + id: string; + participantType: string; + participantId: string; + isArchived: boolean; + isMuted: boolean; + lastReadAt?: string; + unreadCount?: number; + participantProfile: ProfileRef | null; +} + +export interface Message { + id: string; + conversationId: string; + content: string; + isDeleted: boolean; + isSystemMessage: boolean; + replyToId: string | null; + attachments: unknown | null; + createdAt: string; + authorType: string; + authorId: string; + authorProfile: ProfileRef | null; + replyTo?: Message | null; +} + +export interface Conversation { + id: string; + subject: string | null; + conversationType: ConversationType; + entityType: string | null; + entityId: string | null; + lastMessageAt: string | null; + isArchived: boolean; + createdAt: string; + updatedAt: string; + unreadCount: number; + participants: ConversationParticipant[]; + createdByProfile: ProfileRef | null; + messages?: Message[]; +} + +export interface MyConversationsQueryResult { + myConversations: Conversation[]; +} + +export interface ConversationQueryResult { + conversation: Conversation | null; +} + +export interface ConversationsByEntityQueryResult { + conversationsByEntity: Conversation[]; +} + +export interface UnreadMessageCountQueryResult { + unreadMessageCount: number; +} + +export interface MessagesQueryResult { + messages: Message[]; +} + +export interface MessageQueryResult { + message: Message | null; +} diff --git a/frontend/src/lib/graphql/queries/myProfile.ts b/frontend/src/lib/graphql/queries/myProfile.ts new file mode 100644 index 0000000..e899956 --- /dev/null +++ 
b/frontend/src/lib/graphql/queries/myProfile.ts @@ -0,0 +1,76 @@ +import { gql } from '@apollo/client/core'; +import type { TeamRole, EntityStatus } from './team'; +import type { CustomerInfo } from './customerProfile'; + +export const MY_PROFILE_QUERY = gql` + query MyProfile { + me { + __typename + ... on TeamProfileType { + id + firstName + lastName + fullName + email + phone + role + status + notes + createdAt + updatedAt + } + ... on CustomerProfileType { + id + firstName + lastName + fullName + email + phone + status + notes + createdAt + updatedAt + customers { + id + name + } + } + } + } +`; + +export interface MyTeamProfile { + __typename: 'TeamProfileType'; + id: string; + firstName: string; + lastName: string; + fullName: string; + email: string | null; + phone: string | null; + role: TeamRole; + status: EntityStatus; + notes: string | null; + createdAt: string; + updatedAt: string; +} + +export interface MyCustomerProfile { + __typename: 'CustomerProfileType'; + id: string; + firstName: string; + lastName: string; + fullName: string; + email: string | null; + phone: string | null; + status: EntityStatus; + notes: string | null; + createdAt: string; + updatedAt: string; + customers: CustomerInfo[]; +} + +export type MyProfile = MyTeamProfile | MyCustomerProfile; + +export interface MyProfileQueryResult { + me: MyProfile | null; +} diff --git a/frontend/src/lib/graphql/queries/notifications.ts b/frontend/src/lib/graphql/queries/notifications.ts new file mode 100644 index 0000000..c56106b --- /dev/null +++ b/frontend/src/lib/graphql/queries/notifications.ts @@ -0,0 +1,299 @@ +import { gql } from '@apollo/client/core'; + +// ==================== USER NOTIFICATION QUERIES ==================== + +export const MY_NOTIFICATIONS_QUERY = gql` + query MyNotifications($unreadOnly: Boolean, $limit: Int, $offset: Int) { + myNotifications(unreadOnly: $unreadOnly, limit: $limit, offset: $offset) { + id + recipientType + recipientId + ruleId + eventId + status + subject 
+ body + actionUrl + readAt + isRead + createdAt + updatedAt + event { + id + eventType + entityType + entityId + timestamp + } + } + } +`; + +export const MY_UNREAD_NOTIFICATION_COUNT_QUERY = gql` + query MyUnreadNotificationCount { + myUnreadNotificationCount + } +`; + +export const NOTIFICATION_QUERY = gql` + query Notification($id: UUID!) { + notification(id: $id) { + id + recipientType + recipientId + ruleId + eventId + status + subject + body + actionUrl + readAt + isRead + metadata + createdAt + updatedAt + recipientProfile { + __typename + ... on TeamProfileType { + id + fullName + email + } + ... on CustomerProfileType { + id + fullName + email + } + } + event { + id + eventType + entityType + entityId + actorType + actorId + timestamp + metadata + actorProfile { + __typename + ... on TeamProfileType { + id + fullName + } + ... on CustomerProfileType { + id + fullName + } + } + } + rule { + id + name + } + deliveries { + id + channel + status + attempts + sentAt + deliveredAt + errorMessage + isSuccessful + isFailed + } + } + } +`; + +// ==================== ADMIN NOTIFICATION RULE QUERIES ==================== + +export const NOTIFICATION_RULES_QUERY = gql` + query NotificationRules($isActive: Boolean) { + notificationRules(isActive: $isActive) { + id + name + description + isActive + eventTypes + channels + targetRoles + conditions + subjectTemplate + bodyTemplate + createdAt + updatedAt + } + } +`; + +export const NOTIFICATION_RULE_QUERY = gql` + query NotificationRule($id: UUID!) { + notificationRule(id: $id) { + id + name + description + isActive + eventTypes + channels + targetRoles + conditions + subjectTemplate + bodyTemplate + createdAt + updatedAt + targetTeamProfiles { + id + fullName + email + role + } + targetCustomerProfiles { + id + fullName + email + } + } + } +`; + +export const NOTIFICATION_DELIVERIES_QUERY = gql` + query NotificationDeliveries($notificationId: UUID!) 
{ + notificationDeliveries(notificationId: $notificationId) { + id + notificationId + channel + status + attempts + lastAttemptAt + sentAt + deliveredAt + errorMessage + externalId + isSuccessful + isFailed + createdAt + updatedAt + } + } +`; + +// ==================== TYPES ==================== + +export type NotificationStatus = 'PENDING' | 'SENT' | 'READ' | 'FAILED'; +export type NotificationChannel = 'IN_APP' | 'EMAIL' | 'SMS'; +export type DeliveryStatus = + | 'PENDING' + | 'QUEUED' + | 'SENDING' + | 'SENT' + | 'DELIVERED' + | 'FAILED' + | 'BOUNCED'; + +export interface ProfileRef { + __typename: 'TeamProfileType' | 'CustomerProfileType'; + id: string; + fullName: string; + email?: string; + role?: string; +} + +export interface NotificationEvent { + id: string; + eventType: string; + entityType: string; + entityId: string; + actorType?: string; + actorId?: string; + timestamp: string; + metadata?: unknown; + actorProfile?: ProfileRef | null; +} + +export interface NotificationDelivery { + id: string; + notificationId: string; + channel: NotificationChannel; + status: DeliveryStatus; + attempts: number; + lastAttemptAt: string | null; + sentAt: string | null; + deliveredAt: string | null; + errorMessage: string | null; + externalId: string | null; + isSuccessful: boolean; + isFailed: boolean; + createdAt: string; + updatedAt: string; +} + +export interface NotificationRule { + id: string; + name: string; + description: string | null; + isActive: boolean; + eventTypes: string[]; + channels: string[]; + targetRoles: string[] | null; + conditions: unknown | null; + subjectTemplate: string | null; + bodyTemplate: string | null; + createdAt: string; + updatedAt: string; + targetTeamProfiles?: Array<{ + id: string; + fullName: string; + email: string | null; + role: string; + }>; + targetCustomerProfiles?: Array<{ + id: string; + fullName: string; + email: string | null; + }>; +} + +export interface Notification { + id: string; + recipientType: string; + 
recipientId: string; + ruleId: string | null; + eventId: string | null; + status: NotificationStatus; + subject: string; + body: string; + actionUrl: string | null; + readAt: string | null; + isRead: boolean; + metadata?: unknown; + createdAt: string; + updatedAt: string; + recipientProfile?: ProfileRef | null; + event?: NotificationEvent | null; + rule?: { id: string; name: string } | null; + deliveries?: NotificationDelivery[]; +} + +export interface MyNotificationsQueryResult { + myNotifications: Notification[]; +} + +export interface MyUnreadNotificationCountQueryResult { + myUnreadNotificationCount: number; +} + +export interface NotificationQueryResult { + notification: Notification | null; +} + +export interface NotificationRulesQueryResult { + notificationRules: NotificationRule[]; +} + +export interface NotificationRuleQueryResult { + notificationRule: NotificationRule | null; +} + +export interface NotificationDeliveriesQueryResult { + notificationDeliveries: NotificationDelivery[]; +} diff --git a/frontend/src/lib/graphql/queries/project-scope-templates.ts b/frontend/src/lib/graphql/queries/project-scope-templates.ts new file mode 100644 index 0000000..4729084 --- /dev/null +++ b/frontend/src/lib/graphql/queries/project-scope-templates.ts @@ -0,0 +1,85 @@ +import { gql } from '@apollo/client/core'; + +export const PROJECT_SCOPE_TEMPLATES_QUERY = gql` + query ProjectScopeTemplates($isActive: Boolean) { + projectScopeTemplates(isActive: $isActive) { + id + name + description + isActive + categoryCount + taskCount + } + } +`; + +export const PROJECT_SCOPE_TEMPLATE_QUERY = gql` + query ProjectScopeTemplate($id: UUID!) 
{ + projectScopeTemplate(id: $id) { + id + name + description + isActive + categories { + id + templateId + name + order + taskCount + tasks { + id + categoryId + scopeDescription + checklistDescription + sessionDescription + order + estimatedMinutes + } + } + } + } +`; + +export interface ProjectScopeTemplateListItem { + id: string; + name: string; + description: string | null; + isActive: boolean; + categoryCount: number; + taskCount: number; +} + +export interface ProjectScopeTemplateTask { + id: string; + categoryId: string; + scopeDescription: string; + checklistDescription: string; + sessionDescription: string; + order: number; + estimatedMinutes: number | null; +} + +export interface ProjectScopeTemplateCategory { + id: string; + templateId: string; + name: string; + order: number; + taskCount: number; + tasks: ProjectScopeTemplateTask[]; +} + +export interface ProjectScopeTemplate { + id: string; + name: string; + description: string | null; + isActive: boolean; + categories: ProjectScopeTemplateCategory[]; +} + +export interface ProjectScopeTemplatesQueryResult { + projectScopeTemplates: ProjectScopeTemplateListItem[]; +} + +export interface ProjectScopeTemplateQueryResult { + projectScopeTemplate: ProjectScopeTemplate | null; +} diff --git a/frontend/src/lib/graphql/queries/projects.ts b/frontend/src/lib/graphql/queries/projects.ts new file mode 100644 index 0000000..691db70 --- /dev/null +++ b/frontend/src/lib/graphql/queries/projects.ts @@ -0,0 +1,286 @@ +import { gql } from '@apollo/client/core'; + +export const PROJECTS_QUERY = gql` + query Projects($filter: ProjectFilterInput, $pagination: PaginationInput) { + projects(filter: $filter, pagination: $pagination) { + items { + id + customerId + name + date + status + labor + amount + notes + calendarEventId + accountAddressId + streetAddress + city + state + zipCode + formattedAddress + customer { + id + name + } + accountAddress { + id + name + streetAddress + city + state + zipCode + account { + id + 
name + } + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + totalCount + hasNextPage + } + } +`; + +export const PROJECT_QUERY = gql` + query Project($id: UUID!) { + project(id: $id) { + id + customerId + name + date + status + labor + amount + notes + calendarEventId + waveServiceId + waveProductName + accountAddressId + streetAddress + city + state + zipCode + formattedAddress + customer { + id + name + } + accountAddress { + id + name + streetAddress + city + state + zipCode + account { + id + name + } + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + scopes { + id + name + description + isActive + categories { + id + name + order + tasks { + id + scopeDescription + checklistDescription + sessionDescription + order + estimatedMinutes + } + } + } + } + } +`; + +export const PROJECT_STATUS_COUNTS_QUERY = gql` + query ProjectStatusCounts( + $dateFrom: NaiveDate + $dateTo: NaiveDate + $teamProfileId: UUID + $customerIds: [UUID!] 
+ ) { + projectStatusCounts( + dateFrom: $dateFrom + dateTo: $dateTo + teamProfileId: $teamProfileId + customerIds: $customerIds + ) { + scheduled + inProgress + completed + cancelled + } + } +`; + +export type WorkStatus = 'SCHEDULED' | 'IN_PROGRESS' | 'COMPLETED' | 'CANCELLED'; + +export interface ProjectFilterInput { + dateFrom?: string; + dateTo?: string; + status?: WorkStatus; + customerId?: string; + customerIds?: string[]; + accountAddressId?: string; + teamProfileId?: string; +} + +export interface PaginationInput { + offset?: number; + limit?: number; +} + +export interface ProjectCustomer { + id: string; + name: string; +} + +export interface ProjectAddressAccount { + id: string; + name: string; +} + +export interface ProjectAddress { + id: string; + name: string | null; + streetAddress: string; + city: string; + state: string; + zipCode: string; + account: ProjectAddressAccount; +} + +export type TeamRole = 'ADMIN' | 'TEAM_LEADER' | 'TEAM_MEMBER'; + +export interface ProjectTeamMemberProfile { + id: string; + fullName: string; + role: TeamRole; +} + +export interface ProjectTeamMember { + id: string; + teamProfileId: string; + teamProfile: ProjectTeamMemberProfile | null; +} + +export interface ProjectListItem { + id: string; + customerId: string; + name: string; + date: string; + status: WorkStatus; + labor: number | null; + amount: number | null; + notes: string | null; + calendarEventId: string | null; + accountAddressId: string | null; + streetAddress: string | null; + city: string | null; + state: string | null; + zipCode: string | null; + formattedAddress: string | null; + customer: ProjectCustomer | null; + accountAddress: ProjectAddress | null; + teamMembers: ProjectTeamMember[]; +} + +export interface ProjectConnection { + items: ProjectListItem[]; + totalCount: number; + hasNextPage: boolean; +} + +export interface ProjectsQueryResult { + projects: ProjectConnection; +} + +export interface ProjectStatusCounts { + scheduled: number; + 
inProgress: number; + completed: number; + cancelled: number; +} + +export interface ProjectStatusCountsQueryResult { + projectStatusCounts: ProjectStatusCounts; +} + +export interface ProjectScopeTask { + id: string; + scopeDescription: string; + checklistDescription: string; + sessionDescription: string; + order: number; + estimatedMinutes: number | null; +} + +export interface ProjectScopeCategory { + id: string; + name: string; + order: number; + tasks: ProjectScopeTask[]; +} + +export interface ProjectScope { + id: string; + name: string; + description: string | null; + isActive: boolean; + categories: ProjectScopeCategory[]; +} + +export interface ProjectDetail { + id: string; + customerId: string; + name: string; + date: string; + status: WorkStatus; + labor: number | null; + amount: number | null; + notes: string | null; + calendarEventId: string | null; + waveServiceId: string | null; + waveProductName: string | null; + accountAddressId: string | null; + streetAddress: string | null; + city: string | null; + state: string | null; + zipCode: string | null; + formattedAddress: string | null; + customer: ProjectCustomer | null; + accountAddress: ProjectAddress | null; + teamMembers: ProjectTeamMember[]; + scopes: ProjectScope[]; +} + +export interface ProjectQueryResult { + project: ProjectDetail | null; +} diff --git a/frontend/src/lib/graphql/queries/reports.ts b/frontend/src/lib/graphql/queries/reports.ts new file mode 100644 index 0000000..f9564ae --- /dev/null +++ b/frontend/src/lib/graphql/queries/reports.ts @@ -0,0 +1,302 @@ +import { gql } from '@apollo/client/core'; + +// Query for paginated reports with filtering +export const REPORTS_QUERY = gql` + query Reports($filter: ReportFilterInput, $pagination: PaginationInput) { + reports(filter: $filter, pagination: $pagination) { + items { + id + teamProfileId + startDate + endDate + status + teamProfile { + id + fullName + role + } + servicesTotal + projectsTotal + totalLabor + serviceCount + 
projectCount + } + totalCount + hasNextPage + } + } +`; + +// Query for a single report with full details +export const REPORT_QUERY = gql` + query Report($id: UUID!) { + report(id: $id) { + id + teamProfileId + startDate + endDate + status + teamProfile { + id + fullName + role + } + services { + id + serviceId + laborShare + service { + id + date + account { + id + name + customer { + id + name + } + } + accountAddress { + id + name + streetAddress + city + } + } + } + projects { + id + projectId + laborShare + project { + id + name + date + customer { + id + name + } + formattedAddress + } + } + servicesTotal + projectsTotal + totalLabor + serviceCount + projectCount + } + } +`; + +// Query for eligible services to add to a report +export const ELIGIBLE_SERVICES_QUERY = gql` + query EligibleServicesForReport( + $teamProfileId: UUID! + $dateFrom: NaiveDate! + $dateTo: NaiveDate! + ) { + eligibleServicesForReport(teamProfileId: $teamProfileId, dateFrom: $dateFrom, dateTo: $dateTo) { + serviceId + date + laborShare + laborTotal + teamMemberCount + service { + id + date + account { + id + name + customer { + id + name + } + } + accountAddress { + id + name + streetAddress + city + } + } + } + } +`; + +// Query for eligible projects to add to a report +export const ELIGIBLE_PROJECTS_QUERY = gql` + query EligibleProjectsForReport( + $teamProfileId: UUID! + $dateFrom: NaiveDate! + $dateTo: NaiveDate! 
+ ) { + eligibleProjectsForReport(teamProfileId: $teamProfileId, dateFrom: $dateFrom, dateTo: $dateTo) { + projectId + date + laborShare + laborTotal + teamMemberCount + project { + id + name + date + customer { + id + name + } + formattedAddress + } + } + } +`; + +// Types +export type ReportStatus = 'DRAFT' | 'FINALIZED' | 'PAID'; + +export interface ReportTeamProfile { + id: string; + fullName: string; + role: 'ADMIN' | 'TEAM_LEADER' | 'TEAM_MEMBER'; +} + +export interface ReportServiceEntry { + id: string; + serviceId: string; + laborShare: number; + service: { + id: string; + date: string; + account: { + id: string; + name: string; + customer: { + id: string; + name: string; + } | null; + } | null; + accountAddress: { + id: string; + name: string | null; + streetAddress: string; + city: string; + } | null; + } | null; +} + +export interface ReportProjectEntry { + id: string; + projectId: string; + laborShare: number; + project: { + id: string; + name: string; + date: string; + customer: { + id: string; + name: string; + } | null; + formattedAddress: string | null; + } | null; +} + +export interface ReportListItem { + id: string; + teamProfileId: string; + startDate: string; + endDate: string; + status: ReportStatus; + teamProfile: ReportTeamProfile | null; + servicesTotal: number; + projectsTotal: number; + totalLabor: number; + serviceCount: number; + projectCount: number; +} + +export interface ReportDetail { + id: string; + teamProfileId: string; + startDate: string; + endDate: string; + status: ReportStatus; + teamProfile: ReportTeamProfile | null; + services: ReportServiceEntry[]; + projects: ReportProjectEntry[]; + servicesTotal: number; + projectsTotal: number; + totalLabor: number; + serviceCount: number; + projectCount: number; +} + +export interface ReportsQueryResult { + reports: { + items: ReportListItem[]; + totalCount: number; + hasNextPage: boolean; + }; +} + +export interface ReportQueryResult { + report: ReportDetail | null; +} + +export 
interface EligibleService { + serviceId: string; + date: string; + laborShare: number; + laborTotal: number; + teamMemberCount: number; + service: { + id: string; + date: string; + account: { + id: string; + name: string; + customer: { + id: string; + name: string; + } | null; + } | null; + accountAddress: { + id: string; + name: string | null; + streetAddress: string; + city: string; + } | null; + } | null; +} + +export interface EligibleProject { + projectId: string; + date: string; + laborShare: number; + laborTotal: number; + teamMemberCount: number; + project: { + id: string; + name: string; + date: string; + customer: { + id: string; + name: string; + } | null; + formattedAddress: string | null; + } | null; +} + +export interface EligibleServicesQueryResult { + eligibleServicesForReport: EligibleService[]; +} + +export interface EligibleProjectsQueryResult { + eligibleProjectsForReport: EligibleProject[]; +} + +export interface ReportFilterInput { + teamProfileId?: string; + status?: ReportStatus; + date?: string; + startDateFrom?: string; + endDateTo?: string; +} diff --git a/frontend/src/lib/graphql/queries/service-scope-templates.ts b/frontend/src/lib/graphql/queries/service-scope-templates.ts new file mode 100644 index 0000000..b59553b --- /dev/null +++ b/frontend/src/lib/graphql/queries/service-scope-templates.ts @@ -0,0 +1,116 @@ +import { gql } from '@apollo/client/core'; + +export const SERVICE_SCOPE_TEMPLATES_QUERY = gql` + query ServiceScopeTemplates($isActive: Boolean) { + serviceScopeTemplates(isActive: $isActive) { + id + name + description + isActive + areaCount + taskCount + } + } +`; + +export const SERVICE_SCOPE_TEMPLATE_QUERY = gql` + query ServiceScopeTemplate($id: UUID!) 
{ + serviceScopeTemplate(id: $id) { + id + name + description + isActive + areas { + id + templateId + name + order + taskCount + tasks { + id + areaId + scopeDescription + checklistDescription + sessionDescription + frequency + order + estimatedMinutes + } + } + } + } +`; + +export type TaskFrequency = + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'TRIANNUAL' + | 'ANNUAL' + | 'AS_NEEDED'; + +export interface ServiceScopeTemplateListItem { + id: string; + name: string; + description: string | null; + isActive: boolean; + areaCount: number; + taskCount: number; +} + +export interface ServiceScopeTemplateTask { + id: string; + areaId: string; + scopeDescription: string; + checklistDescription: string; + sessionDescription: string; + frequency: TaskFrequency; + order: number; + estimatedMinutes: number | null; +} + +export interface ServiceScopeTemplateArea { + id: string; + templateId: string; + name: string; + order: number; + taskCount: number; + tasks: ServiceScopeTemplateTask[]; +} + +export interface ServiceScopeTemplate { + id: string; + name: string; + description: string | null; + isActive: boolean; + areas: ServiceScopeTemplateArea[]; +} + +export interface ServiceScopeTemplatesQueryResult { + serviceScopeTemplates: ServiceScopeTemplateListItem[]; +} + +export interface ServiceScopeTemplateQueryResult { + serviceScopeTemplate: ServiceScopeTemplate | null; +} + +export const FREQUENCY_LABELS: Record = { + DAILY: 'Daily', + WEEKLY: 'Weekly', + MONTHLY: 'Monthly', + QUARTERLY: 'Quarterly', + TRIANNUAL: 'Triannual', + ANNUAL: 'Annual', + AS_NEEDED: 'As Needed' +}; + +export const FREQUENCY_SHORT_LABELS: Record = { + DAILY: 'D', + WEEKLY: 'W', + MONTHLY: 'M', + QUARTERLY: 'Q', + TRIANNUAL: '3Y', + ANNUAL: 'A', + AS_NEEDED: 'PRN' +}; diff --git a/frontend/src/lib/graphql/queries/services.ts b/frontend/src/lib/graphql/queries/services.ts new file mode 100644 index 0000000..c03aa52 --- /dev/null +++ b/frontend/src/lib/graphql/queries/services.ts @@ -0,0 
+1,396 @@ +import { gql } from '@apollo/client/core'; + +export const SERVICES_QUERY = gql` + query Services($filter: ServiceFilterInput, $pagination: PaginationInput) { + services(filter: $filter, pagination: $pagination) { + items { + id + accountId + accountAddressId + date + status + notes + calendarEventId + account { + id + name + customer { + id + name + } + } + accountAddress { + id + name + streetAddress + city + state + zipCode + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + totalCount + hasNextPage + } + } +`; + +export const SERVICE_QUERY = gql` + query Service($id: UUID!) { + service(id: $id) { + id + accountId + accountAddressId + date + status + notes + calendarEventId + account { + id + name + status + customer { + id + name + } + } + accountAddress { + id + name + streetAddress + city + state + zipCode + labor { + id + amount + startDate + endDate + isActive + } + activeScope { + id + name + description + isActive + areas { + id + name + order + tasks { + id + scopeDescription + sessionDescription + frequency + order + } + } + } + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + } +`; + +export const SERVICE_STATUS_COUNTS_QUERY = gql` + query ServiceStatusCounts( + $dateFrom: NaiveDate + $dateTo: NaiveDate + $teamProfileId: UUID + $customerIds: [UUID!] 
+ ) { + serviceStatusCounts( + dateFrom: $dateFrom + dateTo: $dateTo + teamProfileId: $teamProfileId + customerIds: $customerIds + ) { + scheduled + inProgress + completed + cancelled + } + } +`; + +export type WorkStatus = 'SCHEDULED' | 'IN_PROGRESS' | 'COMPLETED' | 'CANCELLED'; + +export interface ServiceFilterInput { + dateFrom?: string; + dateTo?: string; + status?: WorkStatus; + accountId?: string; + accountAddressId?: string; + teamProfileId?: string; + customerId?: string; + customerIds?: string[]; +} + +export interface PaginationInput { + offset?: number; + limit?: number; +} + +export interface ServiceAccount { + id: string; + name: string; + customer: { + id: string; + name: string; + } | null; +} + +export interface ServiceAddress { + id: string; + name: string | null; + streetAddress: string; + city: string; + state: string; + zipCode: string; +} + +export type TeamRole = 'ADMIN' | 'TEAM_LEADER' | 'TEAM_MEMBER'; + +export interface ServiceTeamMemberProfile { + id: string; + fullName: string; + role: TeamRole; +} + +export interface ServiceTeamMember { + id: string; + teamProfileId: string; + teamProfile: ServiceTeamMemberProfile | null; +} + +export interface ServiceListItem { + id: string; + accountId: string; + accountAddressId: string; + date: string; + status: WorkStatus; + notes: string | null; + calendarEventId: string | null; + account: ServiceAccount | null; + accountAddress: ServiceAddress | null; + teamMembers: ServiceTeamMember[]; +} + +export interface ServiceConnection { + items: ServiceListItem[]; + totalCount: number; + hasNextPage: boolean; +} + +export interface ServicesQueryResult { + services: ServiceConnection; +} + +export interface ServiceStatusCounts { + scheduled: number; + inProgress: number; + completed: number; + cancelled: number; +} + +export interface ServiceStatusCountsQueryResult { + serviceStatusCounts: ServiceStatusCounts; +} + +export interface ServiceLabor { + id: string; + amount: string; + startDate: string; + 
endDate: string | null; + isActive: boolean; +} + +export type TaskFrequency = + | 'DAILY' + | 'WEEKLY' + | 'MONTHLY' + | 'QUARTERLY' + | 'TRIANNUAL' + | 'ANNUAL' + | 'AS_NEEDED'; + +export interface ServiceScopeTask { + id: string; + scopeDescription: string; + sessionDescription: string; + frequency: TaskFrequency; + order: number; +} + +export interface ServiceScopeArea { + id: string; + name: string; + order: number; + tasks: ServiceScopeTask[]; +} + +export interface ServiceScope { + id: string; + name: string; + description: string | null; + isActive: boolean; + areas: ServiceScopeArea[]; +} + +export interface ServiceDetail { + id: string; + accountId: string; + accountAddressId: string; + date: string; + status: WorkStatus; + notes: string | null; + calendarEventId: string | null; + account: { + id: string; + name: string; + status: string; + customer: { + id: string; + name: string; + } | null; + } | null; + accountAddress: { + id: string; + name: string | null; + streetAddress: string; + city: string; + state: string; + zipCode: string; + labor: ServiceLabor[]; + activeScope: ServiceScope | null; + } | null; + teamMembers: ServiceTeamMember[]; +} + +export interface ServiceQueryResult { + service: ServiceDetail | null; +} + +// ==================== SERVICES FOR ASSIGNMENT QUERY ==================== + +export const SERVICES_FOR_ASSIGNMENT_QUERY = gql` + query ServicesForAssignment($dateFrom: NaiveDate!, $dateTo: NaiveDate!) 
{ + servicesForAssignment(dateFrom: $dateFrom, dateTo: $dateTo) { + unassigned { + id + date + status + account { + id + name + } + accountAddress { + id + name + city + state + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + readyToAssign { + id + date + status + account { + id + name + } + accountAddress { + id + name + city + state + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + assigned { + id + date + status + account { + id + name + } + accountAddress { + id + name + city + state + } + teamMembers { + id + teamProfileId + teamProfile { + id + fullName + role + } + } + } + } + } +`; + +export interface ServiceForAssignment { + id: string; + date: string; + status: WorkStatus; + account: { + id: string; + name: string; + } | null; + accountAddress: { + id: string; + name: string | null; + city: string; + state: string; + } | null; + teamMembers: ServiceTeamMember[]; +} + +export interface ServicesForAssignmentResult { + unassigned: ServiceForAssignment[]; + readyToAssign: ServiceForAssignment[]; + assigned: ServiceForAssignment[]; +} + +export interface ServicesForAssignmentQueryResult { + servicesForAssignment: ServicesForAssignmentResult; +} diff --git a/frontend/src/lib/graphql/queries/session.ts b/frontend/src/lib/graphql/queries/session.ts new file mode 100644 index 0000000..469a19f --- /dev/null +++ b/frontend/src/lib/graphql/queries/session.ts @@ -0,0 +1,916 @@ +import { gql } from '@apollo/client/core'; + +// ==================== SESSION FRAGMENTS ==================== + +export const SESSION_NOTE_FRAGMENT = gql` + fragment SessionNote on ServiceSessionNoteType { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } +`; + +export const SESSION_IMAGE_FRAGMENT = gql` + fragment SessionImage on ServiceSessionImageType { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + 
notes + internal + createdAt + uploadedBy { + id + fullName + } + } +`; + +export const SESSION_VIDEO_FRAGMENT = gql` + fragment SessionVideo on ServiceSessionVideoType { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } +`; + +export const TASK_COMPLETION_FRAGMENT = gql` + fragment TaskCompletion on ServiceTaskCompletionType { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + frequency + order + } + completedBy { + id + fullName + } + } +`; + +// ==================== SERVICE SESSION QUERIES ==================== + +export const SERVICE_SESSION_QUERY = gql` + query ServiceSession($id: UUID!) { + serviceSession(id: $id) { + id + serviceId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + frequency + order + } + completedBy { + id + fullName + } + } + } + } +`; + +export const SERVICE_SESSIONS_QUERY = gql` + query ServiceSessions($serviceId: UUID!) 
{ + serviceSessions(serviceId: $serviceId) { + id + serviceId + start + end + date + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + } + } +`; + +export const LATEST_SERVICE_SESSION_QUERY = gql` + query LatestServiceSession($serviceId: UUID!) { + latestServiceSession(serviceId: $serviceId) { + id + serviceId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + frequency + order + } + completedBy { + id + fullName + } + } + } + } +`; + +export const ACTIVE_SERVICE_SESSION_QUERY = gql` + query ActiveServiceSession($serviceId: UUID!) 
{ + activeServiceSession(serviceId: $serviceId) { + id + serviceId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + frequency + order + } + completedBy { + id + fullName + } + } + } + } +`; + +// ==================== PROJECT SESSION QUERIES ==================== + +export const PROJECT_SESSION_QUERY = gql` + query ProjectSession($id: UUID!) 
{ + projectSession(id: $id) { + id + projectId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + order + } + completedBy { + id + fullName + } + } + } + } +`; + +export const PROJECT_SESSIONS_QUERY = gql` + query ProjectSessions($projectId: UUID!) { + projectSessions(projectId: $projectId) { + id + projectId + start + end + date + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + } + } +`; + +export const LATEST_PROJECT_SESSION_QUERY = gql` + query LatestProjectSession($projectId: UUID!) 
{ + latestProjectSession(projectId: $projectId) { + id + projectId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + closedBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + order + } + completedBy { + id + fullName + } + } + } + } +`; + +export const ACTIVE_PROJECT_SESSION_QUERY = gql` + query ActiveProjectSession($projectId: UUID!) 
{ + activeProjectSession(projectId: $projectId) { + id + projectId + accountId + accountAddressId + customerId + scopeId + start + end + date + createdById + closedById + createdAt + updatedAt + isActive + durationSeconds + createdBy { + id + fullName + } + notes { + id + sessionId + content + authorId + internal + createdAt + updatedAt + author { + id + fullName + } + } + images { + id + sessionId + title + image + thumbnail + contentType + width + height + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + videos { + id + sessionId + title + video + thumbnail + contentType + width + height + durationSeconds + fileSizeBytes + uploadedById + notes + internal + createdAt + uploadedBy { + id + fullName + } + } + completedTasks { + id + sessionId + taskId + completedById + completedAt + notes + task { + id + scopeDescription + sessionDescription + order + } + completedBy { + id + fullName + } + } + } + } +`; + +// ==================== TYPE DEFINITIONS ==================== + +export interface TeamProfileRef { + id: string; + fullName: string; +} + +export interface SessionNote { + id: string; + sessionId: string; + content: string; + authorId: string; + internal: boolean; + createdAt: string; + updatedAt: string; + author: TeamProfileRef | null; +} + +export interface SessionImage { + id: string; + sessionId: string; + title: string | null; + image: string; + thumbnail: string | null; + contentType: string; + width: number | null; + height: number | null; + uploadedById: string; + notes: string | null; + internal: boolean; + createdAt: string; + uploadedBy: TeamProfileRef | null; +} + +export interface SessionVideo { + id: string; + sessionId: string; + title: string | null; + video: string; + thumbnail: string | null; + contentType: string; + width: number | null; + height: number | null; + durationSeconds: number | null; + fileSizeBytes: number | null; + uploadedById: string; + notes: string | null; + internal: boolean; + createdAt: 
string; + uploadedBy: TeamProfileRef | null; +} + +export interface ServiceScopeTask { + id: string; + scopeDescription: string; + sessionDescription: string; + frequency: string; + order: number; +} + +export interface ProjectScopeTask { + id: string; + scopeDescription: string; + sessionDescription: string; + order: number; +} + +export interface ServiceTaskCompletion { + id: string; + sessionId: string; + taskId: string; + completedById: string; + completedAt: string; + notes: string | null; + task: ServiceScopeTask | null; + completedBy: TeamProfileRef | null; +} + +export interface ProjectTaskCompletion { + id: string; + sessionId: string; + taskId: string; + completedById: string; + completedAt: string; + notes: string | null; + task: ProjectScopeTask | null; + completedBy: TeamProfileRef | null; +} + +export interface ServiceSession { + id: string; + serviceId: string; + accountId: string; + accountAddressId: string; + customerId: string; + scopeId: string | null; + start: string; + end: string | null; + date: string; + createdById: string; + closedById: string | null; + createdAt: string; + updatedAt: string; + isActive: boolean; + durationSeconds: number | null; + createdBy: TeamProfileRef | null; + closedBy: TeamProfileRef | null; + notes: SessionNote[]; + images: SessionImage[]; + videos: SessionVideo[]; + completedTasks: ServiceTaskCompletion[]; +} + +export interface ServiceSessionSummary { + id: string; + serviceId: string; + start: string; + end: string | null; + date: string; + isActive: boolean; + durationSeconds: number | null; + createdBy: TeamProfileRef | null; + closedBy: TeamProfileRef | null; +} + +export interface ProjectSession { + id: string; + projectId: string; + accountId: string | null; + accountAddressId: string | null; + customerId: string; + scopeId: string | null; + start: string; + end: string | null; + date: string; + createdById: string; + closedById: string | null; + createdAt: string; + updatedAt: string; + isActive: boolean; 
+ durationSeconds: number | null; + createdBy: TeamProfileRef | null; + closedBy: TeamProfileRef | null; + notes: SessionNote[]; + images: SessionImage[]; + videos: SessionVideo[]; + completedTasks: ProjectTaskCompletion[]; +} + +export interface ProjectSessionSummary { + id: string; + projectId: string; + start: string; + end: string | null; + date: string; + isActive: boolean; + durationSeconds: number | null; + createdBy: TeamProfileRef | null; + closedBy: TeamProfileRef | null; +} + +// Query result types +export interface ServiceSessionQueryResult { + serviceSession: ServiceSession | null; +} + +export interface ServiceSessionsQueryResult { + serviceSessions: ServiceSessionSummary[]; +} + +export interface ActiveServiceSessionQueryResult { + activeServiceSession: ServiceSession | null; +} + +export interface LatestServiceSessionQueryResult { + latestServiceSession: ServiceSession | null; +} + +export interface ProjectSessionQueryResult { + projectSession: ProjectSession | null; +} + +export interface ProjectSessionsQueryResult { + projectSessions: ProjectSessionSummary[]; +} + +export interface ActiveProjectSessionQueryResult { + activeProjectSession: ProjectSession | null; +} + +export interface LatestProjectSessionQueryResult { + latestProjectSession: ProjectSession | null; +} diff --git a/frontend/src/lib/graphql/queries/team.ts b/frontend/src/lib/graphql/queries/team.ts new file mode 100644 index 0000000..658053c --- /dev/null +++ b/frontend/src/lib/graphql/queries/team.ts @@ -0,0 +1,62 @@ +import { gql } from '@apollo/client/core'; + +export const TEAM_PROFILES_QUERY = gql` + query TeamProfiles { + teamProfiles { + id + firstName + lastName + fullName + email + phone + role + status + notes + createdAt + updatedAt + } + } +`; + +export const TEAM_PROFILE_QUERY = gql` + query TeamProfile($id: UUID!) 
{ + teamProfile(id: $id) { + id + firstName + lastName + fullName + email + phone + role + status + notes + createdAt + updatedAt + } + } +`; + +export type TeamRole = 'ADMIN' | 'TEAM_LEADER' | 'TEAM_MEMBER'; +export type EntityStatus = 'ACTIVE' | 'INACTIVE' | 'PENDING'; + +export interface TeamProfile { + id: string; + firstName: string; + lastName: string; + fullName: string; + email: string | null; + phone: string | null; + role: TeamRole; + status: EntityStatus; + notes: string | null; + createdAt: string; + updatedAt: string; +} + +export interface TeamProfilesQueryResult { + teamProfiles: TeamProfile[]; +} + +export interface TeamProfileQueryResult { + teamProfile: TeamProfile; +} diff --git a/frontend/src/lib/graphql/queries/wave.ts b/frontend/src/lib/graphql/queries/wave.ts new file mode 100644 index 0000000..9be0449 --- /dev/null +++ b/frontend/src/lib/graphql/queries/wave.ts @@ -0,0 +1,261 @@ +import { gql } from '@apollo/client/core'; + +// Types +export interface WaveMoney { + value: string; + currencyCode: string; + currencySymbol: string; +} + +export interface WaveProduct { + id: string; + name: string; + description?: string; + unitPrice: number; + isSold: boolean; + isArchived: boolean; + incomeAccount?: { + id: string; + name: string; + }; +} + +export interface WaveCustomer { + id: string; + name: string; + email?: string; + currencyCode?: string; + address?: { + addressLine1?: string; + addressLine2?: string; + city?: string; + provinceCode?: string; + provinceName?: string; + postalCode?: string; + }; +} + +export interface WaveInvoiceItem { + description: string; + quantity: number; + unitPrice: number; + total: WaveMoney; +} + +export interface WaveInvoice { + id: string; + invoiceNumber: string; + invoiceDate: string; + dueDate?: string; + status: string; + customer: { + id: string; + name: string; + }; + items: WaveInvoiceItem[]; + subtotal: WaveMoney; + total: WaveMoney; + amountDue: WaveMoney; + amountPaid: WaveMoney; + pdfUrl?: string; 
+} + +export interface WaveInvoiceListItem { + id: string; + invoiceNumber: string; + invoiceDate: string; + dueDate?: string; + status: string; + customer: { + id: string; + name: string; + }; + subtotal: WaveMoney; + total: WaveMoney; + amountDue: WaveMoney; + amountPaid: WaveMoney; +} + +export interface WavePageInfo { + currentPage: number; + totalPages: number; + totalCount: number; +} + +export interface WaveInvoiceConnection { + pageInfo: WavePageInfo; + invoices: WaveInvoiceListItem[]; +} + +export interface WaveInvoiceReadiness { + ready: boolean; + issues: string[]; + waveCustomerId?: string; + readyItemCount: number; + missingWaveLinkCount: number; + totalAmount: string; +} + +// Queries +export const WAVE_PRODUCTS = gql` + query WaveProducts { + waveProducts { + id + name + description + unitPrice + isSold + isArchived + } + } +`; + +export const WAVE_CUSTOMERS = gql` + query WaveCustomers { + waveCustomers { + id + name + email + currencyCode + } + } +`; + +export const WAVE_INVOICE = gql` + query WaveInvoice($invoiceId: String!) { + waveInvoice(invoiceId: $invoiceId) { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + items { + description + quantity + unitPrice + total { + value + currencyCode + currencySymbol + } + } + subtotal { + value + currencyCode + currencySymbol + } + total { + value + currencyCode + currencySymbol + } + amountDue { + value + currencyCode + currencySymbol + } + amountPaid { + value + currencyCode + currencySymbol + } + pdfUrl + } + } +`; + +export const WAVE_INVOICE_READINESS = gql` + query WaveInvoiceReadiness($invoiceId: UUID!) 
{ + waveInvoiceReadiness(invoiceId: $invoiceId) { + ready + issues + waveCustomerId + readyItemCount + missingWaveLinkCount + totalAmount + } + } +`; + +export const WAVE_INVOICES = gql` + query WaveInvoices($page: Int, $pageSize: Int) { + waveInvoices(page: $page, pageSize: $pageSize) { + pageInfo { + currentPage + totalPages + totalCount + } + invoices { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + subtotal { + value + currencyCode + currencySymbol + } + total { + value + currencyCode + currencySymbol + } + amountDue { + value + currencyCode + currencySymbol + } + amountPaid { + value + currencyCode + currencySymbol + } + } + } + } +`; + +export const WAVE_CUSTOMER = gql` + query WaveCustomer($customerId: String!) { + waveCustomer(customerId: $customerId) { + id + name + email + currencyCode + address { + addressLine1 + addressLine2 + city + provinceCode + provinceName + postalCode + } + } + } +`; + +export const WAVE_PRODUCT = gql` + query WaveProduct($productId: String!) { + waveProduct(productId: $productId) { + id + name + description + unitPrice + isSold + isArchived + incomeAccount { + id + name + } + } + } +`; diff --git a/frontend/src/lib/index.ts b/frontend/src/lib/index.ts new file mode 100644 index 0000000..856f2b6 --- /dev/null +++ b/frontend/src/lib/index.ts @@ -0,0 +1 @@ +// place files you want to import through the `$lib` alias in this folder. 
diff --git a/frontend/src/lib/permissions.ts b/frontend/src/lib/permissions.ts new file mode 100644 index 0000000..0b98c00 --- /dev/null +++ b/frontend/src/lib/permissions.ts @@ -0,0 +1,137 @@ +/** + * Permissions abstraction layer + * + * Phase 1: Role-based logic (current) + * Phase 2: Swap to Keto API calls + * + * Keto-compatible naming convention: + * - service:#assigned@team_profile: + * - service:#viewer@customer_profile: + * - account:#owner@customer: + */ + +import type { User } from '../app'; + +// Resource types in the system +export type Resource = + | 'service' + | 'project' + | 'account' + | 'customer' + | 'report' + | 'invoice' + | 'scope' + | 'profile'; + +// Actions that can be performed +export type Action = 'view' | 'edit' | 'delete' | 'manage' | 'session' | 'create'; + +/** + * Check if a user can perform an action on a resource + * + * @param user - The current user + * @param action - The action to check + * @param resource - The resource type + * @param resourceId - Optional specific resource ID (for future Keto integration) + */ +export function can( + user: User | null, + action: Action, + resource: Resource, + resourceId?: string +): boolean { + if (!user) return false; + + // Customer profiles: view only + if (user.__typename === 'CustomerProfileType') { + return action === 'view'; + } + + // Team profiles + if (user.__typename === 'TeamProfileType') { + // ADMIN and TEAM_LEADER: full access + if (user.role === 'ADMIN' || user.role === 'TEAM_LEADER') { + return true; + } + + // TEAM_MEMBER: view and session only + if (user.role === 'TEAM_MEMBER') { + return action === 'view' || action === 'session'; + } + } + + return false; +} + +/** + * Check if user can see internal-flagged content (notes, photos, videos) + * Only team members can see internal content + */ +export function canSeeInternal(user: User | null): boolean { + return user?.__typename === 'TeamProfileType'; +} + +/** + * Check if user can access admin routes + */ +export 
function canAccessAdmin(user: User | null): boolean { + if (!user) return false; + if (user.__typename !== 'TeamProfileType') return false; + return user.role === 'ADMIN' || user.role === 'TEAM_LEADER'; +} + +/** + * Check if user can access portal routes + * All authenticated users can access portal + */ +export function canAccessPortal(user: User | null): boolean { + return user !== null; +} + +/** + * Check if user is assigned to a service/project + * Used for determining session edit permissions + */ +export function isAssignedTo( + userId: string, + teamMembers: Array<{ teamProfileId: string }> | null | undefined +): boolean { + if (!teamMembers) return false; + return teamMembers.some((m) => m.teamProfileId === userId); +} + +/** + * Build permissions context object for passing to layouts/components + */ +export function buildPermissions(user: User | null) { + return { + // Resource permissions + canManageServices: can(user, 'manage', 'service'), + canManageProjects: can(user, 'manage', 'project'), + canManageAccounts: can(user, 'manage', 'account'), + canManageCustomers: can(user, 'manage', 'customer'), + canManageInvoices: can(user, 'manage', 'invoice'), + canManageScopes: can(user, 'manage', 'scope'), + canManageProfiles: can(user, 'manage', 'profile'), + canManageReports: can(user, 'manage', 'report'), + + // Session permissions (team members can interact with sessions) + canEditSessions: can(user, 'session', 'service'), + + // Content visibility + canSeeInternal: canSeeInternal(user), + + // Route access + canAccessAdmin: canAccessAdmin(user), + canAccessPortal: canAccessPortal(user), + + // User info for convenience + isTeamProfile: user?.__typename === 'TeamProfileType', + isCustomerProfile: user?.__typename === 'CustomerProfileType', + isAdmin: user?.__typename === 'TeamProfileType' && user.role === 'ADMIN', + isTeamLeader: user?.__typename === 'TeamProfileType' && user.role === 'TEAM_LEADER', + isTeamMember: user?.__typename === 'TeamProfileType' && 
user.role === 'TEAM_MEMBER' + }; +} + +export type Permissions = ReturnType; diff --git a/frontend/src/lib/stores/auth.svelte.ts b/frontend/src/lib/stores/auth.svelte.ts new file mode 100644 index 0000000..4ff6411 --- /dev/null +++ b/frontend/src/lib/stores/auth.svelte.ts @@ -0,0 +1,132 @@ +import { browser } from '$app/environment'; +import { config } from '$lib/config'; +import type { Session, SessionIdentity } from '../../app'; + +export type { Session, SessionIdentity }; + +const isBrowser = browser; + +function createAuthStore() { + let session = $state(null); + let checkInProgress = false; + + const isAuthenticated = $derived(Boolean(session?.active)); + const userEmail = $derived(session?.identity?.traits?.email ?? null); + const userFullName = $derived( + session?.identity?.traits?.name + ? `${session.identity.traits.name.first ?? ''} ${session.identity.traits.name.last ?? ''}`.trim() + : null + ); + + async function checkSession(fetchFn?: typeof fetch): Promise { + const fetchToUse = fetchFn || (isBrowser ? fetch : null); + if (!fetchToUse) return null; + if (checkInProgress && !fetchFn) return null; + + if (!fetchFn) checkInProgress = true; + + try { + const response = await fetchToUse(`${config.kratos.publicUrl}/sessions/whoami`, { + credentials: 'include', + headers: { + Accept: 'application/json' + } + }); + + if (response.ok) { + const sessionData = await response.json(); + session = sessionData; + return sessionData; + } else { + session = null; + return null; + } + } catch (error) { + console.warn('Failed to check session:', error); + session = null; + return null; + } finally { + if (!fetchFn) checkInProgress = false; + } + } + + async function logout(returnTo?: string): Promise { + if (!isBrowser) return; + + try { + // Build full URL for return_to parameter + const returnUrl = returnTo + ? returnTo.startsWith('http') + ? 
returnTo + : `${config.app.origin}${returnTo}` + : config.app.origin; + const logoutEndpoint = `${config.kratos.publicUrl}/self-service/logout/browser?return_to=${encodeURIComponent(returnUrl)}`; + const response = await fetch(logoutEndpoint, { + credentials: 'include' + }); + + if (response.ok) { + const logoutData = await response.json(); + if (logoutData.logout_url) { + window.location.href = logoutData.logout_url; + } + } + } catch (error) { + console.error('Logout failed:', error); + } finally { + session = null; + } + } + + function redirectToLogin(returnTo?: string): void { + if (!isBrowser) return; + + const returnUrl = returnTo + ? returnTo.startsWith('http') + ? returnTo + : `${config.app.origin}${returnTo}` + : window.location.href; + + window.location.href = `${config.kratos.publicUrl}/self-service/login/browser?return_to=${encodeURIComponent(returnUrl)}`; + } + + function redirectToRegistration(returnTo?: string): void { + if (!isBrowser) return; + + const returnUrl = returnTo + ? returnTo.startsWith('http') + ? 
returnTo + : `${config.app.origin}${returnTo}` + : window.location.href; + + window.location.href = `${config.kratos.publicUrl}/self-service/registration/browser?return_to=${encodeURIComponent(returnUrl)}`; + } + + // Initialize session check on browser + if (isBrowser) { + checkSession().catch(() => { + // Intentionally ignored - error already logged in checkSession + }); + } + + return { + get session() { + return session; + }, + get isAuthenticated() { + return isAuthenticated; + }, + get userEmail() { + return userEmail; + }, + get userFullName() { + return userFullName; + }, + checkSession, + logout, + redirectToLogin, + redirectToRegistration + }; +} + +export const auth = createAuthStore(); diff --git a/frontend/src/lib/stores/theme.svelte.ts b/frontend/src/lib/stores/theme.svelte.ts new file mode 100644 index 0000000..92c9816 --- /dev/null +++ b/frontend/src/lib/stores/theme.svelte.ts @@ -0,0 +1,87 @@ +import { browser } from '$app/environment'; + +type Theme = 'light' | 'dark' | 'system'; + +const STORAGE_KEY = 'theme-preference'; + +function getSystemTheme(): 'light' | 'dark' { + if (!browser) return 'light'; + return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'; +} + +function getStoredTheme(): Theme { + if (!browser) return 'system'; + const stored = localStorage.getItem(STORAGE_KEY); + if (stored === 'light' || stored === 'dark' || stored === 'system') { + return stored; + } + return 'system'; +} + +function createThemeStore() { + let preference = $state(getStoredTheme()); + let resolved = $derived<'light' | 'dark'>( + preference === 'system' ? 
getSystemTheme() : preference + ); + + function applyTheme(theme: 'light' | 'dark') { + if (!browser) return; + const html = document.documentElement; + // Only modify classes if needed to prevent flash + if (theme === 'dark') { + if (!html.classList.contains('dark')) { + html.classList.remove('light'); + html.classList.add('dark'); + } + } else { + if (!html.classList.contains('light')) { + html.classList.remove('dark'); + html.classList.add('light'); + } + } + } + + function setTheme(theme: Theme) { + preference = theme; + + if (browser) { + localStorage.setItem(STORAGE_KEY, theme); + applyTheme(resolved); + } + } + + function toggle() { + const newTheme = resolved === 'light' ? 'dark' : 'light'; + setTheme(newTheme); + } + + function init() { + if (!browser) return; + + applyTheme(resolved); + + const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)'); + mediaQuery.addEventListener('change', (e: MediaQueryListEvent) => { + if (preference === 'system') { + applyTheme(e.matches ? 
'dark' : 'light'); + } + }); + } + + return { + get preference() { + return preference; + }, + get resolved() { + return resolved; + }, + get isDark() { + return resolved === 'dark'; + }, + setTheme, + toggle, + init + }; +} + +export const theme = createThemeStore(); diff --git a/frontend/src/lib/stores/unreadCounts.svelte.ts b/frontend/src/lib/stores/unreadCounts.svelte.ts new file mode 100644 index 0000000..137c805 --- /dev/null +++ b/frontend/src/lib/stores/unreadCounts.svelte.ts @@ -0,0 +1,177 @@ +import { browser } from '$app/environment'; +import { client } from '$lib/graphql/client'; +import { + UNREAD_MESSAGE_COUNT_QUERY, + type UnreadMessageCountQueryResult +} from '$lib/graphql/queries/messaging'; +import { + MY_UNREAD_NOTIFICATION_COUNT_QUERY, + type MyUnreadNotificationCountQueryResult +} from '$lib/graphql/queries/notifications'; + +const POLL_INTERVAL = 30000; // 30 seconds + +function createUnreadCountsStore() { + let messageCount = $state(0); + let notificationCount = $state(0); + let isPolling = $state(false); + let pollIntervalId: ReturnType | null = null; + + const totalCount = $derived(messageCount + notificationCount); + const hasUnread = $derived(totalCount > 0); + + /** + * Fetch current unread counts from the server + */ + async function fetchCounts(): Promise { + if (!browser) return; + + try { + const [messageResult, notificationResult] = await Promise.all([ + client.query({ + query: UNREAD_MESSAGE_COUNT_QUERY, + fetchPolicy: 'network-only' + }), + client.query({ + query: MY_UNREAD_NOTIFICATION_COUNT_QUERY, + fetchPolicy: 'network-only' + }) + ]); + + if (messageResult.data) { + messageCount = messageResult.data.unreadMessageCount; + } + if (notificationResult.data) { + notificationCount = notificationResult.data.myUnreadNotificationCount; + } + } catch (error) { + console.warn('Failed to fetch unread counts:', error); + } + } + + /** + * Start polling for unread counts + */ + function startPolling(): void { + if (!browser || isPolling) 
return; + + isPolling = true; + + // Fetch immediately + fetchCounts(); + + // Set up interval + pollIntervalId = setInterval(() => { + fetchCounts(); + }, POLL_INTERVAL); + } + + /** + * Stop polling for unread counts + */ + function stopPolling(): void { + if (pollIntervalId) { + clearInterval(pollIntervalId); + pollIntervalId = null; + } + isPolling = false; + } + + /** + * Increment message count (optimistic update) + */ + function incrementMessages(by = 1): void { + messageCount = Math.max(0, messageCount + by); + } + + /** + * Decrement message count (optimistic update) + */ + function decrementMessages(by = 1): void { + messageCount = Math.max(0, messageCount - by); + } + + /** + * Clear all message counts + */ + function clearMessages(): void { + messageCount = 0; + } + + /** + * Increment notification count (optimistic update) + */ + function incrementNotifications(by = 1): void { + notificationCount = Math.max(0, notificationCount + by); + } + + /** + * Decrement notification count (optimistic update) + */ + function decrementNotifications(by = 1): void { + notificationCount = Math.max(0, notificationCount - by); + } + + /** + * Clear all notification counts + */ + function clearNotifications(): void { + notificationCount = 0; + } + + /** + * Reset all counts + */ + function reset(): void { + messageCount = 0; + notificationCount = 0; + } + + /** + * Format count for display (caps at 99+) + */ + function formatCount(count: number): string { + if (count <= 0) return ''; + if (count > 99) return '99+'; + return count.toString(); + } + + return { + get messageCount() { + return messageCount; + }, + get notificationCount() { + return notificationCount; + }, + get totalCount() { + return totalCount; + }, + get hasUnread() { + return hasUnread; + }, + get isPolling() { + return isPolling; + }, + get formattedMessageCount() { + return formatCount(messageCount); + }, + get formattedNotificationCount() { + return formatCount(notificationCount); + }, + get 
formattedTotalCount() { + return formatCount(totalCount); + }, + fetchCounts, + startPolling, + stopPolling, + incrementMessages, + decrementMessages, + clearMessages, + incrementNotifications, + decrementNotifications, + clearNotifications, + reset + }; +} + +export const unreadCounts = createUnreadCountsStore(); diff --git a/frontend/src/lib/utils/date.ts b/frontend/src/lib/utils/date.ts new file mode 100644 index 0000000..89a34a5 --- /dev/null +++ b/frontend/src/lib/utils/date.ts @@ -0,0 +1,61 @@ +import { format, parseISO, startOfWeek, endOfWeek, addDays } from 'date-fns'; + +/** + * Format a date string for display. + * Handles date-only strings (YYYY-MM-DD) without timezone shifting. + */ +export function formatDate(dateStr: string | null | undefined): string { + if (!dateStr) return '—'; + + // parseISO correctly handles date-only strings without timezone shifting + const date = parseISO(dateStr); + return format(date, 'MMM d, yyyy'); +} + +/** + * Format a date string with time. + */ +export function formatDateTime(dateStr: string | null | undefined): string { + if (!dateStr) return '—'; + + const date = parseISO(dateStr); + return format(date, 'MMM d, yyyy h:mm a'); +} + +/** + * Get current month in YYYY-MM format. + */ +export function getCurrentMonth(): string { + return format(new Date(), 'yyyy-MM'); +} + +/** + * Get current week date range (Monday to Sunday). + * Returns dates in YYYY-MM-DD format. + */ +export function getWeekDateRange(): { start: string; end: string } { + const now = new Date(); + // Start of week (Monday) + const start = startOfWeek(now, { weekStartsOn: 1 }); + // End of week (Sunday) + const end = endOfWeek(now, { weekStartsOn: 1 }); + + return { + start: format(start, 'yyyy-MM-dd'), + end: format(end, 'yyyy-MM-dd') + }; +} + +/** + * Get the start and end of a month in YYYY-MM-DD format. 
+ */ +export function getMonthDateRange(month: string): { start: string; end: string } { + const [year, monthNum] = month.split('-').map(Number); + const startDate = new Date(year, monthNum - 1, 1); + const endDate = new Date(year, monthNum, 0); // Last day of month + + return { + start: format(startDate, 'yyyy-MM-dd'), + end: format(endDate, 'yyyy-MM-dd') + }; +} diff --git a/frontend/src/routes/+error.svelte b/frontend/src/routes/+error.svelte new file mode 100644 index 0000000..b3100c3 --- /dev/null +++ b/frontend/src/routes/+error.svelte @@ -0,0 +1,9 @@ + + +
+

{$page.status}

+

{$page.error?.message ?? 'Something went wrong'}

+ Go Home +
diff --git a/frontend/src/routes/+layout.server.ts b/frontend/src/routes/+layout.server.ts new file mode 100644 index 0000000..1f6e96f --- /dev/null +++ b/frontend/src/routes/+layout.server.ts @@ -0,0 +1,28 @@ +import type { LayoutServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { ME_QUERY, type MeQueryResult } from '$lib/graphql/queries/me'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; + +export const load: LayoutServerLoad = async ({ locals }) => { + // If no cookie, user is not authenticated + if (!locals.cookie) { + return { user: null, teamProfiles: [] }; + } + + const client = createServerClient(locals.cookie); + + try { + const [meResult, teamResult] = await Promise.all([ + client.query({ query: ME_QUERY }), + client.query({ query: TEAM_PROFILES_QUERY }) + ]); + + return { + user: meResult.data?.me ?? null, + teamProfiles: teamResult.data?.teamProfiles ?? [] + }; + } catch (error) { + console.error('Failed to fetch user or team profiles:', error); + return { user: null, teamProfiles: [] }; + } +}; diff --git a/frontend/src/routes/+layout.svelte b/frontend/src/routes/+layout.svelte new file mode 100644 index 0000000..c0a3e97 --- /dev/null +++ b/frontend/src/routes/+layout.svelte @@ -0,0 +1,52 @@ + + + + + Nexus + + +
+ + + + +
+ {@render children()} +
+
diff --git a/frontend/src/routes/+page.svelte b/frontend/src/routes/+page.svelte new file mode 100644 index 0000000..56a4a78 --- /dev/null +++ b/frontend/src/routes/+page.svelte @@ -0,0 +1,406 @@ + + + + Acme Services - Commercial Cleaning Services + + + + +
+ +
+ Professional cleaning service + +
+
+ + + +
+

+ We Don't Just Clean. + We Deliver Peace of Mind. +

+

+ Acme Services provides exceptional commercial cleaning and floor care + services, built on 5 years of experience and specialized expertise. Outstanding workmanship + and complete client satisfaction, every single time. +

+ +
+
+
+ + +
+ +
+

Comprehensive Cleaning Solutions

+

+ From daily janitorial services to specialized deep cleaning, we keep your facility spotless + so you can focus on your business. +

+
+ +
+ +
+
+ + + +
+

Janitorial Service

+

+ Scheduled cleaning services tailored to your facility. Custom scope of work with task + frequencies that match your needs: daily, weekly, or monthly. +

+
+ + +
+
+ + + +
+

Floor Care

+

+ Professional floor maintenance including stripping, waxing, buffing, and deep cleaning. + Keep your floors looking their best year-round. +

+
+ + +
+
+ + + +
+

Commercial Kitchen

+

+ Comprehensive kitchen cleaning covering equipment, walls, ceiling tiles, and floors. Keep + your commercial kitchen spotless and sanitary. +

+
+
+ + +
+
+ + +
+ +
+

Why Choose Us

+

+ From your first call to the final walkthrough, you'll experience clear, professional + communication and a team that truly listens. Big or small, every project benefits from our + unwavering commitment to safety, reliability, and top-tier quality. +

+
+ +
+ +
+
+ + + +
+

No Contracts

+

+ One consultation, then book as needed. Our Service Standard guarantees quality at a + transparent price. +

+
+ + +
+
+ + + +
+

Safe & Effective

+

+ We carefully select our products for a spotless shine that's gentle on your workplace and + your people. +

+
+ + +
+
+ + + +
+

Easy Scheduling

+

+ When cleaning falls off your to-do list, we step in. Book online or call for fast, + reliable service. +

+
+ + +
+
+ + + +
+

Transparent Pricing

+

+ No hidden fees, no surprises. You'll know exactly what you're paying for before we start. +

+
+
+
+
+ + +
+ +
+
+ Professional cleaning team at work +
+
+

Our Commitment to You

+

+ We understand that inviting a cleaning team into your business requires trust. That's why + we're committed to earning it every single day. +

+
    +
  • + + + +
    + Trained & Insured Team +

    + Our cleaning professionals are thoroughly trained, background-checked, and fully + insured. +

    +
    +
  • +
  • + + + +
    + Customized Cleaning Plans +

    + Every facility is different. We build a scope of work around your specific needs and + schedule. +

    +
    +
  • +
  • + + + +
    + Customer Portal Access +

    + Track services, view reports, and communicate with our team through your online + account. +

    +
    +
  • +
  • + + + +
    + The Service Standard +

    + Our quality guarantee means if you're not satisfied, we'll make it right. +

    +
    +
  • +
+
+
+
+
+ + +
+ +
+

Ready for a cleaner facility?

+

+ Serving businesses throughout your service area. Get in touch + for a free consultation. We'll walk through your space, understand your needs, and build a + cleaning plan that works for you. +

+ + Get a Free Quote + +
+
+
diff --git a/frontend/src/routes/about/+page.svelte b/frontend/src/routes/about/+page.svelte new file mode 100644 index 0000000..5aec9d9 --- /dev/null +++ b/frontend/src/routes/about/+page.svelte @@ -0,0 +1,227 @@ + + + + About Us - Nexus + + + + +
+ + +
+

About Nexus

+

+ Acme Services is a commercial cleaning company dedicated + to providing dependable janitorial services, floor care, and specialty cleaning for + businesses throughout your region. +

+
+
+
+ + +
+ +
+
+

Our Story

+
+

+ Acme Services was founded on a simple principle: businesses deserve a + cleaning partner they can rely on. We've built our reputation on showing up when we say + we will and delivering consistent, quality results. +

+

+ We understand that a clean facility is more than just appearances. It's about creating a + healthy, productive environment for your employees and a welcoming space for your + customers. +

+

+ Today, we're proud to serve a growing roster of commercial clients across the region. + From office buildings and medical facilities to restaurants and industrial spaces. +

+
+
+
+ Professional cleaning team +
+
+
+
+ + +
+ +
+

Why We Do This

+
+

+ Commercial cleaning has a reputation problem. Too often, it's treated as an afterthought, + something to get done as cheaply as possible, by whoever will do it for less. The result? + A race to the bottom that hurts everyone: businesses get unreliable service, and the + people doing the work aren't valued for what they bring. +

+

+ We started Nexus because we believe cleaning deserves better. It's skilled work. It + keeps people healthy. It makes businesses function. And the people who do it well deserve + to be paid fairly and treated with respect. +

+

+ That's why we're committed to elevating the standard, not just in how we clean, but in how + we operate. We pay our team what the work is worth. We invest in proper training and + equipment. We show up when we say we will. And we stand behind every job with a guarantee that means something. +

+

+ This isn't about being the cheapest option. It's about being the partner you can actually + count on. +

+
+
+
+
+ + +
+ +
+

Our Values

+

+ These principles guide everything we do at Nexus. +

+
+ +
+
+
+ + + +
+

Dependability

+

+ We show up on time, every time. You can count on us. +

+
+ +
+
+ + + +
+

Quality

+

We take pride in our work and never cut corners.

+
+ +
+
+ + + +
+

Communication

+

We listen to your needs and keep you informed.

+
+ +
+
+ + + +
+

Respect

+

We treat your space and your people with care.

+
+
+
+
+ + +
+ +
+

Ready to work with us?

+

+ Contact us today for a free consultation and see how Nexus can help keep your facility + spotless. +

+ +
+
+
diff --git a/frontend/src/routes/admin/+layout.server.ts b/frontend/src/routes/admin/+layout.server.ts new file mode 100644 index 0000000..ca4ff91 --- /dev/null +++ b/frontend/src/routes/admin/+layout.server.ts @@ -0,0 +1,121 @@ +import type { LayoutServerLoad } from './$types'; +import { redirect, error } from '@sveltejs/kit'; +import { client } from '$lib/graphql/client'; +import { SERVICES_QUERY, type ServicesQueryResult } from '$lib/graphql/queries/services'; +import { PROJECTS_QUERY, type ProjectsQueryResult } from '$lib/graphql/queries/projects'; +import { INVOICES_QUERY, type InvoicesQueryResult } from '$lib/graphql/queries/invoices'; +import { getCurrentMonth, getMonthDateRange } from '$lib/utils/date'; + +export const load: LayoutServerLoad = async ({ url, parent, locals }) => { + const parentData = await parent(); + const me = parentData.user; + + // Not authenticated + if (!me) { + const returnTo = encodeURIComponent(url.pathname + url.search); + throw redirect(307, `/login?return_to=${returnTo}`); + } + + // Admin requires team profile with admin or team_leader role + if (me.__typename !== 'TeamProfileType') { + throw error(403, 'Admin access requires a team profile'); + } + + if (me.role !== 'ADMIN' && me.role !== 'TEAM_LEADER') { + throw error(403, 'Admin access requires an admin or team leader role'); + } + + // Get month from URL params or default to current + const month = url.searchParams.get('month') ?? 
getCurrentMonth(); + const { start, end } = getMonthDateRange(month); + + // Fetch services, projects, and invoices for the month in parallel + const [servicesResult, projectsResult, invoicesResult] = await Promise.all([ + client.query({ + query: SERVICES_QUERY, + variables: { + filter: { dateFrom: start, dateTo: end }, + pagination: { limit: 100 } + }, + context: { headers: { cookie: locals.cookie } }, + fetchPolicy: 'network-only' + }), + client.query({ + query: PROJECTS_QUERY, + variables: { + filter: { dateFrom: start, dateTo: end }, + pagination: { limit: 100 } + }, + context: { headers: { cookie: locals.cookie } }, + fetchPolicy: 'network-only' + }), + client.query({ + query: INVOICES_QUERY, + variables: { + pagination: { limit: 100 } + }, + context: { headers: { cookie: locals.cookie } }, + fetchPolicy: 'network-only' + }) + ]); + + const services = servicesResult.data?.services?.items ?? []; + const projects = projectsResult.data?.projects?.items ?? []; + const invoices = invoicesResult.data?.invoices?.items ?? 
[]; + + // Build lookup maps for quick access + const accountLookup = new Map(); + const customerLookup = new Map(); + + // Populate lookups from services + for (const service of services) { + if (service.accountAddressId && service.account) { + accountLookup.set(service.accountAddressId, { + accountName: service.account.name, + addressName: service.accountAddress?.name || service.accountAddress?.city || 'Unknown' + }); + } + } + + // Populate lookups from projects + for (const project of projects) { + if (project.customer) { + customerLookup.set(project.customerId, { + customerName: project.customer.name + }); + } + if (project.accountAddressId && project.accountAddress) { + accountLookup.set(project.accountAddressId, { + accountName: project.accountAddress.account?.name || 'Unknown', + addressName: project.accountAddress.name || project.accountAddress.city || 'Unknown' + }); + } + } + + // Populate lookups from invoices + for (const invoice of invoices) { + if (invoice.customer) { + customerLookup.set(invoice.customerId, { + customerName: invoice.customer.name + }); + } + } + + return { + ...parentData, + services: { + scheduled: services.filter((s) => s.status === 'SCHEDULED'), + inProgress: services.filter((s) => s.status === 'IN_PROGRESS'), + completed: services.filter((s) => s.status === 'COMPLETED') + }, + projects: { + scheduled: projects.filter((p) => p.status === 'SCHEDULED'), + inProgress: projects.filter((p) => p.status === 'IN_PROGRESS'), + completed: projects.filter((p) => p.status === 'COMPLETED') + }, + invoices, + accountLookup, + customerLookup, + currentMonth: month + }; +}; diff --git a/frontend/src/routes/admin/+layout.svelte b/frontend/src/routes/admin/+layout.svelte new file mode 100644 index 0000000..f416b62 --- /dev/null +++ b/frontend/src/routes/admin/+layout.svelte @@ -0,0 +1,14 @@ + + + +
+ {@render children()} +
+ + + diff --git a/frontend/src/routes/admin/+page.server.ts b/frontend/src/routes/admin/+page.server.ts new file mode 100644 index 0000000..c4e8636 --- /dev/null +++ b/frontend/src/routes/admin/+page.server.ts @@ -0,0 +1,36 @@ +import type { PageServerLoad } from './$types'; +import { getWeekDateRange } from '$lib/utils/date'; + +export const load: PageServerLoad = async ({ parent }) => { + // Get parent data - no additional queries needed! + // The layout already fetched all month data, we just filter for this week + const parentData = await parent(); + + // Get current week's date range for filtering + const { start, end } = getWeekDateRange(); + + // Filter services and projects for this week from parent data + const isInWeek = (date: string) => date >= start && date <= end; + + const weekServices = { + scheduled: (parentData.services?.scheduled ?? []).filter((s) => isInWeek(s.date)), + inProgress: (parentData.services?.inProgress ?? []).filter((s) => isInWeek(s.date)) + }; + + const weekProjects = { + scheduled: (parentData.projects?.scheduled ?? []).filter((p) => isInWeek(p.date)), + inProgress: (parentData.projects?.inProgress ?? []).filter((p) => isInWeek(p.date)) + }; + + // Filter invoices for SENT status (awaiting payment) + const pendingInvoices = (parentData.invoices ?? []).filter((i) => i.status === 'SENT'); + + return { + ...parentData, + dashboard: { + services: weekServices, + projects: weekProjects, + invoices: pendingInvoices + } + }; +}; diff --git a/frontend/src/routes/admin/+page.svelte b/frontend/src/routes/admin/+page.svelte new file mode 100644 index 0000000..ce00b1e --- /dev/null +++ b/frontend/src/routes/admin/+page.svelte @@ -0,0 +1,478 @@ + + + + Admin Dashboard - Nexus + + +
+ + + + + + + + +
+ +
+
+
+

+ Services +

+ View all +
+
+ +
+

This Week

+ {#if thisWeekServices.length === 0} +

No services scheduled this week

+ {:else} + + {#if thisWeekServices.length > 5} +

+ +{thisWeekServices.length - 5} more services +

+ {/if} + {/if} +
+
+ + +
+
+
+

Projects

+ View all +
+
+ +
+

This Week

+ {#if thisWeekProjects.length === 0} +

No projects scheduled this week

+ {:else} + + {#if thisWeekProjects.length > 5} +

+ +{thisWeekProjects.length - 5} more projects +

+ {/if} + {/if} +
+
+ + +
+
+
+

Invoices

+ View all +
+
+ +
+

Awaiting Payment

+ {#if pendingInvoices.length === 0} +

No outstanding invoices

+ {:else} + + {#if pendingInvoices.length > 5} +

+ +{pendingInvoices.length - 5} more outstanding invoices +

+ {/if} + {/if} +
+
+ + +
+
+
+

Calendar

+ View all +
+
+ +
+

Upcoming Events

+
+
+ +
+

View your calendar for upcoming events

+ + Open Calendar + +
+
+
+
+
+
diff --git a/frontend/src/routes/admin/accounts/+page.server.ts b/frontend/src/routes/admin/accounts/+page.server.ts new file mode 100644 index 0000000..4bf19cb --- /dev/null +++ b/frontend/src/routes/admin/accounts/+page.server.ts @@ -0,0 +1,35 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { ACCOUNTS_QUERY, type AccountsQueryResult } from '$lib/graphql/queries/accounts'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { accounts: [] }; + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: ACCOUNTS_QUERY, + variables: { filter: null } + }) + .catch((err) => { + console.error('Failed to fetch accounts:', err); + return { data: null }; + }); + + return { accounts: data?.accounts ?? [] }; +}; diff --git a/frontend/src/routes/admin/accounts/+page.svelte b/frontend/src/routes/admin/accounts/+page.svelte new file mode 100644 index 0000000..0ae5450 --- /dev/null +++ b/frontend/src/routes/admin/accounts/+page.svelte @@ -0,0 +1,278 @@ + + + + Accounts - Admin - Nexus + + +
+ + + + {#snippet subtitleSnippet()} + {#if statusFilter === 'ALL'} + {totalCount} total accounts + {:else} + {filteredCount} of {totalCount} accounts + {/if} + {/snippet} + {#snippet actions()} + + {/snippet} + + + +
+ +
+ + + + +
+ + +
+ +
+ + +
+ + + {#if searchQuery || statusFilter !== 'ALL'} + + {/if} + + + + Showing {filteredCount} of {totalCount} + +
+
+ + + {#if filteredAccounts.length > 0} + + {:else if searchQuery || statusFilter !== 'ALL'} + +
+ + + +

No accounts found

+

No accounts match your current filters.

+ +
+ {:else} + +
+ + + +

No accounts yet

+

Get started by adding your first account.

+
+ {/if} +
+
+ + + + + diff --git a/frontend/src/routes/admin/accounts/[account]/+page.server.ts b/frontend/src/routes/admin/accounts/[account]/+page.server.ts new file mode 100644 index 0000000..6ec9041 --- /dev/null +++ b/frontend/src/routes/admin/accounts/[account]/+page.server.ts @@ -0,0 +1,56 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { ACCOUNT_QUERY, type AccountQueryResult } from '$lib/graphql/queries/account'; +import { WAVE_PRODUCTS, type WaveProduct } from '$lib/graphql/queries/wave'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const [accountResult, waveProductsResult] = await Promise.all([ + client + .query({ + query: ACCOUNT_QUERY, + variables: { id: params.account } + }) + .catch((err) => { + console.error('Failed to fetch account:', err); + throw error(500, 'Failed to load account'); + }), + client + .query<{ waveProducts: WaveProduct[] }>({ + query: WAVE_PRODUCTS + }) + .catch((err) => { + console.error('Failed to fetch wave products:', err); + return { data: null }; + }) + ]); + + if (!accountResult.data?.account) { + throw error(404, 'Account not found'); + } + + return { + account: accountResult.data.account, + contacts: accountResult.data.account.contacts, + addresses: accountResult.data.account.addresses, + revenues: accountResult.data.account.revenues, + waveProducts: waveProductsResult.data?.waveProducts ?? 
[] + }; +}; diff --git a/frontend/src/routes/admin/accounts/[account]/+page.svelte b/frontend/src/routes/admin/accounts/[account]/+page.svelte new file mode 100644 index 0000000..a8db109 --- /dev/null +++ b/frontend/src/routes/admin/accounts/[account]/+page.svelte @@ -0,0 +1,503 @@ + + + + {account?.name ?? 'Account'} - Admin - Nexus + + +
+ + {#if account} + + {account.status} + + +
+ +
+
+ +
+
+
+

Start Date

+

{formatDate(account.startDate)}

+
+
+

End Date

+

{formatDate(account.endDate)}

+
+ {#if account.customer} +
+

Parent Customer

+ + {account.customer.name} → + +
+ {/if} +
+

Monthly Revenue

+ {#if activeRevenue} +

{formatCurrency(activeRevenue.amount)}

+

+ Since {formatDate(activeRevenue.startDate)} + {#if activeRevenue.waveServiceId} + Wave + {#if activeRevenue.waveProductName} + ({activeRevenue.waveProductName}) + {/if} + {/if} +

+ {:else} +

Not set

+ {/if} +
+
+
+ + +
+
+ +
+
+ {#if contacts.length > 0} +
+ {#each contacts as contact (contact.id)} +
+
+ + +
+
+ + {contact.firstName} + {contact.lastName} + + {#if contact.email} +

{contact.email}

+ {/if} + {#if contact.phone} +

{contact.phone}

+ {/if} +
+ {#if contact.notes || contact.isPrimary} +
+ {#if contact.isPrimary} + Primary + {/if} + {#if contact.notes} + {contact.notes} + {/if} +
+ {/if} +
+ {/each} +
+ {:else} +

No contacts added yet.

+ {/if} +
+
+ + + {#if historicalRevenues.length > 0} +
+
+ +
+
+
+ {#each historicalRevenues as revenue (revenue.id)} +
+
+
+ + {formatCurrency(revenue.amount)} + + {#if revenue.waveServiceId} + Wave + {#if revenue.waveProductName} + ({revenue.waveProductName}) + {/if} + {/if} + Inactive +
+

+ {formatDate(revenue.startDate)} – {formatDate(revenue.endDate)} +

+
+
+ {/each} +
+
+
+ {/if} + + +
+
+ +
+ +
+ + +
+ + Danger Zone + + + + +
+

+ Deleting this account will remove all associated contacts and service locations. This + action cannot be undone. +

+ +
+
+
+ + + + {#if drawerMode?.type === 'account'} + + {:else if drawerMode?.type === 'contact'} + + {:else if drawerMode?.type === 'address'} + + {/if} + + + + + {:else} +
+ + + +

Account not found

+

+ The account you're looking for doesn't exist or has been deleted. +

+ Back to Accounts +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.server.ts b/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.server.ts new file mode 100644 index 0000000..74be2fd --- /dev/null +++ b/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.server.ts @@ -0,0 +1,48 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { ACCOUNT_QUERY, type AccountQueryResult } from '$lib/graphql/queries/account'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: ACCOUNT_QUERY, + variables: { id: params.account } + }) + .catch((err) => { + console.error('Failed to fetch account:', err); + throw error(500, 'Failed to load account'); + }); + + if (!data?.account) { + throw error(404, 'Account not found'); + } + + const location = data.account.addresses.find((addr) => addr.id === params.location); + + if (!location) { + throw error(404, 'Location not found'); + } + + return { + account: data.account, + location + }; +}; diff --git a/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.svelte b/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.svelte new file mode 100644 index 0000000..9003e03 --- /dev/null +++ b/frontend/src/routes/admin/accounts/[account]/locations/[location]/+page.svelte @@ -0,0 +1,1022 @@ + + + + {location?.name || 'Service Location'} - {account?.name ?? 'Account'} - Admin - Nexus + + +
+ + {#if location && account} + + {account.name} + {#if location.isPrimary} + Primary + {/if} + + +
+ +
+
+ openDrawer('location', 'edit')} + /> +
+
+
+

Location Name

+

{locationDisplayName}

+
+
+

Status

+ {#if location.isActive} + Active + {:else} + Inactive + {/if} +
+ +
+

Labor Paid

+ {#if activeLabor} +

{formatCurrency(activeLabor.amount)}

+

+ Since {formatDate(activeLabor.startDate)} +

+ {:else} +

Not set

+ {/if} +
+
+

Address

+

+ {location.streetAddress}, {location.city}, {location.state} + {location.zipCode} +

+
+ {#if location.notes} +
+

Notes

+

{location.notes}

+
+ {/if} +
+
+ + {#if !isProjectOnly} + +
+
+ openDrawer('schedule', activeSchedule ? 'edit' : 'add')} + /> +
+
+ {#if activeSchedule} +
+

+ {activeSchedule.name || 'Service Schedule'} +

+ +
+

Service Days

+
+ + {#each [{ key: 'monday', short: 'M', full: 'Monday' }, { key: 'tuesday', short: 'T', full: 'Tuesday' }, { key: 'wednesday', short: 'W', full: 'Wednesday' }, { key: 'thursday', short: 'Th', full: 'Thursday' }] as day} +
+ {day.short} + +
+ {/each} + + {#if activeSchedule.weekendService} +
+ Wknd* + +
+ {:else} + + {#each [{ key: 'friday', short: 'F', full: 'Friday' }, { key: 'saturday', short: 'Sa', full: 'Saturday' }, { key: 'sunday', short: 'Su', full: 'Sunday' }] as day} +
+ {day.short} + +
+ {/each} + {/if} +
+ {#if activeSchedule.weekendService} +

+ *Service can be completed anytime Fri–Sun +

+ {/if} +
+ {#if activeSchedule.startDate} +

+ {#if activeSchedule.endDate} + {formatDate(activeSchedule.startDate)} – {formatDate( + activeSchedule.endDate + )} + {:else} + Since {formatDate(activeSchedule.startDate)} + {/if} +

+ {/if} + {#if activeSchedule.scheduleException} +
+

Exceptions

+

{activeSchedule.scheduleException}

+
+ {/if} + +
+ {#if hasPastSchedules} + + {/if} + {#if hasFutureSchedules} + + {/if} + +
+
+ +
+
+ {:else} +

No schedule configured yet.

+ {/if} +
+
+ + +
+
+ openDrawer('scope', activeScope ? 'edit' : 'add')} + /> +
+
+ {#if activeScope} +
+

{activeScope.name || 'Service Scope'}

+ {#if activeScope.description} +

{activeScope.description}

+ {/if} + {#each activeScope.areas as area (area.id)} +
+ + + + + + {area.name} + ({area.tasks.length} task{area.tasks.length === 1 ? '' : 's'}) + + + + + + + + +
+ {#if area.tasks.length > 0} +
    + {#each area.tasks as task (task.id)} +
  • +
    + + + + + + + {task.scopeDescription} + + {#if task.frequency !== 'DAILY'} + {task.frequency} + {/if} + {#if task.estimatedMinutes} + {task.estimatedMinutes} min + {/if} + + + + + + + + + + +
    +
    + Checklist: + {task.checklistDescription || '—'} +
    +
    + Instructions: + {task.sessionDescription || '—'} +
    +
    +
    +
  • + {/each} +
+ {/if} + +
+
+ {/each} + + + +
+ {#if hasInactiveScopes} + + {/if} + +
+
+ +
+
+ {:else} +

No scope defined yet.

+ {/if} +
+
+ {/if} + + +
+ + Danger Zone + + + + +
+

+ Deleting this location will remove it from the account. This action cannot be undone. +

+ +
+
+
+ + + + {#if drawerType === 'location'} + + {:else if drawerType === 'schedule'} + + {:else if drawerType === 'scope'} + + {:else if drawerType === 'area' && activeScope} + + {:else if drawerType === 'task' && selectedTask} + + {/if} + + + + + + + + + + + {:else} +
+ + + +

Location not found

+

+ The service location you're looking for doesn't exist or has been deleted. +

+ Back to Accounts +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/calendar/+page.server.ts b/frontend/src/routes/admin/calendar/+page.server.ts new file mode 100644 index 0000000..3cd0968 --- /dev/null +++ b/frontend/src/routes/admin/calendar/+page.server.ts @@ -0,0 +1,78 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + CALENDAR_EVENTS_QUERY, + type CalendarEventsQueryResult +} from '$lib/graphql/queries/calendar'; +import { redirect } from '@sveltejs/kit'; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { timeMin: string; timeMax: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0, 23, 59, 59, 999); + + return { + timeMin: firstDay.toISOString(), + timeMax: lastDay.toISOString() + }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const { timeMin, timeMax } = getMonthDateRange(month); + + if (!locals.cookie) { + return { + events: [], + month, + error: null + }; + } + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: CALENDAR_EVENTS_QUERY, + variables: { + filter: { + timeMin, + timeMax, + maxResults: 100 + } + } + }); + + return { + events: result.data?.calendarEvents ?? 
[], + month, + error: null + }; + } catch (err) { + console.error('Failed to fetch calendar events:', err); + return { + events: [], + month, + error: err instanceof Error ? err.message : 'Failed to load calendar events' + }; + } +}; diff --git a/frontend/src/routes/admin/calendar/+page.svelte b/frontend/src/routes/admin/calendar/+page.svelte new file mode 100644 index 0000000..346283d --- /dev/null +++ b/frontend/src/routes/admin/calendar/+page.svelte @@ -0,0 +1,525 @@ + + + + Calendar - Admin - Nexus + + +
+ + + + {#snippet actions()} + + + + + Create Event + + {/snippet} + + + +
+ +
+ + + {#if data.error} +
+
+ + + +
+

+ Failed to load calendar events +

+

+ {data.error} +

+
+
+
+ {/if} + + +
+
+ + + + +
+
+ + +
+ {#each [{ id: 'all', label: 'All' }, { id: 'upcoming', label: 'Upcoming' }, { id: 'past', label: 'Past' }] as tab (tab.id)} + {@const isActive = timeFilter === tab.id} + {@const count = tabCounts[tab.id as keyof typeof tabCounts]} + + {/each} +
+ + + {#if eventsByDate.length > 0} +
+ {#each eventsByDate as [dateKey, dateEvents] (dateKey)} +
+ +
+
+

+ {formatDateHeader(dateKey)} +

+
+
+ + +
+ {#each dateEvents as event (event.id)} + {@const eventColor = getEventColor(event.colorId)} +
+ +
+
+ +
+ {#if eventColor} + + {/if} +

+ {event.summary} +

+ {#if event.start.date} + + All Day + + {/if} +
+ + +
+ + + + + + {formatEventTime(event)} + + + + {#if event.location} + + + + + + {event.location} + + {/if} + + + {#if event.attendees && event.attendees.length > 0} + + {/if} +
+ + + {#if event.description} + + {/if} +
+ + + + + +
+
+ + + +
+ {/each} +
+
+ {/each} +
+ {:else if searchQuery} +
+ + + +

No Events Found

+

+ No events match "{searchQuery}". Try a different search term. +

+ +
+ {:else} +
+ + + +

No Events

+

No events scheduled for this month.

+ + + + + Create Event + +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/calendar/[event]/+page.server.ts b/frontend/src/routes/admin/calendar/[event]/+page.server.ts new file mode 100644 index 0000000..7960962 --- /dev/null +++ b/frontend/src/routes/admin/calendar/[event]/+page.server.ts @@ -0,0 +1,43 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { CALENDAR_EVENT_QUERY, type CalendarEventQueryResult } from '$lib/graphql/queries/calendar'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: CALENDAR_EVENT_QUERY, + variables: { + eventId: params.event + } + }); + + if (!result.data?.calendarEvent) { + throw error(404, 'Event not found'); + } + + return { + event: result.data.calendarEvent + }; + } catch (err) { + console.error('Failed to fetch calendar event:', err); + throw error(500, 'Failed to load calendar event'); + } +}; diff --git a/frontend/src/routes/admin/calendar/[event]/+page.svelte b/frontend/src/routes/admin/calendar/[event]/+page.svelte new file mode 100644 index 0000000..31ca1ac --- /dev/null +++ b/frontend/src/routes/admin/calendar/[event]/+page.svelte @@ -0,0 +1,433 @@ + + + + {event.summary} - Calendar - Admin - Nexus + + +
+ + +
+
+ + + + + +
+
+ {#if eventColor} + + {/if} +

+ {event.summary} +

+
+ {#if event.status === 'cancelled'} + + Cancelled + + {/if} +
+
+ + {#if !isEditing} +
+ + +
+ {/if} +
+ + {#if isEditing} + +
+

Edit Event

+ +
+ + + + + + + + + + + {#if editAllDay} +
+ + +
+ {:else} +
+ + +
+ {/if} + +
+ + +
+
+
+ {:else} + +
+
+ +
+

Date & Time

+

{formatDateTime(event)}

+
+ + + {#if event.location} +
+

Location

+

{event.location}

+
+ {/if} + + + {#if event.description} +
+

Description

+

{event.description}

+
+ {/if} + + + {#if event.attendees && event.attendees.length > 0} +
+

Attendees

+
    + {#each event.attendees as attendee (attendee.email)} +
  • +
    + {(attendee.displayName ?? attendee.email).charAt(0).toUpperCase()} +
    +
    +

    + {attendee.displayName ?? attendee.email} +

    + {#if attendee.displayName} +

    {attendee.email}

    + {/if} +
    + {#if attendee.responseStatus} + + {attendee.responseStatus} + + {/if} +
  • + {/each} +
+
+ {/if} + + + {#if event.htmlLink} + + {/if} + + +
+
+ {#if event.created} + Created: {new Date(event.created).toLocaleDateString()} + {/if} + {#if event.updated} + Updated: {new Date(event.updated).toLocaleDateString()} + {/if} + ID: {event.id} +
+
+
+
+ {/if} +
+
diff --git a/frontend/src/routes/admin/calendar/new/+page.server.ts b/frontend/src/routes/admin/calendar/new/+page.server.ts new file mode 100644 index 0000000..7ded62d --- /dev/null +++ b/frontend/src/routes/admin/calendar/new/+page.server.ts @@ -0,0 +1,21 @@ +import type { PageServerLoad } from './$types'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + return {}; +}; diff --git a/frontend/src/routes/admin/calendar/new/+page.svelte b/frontend/src/routes/admin/calendar/new/+page.svelte new file mode 100644 index 0000000..dcdc9a6 --- /dev/null +++ b/frontend/src/routes/admin/calendar/new/+page.svelte @@ -0,0 +1,223 @@ + + + + Create Event - Calendar - Admin - Nexus + + +
+ + +
+ + + + + +
+

+ Create Event +

+

Add a new event to the calendar

+
+
+ + +
+ {#if errorMessage} +
+

{errorMessage}

+
+ {/if} + +
+ + + + + + + + + + + {#if allDay} +
+ + +
+ {:else} +
+ + +
+ {/if} + +
+ Cancel + +
+
+
+
+
diff --git a/frontend/src/routes/admin/customers/+page.server.ts b/frontend/src/routes/admin/customers/+page.server.ts new file mode 100644 index 0000000..a2bfb54 --- /dev/null +++ b/frontend/src/routes/admin/customers/+page.server.ts @@ -0,0 +1,34 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { CUSTOMERS_QUERY, type CustomersQueryResult } from '$lib/graphql/queries/customers'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { customers: [] }; + } + + const client = createServerClient(locals.cookie); + + try { + const { data } = await client.query({ + query: CUSTOMERS_QUERY, + variables: { filter: null } + }); + return { customers: data?.customers ?? [] }; + } catch (error) { + console.error('Failed to fetch customers:', error); + return { customers: [] }; + } +}; diff --git a/frontend/src/routes/admin/customers/+page.svelte b/frontend/src/routes/admin/customers/+page.svelte new file mode 100644 index 0000000..1029ec2 --- /dev/null +++ b/frontend/src/routes/admin/customers/+page.svelte @@ -0,0 +1,263 @@ + + + + Customers - Admin - Nexus + + +
+ + + + {#snippet subtitleSnippet()} + {#if statusFilter === 'ALL'} + {totalCount} total customers + {:else} + {filteredCount} of {totalCount} customers + {/if} + {/snippet} + {#snippet actions()} + + {/snippet} + + + +
+ +
+ + + + +
+ + +
+ +
+ + +
+ + + {#if searchQuery || statusFilter !== 'ALL'} + + {/if} + + + + Showing {filteredCount} of {totalCount} + +
+
+ + + {#if filteredCustomers.length > 0} + + {:else if searchQuery || statusFilter !== 'ALL'} + +
+ + + +

No customers found

+

No customers match your current filters.

+ +
+ {:else} + +
+ + + +

No customers yet

+

Get started by adding your first customer.

+
+ {/if} +
+
+ + + + + diff --git a/frontend/src/routes/admin/customers/[customer]/+page.server.ts b/frontend/src/routes/admin/customers/[customer]/+page.server.ts new file mode 100644 index 0000000..38c8b81 --- /dev/null +++ b/frontend/src/routes/admin/customers/[customer]/+page.server.ts @@ -0,0 +1,44 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { CUSTOMER_QUERY, type CustomerQueryResult } from '$lib/graphql/queries/customer'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: CUSTOMER_QUERY, + variables: { id: params.customer } + }) + .catch((err) => { + console.error('Failed to fetch customer:', err); + throw error(500, 'Failed to load customer'); + }); + + if (!data?.customer) { + throw error(404, 'Customer not found'); + } + + return { + customer: data.customer, + contacts: data.customer.contacts, + addresses: data.customer.addresses, + accounts: data.customer.accounts + }; +}; diff --git a/frontend/src/routes/admin/customers/[customer]/+page.svelte b/frontend/src/routes/admin/customers/[customer]/+page.svelte new file mode 100644 index 0000000..5f02772 --- /dev/null +++ b/frontend/src/routes/admin/customers/[customer]/+page.svelte @@ -0,0 +1,474 @@ + + + + {customer?.name ?? 'Customer'} - Admin - Nexus + + +
+ + {#if customer} + + {customer.status} + + +
+ +
+
+ +
+
+
+

Billing Email

+

{customer.billingEmail || '—'}

+
+
+

Billing Terms

+

{customer.billingTerms || '—'}

+
+
+

Billing Address

+ {#if activeAddress} +

{activeAddress.streetAddress}

+

+ {activeAddress.city}, {activeAddress.state} + {activeAddress.zipCode} +

+ {:else} +

Not set

+ {/if} +
+
+

Start Date

+

{formatDate(customer.startDate)}

+
+
+

End Date

+

{formatDate(customer.endDate)}

+
+ {#if customer.waveCustomerId} +
+

Wave Customer

+
+ Linked + {#if customer.waveCustomerName} + {customer.waveCustomerName} + {:else} + ID: {customer.waveCustomerId} + {/if} +
+
+ {/if} +
+
+ + +
+
+ +
+
+ {#if contacts.length > 0} +
+ {#each contacts as contact (contact.id)} +
+
+ + +
+
+ + {contact.firstName} + {contact.lastName} + + {#if contact.email} +

{contact.email}

+ {/if} + {#if contact.phone} +

{contact.phone}

+ {/if} +
+ {#if contact.notes || contact.isPrimary} +
+ {#if contact.isPrimary} + Primary + {/if} + {#if contact.notes} + {contact.notes} + {/if} +
+ {/if} +
+ {/each} +
+ {:else} +

No contacts added yet.

+ {/if} +
+
+ + + {#if inactiveAddresses.length > 0} +
+
+ +
+
+
+ {#each inactiveAddresses as address (address.id)} +
+
+
+
+ {address.name || 'Previous Address'} + Inactive +
+

{address.streetAddress}

+

+ {address.city}, {address.state} + {address.zipCode} +

+
+
+
+ {/each} +
+
+
+ {/if} + + +
+ + + Accounts + ({accounts.length}) + + + + + + + + +
+ {#if accounts.length > 0} +
+ {#each accounts as account (account.id)} + +
+ + {account.name} + + {account.status} +
+ +
+ {/each} +
+ {:else} +

No accounts linked to this customer.

+ {/if} +
+
+ + +
+ + Danger Zone + + + + +
+

+ Deleting this customer will remove all associated contacts, addresses, and unlink all + accounts. This action cannot be undone. +

+ +
+
+
+ + + + {#if drawerMode?.type === 'customer' && customer} + + {:else if drawerMode?.type === 'contact' && drawerMode.mode === 'add' && customer} + + {:else if drawerMode?.type === 'contact' && drawerMode.mode === 'edit' && customer} + + {/if} + + + + + {:else} +
+ + + +

Customer not found

+

+ The customer you're looking for doesn't exist or has been deleted. +

+ Back to Customers +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/events/+page.server.ts b/frontend/src/routes/admin/events/+page.server.ts new file mode 100644 index 0000000..bc81adf --- /dev/null +++ b/frontend/src/routes/admin/events/+page.server.ts @@ -0,0 +1,46 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { RECENT_EVENTS_QUERY, type RecentEventsQueryResult } from '$lib/graphql/queries/events'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin + if (!user || user.__typename !== 'TeamProfileType' || user.role !== 'ADMIN') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { + events: [], + limit: 100, + offset: 0 + }; + } + + const limit = parseInt(url.searchParams.get('limit') ?? '100'); + const offset = parseInt(url.searchParams.get('offset') ?? '0'); + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: RECENT_EVENTS_QUERY, + variables: { limit, offset } + }); + + return { + events: result.data?.recentEvents ?? [], + limit, + offset + }; + } catch (err) { + console.error('Failed to fetch events:', err); + return { + events: [], + limit, + offset + }; + } +}; diff --git a/frontend/src/routes/admin/events/+page.svelte b/frontend/src/routes/admin/events/+page.svelte new file mode 100644 index 0000000..7234db9 --- /dev/null +++ b/frontend/src/routes/admin/events/+page.svelte @@ -0,0 +1,258 @@ + + + + Event Log - Admin - Nexus + + +
+ + + + {#snippet subtitleSnippet()} + System audit trail - {filteredEvents.length} event{filteredEvents.length !== 1 ? 's' : ''} + {#if offset > 0} + (showing {offset + 1} - {offset + events.length}) + {/if} + {/snippet} + + + +
+ + + + + {#if entityTypeFilter || eventTypeFilter} + + {/if} +
+ + + {#if filteredEvents.length > 0} +
+ {#each filteredEvents as event (event.id)} +
+
+
+ +

{event.description}

+ + +
+ + {formatEventType(event.eventType)} + + + {event.entityType.replace(/_/g, ' ')} + +
+ + + {#if event.metadata && Object.keys(event.metadata).length > 0} +
+ {#each Object.entries(event.metadata).slice(0, 3) as [key, value]} + + {key}: + {typeof value === 'string' ? value : JSON.stringify(value)} + + {/each} + {#if Object.keys(event.metadata).length > 3} + +{Object.keys(event.metadata).length - 3} more + {/if} +
+ {/if} +
+ + +
+ {#if event.actorProfile} +

{event.actorProfile.fullName}

+ {:else if event.actorType === 'system'} +

System

+ {/if} +

{formatRelativeTime(event.timestamp)}

+
+
+
+ {/each} +
+ + +
+ + + Showing {offset + 1} - {offset + events.length} + + +
+ {:else} + +
+ + + +

No events yet

+

System events will appear here as they occur.

+
+ {/if} +
+
diff --git a/frontend/src/routes/admin/invoices/+page.server.ts b/frontend/src/routes/admin/invoices/+page.server.ts new file mode 100644 index 0000000..d8fcd97 --- /dev/null +++ b/frontend/src/routes/admin/invoices/+page.server.ts @@ -0,0 +1,82 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { INVOICES_QUERY, type InvoicesQueryResult } from '$lib/graphql/queries/invoices'; +import { CUSTOMERS_QUERY, type CustomersQueryResult } from '$lib/graphql/queries/customers'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 20; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const customerId = url.searchParams.get('customerId') || null; + const status = url.searchParams.get('status') || null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + invoices: { items: [], totalCount: 0, hasNextPage: false }, + customers: [], + customerId, + status, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + // Fetch invoices and customers in parallel + const [invoicesResult, customersResult] = await Promise.all([ + client + .query({ + query: INVOICES_QUERY, + variables: { + filter: { + customerId: customerId || undefined, + status: status || undefined + }, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch invoices:', err); + return { data: null }; + }), + client + .query({ + query: CUSTOMERS_QUERY, + variables: { + filter: { isActive: true 
} + } + }) + .catch((err) => { + console.error('Failed to fetch customers:', err); + return { data: null }; + }) + ]); + + return { + invoices: invoicesResult.data?.invoices ?? { items: [], totalCount: 0, hasNextPage: false }, + customers: customersResult.data?.customers ?? [], + customerId, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/admin/invoices/+page.svelte b/frontend/src/routes/admin/invoices/+page.svelte new file mode 100644 index 0000000..a199ad7 --- /dev/null +++ b/frontend/src/routes/admin/invoices/+page.svelte @@ -0,0 +1,371 @@ + + + + Invoices - Admin - Nexus + + +
+ + + + {#snippet actions()} + + + + + Wave + + + {/snippet} + + + +
+ + +
+ + +
+
+ + + {#if invoices.length > 0} + + + + + {:else} + +
+ + + +

No invoices yet

+

Create an invoice for a customer to track billing.

+ +
+ {/if} +
+
+ + +{#if showCreateModal} +
+ +
+

Create New Invoice

+ + {#if createError} +
+ {createError} +
+ {/if} + +
{ + e.preventDefault(); + handleCreateInvoice(); + }} + > + + +
+ + +
+ +
+ + +
+
+
+
+{/if} diff --git a/frontend/src/routes/admin/invoices/[invoice]/+page.server.ts b/frontend/src/routes/admin/invoices/[invoice]/+page.server.ts new file mode 100644 index 0000000..4eebaf5 --- /dev/null +++ b/frontend/src/routes/admin/invoices/[invoice]/+page.server.ts @@ -0,0 +1,98 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + INVOICE_QUERY, + ELIGIBLE_REVENUES_QUERY, + ELIGIBLE_PROJECTS_QUERY, + type InvoiceQueryResult, + type EligibleRevenuesQueryResult, + type EligibleProjectsQueryResult +} from '$lib/graphql/queries/invoices'; +import { WAVE_INVOICE_READINESS, type WaveInvoiceReadiness } from '$lib/graphql/queries/wave'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + // Fetch the invoice + const invoiceResult = await client + .query({ + query: INVOICE_QUERY, + variables: { id: params.invoice } + }) + .catch((err) => { + console.error('Failed to fetch invoice:', err); + return { data: null }; + }); + + const invoice = invoiceResult.data?.invoice; + + if (!invoice) { + throw error(404, 'Invoice not found'); + } + + // Fetch eligible revenues, projects, and Wave readiness in parallel + const [eligibleRevenuesResult, eligibleProjectsResult, waveReadinessResult] = await Promise.all([ + client + .query({ + query: ELIGIBLE_REVENUES_QUERY, + variables: { + customerId: invoice.customerId, + dateFrom: invoice.startDate, + dateTo: invoice.endDate + } + }) + .catch((err) => { + console.error('Failed to fetch eligible revenues:', err); + 
return { data: null }; + }), + client + .query({ + query: ELIGIBLE_PROJECTS_QUERY, + variables: { + customerId: invoice.customerId, + dateFrom: invoice.startDate, + dateTo: invoice.endDate + } + }) + .catch((err) => { + console.error('Failed to fetch eligible projects:', err); + return { data: null }; + }), + // Only fetch Wave readiness if not already synced + invoice.waveInvoiceId + ? Promise.resolve({ data: null }) + : client + .query<{ waveInvoiceReadiness: WaveInvoiceReadiness }>({ + query: WAVE_INVOICE_READINESS, + variables: { invoiceId: params.invoice }, + fetchPolicy: 'network-only' + }) + .catch((err) => { + console.error('Failed to fetch Wave readiness:', err); + return { data: null }; + }) + ]); + + return { + invoice, + eligibleRevenues: eligibleRevenuesResult.data?.eligibleRevenuesForInvoice ?? [], + eligibleProjects: eligibleProjectsResult.data?.eligibleProjectsForInvoice ?? [], + waveReadiness: waveReadinessResult.data?.waveInvoiceReadiness ?? null + }; +}; diff --git a/frontend/src/routes/admin/invoices/[invoice]/+page.svelte b/frontend/src/routes/admin/invoices/[invoice]/+page.svelte new file mode 100644 index 0000000..09d824b --- /dev/null +++ b/frontend/src/routes/admin/invoices/[invoice]/+page.svelte @@ -0,0 +1,1270 @@ + + + + {invoice.customer?.name ?? 'Invoice'} - Invoices - Admin - Nexus + + +
+ + + + {invoice.status} + + + + +
+ {#if invoice.status === 'DRAFT'} + + + {:else if invoice.status === 'SENT'} + + + + {:else if invoice.status === 'OVERDUE'} + + + {:else if invoice.status === 'PAID'} + + {:else if invoice.status === 'CANCELLED'} + + + {/if} + {#if invoice.status !== 'CANCELLED' && invoice.status !== 'PAID'} + + {/if} +
+ +
+ +
+
+

Summary

+
+
+
+

Revenues

+

+ {invoice.revenueCount} +

+

{formatCurrency(invoice.revenuesTotal)}

+
+
+

Projects

+

+ {invoice.projectCount} +

+

{formatCurrency(invoice.projectsTotal)}

+
+
+

Total Amount

+

+ {formatCurrency(invoice.totalAmount)} +

+
+
+
+ + +
+
+ {#if invoice.status === 'DRAFT' && eligibleRevenues.length > 0} + (showAddRevenuesModal = true)} + /> + {:else} +

Revenues ({invoice.revenues.length})

+ {/if} +
+
+ {#if invoice.revenues.length > 0} + +
+ {#each invoice.revenues as entry (entry.id)} +
+
+
+

+ {entry.account?.name ?? 'Unknown Account'} +

+

+ {entry.revenue?.startDate ? formatDate(entry.revenue.startDate) : ''} - {entry + .revenue?.endDate + ? formatDate(entry.revenue.endDate) + : 'Ongoing'} +

+
+
+ + {formatCurrency(entry.amount)} + + {#if invoice.status === 'DRAFT'} + + {/if} +
+
+
+ {/each} +
+ + + + {:else} +
+ {#if invoice.status === 'DRAFT'} + {#if eligibleRevenues.length > 0} + No revenues added yet. + + {:else} +

No eligible revenues for this customer and invoice period.

+ + Go to customer page to manage accounts + + {/if} + {:else} + No revenues in this invoice. + {/if} +
+ {/if} +
+
+ + +
+
+ {#if invoice.status === 'DRAFT' && eligibleProjects.length > 0} + (showAddProjectsModal = true)} + /> + {:else} +

Projects ({invoice.projects.length})

+ {/if} +
+
+ {#if invoice.projects.length > 0} + +
+ {#each invoice.projects as entry (entry.id)} +
+
+
+

{entry.project?.name ?? 'Unknown'}

+

+ {formatDate(entry.project?.date ?? '')} + {#if entry.account?.name} + · {entry.account.name} + {/if} +

+ {#if entry.project?.formattedAddress} +

+ {entry.project.formattedAddress} +

+ {/if} +
+
+ + {formatCurrency(entry.amount)} + + {#if invoice.status === 'DRAFT'} + + {/if} +
+
+
+ {/each} +
+ + + + {:else} +
+ {#if invoice.status === 'DRAFT'} + {#if eligibleProjects.length > 0} + No projects added yet. + + {:else} +

No eligible projects for this customer and invoice period.

+ + View all projects + + {/if} + {:else} + No projects in this invoice. + {/if} +
+ {/if} +
+
+ + +
+
+
+

Wave Accounting

+ + Integration + +
+
+
+ {#if invoice.waveInvoiceId} + +
+
+
+ + + +
+
+

Synced to Wave

+

+ {#if waveInvoice?.invoiceNumber} + Wave #{waveInvoice.invoiceNumber} + {:else} + Loading... + {/if} +

+
+
+ {#if waveInvoice?.pdfUrl} + + + + + Download PDF + + {/if} +
+ {:else if invoice.status === 'CANCELLED'} + +
+
+ + + +
+
+

Not synced

+

Cancelled invoices cannot be synced to Wave

+
+
+ {:else if invoice.status !== 'DRAFT'} + +
+
+ + + +
+
+

Not synced

+

+ This invoice was not synced to Wave before being sent +

+
+
+ {:else if waveError} + +
+
+
+ + + +
+
+

Wave Sync Failed

+

{waveError}

+
+
+ +
+ {:else if waveReadiness} + + {#if waveReadiness.ready} + +
+
+
+ + + +
+
+

Ready to sync

+

+ {waveReadiness.readyItemCount} items totaling {formatCurrency( + parseFloat(waveReadiness.totalAmount) + )} +

+
+
+ +
+ {:else} + +
+
+
+ + + +
+
+

Not ready to sync

+

Please resolve the following issues:

+
+
+
    + {#each waveReadiness.issues as issue} +
  • + + + {issue} + + {#if issue.includes('Customer is not linked')} + + {/if} +
  • + {/each} +
+ {#if waveReadiness.readyItemCount > 0} +

+ {waveReadiness.readyItemCount} of {waveReadiness.readyItemCount + + waveReadiness.missingWaveLinkCount} items are linked to Wave products +

+ {/if} +
+ {/if} + {/if} +
+
+
+
+
+ + + (showAddRevenuesModal = false)} +> + {#if eligibleRevenues.length > 0} +
+ {#each eligibleRevenues as revenue (revenue.revenueId)} +
+
+

+ {revenue.accountName} +

+

+ {formatCurrency(revenue.amount)}/month +

+
+ +
+ {/each} +
+ {:else} +
+

No eligible revenues available for this invoice period.

+ + + + + Go to Customer Page + +
+ {/if} + + {#snippet footer()} + {#if eligibleRevenues.length > 0} +
+ + +
+ {:else} + + {/if} + {/snippet} +
+ + + (showAddProjectsModal = false)} +> + {#if eligibleProjects.length > 0} +
+ {#each eligibleProjects as project (project.projectId)} +
+
+

+ {project.name} +

+

+ {formatDate(project.date)} - {formatCurrency(project.amount)} +

+
+ +
+ {/each} +
+ {:else} +
+

No eligible projects available for this invoice period.

+ + + + + View Projects + +
+ {/if} + + {#snippet footer()} + {#if eligibleProjects.length > 0} +
+ + +
+ {:else} + + {/if} + {/snippet} +
+ + +{#if showDeleteConfirm} +
+ +
+

Delete Invoice?

+

+ Are you sure you want to delete this invoice? This action cannot be undone. +

+
+ + +
+
+
+{/if} + + + (showLinkCustomerModal = false)} +> +

+ Select the Wave customer that corresponds to {invoice.customer?.name}: +

+ + + {#snippet footer()} +
+ + +
+ {/snippet} +
diff --git a/frontend/src/routes/admin/invoices/wave/+page.server.ts b/frontend/src/routes/admin/invoices/wave/+page.server.ts new file mode 100644 index 0000000..6b62c0c --- /dev/null +++ b/frontend/src/routes/admin/invoices/wave/+page.server.ts @@ -0,0 +1,73 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + WAVE_INVOICES, + WAVE_PRODUCTS, + WAVE_CUSTOMERS, + type WaveInvoiceConnection, + type WaveProduct, + type WaveCustomer +} from '$lib/graphql/queries/wave'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + // Get pagination params + const page = parseInt(url.searchParams.get('page') || '1'); + const pageSize = 20; + + // Fetch all Wave data in parallel + const [invoicesResult, productsResult, customersResult] = await Promise.all([ + client + .query<{ waveInvoices: WaveInvoiceConnection }>({ + query: WAVE_INVOICES, + variables: { page, pageSize } + }) + .catch((err) => { + console.error('Failed to fetch Wave invoices:', err); + return { data: null }; + }), + client + .query<{ waveProducts: WaveProduct[] }>({ + query: WAVE_PRODUCTS + }) + .catch((err) => { + console.error('Failed to fetch Wave products:', err); + return { data: null }; + }), + client + .query<{ waveCustomers: WaveCustomer[] }>({ + query: WAVE_CUSTOMERS + }) + .catch((err) => { + console.error('Failed to fetch Wave customers:', err); + return { data: null }; + }) + ]); + + return { + invoices: invoicesResult.data?.waveInvoices ?? 
{ + pageInfo: { currentPage: 1, totalPages: 1, totalCount: 0 }, + invoices: [] + }, + products: productsResult.data?.waveProducts ?? [], + customers: customersResult.data?.waveCustomers ?? [], + currentPage: page + }; +}; diff --git a/frontend/src/routes/admin/invoices/wave/+page.svelte b/frontend/src/routes/admin/invoices/wave/+page.svelte new file mode 100644 index 0000000..d649d83 --- /dev/null +++ b/frontend/src/routes/admin/invoices/wave/+page.svelte @@ -0,0 +1,1270 @@ + + + + Wave Management | Nexus + + +
+ + + + + + {#if actionError} +
+
+ + + +
+

{actionError}

+
+ +
+
+ {/if} + + +
+ +
+ {tabs[activeTabIndex].label} + + {getTabCount(tabs[activeTabIndex].id)} + +
+ +
+ + + + + + {#if activeTab === 'invoices'} + + {#if invoices.invoices.length === 0} +
+ + + +

No invoices in Wave

+

+ Create invoices from the Nexus invoices page to sync them to Wave. +

+
+ {:else} +
+ {#each invoices.invoices as invoice (invoice.id)} +
+ + +
+ #{invoice.invoiceNumber} + + {invoice.status} + +
+ + +

{invoice.customer.name}

+ + +
+ Issued: {formatDate(invoice.invoiceDate)} + {#if invoice.dueDate} + Due: {formatDate(invoice.dueDate)} + {/if} +
+ + +
+
+ {#if parseFloat(invoice.amountDue.value.replace(/,/g, '')) > 0} + Due: + + {formatMoney(invoice.amountDue.value, invoice.amountDue.currencySymbol)} + + {:else} + Paid + {/if} +
+ + {formatMoney(invoice.total.value, invoice.total.currencySymbol)} + +
+
+ + + {#if invoice.status === 'DRAFT' || invoice.status === 'SAVED'} +
+ {#if invoice.status === 'DRAFT'} + + + {:else if invoice.status === 'SAVED'} + + {/if} +
+ {/if} +
+ {/each} +
+ + + {#if invoices.pageInfo.totalPages > 1} +
+

+ Page {invoices.pageInfo.currentPage} of {invoices.pageInfo.totalPages} + ({invoices.pageInfo.totalCount} total) +

+ +
+ {/if} + {/if} + {:else if activeTab === 'products'} + +
+ + +
+ + {#if activeProducts.length === 0 && !showArchivedProducts} +
+ + + +

No products in Wave

+

Create a product to start using it on invoices.

+ +
+ {:else} +
+ {#each showArchivedProducts ? products : activeProducts as product (product.id)} +
+ +
+

{product.name}

+ {#if product.isArchived} + + Archived + + {/if} +
+ + + {#if product.description} +

{product.description}

+ {:else} +

No description

+ {/if} + + +
+ + ${product.unitPrice.toLocaleString('en-US', { + minimumFractionDigits: 2, + maximumFractionDigits: 2 + })} + + {#if product.isSold} + + For Sale + + {:else} + + Expense + + {/if} +
+ + + {#if !product.isArchived} +
+ + +
+ {/if} +
+ {/each} +
+ {/if} + {:else if activeTab === 'customers'} + + {#if customers.length === 0} +
+ + + +

No customers in Wave

+

+ Link Nexus customers to Wave from the customer edit page. +

+
+ {:else} +
+ {#each customers as customer (customer.id)} +
+ +

{customer.name}

+ + + {#if customer.email} +

{customer.email}

+ {:else} +

No email

+ {/if} + + +
+ Currency + + {customer.currencyCode || 'USD'} + +
+ + +
+ + +
+
+ {/each} +
+ {/if} + {/if} +
+
+ + +{#if showCustomerPanel} + + + + + +{/if} + + +{#if showProductPanel} + + + + + +{/if} diff --git a/frontend/src/routes/admin/invoices/wave/[id]/+page.server.ts b/frontend/src/routes/admin/invoices/wave/[id]/+page.server.ts new file mode 100644 index 0000000..13a22f5 --- /dev/null +++ b/frontend/src/routes/admin/invoices/wave/[id]/+page.server.ts @@ -0,0 +1,42 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { WAVE_INVOICE, type WaveInvoice } from '$lib/graphql/queries/wave'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + // Fetch the invoice + const result = await client + .query<{ waveInvoice: WaveInvoice }>({ + query: WAVE_INVOICE, + variables: { invoiceId: params.id } + }) + .catch((err) => { + console.error('Failed to fetch Wave invoice:', err); + return { data: null }; + }); + + if (!result.data?.waveInvoice) { + throw error(404, 'Invoice not found'); + } + + return { + invoice: result.data.waveInvoice + }; +}; diff --git a/frontend/src/routes/admin/invoices/wave/[id]/+page.svelte b/frontend/src/routes/admin/invoices/wave/[id]/+page.svelte new file mode 100644 index 0000000..564880f --- /dev/null +++ b/frontend/src/routes/admin/invoices/wave/[id]/+page.svelte @@ -0,0 +1,672 @@ + + + + Invoice #{invoice.invoiceNumber} | Wave | Nexus + + + +
+ + + + {#snippet actions()} + {#if invoice.pdfUrl} + + + + + + PDF + + {/if} + {/snippet} + + + + {#if actionError} +
+
+ + + +
+

{actionError}

+
+ +
+
+ {/if} + + +
+
+
+
+ Total +

+ {formatMoney(invoice.total.value, invoice.total.currencySymbol)} +

+
+
+ + {invoice.status} + + {#if parseFloat(invoice.amountDue.value.replace(/,/g, '')) > 0} +

+ Due: + + {formatMoney(invoice.amountDue.value, invoice.amountDue.currencySymbol)} + +

+ {:else} +

+ Paid in full +

+ {/if} +
+
+
+ Issued: {formatDate(invoice.invoiceDate)} + {#if invoice.dueDate} + Due: {formatDate(invoice.dueDate)} + {/if} +
+
+
+ +
+ +
+ + + + +
+

Customer

+

{invoice.customer.name}

+
+ + +
+

Line Items ({invoice.items.length})

+ + {#if invoice.items.length === 0} +

No line items

+ {:else} + + + + +
+ {#each invoice.items as item} +
+

+ {item.description} +

+
+ {item.quantity} × {formatLineAmount(item.unitPrice)} + {formatMoney(item.total.value, item.total.currencySymbol)} +
+
+ {/each} +
+ + +
+ Subtotal + {formatMoney(invoice.subtotal.value, invoice.subtotal.currencySymbol)} +
+ {/if} +
+
+ + + +
+
+
+ + +{#if invoice.status === 'DRAFT' || invoice.status === 'SAVED'} +
+
+ {#if invoice.status === 'DRAFT'} + + + {:else if invoice.status === 'SAVED'} + + {/if} +
+
+{/if} diff --git a/frontend/src/routes/admin/notifications/+page.server.ts b/frontend/src/routes/admin/notifications/+page.server.ts new file mode 100644 index 0000000..10470ed --- /dev/null +++ b/frontend/src/routes/admin/notifications/+page.server.ts @@ -0,0 +1,50 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + NOTIFICATION_RULES_QUERY, + type NotificationRulesQueryResult +} from '$lib/graphql/queries/notifications'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin + if (!user || user.__typename !== 'TeamProfileType' || user.role !== 'ADMIN') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { + rules: [], + teamProfiles: [] + }; + } + + const client = createServerClient(locals.cookie); + + try { + const [rulesResult, teamResult] = await Promise.all([ + client.query({ + query: NOTIFICATION_RULES_QUERY + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch(() => ({ data: null })) + ]); + + return { + rules: rulesResult.data?.notificationRules ?? [], + teamProfiles: teamResult.data?.teamProfiles ?? [] + }; + } catch (err) { + console.error('Failed to fetch notification rules:', err); + return { + rules: [], + teamProfiles: [] + }; + } +}; diff --git a/frontend/src/routes/admin/notifications/+page.svelte b/frontend/src/routes/admin/notifications/+page.svelte new file mode 100644 index 0000000..d3f400d --- /dev/null +++ b/frontend/src/routes/admin/notifications/+page.svelte @@ -0,0 +1,593 @@ + + + + Notification Rules - Admin - Nexus + + +
+ + + + {#snippet actions()} + + {/snippet} + + + + {#if rules.length > 0} +
+ {#each rules as rule (rule.id)} +
+
+
+
+

{rule.name}

+ + {rule.isActive ? 'Active' : 'Inactive'} + +
+ {#if rule.description} +

{rule.description}

+ {/if} + + +
+ + {#each (Array.isArray(rule.eventTypes) ? rule.eventTypes : []).slice(0, 3) as eventType} + + {eventType} + + {/each} + {#if Array.isArray(rule.eventTypes) && rule.eventTypes.length > 3} + + +{rule.eventTypes.length - 3} more + + {/if} + + + {#each Array.isArray(rule.channels) ? rule.channels : [] as channel} + + {channel} + + {/each} +
+
+ + +
+ + + +
+
+
+ {/each} +
+ {:else} + +
+ + + +

No notification rules

+

+ Create a rule to start sending notifications for events. +

+ +
+ {/if} +
+
+ + +{#if showModal} +
+ +
+

+ {editingRule ? 'Edit Rule' : 'Create Rule'} +

+ + {#if error} +
+ {error} +
+ {/if} + +
{ + e.preventDefault(); + handleSubmit(); + }} + > + + + + + + + +
+ Event Types * +
+ {#each eventTypeOptions as eventType} + + {/each} +
+
+ + +
+ Channels * +
+ {#each channelOptions as channel} + + {/each} +
+
+ + +
+ Target Roles (optional) +
+ {#each roleOptions as role} + + {/each} +
+
+ + + + + + + + + {#if teamProfiles.length > 0} +
+ + Specific Team Members (optional) + +
+ {#each teamProfiles as profile} + + {/each} +
+
+ {/if} + +
+ + +
+
+
+
+{/if} diff --git a/frontend/src/routes/admin/profiles/+page.server.ts b/frontend/src/routes/admin/profiles/+page.server.ts new file mode 100644 index 0000000..9020364 --- /dev/null +++ b/frontend/src/routes/admin/profiles/+page.server.ts @@ -0,0 +1,56 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { + CUSTOMER_PROFILES_QUERY, + type CustomerProfilesQueryResult +} from '$lib/graphql/queries/customerProfile'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Get active tab from URL params (default to 'team') + const tab = url.searchParams.get('tab') || 'team'; + + if (!locals.cookie) { + return { teamProfiles: [], customerProfiles: [], tab }; + } + + const client = createServerClient(locals.cookie); + + // Fetch both profile types in parallel + const [teamResult, customerResult] = await Promise.all([ + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }), + client + .query({ + query: CUSTOMER_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch customer profiles:', err); + return { data: null }; + }) + ]); + + return { + teamProfiles: teamResult.data?.teamProfiles ?? [], + customerProfiles: customerResult.data?.customerProfiles ?? 
[], + tab + }; +}; diff --git a/frontend/src/routes/admin/profiles/+page.svelte b/frontend/src/routes/admin/profiles/+page.svelte new file mode 100644 index 0000000..0058e18 --- /dev/null +++ b/frontend/src/routes/admin/profiles/+page.svelte @@ -0,0 +1,470 @@ + + + + Profiles - Admin - Nexus + + +
+ + + + {#snippet subtitleSnippet()} + {#if activeTab === 'team'} + {#if statusFilter === 'ALL' && roleFilter === 'ALL'} + {teamTotal} team member{teamTotal !== 1 ? 's' : ''} + {:else} + {teamFiltered} of {teamTotal} team members + {/if} + {:else if statusFilter === 'ALL'} + {customerTotal} customer profile{customerTotal !== 1 ? 's' : ''} + {:else} + {customerFiltered} of {customerTotal} customer profiles + {/if} + {/snippet} + {#snippet actions()} + + {/snippet} + + + +
+ +
+ + +
+ +
+ + + + +
+ + +
+ + + + + {#if activeTab === 'team'} + + {/if} + + + {#if searchQuery || statusFilter !== 'ALL' || (activeTab === 'team' && roleFilter !== 'ALL')} + + {/if} + + + + {#if activeTab === 'team'} + Showing {teamFiltered} of {teamTotal} + {:else} + Showing {customerFiltered} of {customerTotal} + {/if} + +
+
+ + + {#if activeTab === 'team'} + {#if filteredTeamProfiles.length > 0} + + {:else if searchQuery || statusFilter !== 'ALL' || roleFilter !== 'ALL'} + +
+ + + +

No profiles found

+

No team members match your current filters.

+ +
+ {:else} + +
+ + + +

No team profiles

+

Create a profile by linking a Kratos identity.

+ +
+ {/if} + {/if} + + + {#if activeTab === 'customer'} + {#if filteredCustomerProfiles.length > 0} + + {:else if searchQuery || statusFilter !== 'ALL'} + +
+ + + +

No profiles found

+

No customer profiles match your current filters.

+ +
+ {:else} + +
+ + + +

No customer profiles

+

Create a profile by linking a Kratos identity.

+ +
+ {/if} + {/if} +
+ + + + {#if activeTab === 'team'} + + {:else} + + {/if} + +
diff --git a/frontend/src/routes/admin/profiles/[profile]/+page.server.ts b/frontend/src/routes/admin/profiles/[profile]/+page.server.ts new file mode 100644 index 0000000..7af822f --- /dev/null +++ b/frontend/src/routes/admin/profiles/[profile]/+page.server.ts @@ -0,0 +1,41 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { TEAM_PROFILE_QUERY, type TeamProfileQueryResult } from '$lib/graphql/queries/team'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: TEAM_PROFILE_QUERY, + variables: { id: params.profile } + }) + .catch((err) => { + console.error('Failed to fetch team profile:', err); + throw error(500, 'Failed to load team profile'); + }); + + if (!data?.teamProfile) { + throw error(404, 'Team profile not found'); + } + + return { + profile: data.teamProfile + }; +}; diff --git a/frontend/src/routes/admin/profiles/[profile]/+page.svelte b/frontend/src/routes/admin/profiles/[profile]/+page.svelte new file mode 100644 index 0000000..2d17fc2 --- /dev/null +++ b/frontend/src/routes/admin/profiles/[profile]/+page.svelte @@ -0,0 +1,213 @@ + + + + {profile?.fullName ?? 'Team Profile'} - Admin - Nexus + + +
+ + {#if profile} + + {profile.status} + + +
+ +
+
+ +
+
+
+

First Name

+

{profile.firstName}

+
+
+

Last Name

+

{profile.lastName}

+
+
+

Email

+

{profile.email || '—'}

+
+
+

Phone

+

{profile.phone || '—'}

+
+
+
+ + +
+
+ +
+
+
+

Role

+ + {formatRole(profile.role)} + +
+
+

Status

+ {profile.status} +
+
+
+ + +
+
+ +
+
+ {#if profile.notes} +

{profile.notes}

+ {:else} +

No notes

+ {/if} +
+
+ + +
+
+ +
+
+
+

Kratos Identity ID

+

{profile.id}

+
+
+

Created

+

{formatDate(profile.createdAt)}

+
+
+

Last Updated

+

{formatDate(profile.updatedAt)}

+
+
+
+
+ + + + + + {:else} +
+ + + +

Profile not found

+

+ The team profile you're looking for doesn't exist or has been deleted. +

+ Back to Profiles +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/profiles/customer/[profile]/+page.server.ts b/frontend/src/routes/admin/profiles/customer/[profile]/+page.server.ts new file mode 100644 index 0000000..b7c7796 --- /dev/null +++ b/frontend/src/routes/admin/profiles/customer/[profile]/+page.server.ts @@ -0,0 +1,44 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { + CUSTOMER_PROFILE_QUERY, + type CustomerProfileQueryResult +} from '$lib/graphql/queries/customerProfile'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: CUSTOMER_PROFILE_QUERY, + variables: { id: params.profile } + }) + .catch((err) => { + console.error('Failed to fetch customer profile:', err); + throw error(500, 'Failed to load customer profile'); + }); + + if (!data?.customerProfile) { + throw error(404, 'Customer profile not found'); + } + + return { + profile: data.customerProfile + }; +}; diff --git a/frontend/src/routes/admin/profiles/customer/[profile]/+page.svelte b/frontend/src/routes/admin/profiles/customer/[profile]/+page.svelte new file mode 100644 index 0000000..5fb71fc --- /dev/null +++ b/frontend/src/routes/admin/profiles/customer/[profile]/+page.svelte @@ -0,0 +1,216 @@ + + + + {profile?.fullName ?? 'Customer Profile'} - Admin - Nexus + + +
+ + {#if profile} + + {profile.status} + + +
+ +
+
+ +
+
+
+

First Name

+

{profile.firstName}

+
+
+

Last Name

+

{profile.lastName}

+
+
+

Email

+

{profile.email || '—'}

+
+
+

Phone

+

{profile.phone || '—'}

+
+
+
+ + +
+
+ +
+
+
+

Status

+ {profile.status} +
+
+
+ + + {#if profile.customers && profile.customers.length > 0} +
+
+ +
+
+
+ {#each profile.customers as customer} + + {customer.name} + + + + + {/each} +
+
+
+ {/if} + + +
+
+ +
+
+ {#if profile.notes} +

{profile.notes}

+ {:else} +

No notes

+ {/if} +
+
+ + +
+
+ +
+
+
+

Kratos Identity ID

+

{profile.id}

+
+
+

Created

+

{formatDate(profile.createdAt)}

+
+
+

Last Updated

+

{formatDate(profile.updatedAt)}

+
+
+
+
+ + + + + + {:else} +
+ + + +

Profile not found

+

+ The customer profile you're looking for doesn't exist or has been deleted. +

+ Back to Profiles +
+ {/if} +
+
diff --git a/frontend/src/routes/admin/projects/+page.server.ts b/frontend/src/routes/admin/projects/+page.server.ts new file mode 100644 index 0000000..5e55834 --- /dev/null +++ b/frontend/src/routes/admin/projects/+page.server.ts @@ -0,0 +1,115 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + PROJECTS_QUERY, + PROJECT_STATUS_COUNTS_QUERY, + type ProjectsQueryResult, + type ProjectStatusCountsQueryResult, + type WorkStatus +} from '$lib/graphql/queries/projects'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 20; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { dateFrom: string; dateTo: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0); + + const dateFrom = firstDay.toISOString().split('T')[0]; + const dateTo = lastDay.toISOString().split('T')[0]; + + return { dateFrom, dateTo }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const statusParam = url.searchParams.get('status'); + const status: WorkStatus | null = statusParam as WorkStatus | null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + + const { dateFrom, dateTo } = getMonthDateRange(month); + const offset = (page - 1) * limit; + 
+ if (!locals.cookie) { + return { + projects: { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: { scheduled: 0, inProgress: 0, completed: 0, cancelled: 0 }, + month, + status, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + // Fetch both projects and status counts in parallel + const [projectsResult, countsResult] = await Promise.all([ + client + .query({ + query: PROJECTS_QUERY, + variables: { + filter: { + dateFrom, + dateTo, + status + }, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch projects:', err); + return { data: null }; + }), + client + .query({ + query: PROJECT_STATUS_COUNTS_QUERY, + variables: { + dateFrom, + dateTo + } + }) + .catch((err) => { + console.error('Failed to fetch status counts:', err); + return { data: null }; + }) + ]); + + return { + projects: projectsResult.data?.projects ?? { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: countsResult.data?.projectStatusCounts ?? { + scheduled: 0, + inProgress: 0, + completed: 0, + cancelled: 0 + }, + month, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/admin/projects/+page.svelte b/frontend/src/routes/admin/projects/+page.svelte new file mode 100644 index 0000000..d61d903 --- /dev/null +++ b/frontend/src/routes/admin/projects/+page.svelte @@ -0,0 +1,316 @@ + + + + Projects - Admin - Nexus + + +
+ + + + {#snippet actions()} + + {/snippet} + + + +
+ + +
+ + +
+
+ + + + +
+
+ + + {#if filteredProjects.length > 0} + + + + + {:else if searchQuery} + +
+ + + +

No projects found

+

No projects match your search.

+ +
+ {:else if data.projects.totalCount === 0} + +
+ + + +

No projects

+

+ {#if data.status} + No {formatStatusLabel(data.status).toLowerCase()} projects for this month. + {:else} + No projects scheduled for this month. + {/if} +

+
+ {/if} +
+
+ + +{#if showCreateModal} +
e.target === e.currentTarget && (showCreateModal = false)} + onkeydown={(e) => e.key === 'Escape' && (showCreateModal = false)} + role="dialog" + aria-modal="true" + tabindex="-1" + > +
+
+

Create Project

+ +
+ (showCreateModal = false)} + /> +
+
+{/if} diff --git a/frontend/src/routes/admin/projects/[project]/+page.server.ts b/frontend/src/routes/admin/projects/[project]/+page.server.ts new file mode 100644 index 0000000..5c6b968 --- /dev/null +++ b/frontend/src/routes/admin/projects/[project]/+page.server.ts @@ -0,0 +1,77 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { PROJECT_QUERY, type ProjectQueryResult } from '$lib/graphql/queries/projects'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { + LATEST_PROJECT_SESSION_QUERY, + type LatestProjectSessionQueryResult +} from '$lib/graphql/queries/session'; +import { WAVE_PRODUCTS, type WaveProduct } from '$lib/graphql/queries/wave'; +import { error, redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const [projectResult, teamResult, sessionResult, waveProductsResult] = await Promise.all([ + client + .query({ + query: PROJECT_QUERY, + variables: { id: params.project } + }) + .catch((err) => { + console.error('Failed to fetch project:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }), + client + .query({ + query: LATEST_PROJECT_SESSION_QUERY, + variables: { projectId: params.project } + }) + .catch((err) => { + console.error('Failed to fetch session:', err); + return { data: null }; + }), + client + .query<{ waveProducts: WaveProduct[] }>({ + query: 
WAVE_PRODUCTS + }) + .catch((err) => { + console.error('Failed to fetch wave products:', err); + return { data: null }; + }) + ]); + + if (!projectResult.data?.project) { + throw error(404, 'Project not found'); + } + + return { + project: projectResult.data.project, + teamProfiles: teamResult.data?.teamProfiles ?? [], + session: sessionResult.data?.latestProjectSession ?? null, + waveProducts: waveProductsResult.data?.waveProducts ?? [] + }; +}; diff --git a/frontend/src/routes/admin/projects/[project]/+page.svelte b/frontend/src/routes/admin/projects/[project]/+page.svelte new file mode 100644 index 0000000..47b2579 --- /dev/null +++ b/frontend/src/routes/admin/projects/[project]/+page.svelte @@ -0,0 +1,1768 @@ + + + + {project.name} - Projects - Admin - Nexus + + +
+ + +
+ + {formatStatusLabel(project.status)} + + {#if isDispatched} + + Dispatched + + {/if} + {#if isAssigned} + + Assigned + + {/if} + {formatDate(project.date)} +
+
+ +
+ +
+
+ +
+
+
+
Date
+
{formatDate(project.date)}
+
+
+
Status
+
+ + {formatStatusLabel(project.status)} + +
+
+
+
Labor
+
{formatCurrency(project.labor)}
+
+
+
Amount
+
+ {#if project.amount} +
+ {formatCurrency(project.amount)} + {#if project.waveServiceId} + Wave + {#if project.waveProductName} + ({project.waveProductName}) + {/if} + {/if} +
+ {:else} + Not set + {/if} +
+
+
+
Calendar Event
+
+ {#if project.calendarEventId} + + View Event → + + {:else} + + {/if} +
+
+
+
+ + +
+
+

Account & Location

+
+
+
+
Account
+
+ {#if project.accountAddress?.account} + + {project.accountAddress.account.name} + + {:else} + No account linked + {/if} +
+
+ {#if project.customer} + + {/if} +
+
Location
+
+ {#if project.accountAddress} + + {#if project.accountAddress.name} + {project.accountAddress.name} - + {/if} + {project.formattedAddress} + + {:else if project.formattedAddress} + {project.formattedAddress} + {:else} + No location specified + {/if} +
+
+
+
+ + + {#if project.notes} +
+
+

Project Notes

+
+
+

{project.notes}

+
+
+ {/if} + + +
+
+ {#if isScopeLocked} +

Scope

+ {:else} + openScopeDrawer(activeScope ? 'edit' : 'add')} + /> + {/if} +
+
+ {#if activeScope} +
+ {#if activeScope.name} +

{activeScope.name}

+ {/if} + {#if activeScope.description} +

{activeScope.description}

+ {/if} + {#each activeScope.categories as category (category.id)} +
+ + + + + + {category.name} + ({category.tasks.length} task{category.tasks.length === 1 + ? '' + : 's'}) + + {#if !isScopeLocked} + + + + + + + {/if} + +
+ {#if category.tasks.length > 0} +
    + {#each category.tasks as task (task.id)} +
  • +
    + + + + + + + {task.scopeDescription} + {#if task.estimatedMinutes} + {task.estimatedMinutes} min + {/if} + + + {#if !isScopeLocked} + + + + + + + {/if} + +
    +
    + Checklist: + {task.checklistDescription || '—'} +
    +
    + Instructions: + {task.sessionDescription || '—'} +
    +
    +
    +
  • + {/each} +
+ {/if} + {#if !isScopeLocked} + + {/if} +
+
+ {/each} + {#if !isScopeLocked} + + +
+ +
+ {/if} +
+ {:else} +

No scope defined yet.

+ {/if} +
+
+ + +
+
+
+

Team Members

+
+ {#if teamMembers.length > 0} + + {/if} + {#if !isDispatched} + + + {:else} + +
+ + {#if assignMenuOpen} +
+ +
+

+ Assign Team Members +

+ {#each availableTeamProfiles.filter((p) => p.id !== config.dispatch.profileId) as profile (profile.id)} + {@const alreadyAssigned = project.teamMembers.some( + (m) => m.teamProfileId === profile.id + )} + {@const isSelected = selectedTeamMembers.has(profile.id)} + + {/each} + {#if availableTeamProfiles.filter((p) => p.id !== config.dispatch.profileId).length === 0} +

+ No team members available +

+ {/if} + {#if selectedTeamMembers.size > 0} +
+ + {/if} +
+ {#if teamMembers.length > 0} +

+ This will also remove {teamMembers.length} assigned team member{teamMembers.length > + 1 + ? 's' + : ''} +

+ {/if} + +
+
+ {/if} +
+ {/if} +
+
+
+ {#if teamMembers.length > 0} +
+
    + {#each teamMembers as member (member.id)} +
  • +
    +
    +
    + + + +
    +
    +

    + {member.teamProfile?.fullName ?? 'Unknown'} +

    + {#if member.teamProfile?.role} +

    + {member.teamProfile.role === 'TEAM_LEADER' + ? 'Team Leader' + : 'Team Member'} +

    + {/if} +
    +
    + +
    +
  • + {/each} +
+
+ {:else if isDispatched} +

Dispatched. Ready to assign team members.

+ {:else} +

Dispatch first to assign team members.

+ {/if} +
+ + +
+
+

Work Session

+
+
+ {#if session} + + ({ id: p.id, fullName: p.fullName })) ?? + []} + areas={sessionAreas} + tasks={allTasks} + {selectedTaskIds} + {completedTaskIds} + {completedTasksByArea} + {readyToSubmitByArea} + {availableTasksCount} + isSubmitting={isClosingSession} + isReverting={isRevertingSession} + {submittingTaskId} + {removingTaskId} + onClose={handleCloseSession} + onRevert={handleRevertSession} + onToggleTask={handleToggleTask} + onSubmitTask={handleSubmitTask} + onRemoveTask={handleRemoveTask} + onRemoveCompletedTask={handleRemoveCompletedTask} + onSubmitAllTasks={handleSubmitAllTasks} + onClearSelection={handleClearSelection} + onAddNote={handleAddNote} + onUpdateNote={handleUpdateNote} + onDeleteNote={handleDeleteNote} + onUploadPhoto={handleUploadPhoto} + onUploadVideo={handleUploadVideo} + onUpdatePhoto={handleUpdatePhoto} + onUpdateVideo={handleUpdateVideo} + onDeletePhoto={handleDeletePhoto} + onDeleteVideo={handleDeleteVideo} + {getTeamMemberName} + /> + {:else if canStartSession} + +
+

+ Start a work session to track time, complete tasks, and capture notes/photos. +

+ +
+ {:else if project.status === 'IN_PROGRESS'} +

+ This project has an active session but it was not loaded. Try refreshing the page. +

+ {:else if project.status === 'COMPLETED'} +

+ This project has been completed. View the session history for details. +

+ {:else if !isAssigned} +

Assign team members to enable work sessions.

+ {:else if !activeScope} +

+ No active scope found for this project. Configure a scope to enable work sessions. +

+ {:else} +

+ Work session is not available for this project status. +

+ {/if} +
+
+ + +
+ + Danger Zone + + + + +
+

+ Deleting this project will permanently remove it and all associated data. This action + cannot be undone. +

+ +
+
+
+ + + + {#if drawerMode === 'project'} + + {:else if drawerMode === 'scope'} + + {:else if drawerMode === 'category' && activeScope} + + {:else if drawerMode === 'task' && selectedTask} + + {/if} + + + + + + + + + +
+
diff --git a/frontend/src/routes/admin/reports/+page.server.ts b/frontend/src/routes/admin/reports/+page.server.ts new file mode 100644 index 0000000..4766d8d --- /dev/null +++ b/frontend/src/routes/admin/reports/+page.server.ts @@ -0,0 +1,79 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { REPORTS_QUERY, type ReportsQueryResult } from '$lib/graphql/queries/reports'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 20; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const teamProfileId = url.searchParams.get('teamProfileId') || null; + const status = url.searchParams.get('status') || null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + reports: { items: [], totalCount: 0, hasNextPage: false }, + teamProfiles: [], + teamProfileId, + status, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + // Fetch reports and team profiles in parallel + const [reportsResult, profilesResult] = await Promise.all([ + client + .query({ + query: REPORTS_QUERY, + variables: { + filter: { + teamProfileId: teamProfileId || undefined, + status: status || undefined + }, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch reports:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + 
console.error('Failed to fetch team profiles:', err); + return { data: null }; + }) + ]); + + return { + reports: reportsResult.data?.reports ?? { items: [], totalCount: 0, hasNextPage: false }, + teamProfiles: profilesResult.data?.teamProfiles ?? [], + teamProfileId, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/admin/reports/+page.svelte b/frontend/src/routes/admin/reports/+page.svelte new file mode 100644 index 0000000..6f3e648 --- /dev/null +++ b/frontend/src/routes/admin/reports/+page.svelte @@ -0,0 +1,353 @@ + + + + Reports - Admin - Nexus + + +
+ + + + {#snippet actions()} + + {/snippet} + + + +
+ + +
+ + +
+
+ + + {#if reports.length > 0} + + + + + {:else} + +
+ + + +

No reports yet

+

+ Create a report for a team member to track their labor. +

+ +
+ {/if} +
+
+ + +{#if showCreateModal} +
+ +
+

Create New Report

+ + {#if createError} +
+ {createError} +
+ {/if} + +
{ + e.preventDefault(); + handleCreateReport(); + }} + > + + +
+ + +
+ +
+ + +
+
+
+
+{/if} diff --git a/frontend/src/routes/admin/reports/[report]/+page.server.ts b/frontend/src/routes/admin/reports/[report]/+page.server.ts new file mode 100644 index 0000000..48a7299 --- /dev/null +++ b/frontend/src/routes/admin/reports/[report]/+page.server.ts @@ -0,0 +1,83 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + REPORT_QUERY, + ELIGIBLE_SERVICES_QUERY, + ELIGIBLE_PROJECTS_QUERY, + type ReportQueryResult, + type EligibleServicesQueryResult, + type EligibleProjectsQueryResult +} from '$lib/graphql/queries/reports'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + // Fetch the report + const reportResult = await client + .query({ + query: REPORT_QUERY, + variables: { id: params.report } + }) + .catch((err) => { + console.error('Failed to fetch report:', err); + return { data: null }; + }); + + const report = reportResult.data?.report; + + if (!report) { + throw error(404, 'Report not found'); + } + + // Fetch eligible services and projects + const [eligibleServicesResult, eligibleProjectsResult] = await Promise.all([ + client + .query({ + query: ELIGIBLE_SERVICES_QUERY, + variables: { + teamProfileId: report.teamProfileId, + dateFrom: report.startDate, + dateTo: report.endDate + } + }) + .catch((err) => { + console.error('Failed to fetch eligible services:', err); + return { data: null }; + }), + client + .query({ + query: ELIGIBLE_PROJECTS_QUERY, + variables: { + teamProfileId: report.teamProfileId, + dateFrom: 
report.startDate, + dateTo: report.endDate + } + }) + .catch((err) => { + console.error('Failed to fetch eligible projects:', err); + return { data: null }; + }) + ]); + + return { + report, + eligibleServices: eligibleServicesResult.data?.eligibleServicesForReport ?? [], + eligibleProjects: eligibleProjectsResult.data?.eligibleProjectsForReport ?? [] + }; +}; diff --git a/frontend/src/routes/admin/reports/[report]/+page.svelte b/frontend/src/routes/admin/reports/[report]/+page.svelte new file mode 100644 index 0000000..ff68209 --- /dev/null +++ b/frontend/src/routes/admin/reports/[report]/+page.svelte @@ -0,0 +1,668 @@ + + + + {report.teamProfile?.fullName ?? 'Report'} - Reports - Admin - Nexus + + +
+ + + + {report.status} + + + + +
+ {#if report.status === 'DRAFT'} + + + {:else if report.status === 'FINALIZED'} + + + {:else} + + {/if} +
+ +
+ +
+
+

Summary

+
+
+
+

Services

+

+ {report.serviceCount} +

+

{formatCurrency(report.servicesTotal)}

+
+
+

Projects

+

+ {report.projectCount} +

+

{formatCurrency(report.projectsTotal)}

+
+
+

Total Labor

+

+ {formatCurrency(report.totalLabor)} +

+
+
+
+ + +
+
+ {#if report.status === 'DRAFT' && eligibleServices.length > 0} + (showAddServicesModal = true)} + /> + {:else} +

Services ({report.services.length})

+ {/if} +
+
+ {#if report.services.length > 0} +
+ + + + + + + + {#if report.status === 'DRAFT'} + + {/if} + + + + {#each report.services as entry (entry.id)} + + + + + + {#if report.status === 'DRAFT'} + + {/if} + + {/each} + +
DateAccountLocationLabor Share
+ {formatDate(entry.service?.date ?? '')} + + {entry.service?.account?.name ?? 'Unknown'} + + {entry.service?.accountAddress?.name ?? + entry.service?.accountAddress?.streetAddress ?? + ''} + + {formatCurrency(entry.laborShare)} + + +
+
+ {:else} +
+ No services added yet. + {#if report.status === 'DRAFT' && eligibleServices.length > 0} + + {/if} +
+ {/if} +
+
+ + +
+
+ {#if report.status === 'DRAFT' && eligibleProjects.length > 0} + (showAddProjectsModal = true)} + /> + {:else} +

Projects ({report.projects.length})

+ {/if} +
+
+ {#if report.projects.length > 0} +
+ + + + + + + + {#if report.status === 'DRAFT'} + + {/if} + + + + {#each report.projects as entry (entry.id)} + + + + + + {#if report.status === 'DRAFT'} + + {/if} + + {/each} + +
DateProjectCustomerLabor Share
+ {formatDate(entry.project?.date ?? '')} + + {entry.project?.name ?? 'Unknown'} + + {entry.project?.customer?.name ?? ''} + + {formatCurrency(entry.laborShare)} + + +
+
+ {:else} +
+ No projects added yet. + {#if report.status === 'DRAFT' && eligibleProjects.length > 0} + + {/if} +
+ {/if} +
+
+
+
+
+ + +{#if showAddServicesModal} +
+ +
+
+

Add Services

+ +
+
+ {#if eligibleServices.length > 0} +
+ {#each eligibleServices as service (service.serviceId)} +
+
+

+ {service.service?.account?.name ?? 'Unknown'} +

+

+ {formatDate(service.date)} - {service.service?.accountAddress?.city ?? ''} +

+

+ {formatCurrency(service.laborTotal)} ÷ {service.teamMemberCount} = {formatCurrency( + service.laborShare + )} +

+
+ +
+ {/each} +
+ {:else} +

No eligible services available.

+ {/if} +
+
+ +
+
+
+{/if} + + +{#if showAddProjectsModal} +
+ +
+
+

Add Projects

+ +
+
+ {#if eligibleProjects.length > 0} +
+ {#each eligibleProjects as project (project.projectId)} +
+
+

+ {project.project?.name ?? 'Unknown'} +

+

+ {formatDate(project.date)} - {project.project?.customer?.name ?? ''} +

+

+ {formatCurrency(project.laborTotal)} ÷ {project.teamMemberCount} = {formatCurrency( + project.laborShare + )} +

+
+ +
+ {/each} +
+ {:else} +

No eligible projects available.

+ {/if} +
+
+ +
+
+
+{/if} + + +{#if showDeleteConfirm} +
+ +
+

Delete Report?

+

+ Are you sure you want to delete this report? This action cannot be undone. +

+
+ + +
+
+
+{/if} diff --git a/frontend/src/routes/admin/scopes/+page.server.ts b/frontend/src/routes/admin/scopes/+page.server.ts new file mode 100644 index 0000000..c46842a --- /dev/null +++ b/frontend/src/routes/admin/scopes/+page.server.ts @@ -0,0 +1,50 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + SERVICE_SCOPE_TEMPLATES_QUERY, + type ServiceScopeTemplatesQueryResult +} from '$lib/graphql/queries/service-scope-templates'; +import { + PROJECT_SCOPE_TEMPLATES_QUERY, + type ProjectScopeTemplatesQueryResult +} from '$lib/graphql/queries/project-scope-templates'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { serviceTemplates: [], projectTemplates: [] }; + } + + const client = createServerClient(locals.cookie); + + try { + const [serviceResult, projectResult] = await Promise.all([ + client.query({ + query: SERVICE_SCOPE_TEMPLATES_QUERY, + fetchPolicy: 'network-only' + }), + client.query({ + query: PROJECT_SCOPE_TEMPLATES_QUERY, + fetchPolicy: 'network-only' + }) + ]); + return { + serviceTemplates: serviceResult.data?.serviceScopeTemplates ?? [], + projectTemplates: projectResult.data?.projectScopeTemplates ?? [] + }; + } catch (error) { + console.error('Failed to load scope templates:', error); + return { serviceTemplates: [], projectTemplates: [] }; + } +}; diff --git a/frontend/src/routes/admin/scopes/+page.svelte b/frontend/src/routes/admin/scopes/+page.svelte new file mode 100644 index 0000000..dd3dfc4 --- /dev/null +++ b/frontend/src/routes/admin/scopes/+page.svelte @@ -0,0 +1,1663 @@ + + + + Scope Templates - Admin - Nexus + + + + +
+
+ +
+
+ +
+ + +
+ + +
+

+ {activeTab === 'service' ? 'Service' : 'Project'} Templates +

+
+ + +
+
+ + + {#if showNewTemplateInput} +
+ { + if (e.key === 'Enter') createTemplate(); + if (e.key === 'Escape') { + showNewTemplateInput = false; + newTemplateName = ''; + } + }} + /> +
+ + +
+
+ {/if} + + +
+ {#if templates.length === 0} +
+

No templates yet.

+ +
+ {:else} +
+ {#each templates as template (template.id)} + + {/each} +
+ {/if} +
+
+
+ + +
+ {#if loading && !selectedTemplate} +
+
Loading...
+
+ {:else if selectedTemplate} +
+ +
+
+ + +
+ updateTemplateName(e.currentTarget.value)} + onkeydown={(e) => { + if (e.key === 'Enter') e.currentTarget.blur(); + }} + class="-ml-1 rounded bg-transparent px-1 text-xl font-semibold text-theme focus:ring-1 focus:ring-primary-500 focus:outline-none" + /> + {#if !selectedTemplate.isActive} + + Inactive + + {/if} +
+
+
+ + + +
+
+ + +
+ +
+ + + {#if error} +
+

{error}

+
+ {/if} + + +
+
+

{getSectionLabel()}s & Tasks

+ +
+ + {#if sections.length === 0} +
+

No {getSectionLabel().toLowerCase()}s yet.

+ +
+ {:else} +
+ {#each sections as section, sectionIndex (section.id)} +
+ +
+ + + {#if editingSectionId === section.id} + updateSectionName(section.id, e.currentTarget.value)} + onkeydown={(e) => { + if (e.key === 'Enter') e.currentTarget.blur(); + if (e.key === 'Escape') editingSectionId = null; + }} + class="input-base flex-1 py-1" + /> + {:else} + + {/if} + + + ({section.tasks.length} task{section.tasks.length !== 1 ? 's' : ''}) + + +
+ + + +
+
+ + + {#if expandedSections.has(section.id)} +
+ {#if section.tasks.length === 0} +

No tasks yet

+ {:else} +
+ {#each section.tasks as task (task.id)} +
+ {#if editingTaskId === task.id} + +
+ + + +
+ {#if activeTab === 'service'} + + {/if} + +
+ +
+ {:else} + +
+
+

+ {task.scopeDescription} +

+ {#if task.checklistDescription} +

+ {task.checklistDescription} +

+ {/if} + {#if task.sessionDescription} +

+ {task.sessionDescription} +

+ {/if} +
+ {#if activeTab === 'service'} + + {FREQUENCY_SHORT_LABELS[getTaskFrequency(task)]} + + {/if} + {#if task.estimatedMinutes} + + {task.estimatedMinutes} min + + {/if} +
+
+
+ + +
+
+ {/if} +
+ {/each} +
+ {/if} + +
+ {/if} +
+ {/each} +
+ {/if} +
+
+ {:else} +
+
+ + + +

Select a template to edit

+

or create a new one

+
+
+ {/if} +
+
+
+ + +{#if showImportModal} +
+
+

+ Import {activeTab === 'service' ? 'Service' : 'Project'} Scope Template +

+ + {#if importError} +
+

{importError}

+
+ {/if} + + +
+ + Show example JSON format + +
+ {#if activeTab === 'service'} +
{`{
+  "name": "Template Name",
+  "description": "Optional description",
+  "areas": [
+    {
+      "name": "Area Name",
+      "order": 0,
+      "tasks": [
+        {
+          "scope_description": "Customer-facing description",
+          "checklist_description": "QA/punchlist description",
+          "session_description": "Work instructions",
+          "frequency": "WEEKLY",
+          "order": 0,
+          "estimated_minutes": 15
+        }
+      ]
+    }
+  ]
+}`}
+

+ Frequency options: DAILY, WEEKLY, MONTHLY, QUARTERLY, TRIANNUAL, ANNUAL, + AS_NEEDED +

+ {:else} +
{`{
+  "name": "Template Name",
+  "description": "Optional description",
+  "categories": [
+    {
+      "name": "Category Name",
+      "order": 0,
+      "tasks": [
+        {
+          "scope_description": "Customer-facing description",
+          "checklist_description": "QA/punchlist description",
+          "session_description": "Work instructions",
+          "order": 0,
+          "estimated_minutes": 30
+        }
+      ]
+    }
+  ]
+}`}
+

+ Note: Project tasks don't have frequency (they're one-time tasks). +

+ {/if} +
+
+ + + +
+ + +
+ +
+ + +
+
+
+{/if} diff --git a/frontend/src/routes/admin/services/+page.server.ts b/frontend/src/routes/admin/services/+page.server.ts new file mode 100644 index 0000000..1cc42ae --- /dev/null +++ b/frontend/src/routes/admin/services/+page.server.ts @@ -0,0 +1,115 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + SERVICES_QUERY, + SERVICE_STATUS_COUNTS_QUERY, + type ServicesQueryResult, + type ServiceStatusCountsQueryResult, + type WorkStatus +} from '$lib/graphql/queries/services'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 20; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { dateFrom: string; dateTo: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0); + + const dateFrom = firstDay.toISOString().split('T')[0]; + const dateTo = lastDay.toISOString().split('T')[0]; + + return { dateFrom, dateTo }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const statusParam = url.searchParams.get('status'); + const status: WorkStatus | null = statusParam as WorkStatus | null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + + const { dateFrom, dateTo } = getMonthDateRange(month); + const offset = (page - 1) * 
limit; + + if (!locals.cookie) { + return { + services: { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: { scheduled: 0, inProgress: 0, completed: 0, cancelled: 0 }, + month, + status, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + // Fetch both services and status counts in parallel + const [servicesResult, countsResult] = await Promise.all([ + client + .query({ + query: SERVICES_QUERY, + variables: { + filter: { + dateFrom, + dateTo, + status + }, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch services:', err); + return { data: null }; + }), + client + .query({ + query: SERVICE_STATUS_COUNTS_QUERY, + variables: { + dateFrom, + dateTo + } + }) + .catch((err) => { + console.error('Failed to fetch status counts:', err); + return { data: null }; + }) + ]); + + return { + services: servicesResult.data?.services ?? { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: countsResult.data?.serviceStatusCounts ?? { + scheduled: 0, + inProgress: 0, + completed: 0, + cancelled: 0 + }, + month, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/admin/services/+page.svelte b/frontend/src/routes/admin/services/+page.svelte new file mode 100644 index 0000000..3a47617 --- /dev/null +++ b/frontend/src/routes/admin/services/+page.svelte @@ -0,0 +1,373 @@ + + + + Services - Admin - Nexus + + +
+ + + + {#snippet actions()} + + + + + Assign + + + + {/snippet} + + + +
+ + +
+ + +
+
+ + + + +
+
+ + + {#if filteredServices.length > 0} + + + + + {:else if searchQuery} + +
+ + + +

No services found

+

No services match your search.

+ +
+ {:else if data.services.totalCount === 0} + +
+ + + +

No services

+

+ {#if data.status} + No {formatStatusLabel(data.status).toLowerCase()} services for this month. + {:else} + No services scheduled for this month. + {/if} +

+
+ {/if} +
+
+ + +{#if showCreateModal} +
e.target === e.currentTarget && (showCreateModal = false)} + onkeydown={(e) => e.key === 'Escape' && (showCreateModal = false)} + role="dialog" + aria-modal="true" + tabindex="-1" + > +
+
+

Create Service

+ +
+ (showCreateModal = false)} + /> +
+
+{/if} + + + (showGenerateModal = false)} + onSuccess={handleGenerateSuccess} +/> diff --git a/frontend/src/routes/admin/services/[service]/+page.server.ts b/frontend/src/routes/admin/services/[service]/+page.server.ts new file mode 100644 index 0000000..a0c69bc --- /dev/null +++ b/frontend/src/routes/admin/services/[service]/+page.server.ts @@ -0,0 +1,67 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { SERVICE_QUERY, type ServiceQueryResult } from '$lib/graphql/queries/services'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { + LATEST_SERVICE_SESSION_QUERY, + type LatestServiceSessionQueryResult +} from '$lib/graphql/queries/session'; +import { error, redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const [serviceResult, teamResult, sessionResult] = await Promise.all([ + client + .query({ + query: SERVICE_QUERY, + variables: { id: params.service } + }) + .catch((err) => { + console.error('Failed to fetch service:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }), + client + .query({ + query: LATEST_SERVICE_SESSION_QUERY, + variables: { serviceId: params.service } + }) + .catch((err) => { + console.error('Failed to fetch session:', err); + return { data: null }; + }) + ]); + + if (!serviceResult.data?.service) { + throw error(404, 'Service not 
found'); + } + + return { + service: serviceResult.data.service, + teamProfiles: teamResult.data?.teamProfiles ?? [], + session: sessionResult.data?.latestServiceSession ?? null + }; +}; diff --git a/frontend/src/routes/admin/services/[service]/+page.svelte b/frontend/src/routes/admin/services/[service]/+page.svelte new file mode 100644 index 0000000..64f14cb --- /dev/null +++ b/frontend/src/routes/admin/services/[service]/+page.svelte @@ -0,0 +1,1283 @@ + + + + Service - {service.account?.name ?? 'Unknown'} - Admin - Nexus + + +
+ + +
+ + {formatStatusLabel(service.status)} + + {#if isDispatched} + + Dispatched + + {/if} + {#if isAssigned} + + Assigned + + {/if} + {formatDate(service.date)} +
+
+ +
+ +
+
+ +
+
+
+
Date
+
{formatDate(service.date)}
+
+
+
Status
+
+ + {formatStatusLabel(service.status)} + +
+
+
+
Labor
+
+ {#if activeLabor} + {formatCurrency(activeLabor.amount)} + {:else} + Not set + {/if} +
+
+
+
Calendar Event
+
+ {#if service.calendarEventId} + + View Event → + + {:else} + + {/if} +
+
+
+
+ + +
+
+

Account & Location

+
+
+
+
Account
+
+ {#if service.account} + + {service.account.name} + + {:else} + Unknown + {/if} +
+
+ {#if service.account?.customer} + + {/if} + {#if service.accountAddress} + + {/if} +
+
+ + + {#if service.notes} +
+
+

Service Notes

+
+
+

{service.notes}

+
+
+ {/if} + + +
+
+
+

Team Members

+
+ {#if teamMembers.length > 0} + + {/if} + {#if !isDispatched} + + + {:else} + +
+ + {#if assignMenuOpen} +
+ +
+

+ Assign Team Members +

+ {#each availableTeamProfiles.filter((p) => p.id !== config.dispatch.profileId) as profile (profile.id)} + {@const alreadyAssigned = service.teamMembers.some( + (m) => m.teamProfileId === profile.id + )} + {@const isSelected = selectedTeamMembers.has(profile.id)} + + {/each} + {#if availableTeamProfiles.filter((p) => p.id !== config.dispatch.profileId).length === 0} +

+ No team members available +

+ {/if} + {#if selectedTeamMembers.size > 0} +
+ + {/if} +
+ {#if teamMembers.length > 0} +

+ This will also remove {teamMembers.length} assigned team member{teamMembers.length > + 1 + ? 's' + : ''} +

+ {/if} + +
+
+ {/if} +
+ {/if} +
+
+
+ {#if teamMembers.length > 0} +
+
    + {#each teamMembers as member (member.id)} +
  • +
    +
    +
    + + + +
    +
    +

    + {member.teamProfile?.fullName ?? 'Unknown'} +

    + {#if member.teamProfile?.role} +

    + {member.teamProfile.role === 'TEAM_LEADER' + ? 'Team Leader' + : 'Team Member'} +

    + {/if} +
    +
    + +
    +
  • + {/each} +
+
+ {:else if isDispatched} +

Dispatched. Ready to assign team members.

+ {:else} +

Dispatch first to assign team members.

+ {/if} +
+ + +
+
+

Work Session

+
+
+ {#if session} + + ({ id: p.id, fullName: p.fullName })) ?? + []} + areas={sessionAreas} + tasks={allTasks} + {selectedTaskIds} + {completedTaskIds} + {completedTasksByArea} + {readyToSubmitByArea} + {availableTasksCount} + isSubmitting={isClosingSession} + isReverting={isRevertingSession} + {submittingTaskId} + {removingTaskId} + onClose={handleCloseSession} + onRevert={handleRevertSession} + onToggleTask={handleToggleTask} + onSubmitTask={handleSubmitTask} + onRemoveTask={handleRemoveTask} + onRemoveCompletedTask={handleRemoveCompletedTask} + onSubmitAllTasks={handleSubmitAllTasks} + onClearSelection={handleClearSelection} + onAddNote={handleAddNote} + onUpdateNote={handleUpdateNote} + onDeleteNote={handleDeleteNote} + onUploadPhoto={handleUploadPhoto} + onUploadVideo={handleUploadVideo} + onUpdatePhoto={handleUpdatePhoto} + onUpdateVideo={handleUpdateVideo} + onDeletePhoto={handleDeletePhoto} + onDeleteVideo={handleDeleteVideo} + {getTeamMemberName} + /> + {:else if canStartSession} + +
+

+ Start a work session to track time, complete tasks, and capture notes/photos. +

+ +
+ {:else if service.status === 'IN_PROGRESS'} +

+ This service has an active session but it was not loaded. Try refreshing the page. +

+ {:else if service.status === 'COMPLETED'} +

+ This service has been completed. View the session history for details. +

+ {:else if !isAssigned} +

Assign team members to enable work sessions.

+ {:else if !scope} +

+ No active scope found for this location. Configure a scope to enable work sessions. +

+ {:else} +

+ Work session is not available for this service status. +

+ {/if} +
+
+ + +
+ + Danger Zone + + + + +
+

+ Deleting this service will remove it from the schedule. This action cannot be undone. +

+ +
+
+
+ + + + {#if drawerMode === 'service'} + + {/if} + + + + + + + + + +
+
diff --git a/frontend/src/routes/admin/services/assign/+page.server.ts b/frontend/src/routes/admin/services/assign/+page.server.ts new file mode 100644 index 0000000..b1cca74 --- /dev/null +++ b/frontend/src/routes/admin/services/assign/+page.server.ts @@ -0,0 +1,89 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + SERVICES_FOR_ASSIGNMENT_QUERY, + type ServicesForAssignmentQueryResult +} from '$lib/graphql/queries/services'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { redirect } from '@sveltejs/kit'; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { dateFrom: string; dateTo: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0); + + const dateFrom = firstDay.toISOString().split('T')[0]; + const dateTo = lastDay.toISOString().split('T')[0]; + + return { dateFrom, dateTo }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not an admin/team leader + if ( + !user || + user.__typename !== 'TeamProfileType' || + (user.role !== 'ADMIN' && user.role !== 'TEAM_LEADER') + ) { + throw redirect(303, '/'); + } + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const { dateFrom, dateTo } = getMonthDateRange(month); + + if (!locals.cookie) { + return { + services: { unassigned: [], readyToAssign: [], assigned: [] }, + teamProfiles: [], + month + }; + } + + const client = createServerClient(locals.cookie); + + // Fetch services for assignment and team profiles in parallel + const [servicesResult, 
teamResult] = await Promise.all([ + client + .query({ + query: SERVICES_FOR_ASSIGNMENT_QUERY, + variables: { dateFrom, dateTo } + }) + .catch((err) => { + console.error('Failed to fetch services for assignment:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }) + ]); + + // Filter to only active non-admin team members for assignment + const teamProfiles = + teamResult.data?.teamProfiles.filter((p) => p.status === 'ACTIVE' && p.role !== 'ADMIN') ?? []; + + return { + services: servicesResult.data?.servicesForAssignment ?? { + unassigned: [], + readyToAssign: [], + assigned: [] + }, + teamProfiles, + month, + currentUserProfileId: user.id + }; +}; diff --git a/frontend/src/routes/admin/services/assign/+page.svelte b/frontend/src/routes/admin/services/assign/+page.svelte new file mode 100644 index 0000000..c7cba6d --- /dev/null +++ b/frontend/src/routes/admin/services/assign/+page.svelte @@ -0,0 +1,1066 @@ + + + + Bulk Assign Services - Admin - Nexus + + +
+ + + + {#snippet actions()} + + {/snippet} + + + +
+ + + +
+ Group by: +
+ + + +
+
+
+ + +
+ +
+
+
+ +

Scheduled

+ + {unassigned.length} + +
+
+ + +
+
+ + + {#if groupedUnassigned.size > 0} +
+ + | + +
+ {/if} + + + {#if selectedUnassigned.size > 0} +
+ +
+ {/if} + + +
+ {#each [...groupedUnassigned.entries()] as [key, services] (key)} +
+ + {#if expandedUnassigned.has(key)} +
+ {#each services as service (service.id)} +
+ + (selectedUnassigned = toggleSelection(selectedUnassigned, service.id))} + class="mt-1 h-4 w-4 rounded border-theme text-primary-500 focus:ring-primary-500" + /> +
+

+ {service.account?.name ?? 'Unknown'} +

+

+ {formatAddress(service.accountAddress)} +

+ {#if groupMode !== 'date'} +

{formatDate(service.date)}

+ {/if} +
+ +
+ {/each} +
+ {/if} +
+ {/each} + + {#if unassigned.length === 0} +

No scheduled services

+ {/if} +
+
+ + +
+
+
+ +

Dispatched

+ + {readyToAssign.length} + +
+
+ + +
+
+ + + {#if groupedReadyToAssign.size > 0} +
+ + | + +
+ {/if} + + + {#if selectedReadyToAssign.size > 0} +
+ +
+ + +
+ + +
+ {/if} + + +
+ {#each [...groupedReadyToAssign.entries()] as [key, services] (key)} +
+ + {#if expandedReadyToAssign.has(key)} +
+ {#each services as service (service.id)} + + {/each} +
+ {/if} +
+ {/each} + + {#if readyToAssign.length === 0} +

No dispatched services

+ {/if} +
+
+ + +
+
+
+ +

Assigned

+ + {assigned.length} + +
+
+ + +
+
+ + + {#if groupedAssigned.size > 0} +
+ + | + +
+ {/if} + + + {#if selectedAssigned.size > 0} +
+ +
+ {/if} + + +
+ {#each [...groupedAssigned.entries()] as [key, services] (key)} +
+ + {#if expandedAssigned.has(key)} +
+ {#each services as service (service.id)} +
+
+ + (selectedAssigned = toggleSelection(selectedAssigned, service.id))} + class="mt-1 h-4 w-4 rounded border-theme text-primary-500 focus:ring-primary-500" + /> +
+

+ {service.account?.name ?? 'Unknown'} +

+

+ {formatAddress(service.accountAddress)} +

+ {#if groupMode !== 'date'} +

{formatDate(service.date)}

+ {/if} + + +
+ {#each service.teamMembers as member (member.id)} + {@const isDispatcher = + member.teamProfile?.role === 'ADMIN' || + member.teamProfileId === currentUserId} +
+ + {member.teamProfile?.fullName ?? 'Unknown'} + {#if isDispatcher} + (dispatch) + {/if} + + {#if !isDispatcher} + + {/if} +
+ {/each} + + + {#if addingTeamMemberTo === service.id} +
+ + + +
+ {:else} + + {/if} +
+
+
+
+ {/each} +
+ {/if} +
+ {/each} + + {#if assigned.length === 0} +

No assigned services

+ {/if} +
+
+
+
+
+ + + (showGenerateModal = false)} + onSuccess={handleGenerateSuccess} +/> + + +{#if showDeleteConfirm && serviceToDelete} +
e.target === e.currentTarget && cancelDelete()} + onkeydown={(e) => e.key === 'Escape' && cancelDelete()} + role="dialog" + aria-modal="true" + tabindex="-1" + > +
+
+
+ + + +
+

Delete Service

+
+

+ Are you sure you want to delete the service for + {serviceToDelete.account?.name ?? 'Unknown'} + on + {formatDate(serviceToDelete.date)}? This action + cannot be undone. +

+
+ + +
+
+
+{/if} diff --git a/frontend/src/routes/contact/+page.server.ts b/frontend/src/routes/contact/+page.server.ts new file mode 100644 index 0000000..2b27216 --- /dev/null +++ b/frontend/src/routes/contact/+page.server.ts @@ -0,0 +1,28 @@ +import { fail } from '@sveltejs/kit'; +import type { Actions } from './$types'; + +export const actions = { + default: async ({ request }) => { + const formData = await request.formData(); + const name = formData.get('name') as string; + const email = formData.get('email') as string; + const subject = formData.get('subject') as string; + const message = formData.get('message') as string; + + if (!name || !email || !subject || !message) { + return fail(400, { error: 'All fields are required' }); + } + + // TODO: Implement email service integration + // For now, log the contact form submission + console.log('Contact form submission:', { + name, + email, + subject, + message: message.substring(0, 100) + (message.length > 100 ? '...' : '') + }); + + // Simulate success for now + return { success: true }; + } +} satisfies Actions; diff --git a/frontend/src/routes/contact/+page.svelte b/frontend/src/routes/contact/+page.svelte new file mode 100644 index 0000000..331c8a2 --- /dev/null +++ b/frontend/src/routes/contact/+page.svelte @@ -0,0 +1,164 @@ + + + + Contact Us - Nexus + + + + +
+ + +
+

Get in Touch

+

+ Have a question, feedback, or just want to say hello? We'd love to hear from you. Fill out + the form below and we'll get back to you as soon as possible. +

+
+
+
+ + +
+ +
+ +
+ + + + +
+

Hours

+
+
+

Office Hours

+

9:00 AM - 5:00 PM
By appointment only

+
+
+

Overnight Dispatch

+

+ 6:00 PM - 2:00 AM
Emergency service available +

+
+
+
+
+ + +
+
+
+ + + +
+ + + + + +
+ + + {#if submitStatus === 'success'} +

Thank you! Your message has been sent.

+ {:else if submitStatus === 'error'} +

+ {errorMessage} +

+ {/if} +
+
+
+
+
+
diff --git a/frontend/src/routes/customer/+layout.server.ts b/frontend/src/routes/customer/+layout.server.ts new file mode 100644 index 0000000..3bd1541 --- /dev/null +++ b/frontend/src/routes/customer/+layout.server.ts @@ -0,0 +1,118 @@ +import type { LayoutServerLoad } from './$types'; +import { redirect, error } from '@sveltejs/kit'; +import { createServerClient } from '$lib/graphql/client'; +import { SERVICES_QUERY, type ServicesQueryResult } from '$lib/graphql/queries/services'; +import { PROJECTS_QUERY, type ProjectsQueryResult } from '$lib/graphql/queries/projects'; +import { CUSTOMER_QUERY, type CustomerQueryResult } from '$lib/graphql/queries/customer'; +import { getCurrentMonth, getMonthDateRange } from '$lib/utils/date'; + +export const load: LayoutServerLoad = async ({ url, parent, locals }) => { + const parentData = await parent(); + const user = parentData.user; + + // Not authenticated - redirect to login + if (!user) { + const returnTo = encodeURIComponent(url.pathname + url.search); + throw redirect(307, `/login?return_to=${returnTo}`); + } + + // Must be a CustomerProfileType + if (user.__typename !== 'CustomerProfileType') { + throw error(403, 'This area is only accessible to customers'); + } + + if (!locals.cookie) { + return { + ...parentData, + customers: [], + accounts: [], + services: { scheduled: [], inProgress: [], completed: [] }, + projects: { scheduled: [], inProgress: [], completed: [] } + }; + } + + const client = createServerClient(locals.cookie); + + // Get assigned customer IDs + const customerIds = user.customers?.map((c) => c.id) ?? []; + + // Fetch full customer data for each assigned customer + const customersData = await Promise.all( + customerIds.map(async (id) => { + const result = await client + .query({ + query: CUSTOMER_QUERY, + variables: { id } + }) + .catch((err) => { + console.error(`Failed to fetch customer ${id}:`, err); + return { data: null }; + }); + return result.data?.customer ?? 
null; + }) + ); + + const customers = customersData.filter((c) => c !== null); + + // Flatten all accounts from all customers + const accounts = customers.flatMap((customer) => + (customer.accounts ?? []).map((account) => ({ + ...account, + customerName: customer.name, + customerId: customer.id + })) + ); + + // Get month from URL params or default to current + const month = url.searchParams.get('month') ?? getCurrentMonth(); + const { start, end } = getMonthDateRange(month); + + // Build filters for services and projects + const filter = { + dateFrom: start, + dateTo: end, + customerIds: customerIds.length > 0 ? customerIds : undefined + }; + + // Fetch services and projects + const [servicesResult, projectsResult] = await Promise.all([ + client + .query({ + query: SERVICES_QUERY, + variables: { filter, pagination: { limit: 100 } } + }) + .catch((err) => { + console.error('Failed to fetch services:', err); + return { data: null }; + }), + client + .query({ + query: PROJECTS_QUERY, + variables: { filter, pagination: { limit: 100 } } + }) + .catch((err) => { + console.error('Failed to fetch projects:', err); + return { data: null }; + }) + ]); + + const allServices = servicesResult.data?.services?.items ?? []; + const allProjects = projectsResult.data?.projects?.items ?? 
[]; + + return { + ...parentData, + customers, + accounts, + services: { + scheduled: allServices.filter((s) => s.status === 'SCHEDULED'), + inProgress: allServices.filter((s) => s.status === 'IN_PROGRESS'), + completed: allServices.filter((s) => s.status === 'COMPLETED') + }, + projects: { + scheduled: allProjects.filter((p) => p.status === 'SCHEDULED'), + inProgress: allProjects.filter((p) => p.status === 'IN_PROGRESS'), + completed: allProjects.filter((p) => p.status === 'COMPLETED') + }, + currentMonth: month + }; +}; diff --git a/frontend/src/routes/customer/+layout.svelte b/frontend/src/routes/customer/+layout.svelte new file mode 100644 index 0000000..104644a --- /dev/null +++ b/frontend/src/routes/customer/+layout.svelte @@ -0,0 +1,14 @@ + + + +
+ {@render children()} +
+ + + diff --git a/frontend/src/routes/customer/+page.svelte b/frontend/src/routes/customer/+page.svelte new file mode 100644 index 0000000..bea995f --- /dev/null +++ b/frontend/src/routes/customer/+page.svelte @@ -0,0 +1,342 @@ + + + + Dashboard - Nexus + + +
+ + + + +
+
+

+ {data.accounts?.length ?? 0} +

+

Accounts

+
+
+

+ {scheduledServices.length + scheduledProjects.length} +

+

Scheduled

+
+
+

+ {inProgressServices.length + inProgressProjects.length} +

+

In Progress

+
+
+

+ {(data.services?.completed?.length ?? 0) + (data.projects?.completed?.length ?? 0)} +

+

Completed

+
+
+ + +
+ +
+
+
+

+ Services +

+ + View all + +
+
+ +
+ {#if upcomingServices.length === 0} +

No services scheduled

+ {:else} + + {#if upcomingServices.length > 5} +

+ +{upcomingServices.length - 5} more services +

+ {/if} + {/if} +
+
+ + +
+
+
+

Projects

+ + View all + +
+
+ +
+ {#if upcomingProjects.length === 0} +

No projects scheduled

+ {:else} + + {#if upcomingProjects.length > 5} +

+ +{upcomingProjects.length - 5} more projects +

+ {/if} + {/if} +
+
+
+ + +
+

Quick Links

+ +
+
+
diff --git a/frontend/src/routes/customer/accounts/+page.svelte b/frontend/src/routes/customer/accounts/+page.svelte new file mode 100644 index 0000000..50f2a35 --- /dev/null +++ b/frontend/src/routes/customer/accounts/+page.svelte @@ -0,0 +1,73 @@ + + + + Accounts - Nexus + + +
+ + + + {#if accounts.length > 0} + + {:else} +
+ + + +

No accounts found

+

You don't have any service accounts assigned.

+
+ {/if} +
+
diff --git a/frontend/src/routes/customer/accounts/[account]/+page.server.ts b/frontend/src/routes/customer/accounts/[account]/+page.server.ts new file mode 100644 index 0000000..47a1393 --- /dev/null +++ b/frontend/src/routes/customer/accounts/[account]/+page.server.ts @@ -0,0 +1,40 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { error } from '@sveltejs/kit'; +import { ACCOUNT_QUERY, type AccountQueryResult } from '$lib/graphql/queries/account'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user, accounts } = await parent(); + + // Verify the account belongs to one of the assigned customers + const assignedAccountIds = accounts?.map((a: { id: string }) => a.id) ?? []; + if (!assignedAccountIds.includes(params.account)) { + throw error(403, 'You do not have access to this account'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: ACCOUNT_QUERY, + variables: { id: params.account } + }) + .catch((err) => { + console.error('Failed to fetch account:', err); + throw error(500, 'Failed to load account'); + }); + + if (!data?.account) { + throw error(404, 'Account not found'); + } + + return { + account: data.account, + contacts: data.account.contacts, + addresses: data.account.addresses + }; +}; diff --git a/frontend/src/routes/customer/accounts/[account]/+page.svelte b/frontend/src/routes/customer/accounts/[account]/+page.svelte new file mode 100644 index 0000000..dc97dc1 --- /dev/null +++ b/frontend/src/routes/customer/accounts/[account]/+page.svelte @@ -0,0 +1,113 @@ + + + + {account?.name ?? 'Account'} - Nexus + + +
+ + {#if account} + + {account.status} + + +
+ + + {#if contacts.length > 0} +
+ {#each contacts as contact (contact.id)} +
+ + {contact.firstName} + {contact.lastName} + + {#if contact.email} +

{contact.email}

+ {/if} + {#if contact.phone} +

{contact.phone}

+ {/if} + {#if contact.isPrimary} +
+ Primary +
+ {/if} +
+ {/each} +
+ {:else} +

No contacts listed.

+ {/if} +
+ + + + {#if addresses.length > 0} +
+ {#each addresses as address (address.id)} +
+ + {address.name || + (addresses.length === 1 ? 'Primary Location' : 'Service Location')} + +

{address.streetAddress}

+

+ {address.city}, {address.state} + {address.zipCode} +

+ {#if address.isPrimary} +
+ Primary +
+ {/if} +
+ {/each} +
+ {:else} +

No service locations listed.

+ {/if} +
+
+ {:else} +
+ + + +

Account not found

+

+ The account you're looking for doesn't exist or you don't have access. +

+ Back to Accounts +
+ {/if} +
+
diff --git a/frontend/src/routes/customer/history/+page.svelte b/frontend/src/routes/customer/history/+page.svelte new file mode 100644 index 0000000..48d2a93 --- /dev/null +++ b/frontend/src/routes/customer/history/+page.svelte @@ -0,0 +1,106 @@ + + + + History - Nexus + + +
+ + + + {#if completedWork.length > 0} + + {:else} +
+ + + +

No completed work

+

You don't have any completed services or projects yet.

+
+ {/if} +
+
diff --git a/frontend/src/routes/customer/invoices/+page.server.ts b/frontend/src/routes/customer/invoices/+page.server.ts new file mode 100644 index 0000000..68b211e --- /dev/null +++ b/frontend/src/routes/customer/invoices/+page.server.ts @@ -0,0 +1,58 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { INVOICES_QUERY, type InvoicesQueryResult } from '$lib/graphql/queries/invoices'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user, customers } = await parent(); + + // Must be a customer profile + if (!user || user.__typename !== 'CustomerProfileType') { + throw error(403, 'Access denied'); + } + + if (!locals.cookie) { + return { + invoices: [] + }; + } + + const client = createServerClient(locals.cookie); + + // Get customer IDs from the customer profile + const customerIds = customers?.map((c: { id: string }) => c.id) ?? []; + + if (customerIds.length === 0) { + return { + invoices: [] + }; + } + + // Fetch invoices for all customer's customers + // We need to fetch for each customerId since the filter only accepts one + const invoiceResults = await Promise.all( + customerIds.map((customerId: string) => + client + .query({ + query: INVOICES_QUERY, + variables: { + filter: { customerId }, + pagination: { limit: 100 } + } + }) + .catch((err) => { + console.error(`Failed to fetch invoices for customer ${customerId}:`, err); + return { data: null }; + }) + ) + ); + + // Combine and sort all invoices by endDate descending + const allInvoices = invoiceResults + .flatMap((result) => result.data?.invoices?.items ?? 
[]) + .sort((a, b) => b.endDate.localeCompare(a.endDate)); + + return { + invoices: allInvoices + }; +}; diff --git a/frontend/src/routes/customer/invoices/+page.svelte b/frontend/src/routes/customer/invoices/+page.svelte new file mode 100644 index 0000000..32d2e41 --- /dev/null +++ b/frontend/src/routes/customer/invoices/+page.svelte @@ -0,0 +1,116 @@ + + + + Invoices - Nexus + + + diff --git a/frontend/src/routes/customer/invoices/[invoice]/+page.server.ts b/frontend/src/routes/customer/invoices/[invoice]/+page.server.ts new file mode 100644 index 0000000..86fab8e --- /dev/null +++ b/frontend/src/routes/customer/invoices/[invoice]/+page.server.ts @@ -0,0 +1,46 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { INVOICE_QUERY, type InvoiceQueryResult } from '$lib/graphql/queries/invoices'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user, customers } = await parent(); + + // Must be a customer profile + if (!user || user.__typename !== 'CustomerProfileType') { + throw error(403, 'Access denied'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + // Fetch the invoice + const invoiceResult = await client + .query({ + query: INVOICE_QUERY, + variables: { id: params.invoice } + }) + .catch((err) => { + console.error('Failed to fetch invoice:', err); + return { data: null }; + }); + + const invoice = invoiceResult.data?.invoice; + + if (!invoice) { + throw error(404, 'Invoice not found'); + } + + // Verify the customer owns this invoice + const customerIds = customers?.map((c: { id: string }) => c.id) ?? 
[]; + if (!customerIds.includes(invoice.customerId)) { + throw error(403, 'You do not have access to this invoice'); + } + + return { + invoice + }; +}; diff --git a/frontend/src/routes/customer/invoices/[invoice]/+page.svelte b/frontend/src/routes/customer/invoices/[invoice]/+page.svelte new file mode 100644 index 0000000..abac0cf --- /dev/null +++ b/frontend/src/routes/customer/invoices/[invoice]/+page.svelte @@ -0,0 +1,185 @@ + + + + Invoice - {formatDateRange(invoice.startDate, invoice.endDate)} - Nexus + + +
+ + + + {formatStatusLabel(invoice.status)} + + + +
+ + +
+
+

Services

+

+ {invoice.revenueCount} +

+

{formatCurrency(invoice.revenuesTotal)}

+
+
+

Projects

+

+ {invoice.projectCount} +

+

{formatCurrency(invoice.projectsTotal)}

+
+
+

Total

+

+ {formatCurrency(invoice.totalAmount)} +

+
+
+
+ + + {#if invoice.revenues.length > 0} + +
+ {#each invoice.revenues as entry (entry.id)} +
+
+
+

{entry.account?.name ?? 'Unknown Account'}

+

+ {entry.revenue?.startDate ? formatDate(entry.revenue.startDate) : ''} - {entry + .revenue?.endDate + ? formatDate(entry.revenue.endDate) + : 'Ongoing'} +

+
+ + {formatCurrency(entry.amount)} + +
+
+ {/each} +
+
+ {/if} + + + {#if invoice.projects.length > 0} + +
+ {#each invoice.projects as entry (entry.id)} +
+
+
+

{entry.project?.name ?? 'Unknown'}

+

+ {formatDate(entry.project?.date ?? '')} + {#if entry.account?.name} + - {entry.account.name} + {/if} +

+ {#if entry.project?.formattedAddress} +

{entry.project.formattedAddress}

+ {/if} +
+ + {formatCurrency(entry.amount)} + +
+
+ {/each} +
+
+ {/if} + + + {#if invoice.revenues.length === 0 && invoice.projects.length === 0} +
+ + + +

No items

+

This invoice doesn't have any line items yet.

+
+ {/if} +
+
+
diff --git a/frontend/src/routes/customer/schedule/+page.svelte b/frontend/src/routes/customer/schedule/+page.svelte new file mode 100644 index 0000000..8afcf22 --- /dev/null +++ b/frontend/src/routes/customer/schedule/+page.svelte @@ -0,0 +1,121 @@ + + + + Schedule - Nexus + + +
+ + + + {#if upcomingWork.length > 0} + + {:else} +
+ + + +

No upcoming work

+

You don't have any scheduled services or projects.

+
+ {/if} +
+
diff --git a/frontend/src/routes/customer/schedule/project/[project]/+page.server.ts b/frontend/src/routes/customer/schedule/project/[project]/+page.server.ts new file mode 100644 index 0000000..34dd35d --- /dev/null +++ b/frontend/src/routes/customer/schedule/project/[project]/+page.server.ts @@ -0,0 +1,46 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { PROJECT_QUERY, type ProjectQueryResult } from '$lib/graphql/queries/projects'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user, customers } = await parent(); + + // Must be a customer profile + if (!user || user.__typename !== 'CustomerProfileType') { + throw error(403, 'Access denied'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const projectResult = await client + .query({ + query: PROJECT_QUERY, + variables: { id: params.project } + }) + .catch((err) => { + console.error('Failed to fetch project:', err); + return { data: null }; + }); + + const project = projectResult.data?.project; + + if (!project) { + throw error(404, 'Project not found'); + } + + // Verify customer has access to this project + const customerIds = customers?.map((c: { id: string }) => c.id) ?? []; + + if (!project.customerId || !customerIds.includes(project.customerId)) { + throw error(403, 'You do not have access to this project'); + } + + return { + project + }; +}; diff --git a/frontend/src/routes/customer/schedule/project/[project]/+page.svelte b/frontend/src/routes/customer/schedule/project/[project]/+page.svelte new file mode 100644 index 0000000..ddcbeca --- /dev/null +++ b/frontend/src/routes/customer/schedule/project/[project]/+page.svelte @@ -0,0 +1,186 @@ + + + + {project.name} - Nexus + + +
+ + + + {formatStatusLabel(project.status)} + + + +
+ + {#if addressDisplay || project.accountAddress} + +
+ {#if project.accountAddress?.name} +

{project.accountAddress.name}

+ {/if} + + {#if addressDisplay} +

{addressDisplay}

+ {/if} + {#if project.accountAddress?.account} +

+ Account: {project.accountAddress.account.name} +

+ {/if} +
+
+ {/if} + + + {#if activeScope} + +
+
+

{activeScope.name}

+ {#if activeScope.description} +

{activeScope.description}

+ {/if} +
+ + {#if sortedCategories.length > 0} +
+ {#each sortedCategories as category (category.id)} + {@const sortedTasks = [...category.tasks].sort((a, b) => a.order - b.order)} +
+
{category.name}
+ {#if sortedTasks.length > 0} +
    + {#each sortedTasks as task (task.id)} +
  • + + {task.scopeDescription} +
  • + {/each} +
+ {:else} +

No tasks defined

+ {/if} +
+ {/each} +
+ {:else} +

No work categories defined for this scope.

+ {/if} +
+
+ {/if} + + + {#if project.teamMembers.length > 0} + +
+ {#each project.teamMembers as member (member.id)} +
+
+ + + +
+
+

+ {member.teamProfile?.fullName ?? 'Team Member'} +

+ {#if member.teamProfile?.role} +

+ {member.teamProfile.role === 'TEAM_LEADER' ? 'Team Leader' : 'Team Member'} +

+ {/if} +
+
+ {/each} +
+
+ {/if} + + + {#if project.notes} + +

{project.notes}

+
+ {/if} +
+
+
diff --git a/frontend/src/routes/customer/schedule/service/[service]/+page.server.ts b/frontend/src/routes/customer/schedule/service/[service]/+page.server.ts new file mode 100644 index 0000000..a895484 --- /dev/null +++ b/frontend/src/routes/customer/schedule/service/[service]/+page.server.ts @@ -0,0 +1,47 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { SERVICE_QUERY, type ServiceQueryResult } from '$lib/graphql/queries/services'; +import { error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user, customers } = await parent(); + + // Must be a customer profile + if (!user || user.__typename !== 'CustomerProfileType') { + throw error(403, 'Access denied'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const serviceResult = await client + .query({ + query: SERVICE_QUERY, + variables: { id: params.service } + }) + .catch((err) => { + console.error('Failed to fetch service:', err); + return { data: null }; + }); + + const service = serviceResult.data?.service; + + if (!service) { + throw error(404, 'Service not found'); + } + + // Verify customer has access to this service + const customerIds = customers?.map((c: { id: string }) => c.id) ?? []; + const serviceCustomerId = service.account?.customer?.id; + + if (!serviceCustomerId || !customerIds.includes(serviceCustomerId)) { + throw error(403, 'You do not have access to this service'); + } + + return { + service + }; +}; diff --git a/frontend/src/routes/customer/schedule/service/[service]/+page.svelte b/frontend/src/routes/customer/schedule/service/[service]/+page.svelte new file mode 100644 index 0000000..68256ff --- /dev/null +++ b/frontend/src/routes/customer/schedule/service/[service]/+page.svelte @@ -0,0 +1,184 @@ + + + + {service.account?.name ?? 'Service'} - Nexus + + +
+ + + + {formatStatusLabel(service.status)} + + + +
+ + {#if service.accountAddress} + +
+ {#if service.accountAddress.name} +

{service.accountAddress.name}

+ {/if} +

{service.accountAddress.streetAddress}

+

+ {service.accountAddress.city}, {service.accountAddress.state} + {service.accountAddress.zipCode} +

+
+
+ {/if} + + + {#if scope} + +
+
+

{scope.name}

+ {#if scope.description} +

{scope.description}

+ {/if} +
+ + {#if sortedAreas.length > 0} +
+ {#each sortedAreas as area (area.id)} + {@const sortedTasks = [...area.tasks].sort((a, b) => a.order - b.order)} +
+
{area.name}
+ {#if sortedTasks.length > 0} +
    + {#each sortedTasks as task (task.id)} +
  • + + {task.scopeDescription} +
  • + {/each} +
+ {:else} +

No tasks defined

+ {/if} +
+ {/each} +
+ {:else} +

No work areas defined for this scope.

+ {/if} +
+
+ {/if} + + + {#if service.teamMembers.length > 0} + +
+ {#each service.teamMembers as member (member.id)} +
+
+ + + +
+
+

+ {member.teamProfile?.fullName ?? 'Team Member'} +

+ {#if member.teamProfile?.role} +

+ {member.teamProfile.role === 'TEAM_LEADER' ? 'Team Leader' : 'Team Member'} +

+ {/if} +
+
+ {/each} +
+
+ {/if} + + + {#if service.notes} + +

{service.notes}

+
+ {/if} +
+
+
diff --git a/frontend/src/routes/layout.css b/frontend/src/routes/layout.css new file mode 100644 index 0000000..32244c3 --- /dev/null +++ b/frontend/src/routes/layout.css @@ -0,0 +1,913 @@ +@import 'tailwindcss'; +@plugin '@tailwindcss/forms'; +@plugin '@tailwindcss/typography'; + +/* ============================================ + THEME COLOR SYSTEM + ============================================ + Primary: Blue + Secondary: Green + Primary Accent: Orange + Secondary Accent: Purple + Alert/Error: Red + Warning: Yellow + Success: Green (distinct from secondary) + ============================================ */ + +@theme { + /* Primary - Blue (muted/professional) */ + --color-primary-50: #f0f6fc; + --color-primary-100: #dbe8f7; + --color-primary-200: #bdd4f0; + --color-primary-300: #8fb8e5; + --color-primary-400: #5a94d6; + --color-primary-500: #3b78c4; + --color-primary-600: #2d5fa6; + --color-primary-700: #274d87; + --color-primary-800: #254270; + --color-primary-900: #23395e; + --color-primary-950: #18253f; + + /* Secondary - Green (muted/professional) */ + --color-secondary-50: #f2f8f4; + --color-secondary-100: #e0efe4; + --color-secondary-200: #c3dfcc; + --color-secondary-300: #96c7a6; + --color-secondary-400: #65a97b; + --color-secondary-500: #458c5e; + --color-secondary-600: #33714a; + --color-secondary-700: #2a5b3d; + --color-secondary-800: #244933; + --color-secondary-900: #1f3c2b; + --color-secondary-950: #102118; + + /* Accent Primary - Orange (muted/professional) */ + --color-accent-50: #fdf6f0; + --color-accent-100: #fbe9db; + --color-accent-200: #f6d0b6; + --color-accent-300: #f0b088; + --color-accent-400: #e88958; + --color-accent-500: #e16a36; + --color-accent-600: #d2522b; + --color-accent-700: #ae3f26; + --color-accent-800: #8b3425; + --color-accent-900: #712e22; + --color-accent-950: #3d1510; + + /* Accent Secondary - Purple (muted/professional) */ + --color-accent2-50: #f6f4fb; + --color-accent2-100: #ede9f7; + --color-accent2-200: #ddd5f0; + 
--color-accent2-300: #c5b6e4; + --color-accent2-400: #a78fd4; + --color-accent2-500: #8b6bc2; + --color-accent2-600: #7652ab; + --color-accent2-700: #634391; + --color-accent2-800: #533978; + --color-accent2-900: #463162; + --color-accent2-950: #2c1c42; + + /* Accent Tertiary - Teal (customers) */ + --color-accent3-50: #f0fdfa; + --color-accent3-100: #ccfbf1; + --color-accent3-200: #99f6e4; + --color-accent3-300: #5eead4; + --color-accent3-400: #2dd4bf; + --color-accent3-500: #14b8a6; + --color-accent3-600: #0d9488; + --color-accent3-700: #0f766e; + --color-accent3-800: #115e59; + --color-accent3-900: #134e4a; + --color-accent3-950: #042f2e; + + /* Accent Quaternary - Rose (profiles) */ + --color-accent4-50: #fff1f2; + --color-accent4-100: #ffe4e6; + --color-accent4-200: #fecdd3; + --color-accent4-300: #fda4af; + --color-accent4-400: #fb7185; + --color-accent4-500: #f43f5e; + --color-accent4-600: #e11d48; + --color-accent4-700: #be123c; + --color-accent4-800: #9f1239; + --color-accent4-900: #881337; + --color-accent4-950: #4c0519; + + /* Accent Quinary - Amber (specialty) */ + --color-accent5-50: #fffbeb; + --color-accent5-100: #fef3c7; + --color-accent5-200: #fde68a; + --color-accent5-300: #fcd34d; + --color-accent5-400: #fbbf24; + --color-accent5-500: #f59e0b; + --color-accent5-600: #d97706; + --color-accent5-700: #b45309; + --color-accent5-800: #92400e; + --color-accent5-900: #78350f; + --color-accent5-950: #451a03; + + /* Accent Senary - Indigo (invoices) */ + --color-accent6-50: #eef2ff; + --color-accent6-100: #e0e7ff; + --color-accent6-200: #c7d2fe; + --color-accent6-300: #a5b4fc; + --color-accent6-400: #818cf8; + --color-accent6-500: #6366f1; + --color-accent6-600: #4f46e5; + --color-accent6-700: #4338ca; + --color-accent6-800: #3730a3; + --color-accent6-900: #312e81; + --color-accent6-950: #1e1b4b; + + /* Accent Septenary - Cyan (calendar) */ + --color-accent7-50: #ecfeff; + --color-accent7-100: #cffafe; + --color-accent7-200: #a5f3fc; + 
--color-accent7-300: #67e8f9; + --color-accent7-400: #22d3ee; + --color-accent7-500: #06b6d4; + --color-accent7-600: #0891b2; + --color-accent7-700: #0e7490; + --color-accent7-800: #155e75; + --color-accent7-900: #164e63; + --color-accent7-950: #083344; + + /* Messages - Slate Blue (professional communication) */ + --color-message-50: #f1f5f9; + --color-message-100: #e2e8f0; + --color-message-200: #cbd5e1; + --color-message-300: #94a3b8; + --color-message-400: #64748b; + --color-message-500: #475569; + --color-message-600: #334155; + --color-message-700: #1e293b; + --color-message-800: #0f172a; + --color-message-900: #020617; + --color-message-950: #010313; + + /* Notifications - Coral/Salmon (attention-grabbing but warm) */ + --color-notification-50: #fff5f5; + --color-notification-100: #ffe4e4; + --color-notification-200: #fecaca; + --color-notification-300: #fca5a5; + --color-notification-400: #f87171; + --color-notification-500: #ef5350; + --color-notification-600: #dc2626; + --color-notification-700: #b91c1c; + --color-notification-800: #991b1b; + --color-notification-900: #7f1d1d; + --color-notification-950: #450a0a; + + /* Error/Alert - Red (muted/professional) */ + --color-error-50: #fdf3f3; + --color-error-100: #fce4e4; + --color-error-200: #fbcdcd; + --color-error-300: #f6a8a8; + --color-error-400: #ee7676; + --color-error-500: #e14a4a; + --color-error-600: #cd2d2d; + --color-error-700: #ac2323; + --color-error-800: #8e2121; + --color-error-900: #772222; + --color-error-950: #400d0d; + + /* Warning - Yellow (muted/professional) */ + --color-warning-50: #fdfaeb; + --color-warning-100: #faf2c9; + --color-warning-200: #f5e394; + --color-warning-300: #efd05b; + --color-warning-400: #e8bb30; + --color-warning-500: #d8a01d; + --color-warning-600: #ba7c16; + --color-warning-700: #955916; + --color-warning-800: #7b4619; + --color-warning-900: #693a1a; + --color-warning-950: #3d1e0a; + + /* Success - Green (distinct from secondary, muted) */ + --color-success-50: 
#f0fdf2; + --color-success-100: #dcfce2; + --color-success-200: #bbf7c6; + --color-success-300: #86ef9b; + --color-success-400: #4ade6a; + --color-success-500: #22c546; + --color-success-600: #16a336; + --color-success-700: #16802e; + --color-success-800: #176528; + --color-success-900: #155324; + --color-success-950: #052e10; + + /* Neutral/Surface colors for theming */ + --color-surface-50: #f8fafc; + --color-surface-100: #f1f5f9; + --color-surface-200: #e2e8f0; + --color-surface-300: #cbd5e1; + --color-surface-400: #94a3b8; + --color-surface-500: #64748b; + --color-surface-600: #475569; + --color-surface-700: #334155; + --color-surface-800: #1e293b; + --color-surface-900: #0f172a; + --color-surface-950: #020617; +} + +/* ============================================ + LIGHT THEME (default) + ============================================ */ +:root { + color-scheme: light; + + /* Background colors - subtle blue tint for softer appearance */ + --theme-bg: var(--color-primary-50); + --theme-bg-secondary: #e8f0f8; + --theme-bg-tertiary: var(--color-primary-100); + + /* Text colors */ + --theme-text: var(--color-surface-900); + --theme-text-secondary: var(--color-surface-600); + --theme-text-muted: var(--color-surface-400); + + /* Border colors */ + --theme-border: var(--color-surface-200); + --theme-border-hover: var(--color-surface-300); + + /* Interactive states */ + --theme-hover: var(--color-primary-100); + --theme-active: var(--color-primary-200); + + /* Shadows */ + --theme-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); + --theme-shadow-md: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --theme-shadow-lg: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + + /* Card/Panel backgrounds - subtle blue tint to match overall theme */ + --theme-card: #f5f8fc; + --theme-card-hover: #edf2f9; + + /* Form input backgrounds - white for clear input affordance */ + --theme-input: #ffffff; +} + +/* 
============================================ + DARK THEME + ============================================ */ +.dark { + color-scheme: dark; + + /* Background colors */ + --theme-bg: var(--color-surface-900); + --theme-bg-secondary: var(--color-surface-800); + --theme-bg-tertiary: var(--color-surface-700); + + /* Text colors */ + --theme-text: var(--color-surface-50); + --theme-text-secondary: var(--color-surface-300); + --theme-text-muted: var(--color-surface-500); + + /* Border colors */ + --theme-border: var(--color-surface-700); + --theme-border-hover: var(--color-surface-600); + + /* Interactive states */ + --theme-hover: var(--color-surface-800); + --theme-active: var(--color-surface-700); + + /* Shadows (more subtle in dark mode) */ + --theme-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.3), 0 1px 2px -1px rgb(0 0 0 / 0.3); + --theme-shadow-md: 0 4px 6px -1px rgb(0 0 0 / 0.3), 0 2px 4px -2px rgb(0 0 0 / 0.3); + --theme-shadow-lg: 0 10px 15px -3px rgb(0 0 0 / 0.3), 0 4px 6px -4px rgb(0 0 0 / 0.3); + + /* Card/Panel backgrounds */ + --theme-card: var(--color-surface-800); + --theme-card-hover: var(--color-surface-700); + + /* Form input backgrounds - slightly lighter than card for clear affordance */ + --theme-input: var(--color-surface-700); +} + +/* ============================================ + BASE STYLES + ============================================ */ +html { + background-color: var(--theme-bg); + color: var(--theme-text); + transition: + background-color 0.2s ease, + color 0.2s ease; +} + +body { + background-color: var(--theme-bg); + min-height: 100vh; +} + +/* ============================================ + UTILITY CLASSES + ============================================ */ +@utility bg-theme { + background-color: var(--theme-bg); +} + +@utility bg-theme-secondary { + background-color: var(--theme-bg-secondary); +} + +@utility bg-theme-tertiary { + background-color: var(--theme-bg-tertiary); +} + +@utility bg-theme-card { + background-color: var(--theme-card); +} + 
+@utility bg-theme-input { + background-color: var(--theme-input); +} + +@utility text-theme { + color: var(--theme-text); +} + +@utility text-theme-secondary { + color: var(--theme-text-secondary); +} + +@utility text-theme-muted { + color: var(--theme-text-muted); +} + +@utility border-theme { + border-color: var(--theme-border); +} + +@utility border-theme-hover { + border-color: var(--theme-border-hover); +} + +@utility shadow-theme { + box-shadow: var(--theme-shadow); +} + +@utility shadow-theme-md { + box-shadow: var(--theme-shadow-md); +} + +@utility shadow-theme-lg { + box-shadow: var(--theme-shadow-lg); +} + +/* ============================================ + COMPONENT STYLES + ============================================ */ + +/* Page Titles */ +@utility page-title { + @apply text-2xl font-bold md:text-3xl; + color: var(--theme-text); +} + +@utility page-title-primary { + @apply text-3xl font-bold text-primary-500 sm:text-4xl; +} + +@utility page-subtitle { + @apply mt-2; + color: var(--theme-text-secondary); +} + +/* Section Headers */ +@utility section-title { + @apply text-xl font-semibold text-primary-500; +} + +@utility section-title-accent { + @apply text-xl font-semibold text-accent-500; +} + +/* Cards */ +@utility card { + @apply rounded-lg border; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +@utility card-interactive { + @apply rounded-lg border transition-colors hover:border-primary-500/30 hover:bg-black/5 dark:hover:bg-white/5; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +@utility card-padded { + @apply rounded-lg border p-6; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +/* Empty State */ +@utility empty-state { + @apply rounded-lg border p-8 text-center; + border-color: var(--theme-border); + background-color: var(--theme-card); +} + +@utility empty-state-icon { + @apply mx-auto mb-4 h-16 w-16; + color: var(--theme-text-muted); +} + 
+@utility empty-state-title { + @apply mb-2 text-lg font-semibold; + color: var(--theme-text); +} + +@utility empty-state-text { + @apply mx-auto max-w-md; + color: var(--theme-text-muted); +} + +/* Badges/Tags */ +@utility badge { + @apply rounded px-2 py-0.5 text-xs font-semibold; +} + +@utility badge-primary { + @apply rounded bg-primary-500/20 px-2 py-0.5 text-xs font-semibold text-primary-500; +} + +@utility badge-secondary { + @apply rounded bg-secondary-500/20 px-2 py-0.5 text-xs font-semibold text-secondary-500; +} + +@utility badge-accent { + @apply rounded bg-accent-500/20 px-2 py-0.5 text-xs font-semibold text-accent-500; +} + +@utility badge-warning { + @apply rounded bg-amber-500/20 px-2 py-0.5 text-xs font-semibold text-amber-600 dark:text-amber-400; +} + +@utility badge-success { + @apply rounded bg-success-500/20 px-2 py-0.5 text-xs font-semibold text-success-600 dark:text-success-400; +} + +/* Detail Labels (for info cards) */ +@utility detail-label { + @apply mb-1 text-xs font-medium tracking-wide uppercase; + color: var(--theme-text-muted); +} + +/* Back Links */ +@utility back-link { + @apply inline-flex items-center gap-2 text-primary-500 transition-colors hover:text-primary-600; +} + +/* Buttons */ +@utility btn { + @apply inline-block rounded-lg px-4 py-2 font-medium transition-colors; +} + +@utility btn-primary { + @apply inline-block rounded-lg bg-primary-500 px-4 py-2 font-medium text-white transition-colors hover:bg-primary-600 active:bg-primary-700; +} + +@utility btn-accent { + @apply inline-block rounded-lg bg-accent-500 px-4 py-2 font-medium text-white transition-colors hover:bg-accent-600 active:bg-accent-700; +} + +@utility btn-secondary { + @apply inline-block rounded-lg border border-theme bg-theme px-4 py-2 font-medium text-theme transition-colors hover:bg-black/5 dark:hover:bg-white/10; +} + +@utility btn-ghost { + @apply inline-block rounded-lg px-4 py-2 font-medium transition-colors hover:bg-black/5 dark:hover:bg-white/10; +} 
+ +@utility btn-outline { + @apply inline-block rounded-lg border border-theme bg-transparent px-4 py-2 font-medium text-theme transition-colors hover:bg-black/5 dark:hover:bg-white/10; +} + +@utility btn-accent7 { + @apply inline-block rounded-lg bg-accent7-500 px-4 py-2 font-medium text-white transition-colors hover:bg-accent7-600 active:bg-accent7-700; +} + +/* Icon Buttons */ +@utility btn-icon { + @apply inline-flex items-center justify-center rounded-lg p-2 transition-colors hover:bg-black/5 active:bg-black/10 dark:hover:bg-white/10 dark:active:bg-white/15; +} + +@utility btn-icon-sm { + @apply inline-flex items-center justify-center rounded-lg p-1.5 transition-colors hover:bg-black/5 active:bg-black/10 dark:hover:bg-white/10 dark:active:bg-white/15; +} + +@utility btn-icon-xs { + @apply inline-flex items-center justify-center rounded p-1 transition-colors hover:bg-black/5 active:bg-black/10 dark:hover:bg-white/10 dark:active:bg-white/15; +} + +/* Form Layout */ +@utility form-section { + @apply space-y-6; +} + +@utility form-fields { + @apply space-y-4; +} + +/* Expandable Sections (for use with
) */ +@utility expandable-header { + @apply flex w-full cursor-pointer list-none items-center justify-between transition-opacity hover:opacity-80; +} + +@utility expandable-chevron { + @apply h-5 w-5 flex-shrink-0 transition-transform group-open:rotate-180; +} + +/* Scope/Area Styles */ +@utility scope-container { + @apply rounded-lg border border-accent-500/20 bg-gradient-to-br from-accent-500/5 to-accent-500/10 p-5 text-sm shadow-sm dark:from-accent-500/10 dark:to-accent-500/5; +} + +@utility scope-title { + @apply text-base font-bold text-accent-700 dark:text-accent-300; +} + +@utility area-container { + @apply rounded-r-md border-l-4 border-primary-500 py-2 pr-2 pl-4; + background-color: color-mix(in srgb, var(--theme-card) 40%, transparent); +} + +@utility area-title { + @apply text-sm font-semibold text-primary-600 dark:text-primary-400; +} + +@utility task-container { + @apply rounded-md border-l-2 border-surface-400/20 bg-black/5 p-3 dark:bg-white/5; +} + +/* Schedule Day Badges */ +@utility schedule-day { + @apply rounded bg-primary-500/20 px-2 py-1 text-xs text-primary-500; +} + +@utility schedule-day-weekend { + @apply rounded bg-accent-500/20 px-2 py-1 text-xs text-accent-500; +} + +/* Contact Cards */ +@utility contact-name { + @apply font-medium; + color: var(--theme-text); +} + +@utility contact-email { + @apply text-sm hover:text-primary-500; + color: var(--theme-text-secondary); +} + +/* List Item Divider */ +@utility list-divider { + @apply border-b pb-4; + border-color: var(--theme-border); +} + +/* Subtle Backgrounds */ +@utility bg-subtle { + @apply bg-black/5 dark:bg-white/5; +} + +@utility bg-subtle-hover { + @apply hover:bg-black/5 dark:hover:bg-white/10; +} + +@utility bg-subtle-active { + @apply active:bg-black/10 dark:active:bg-white/15; +} + +/* Card Variants by Section */ +@utility card-account { + @apply card-padded card-interactive border-l-4 border-l-primary-500/50; +} + +@utility card-service { + @apply card-padded card-interactive 
border-l-4 border-l-secondary-500/50; +} + +@utility card-project { + @apply card-padded card-interactive border-l-4 border-l-accent-500/50; +} + +@utility card-report { + @apply card-padded card-interactive border-l-4 border-l-accent2-500/50; +} + +@utility card-customer { + @apply card-padded card-interactive border-l-4 border-l-accent3-500/50; +} + +@utility card-profile { + @apply overflow-hidden card-padded card-interactive border-l-4 border-l-accent4-500/50; +} + +@utility card-invoice { + @apply card-padded card-interactive border-l-4 border-l-accent6-500/50; +} + +@utility card-team { + @apply card-padded card-interactive border-l-4 border-l-accent5-500/50; +} + +@utility card-specialty { + @apply card-padded card-interactive border-l-4 border-l-accent5-500/50; +} + +@utility card-calendar { + @apply card-padded card-interactive border-l-4 border-l-accent7-500/50; +} + +@utility card-message { + @apply card-padded card-interactive border-l-4 border-l-message-400/50; +} + +@utility card-notification { + @apply card-padded card-interactive border-l-4 border-l-notification-500/50; +} + +/* Section Header Variants */ +@utility section-header-primary { + @apply border-b border-primary-500/20 bg-primary-500/5; +} + +@utility section-header-secondary { + @apply border-b border-secondary-500/20 bg-secondary-500/5; +} + +@utility section-header-accent { + @apply border-b border-accent-500/20 bg-accent-500/5; +} + +@utility section-header-accent2 { + @apply border-b border-accent2-500/20 bg-accent2-500/5; +} + +@utility section-header-accent3 { + @apply border-b border-accent3-500/20 bg-accent3-500/5; +} + +@utility section-header-accent4 { + @apply border-b border-accent4-500/20 bg-accent4-500/5; +} + +@utility section-header-accent5 { + @apply border-b border-accent5-500/20 bg-accent5-500/5; +} + +@utility section-header-accent6 { + @apply border-b border-accent6-500/20 bg-accent6-500/5; +} + +@utility section-header-accent7 { + @apply border-b 
border-accent7-500/20 bg-accent7-500/5; +} + +@utility section-header-message { + @apply border-b border-message-400/20 bg-message-400/5; +} + +@utility section-header-notification { + @apply border-b border-notification-500/20 bg-notification-500/5; +} + +/* ============================================ + SEMANTIC STATE UTILITIES + ============================================ */ + +/* Danger/Error States */ +@utility bg-danger { + @apply bg-error-50 dark:bg-error-900/20; +} + +@utility text-danger { + @apply text-error-700 dark:text-error-400; +} + +@utility border-danger { + @apply border-error-200 dark:border-error-800; +} + +@utility danger-zone { + @apply rounded-lg border border-danger bg-danger p-6; +} + +@utility danger-zone-title { + @apply mb-2 text-lg font-semibold text-danger; +} + +@utility danger-zone-text { + @apply mb-4 text-sm text-error-600 dark:text-error-300; +} + +@utility btn-danger { + @apply inline-block rounded-lg bg-error-600 px-4 py-2 text-sm font-medium text-white transition-colors hover:bg-error-700 active:bg-error-800; +} + +/* Warning States */ +@utility bg-warning { + @apply bg-warning-50 dark:bg-warning-900/20; +} + +@utility text-warning { + @apply text-warning-700 dark:text-warning-300; +} + +@utility border-warning { + @apply border-warning-200 dark:border-warning-800; +} + +@utility warning-box { + @apply rounded-xl border border-warning bg-warning p-6; +} + +@utility warning-box-title { + @apply font-medium text-warning-800 dark:text-warning-200; +} + +@utility warning-box-text { + @apply mt-1 text-sm text-warning-700 dark:text-warning-300; +} + +@utility warning-box-icon { + @apply h-6 w-6 flex-shrink-0 text-warning-500; +} + +/* Success States */ +@utility bg-success { + @apply bg-success-50 dark:bg-success-900/20; +} + +@utility text-success { + @apply text-success-700 dark:text-success-400; +} + +@utility border-success { + @apply border-success-200 dark:border-success-700; +} + +@utility success-box { + @apply rounded-lg 
border border-success bg-success p-3; +} + +@utility success-box-title { + @apply flex items-center gap-2 text-sm font-medium text-success-700 dark:text-success-400; +} + +@utility success-box-text { + @apply mt-1 text-sm text-success-600 dark:text-success-300; +} + +/* Info States */ +@utility bg-info { + @apply bg-primary-50 dark:bg-primary-900/20; +} + +@utility text-info { + @apply text-primary-700 dark:text-primary-400; +} + +@utility border-info { + @apply border-primary-200 dark:border-primary-800; +} + +@utility info-box { + @apply rounded-lg border border-info bg-info p-3; +} + +/* Neutral/Inactive States */ +@utility badge-neutral { + @apply rounded bg-surface-100 px-2 py-0.5 text-xs font-semibold text-surface-600 dark:bg-surface-700 dark:text-surface-300; +} + +@utility badge-error { + @apply rounded bg-error-500/20 px-2 py-0.5 text-xs font-semibold text-error-600 dark:text-error-400; +} + +/* Form Input Borders (for checkboxes, radios) */ +@utility border-input { + @apply border-surface-300 dark:border-surface-600; +} + +/* Overlay Backgrounds */ +@utility bg-overlay { + @apply bg-black/50 dark:bg-black/70; +} + +@utility bg-overlay-heavy { + @apply bg-black/80; +} + +/* ============================================ + FORM UTILITIES + ============================================ */ + +/* Form input base - unifies input styling across all forms */ +@utility input-base { + @apply w-full rounded-lg border border-theme bg-theme-input px-3 py-2 text-theme placeholder:text-theme-muted focus:border-primary-500 focus:ring-1 focus:ring-primary-500 disabled:cursor-not-allowed disabled:opacity-50; +} + +/* Textarea base - same as input but allows resize */ +@utility textarea-base { + @apply w-full rounded-lg border border-theme bg-theme-input px-3 py-2 text-theme placeholder:text-theme-muted focus:border-primary-500 focus:ring-1 focus:ring-primary-500 disabled:cursor-not-allowed disabled:opacity-50; +} + +/* Select base */ +@utility select-base { + @apply w-full 
rounded-lg border border-theme bg-theme-input px-3 py-2 text-theme focus:border-primary-500 focus:ring-1 focus:ring-primary-500 disabled:cursor-not-allowed disabled:opacity-50; +} + +/* Form label */ +@utility form-label { + @apply mb-1.5 block text-sm font-medium text-theme; +} + +/* Required indicator */ +@utility required-indicator { + @apply text-red-500; +} + +/* Form button row */ +@utility form-actions { + @apply flex items-center gap-3 pt-4; +} + +/* Submit button */ +@utility btn-submit { + @apply flex-1 rounded-lg bg-primary-500 px-4 py-2 font-medium text-white transition-colors hover:bg-primary-600 disabled:cursor-not-allowed disabled:opacity-50; +} + +/* Cancel button */ +@utility btn-cancel { + @apply rounded-lg border border-theme bg-theme px-4 py-2 font-medium text-theme transition-colors hover:bg-black/5 disabled:cursor-not-allowed disabled:opacity-50 dark:hover:bg-white/10; +} + +/* ============================================ + ALERT UTILITIES + ============================================ */ + +/* Error alert - replaces hardcoded red error boxes */ +@utility alert-error { + @apply rounded-lg border border-red-400 bg-red-50 p-3 text-sm text-red-700 dark:border-red-600 dark:bg-red-900/20 dark:text-red-400; +} + +/* ============================================ + INTERACTIVE STATE UTILITIES + ============================================ */ + +/* Interactive hover/active - unifies hover states */ +@utility interactive { + @apply transition-colors hover:bg-black/5 active:bg-black/10 dark:hover:bg-white/10 dark:active:bg-white/15; +} + +/* Delete button hover state */ +@utility btn-delete-hover { + @apply transition-colors hover:bg-red-50 hover:text-red-500 dark:hover:bg-red-900/20; +} + +/* ============================================ + MOBILE UTILITIES + ============================================ */ + +/* Mobile FAB (Floating Action Button) */ +@utility fab-primary { + @apply fixed right-4 bottom-20 z-30 flex h-14 w-14 items-center justify-center 
rounded-full bg-primary-500 text-white shadow-lg transition-colors hover:bg-primary-600 sm:hidden; +} + +/* ============================================ + SEARCH UTILITIES + ============================================ */ + +/* Search container */ +@utility search-container { + @apply relative; +} + +/* Search icon positioning */ +@utility search-icon { + @apply pointer-events-none absolute top-1/2 left-3 h-5 w-5 -translate-y-1/2 text-theme-muted; +} + +/* Search input with icon space */ +@utility search-input { + @apply input-base py-2 pr-4 pl-10; +} + +/* ============================================ + CHECKBOX/RADIO UTILITIES + ============================================ */ + +/* Checkbox base */ +@utility checkbox-base { + @apply h-4 w-4 rounded border-surface-300 text-primary-500 focus:ring-primary-500 dark:border-surface-600; +} + +/* Checkbox label */ +@utility checkbox-label { + @apply text-sm text-theme; +} diff --git a/frontend/src/routes/login/+page.svelte b/frontend/src/routes/login/+page.svelte new file mode 100644 index 0000000..f0d5331 --- /dev/null +++ b/frontend/src/routes/login/+page.svelte @@ -0,0 +1,12 @@ + + +
+

Redirecting to login...

+
diff --git a/frontend/src/routes/logout/+page.svelte b/frontend/src/routes/logout/+page.svelte new file mode 100644 index 0000000..d5d97d9 --- /dev/null +++ b/frontend/src/routes/logout/+page.svelte @@ -0,0 +1,12 @@ + + +
+

Logging out...

+
diff --git a/frontend/src/routes/messages/+page.server.ts b/frontend/src/routes/messages/+page.server.ts new file mode 100644 index 0000000..f92e636 --- /dev/null +++ b/frontend/src/routes/messages/+page.server.ts @@ -0,0 +1,61 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + MY_CONVERSATIONS_QUERY, + type MyConversationsQueryResult +} from '$lib/graphql/queries/messaging'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 50; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated + if (!user) { + throw redirect(303, '/login'); + } + + // Parse URL parameters + const includeArchived = url.searchParams.get('archived') === 'true'; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + conversations: [], + includeArchived, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: MY_CONVERSATIONS_QUERY, + variables: { + includeArchived, + limit, + offset + } + }); + + return { + conversations: result.data?.myConversations ?? 
[], + includeArchived, + page, + limit + }; + } catch (err) { + console.error('Failed to fetch conversations:', err); + return { + conversations: [], + includeArchived, + page, + limit + }; + } +}; diff --git a/frontend/src/routes/messages/+page.svelte b/frontend/src/routes/messages/+page.svelte new file mode 100644 index 0000000..3642ff4 --- /dev/null +++ b/frontend/src/routes/messages/+page.svelte @@ -0,0 +1,216 @@ + + + + Messages - Nexus + + + diff --git a/frontend/src/routes/messages/[conversation]/+page.server.ts b/frontend/src/routes/messages/[conversation]/+page.server.ts new file mode 100644 index 0000000..59bb12d --- /dev/null +++ b/frontend/src/routes/messages/[conversation]/+page.server.ts @@ -0,0 +1,48 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { CONVERSATION_QUERY, type ConversationQueryResult } from '$lib/graphql/queries/messaging'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { redirect, error } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated + if (!user) { + throw redirect(303, '/login'); + } + + if (!locals.cookie) { + throw error(500, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + try { + const [conversationResult, teamResult] = await Promise.all([ + client.query({ + query: CONVERSATION_QUERY, + variables: { id: params.conversation } + }), + // Fetch team profiles for adding participants + client + .query({ + query: TEAM_PROFILES_QUERY, + variables: { filter: { isActive: true } } + }) + .catch(() => ({ data: null })) + ]); + + if (!conversationResult.data?.conversation) { + throw error(404, 'Conversation not found'); + } + + return { + conversation: conversationResult.data.conversation, + teamProfiles: teamResult.data?.teamProfiles ?? 
[] + }; + } catch (err) { + console.error('Failed to fetch conversation:', err); + throw error(500, 'Failed to load conversation'); + } +}; diff --git a/frontend/src/routes/messages/[conversation]/+page.svelte b/frontend/src/routes/messages/[conversation]/+page.svelte new file mode 100644 index 0000000..a4c9307 --- /dev/null +++ b/frontend/src/routes/messages/[conversation]/+page.svelte @@ -0,0 +1,323 @@ + + + + {getConversationTitle()} - Messages - Nexus + + +
+ +
+
+
+ + + + + +
+

{getConversationTitle()}

+

+ {conversation.participants.length} participant{conversation.participants.length !== 1 + ? 's' + : ''} +

+
+
+ + +
+ + +
+
+
+ + +
+
+ {#each messages as message (message.id)} + {#if message.isSystemMessage} + +
+
+ + + + {message.content} +
+
+ {:else} + +
+ +
+ {message.authorProfile?.fullName?.charAt(0).toUpperCase() ?? '?'} +
+ +
+ +
+ + {message.authorProfile?.fullName ?? 'Unknown'} + + + {formatTime(message.createdAt)} + +
+ + +
+ {#if message.isDeleted} + This message was deleted + {:else} + {message.content} + {/if} +
+
+
+ {/if} + {:else} +
+ No messages yet. Start the conversation! +
+ {/each} +
+
+ + +
+
{ + e.preventDefault(); + handleSendMessage(); + }} + > + + + +
+
+
diff --git a/frontend/src/routes/messages/new/+page.server.ts b/frontend/src/routes/messages/new/+page.server.ts
new file mode 100644
index 0000000..14c39db
--- /dev/null
+++ b/frontend/src/routes/messages/new/+page.server.ts
@@ -0,0 +1,77 @@
+import type { PageServerLoad } from './$types';
+import { createServerClient } from '$lib/graphql/client';
+import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team';
+import {
+	CUSTOMER_PROFILES_QUERY,
+	type CustomerProfilesQueryResult
+} from '$lib/graphql/queries/customerProfile';
+import { redirect } from '@sveltejs/kit';
+
+export const load: PageServerLoad = async ({ locals, parent, url }) => { // new-conversation form: recipients + prefill
+	const { user } = await parent();
+
+	// Redirect if not authenticated
+	if (!user) {
+		throw redirect(303, '/login');
+	}
+
+	// Check if user is admin (can message customers)
+	const isAdmin = user.__typename === 'TeamProfileType' && user.role === 'ADMIN';
+	const isCustomer = user.__typename === 'CustomerProfileType';
+
+	// Parse URL params for pre-filling the form
+	const prefill = {
+		subject: url.searchParams.get('subject') ?? undefined,
+		entityType: url.searchParams.get('entityType') ?? undefined,
+		entityId: url.searchParams.get('entityId') ?? undefined,
+		participants: url.searchParams.get('participants')?.split(',').filter(Boolean) ?? [] // comma-separated ids; filter(Boolean) drops empties
+	};
+
+	if (!locals.cookie) { // no session cookie: render the form with no recipients to pick from
+		return {
+			teamProfiles: [],
+			customerProfiles: [],
+			isAdmin,
+			prefill
+		};
+	}
+
+	const client = createServerClient(locals.cookie);
+
+	try {
+		// Fetch team profiles for recipient selection
+		const teamResult = await client.query({
+			query: TEAM_PROFILES_QUERY
+		});
+
+		// Fetch customer profiles if admin
+		let customerProfiles: CustomerProfilesQueryResult['customerProfiles'] = [];
+		if (isAdmin) {
+			const customerResult = await client.query({
+				query: CUSTOMER_PROFILES_QUERY
+			});
+			customerProfiles = customerResult.data?.customerProfiles ?? [];
+		}
+
+		// Filter team profiles to only admins if user is a customer
+		let teamProfiles = teamResult.data?.teamProfiles ?? [];
+		if (isCustomer) {
+			teamProfiles = teamProfiles.filter((p) => p.role === 'ADMIN'); // customers may only contact admins
+		}
+
+		return {
+			teamProfiles,
+			customerProfiles,
+			isAdmin,
+			prefill
+		};
+	} catch (err) { // best-effort: an API failure degrades to empty recipient lists
+		console.error('Failed to fetch profiles:', err);
+		return {
+			teamProfiles: [],
+			customerProfiles: [],
+			isAdmin,
+			prefill
+		};
+	}
+};
diff --git a/frontend/src/routes/messages/new/+page.svelte b/frontend/src/routes/messages/new/+page.svelte
new file mode 100644
index 0000000..496396c
--- /dev/null
+++ b/frontend/src/routes/messages/new/+page.svelte
@@ -0,0 +1,422 @@
+
+
+
+ New Message - Nexus
+
+
+ + +
+ + + + + +

New Message

+
+ + {#if error} +
+ {error} +
+ {/if} + +
+
{ + e.preventDefault(); + handleCreate(); + }} + > + + + + + {#if selectedParticipants.length > 1 && !hasCustomerParticipant} +
+ Conversation Type +
+ + +
+
+ {/if} + + {#if hasCustomerParticipant} +
+ Support conversation - Conversations with customers are automatically + set as Support type and include dispatch. +
+ {/if} + + +
+ Recipients + + {#if selectedParticipants.length > 0} +
+ {#each selectedParticipants as participant} + + {#if participant.type === 'customer_profile'} + + + + {/if} + {getParticipantName(participant)} + + + {/each} +
+ {/if} + + + {#if isAdmin && customerProfiles.length > 0} +
+ + +
+ {/if} + + + + + + +
+ {#if activeTab === 'team'} + {#each filteredTeamProfiles as profile (profile.id)} + + {:else} +
No team members found
+ {/each} + {:else} + {#each filteredCustomerProfiles as profile (profile.id)} + + {:else} +
No customers found
+ {/each} + {/if} +
+
+ + + + + +
+ Cancel + +
+
+
+
+
diff --git a/frontend/src/routes/notifications/+page.server.ts b/frontend/src/routes/notifications/+page.server.ts new file mode 100644 index 0000000..1b01620 --- /dev/null +++ b/frontend/src/routes/notifications/+page.server.ts @@ -0,0 +1,61 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + MY_NOTIFICATIONS_QUERY, + type MyNotificationsQueryResult +} from '$lib/graphql/queries/notifications'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 50; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated + if (!user) { + throw redirect(303, '/login'); + } + + // Parse URL parameters + const unreadOnly = url.searchParams.get('unread') === 'true'; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + notifications: [], + unreadOnly, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: MY_NOTIFICATIONS_QUERY, + variables: { + unreadOnly, + limit, + offset + } + }); + + return { + notifications: result.data?.myNotifications ?? [], + unreadOnly, + page, + limit + }; + } catch (err) { + console.error('Failed to fetch notifications:', err); + return { + notifications: [], + unreadOnly, + page, + limit + }; + } +}; diff --git a/frontend/src/routes/notifications/+page.svelte b/frontend/src/routes/notifications/+page.svelte new file mode 100644 index 0000000..4cd04bb --- /dev/null +++ b/frontend/src/routes/notifications/+page.svelte @@ -0,0 +1,276 @@ + + + + Notifications - Nexus + + +
+ + +
+
+

+ Notifications +

+

Stay updated with your latest activity

+
+ {#if notifications.some((n) => !n.isRead)} + + {/if} +
+ + +
+ + +
+ + + {#if notifications.length > 0} +
+ {#each notifications as notification (notification.id)} +
+ + + + +
+ {/each} +
+ {:else} + +
+ + + +

+ {activeTab === 'unread' ? 'No unread notifications' : 'No notifications yet'} +

+

+ {activeTab === 'unread' + ? "You're all caught up!" + : "When you receive notifications, they'll appear here."} +

+
+ {/if} +
+
diff --git a/frontend/src/routes/pricing/+page.svelte b/frontend/src/routes/pricing/+page.svelte new file mode 100644 index 0000000..dc48b91 --- /dev/null +++ b/frontend/src/routes/pricing/+page.svelte @@ -0,0 +1,372 @@ + + + + Pricing - Acme Services + + + + +
+ + +
+

+ Honest Pricing, No Surprises +

+

+ Commercial cleaning isn't one-size-fits-all, and neither is pricing. Here's a transparent + look at how we determine costs and what you can expect when working with us. +

+
+
+
+ + +
+ +
+

The Reality of Cleaning Costs

+
+

+ Let's be upfront: quality commercial cleaning has real costs. Professional-grade + chemicals, reliable equipment, trained staff, insurance, and the simple reality of getting + to your location all factor into what you pay. +

+

+ We've seen the race to the bottom in this industry. Companies quoting impossibly low + prices, then cutting corners or disappearing entirely. That's not how we operate. Our + pricing reflects the true cost of doing the job right, consistently, with people who are + paid fairly for their work. +

+

+ What you get in return is peace of mind. A cleaning partner who shows up, does the work + properly, and stands behind it. +

+
+
+
+
+ + +
+ +
+
+
+ + + + Recurring Service +
+

Janitorial Service Accounts

+

+ Our routine janitorial service starts at $400/month. This baseline typically covers: +

+
    +
  • + + + + Smaller facilities up to approximately 5,000 sq ft +
  • +
  • + + + + Once-per-week service frequency +
  • +
  • + + + + Single-stall restroom facilities +
  • +
  • + + + + Standard office/retail cleaning scope +
  • +
+

+ This represents our sweet spot: facilities that aren't so large they need a dedicated + crew, but substantial enough to benefit from professional service. It's the kind of + building where the owner or manager has real work to do and shouldn't be spending their + evenings emptying trash cans. +

+
+ +
+

What Affects Your Price

+
+
+

Square Footage

+

+ More space means more time. A 15,000 sq ft facility costs more than a 3,000 sq ft + office. That's just physics. +

+
+
+

Service Frequency

+

+ Once a week is our baseline. Twice weekly, three times, or daily service scales the + price accordingly. +

+
+
+

Restroom Count & Complexity

+

+ A single-stall bathroom is quick. A facility with multiple multi-stall restrooms + requires significantly more time and supplies. +

+
+
+

Facility Type

+

+ Medical offices have different requirements than retail stores. Industrial spaces + differ from professional offices. We tailor our approach and pricing accordingly. +

+
+
+

Scope of Work

+

+ Basic cleaning vs. detailed cleaning. Standard tasks vs. specialized needs. Your scope + determines your price. +

+
+
+
+
+
+
+ + +
+ +
+
+

What Goes Into the Price

+
+
+
+

Minimum Project Cost

+ $300 +
+

+ For smaller, straightforward projects under ideal conditions. +

+
+
+

+ Every project has real costs before we even start cleaning: professional-grade + chemicals, equipment, transportation, and the expertise to use them properly. +

+

+ We price our work to reflect what it actually costs to do it right and to pay our team + fairly for skilled work. That's not an apology. It's a commitment to the standard we hold ourselves to. +

+
+
+
+ +
+
+ + + + One-Time Projects +
+

Project-Based Work

+

+ Floor care, deep cleans, post-construction cleanup, and other one-time projects start as + low as $300, depending on the scope and + circumstances. +

+

+ Project pricing varies more than recurring service because the variables are endless. A + small office floor strip-and-wax is very different from a restaurant kitchen deep clean or + a post-renovation cleanup. +

+

+ We provide detailed quotes after understanding exactly what you need. No ballpark figures + that balloon later. Just honest pricing based on the actual work involved. +

+
+
+
+
+ + +
+ +
+

+ Why We Don't Race to the Bottom +

+
+

+ You can find cheaper cleaning services. We know that. There's always someone willing to + quote less, pay their workers less, skip the insurance, or cut corners on supplies. +

+

+ Here's what we've learned in this industry: you get what you pay for. The company quoting + half our price often delivers half the service, if they show up at all. We've taken over + accounts from bargain providers more times than we can count, always hearing the same + story: inconsistent service, missed cleanings, poor communication, and eventually, just... + nothing. +

+

+ Our pricing reflects what it actually costs to run a professional operation. We pay our + team fairly. We carry proper insurance. We use quality products. We answer our phones. We + show up. +

+

+ This isn't just about business. It's about respect for the work itself. Cleaning is + skilled labor that keeps workplaces healthy, safe, and functional. It deserves to be done + well and compensated fairly. That belief is at the core of who we are. +

+
+
+
+
+ + +
+ +
+

Getting Your Quote

+

+ We don't do blind quotes. Here's our process for getting you accurate pricing. +

+
+ +
+
+
+ 1 +
+

Initial Conversation

+

+ Tell us about your facility and what you're looking for. We'll ask questions to understand + your needs. +

+
+
+
+ 2 +
+

Site Walkthrough

+

+ We visit your location to see the space firsthand. This is how we give you an accurate + quote, not a guess. +

+
+
+
+ 3 +
+

Detailed Proposal

+

+ You receive a clear proposal outlining exactly what's included, how often, and what it + costs. No hidden fees. +

+
+
+
+
+ + +
+ +
+

Let's talk numbers

+

+ Every facility is different. Contact us for a free walkthrough and honest quote. No + obligation, no pressure. +

+ + Request a Quote + +
+
+
diff --git a/frontend/src/routes/profile/+page.server.ts b/frontend/src/routes/profile/+page.server.ts new file mode 100644 index 0000000..05fdd3c --- /dev/null +++ b/frontend/src/routes/profile/+page.server.ts @@ -0,0 +1,31 @@ +import { redirect } from '@sveltejs/kit'; +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { MY_PROFILE_QUERY, type MyProfileQueryResult } from '$lib/graphql/queries/myProfile'; + +export const load: PageServerLoad = async ({ locals }) => { + // Require authentication + if (!locals.cookie) { + redirect(302, '/login'); + } + + const client = createServerClient(locals.cookie); + + try { + const result = await client.query({ + query: MY_PROFILE_QUERY, + fetchPolicy: 'network-only' + }); + + if (!result.data?.me) { + redirect(302, '/login'); + } + + return { + profile: result.data.me + }; + } catch (error) { + console.error('Failed to fetch profile:', error); + redirect(302, '/login'); + } +}; diff --git a/frontend/src/routes/profile/+page.svelte b/frontend/src/routes/profile/+page.svelte new file mode 100644 index 0000000..0a2be25 --- /dev/null +++ b/frontend/src/routes/profile/+page.svelte @@ -0,0 +1,349 @@ + + + + My Profile - Nexus + + +
+ + {#if profile} + + {profile.status} + + +
+ +
+
+ +
+
+
+

First Name

+

{profile.firstName}

+
+
+

Last Name

+

{profile.lastName}

+
+
+

Email

+

{profile.email || '—'}

+
+
+

Phone

+

{profile.phone || '—'}

+
+
+
+ + + {#if isTeamProfile(profile)} +
+
+ +
+
+
+

Role

+ + {formatRole(profile.role)} + +
+
+

Status

+ {profile.status} +
+
+
+ {/if} + + + {#if isCustomerProfile(profile) && profile.customers.length > 0} +
+
+ +
+
+
    + {#each profile.customers as customer} +
  • {customer.name}
  • + {/each} +
+
+
+ {/if} + + + {#if profile.notes} +
+
+ +
+
+

{profile.notes}

+
+
+ {/if} + + +
+
+ +
+
+
+

Account Type

+

+ {isTeamProfile(profile) ? 'Team Member' : 'Customer'} +

+
+
+

Member Since

+

{formatDate(profile.createdAt)}

+
+
+

Last Updated

+

{formatDate(profile.updatedAt)}

+
+
+
+
+ + + +
+ {#if error} +
+

{error}

+
+ {/if} + +
+ + + +
+ + + + + +
+ + +
+
+
+ {:else} +
+ + + +

Profile not found

+

Unable to load your profile. Please try logging in again.

+ Login +
+ {/if} +
+
diff --git a/frontend/src/routes/services/+page.svelte b/frontend/src/routes/services/+page.svelte new file mode 100644 index 0000000..d69f250 --- /dev/null +++ b/frontend/src/routes/services/+page.svelte @@ -0,0 +1,545 @@ + + + + Our Services - Acme Services + + + + +
+ + +
+

Our Services

+

+ From routine janitorial maintenance to specialized deep cleaning projects, we provide + comprehensive commercial cleaning services throughout your region. +

+
+
+
+ + +
+ +
+
+
+ + + + Recurring Service +
+

Janitorial Service

+

+ Our core service offering. We work with you to develop a customized scope of work that + covers every area of your facility, with task frequencies tailored to your needs and + schedule. +

+
+
+ + + + Daily, weekly, and monthly task scheduling +
+
+ + + + Restroom cleaning and sanitation +
+
+ + + + Trash removal and recycling +
+
+ + + + Dusting, vacuuming, and mopping +
+
+ + + + Break room and common area maintenance +
+
+ + + + Window and glass cleaning (interior) +
+
+
+
+ Office cleaning service +
+
+
+
+ + +
+ +
+
+ Floor care and maintenance +
+
+
+ + + + Project-Based +
+

Floor Care

+

+ Keep your floors looking their best with professional floor care services. Whether you + need regular maintenance or a complete restoration, we have the equipment and expertise to + handle it. +

+
+
+ + + + Strip and wax (VCT, LVT, and other hard floors) +
+
+ + + + High-speed buffing and burnishing +
+
+ + + + Scrub and recoat maintenance +
+
+ + + + Carpet extraction and deep cleaning +
+
+ + + + Tile and grout cleaning +
+
+
+
+
+
+ + +
+ +
+
+
+ + + + Specialty Service +
+

Commercial Kitchen Cleaning

+

+ A thorough top-down cleaning service for commercial kitchens. We cover everything from + ceiling tiles to floors, giving your kitchen a deep clean that goes beyond daily + maintenance. +

+
+
+ + + + Ceiling tile cleaning and degreasing +
+
+ + + + Wall washing and degreasing +
+
+ + + + Equipment exterior cleaning +
+
+ + + + Conveyor pizza oven cleaning +
+
+ + + + Floor degreasing and deep cleaning +
+
+ +
+
+ Commercial kitchen cleaning +
+
+
+
+ + +
+ +
+

Specialty & One-Time Projects

+

+ Beyond our core services, we handle a variety of specialty cleaning projects. If you have a + unique cleaning need, let's talk. +

+
+ +
+
+

Impact Cleaning

+

+ Intensive deep cleaning to reset your facility. Perfect for spaces that need a fresh start + or have fallen behind on maintenance. +

+
+
+

Turnovers / Move-Out Cleaning

+

+ Deep cleaning for tenant transitions and property turnovers. We work with property + managers to get spaces move-in ready. +

+
+
+

Post-Construction Cleanup

+

+ Remove construction dust and debris to prepare your space for occupancy. +

+
+
+

Window Cleaning

+

+ Interior and exterior window cleaning for a spotless finish. +

+
+
+

Emergency Response

+

+ Rapid response cleaning for unexpected situations. +

+
+
+

Something Else?

+

+ Have a unique cleaning challenge? Give us a call. You'd be surprised what we've tackled. + We love a good project. +

+
+
+
+
+ + +
+ +
+

How It Works

+

+ Getting started is simple. Here's what to expect. +

+
+ +
+
+
+ 1 +
+

Consultation

+

+ We visit your facility to understand your needs and assess the scope of work. +

+
+
+
+ 2 +
+

Custom Proposal

+

+ You receive a detailed proposal outlining services, frequency, and pricing. +

+
+
+
+ 3 +
+

Onboarding

+

+ We set up your service schedule and provide access to your customer portal. +

+
+
+
+ 4 +
+

Service Begins

+

+ Our team arrives on schedule to deliver consistent, quality cleaning. +

+
+
+
+
+ + +
+ +
+

Service Area

+

+ We provide commercial cleaning services to businesses throughout your region. + Contact us to confirm service availability in your area. +

+
+
+
+ + +
+ +
+

Let's discuss your cleaning needs

+

+ Every facility is different. Contact us for a free consultation and customized quote. +

+ + Request a Quote + +
+
+
diff --git a/frontend/src/routes/standard/+page.svelte b/frontend/src/routes/standard/+page.svelte new file mode 100644 index 0000000..2ba0c14 --- /dev/null +++ b/frontend/src/routes/standard/+page.svelte @@ -0,0 +1,438 @@ + + + + The Service Standard - Our Commitment to Excellence + + + + +
+ + +
+

+ The Service Standard +

+

+ Excellence isn't a buzzword. It's how we operate. The Service Standard is our promise that + every service, every visit, and every interaction meets the level of quality your business + deserves. +

+
+
+
+ + +
+ +
+
+

What It Means

+
+

+ When we say "Service Standard", we're talking about a measurable commitment, not just + good intentions. It means your facility gets the same thorough attention whether it's + our first visit or our hundredth. +

+

+ It means when something isn't right, we make it right. No excuses, no runaround. If + you're not satisfied with a service, we'll return to address it at no additional cost. +

+

+ This standard exists because we believe cleaning work deserves respect, and that starts with doing it well. When we hold ourselves to a higher bar, everyone + benefits: our clients get reliable service, and our team takes pride in work that + matters. +

+
+
+
+ Quality cleaning service +
+
+
+
+ + +
+ +
+

The Four Pillars

+

+ Every aspect of our service is built on these foundational commitments. +

+
+ +
+ +
+
+ + + +
+

Consistency

+

+ A clean building one week and a messy one the next isn't service. It's a gamble. We use + detailed checklists and documented scopes of work to ensure every task gets completed, + every time. +

+
    +
  • + + + + Documented task lists for every visit +
  • +
  • + + + + Same cleaning team when possible +
  • +
  • + + + + Routine quality inspections +
  • +
+
+ + +
+
+ + + +
+

Accountability

+

+ We own our work. When something falls short, we don't make excuses. We fix it. Our + satisfaction guarantee means you never pay for service that doesn't meet expectations. +

+
    +
  • + + + + Satisfaction guarantee on every service +
  • +
  • + + + + Direct line to management +
  • +
  • + + + + Rapid response to concerns +
  • +
+
+ + +
+
+ + + +
+

Communication

+

+ No surprises, no ghosting. We keep you informed about your service, respond promptly to + questions, and proactively let you know if anything changes. +

+
    +
  • + + + + Customer portal for service tracking +
  • +
  • + + + + Prompt responses to inquiries +
  • +
  • + + + + Proactive schedule updates +
  • +
+
+ + +
+
+ + + +
+

Professionalism

+

+ We represent your facility when we're in it. Our team arrives prepared, treats your space + with respect, and conducts themselves in a way that reflects well on both of us. +

+
    +
  • + + + + Background-checked team members +
  • +
  • + + + + Fully insured operations +
  • +
  • + + + + Respectful of your space and people +
  • +
+
+
+
+
+ + +
+ +
+
+ + + +
+

Our Guarantee

+

+ If you're not satisfied with any service we provide, let us know within 24 hours and we'll + return to make it right, at no additional charge. No fine print, no hoops to jump through. +

+

+ This isn't just a policy. It's a reflection of how we do business. We'd rather lose money on + a re-clean than lose your trust. +

+
+
+
+ + +
+ +
+

Experience the difference

+

+ Ready to see what the Service Standard looks like in your facility? Let's talk. +

+ + Request a Consultation + +
+
+
diff --git a/frontend/src/routes/team/+layout.server.ts b/frontend/src/routes/team/+layout.server.ts new file mode 100644 index 0000000..aea43f7 --- /dev/null +++ b/frontend/src/routes/team/+layout.server.ts @@ -0,0 +1,77 @@ +import type { LayoutServerLoad } from './$types'; +import { redirect, error } from '@sveltejs/kit'; +import { client } from '$lib/graphql/client'; +import { SERVICES_QUERY, type ServicesQueryResult } from '$lib/graphql/queries/services'; +import { PROJECTS_QUERY, type ProjectsQueryResult } from '$lib/graphql/queries/projects'; +import { getCurrentMonth, getMonthDateRange } from '$lib/utils/date'; + +export const load: LayoutServerLoad = async ({ url, parent, locals }) => { + const parentData = await parent(); + const user = parentData.user; + + // Not authenticated - redirect to login + if (!user) { + const returnTo = encodeURIComponent(url.pathname + url.search); + throw redirect(307, `/login?return_to=${returnTo}`); + } + + // Must be a TeamProfileType + if (user.__typename !== 'TeamProfileType') { + throw error(403, 'This area is only accessible to team members'); + } + + // Get month from URL params or default to current + const month = url.searchParams.get('month') ?? 
getCurrentMonth(); + const { start, end } = getMonthDateRange(month); + + // Build filter - everyone on team interface sees only their assigned work + const filter: Record = { + dateFrom: start, + dateTo: end, + teamProfileId: user.id + }; + + // Fetch services and projects + const [servicesResult, projectsResult] = await Promise.all([ + client + .query({ + query: SERVICES_QUERY, + variables: { filter, pagination: { limit: 100 } }, + context: { headers: { cookie: locals.cookie } }, + fetchPolicy: 'network-only' + }) + .catch((err) => { + console.error('Failed to fetch services:', err); + return { data: null }; + }), + client + .query({ + query: PROJECTS_QUERY, + variables: { filter, pagination: { limit: 100 } }, + context: { headers: { cookie: locals.cookie } }, + fetchPolicy: 'network-only' + }) + .catch((err) => { + console.error('Failed to fetch projects:', err); + return { data: null }; + }) + ]); + + const allServices = servicesResult.data?.services?.items ?? []; + const allProjects = projectsResult.data?.projects?.items ?? []; + + return { + ...parentData, + services: { + scheduled: allServices.filter((s) => s.status === 'SCHEDULED'), + inProgress: allServices.filter((s) => s.status === 'IN_PROGRESS'), + completed: allServices.filter((s) => s.status === 'COMPLETED') + }, + projects: { + scheduled: allProjects.filter((p) => p.status === 'SCHEDULED'), + inProgress: allProjects.filter((p) => p.status === 'IN_PROGRESS'), + completed: allProjects.filter((p) => p.status === 'COMPLETED') + }, + currentMonth: month + }; +}; diff --git a/frontend/src/routes/team/+layout.svelte b/frontend/src/routes/team/+layout.svelte new file mode 100644 index 0000000..683aa81 --- /dev/null +++ b/frontend/src/routes/team/+layout.svelte @@ -0,0 +1,14 @@ + + + +
+ {@render children()} +
+ + + diff --git a/frontend/src/routes/team/+page.svelte b/frontend/src/routes/team/+page.svelte new file mode 100644 index 0000000..1f93b78 --- /dev/null +++ b/frontend/src/routes/team/+page.svelte @@ -0,0 +1,339 @@ + + + + Dashboard - Nexus + + +
+ + + + + + + + +
+ +
+
+
+

+ Services +

+ View all +
+
+ +
+

This Week

+ {#if thisWeekServices.length === 0} +

No services scheduled this week

+ {:else} + + {#if thisWeekServices.length > 5} +

+ +{thisWeekServices.length - 5} more services +

+ {/if} + {/if} +
+
+ + +
+
+
+

Projects

+ View all +
+
+ +
+

This Week

+ {#if thisWeekProjects.length === 0} +

No projects scheduled this week

+ {:else} + + {#if thisWeekProjects.length > 5} +

+ +{thisWeekProjects.length - 5} more projects +

+ {/if} + {/if} +
+
+
+
+
diff --git a/frontend/src/routes/team/accounts/+page.server.ts b/frontend/src/routes/team/accounts/+page.server.ts new file mode 100644 index 0000000..85a13a1 --- /dev/null +++ b/frontend/src/routes/team/accounts/+page.server.ts @@ -0,0 +1,33 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { ACCOUNTS_QUERY, type AccountsQueryResult } from '$lib/graphql/queries/accounts'; +import { redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + return { accounts: [] }; + } + + const client = createServerClient(locals.cookie); + + // For team members, we fetch all accounts but the backend should filter + // based on their assignments. For now fetch all accounts. + const { data } = await client + .query({ + query: ACCOUNTS_QUERY, + variables: { filter: { status: 'ACTIVE' } } + }) + .catch((err) => { + console.error('Failed to fetch accounts:', err); + return { data: null }; + }); + + return { accounts: data?.accounts ?? [] }; +}; diff --git a/frontend/src/routes/team/accounts/+page.svelte b/frontend/src/routes/team/accounts/+page.svelte new file mode 100644 index 0000000..da3e588 --- /dev/null +++ b/frontend/src/routes/team/accounts/+page.svelte @@ -0,0 +1,142 @@ + + + + Accounts - Nexus + + +
+ + + + +
+
+ + + + +
+
+ + + {#if filteredAccounts.length > 0} +
+ {#each filteredAccounts as account (account.id)} + {@const primaryAddress = getPrimaryAddress(account)} + + {/each} +
+ {:else if searchQuery} +
+ + + +

No accounts found

+

No accounts match your search.

+ +
+ {:else} +
+ + + +

No accounts

+

No accounts available.

+
+ {/if} +
+
diff --git a/frontend/src/routes/team/accounts/[account]/+page.server.ts b/frontend/src/routes/team/accounts/[account]/+page.server.ts new file mode 100644 index 0000000..1c47016 --- /dev/null +++ b/frontend/src/routes/team/accounts/[account]/+page.server.ts @@ -0,0 +1,39 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { ACCOUNT_QUERY, type AccountQueryResult } from '$lib/graphql/queries/account'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: ACCOUNT_QUERY, + variables: { id: params.account } + }) + .catch((err) => { + console.error('Failed to fetch account:', err); + throw error(500, 'Failed to load account'); + }); + + if (!data?.account) { + throw error(404, 'Account not found'); + } + + return { + account: data.account, + contacts: data.account.contacts, + addresses: data.account.addresses + }; +}; diff --git a/frontend/src/routes/team/accounts/[account]/+page.svelte b/frontend/src/routes/team/accounts/[account]/+page.svelte new file mode 100644 index 0000000..5694166 --- /dev/null +++ b/frontend/src/routes/team/accounts/[account]/+page.svelte @@ -0,0 +1,176 @@ + + + + {account.name} - Accounts - Nexus + + +
+ + + + {account.status} + + + +
+ +
+
+

Account Details

+
+
+
+
Account Name
+
{account.name}
+
+ {#if account.customer} +
+
Customer
+
{account.customer.name}
+
+ {/if} +
+
Status
+
+ + {account.status} + +
+
+
+
+ + + {#if contacts.length > 0} +
+
+

Contacts

+
+
+ {#each contacts as contact} +
+
+
+

{contact.firstName} {contact.lastName}

+
+ {#if contact.isPrimary} + + Primary + + {/if} +
+
+ {#if contact.email} +

+ {contact.email} +

+ {/if} + {#if contact.phone} +

+ {contact.phone} +

+ {/if} +
+
+ {/each} +
+
+ {/if} + + + {#if addresses.length > 0} +
+
+

Locations

+
+ +
+ {/if} +
+
+
diff --git a/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.server.ts b/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.server.ts new file mode 100644 index 0000000..9a32699 --- /dev/null +++ b/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.server.ts @@ -0,0 +1,45 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { ACCOUNT_QUERY, type AccountQueryResult } from '$lib/graphql/queries/account'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: ACCOUNT_QUERY, + variables: { id: params.account } + }) + .catch((err) => { + console.error('Failed to fetch account:', err); + throw error(500, 'Failed to load account'); + }); + + if (!data?.account) { + throw error(404, 'Account not found'); + } + + // Find the specific location + const location = data.account.addresses.find((addr) => addr.id === params.location); + + if (!location) { + throw error(404, 'Location not found'); + } + + return { + account: data.account, + location + }; +}; diff --git a/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.svelte b/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.svelte new file mode 100644 index 0000000..2613732 --- /dev/null +++ b/frontend/src/routes/team/accounts/[account]/locations/[location]/+page.svelte @@ -0,0 +1,285 @@ + + + + {location.name ?? formatAddress()} - {account.name} - Nexus + + +
+ + + +
+ +
+
+

Labor Rate

+
+
+ {#if activeLabor} +
+ + {formatCurrency(activeLabor.amount)} + + per service +
+ {#if activeLabor.startDate} +

+ Effective from {new Date(activeLabor.startDate + 'T00:00:00').toLocaleDateString( + 'en-US', + { + month: 'short', + day: 'numeric', + year: 'numeric' + } + )} +

+ {/if} + {:else} +

No labor rate configured

+ {/if} +
+
+ + +
+
+

Schedule

+
+
+ {#if activeSchedules.length > 0} +
+ {#each activeSchedules as schedule (schedule.id)} +
+ {#if schedule.name} +

{schedule.name}

+ {/if} + + +
+ {#each dayKeys as day, i} + + {dayNames[i]} + + {/each} +
+ + {#if schedule.weekendService} +

+ + + + + Weekend service available + +

+ {/if} + + {#if schedule.scheduleException} +

+ Note: + {schedule.scheduleException} +

+ {/if} +
+ {/each} +
+ {:else} +

No schedule configured

+ {/if} +
+
+ + +
+
+

Scope of Work

+
+
+ {#if activeScopes.length > 0} +
+ {#each activeScopes as scope (scope.id)} +
+

+ {scope.name} +

+ {#if scope.description} +

{scope.description}

+ {/if} + + {#if scope.areas.length > 0} +
+ {#each scope.areas.sort((a, b) => a.order - b.order) as area (area.id)} +
+ +

{area.name}

+ + + {area.tasks.length} task{area.tasks.length !== 1 ? 's' : ''} + + + + + +
+ + {#if area.tasks.length > 0} +
+ {#each area.tasks.sort((a, b) => a.order - b.order) as task (task.id)} +
+
+

{task.checklistDescription}

+ + {formatFrequency(task.frequency)} + +
+ {#if task.estimatedMinutes} +

+ Est. {task.estimatedMinutes} min +

+ {/if} +
+ {/each} +
+ {/if} +
+ {/each} +
+ {/if} +
+ {/each} +
+ {:else} +

No scope of work configured

+ {/if} +
+
+
+
+
diff --git a/frontend/src/routes/team/projects/+page.server.ts b/frontend/src/routes/team/projects/+page.server.ts new file mode 100644 index 0000000..aca251e --- /dev/null +++ b/frontend/src/routes/team/projects/+page.server.ts @@ -0,0 +1,110 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + PROJECTS_QUERY, + PROJECT_STATUS_COUNTS_QUERY, + type ProjectsQueryResult, + type ProjectStatusCountsQueryResult, + type WorkStatus +} from '$lib/graphql/queries/projects'; + +const DEFAULT_LIMIT = 20; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { dateFrom: string; dateTo: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0); + + const dateFrom = firstDay.toISOString().split('T')[0]; + const dateTo = lastDay.toISOString().split('T')[0]; + + return { dateFrom, dateTo }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const statusParam = url.searchParams.get('status'); + const status: WorkStatus | null = statusParam as WorkStatus | null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + + const { dateFrom, dateTo } = getMonthDateRange(month); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + projects: { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: { scheduled: 0, inProgress: 0, completed: 0, cancelled: 0 }, + month, + status, + page, + limit + }; + } + + const client = 
createServerClient(locals.cookie); + + // Build filter - everyone on team interface sees only their assigned work + const filter: Record = { + dateFrom, + dateTo, + status, + teamProfileId: user?.id + }; + + // Fetch both projects and status counts in parallel + const [projectsResult, countsResult] = await Promise.all([ + client + .query({ + query: PROJECTS_QUERY, + variables: { + filter, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch projects:', err); + return { data: null }; + }), + client + .query({ + query: PROJECT_STATUS_COUNTS_QUERY, + variables: { + dateFrom, + dateTo, + teamProfileId: user?.id + } + }) + .catch((err) => { + console.error('Failed to fetch status counts:', err); + return { data: null }; + }) + ]); + + return { + projects: projectsResult.data?.projects ?? { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: countsResult.data?.projectStatusCounts ?? { + scheduled: 0, + inProgress: 0, + completed: 0, + cancelled: 0 + }, + month, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/team/projects/+page.svelte b/frontend/src/routes/team/projects/+page.svelte new file mode 100644 index 0000000..c633ab0 --- /dev/null +++ b/frontend/src/routes/team/projects/+page.svelte @@ -0,0 +1,262 @@ + + + + My Projects - Nexus + + +
+ + + + +
+ + +
+ + +
+
+ + + + +
+
+ + + {#if filteredProjects.length > 0} + + + + + {:else if searchQuery} + +
+ + + +

No projects found

+

No projects match your search.

+ +
+ {:else if (data.projects?.totalCount ?? 0) === 0} + +
+ + + +

No projects

+

+ {#if data.status} + No {formatStatusLabel(data.status).toLowerCase()} projects for this month. + {:else} + No projects assigned to you for this month. + {/if} +

+
+ {/if} +
+
diff --git a/frontend/src/routes/team/projects/[project]/+page.server.ts b/frontend/src/routes/team/projects/[project]/+page.server.ts new file mode 100644 index 0000000..5f4d757 --- /dev/null +++ b/frontend/src/routes/team/projects/[project]/+page.server.ts @@ -0,0 +1,72 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { PROJECT_QUERY, type ProjectQueryResult } from '$lib/graphql/queries/projects'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { + LATEST_PROJECT_SESSION_QUERY, + type LatestProjectSessionQueryResult +} from '$lib/graphql/queries/session'; +import { error, redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const [projectResult, teamResult, sessionResult] = await Promise.all([ + client + .query({ + query: PROJECT_QUERY, + variables: { id: params.project } + }) + .catch((err) => { + console.error('Failed to fetch project:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }), + client + .query({ + query: LATEST_PROJECT_SESSION_QUERY, + variables: { projectId: params.project } + }) + .catch((err) => { + console.error('Failed to fetch session:', err); + return { data: null }; + }) + ]); + + if (!projectResult.data?.project) { + throw error(404, 'Project not found'); + } + + // Verify the user is assigned to this project (for TEAM_MEMBER role) + const project = projectResult.data.project; + if (user.role === 'TEAM_MEMBER') 
{ + const isAssigned = project.teamMembers.some((m) => m.teamProfileId === user.id); + if (!isAssigned) { + throw error(403, 'You are not assigned to this project'); + } + } + + return { + project, + teamProfiles: teamResult.data?.teamProfiles ?? [], + session: sessionResult.data?.latestProjectSession ?? null + }; +}; diff --git a/frontend/src/routes/team/projects/[project]/+page.svelte b/frontend/src/routes/team/projects/[project]/+page.svelte new file mode 100644 index 0000000..646686b --- /dev/null +++ b/frontend/src/routes/team/projects/[project]/+page.svelte @@ -0,0 +1,681 @@ + + + + Project - {project.name} - Nexus + + +
+ + +
+ + {formatStatusLabel(project.status)} + + {formatDate(project.date)} +
+
+ +
+ +
+
+

Project Details

+
+
+
+
Date
+
{formatDate(project.date)}
+
+
+
Status
+
+ + {formatStatusLabel(project.status)} + +
+
+ {#if project.customer} +
+
Customer
+
{project.customer.name}
+
+ {/if} + {#if project.accountAddress} +
+
Location
+
+ {#if project.accountAddress.name} + {project.accountAddress.name} - + {/if} + {formatAddress(project.accountAddress)} +
+
+ {/if} +
+
+ + + {#if project.notes} +
+
+

Project Notes

+
+
+

{project.notes}

+
+
+ {/if} + + +
+
+
+

Team Members

+ {#if teamMembers.length > 0} + + {/if} +
+
+ {#if teamMembers.length > 0} +
+
    + {#each teamMembers as member} +
  • +
    +
    + + + +
    +
    +

    + {member.teamProfile?.fullName ?? 'Unknown'} +

    + {#if member.teamProfile?.role} +

    + {member.teamProfile.role === 'TEAM_LEADER' + ? 'Team Leader' + : 'Team Member'} +

    + {/if} +
    +
    +
  • + {/each} +
+
+ {:else} +

No team members assigned yet.

+ {/if} +
+ + +
+
+

Work Session

+
+
+ {#if session} + ({ id: p.id, fullName: p.fullName })) ?? + []} + areas={sessionAreas} + tasks={allTasks} + {selectedTaskIds} + {completedTaskIds} + {completedTasksByArea} + {readyToSubmitByArea} + {availableTasksCount} + isSubmitting={isClosingSession} + isReverting={isRevertingSession} + {submittingTaskId} + {removingTaskId} + onClose={handleCloseSession} + onRevert={handleRevertSession} + onToggleTask={handleToggleTask} + onSubmitTask={handleSubmitTask} + onRemoveTask={handleRemoveTask} + onRemoveCompletedTask={handleRemoveCompletedTask} + onSubmitAllTasks={handleSubmitAllTasks} + onClearSelection={handleClearSelection} + onAddNote={handleAddNote} + onUpdateNote={handleUpdateNote} + onDeleteNote={handleDeleteNote} + onUploadPhoto={handleUploadPhoto} + onUploadVideo={handleUploadVideo} + onUpdatePhoto={handleUpdatePhoto} + onUpdateVideo={handleUpdateVideo} + onDeletePhoto={handleDeletePhoto} + onDeleteVideo={handleDeleteVideo} + {getTeamMemberName} + /> + {:else if canStartSession} +
+

+ Start a work session to track time, complete tasks, and capture notes/photos. +

+ +
+ {:else if project.status === 'IN_PROGRESS'} +

+ This project has an active session but it was not loaded. Try refreshing the page. +

+ {:else if project.status === 'COMPLETED'} +

This project has been completed.

+ {:else if !isDispatched} +

This project has not been dispatched yet.

+ {:else if !activeScope} +

No active scope found for this project.

+ {:else} +

+ Work session is not available for this project status. +

+ {/if} +
+
+
+
+
diff --git a/frontend/src/routes/team/reports/+page.server.ts b/frontend/src/routes/team/reports/+page.server.ts new file mode 100644 index 0000000..105f142 --- /dev/null +++ b/frontend/src/routes/team/reports/+page.server.ts @@ -0,0 +1,59 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { REPORTS_QUERY, type ReportsQueryResult } from '$lib/graphql/queries/reports'; +import { redirect } from '@sveltejs/kit'; + +const DEFAULT_LIMIT = 20; + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + // Parse URL parameters + const status = url.searchParams.get('status') || null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + reports: { items: [], totalCount: 0, hasNextPage: false }, + status, + page, + limit + }; + } + + const client = createServerClient(locals.cookie); + + // Team members see their own reports only + const reportsResult = await client + .query({ + query: REPORTS_QUERY, + variables: { + filter: { + teamProfileId: user.id, + status: status || undefined + }, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch reports:', err); + return { data: null }; + }); + + return { + reports: reportsResult.data?.reports ?? { items: [], totalCount: 0, hasNextPage: false }, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/team/reports/+page.svelte b/frontend/src/routes/team/reports/+page.svelte new file mode 100644 index 0000000..0577095 --- /dev/null +++ b/frontend/src/routes/team/reports/+page.svelte @@ -0,0 +1,165 @@ + + + + My Reports - Nexus + + +
+ + + + +
+ +
+ + +
+
+ + + {#if reports.length > 0} + + + + + {:else} + +
+ + + +

No reports yet

+

Your labor reports will appear here once created.

+
+ {/if} +
+
diff --git a/frontend/src/routes/team/reports/[report]/+page.server.ts b/frontend/src/routes/team/reports/[report]/+page.server.ts new file mode 100644 index 0000000..084973f --- /dev/null +++ b/frontend/src/routes/team/reports/[report]/+page.server.ts @@ -0,0 +1,42 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { redirect, error } from '@sveltejs/kit'; +import { REPORT_QUERY, type ReportQueryResult } from '$lib/graphql/queries/reports'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const { data } = await client + .query({ + query: REPORT_QUERY, + variables: { id: params.report } + }) + .catch((err) => { + console.error('Failed to fetch report:', err); + throw error(500, 'Failed to load report'); + }); + + if (!data?.report) { + throw error(404, 'Report not found'); + } + + // Verify the report belongs to this team member (unless admin/team leader) + if (user.role === 'TEAM_MEMBER' && data.report.teamProfileId !== user.id) { + throw error(403, 'You do not have access to this report'); + } + + return { + report: data.report + }; +}; diff --git a/frontend/src/routes/team/reports/[report]/+page.svelte b/frontend/src/routes/team/reports/[report]/+page.svelte new file mode 100644 index 0000000..124ff73 --- /dev/null +++ b/frontend/src/routes/team/reports/[report]/+page.svelte @@ -0,0 +1,216 @@ + + + + Report - {formatDateRange(report.startDate, report.endDate)} - Nexus + + +
+ + + + {report.status} + + + +
+ +
+
+

Summary

+
+
+
+

{report.serviceCount}

+

+ Service{report.serviceCount !== 1 ? 's' : ''} +

+
+
+

{report.projectCount}

+

+ Project{report.projectCount !== 1 ? 's' : ''} +

+
+
+

+ {formatCurrency(report.servicesTotal)} +

+

Services Total

+
+
+

+ {formatCurrency(report.projectsTotal)} +

+

Projects Total

+
+
+
+
+ Total Labor + + {formatCurrency(report.totalLabor)} + +
+
+
+ + + {#if report.services.length > 0} +
+
+

+ Services ({report.serviceCount}) +

+
+
+ {#each report.services as entry (entry.id)} +
+
+

+ {entry.service?.account?.customer?.name ?? 'Unknown Customer'} +

+

+ {getLocationDisplay(entry.service)} +

+

+ {entry.service ? formatDate(entry.service.date) : 'Unknown date'} +

+
+
+

+ {formatCurrency(entry.laborShare)} +

+
+
+ {/each} +
+
+ {/if} + + + {#if report.projects.length > 0} +
+
+

+ Projects ({report.projectCount}) +

+
+
+ {#each report.projects as entry (entry.id)} +
+
+

+ {entry.project?.name ?? 'Unknown Project'} +

+

+ {entry.project?.customer?.name ?? 'Unknown Customer'} +

+ {#if entry.project?.formattedAddress} +

{entry.project.formattedAddress}

+ {/if} +

+ {entry.project ? formatDate(entry.project.date) : 'Unknown date'} +

+
+
+

+ {formatCurrency(entry.laborShare)} +

+
+
+ {/each} +
+
+ {/if} + + + {#if report.services.length === 0 && report.projects.length === 0} +
+ + + +

No entries in this report

+

+ This report doesn't have any services or projects yet. +

+
+ {/if} +
+
+
diff --git a/frontend/src/routes/team/services/+page.server.ts b/frontend/src/routes/team/services/+page.server.ts new file mode 100644 index 0000000..0e7c695 --- /dev/null +++ b/frontend/src/routes/team/services/+page.server.ts @@ -0,0 +1,110 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { + SERVICES_QUERY, + SERVICE_STATUS_COUNTS_QUERY, + type ServicesQueryResult, + type ServiceStatusCountsQueryResult, + type WorkStatus +} from '$lib/graphql/queries/services'; + +const DEFAULT_LIMIT = 20; + +function getCurrentMonth(): string { + const now = new Date(); + const year = now.getFullYear(); + const month = String(now.getMonth() + 1).padStart(2, '0'); + return `${year}-${month}`; +} + +function getMonthDateRange(month: string): { dateFrom: string; dateTo: string } { + const [year, monthNum] = month.split('-').map(Number); + const firstDay = new Date(year, monthNum - 1, 1); + const lastDay = new Date(year, monthNum, 0); + + const dateFrom = firstDay.toISOString().split('T')[0]; + const dateTo = lastDay.toISOString().split('T')[0]; + + return { dateFrom, dateTo }; +} + +export const load: PageServerLoad = async ({ locals, parent, url }) => { + const { user } = await parent(); + + // Parse URL parameters + const month = url.searchParams.get('month') || getCurrentMonth(); + const statusParam = url.searchParams.get('status'); + const status: WorkStatus | null = statusParam as WorkStatus | null; + const page = parseInt(url.searchParams.get('page') || '1', 10); + const limit = parseInt(url.searchParams.get('limit') || String(DEFAULT_LIMIT), 10); + + const { dateFrom, dateTo } = getMonthDateRange(month); + const offset = (page - 1) * limit; + + if (!locals.cookie) { + return { + services: { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: { scheduled: 0, inProgress: 0, completed: 0, cancelled: 0 }, + month, + status, + page, + limit + }; + } + + const client = 
createServerClient(locals.cookie); + + // Build filter - everyone on team interface sees only their assigned work + const filter: Record = { + dateFrom, + dateTo, + status, + teamProfileId: user?.id + }; + + // Fetch both services and status counts in parallel + const [servicesResult, countsResult] = await Promise.all([ + client + .query({ + query: SERVICES_QUERY, + variables: { + filter, + pagination: { + offset, + limit + } + } + }) + .catch((err) => { + console.error('Failed to fetch services:', err); + return { data: null }; + }), + client + .query({ + query: SERVICE_STATUS_COUNTS_QUERY, + variables: { + dateFrom, + dateTo, + teamProfileId: user?.id + } + }) + .catch((err) => { + console.error('Failed to fetch status counts:', err); + return { data: null }; + }) + ]); + + return { + services: servicesResult.data?.services ?? { items: [], totalCount: 0, hasNextPage: false }, + statusCounts: countsResult.data?.serviceStatusCounts ?? { + scheduled: 0, + inProgress: 0, + completed: 0, + cancelled: 0 + }, + month, + status, + page, + limit + }; +}; diff --git a/frontend/src/routes/team/services/+page.svelte b/frontend/src/routes/team/services/+page.svelte new file mode 100644 index 0000000..5e3a658 --- /dev/null +++ b/frontend/src/routes/team/services/+page.svelte @@ -0,0 +1,262 @@ + + + + My Services - Nexus + + +
+ + + + +
+ + +
+ + +
+
+ + + + +
+
+ + + {#if filteredServices.length > 0} + + + + + {:else if searchQuery} + +
+ + + +

No services found

+

No services match your search.

+ +
+ {:else if (data.services?.totalCount ?? 0) === 0} + +
+ + + +

No services

+

+ {#if data.status} + No {formatStatusLabel(data.status).toLowerCase()} services for this month. + {:else} + No services assigned to you for this month. + {/if} +

+
+ {/if} +
+
diff --git a/frontend/src/routes/team/services/[service]/+page.server.ts b/frontend/src/routes/team/services/[service]/+page.server.ts new file mode 100644 index 0000000..32b0abb --- /dev/null +++ b/frontend/src/routes/team/services/[service]/+page.server.ts @@ -0,0 +1,72 @@ +import type { PageServerLoad } from './$types'; +import { createServerClient } from '$lib/graphql/client'; +import { SERVICE_QUERY, type ServiceQueryResult } from '$lib/graphql/queries/services'; +import { TEAM_PROFILES_QUERY, type TeamProfilesQueryResult } from '$lib/graphql/queries/team'; +import { + LATEST_SERVICE_SESSION_QUERY, + type LatestServiceSessionQueryResult +} from '$lib/graphql/queries/session'; +import { error, redirect } from '@sveltejs/kit'; + +export const load: PageServerLoad = async ({ locals, parent, params }) => { + const { user } = await parent(); + + // Redirect if not authenticated or not a team member + if (!user || user.__typename !== 'TeamProfileType') { + throw redirect(303, '/'); + } + + if (!locals.cookie) { + throw error(401, 'Not authenticated'); + } + + const client = createServerClient(locals.cookie); + + const [serviceResult, teamResult, sessionResult] = await Promise.all([ + client + .query({ + query: SERVICE_QUERY, + variables: { id: params.service } + }) + .catch((err) => { + console.error('Failed to fetch service:', err); + return { data: null }; + }), + client + .query({ + query: TEAM_PROFILES_QUERY + }) + .catch((err) => { + console.error('Failed to fetch team profiles:', err); + return { data: null }; + }), + client + .query({ + query: LATEST_SERVICE_SESSION_QUERY, + variables: { serviceId: params.service } + }) + .catch((err) => { + console.error('Failed to fetch session:', err); + return { data: null }; + }) + ]); + + if (!serviceResult.data?.service) { + throw error(404, 'Service not found'); + } + + // Verify the user is assigned to this service (for TEAM_MEMBER role) + const service = serviceResult.data.service; + if (user.role === 'TEAM_MEMBER') 
{ + const isAssigned = service.teamMembers.some((m) => m.teamProfileId === user.id); + if (!isAssigned) { + throw error(403, 'You are not assigned to this service'); + } + } + + return { + service, + teamProfiles: teamResult.data?.teamProfiles ?? [], + session: sessionResult.data?.latestServiceSession ?? null + }; +}; diff --git a/frontend/src/routes/team/services/[service]/+page.svelte b/frontend/src/routes/team/services/[service]/+page.svelte new file mode 100644 index 0000000..2d03e67 --- /dev/null +++ b/frontend/src/routes/team/services/[service]/+page.svelte @@ -0,0 +1,682 @@ + + + + Service - {service.account?.name ?? 'Unknown'} - Nexus + + +
+ + +
+ + {formatStatusLabel(service.status)} + + {formatDate(service.date)} +
+
+ +
+ +
+
+

Service Details

+
+
+
+
Date
+
{formatDate(service.date)}
+
+
+
Status
+
+ + {formatStatusLabel(service.status)} + +
+
+
+
Account
+
{service.account?.name ?? 'Unknown'}
+
+ {#if service.account?.customer} +
+
Customer
+
{service.account.customer.name}
+
+ {/if} + {#if service.accountAddress} +
+
Location
+
+ {#if service.accountAddress.name} + {service.accountAddress.name} - + {/if} + {formatAddress(service.accountAddress)} +
+
+ {/if} +
+
+ + + {#if service.notes} +
+
+

Service Notes

+
+
+

{service.notes}

+
+
+ {/if} + + +
+
+
+

Team Members

+ {#if teamMembers.length > 0} + + {/if} +
+
+ {#if teamMembers.length > 0} +
+
    + {#each teamMembers as member} +
  • +
    +
    + + + +
    +
    +

    + {member.teamProfile?.fullName ?? 'Unknown'} +

    + {#if member.teamProfile?.role} +

    + {member.teamProfile.role === 'TEAM_LEADER' + ? 'Team Leader' + : 'Team Member'} +

    + {/if} +
    +
    +
  • + {/each} +
+
+ {:else} +

No team members assigned yet.

+ {/if} +
+ + +
+
+

Work Session

+
+
+ {#if session} + ({ id: p.id, fullName: p.fullName })) ?? + []} + areas={sessionAreas} + tasks={allTasks} + {selectedTaskIds} + {completedTaskIds} + {completedTasksByArea} + {readyToSubmitByArea} + {availableTasksCount} + isSubmitting={isClosingSession} + isReverting={isRevertingSession} + {submittingTaskId} + {removingTaskId} + onClose={handleCloseSession} + onRevert={handleRevertSession} + onToggleTask={handleToggleTask} + onSubmitTask={handleSubmitTask} + onRemoveTask={handleRemoveTask} + onRemoveCompletedTask={handleRemoveCompletedTask} + onSubmitAllTasks={handleSubmitAllTasks} + onClearSelection={handleClearSelection} + onAddNote={handleAddNote} + onUpdateNote={handleUpdateNote} + onDeleteNote={handleDeleteNote} + onUploadPhoto={handleUploadPhoto} + onUploadVideo={handleUploadVideo} + onUpdatePhoto={handleUpdatePhoto} + onUpdateVideo={handleUpdateVideo} + onDeletePhoto={handleDeletePhoto} + onDeleteVideo={handleDeleteVideo} + {getTeamMemberName} + /> + {:else if canStartSession} +
+

+ Start a work session to track time, complete tasks, and capture notes/photos. +

+ +
+ {:else if service.status === 'IN_PROGRESS'} +

+ This service has an active session but it was not loaded. Try refreshing the page. +

+ {:else if service.status === 'COMPLETED'} +

This service has been completed.

+ {:else if !isDispatched} +

This service has not been dispatched yet.

+ {:else if !scope} +

No active scope found for this location.

+ {:else} +

+ Work session is not available for this service status. +

+ {/if} +
+
+
+
+
diff --git a/frontend/static/robots.txt b/frontend/static/robots.txt new file mode 100644 index 0000000..b6dd667 --- /dev/null +++ b/frontend/static/robots.txt @@ -0,0 +1,3 @@ +# allow crawling everything by default +User-agent: * +Disallow: diff --git a/frontend/svelte.config.js b/frontend/svelte.config.js new file mode 100644 index 0000000..833cead --- /dev/null +++ b/frontend/svelte.config.js @@ -0,0 +1,13 @@ +import adapter from '@sveltejs/adapter-node'; +import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + // Consult https://svelte.dev/docs/kit/integrations + // for more information about preprocessors + preprocess: vitePreprocess(), + + kit: { adapter: adapter() } +}; + +export default config; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..2c2ed3c --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "rewriteRelativeImportExtensions": true, + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "moduleResolution": "bundler" + } + // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias + // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files + // + // To make changes to top-level options such as include and exclude, we recommend extending + // the generated config; see https://svelte.dev/docs/kit/configuration#typescript +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..54ec314 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,13 @@ +import devtoolsJson from 'vite-plugin-devtools-json'; +import tailwindcss from '@tailwindcss/vite'; +import { sveltekit } from '@sveltejs/kit/vite'; +import { defineConfig } 
from 'vite'; +import mkcert from 'vite-plugin-mkcert'; + +export default defineConfig({ + plugins: [tailwindcss(), sveltekit(), devtoolsJson(), mkcert()], + server: { + host: 'local.example.com', + port: 5173 + } +}); diff --git a/kratos/config/identity.schema.json b/kratos/config/identity.schema.json new file mode 100644 index 0000000..77f43a7 --- /dev/null +++ b/kratos/config/identity.schema.json @@ -0,0 +1,58 @@ +{ + "$id": "https://schemas.ory.sh/presets/kratos/identity.email.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "properties": { + "traits": { + "type": "object", + "properties": { + "email": { + "type": "string", + "format": "email", + "title": "E-Mail", + "minLength": 3, + "maxLength": 320, + "ory.sh/kratos": { + "credentials": { + "password": { + "identifier": true + }, + "webauthn": { + "identifier": true + }, + "totp": { + "account_name": true + } + }, + "verification": { + "via": "email" + }, + "recovery": { + "via": "email" + } + } + }, + "name": { + "type": "object", + "properties": { + "first": { + "type": "string", + "title": "First Name", + "minLength": 1, + "maxLength": 100 + }, + "last": { + "type": "string", + "title": "Last Name", + "minLength": 1, + "maxLength": 100 + } + } + } + }, + "required": ["email"], + "additionalProperties": false + } + } +} \ No newline at end of file diff --git a/kratos/config/identity.v2.schema.json b/kratos/config/identity.v2.schema.json new file mode 100644 index 0000000..65c9092 --- /dev/null +++ b/kratos/config/identity.v2.schema.json @@ -0,0 +1,81 @@ +{ + "$id": "https://schemas.example.local/identity.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Nexus Profile", + "type": "object", + "properties": { + "traits": { + "type": "object", + "properties": { + "email": { + "type": "string", + "format": "email", + "title": "E-Mail", + "minLength": 3, + "maxLength": 320, + "ory.sh/kratos": { + "credentials": { + 
"password": { + "identifier": true + }, + "webauthn": { + "identifier": true + }, + "totp": { + "account_name": true + } + }, + "verification": { + "via": "email" + }, + "recovery": { + "via": "email" + } + } + }, + "name": { + "type": "object", + "properties": { + "first": { + "type": "string", + "title": "First Name", + "minLength": 1, + "maxLength": 100 + }, + "last": { + "type": "string", + "title": "Last Name", + "minLength": 1, + "maxLength": 100 + } + }, + "required": ["first", "last"] + }, + "phone": { + "type": "string", + "title": "Phone Number", + "pattern": "^[0-9\\s+()-]*$", + "maxLength": 30 + }, + "profile_type": { + "type": "string", + "title": "Profile Type", + "enum": ["team", "customer"], + "description": "Determines whether this is a team member or customer profile" + } + }, + "required": ["email", "name", "profile_type"], + "additionalProperties": false + }, + "metadata_public": { + "type": "object", + "properties": { + "django_profile_id": { + "type": "string", + "title": "Nexus Profile ID", + "description": "UUID of the linked Django profile (TeamProfile or CustomerProfile)" + } + } + } + } +} diff --git a/kratos/config/kratos.yml b/kratos/config/kratos.yml new file mode 100644 index 0000000..90550c9 --- /dev/null +++ b/kratos/config/kratos.yml @@ -0,0 +1,150 @@ +version: v1.1.0 + +dsn: env://DSN + +serve: + public: + base_url: https://auth.example.com + port: 6000 + cors: + enabled: false + # CORS is handled by Oathkeeper proxy to avoid duplicate headers + admin: + base_url: http://localhost:6050 + port: 6050 + +selfservice: + default_browser_return_url: https://account.example.com + allowed_return_urls: + - https://account.example.com + - https://auth.example.com + - https://app.example.com + - https://admin.example.com + - http://localhost:5000 + - http://localhost:5173 + - https://local.example.com:5173 + - http://localhost:7000 + + methods: + password: + enabled: true + config: + haveibeenpwned_enabled: true + min_password_length: 8 + 
identifier_similarity_check_enabled: true + + totp: + enabled: true + config: + issuer: Example App + + webauthn: + enabled: true + config: + rp: + display_name: Example App + id: example.com + origins: + - https://account.example.com + - https://auth.example.com + - https://app.example.com + - https://admin.example.com + - http://localhost:5000 + - http://localhost:5173 + - https://local.example.com:5173 + - http://localhost:7000 + + link: + enabled: true + + code: + enabled: true + + flows: + error: + ui_url: https://account.example.com/error + + settings: + ui_url: https://account.example.com/settings + privileged_session_max_age: 15m + required_aal: highest_available + + recovery: + enabled: true + ui_url: https://account.example.com/recovery + use: code + lifespan: 4h + after: + default_browser_return_url: https://account.example.com/settings + + verification: + enabled: true + ui_url: https://account.example.com/verification + use: code + lifespan: 24h + after: + default_browser_return_url: https://account.example.com/ + + logout: + after: + default_browser_return_url: https://account.example.com/login + + login: + ui_url: https://account.example.com/login + lifespan: 10m + + registration: + lifespan: 10m + ui_url: https://account.example.com/registration + after: + default_browser_return_url: https://account.example.com/ + password: + hooks: + - hook: session + +log: + level: info + format: text + leak_sensitive_values: false + +secrets: + cookie: + - env://SECRETS_COOKIE + cipher: + - env://SECRETS_CIPHER + default: + - env://SECRETS_DEFAULT + +ciphers: + algorithm: xchacha20-poly1305 + +hashers: + algorithm: bcrypt + bcrypt: + cost: 12 + +identity: + default_schema_id: nexus-v2 + schemas: + - id: default + url: file:///etc/kratos/identity.schema.json + - id: nexus-v2 + url: file:///etc/kratos/identity.v2.schema.json + +cookies: + domain: .example.com + same_site: Lax + +session: + lifespan: 24h + earliest_possible_extend: 1h + cookie: + domain: 
.example.com + same_site: Lax + persistent: true + +courier: + smtp: + connection_uri: env://COURIER_SMTP_CONNECTION_URI + from_address: env://COURIER_SMTP_FROM_ADDRESS + from_name: env://COURIER_SMTP_FROM_NAME diff --git a/migrations/20260101000001_create_types.sql b/migrations/20260101000001_create_types.sql new file mode 100644 index 0000000..0ceecbd --- /dev/null +++ b/migrations/20260101000001_create_types.sql @@ -0,0 +1,276 @@ +-- Migration 001: Create custom PostgreSQL types (enums) +-- These types must be created before any tables that reference them + +-- Enable btree_gist extension for exclusion constraints (non-overlapping date ranges) +CREATE EXTENSION IF NOT EXISTS btree_gist; + +-- Entity lifecycle status +CREATE TYPE entity_status AS ENUM ( + 'ACTIVE', + 'INACTIVE', + 'PENDING' +); + +-- Work item status (services, projects) +CREATE TYPE work_status AS ENUM ( + 'SCHEDULED', + 'IN_PROGRESS', + 'COMPLETED', + 'CANCELLED' +); + +-- Invoice lifecycle status +CREATE TYPE invoice_status AS ENUM ( + 'DRAFT', + 'SENT', + 'PAID', + 'OVERDUE', + 'CANCELLED' +); + +-- Payment method types +CREATE TYPE payment_type AS ENUM ( + 'CHECK', + 'CREDIT_CARD', + 'BANK_TRANSFER', + 'CASH' +); + +-- Task frequency for scope tasks +CREATE TYPE task_frequency AS ENUM ( + 'DAILY', + 'WEEKLY', + 'MONTHLY', + 'QUARTERLY', + 'TRIANNUAL', + 'ANNUAL', + 'AS_NEEDED' +); + +-- Team member role hierarchy +CREATE TYPE team_role AS ENUM ( + 'ADMIN', + 'TEAM_LEADER', + 'TEAM_MEMBER' +); + +-- Conversation types for messaging +CREATE TYPE conversation_type AS ENUM ( + 'DIRECT', + 'GROUP', + 'SUPPORT' +); + +-- Notification delivery channels +CREATE TYPE notification_channel AS ENUM ( + 'IN_APP', + 'EMAIL', + 'SMS' +); + +-- Notification status +CREATE TYPE notification_status AS ENUM ( + 'PENDING', + 'SENT', + 'READ', + 'FAILED' +); + +-- Delivery attempt status +CREATE TYPE delivery_status AS ENUM ( + 'PENDING', + 'QUEUED', + 'SENDING', + 'SENT', + 'DELIVERED', + 'FAILED', + 'BOUNCED' 
+); + +-- Comprehensive event types for audit trail +CREATE TYPE event_type AS ENUM ( + -- Customer events + 'CUSTOMER_CREATED', + 'CUSTOMER_UPDATED', + 'CUSTOMER_DELETED', + 'CUSTOMER_STATUS_CHANGED', + 'CUSTOMER_ADDRESS_CREATED', + 'CUSTOMER_ADDRESS_UPDATED', + 'CUSTOMER_ADDRESS_DELETED', + 'CUSTOMER_CONTACT_CREATED', + 'CUSTOMER_CONTACT_UPDATED', + 'CUSTOMER_CONTACT_DELETED', + + -- Account events + 'ACCOUNT_CREATED', + 'ACCOUNT_UPDATED', + 'ACCOUNT_DELETED', + 'ACCOUNT_STATUS_CHANGED', + 'ACCOUNT_ADDRESS_CREATED', + 'ACCOUNT_ADDRESS_UPDATED', + 'ACCOUNT_ADDRESS_DELETED', + 'ACCOUNT_ADDRESS_PRIMARY_CHANGED', + 'ACCOUNT_CONTACT_CREATED', + 'ACCOUNT_CONTACT_UPDATED', + 'ACCOUNT_CONTACT_DELETED', + 'ACCOUNT_CONTACT_PRIMARY_CHANGED', + + -- Service events + 'SERVICE_CREATED', + 'SERVICE_UPDATED', + 'SERVICE_DELETED', + 'SERVICE_STATUS_CHANGED', + 'SERVICE_ASSIGNED', + 'SERVICE_UNASSIGNED', + 'SERVICE_RESCHEDULED', + + -- Project events + 'PROJECT_CREATED', + 'PROJECT_UPDATED', + 'PROJECT_DELETED', + 'PROJECT_STATUS_CHANGED', + 'PROJECT_ASSIGNED', + 'PROJECT_UNASSIGNED', + 'PROJECT_RESCHEDULED', + 'PROJECT_AMOUNT_CHANGED', + + -- Schedule events + 'SCHEDULE_CREATED', + 'SCHEDULE_UPDATED', + 'SCHEDULE_DELETED', + 'SCHEDULE_FREQUENCY_CHANGED', + + -- Session events + 'SERVICE_SESSION_STARTED', + 'SERVICE_SESSION_ENDED', + 'SERVICE_SESSION_REVERTED', + 'PROJECT_SESSION_STARTED', + 'PROJECT_SESSION_ENDED', + 'PROJECT_SESSION_REVERTED', + + -- Session media events + 'SESSION_NOTE_CREATED', + 'SESSION_NOTE_UPDATED', + 'SESSION_NOTE_DELETED', + 'SESSION_IMAGE_UPLOADED', + 'SESSION_IMAGE_UPDATED', + 'SESSION_IMAGE_DELETED', + 'SESSION_VIDEO_UPLOADED', + 'SESSION_VIDEO_UPDATED', + 'SESSION_VIDEO_DELETED', + 'SESSION_MEDIA_INTERNAL_FLAGGED', + + -- Task events + 'SERVICE_TASK_COMPLETED', + 'SERVICE_TASK_UNCOMPLETED', + 'PROJECT_TASK_COMPLETED', + 'PROJECT_TASK_UNCOMPLETED', + + -- Service scope events + 'SERVICE_SCOPE_CREATED', + 'SERVICE_SCOPE_UPDATED', + 
'SERVICE_SCOPE_DELETED', + 'SERVICE_SCOPE_ACTIVATED', + 'SERVICE_SCOPE_DEACTIVATED', + 'SERVICE_SCOPE_AREA_CREATED', + 'SERVICE_SCOPE_AREA_UPDATED', + 'SERVICE_SCOPE_AREA_DELETED', + 'SERVICE_SCOPE_TASK_CREATED', + 'SERVICE_SCOPE_TASK_UPDATED', + 'SERVICE_SCOPE_TASK_DELETED', + + -- Project scope events + 'PROJECT_SCOPE_CREATED', + 'PROJECT_SCOPE_UPDATED', + 'PROJECT_SCOPE_DELETED', + 'PROJECT_SCOPE_ACTIVATED', + 'PROJECT_SCOPE_DEACTIVATED', + 'PROJECT_SCOPE_CATEGORY_CREATED', + 'PROJECT_SCOPE_CATEGORY_UPDATED', + 'PROJECT_SCOPE_CATEGORY_DELETED', + 'PROJECT_SCOPE_TASK_CREATED', + 'PROJECT_SCOPE_TASK_UPDATED', + 'PROJECT_SCOPE_TASK_DELETED', + + -- Scope template events + 'SCOPE_TEMPLATE_CREATED', + 'SCOPE_TEMPLATE_UPDATED', + 'SCOPE_TEMPLATE_DELETED', + 'SCOPE_TEMPLATE_ACTIVATED', + 'SCOPE_TEMPLATE_DEACTIVATED', + 'SCOPE_TEMPLATE_INSTANTIATED', + + -- Profile events + 'TEAM_PROFILE_CREATED', + 'TEAM_PROFILE_UPDATED', + 'TEAM_PROFILE_DELETED', + 'TEAM_PROFILE_ROLE_CHANGED', + 'TEAM_PROFILE_STATUS_CHANGED', + 'CUSTOMER_PROFILE_CREATED', + 'CUSTOMER_PROFILE_UPDATED', + 'CUSTOMER_PROFILE_DELETED', + 'CUSTOMER_PROFILE_STATUS_CHANGED', + 'CUSTOMER_PROFILE_ACCESS_GRANTED', + 'CUSTOMER_PROFILE_ACCESS_REVOKED', + + -- Financial events + 'LABOR_CREATED', + 'LABOR_UPDATED', + 'LABOR_DELETED', + 'LABOR_RATE_CHANGED', + 'REVENUE_CREATED', + 'REVENUE_UPDATED', + 'REVENUE_DELETED', + 'REVENUE_AMOUNT_CHANGED', + 'INVOICE_CREATED', + 'INVOICE_UPDATED', + 'INVOICE_DELETED', + 'INVOICE_STATUS_CHANGED', + 'INVOICE_SENT', + 'INVOICE_PAID', + 'INVOICE_OVERDUE', + + -- Report events + 'REPORT_CREATED', + 'REPORT_UPDATED', + 'REPORT_DELETED', + 'REPORT_SUBMITTED', + 'REPORT_APPROVED', + + -- Conversation events + 'CONVERSATION_CREATED', + 'CONVERSATION_UPDATED', + 'CONVERSATION_ARCHIVED', + 'CONVERSATION_UNARCHIVED', + 'CONVERSATION_PARTICIPANT_ADDED', + 'CONVERSATION_PARTICIPANT_REMOVED', + + -- Message events + 'MESSAGE_SENT', + 'MESSAGE_UPDATED', + 'MESSAGE_DELETED', + 'MESSAGE_READ', 
+ + -- Notification events + 'NOTIFICATION_RULE_CREATED', + 'NOTIFICATION_RULE_UPDATED', + 'NOTIFICATION_RULE_DELETED', + 'NOTIFICATION_RULE_ACTIVATED', + 'NOTIFICATION_RULE_DEACTIVATED', + 'NOTIFICATION_CREATED', + 'NOTIFICATION_SENT', + 'NOTIFICATION_READ', + 'NOTIFICATION_FAILED', + 'NOTIFICATION_DELIVERY_ATTEMPTED', + 'NOTIFICATION_DELIVERY_SUCCEEDED', + 'NOTIFICATION_DELIVERY_FAILED', + + -- System events + 'SYSTEM_STARTUP', + 'SYSTEM_SHUTDOWN', + 'MONITORING_TASK_RUN', + 'MONITORING_ALERT_TRIGGERED', + 'BACKGROUND_JOB_STARTED', + 'BACKGROUND_JOB_COMPLETED', + 'BACKGROUND_JOB_FAILED' +); diff --git a/migrations/20260101000002_create_profiles.sql b/migrations/20260101000002_create_profiles.sql new file mode 100644 index 0000000..ad938fa --- /dev/null +++ b/migrations/20260101000002_create_profiles.sql @@ -0,0 +1,48 @@ +-- Migration 002: Create profile tables +-- Profiles are the base for authentication/authorization +-- TeamProfile: role-based access (Admin > TeamLeader > TeamMember) +-- CustomerProfile: data-scoped access via customer_profile_access M2M + +-- Team member profiles (internal users) +-- Note: id IS the Kratos identity UUID - no separate ory_kratos_id column +-- This allows Oathkeeper's X-User-ID header to be used directly for profile lookup +CREATE TABLE team_profiles ( + id UUID PRIMARY KEY, -- = Kratos identity.id (not auto-generated) + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + -- Contact info + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + phone VARCHAR(20), + email VARCHAR(254), + + -- Authorization + role team_role NOT NULL DEFAULT 'TEAM_MEMBER', + status entity_status NOT NULL DEFAULT 'ACTIVE', + + notes TEXT +); + +-- Customer profiles (external users) +-- Note: id IS the Kratos identity UUID - no separate ory_kratos_id column +CREATE TABLE customer_profiles ( + id UUID PRIMARY KEY, -- = Kratos identity.id (not auto-generated) + created_at TIMESTAMPTZ NOT 
NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + -- Contact info + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + phone VARCHAR(20), + email VARCHAR(254), + + status entity_status NOT NULL DEFAULT 'ACTIVE', + + notes TEXT +); + +-- Indexes for common queries +CREATE INDEX idx_team_profiles_status ON team_profiles(status); +CREATE INDEX idx_team_profiles_role ON team_profiles(role); +CREATE INDEX idx_customer_profiles_status ON customer_profiles(status); diff --git a/migrations/20260101000003_create_customers.sql b/migrations/20260101000003_create_customers.sql new file mode 100644 index 0000000..cc21179 --- /dev/null +++ b/migrations/20260101000003_create_customers.sql @@ -0,0 +1,80 @@ +-- Migration 003: Create customer hierarchy tables +-- Customer is the top-level business entity +-- CustomerAddress and CustomerContact are for customers without accounts + +-- Top-level business entity +CREATE TABLE customers ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(200) NOT NULL UNIQUE, + status entity_status NOT NULL DEFAULT 'ACTIVE', + + start_date DATE, + end_date DATE, + + -- Billing info + billing_terms TEXT, + billing_email VARCHAR(254), + + -- Wave accounting integration + wave_customer_id VARCHAR(255) +); + +-- Customer addresses (for customers without accounts) +CREATE TABLE customer_addresses ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + name VARCHAR(200), + street_address VARCHAR(255) NOT NULL, + city VARCHAR(100) NOT NULL, + state VARCHAR(100) NOT NULL, + zip_code VARCHAR(20) NOT NULL, + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + is_primary BOOLEAN NOT NULL DEFAULT FALSE, + + notes TEXT +); + +-- Customer contacts 
+CREATE TABLE customer_contacts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + phone VARCHAR(20), + email VARCHAR(254), + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + is_primary BOOLEAN NOT NULL DEFAULT FALSE, + + notes TEXT +); + +-- M2M: CustomerProfile access to Customer (determines data authorization) +CREATE TABLE customer_profile_access ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + customer_profile_id UUID NOT NULL REFERENCES customer_profiles(id) ON DELETE CASCADE, + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + UNIQUE (customer_profile_id, customer_id) +); + +-- Indexes +CREATE INDEX idx_customers_status ON customers(status); +CREATE INDEX idx_customer_addresses_customer ON customer_addresses(customer_id); +CREATE INDEX idx_customer_contacts_customer ON customer_contacts(customer_id); +CREATE INDEX idx_customer_profile_access_profile ON customer_profile_access(customer_profile_id); +CREATE INDEX idx_customer_profile_access_customer ON customer_profile_access(customer_id); diff --git a/migrations/20260101000004_create_accounts.sql b/migrations/20260101000004_create_accounts.sql new file mode 100644 index 0000000..bdf4b86 --- /dev/null +++ b/migrations/20260101000004_create_accounts.sql @@ -0,0 +1,68 @@ +-- Migration 004: Create account hierarchy tables +-- Account: business entity under a Customer +-- AccountAddress: location (where work happens) +-- AccountContact: contact person at account (exactly ONE active per account) + +-- Business entity under a customer +CREATE TABLE accounts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT 
NOW(), + + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + name VARCHAR(200) NOT NULL, + status entity_status NOT NULL DEFAULT 'ACTIVE', + + start_date DATE NOT NULL, + end_date DATE +); + +-- Physical locations for accounts (where work happens) +CREATE TABLE account_addresses ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + + name VARCHAR(200) NOT NULL, + street_address VARCHAR(255) NOT NULL, + city VARCHAR(100) NOT NULL, + state VARCHAR(100) NOT NULL, + zip_code VARCHAR(20) NOT NULL, + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + is_primary BOOLEAN NOT NULL DEFAULT FALSE, + + notes TEXT +); + +-- Contact persons at accounts +CREATE TABLE account_contacts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + phone VARCHAR(20), + email VARCHAR(254) NOT NULL, + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + is_primary BOOLEAN NOT NULL DEFAULT FALSE, + + notes TEXT +); + +-- Constraint: exactly ONE active contact per account +CREATE UNIQUE INDEX idx_account_contact_active + ON account_contacts (account_id) WHERE is_active = TRUE; + +-- Indexes +CREATE INDEX idx_accounts_customer ON accounts(customer_id); +CREATE INDEX idx_accounts_status ON accounts(status); +CREATE INDEX idx_account_addresses_account ON account_addresses(account_id); +CREATE INDEX idx_account_contacts_account ON account_contacts(account_id); diff --git a/migrations/20260101000005_create_scope_templates.sql b/migrations/20260101000005_create_scope_templates.sql new file mode 100644 index 0000000..bc76687 --- /dev/null +++ 
b/migrations/20260101000005_create_scope_templates.sql @@ -0,0 +1,50 @@ +-- Migration 005: Create scope template tables +-- Templates are reusable blueprints that can be instantiated as ServiceScope or ProjectScope +-- Structure: ScopeTemplate → ScopeTemplateArea → ScopeTemplateTask + +-- Reusable scope template +CREATE TABLE scope_templates ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(255) NOT NULL, + description TEXT, + is_active BOOLEAN NOT NULL DEFAULT TRUE +); + +-- Area within a scope template +CREATE TABLE scope_template_areas ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + template_id UUID NOT NULL REFERENCES scope_templates(id) ON DELETE CASCADE, + + name VARCHAR(100) NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0) +); + +-- Task within a scope template area +-- Has THREE descriptions for different audiences +CREATE TABLE scope_template_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + area_id UUID NOT NULL REFERENCES scope_template_areas(id) ON DELETE CASCADE, + + -- Three descriptions for different audiences + scope_description TEXT NOT NULL, -- Customer-facing + checklist_description TEXT NOT NULL, -- QA/punchlist format + session_description TEXT NOT NULL, -- Team member work instructions + + frequency task_frequency NOT NULL DEFAULT 'DAILY', + "order" INTEGER NOT NULL CHECK ("order" >= 0), + estimated_minutes INTEGER CHECK (estimated_minutes >= 0) +); + +-- Indexes +CREATE INDEX idx_scope_templates_active ON scope_templates(is_active); +CREATE INDEX idx_scope_template_areas_template ON scope_template_areas(template_id); +CREATE INDEX idx_scope_template_tasks_area ON scope_template_tasks(area_id); diff --git 
a/migrations/20260101000006_create_service_scopes.sql b/migrations/20260101000006_create_service_scopes.sql new file mode 100644 index 0000000..a1e55f5 --- /dev/null +++ b/migrations/20260101000006_create_service_scopes.sql @@ -0,0 +1,58 @@ +-- Migration 006: Create service scope tables +-- ServiceScope: assigned at AccountAddress level (exactly ONE active per location) +-- Structure: ServiceScope → ServiceScopeArea → ServiceScopeTask + +-- Service scope assigned to an account address (location) +CREATE TABLE service_scopes ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(255) NOT NULL, + description TEXT, + + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + account_address_id UUID NOT NULL REFERENCES account_addresses(id) ON DELETE CASCADE, + + is_active BOOLEAN NOT NULL DEFAULT TRUE +); + +-- Constraint: exactly ONE active service scope per account address +CREATE UNIQUE INDEX idx_service_scope_active + ON service_scopes (account_address_id) WHERE is_active = TRUE; + +-- Area within a service scope +CREATE TABLE service_scope_areas ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + scope_id UUID NOT NULL REFERENCES service_scopes(id) ON DELETE CASCADE, + + name VARCHAR(100) NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0) +); + +-- Task within a service scope area +CREATE TABLE service_scope_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + area_id UUID NOT NULL REFERENCES service_scope_areas(id) ON DELETE CASCADE, + + -- Three descriptions for different audiences + scope_description TEXT NOT NULL, -- Customer-facing + checklist_description TEXT NOT NULL, -- QA/punchlist format + session_description TEXT NOT 
NULL, -- Team member work instructions + + frequency task_frequency NOT NULL DEFAULT 'DAILY', + "order" INTEGER NOT NULL CHECK ("order" >= 0), + estimated_minutes INTEGER CHECK (estimated_minutes >= 0) +); + +-- Indexes +CREATE INDEX idx_service_scopes_account ON service_scopes(account_id); +CREATE INDEX idx_service_scopes_address ON service_scopes(account_address_id); +CREATE INDEX idx_service_scope_areas_scope ON service_scope_areas(scope_id); +CREATE INDEX idx_service_scope_tasks_area ON service_scope_tasks(area_id); diff --git a/migrations/20260101000007_create_project_scopes.sql b/migrations/20260101000007_create_project_scopes.sql new file mode 100644 index 0000000..912f3b3 --- /dev/null +++ b/migrations/20260101000007_create_project_scopes.sql @@ -0,0 +1,61 @@ +-- Migration 007: Create project scope tables +-- ProjectScope: assigned directly to a Project (not location-based) +-- Structure: ProjectScope → ProjectScopeCategory → ProjectScopeTask + +-- Forward declaration: projects table will be created in migration 009 +-- We create project_scopes with a deferred FK constraint + +-- Project scope assigned directly to a project +CREATE TABLE project_scopes ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(255) NOT NULL, + description TEXT, + + -- Project reference (FK added in migration 009 after projects table exists) + project_id UUID NOT NULL, + + -- Optional: if project is at a specific location + account_id UUID REFERENCES accounts(id) ON DELETE SET NULL, + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + is_active BOOLEAN NOT NULL DEFAULT TRUE +); + +-- Category within a project scope (equivalent to Area for services) +CREATE TABLE project_scope_categories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + 
scope_id UUID NOT NULL REFERENCES project_scopes(id) ON DELETE CASCADE, + + name VARCHAR(255) NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0) +); + +-- Task within a project scope category +CREATE TABLE project_scope_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + category_id UUID NOT NULL REFERENCES project_scope_categories(id) ON DELETE CASCADE, + + -- Three descriptions for different audiences + scope_description TEXT NOT NULL, -- Customer-facing + checklist_description TEXT NOT NULL, -- QA/punchlist format + session_description TEXT NOT NULL, -- Team member work instructions + + frequency task_frequency NOT NULL DEFAULT 'AS_NEEDED', + "order" INTEGER NOT NULL CHECK ("order" >= 0), + estimated_minutes INTEGER CHECK (estimated_minutes >= 0) +); + +-- Indexes +CREATE INDEX idx_project_scopes_project ON project_scopes(project_id); +CREATE INDEX idx_project_scopes_account ON project_scopes(account_id); +CREATE INDEX idx_project_scope_categories_scope ON project_scope_categories(scope_id); +CREATE INDEX idx_project_scope_tasks_category ON project_scope_tasks(category_id); diff --git a/migrations/20260101000008_create_services.sql b/migrations/20260101000008_create_services.sql new file mode 100644 index 0000000..a9cb821 --- /dev/null +++ b/migrations/20260101000008_create_services.sql @@ -0,0 +1,76 @@ +-- Migration 008: Create services and schedules tables +-- Schedule: defines when services occur at a location (exactly ONE active per location, non-overlapping dates) +-- Service: individual work occurrence generated from schedule + +-- Schedule for recurring services at an account address +CREATE TABLE schedules ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_address_id UUID REFERENCES account_addresses(id) ON DELETE CASCADE, + + name 
VARCHAR(255), + + -- Day-of-week flags + monday_service BOOLEAN NOT NULL DEFAULT FALSE, + tuesday_service BOOLEAN NOT NULL DEFAULT FALSE, + wednesday_service BOOLEAN NOT NULL DEFAULT FALSE, + thursday_service BOOLEAN NOT NULL DEFAULT FALSE, + friday_service BOOLEAN NOT NULL DEFAULT FALSE, + saturday_service BOOLEAN NOT NULL DEFAULT FALSE, + sunday_service BOOLEAN NOT NULL DEFAULT FALSE, + weekend_service BOOLEAN NOT NULL DEFAULT FALSE, + + -- Schedule exceptions (free text) + schedule_exception TEXT, + + -- Validity period (non-overlapping per location) + start_date DATE NOT NULL, + end_date DATE +); + +-- Constraint: non-overlapping schedules per account address +ALTER TABLE schedules ADD CONSTRAINT schedules_no_overlap + EXCLUDE USING gist ( + account_address_id WITH =, + daterange(start_date, COALESCE(end_date, '9999-12-31'::DATE), '[]') WITH && + ); + +-- Individual service occurrence +CREATE TABLE services ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_id UUID REFERENCES accounts(id) ON DELETE SET NULL, + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + date DATE NOT NULL, + status work_status NOT NULL DEFAULT 'SCHEDULED', + + notes TEXT, + + -- Google Calendar integration + calendar_event_id VARCHAR(255) +); + +-- M2M: Service to TeamProfile (assigned team members) +CREATE TABLE service_team_members ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + service_id UUID NOT NULL REFERENCES services(id) ON DELETE CASCADE, + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + + UNIQUE (service_id, team_profile_id) +); + +-- Indexes +CREATE INDEX idx_schedules_address ON schedules(account_address_id); +CREATE INDEX idx_services_account ON services(account_id); +CREATE INDEX idx_services_address ON services(account_address_id); +CREATE INDEX 
idx_services_date ON services(date); +CREATE INDEX idx_services_status ON services(status); +CREATE INDEX idx_service_team_members_service ON service_team_members(service_id); +CREATE INDEX idx_service_team_members_team ON service_team_members(team_profile_id); diff --git a/migrations/20260101000009_create_projects.sql b/migrations/20260101000009_create_projects.sql new file mode 100644 index 0000000..70db710 --- /dev/null +++ b/migrations/20260101000009_create_projects.sql @@ -0,0 +1,71 @@ +-- Migration 009: Create projects table and add FK to project_scopes +-- Project: one-time work, MUST have Customer, MAY have AccountAddress + +-- Project (one-time work) +CREATE TABLE projects ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + -- MUST have a customer + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + -- MAY have a location (AccountAddress) OR standalone address + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + -- Standalone address (used when account_address_id is NULL) + street_address VARCHAR(255), + city VARCHAR(100), + state VARCHAR(100), + zip_code VARCHAR(20), + + -- XOR constraint: either account_address OR standalone address + CONSTRAINT project_addr_xor_check CHECK ( + (account_address_id IS NOT NULL AND street_address IS NULL AND city IS NULL AND state IS NULL AND zip_code IS NULL) + OR + (account_address_id IS NULL AND street_address IS NOT NULL AND city IS NOT NULL AND state IS NOT NULL AND zip_code IS NOT NULL) + OR + (account_address_id IS NULL AND street_address IS NULL AND city IS NULL AND state IS NULL AND zip_code IS NULL) + ), + + name VARCHAR(200) NOT NULL, + date DATE NOT NULL, + status work_status NOT NULL DEFAULT 'SCHEDULED', + + notes TEXT, + + -- Financials + labor DECIMAL(10, 2) NOT NULL DEFAULT 0, + amount DECIMAL(10, 2) NOT NULL DEFAULT 0, + + -- Scope reference (optional, 
assigned directly to project) + scope_id UUID REFERENCES project_scopes(id) ON DELETE SET NULL, + + -- External integrations + calendar_event_id VARCHAR(255), + wave_service_id VARCHAR(255) +); + +-- Now add FK from project_scopes to projects +ALTER TABLE project_scopes + ADD CONSTRAINT fk_project_scopes_project + FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; + +-- M2M: Project to TeamProfile (assigned team members) +CREATE TABLE project_team_members ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + + UNIQUE (project_id, team_profile_id) +); + +-- Indexes +CREATE INDEX idx_projects_customer ON projects(customer_id); +CREATE INDEX idx_projects_address ON projects(account_address_id); +CREATE INDEX idx_projects_date ON projects(date); +CREATE INDEX idx_projects_status ON projects(status); +CREATE INDEX idx_project_team_members_project ON project_team_members(project_id); +CREATE INDEX idx_project_team_members_team ON project_team_members(team_profile_id); diff --git a/migrations/20260101000010_create_service_sessions.sql b/migrations/20260101000010_create_service_sessions.sql new file mode 100644 index 0000000..863850a --- /dev/null +++ b/migrations/20260101000010_create_service_sessions.sql @@ -0,0 +1,124 @@ +-- Migration 010: Create service sessions and related media tables +-- ServiceSession: active work session during a service +-- During session: task completions, notes, photos, videos + +-- Service work session +CREATE TABLE service_sessions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + service_id UUID NOT NULL REFERENCES services(id) ON DELETE CASCADE, + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + 
account_address_id UUID NOT NULL REFERENCES account_addresses(id) ON DELETE CASCADE, + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + scope_id UUID NOT NULL REFERENCES service_scopes(id) ON DELETE RESTRICT, + + date DATE NOT NULL, + start TIMESTAMPTZ NOT NULL, + "end" TIMESTAMPTZ, + + created_by_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE RESTRICT, + closed_by_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + + -- Constraint: end must be after start (or null for active sessions) + CONSTRAINT service_session_end_gt_start_or_null CHECK ("end" IS NULL OR "end" > start) +); + +-- Service task completion record +CREATE TABLE service_task_completions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + service_id UUID NOT NULL REFERENCES services(id) ON DELETE CASCADE, + task_id UUID NOT NULL REFERENCES service_scope_tasks(id) ON DELETE CASCADE, + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + completed_by_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE RESTRICT, + completed_at TIMESTAMPTZ NOT NULL, + + -- For tracking monthly/annual task frequencies + year INTEGER NOT NULL CHECK (year >= 0), + month INTEGER NOT NULL CHECK (month >= 0 AND month <= 12), + + notes TEXT +); + +-- M2M: Session to task completions +CREATE TABLE service_session_completed_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES service_sessions(id) ON DELETE CASCADE, + task_completion_id UUID NOT NULL REFERENCES service_task_completions(id) ON DELETE CASCADE, + + UNIQUE (session_id, task_completion_id) +); + +-- Session notes +CREATE TABLE service_session_notes ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id 
UUID NOT NULL REFERENCES service_sessions(id) ON DELETE CASCADE, + + content TEXT NOT NULL, + author_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Session images +CREATE TABLE service_session_images ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES service_sessions(id) ON DELETE CASCADE, + + title VARCHAR(255) NOT NULL, + image VARCHAR(100) NOT NULL, + thumbnail VARCHAR(100), + + content_type VARCHAR(100) NOT NULL, + width INTEGER NOT NULL CHECK (width >= 0), + height INTEGER NOT NULL CHECK (height >= 0), + + uploaded_by_team_profile_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + notes TEXT NOT NULL DEFAULT '', + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Session videos +CREATE TABLE service_session_videos ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES service_sessions(id) ON DELETE CASCADE, + + title VARCHAR(255) NOT NULL, + video VARCHAR(100) NOT NULL, + thumbnail VARCHAR(100), + + content_type VARCHAR(100) NOT NULL, + width INTEGER NOT NULL CHECK (width >= 0), + height INTEGER NOT NULL CHECK (height >= 0), + duration_seconds INTEGER NOT NULL CHECK (duration_seconds >= 0), + file_size_bytes BIGINT NOT NULL CHECK (file_size_bytes >= 0), + + uploaded_by_team_profile_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + notes TEXT NOT NULL DEFAULT '', + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Indexes +CREATE INDEX idx_service_sessions_service ON service_sessions(service_id); +CREATE INDEX idx_service_sessions_date ON service_sessions(date); +CREATE INDEX idx_service_sessions_created_by ON service_sessions(created_by_id); +CREATE INDEX idx_service_task_completions_service ON 
service_task_completions(service_id); +CREATE INDEX idx_service_task_completions_task ON service_task_completions(task_id); +CREATE INDEX idx_service_session_notes_session ON service_session_notes(session_id); +CREATE INDEX idx_service_session_images_session ON service_session_images(session_id); +CREATE INDEX idx_service_session_videos_session ON service_session_videos(session_id); diff --git a/migrations/20260101000011_create_project_sessions.sql b/migrations/20260101000011_create_project_sessions.sql new file mode 100644 index 0000000..04a873c --- /dev/null +++ b/migrations/20260101000011_create_project_sessions.sql @@ -0,0 +1,125 @@ +-- Migration 011: Create project sessions and related media tables +-- ProjectSession: active work session during a project +-- During session: task completions, notes, photos, videos + +-- Project work session +CREATE TABLE project_sessions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + scope_id UUID NOT NULL REFERENCES project_scopes(id) ON DELETE RESTRICT, + + -- Optional location (projects may or may not have a location) + account_id UUID REFERENCES accounts(id) ON DELETE SET NULL, + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + date DATE NOT NULL, + start TIMESTAMPTZ NOT NULL, + "end" TIMESTAMPTZ, + + created_by_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE RESTRICT, + closed_by_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + + -- Constraint: end must be after start (or null for active sessions) + CONSTRAINT project_session_end_gt_start_or_null CHECK ("end" IS NULL OR "end" > start) +); + +-- Project task completion record +CREATE TABLE project_task_completions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at 
TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + task_id UUID NOT NULL REFERENCES project_scope_tasks(id) ON DELETE CASCADE, + + -- Optional location context + account_id UUID REFERENCES accounts(id) ON DELETE SET NULL, + account_address_id UUID REFERENCES account_addresses(id) ON DELETE SET NULL, + + completed_by_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE RESTRICT, + completed_at TIMESTAMPTZ NOT NULL, + + notes TEXT +); + +-- M2M: Session to task completions +CREATE TABLE project_session_completed_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES project_sessions(id) ON DELETE CASCADE, + task_completion_id UUID NOT NULL REFERENCES project_task_completions(id) ON DELETE CASCADE, + + UNIQUE (session_id, task_completion_id) +); + +-- Session notes +CREATE TABLE project_session_notes ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES project_sessions(id) ON DELETE CASCADE, + + content TEXT NOT NULL, + author_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Session images +CREATE TABLE project_session_images ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES project_sessions(id) ON DELETE CASCADE, + + title VARCHAR(255) NOT NULL, + image VARCHAR(100) NOT NULL, + thumbnail VARCHAR(100), + + content_type VARCHAR(100) NOT NULL, + width INTEGER NOT NULL CHECK (width >= 0), + height INTEGER NOT NULL CHECK (height >= 0), + + uploaded_by_team_profile_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + notes TEXT NOT NULL 
DEFAULT '', + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Session videos +CREATE TABLE project_session_videos ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES project_sessions(id) ON DELETE CASCADE, + + title VARCHAR(255) NOT NULL, + video VARCHAR(100) NOT NULL, + thumbnail VARCHAR(100), + + content_type VARCHAR(100) NOT NULL, + width INTEGER NOT NULL CHECK (width >= 0), + height INTEGER NOT NULL CHECK (height >= 0), + duration_seconds INTEGER NOT NULL CHECK (duration_seconds >= 0), + file_size_bytes BIGINT NOT NULL CHECK (file_size_bytes >= 0), + + uploaded_by_team_profile_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL, + notes TEXT NOT NULL DEFAULT '', + internal BOOLEAN NOT NULL DEFAULT FALSE +); + +-- Indexes +CREATE INDEX idx_project_sessions_project ON project_sessions(project_id); +CREATE INDEX idx_project_sessions_date ON project_sessions(date); +CREATE INDEX idx_project_sessions_created_by ON project_sessions(created_by_id); +CREATE INDEX idx_project_task_completions_project ON project_task_completions(project_id); +CREATE INDEX idx_project_task_completions_task ON project_task_completions(task_id); +CREATE INDEX idx_project_session_notes_session ON project_session_notes(session_id); +CREATE INDEX idx_project_session_images_session ON project_session_images(session_id); +CREATE INDEX idx_project_session_videos_session ON project_session_videos(session_id); diff --git a/migrations/20260101000012_create_financial.sql b/migrations/20260101000012_create_financial.sql new file mode 100644 index 0000000..3b98a62 --- /dev/null +++ b/migrations/20260101000012_create_financial.sql @@ -0,0 +1,99 @@ +-- Migration 012: Create financial tables +-- Labor: rate per account address (exactly ONE active per location, non-overlapping dates) +-- Revenue: expected revenue per account (exactly ONE active, non-overlapping 
dates) +-- Invoice: billing document with Wave integration + +-- Labor rate for account address +CREATE TABLE labor ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_address_id UUID REFERENCES account_addresses(id) ON DELETE CASCADE, + + amount DECIMAL(10, 2) NOT NULL, + + -- Validity period (non-overlapping per location) + start_date DATE NOT NULL, + end_date DATE +); + +-- Constraint: non-overlapping labor rates per account address +ALTER TABLE labor ADD CONSTRAINT labor_no_overlap + EXCLUDE USING gist ( + account_address_id WITH =, + daterange(start_date, COALESCE(end_date, '9999-12-31'::DATE), '[]') WITH && + ); + +-- Revenue expectation for account +CREATE TABLE revenues ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + + amount DECIMAL(10, 2) NOT NULL, + + -- Validity period (non-overlapping per account) + start_date DATE NOT NULL, + end_date DATE, + + -- Wave accounting integration + wave_service_id VARCHAR(255) +); + +-- Constraint: non-overlapping revenues per account +ALTER TABLE revenues ADD CONSTRAINT revenues_no_overlap + EXCLUDE USING gist ( + account_id WITH =, + daterange(start_date, COALESCE(end_date, '9999-12-31'::DATE), '[]') WITH && + ); + +-- Customer invoice +CREATE TABLE invoices ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + + date DATE NOT NULL, + status invoice_status NOT NULL DEFAULT 'DRAFT', + date_paid DATE, + payment_type payment_type, + + -- Wave accounting integration + wave_invoice_id VARCHAR(255) +); + +-- M2M: Invoice to projects +CREATE TABLE invoice_projects ( + id UUID 
PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + invoice_id UUID NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + + UNIQUE (invoice_id, project_id) +); + +-- M2M: Invoice to revenues +CREATE TABLE invoice_revenues ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + invoice_id UUID NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, + revenue_id UUID NOT NULL REFERENCES revenues(id) ON DELETE CASCADE, + + UNIQUE (invoice_id, revenue_id) +); + +-- Indexes +CREATE INDEX idx_labor_address ON labor(account_address_id); +CREATE INDEX idx_revenues_account ON revenues(account_id); +CREATE INDEX idx_invoices_customer ON invoices(customer_id); +CREATE INDEX idx_invoices_date ON invoices(date); +CREATE INDEX idx_invoices_status ON invoices(status); +CREATE INDEX idx_invoice_projects_invoice ON invoice_projects(invoice_id); +CREATE INDEX idx_invoice_revenues_invoice ON invoice_revenues(invoice_id); diff --git a/migrations/20260101000013_create_reports.sql b/migrations/20260101000013_create_reports.sql new file mode 100644 index 0000000..080ad63 --- /dev/null +++ b/migrations/20260101000013_create_reports.sql @@ -0,0 +1,46 @@ +-- Migration 013: Create reports tables +-- Report: aggregates completed services/projects for a team member on a date +-- Used to calculate labor share for payment + +-- Team member work report +CREATE TABLE reports ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + date DATE NOT NULL, + + -- One report per team member per date + UNIQUE (team_profile_id, date) +); + +-- M2M: Report to services +CREATE TABLE report_services ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL 
DEFAULT NOW(), + + report_id UUID NOT NULL REFERENCES reports(id) ON DELETE CASCADE, + service_id UUID NOT NULL REFERENCES services(id) ON DELETE CASCADE, + + UNIQUE (report_id, service_id) +); + +-- M2M: Report to projects +CREATE TABLE report_projects ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + report_id UUID NOT NULL REFERENCES reports(id) ON DELETE CASCADE, + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + + UNIQUE (report_id, project_id) +); + +-- Indexes +CREATE INDEX idx_reports_team_profile ON reports(team_profile_id); +CREATE INDEX idx_reports_date ON reports(date); +CREATE INDEX idx_report_services_report ON report_services(report_id); +CREATE INDEX idx_report_services_service ON report_services(service_id); +CREATE INDEX idx_report_projects_report ON report_projects(report_id); +CREATE INDEX idx_report_projects_project ON report_projects(project_id); diff --git a/migrations/20260101000014_create_messaging.sql b/migrations/20260101000014_create_messaging.sql new file mode 100644 index 0000000..8c31f0a --- /dev/null +++ b/migrations/20260101000014_create_messaging.sql @@ -0,0 +1,106 @@ +-- Migration 014: Create messaging tables +-- Conversation: message thread +-- ConversationParticipant: user participation (polymorphic: team_profile or customer_profile) +-- Message: individual message +-- MessageReadReceipt: tracks when messages are read + +-- Message thread/conversation +CREATE TABLE conversations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + subject VARCHAR(500), + conversation_type conversation_type NOT NULL DEFAULT 'DIRECT', + + -- Polymorphic: entity this conversation is about + entity_type VARCHAR(50), + entity_id UUID, + + -- Polymorphic: who created the conversation + created_by_type VARCHAR(20), -- 'team_profile' or 'customer_profile' + created_by_id UUID, + + 
last_message_at TIMESTAMPTZ, + is_archived BOOLEAN NOT NULL DEFAULT FALSE, + metadata JSONB NOT NULL DEFAULT '{}'::JSONB +); + +-- Conversation participant (polymorphic) +CREATE TABLE conversation_participants ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + conversation_id UUID NOT NULL REFERENCES conversations(id) ON DELETE CASCADE, + + -- Polymorphic: participant type + participant_type VARCHAR(20) NOT NULL, -- 'team_profile' or 'customer_profile' + participant_id UUID NOT NULL, + + last_read_at TIMESTAMPTZ, + unread_count INTEGER NOT NULL DEFAULT 0 CHECK (unread_count >= 0), + is_muted BOOLEAN NOT NULL DEFAULT FALSE, + is_archived BOOLEAN NOT NULL DEFAULT FALSE, + joined_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + -- One entry per participant per conversation + UNIQUE (conversation_id, participant_type, participant_id) +); + +-- Individual message +CREATE TABLE messages ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + conversation_id UUID NOT NULL REFERENCES conversations(id) ON DELETE CASCADE, + + -- Polymorphic: author type + author_type VARCHAR(20) NOT NULL, -- 'team_profile' or 'customer_profile' + author_id UUID NOT NULL, + + content TEXT NOT NULL, + is_deleted BOOLEAN NOT NULL DEFAULT FALSE, + + -- For threaded replies + reply_to_id UUID REFERENCES messages(id) ON DELETE SET NULL, + + -- File attachments (JSONB array of attachment metadata) + attachments JSONB NOT NULL DEFAULT '[]'::JSONB, + + -- System-generated message (e.g., "User joined conversation") + is_system_message BOOLEAN NOT NULL DEFAULT FALSE, + + -- Additional metadata (formatting, mentions, etc.) 
+ metadata JSONB NOT NULL DEFAULT '{}'::JSONB +); + +-- Message read receipts (tracks when specific messages are read) +CREATE TABLE message_read_receipts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + message_id UUID NOT NULL REFERENCES messages(id) ON DELETE CASCADE, + + -- Polymorphic: reader type + reader_type VARCHAR(20) NOT NULL, -- 'team_profile' or 'customer_profile' + reader_id UUID NOT NULL, + + read_at TIMESTAMPTZ NOT NULL, + + -- One receipt per message per reader + UNIQUE (message_id, reader_type, reader_id) +); + +-- Indexes +CREATE INDEX idx_conversations_type ON conversations(conversation_type); +CREATE INDEX idx_conversations_entity ON conversations(entity_type, entity_id); +CREATE INDEX idx_conversations_last_message ON conversations(last_message_at DESC); +CREATE INDEX idx_conversation_participants_conversation ON conversation_participants(conversation_id); +CREATE INDEX idx_conversation_participants_participant ON conversation_participants(participant_type, participant_id); +CREATE INDEX idx_messages_conversation ON messages(conversation_id); +CREATE INDEX idx_messages_author ON messages(author_type, author_id); +CREATE INDEX idx_messages_created ON messages(created_at DESC); +CREATE INDEX idx_message_read_receipts_message ON message_read_receipts(message_id); diff --git a/migrations/20260101000015_create_notifications.sql b/migrations/20260101000015_create_notifications.sql new file mode 100644 index 0000000..eb36691 --- /dev/null +++ b/migrations/20260101000015_create_notifications.sql @@ -0,0 +1,112 @@ +-- Migration 015: Create notification tables +-- NotificationRule: admin-defined rules for triggering notifications +-- Notification: individual notification instance +-- NotificationDelivery: tracks delivery attempts per channel + +-- Admin-defined notification rule +CREATE TABLE notification_rules ( + id UUID PRIMARY KEY DEFAULT 
gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(200) NOT NULL, + description TEXT, + is_active BOOLEAN NOT NULL DEFAULT TRUE, + + -- Event types that trigger this rule (array of event_type strings) + event_types VARCHAR(50)[] NOT NULL, + + -- Channels to deliver through (array of notification_channel strings) + channels VARCHAR(10)[] NOT NULL, + + -- Target roles (array of team_role strings, optional) + target_roles VARCHAR(20)[], + + -- Custom conditions for triggering (JSONB) + conditions JSONB NOT NULL DEFAULT '{}'::JSONB, + + -- Templates + subject_template VARCHAR(500), + body_template TEXT +); + +-- M2M: NotificationRule targets specific TeamProfiles +CREATE TABLE notification_rule_team_profiles ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + rule_id UUID NOT NULL REFERENCES notification_rules(id) ON DELETE CASCADE, + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + + UNIQUE (rule_id, team_profile_id) +); + +-- M2M: NotificationRule targets specific CustomerProfiles +CREATE TABLE notification_rule_customer_profiles ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + rule_id UUID NOT NULL REFERENCES notification_rules(id) ON DELETE CASCADE, + customer_profile_id UUID NOT NULL REFERENCES customer_profiles(id) ON DELETE CASCADE, + + UNIQUE (rule_id, customer_profile_id) +); + +-- Individual notification instance +CREATE TABLE notifications ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + -- Polymorphic: recipient type + recipient_type VARCHAR(20) NOT NULL, -- 'team_profile' or 'customer_profile' + recipient_id UUID NOT NULL, + + -- References + rule_id UUID REFERENCES notification_rules(id) ON DELETE SET NULL, + event_id UUID, -- FK added 
in migration 016 after events table exists + + status notification_status NOT NULL DEFAULT 'PENDING', + subject VARCHAR(500) NOT NULL, + body TEXT NOT NULL, + + -- URL to navigate to when notification is clicked + action_url VARCHAR(500), + + read_at TIMESTAMPTZ, + metadata JSONB NOT NULL DEFAULT '{}'::JSONB +); + +-- Notification delivery attempt +CREATE TABLE notification_deliveries ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + notification_id UUID NOT NULL REFERENCES notifications(id) ON DELETE CASCADE, + + channel notification_channel NOT NULL, + status delivery_status NOT NULL DEFAULT 'PENDING', + + attempts INTEGER NOT NULL DEFAULT 0 CHECK (attempts >= 0), + last_attempt_at TIMESTAMPTZ, + sent_at TIMESTAMPTZ, + delivered_at TIMESTAMPTZ, + + error_message TEXT, + + -- External service ID (e.g., email provider message ID) + external_id VARCHAR(200), + + metadata JSONB NOT NULL DEFAULT '{}'::JSONB +); + +-- Indexes +CREATE INDEX idx_notification_rules_active ON notification_rules(is_active); +CREATE INDEX idx_notification_rule_team_profiles_rule ON notification_rule_team_profiles(rule_id); +CREATE INDEX idx_notification_rule_customer_profiles_rule ON notification_rule_customer_profiles(rule_id); +CREATE INDEX idx_notifications_recipient ON notifications(recipient_type, recipient_id); +CREATE INDEX idx_notifications_status ON notifications(status); +CREATE INDEX idx_notifications_created ON notifications(created_at DESC); +CREATE INDEX idx_notification_deliveries_notification ON notification_deliveries(notification_id); +CREATE INDEX idx_notification_deliveries_status ON notification_deliveries(status); diff --git a/migrations/20260101000016_create_events.sql b/migrations/20260101000016_create_events.sql new file mode 100644 index 0000000..7751f56 --- /dev/null +++ b/migrations/20260101000016_create_events.sql @@ -0,0 +1,40 @@ +-- Migration 016: Create 
events table (audit trail) +-- Event: comprehensive audit trail for all system actions +-- Uses polymorphic actor (team_profile, customer_profile, or system) + +-- Audit trail events +CREATE TABLE events ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + event_type event_type NOT NULL, + + -- What entity was affected + entity_type VARCHAR(100) NOT NULL, + entity_id UUID NOT NULL, + + -- Who triggered the event (polymorphic) + actor_type VARCHAR(20), -- 'team_profile', 'customer_profile', or 'system' + actor_id UUID, + + -- Additional context (old_status, new_status, changed_fields, etc.) + metadata JSONB NOT NULL DEFAULT '{}'::JSONB, + + -- When the event occurred (business timestamp, may differ from created_at) + timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Add FK from notifications to events +ALTER TABLE notifications + ADD CONSTRAINT fk_notifications_event + FOREIGN KEY (event_id) REFERENCES events(id) ON DELETE SET NULL; + +-- High-performance indexes for common query patterns +CREATE INDEX idx_events_entity ON events(entity_type, entity_id); +CREATE INDEX idx_events_actor ON events(actor_type, actor_id); +CREATE INDEX idx_events_timestamp ON events(timestamp DESC); +CREATE INDEX idx_events_type ON events(event_type); +CREATE INDEX idx_events_created ON events(created_at DESC); + +-- Composite index for filtering by entity and time +CREATE INDEX idx_events_entity_timestamp ON events(entity_type, entity_id, timestamp DESC); diff --git a/migrations/20260101000017_create_indexes.sql b/migrations/20260101000017_create_indexes.sql new file mode 100644 index 0000000..1d9b7dd --- /dev/null +++ b/migrations/20260101000017_create_indexes.sql @@ -0,0 +1,46 @@ +-- Migration 017: Additional indexes and constraints +-- Performance optimizations for common query patterns + +-- Composite indexes for dashboard queries +CREATE INDEX idx_services_address_date ON services(account_address_id, date); +CREATE INDEX 
idx_projects_customer_date ON projects(customer_id, date); + +-- For finding active work by team member +-- NOTE: team_profile_id lookups are already covered by idx_service_team_members_team +-- (migration 008) and idx_project_team_members_team (migration 009); duplicate indexes removed. + +-- For task completion tracking +CREATE INDEX idx_service_task_completions_year_month ON service_task_completions(year, month); +CREATE INDEX idx_service_task_completions_completed_at ON service_task_completions(completed_at DESC); +CREATE INDEX idx_project_task_completions_completed_at ON project_task_completions(completed_at DESC); + +-- For session media queries +CREATE INDEX idx_service_session_images_internal ON service_session_images(internal); +CREATE INDEX idx_service_session_videos_internal ON service_session_videos(internal); +CREATE INDEX idx_project_session_images_internal ON project_session_images(internal); +CREATE INDEX idx_project_session_videos_internal ON project_session_videos(internal); + +-- For financial reporting +CREATE INDEX idx_invoices_date_status ON invoices(date, status); +CREATE INDEX idx_labor_start_date ON labor(start_date); +CREATE INDEX idx_revenues_start_date ON revenues(start_date); + +-- For scope hierarchy traversal +CREATE INDEX idx_scope_template_areas_order ON scope_template_areas(template_id, "order"); +CREATE INDEX idx_scope_template_tasks_order ON scope_template_tasks(area_id, "order"); +CREATE INDEX idx_service_scope_areas_order ON service_scope_areas(scope_id, "order"); +CREATE INDEX idx_service_scope_tasks_order ON service_scope_tasks(area_id, "order"); +CREATE INDEX idx_project_scope_categories_order ON project_scope_categories(scope_id, "order"); +CREATE INDEX idx_project_scope_tasks_order ON project_scope_tasks(category_id, "order"); + +-- For conversation/message pagination +CREATE INDEX idx_messages_conversation_created ON messages(conversation_id, created_at DESC); + +-- For notification delivery retry logic +CREATE INDEX
idx_notification_deliveries_pending ON notification_deliveries(status, last_attempt_at) + WHERE status IN ('PENDING', 'QUEUED', 'FAILED'); + +-- Partial indexes for active records +CREATE INDEX idx_accounts_active ON accounts(customer_id) WHERE status = 'ACTIVE'; +CREATE INDEX idx_team_profiles_active ON team_profiles(role) WHERE status = 'ACTIVE'; +CREATE INDEX idx_customer_profiles_active ON customer_profiles(id) WHERE status = 'ACTIVE'; diff --git a/migrations/20260101000018_create_chat_mcp.sql b/migrations/20260101000018_create_chat_mcp.sql new file mode 100644 index 0000000..2495c95 --- /dev/null +++ b/migrations/20260101000018_create_chat_mcp.sql @@ -0,0 +1,147 @@ +-- Migration 018: Create Chat & MCP Registry tables +-- AI assistant with full MCP (Model Context Protocol) server registry + +-- ==================== MCP SERVER REGISTRY ==================== + +-- MCP Server definitions +CREATE TABLE mcp_servers ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(100) NOT NULL UNIQUE, + description TEXT, + + -- Connection config + transport_type VARCHAR(20) NOT NULL, -- 'stdio', 'sse', 'websocket' + command VARCHAR(500), -- for stdio: command to run + args JSONB NOT NULL DEFAULT '[]', -- command arguments + env JSONB NOT NULL DEFAULT '{}', -- environment variables + url VARCHAR(500), -- for sse/websocket + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + + -- Rate limiting + rate_limit_per_minute INTEGER CHECK (rate_limit_per_minute > 0), + rate_limit_per_hour INTEGER CHECK (rate_limit_per_hour > 0) +); + +-- Tools provided by MCP servers +CREATE TABLE mcp_tools ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + server_id UUID NOT NULL REFERENCES mcp_servers(id) ON DELETE CASCADE, + + name VARCHAR(100) NOT NULL, + description TEXT, + input_schema JSONB NOT 
NULL DEFAULT '{}', + + is_active BOOLEAN NOT NULL DEFAULT TRUE, + + UNIQUE (server_id, name) +); + +-- Role-based permissions for MCP tools +CREATE TABLE mcp_tool_permissions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + tool_id UUID NOT NULL REFERENCES mcp_tools(id) ON DELETE CASCADE, + role team_role NOT NULL, + + UNIQUE (tool_id, role) +); + +-- Per-user tool permissions (override role-based) +CREATE TABLE mcp_tool_user_permissions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + tool_id UUID NOT NULL REFERENCES mcp_tools(id) ON DELETE CASCADE, + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + + is_allowed BOOLEAN NOT NULL, -- explicit allow/deny + + UNIQUE (tool_id, team_profile_id) +); + +-- ==================== CHAT (AI ASSISTANT) ==================== + +-- Chat conversations (AI assistant) +CREATE TABLE chat_conversations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + title VARCHAR(255) NOT NULL, + is_active BOOLEAN NOT NULL DEFAULT TRUE +); + +-- Chat messages with tool call tracking +CREATE TABLE chat_messages ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + conversation_id UUID NOT NULL REFERENCES chat_conversations(id) ON DELETE CASCADE, + + role VARCHAR(20) NOT NULL CHECK (role IN ('user', 'assistant', 'tool')), + content TEXT, + + -- For tool calls/results + tool_calls JSONB NOT NULL DEFAULT '[]', + tool_results JSONB NOT NULL DEFAULT '[]' +); + +-- ==================== MCP TOOL EXECUTION LOG ==================== + +-- Tool execution log (for auditing, rate limiting, debugging) +CREATE TABLE mcp_tool_executions ( + id UUID 
PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + tool_id UUID NOT NULL REFERENCES mcp_tools(id) ON DELETE CASCADE, + team_profile_id UUID NOT NULL REFERENCES team_profiles(id) ON DELETE CASCADE, + chat_message_id UUID REFERENCES chat_messages(id) ON DELETE SET NULL, + + -- Execution details + input JSONB NOT NULL, + output JSONB, + error TEXT, + + started_at TIMESTAMPTZ NOT NULL, + completed_at TIMESTAMPTZ, + duration_ms INTEGER CHECK (duration_ms >= 0), + + status VARCHAR(20) NOT NULL CHECK (status IN ('pending', 'running', 'success', 'error')) +); + +-- ==================== INDEXES ==================== + +-- MCP server queries +CREATE INDEX idx_mcp_servers_active ON mcp_servers(is_active); + +-- MCP tool queries +CREATE INDEX idx_mcp_tools_server ON mcp_tools(server_id); +CREATE INDEX idx_mcp_tools_active ON mcp_tools(is_active); + +-- Permission lookups +CREATE INDEX idx_mcp_tool_permissions_tool ON mcp_tool_permissions(tool_id); +CREATE INDEX idx_mcp_tool_permissions_role ON mcp_tool_permissions(role); +CREATE INDEX idx_mcp_tool_user_permissions_tool ON mcp_tool_user_permissions(tool_id); +CREATE INDEX idx_mcp_tool_user_permissions_user ON mcp_tool_user_permissions(team_profile_id); + +-- Chat queries +-- NOTE: team_profile_id-only lookups are served by idx_chat_conversations_active (leading column). +CREATE INDEX idx_chat_conversations_active ON chat_conversations(team_profile_id, is_active); +-- NOTE: conversation_id-only lookups are served by idx_chat_messages_conversation_created (leading column). +CREATE INDEX idx_chat_messages_conversation_created ON chat_messages(conversation_id, created_at DESC); + +-- Rate limiting queries +CREATE INDEX idx_mcp_tool_executions_rate_limit + ON mcp_tool_executions (tool_id, team_profile_id, created_at DESC); +CREATE INDEX idx_mcp_tool_executions_status ON mcp_tool_executions(status); +CREATE INDEX idx_mcp_tool_executions_started ON mcp_tool_executions(started_at DESC); diff --git
a/migrations/20260101000019_create_punchlists.sql b/migrations/20260101000019_create_punchlists.sql new file mode 100644 index 0000000..1edcb9c --- /dev/null +++ b/migrations/20260101000019_create_punchlists.sql @@ -0,0 +1,113 @@ +-- Migration 019: Create Punchlist tables +-- Customer-facing snapshot generated from completed sessions +-- Uses checklist_description field for punchlist-formatted task text + +-- ==================== SERVICE PUNCHLISTS ==================== + +-- Service punchlist (generated from completed session) +CREATE TABLE service_punchlists ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES service_sessions(id) ON DELETE CASCADE, + + -- Denormalized for easy access/reporting + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + account_id UUID NOT NULL REFERENCES accounts(id) ON DELETE CASCADE, + account_address_id UUID NOT NULL REFERENCES account_addresses(id) ON DELETE CASCADE, + date DATE NOT NULL, + + -- PDF/export tracking + pdf_url VARCHAR(500), + exported_at TIMESTAMPTZ, + + notes TEXT +); + +-- Individual task entries on the service punchlist (snapshot of scope tasks) +CREATE TABLE service_punchlist_items ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + punchlist_id UUID NOT NULL REFERENCES service_punchlists(id) ON DELETE CASCADE, + task_id UUID REFERENCES service_scope_tasks(id) ON DELETE SET NULL, -- nullable if task deleted + + -- Snapshot of task at time of punchlist creation + checklist_description TEXT NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0), + + -- Completion status + is_completed BOOLEAN NOT NULL DEFAULT FALSE, + completed_at TIMESTAMPTZ, + completed_by_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL +); + +-- ==================== PROJECT PUNCHLISTS ==================== + +-- Project punchlist 
(generated from completed session) +CREATE TABLE project_punchlists ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + session_id UUID NOT NULL REFERENCES project_sessions(id) ON DELETE CASCADE, + + -- Denormalized for easy access/reporting + customer_id UUID NOT NULL REFERENCES customers(id) ON DELETE CASCADE, + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + date DATE NOT NULL, + + -- PDF/export tracking + pdf_url VARCHAR(500), + exported_at TIMESTAMPTZ, + + notes TEXT +); + +-- Individual task entries on the project punchlist (snapshot of scope tasks) +CREATE TABLE project_punchlist_items ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + punchlist_id UUID NOT NULL REFERENCES project_punchlists(id) ON DELETE CASCADE, + task_id UUID REFERENCES project_scope_tasks(id) ON DELETE SET NULL, -- nullable if task deleted + + -- Snapshot of task at time of punchlist creation + checklist_description TEXT NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0), + + -- Completion status + is_completed BOOLEAN NOT NULL DEFAULT FALSE, + completed_at TIMESTAMPTZ, + completed_by_id UUID REFERENCES team_profiles(id) ON DELETE SET NULL +); + +-- ==================== INDEXES ==================== + +-- Service punchlist queries +CREATE INDEX idx_service_punchlists_session ON service_punchlists(session_id); +CREATE INDEX idx_service_punchlists_customer ON service_punchlists(customer_id); +CREATE INDEX idx_service_punchlists_account ON service_punchlists(account_id); +CREATE INDEX idx_service_punchlists_address ON service_punchlists(account_address_id); +CREATE INDEX idx_service_punchlists_date ON service_punchlists(date DESC); + +-- Service punchlist items +CREATE INDEX idx_service_punchlist_items_punchlist ON service_punchlist_items(punchlist_id); +CREATE INDEX idx_service_punchlist_items_order ON 
service_punchlist_items(punchlist_id, "order"); +CREATE INDEX idx_service_punchlist_items_completed ON service_punchlist_items(is_completed); + +-- Project punchlist queries +CREATE INDEX idx_project_punchlists_session ON project_punchlists(session_id); +CREATE INDEX idx_project_punchlists_customer ON project_punchlists(customer_id); +CREATE INDEX idx_project_punchlists_project ON project_punchlists(project_id); +CREATE INDEX idx_project_punchlists_date ON project_punchlists(date DESC); + +-- Project punchlist items +CREATE INDEX idx_project_punchlist_items_punchlist ON project_punchlist_items(punchlist_id); +CREATE INDEX idx_project_punchlist_items_order ON project_punchlist_items(punchlist_id, "order"); +CREATE INDEX idx_project_punchlist_items_completed ON project_punchlist_items(is_completed); + +-- Unique constraint: one punchlist per session -- NOTE(review): these unique single-column indexes make the plain idx_service_punchlists_session and idx_project_punchlists_session above redundant; consider dropping the non-unique ones +CREATE UNIQUE INDEX idx_service_punchlist_unique_session ON service_punchlists(session_id); +CREATE UNIQUE INDEX idx_project_punchlist_unique_session ON project_punchlists(session_id); diff --git a/migrations/20260101000020_add_constraints.sql b/migrations/20260101000020_add_constraints.sql new file mode 100644 index 0000000..217daa1 --- /dev/null +++ b/migrations/20260101000020_add_constraints.sql @@ -0,0 +1,74 @@ +-- Migration 020: Additional constraints discovered from Django audit +-- Business logic constraints that ensure data integrity + +-- ==================== SERVICE CONSTRAINTS ==================== + +-- One service per location per day +-- Prevents duplicate services at the same address on the same date +CREATE UNIQUE INDEX idx_service_unique_per_address_date + ON services (account_address_id, date); + +-- Only one active session per service +-- A service can only have one session that hasn't ended +CREATE UNIQUE INDEX idx_service_session_unique_active + ON service_sessions (service_id) WHERE "end" IS NULL; + +-- ==================== PROJECT CONSTRAINTS ==================== + +-- Only one active session per
project +-- A project can only have one session that hasn't ended +CREATE UNIQUE INDEX idx_project_session_unique_active + ON project_sessions (project_id) WHERE "end" IS NULL; + +-- ==================== NOTIFICATION CONSTRAINTS ==================== + +-- One delivery attempt per channel per notification +-- Prevents duplicate delivery attempts for the same notification via the same channel +CREATE UNIQUE INDEX idx_notification_delivery_unique_channel + ON notification_deliveries (notification_id, channel); + +-- ==================== TEAM MEMBER CONSTRAINTS ==================== + +-- One team member assignment per service per profile +-- Prevents duplicate team member assignments +CREATE UNIQUE INDEX idx_service_team_member_unique + ON service_team_members (service_id, team_profile_id); + +-- One team member assignment per project per profile +-- Prevents duplicate team member assignments +CREATE UNIQUE INDEX idx_project_team_member_unique + ON project_team_members (project_id, team_profile_id); + +-- ==================== CONVERSATION CONSTRAINTS ==================== + +-- Note: conversation_participants already has UNIQUE (conversation_id, participant_type, participant_id) +-- defined in migration 014, so no additional constraint needed here. 
+ +-- ==================== INVOICE CONSTRAINTS ==================== + +-- One revenue association per invoice +CREATE UNIQUE INDEX idx_invoice_revenue_unique + ON invoice_revenues (invoice_id, revenue_id); + +-- One project association per invoice +CREATE UNIQUE INDEX idx_invoice_project_unique + ON invoice_projects (invoice_id, project_id); + +-- ==================== REPORT CONSTRAINTS ==================== + +-- One service association per report +CREATE UNIQUE INDEX idx_report_service_unique + ON report_services (report_id, service_id); + +-- One project association per report +CREATE UNIQUE INDEX idx_report_project_unique + ON report_projects (report_id, project_id); + +-- ==================== SESSION COMPLETED TASKS CONSTRAINTS ==================== + +-- A given task completion can be linked to a given session only once +CREATE UNIQUE INDEX idx_service_session_completed_task_unique + ON service_session_completed_tasks (session_id, task_completion_id); + +CREATE UNIQUE INDEX idx_project_session_completed_task_unique + ON project_session_completed_tasks (session_id, task_completion_id); diff --git a/migrations/20260101000021_create_kratos_schema.sql b/migrations/20260101000021_create_kratos_schema.sql new file mode 100644 index 0000000..3aafd33 --- /dev/null +++ b/migrations/20260101000021_create_kratos_schema.sql @@ -0,0 +1,46 @@ +-- Migration 021: Create Kratos schema within nexus database +-- Kratos tables will live in a separate schema for isolation while sharing the database + +-- Create kratos schema +CREATE SCHEMA IF NOT EXISTS kratos; + +-- Create kratos roles (for Vault dynamic credentials) +-- These are NOLOGIN roles that Vault-created temp users will inherit +DO $$ +BEGIN + IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'kratos_app') THEN + CREATE ROLE kratos_app NOLOGIN NOINHERIT; + END IF; + IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'kratos_migrate') THEN + CREATE ROLE kratos_migrate NOLOGIN NOINHERIT; + END IF; +END +$$; + +-- Grant schema
access +GRANT USAGE ON SCHEMA kratos TO kratos_app; +GRANT USAGE ON SCHEMA kratos TO kratos_migrate; +GRANT ALL PRIVILEGES ON SCHEMA kratos TO kratos_migrate; + +-- Grant nexus_owner rights to manage kratos schema +GRANT ALL PRIVILEGES ON SCHEMA kratos TO nexus_owner; + +-- Default privileges for future tables in kratos schema +-- When tables are created by nexus_owner (via migrations), these permissions apply +ALTER DEFAULT PRIVILEGES FOR ROLE nexus_owner IN SCHEMA kratos + GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO kratos_app; + +ALTER DEFAULT PRIVILEGES FOR ROLE nexus_owner IN SCHEMA kratos + GRANT ALL PRIVILEGES ON TABLES TO kratos_migrate; + +-- Grant default privileges on sequences +ALTER DEFAULT PRIVILEGES FOR ROLE nexus_owner IN SCHEMA kratos + GRANT USAGE, SELECT ON SEQUENCES TO kratos_app; + +ALTER DEFAULT PRIVILEGES FOR ROLE nexus_owner IN SCHEMA kratos + GRANT ALL PRIVILEGES ON SEQUENCES TO kratos_migrate; + +-- Grant roles to vault_admin for dynamic credential creation +-- WITH ADMIN OPTION allows vault_admin to grant these roles to dynamically created users +GRANT kratos_app TO vault_admin WITH ADMIN OPTION; +GRANT kratos_migrate TO vault_admin WITH ADMIN OPTION; diff --git a/migrations/20260101000022_add_address_constraints.sql b/migrations/20260101000022_add_address_constraints.sql new file mode 100644 index 0000000..a1437d7 --- /dev/null +++ b/migrations/20260101000022_add_address_constraints.sql @@ -0,0 +1,7 @@ +-- Migration 022: Customer address constraint +-- Enforce single active address per customer + +-- Only one active address per customer +-- When address changes, old address is deactivated, new one becomes active +CREATE UNIQUE INDEX idx_customer_address_one_active + ON customer_addresses (customer_id) WHERE is_active = true; diff --git a/migrations/20260101000023_remove_project_task_frequency.sql b/migrations/20260101000023_remove_project_task_frequency.sql new file mode 100644 index 0000000..c118394 --- /dev/null +++ 
b/migrations/20260101000023_remove_project_task_frequency.sql @@ -0,0 +1,4 @@ +-- Migration 023: Remove frequency from project scope tasks +-- Project tasks are one-time, they don't have recurring frequencies like service tasks + +ALTER TABLE project_scope_tasks DROP COLUMN frequency; diff --git a/migrations/20260101000024_create_project_scope_templates.sql b/migrations/20260101000024_create_project_scope_templates.sql new file mode 100644 index 0000000..9e0a314 --- /dev/null +++ b/migrations/20260101000024_create_project_scope_templates.sql @@ -0,0 +1,67 @@ +-- Migration 024: Rename scope templates to service_scope_templates and create project_scope_templates +-- This provides consistent naming: service_scope_templates vs project_scope_templates +-- Note: Service tasks have frequency, project tasks do not + +-- ==================== RENAME SERVICE SCOPE TEMPLATES ==================== + +-- Rename tables for consistency +ALTER TABLE scope_templates RENAME TO service_scope_templates; +ALTER TABLE scope_template_areas RENAME TO service_scope_template_areas; +ALTER TABLE scope_template_tasks RENAME TO service_scope_template_tasks; + +-- Rename indexes +ALTER INDEX idx_scope_templates_active RENAME TO idx_service_scope_templates_active; +ALTER INDEX idx_scope_template_areas_template RENAME TO idx_service_scope_template_areas_template; +ALTER INDEX idx_scope_template_tasks_area RENAME TO idx_service_scope_template_tasks_area; + +-- ==================== CREATE PROJECT SCOPE TEMPLATES ==================== + +-- Reusable project scope template +CREATE TABLE project_scope_templates ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + name VARCHAR(255) NOT NULL, + description TEXT, + is_active BOOLEAN NOT NULL DEFAULT TRUE +); + +-- Category within a project scope template (equivalent to Area for services) +CREATE TABLE project_scope_template_categories ( + id UUID PRIMARY KEY 
DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + template_id UUID NOT NULL REFERENCES project_scope_templates(id) ON DELETE CASCADE, + + name VARCHAR(100) NOT NULL, + "order" INTEGER NOT NULL CHECK ("order" >= 0) +); + +-- Task within a project scope template category +-- Note: No frequency field - project tasks are one-time +CREATE TABLE project_scope_template_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + + category_id UUID NOT NULL REFERENCES project_scope_template_categories(id) ON DELETE CASCADE, + + -- Three descriptions for different audiences + scope_description TEXT NOT NULL, -- Customer-facing + checklist_description TEXT NOT NULL, -- QA/punchlist format + session_description TEXT NOT NULL, -- Team member work instructions + + "order" INTEGER NOT NULL CHECK ("order" >= 0), + estimated_minutes INTEGER CHECK (estimated_minutes >= 0) +); + +-- Indexes for project scope templates +CREATE INDEX idx_project_scope_templates_active ON project_scope_templates(is_active); +CREATE INDEX idx_project_scope_template_categories_template ON project_scope_template_categories(template_id); +CREATE INDEX idx_project_scope_template_tasks_category ON project_scope_template_tasks(category_id); + +-- Note: Data migration from Django tables removed as they don't exist in production. +-- If needed locally, run INSERT statements manually from core_projectscopetemplate, +-- core_projectareatemplate, and core_projecttasktemplate tables. 
diff --git a/migrations/20260108000001_alter_reports_pay_period.sql b/migrations/20260108000001_alter_reports_pay_period.sql new file mode 100644 index 0000000..cbdad21 --- /dev/null +++ b/migrations/20260108000001_alter_reports_pay_period.sql @@ -0,0 +1,91 @@ +-- Migration: Update reports for pay period support and snapshot labor amounts +-- Reports now cover a date range and store calculated labor_share at time of addition + +-- 1. Add status enum for report workflow +CREATE TYPE report_status AS ENUM ('DRAFT', 'FINALIZED', 'PAID'); + +-- 2. Alter reports table: date → start_date/end_date, add status +ALTER TABLE reports + ADD COLUMN start_date DATE, + ADD COLUMN end_date DATE, + ADD COLUMN status report_status NOT NULL DEFAULT 'DRAFT'; + +-- Migrate existing data (use date as both start and end) +UPDATE reports SET start_date = date, end_date = date WHERE start_date IS NULL; + +-- Make date columns required +ALTER TABLE reports + ALTER COLUMN start_date SET NOT NULL, + ALTER COLUMN end_date SET NOT NULL; + +-- Drop old column and constraint +ALTER TABLE reports DROP CONSTRAINT IF EXISTS reports_team_profile_id_date_key; +ALTER TABLE reports DROP COLUMN date; + +-- Add check: end_date >= start_date +ALTER TABLE reports ADD CONSTRAINT reports_date_range_check + CHECK (end_date >= start_date); + +-- Index for querying by period +CREATE INDEX idx_reports_period ON reports(start_date, end_date); +CREATE INDEX idx_reports_status ON reports(status); + +-- 3. Add labor_share snapshot to report_services +-- This stores the calculated amount at time of addition (immutable for payroll) +ALTER TABLE report_services + ADD COLUMN labor_share NUMERIC(10,2) NOT NULL DEFAULT 0.00; + +-- Remove default after backfill +ALTER TABLE report_services ALTER COLUMN labor_share DROP DEFAULT; + +-- 4. 
Add labor_share snapshot to report_projects +ALTER TABLE report_projects + ADD COLUMN labor_share NUMERIC(10,2) NOT NULL DEFAULT 0.00; + +ALTER TABLE report_projects ALTER COLUMN labor_share DROP DEFAULT; + +-- 5. Prevent same service in multiple reports for same team member +-- A completed service should only be paid once per team member +-- Using trigger-based approach since PostgreSQL doesn't support subqueries in index expressions + +-- Create a function to check uniqueness +CREATE OR REPLACE FUNCTION check_report_service_unique() +RETURNS TRIGGER AS $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM report_services rs + JOIN reports r ON rs.report_id = r.id + WHERE rs.service_id = NEW.service_id + AND r.team_profile_id = (SELECT team_profile_id FROM reports WHERE id = NEW.report_id) + AND rs.id IS DISTINCT FROM NEW.id + ) THEN + RAISE EXCEPTION 'Service % is already in another report for this team member', NEW.service_id; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER trg_report_service_unique + BEFORE INSERT OR UPDATE ON report_services + FOR EACH ROW EXECUTE FUNCTION check_report_service_unique(); + +-- Same for projects +CREATE OR REPLACE FUNCTION check_report_project_unique() +RETURNS TRIGGER AS $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM report_projects rp + JOIN reports r ON rp.report_id = r.id + WHERE rp.project_id = NEW.project_id + AND r.team_profile_id = (SELECT team_profile_id FROM reports WHERE id = NEW.report_id) + AND rp.id IS DISTINCT FROM NEW.id + ) THEN + RAISE EXCEPTION 'Project % is already in another report for this team member', NEW.project_id; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER trg_report_project_unique + BEFORE INSERT OR UPDATE ON report_projects + FOR EACH ROW EXECUTE FUNCTION check_report_project_unique(); diff --git a/migrations/20260108000002_alter_invoices_pay_period.sql b/migrations/20260108000002_alter_invoices_pay_period.sql new file mode 100644 index 0000000..dba88db --- /dev/null +++ 
b/migrations/20260108000002_alter_invoices_pay_period.sql @@ -0,0 +1,79 @@ +-- Migration: Update invoices for pay period support and snapshot amounts +-- Invoices now cover a date range and store revenue/project amounts at time of addition + +-- 1. Alter invoices table: date → start_date/end_date +ALTER TABLE invoices + ADD COLUMN start_date DATE, + ADD COLUMN end_date DATE; + +-- Migrate existing data (use date as both start and end) +UPDATE invoices SET start_date = date, end_date = date WHERE start_date IS NULL; + +-- Make date columns required +ALTER TABLE invoices + ALTER COLUMN start_date SET NOT NULL, + ALTER COLUMN end_date SET NOT NULL; + +-- Drop old column +ALTER TABLE invoices DROP COLUMN date; + +-- Add check: end_date >= start_date +ALTER TABLE invoices ADD CONSTRAINT invoices_date_range_check + CHECK (end_date >= start_date); + +-- Index for querying by period +CREATE INDEX idx_invoices_period ON invoices(start_date, end_date); + +-- 2. Add amount snapshot to invoice_revenues +-- This stores the revenue amount at time of addition (immutable for billing) +ALTER TABLE invoice_revenues + ADD COLUMN amount NUMERIC(10,2) NOT NULL DEFAULT 0.00; + +-- Remove default after adding +ALTER TABLE invoice_revenues ALTER COLUMN amount DROP DEFAULT; + +-- 3. Add amount snapshot to invoice_projects +ALTER TABLE invoice_projects + ADD COLUMN amount NUMERIC(10,2) NOT NULL DEFAULT 0.00; + +ALTER TABLE invoice_projects ALTER COLUMN amount DROP DEFAULT; + +-- 4. 
Add global uniqueness constraint for revenues +-- A revenue can only be on ONE invoice ever +CREATE OR REPLACE FUNCTION check_invoice_revenue_unique() +RETURNS TRIGGER AS $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM invoice_revenues + WHERE revenue_id = NEW.revenue_id + AND id IS DISTINCT FROM NEW.id + ) THEN + RAISE EXCEPTION 'Revenue % is already on another invoice', NEW.revenue_id; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER trg_invoice_revenue_unique + BEFORE INSERT OR UPDATE ON invoice_revenues + FOR EACH ROW EXECUTE FUNCTION check_invoice_revenue_unique(); + +-- 5. Add global uniqueness constraint for projects +-- A project can only be on ONE invoice ever +CREATE OR REPLACE FUNCTION check_invoice_project_unique() +RETURNS TRIGGER AS $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM invoice_projects + WHERE project_id = NEW.project_id + AND id IS DISTINCT FROM NEW.id + ) THEN + RAISE EXCEPTION 'Project % is already on another invoice', NEW.project_id; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER trg_invoice_project_unique + BEFORE INSERT OR UPDATE ON invoice_projects + FOR EACH ROW EXECUTE FUNCTION check_invoice_project_unique(); diff --git a/migrations/20260108000003_invoice_revenue_period_uniqueness.sql b/migrations/20260108000003_invoice_revenue_period_uniqueness.sql new file mode 100644 index 0000000..eb3c2d7 --- /dev/null +++ b/migrations/20260108000003_invoice_revenue_period_uniqueness.sql @@ -0,0 +1,26 @@ +-- Change invoice_revenues uniqueness from global to per-period +-- A revenue can be on multiple invoices, just not on invoices with overlapping periods + +-- Replace the trigger function to check for overlapping periods instead of global uniqueness +CREATE OR REPLACE FUNCTION check_invoice_revenue_unique() +RETURNS TRIGGER AS $$ +BEGIN + -- Check if this revenue is already on an invoice with an overlapping period + IF EXISTS ( + SELECT 1 + FROM invoice_revenues ir + JOIN invoices existing ON existing.id = 
ir.invoice_id + JOIN invoices new_inv ON new_inv.id = NEW.invoice_id + WHERE ir.revenue_id = NEW.revenue_id + AND ir.id != COALESCE(NEW.id, '00000000-0000-0000-0000-000000000000'::uuid) + AND existing.start_date <= new_inv.end_date + AND existing.end_date >= new_inv.start_date + ) THEN + RAISE EXCEPTION 'Revenue % is already on an invoice with an overlapping period', NEW.revenue_id; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- The trigger already exists, so no need to recreate it +-- It will use the updated function automatically diff --git a/migrations/20260110000001_add_task_completion_unique.sql b/migrations/20260110000001_add_task_completion_unique.sql new file mode 100644 index 0000000..3d4509a --- /dev/null +++ b/migrations/20260110000001_add_task_completion_unique.sql @@ -0,0 +1,8 @@ +-- Add unique constraints for task completions per service/project +-- Matches Django's unique_task_per_service constraint + +CREATE UNIQUE INDEX idx_service_task_completion_unique +ON service_task_completions(service_id, task_id); + +CREATE UNIQUE INDEX idx_project_task_completion_unique +ON project_task_completions(project_id, task_id); diff --git a/oathkeeper/Dockerfile b/oathkeeper/Dockerfile new file mode 100644 index 0000000..e68fc73 --- /dev/null +++ b/oathkeeper/Dockerfile @@ -0,0 +1,17 @@ +FROM oryd/oathkeeper:v0.40.9 + +USER root + +# Install envsubst (gettext) and su-exec +RUN apk add --no-cache gettext su-exec + +# Copy config as templates (JWKS mounted at runtime via docker-compose) +COPY config/oathkeeper.yml /etc/oathkeeper/oathkeeper.yml.template +COPY config/rules/ /etc/oathkeeper/rules.template/ + +# Copy entrypoint +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] +CMD ["serve", "--config", "/etc/oathkeeper/oathkeeper.yml"] diff --git a/oathkeeper/config/oathkeeper.yml b/oathkeeper/config/oathkeeper.yml new file mode 100644 index 0000000..ed4f123 --- /dev/null +++ b/oathkeeper/config/oathkeeper.yml 
@@ -0,0 +1,131 @@ +serve: + proxy: + port: 7200 + trust_forwarded_headers: true + cors: + enabled: true + allowed_origins: + - "https://account.example.com" + - "https://auth.example.com" + - "https://app.example.com" + - "https://admin.example.com" + - "https://api.example.com" + - "http://localhost:5000" + - "http://localhost:5173" + - "http://localhost:7200" + - "https://local.example.com:5173" + allowed_methods: + - GET + - POST + - PUT + - PATCH + - DELETE + allowed_headers: + - Authorization + - Content-Type + - X-Session-Token + - Cookie + - Accept + exposed_headers: + - Content-Type + - Set-Cookie + allow_credentials: true + debug: false + + api: + port: 7250 + +access_rules: + matching_strategy: glob + repositories: + - file:///etc/oathkeeper/rules/nexus.yml + - file:///etc/oathkeeper/rules/kratos.yml + - file:///etc/oathkeeper/rules/django.yml + +authenticators: + cookie_session: + enabled: true + config: + check_session_url: http://127.0.0.1:6000/sessions/whoami + preserve_path: true + extra_from: "@this" + subject_from: "identity.id" + only: + - ory_kratos_session + + bearer_token: + enabled: true + config: + check_session_url: http://127.0.0.1:6000/sessions/whoami + token_from: + header: Authorization + preserve_path: true + extra_from: "@this" + subject_from: "identity.id" + + noop: + enabled: true + + anonymous: + enabled: true + config: + subject: guest + +authorizers: + allow: + enabled: true + + deny: + enabled: true + +mutators: + noop: + enabled: true + + header: + enabled: true + config: + headers: + # Security: Shared secret for Django backend + X-Oathkeeper-Secret: "${OATHKEEPER_SECRET}" + X-User-ID: "{{ print .Subject }}" + X-User-Email: "{{ print .Extra.identity.traits.email }}" + X-User-First-Name: "{{ print .Extra.identity.traits.name.first }}" + X-User-Last-Name: "{{ print .Extra.identity.traits.name.last }}" + X-User-Phone: "{{ print .Extra.identity.traits.phone }}" + X-User-Profile-Type: "{{ print .Extra.identity.traits.profile_type 
}}" + # Django uses X-Django-Profile-ID, Rust uses X-Profile-ID + X-Profile-ID: "{{ with .Extra.identity.metadata_public }}{{ with .django_profile_id }}{{ . }}{{ end }}{{ end }}" + X-Django-Profile-ID: "{{ with .Extra.identity.metadata_public }}{{ with .django_profile_id }}{{ . }}{{ end }}{{ end }}" + + cookie: + enabled: true + config: + cookies: + user_id: "{{ print .Subject }}" + +errors: + fallback: + - json + handlers: + json: + enabled: true + config: + verbose: true + + redirect: + enabled: true + config: + to: https://account.example.com/login + when: + - error: + - unauthorized + - forbidden + request: + header: + accept: + - text/html + +log: + level: info + format: text diff --git a/oathkeeper/config/rules/kratos.yml b/oathkeeper/config/rules/kratos.yml new file mode 100644 index 0000000..bc69735 --- /dev/null +++ b/oathkeeper/config/rules/kratos.yml @@ -0,0 +1,194 @@ +# ==================================== +# Kratos Public Routes - Production (auth.example.com) +# ==================================== + +# Self-service routes for auth.example.com +- id: "kratos:public:self-service:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "https://auth.example.com/self-service/<**>" + methods: + - GET + - POST + - DELETE + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# WebAuthn JavaScript for auth.example.com +- id: "kratos:public:webauthn:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "https://auth.example.com/.well-known/ory/webauthn.js" + methods: + - GET + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# Session whoami endpoint for auth.example.com +- id: "kratos:public:whoami:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "https://auth.example.com/sessions/whoami" + methods: + 
- GET + - POST + - DELETE + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# ==================================== +# Kratos Public Routes - Local Development (port 7200) +# ==================================== + +# Self-service routes for localhost +- id: "kratos:public:self-service:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "http://localhost:7200/self-service/<**>" + methods: + - GET + - POST + - DELETE + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# WebAuthn JavaScript for localhost +- id: "kratos:public:webauthn:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "http://localhost:7200/.well-known/ory/webauthn.js" + methods: + - GET + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# Session whoami endpoint for localhost +- id: "kratos:public:whoami:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6000" + preserve_host: false + match: + url: "http://localhost:7200/sessions/whoami" + methods: + - GET + - POST + - DELETE + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# ==================================== +# Kratos Admin Routes - Production (auth.example.com) +# ==================================== + +# Admin identities endpoint +- id: "kratos:admin:identities:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6050" + preserve_host: false + match: + url: "https://auth.example.com/admin/identities<**>" + methods: + - GET + - POST + - PUT + - PATCH + - DELETE + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: noop + +# Admin sessions endpoint +- id: "kratos:admin:sessions:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6050" + 
preserve_host: false + match: + url: "https://auth.example.com/admin/sessions<**>" + methods: + - GET + - DELETE + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: noop + +# Admin courier messages endpoint +- id: "kratos:admin:courier:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6050" + preserve_host: false + match: + url: "https://auth.example.com/admin/courier<**>" + methods: + - GET + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: noop + +# Admin recovery link endpoint +- id: "kratos:admin:recovery:auth" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:6050" + preserve_host: false + match: + url: "https://auth.example.com/admin/recovery<**>" + methods: + - POST + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: noop diff --git a/oathkeeper/config/rules/nexus.yml b/oathkeeper/config/rules/nexus.yml new file mode 100644 index 0000000..3fa196f --- /dev/null +++ b/oathkeeper/config/rules/nexus.yml @@ -0,0 +1,171 @@ +# ==================================== +# Nexus API Routes (Rust Backend) +# ==================================== + +# Static files - public, no auth (logo for emails, etc.) 
+- id: "nexus:static:public" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/static/<**>" + methods: + - GET + - HEAD + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# Health endpoint - public, no auth +- id: "nexus:health:public" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/health" + methods: + - GET + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# GraphQL endpoint - CORS preflight (must come before authenticated rule) +- id: "nexus:graphql:preflight" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/graphql" + methods: + - OPTIONS + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# GraphQL endpoint - authenticated +- id: "nexus:graphql:authenticated" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/graphql" + methods: + - GET + - POST + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: header + +# GraphQL Playground - authenticated (same as graphql) +- id: "nexus:graphql:playground" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/graphql/playground" + methods: + - GET + authenticators: + - handler: cookie_session + - handler: anonymous + authorizer: + handler: allow + mutators: + - handler: noop + +# Media endpoint - authenticated (session photos/videos) +- id: "nexus:media:authenticated" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "https://api.example.com/api/media/<**>" + methods: + - GET + - HEAD + - OPTIONS 
+ authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: header + errors: + - handler: json + +# ==================================== +# Local Development Routes (port 7200) +# ==================================== + +# Health endpoint - localhost +- id: "nexus:health:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "http://localhost:7200/health" + methods: + - GET + authenticators: + - handler: noop + authorizer: + handler: allow + mutators: + - handler: noop + +# GraphQL endpoint - localhost +- id: "nexus:graphql:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "http://localhost:7200/graphql" + methods: + - GET + - POST + - OPTIONS + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: header + +# Media endpoint - localhost +- id: "nexus:media:localhost" + version: "v0.40.0" + upstream: + url: "http://127.0.0.1:5050" + preserve_host: false + match: + url: "http://localhost:7200/api/media/<**>" + methods: + - GET + - HEAD + - OPTIONS + authenticators: + - handler: cookie_session + authorizer: + handler: allow + mutators: + - handler: header diff --git a/oathkeeper/entrypoint.sh b/oathkeeper/entrypoint.sh new file mode 100644 index 0000000..564d1ba --- /dev/null +++ b/oathkeeper/entrypoint.sh @@ -0,0 +1,28 @@ +#!/bin/sh +set -e + +# Source vault secrets if available +if [ -f /vault/secrets/.env ]; then + echo "Loading secrets from Vault..." + export $(grep -v '^#' /vault/secrets/.env | xargs) +fi + +echo "Processing Oathkeeper configuration templates..." 
+ +# Substitute environment variables in oathkeeper.yml +envsubst < /etc/oathkeeper/oathkeeper.yml.template > /etc/oathkeeper/oathkeeper.yml +echo "✓ Processed oathkeeper.yml" + +# Create rules directory and process templates +mkdir -p /etc/oathkeeper/rules +for template in /etc/oathkeeper/rules.template/*.yml; do + filename=$(basename "$template") + envsubst < "$template" > "/etc/oathkeeper/rules/$filename" + echo "✓ Processed rules/$filename" +done + +# Set proper ownership +chown -R ory:ory /etc/oathkeeper + +echo "Starting Oathkeeper as ory user..." +exec su-exec ory oathkeeper "$@" diff --git a/oathkeeper/scripts/generate-jwks.sh b/oathkeeper/scripts/generate-jwks.sh new file mode 100755 index 0000000..af88476 --- /dev/null +++ b/oathkeeper/scripts/generate-jwks.sh @@ -0,0 +1,26 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CONFIG_DIR="$SCRIPT_DIR/../config" +JWKS_FILE="$CONFIG_DIR/id_token.jwks.json" + +# Check if JWKS file already has keys +if [ -f "$JWKS_FILE" ]; then + KEY_COUNT=$(cat "$JWKS_FILE" | jq '.keys | length' 2>/dev/null || echo "0") + if [ "$KEY_COUNT" -gt 0 ]; then + echo "JWKS keys already exist at $JWKS_FILE" + echo "If you want to regenerate, delete the file first." + exit 0 + fi +fi + +echo "Generating JWKS keys..." +docker run --rm oryd/oathkeeper:v0.40.9 credentials generate --alg RS256 > "$JWKS_FILE" + +if [ $? 
-eq 0 ]; then + echo "✓ JWKS keys successfully generated at $JWKS_FILE" +else + echo "✗ Failed to generate JWKS keys" + exit 1 +fi diff --git a/pgbouncer/Dockerfile b/pgbouncer/Dockerfile new file mode 100644 index 0000000..b42db7a --- /dev/null +++ b/pgbouncer/Dockerfile @@ -0,0 +1,39 @@ +# PgBouncer with Vault Agent sidecar +# Vault Agent renders credentials, PgBouncer proxies connections + +FROM alpine:3.20 + +# Install PgBouncer and dependencies +RUN apk add --no-cache \ + pgbouncer \ + curl \ + bash \ + postgresql-client \ + unzip \ + su-exec + +# Create pgbouncer user and directories +# Remove default config - vault-agent will render our config +RUN adduser -D -H pgbouncer \ + && mkdir -p /var/run/pgbouncer /etc/pgbouncer /vault/templates /vault/secrets /vault/config /var/log/pgbouncer \ + && rm -f /etc/pgbouncer/pgbouncer.ini \ + && chown -R pgbouncer:pgbouncer /var/run/pgbouncer /etc/pgbouncer /var/log/pgbouncer + +# Install Vault +ARG VAULT_VERSION=1.18.3 +RUN curl -fsSL https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip -o /tmp/vault.zip \ + && unzip /tmp/vault.zip -d /usr/local/bin \ + && rm /tmp/vault.zip \ + && chmod +x /usr/local/bin/vault + +# Copy static userlist +COPY userlist.txt /etc/pgbouncer/userlist.txt +RUN chown pgbouncer:pgbouncer /etc/pgbouncer/userlist.txt + +# Copy entrypoint +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +EXPOSE 6432 + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/pgbouncer/entrypoint.sh b/pgbouncer/entrypoint.sh new file mode 100644 index 0000000..bc202fa --- /dev/null +++ b/pgbouncer/entrypoint.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -e + +echo "Starting Vault Agent to render PgBouncer config..." + +# Start Vault Agent in background (runs as root for Vault capabilities) +vault agent -config=/vault/config/agent-config.hcl & +VAULT_PID=$! + +# Wait for config to be rendered +echo "Waiting for PgBouncer config to be rendered..." +while [ ! 
-f /etc/pgbouncer/pgbouncer.ini ]; do + sleep 1 +done +echo "PgBouncer config rendered." + +# Fix ownership of rendered config +chown pgbouncer:pgbouncer /etc/pgbouncer/pgbouncer.ini +chown -R pgbouncer:pgbouncer /var/run/pgbouncer + +echo "Starting PgBouncer as pgbouncer user..." +# Run pgbouncer as non-root user +# Vault Agent (running as root) will send SIGHUP when credentials rotate +exec su-exec pgbouncer pgbouncer /etc/pgbouncer/pgbouncer.ini diff --git a/pgbouncer/userlist.txt b/pgbouncer/userlist.txt new file mode 100644 index 0000000..960adac --- /dev/null +++ b/pgbouncer/userlist.txt @@ -0,0 +1,4 @@ +; PgBouncer userlist +; With auth_type=trust, passwords are not validated but users must be listed +"pgbouncer" "" +"kratos" "" diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000..b677e24 --- /dev/null +++ b/setup.sh @@ -0,0 +1,247 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SECRETS_DIR="${SCRIPT_DIR}/secrets" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +log_info() { echo -e "${GREEN}[INFO]${NC} $1"; } +log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; } +log_error() { echo -e "${RED}[ERROR]${NC} $1"; } + +usage() { + echo "Usage: $0 " + echo "" + echo "Commands:" + echo " init Initialize secrets directory with AppRole credentials" + echo " start Start the application (migrations run automatically)" + echo " stop Stop the application" + echo " restart Safe restart (down + up, preserves startup order)" + echo " logs View application logs" + echo " rebuild Rebuild and restart (migrations run automatically)" + echo " rebuild --no-cache Force full rebuild without Docker cache" + echo "" + echo "WARNING: Do not use 'docker compose restart ' directly!" + echo " This breaks service dependencies. 
Always use './setup.sh restart'" + echo "" + echo "Environment variables (for init):" + echo " VAULT_APP_ROLE_ID AppRole ID for nexus app runtime" + echo " VAULT_APP_SECRET_ID AppRole Secret ID for nexus app runtime" + echo " VAULT_MIGRATE_ROLE_ID AppRole ID for nexus migrations" + echo " VAULT_MIGRATE_SECRET_ID AppRole Secret ID for nexus migrations" + echo " VAULT_KRATOS_APP_ROLE_ID AppRole ID for kratos runtime" + echo " VAULT_KRATOS_APP_SECRET_ID AppRole Secret ID for kratos runtime" + echo " VAULT_KRATOS_MIGRATE_ROLE_ID AppRole ID for kratos migrations" + echo " VAULT_KRATOS_MIGRATE_SECRET_ID AppRole Secret ID for kratos migrations" + echo " VAULT_OATHKEEPER_ROLE_ID AppRole ID for oathkeeper runtime" + echo " VAULT_OATHKEEPER_SECRET_ID AppRole Secret ID for oathkeeper runtime" +} + +init_secrets() { + log_info "Initializing secrets directory..." + + # Create directories for all services + mkdir -p "${SECRETS_DIR}/app" "${SECRETS_DIR}/migrate" + mkdir -p "${SECRETS_DIR}/kratos-app" "${SECRETS_DIR}/kratos-migrate" + mkdir -p "${SECRETS_DIR}/oathkeeper" + mkdir -p "${SCRIPT_DIR}/run/app" "${SCRIPT_DIR}/run/migrate" + mkdir -p "${SCRIPT_DIR}/run/kratos" "${SCRIPT_DIR}/run/kratos-migrate" + mkdir -p "${SCRIPT_DIR}/run/oathkeeper" + + # Check for required nexus environment variables + if [ -z "$VAULT_APP_ROLE_ID" ] || [ -z "$VAULT_APP_SECRET_ID" ]; then + log_error "VAULT_APP_ROLE_ID and VAULT_APP_SECRET_ID must be set" + exit 1 + fi + + if [ -z "$VAULT_MIGRATE_ROLE_ID" ] || [ -z "$VAULT_MIGRATE_SECRET_ID" ]; then + log_error "VAULT_MIGRATE_ROLE_ID and VAULT_MIGRATE_SECRET_ID must be set" + exit 1 + fi + + # Check for required kratos environment variables + if [ -z "$VAULT_KRATOS_APP_ROLE_ID" ] || [ -z "$VAULT_KRATOS_APP_SECRET_ID" ]; then + log_error "VAULT_KRATOS_APP_ROLE_ID and VAULT_KRATOS_APP_SECRET_ID must be set" + exit 1 + fi + + if [ -z "$VAULT_KRATOS_MIGRATE_ROLE_ID" ] || [ -z "$VAULT_KRATOS_MIGRATE_SECRET_ID" ]; then + log_error 
"VAULT_KRATOS_MIGRATE_ROLE_ID and VAULT_KRATOS_MIGRATE_SECRET_ID must be set" + exit 1 + fi + + # Check for required oathkeeper environment variables + if [ -z "$VAULT_OATHKEEPER_ROLE_ID" ] || [ -z "$VAULT_OATHKEEPER_SECRET_ID" ]; then + log_error "VAULT_OATHKEEPER_ROLE_ID and VAULT_OATHKEEPER_SECRET_ID must be set" + exit 1 + fi + + # Write nexus app credentials (644 for container read access) + echo -n "$VAULT_APP_ROLE_ID" > "${SECRETS_DIR}/app/role-id" + echo -n "$VAULT_APP_SECRET_ID" > "${SECRETS_DIR}/app/secret-id" + chmod 644 "${SECRETS_DIR}/app/role-id" "${SECRETS_DIR}/app/secret-id" + log_info "Nexus app credentials written to ${SECRETS_DIR}/app/" + + # Write nexus migrate credentials (644 for container read access) + echo -n "$VAULT_MIGRATE_ROLE_ID" > "${SECRETS_DIR}/migrate/role-id" + echo -n "$VAULT_MIGRATE_SECRET_ID" > "${SECRETS_DIR}/migrate/secret-id" + chmod 644 "${SECRETS_DIR}/migrate/role-id" "${SECRETS_DIR}/migrate/secret-id" + log_info "Nexus migrate credentials written to ${SECRETS_DIR}/migrate/" + + # Write kratos app credentials (644 for container read access) + echo -n "$VAULT_KRATOS_APP_ROLE_ID" > "${SECRETS_DIR}/kratos-app/role-id" + echo -n "$VAULT_KRATOS_APP_SECRET_ID" > "${SECRETS_DIR}/kratos-app/secret-id" + chmod 644 "${SECRETS_DIR}/kratos-app/role-id" "${SECRETS_DIR}/kratos-app/secret-id" + log_info "Kratos app credentials written to ${SECRETS_DIR}/kratos-app/" + + # Write kratos migrate credentials (644 for container read access) + echo -n "$VAULT_KRATOS_MIGRATE_ROLE_ID" > "${SECRETS_DIR}/kratos-migrate/role-id" + echo -n "$VAULT_KRATOS_MIGRATE_SECRET_ID" > "${SECRETS_DIR}/kratos-migrate/secret-id" + chmod 644 "${SECRETS_DIR}/kratos-migrate/role-id" "${SECRETS_DIR}/kratos-migrate/secret-id" + log_info "Kratos migrate credentials written to ${SECRETS_DIR}/kratos-migrate/" + + # Write oathkeeper credentials (644 for container read access) + echo -n "$VAULT_OATHKEEPER_ROLE_ID" > "${SECRETS_DIR}/oathkeeper/role-id" + echo -n 
"$VAULT_OATHKEEPER_SECRET_ID" > "${SECRETS_DIR}/oathkeeper/secret-id" + chmod 644 "${SECRETS_DIR}/oathkeeper/role-id" "${SECRETS_DIR}/oathkeeper/secret-id" + log_info "Oathkeeper credentials written to ${SECRETS_DIR}/oathkeeper/" + + log_info "All secrets initialized successfully!" +} + +start_app() { + log_info "Starting application..." + + if [ ! -f "${SECRETS_DIR}/app/role-id" ]; then + log_error "Nexus secrets not initialized. Run '$0 init' first." + exit 1 + fi + + if [ ! -f "${SECRETS_DIR}/kratos-app/role-id" ]; then + log_error "Kratos secrets not initialized. Run '$0 init' first." + exit 1 + fi + + if [ ! -f "${SECRETS_DIR}/oathkeeper/role-id" ]; then + log_error "Oathkeeper secrets not initialized. Run '$0 init' first." + exit 1 + fi + + # Start all services (migrations run automatically before app) + docker compose up -d + log_info "Application started!" + log_info "Health checks:" + log_info " Nexus: curl http://localhost:5050/health/ready" + log_info " Kratos: curl http://localhost:6050/health/alive" + log_info " Oathkeeper: curl http://localhost:7250/health/alive" + log_info " Frontend: curl http://localhost:5000/" +} + +stop_app() { + log_info "Stopping application..." + docker compose down + log_info "Application stopped!" +} + +restart_app() { + log_info "Restarting application (safe restart)..." + + if [ ! -f "${SECRETS_DIR}/app/role-id" ]; then + log_error "Nexus secrets not initialized. Run '$0 init' first." + exit 1 + fi + + # Safe restart: down then up preserves dependency order + docker compose down + docker compose up -d + + log_info "Application restarted!" 
+ log_info "Health checks:" + log_info " Nexus: curl http://localhost:5050/health/ready" + log_info " Kratos: curl http://localhost:6050/health/alive" + log_info " Oathkeeper: curl http://localhost:7250/health/alive" + log_info " Frontend: curl http://localhost:5000/" +} + +rebuild_app() { + local no_cache="${1:-}" + + if [ "$no_cache" = "--no-cache" ]; then + log_info "Rebuilding without cache and restarting application..." + else + log_info "Rebuilding and restarting application..." + fi + + if [ ! -f "${SECRETS_DIR}/app/role-id" ]; then + log_error "Nexus secrets not initialized. Run '$0 init' first." + exit 1 + fi + + if [ ! -f "${SECRETS_DIR}/kratos-app/role-id" ]; then + log_error "Kratos secrets not initialized. Run '$0 init' first." + exit 1 + fi + + if [ ! -f "${SECRETS_DIR}/oathkeeper/role-id" ]; then + log_error "Oathkeeper secrets not initialized. Run '$0 init' first." + exit 1 + fi + + # Pull latest code + git pull + + # Rebuild and restart (migrations run automatically) + if [ "$no_cache" = "--no-cache" ]; then + docker compose build --no-cache + docker compose up -d + else + docker compose up -d --build + fi + + log_info "Application rebuilt and started!" 
+ log_info "Health checks:" + log_info " Nexus: curl http://localhost:5050/health/ready" + log_info " Kratos: curl http://localhost:6050/health/alive" + log_info " Oathkeeper: curl http://localhost:7250/health/alive" + log_info " Frontend: curl http://localhost:5000/" +} + +view_logs() { + local service="${1:-}" + if [ -z "$service" ]; then + docker compose logs -f nexus kratos oathkeeper frontend + else + docker compose logs -f "$service" + fi +} + +# Main +case "${1:-}" in + init) + init_secrets + ;; + start) + start_app + ;; + stop) + stop_app + ;; + restart) + restart_app + ;; + rebuild) + rebuild_app "${2:-}" + ;; + logs) + view_logs + ;; + *) + usage + exit 1 + ;; +esac diff --git a/src/auth/mod.rs b/src/auth/mod.rs new file mode 100644 index 0000000..16f0c85 --- /dev/null +++ b/src/auth/mod.rs @@ -0,0 +1,167 @@ +use axum::{ + extract::Request, + http::{header::HeaderMap, StatusCode}, + middleware::Next, + response::Response, +}; +use uuid::Uuid; + +/// Profile type extracted from Ory headers +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ProfileType { + Team, + Customer, +} + +impl ProfileType { + fn from_str(s: &str) -> Option { + match s.to_lowercase().as_str() { + "team" => Some(Self::Team), + "customer" => Some(Self::Customer), + _ => None, + } + } +} + +/// User context extracted from Ory Oathkeeper headers +#[derive(Debug, Clone)] +pub struct UserContext { + /// Kratos identity UUID - also serves as profile primary key + pub user_id: Uuid, + pub profile_type: ProfileType, + pub email: Option, + pub first_name: Option, + pub last_name: Option, + pub phone: Option, +} + +impl UserContext { + /// Extract user context from Ory Oathkeeper headers + pub fn from_headers(headers: &HeaderMap) -> Option { + let user_id = headers + .get("X-User-ID") + .and_then(|v| v.to_str().ok()) + .and_then(|s| Uuid::parse_str(s).ok())?; + + let profile_type = headers + .get("X-User-Profile-Type") + .and_then(|v| v.to_str().ok()) + .and_then(ProfileType::from_str)?; + 
+ let email = headers + .get("X-User-Email") + .and_then(|v| v.to_str().ok()) + .map(String::from); + + let first_name = headers + .get("X-User-First-Name") + .and_then(|v| v.to_str().ok()) + .map(String::from); + + let last_name = headers + .get("X-User-Last-Name") + .and_then(|v| v.to_str().ok()) + .map(String::from); + + let phone = headers + .get("X-User-Phone") + .and_then(|v| v.to_str().ok()) + .map(String::from); + + Some(Self { + user_id, + profile_type, + email, + first_name, + last_name, + phone, + }) + } +} + +/// Middleware to validate Ory Oathkeeper secret and extract user context +pub async fn ory_auth_middleware( + mut request: Request, + next: Next, +) -> Result { + let headers = request.headers(); + + // Get expected secret from environment + let expected_secret = std::env::var("OATHKEEPER_SECRET").unwrap_or_default(); + + // Validate Oathkeeper secret + let provided_secret = headers + .get("X-Oathkeeper-Secret") + .and_then(|v| v.to_str().ok()) + .unwrap_or_default(); + + // Debug logging for auth troubleshooting + let has_expected = !expected_secret.is_empty(); + let has_provided = !provided_secret.is_empty(); + let secrets_match = provided_secret == expected_secret; + + tracing::debug!( + has_expected_secret = has_expected, + has_provided_secret = has_provided, + secrets_match = secrets_match, + user_id = ?headers.get("X-User-ID").and_then(|v| v.to_str().ok()), + "Auth middleware validation" + ); + + if !expected_secret.is_empty() && provided_secret != expected_secret { + tracing::warn!( + expected_len = expected_secret.len(), + provided_len = provided_secret.len(), + "Invalid or missing Oathkeeper secret" + ); + return Err(StatusCode::UNAUTHORIZED); + } + + // Extract user context if present + if let Some(user_ctx) = UserContext::from_headers(headers) { + request.extensions_mut().insert(user_ctx); + } + + Ok(next.run(request).await) +} + +/// Extractor for optional user context +#[derive(Debug, Clone)] +pub struct OptionalUser(pub Option); + 
+impl axum::extract::FromRequestParts for OptionalUser +where + S: Send + Sync, +{ + type Rejection = std::convert::Infallible; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + _state: &S, + ) -> Result { + Ok(Self(parts.extensions.get::().cloned())) + } +} + +/// Extractor for required user context +#[derive(Debug, Clone)] +pub struct RequiredUser(pub UserContext); + +impl axum::extract::FromRequestParts for RequiredUser +where + S: Send + Sync, +{ + type Rejection = StatusCode; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + _state: &S, + ) -> Result { + parts + .extensions + .get::() + .cloned() + .map(Self) + .ok_or(StatusCode::UNAUTHORIZED) + } +} diff --git a/src/config.rs b/src/config.rs new file mode 100644 index 0000000..b82c690 --- /dev/null +++ b/src/config.rs @@ -0,0 +1,67 @@ +use std::env; + +/// Application configuration loaded from environment variables +#[derive(Debug, Clone)] +pub struct Config { + /// Server host + pub host: String, + /// Server port + pub port: u16, + /// Database URL + pub database_url: String, + /// Valkey/Redis URL + pub valkey_url: String, + /// Oathkeeper shared secret + pub oathkeeper_secret: String, + /// S3 endpoint + pub s3_endpoint: String, + /// S3 bucket name + pub s3_bucket: String, + /// S3 access key + pub s3_access_key: String, + /// S3 secret key + pub s3_secret_key: String, + /// Log level + pub log_level: String, + /// Google service account key (JSON, base64, or file path) + pub google_service_account_key: Option, + /// Google Calendar ID + pub google_calendar_id: Option, + /// Gmail impersonation email (for domain-wide delegation) + pub google_gmail_user: Option, + /// Wave API access token + pub wave_access_token: Option, + /// Wave business ID + pub wave_business_id: Option, +} + +impl Config { + /// Load configuration from environment variables + pub fn from_env() -> Result { + Ok(Self { + host: env::var("HOST").unwrap_or_else(|_| 
"0.0.0.0".to_string()), + port: env::var("PORT") + .unwrap_or_else(|_| "3000".to_string()) + .parse() + .unwrap_or(3000), + database_url: env::var("DATABASE_URL")?, + valkey_url: env::var("VALKEY_URL").unwrap_or_else(|_| "redis://localhost:6379".to_string()), + oathkeeper_secret: env::var("OATHKEEPER_SECRET").unwrap_or_default(), + s3_endpoint: env::var("S3_ENDPOINT").unwrap_or_default(), + s3_bucket: env::var("S3_BUCKET").unwrap_or_else(|_| "nexus".to_string()), + s3_access_key: env::var("S3_ACCESS_KEY").unwrap_or_default(), + s3_secret_key: env::var("S3_SECRET_KEY").unwrap_or_default(), + log_level: env::var("RUST_LOG").unwrap_or_else(|_| "info".to_string()), + google_service_account_key: env::var("GOOGLE_SERVICE_ACCOUNT_KEY").ok(), + google_calendar_id: env::var("GOOGLE_CALENDAR_ID").ok(), + google_gmail_user: env::var("GOOGLE_GMAIL_USER").ok(), + wave_access_token: env::var("WAVE_ACCESS_TOKEN").ok(), + wave_business_id: env::var("WAVE_BUSINESS_ID").ok(), + }) + } + + /// Get the server bind address + pub fn bind_addr(&self) -> String { + format!("{}:{}", self.host, self.port) + } +} diff --git a/src/db.rs b/src/db.rs new file mode 100644 index 0000000..2b2539c --- /dev/null +++ b/src/db.rs @@ -0,0 +1,88 @@ +use std::path::Path; +use std::sync::Arc; + +use sqlx::postgres::{PgPool, PgPoolOptions}; +use tokio::sync::RwLock; + +const VAULT_SECRETS_PATH: &str = "/vault/secrets/.env"; +const MAX_CONNECTIONS: u32 = 10; + +/// Database connection pool that supports credential refresh +#[derive(Clone)] +pub struct Database { + pool: Arc>, +} + +impl Database { + /// Create a new database connection using DATABASE_URL from environment + pub async fn connect() -> Result { + let database_url = std::env::var("DATABASE_URL") + .expect("DATABASE_URL must be set"); + + let pool = PgPoolOptions::new() + .max_connections(MAX_CONNECTIONS) + .connect(&database_url) + .await?; + + tracing::info!("Database connection established"); + + Ok(Self { + pool: Arc::new(RwLock::new(pool)), 
+ }) + } + + /// Get a reference to the current connection pool + pub async fn pool(&self) -> tokio::sync::RwLockReadGuard<'_, PgPool> { + self.pool.read().await + } + + /// Refresh the database connection with new credentials + /// Called when SIGHUP is received (Vault Agent rotated credentials) + pub async fn refresh(&self) -> Result<(), Box> { + tracing::info!("Refreshing database credentials..."); + + // Read the new DATABASE_URL from the Vault-rendered env file + let new_url = Self::read_database_url_from_file()?; + + // Create a new connection pool with the new credentials + let new_pool = PgPoolOptions::new() + .max_connections(MAX_CONNECTIONS) + .connect(&new_url) + .await?; + + // Test the new connection + sqlx::query("SELECT 1") + .execute(&new_pool) + .await?; + + // Swap the pools + let mut pool = self.pool.write().await; + let old_pool = std::mem::replace(&mut *pool, new_pool); + + // Close old connections gracefully + old_pool.close().await; + + tracing::info!("Database credentials refreshed successfully"); + Ok(()) + } + + /// Read DATABASE_URL from the Vault-rendered .env file + fn read_database_url_from_file() -> Result> { + let path = Path::new(VAULT_SECRETS_PATH); + + if !path.exists() { + return Err("Vault secrets file not found".into()); + } + + let content = std::fs::read_to_string(path)?; + + for line in content.lines() { + let line = line.trim(); + if line.starts_with("DATABASE_URL=") { + return Ok(line.trim_start_matches("DATABASE_URL=").to_string()); + } + } + + Err("DATABASE_URL not found in secrets file".into()) + } +} diff --git a/src/graphql/mod.rs b/src/graphql/mod.rs new file mode 100644 index 0000000..b49a6cc --- /dev/null +++ b/src/graphql/mod.rs @@ -0,0 +1,6 @@ +mod mutations; +mod queries; +mod schema; +pub mod types; + +pub use schema::*; diff --git a/src/graphql/mutations/account.rs b/src/graphql/mutations/account.rs new file mode 100644 index 0000000..b693445 --- /dev/null +++ b/src/graphql/mutations/account.rs @@ -0,0 +1,634 
@@ +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::auth::UserContext; +use crate::db::Database; +use crate::graphql::types::{AccountAddressType, AccountContactType, AccountType, EntityStatusType, RevenueType}; +use crate::models::{Account, AccountAddress, AccountContact, EntityStatus, EventType, Revenue}; +use crate::services::{EventPublisher, JobQueue}; + +/// Input for creating an account +#[derive(InputObject)] +pub struct CreateAccountInput { + pub customer_id: Uuid, + pub name: String, + pub status: Option, + pub start_date: Option, + pub end_date: Option, +} + +/// Input for updating an account +#[derive(InputObject)] +pub struct UpdateAccountInput { + pub name: Option, + pub status: Option, + pub start_date: Option, + pub end_date: Option, +} + +// ==================== CONTACT INPUT TYPES ==================== + +/// Input for creating an account contact +#[derive(InputObject)] +pub struct CreateAccountContactInput { + pub first_name: String, + pub last_name: String, + pub email: Option, + pub phone: Option, + pub notes: Option, + pub is_primary: Option, +} + +/// Input for updating an account contact +#[derive(InputObject)] +pub struct UpdateAccountContactInput { + pub first_name: Option, + pub last_name: Option, + pub email: Option, + pub phone: Option, + pub notes: Option, + pub is_primary: Option, + pub is_active: Option, +} + +// ==================== ADDRESS INPUT TYPES ==================== + +/// Input for creating an account address +#[derive(InputObject)] +pub struct CreateAccountAddressInput { + pub name: Option, + pub street_address: String, + pub city: String, + pub state: String, + pub zip_code: String, + pub notes: Option, + pub is_primary: Option, +} + +// ==================== REVENUE INPUT TYPES ==================== + +/// Input for creating a revenue +#[derive(InputObject)] +pub struct CreateRevenueInput { + pub amount: Decimal, + pub start_date: 
NaiveDate, + pub wave_service_id: Option, +} + +/// Input for updating a revenue +#[derive(InputObject)] +pub struct UpdateRevenueInput { + pub amount: Option, + pub start_date: Option, + pub end_date: Option, + pub wave_service_id: Option, +} + +#[derive(Default)] +pub struct AccountMutation; + +#[Object] +impl AccountMutation { + /// Create a new account + async fn create_account( + &self, + ctx: &Context<'_>, + input: CreateAccountInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let status = input.status.map(EntityStatus::from).unwrap_or(EntityStatus::Active); + + let account: Account = sqlx::query_as::<_, Account>( + r#" + INSERT INTO accounts ( + id, created_at, updated_at, customer_id, name, status, start_date, end_date + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5 + ) + RETURNING id, created_at, updated_at, customer_id, name, status, start_date, end_date + "#, + ) + .bind(input.customer_id) + .bind(&input.name) + .bind(status) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + // Publish AccountCreated event + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::AccountCreated, + "account", + account.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(AccountType::from(account)) + } + + /// Update an account + async fn update_account( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateAccountInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get old status for event tracking + let old_status: Option = sqlx::query_scalar( + "SELECT status FROM accounts WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let new_status = input.status.map(EntityStatus::from); + + let account: Account = sqlx::query_as::<_, Account>( + r#" + UPDATE accounts + SET + name = COALESCE($2, name), + status = 
COALESCE($3, status), + start_date = COALESCE($4, start_date), + end_date = COALESCE($5, end_date), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, customer_id, name, status, start_date, end_date + "#, + ) + .bind(id) + .bind(&input.name) + .bind(new_status) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + // Publish event for status change + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + if let (Some(old), Some(new)) = (old_status, new_status) { + if old != new { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::AccountStatusChanged, + "account", + account.base.id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "old_status": format!("{:?}", old), + "new_status": format!("{:?}", new) + })), + ) + .await; + } + } + } + + Ok(AccountType::from(account)) + } + + /// Delete an account (cascades to addresses, contacts, revenues, services) + async fn delete_account(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete services first (they reference account_id) + sqlx::query("DELETE FROM services WHERE account_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete revenues + sqlx::query("DELETE FROM revenues WHERE account_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete contacts + sqlx::query("DELETE FROM account_contacts WHERE account_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete addresses (need to delete child records first) + // Get all address IDs for this account + let address_ids: Vec = sqlx::query_scalar( + "SELECT id FROM account_addresses WHERE account_id = $1", + ) + .bind(id) + .fetch_all(&*pool) + .await?; + + for address_id in address_ids { + // Delete labor rates + sqlx::query("DELETE FROM labor WHERE account_address_id = $1") + .bind(address_id) + .execute(&*pool) + .await?; + + // Delete schedules + 
sqlx::query("DELETE FROM schedules WHERE account_address_id = $1") + .bind(address_id) + .execute(&*pool) + .await?; + + // Delete service scope tasks and areas first + let scope_ids: Vec = sqlx::query_scalar( + "SELECT id FROM service_scopes WHERE account_address_id = $1", + ) + .bind(address_id) + .fetch_all(&*pool) + .await?; + + for scope_id in scope_ids { + let area_ids: Vec = sqlx::query_scalar( + "SELECT id FROM service_scope_areas WHERE scope_id = $1", + ) + .bind(scope_id) + .fetch_all(&*pool) + .await?; + + for area_id in area_ids { + sqlx::query("DELETE FROM service_scope_tasks WHERE area_id = $1") + .bind(area_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM service_scope_areas WHERE scope_id = $1") + .bind(scope_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM service_scopes WHERE account_address_id = $1") + .bind(address_id) + .execute(&*pool) + .await?; + } + + // Now delete addresses + sqlx::query("DELETE FROM account_addresses WHERE account_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Finally delete the account + let result = sqlx::query("DELETE FROM accounts WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Publish AccountDeleted event + if result.rows_affected() > 0 { + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::AccountDeleted, + "account", + id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + } + + Ok(result.rows_affected() > 0) + } + + // ==================== ACCOUNT CONTACT MUTATIONS ==================== + + /// Create a new contact for an account + async fn create_account_contact( + &self, + ctx: &Context<'_>, + account_id: Uuid, + input: CreateAccountContactInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // If this contact is primary, unset other primary contacts + if input.is_primary.unwrap_or(false) { + sqlx::query( + 
"UPDATE account_contacts SET is_primary = false WHERE account_id = $1 AND is_primary = true", + ) + .bind(account_id) + .execute(&*pool) + .await?; + } + + let contact: AccountContact = sqlx::query_as::<_, AccountContact>( + r#" + INSERT INTO account_contacts ( + id, created_at, updated_at, account_id, first_name, last_name, + phone, email, is_active, is_primary, notes + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, true, $6, $7 + ) + RETURNING id, created_at, updated_at, account_id, first_name, last_name, + phone, email, is_active, is_primary, notes + "#, + ) + .bind(account_id) + .bind(&input.first_name) + .bind(&input.last_name) + .bind(&input.phone) + .bind(&input.email) + .bind(input.is_primary.unwrap_or(false)) + .bind(&input.notes) + .fetch_one(&*pool) + .await?; + + Ok(AccountContactType::from(contact)) + } + + /// Update an account contact + async fn update_account_contact( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateAccountContactInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // If setting this contact as primary, unset other primary contacts + if input.is_primary == Some(true) { + let account_id: Uuid = + sqlx::query_scalar("SELECT account_id FROM account_contacts WHERE id = $1") + .bind(id) + .fetch_one(&*pool) + .await?; + + sqlx::query( + "UPDATE account_contacts SET is_primary = false WHERE account_id = $1 AND id != $2 AND is_primary = true", + ) + .bind(account_id) + .bind(id) + .execute(&*pool) + .await?; + } + + let contact: AccountContact = sqlx::query_as::<_, AccountContact>( + r#" + UPDATE account_contacts + SET + first_name = COALESCE($2, first_name), + last_name = COALESCE($3, last_name), + email = COALESCE($4, email), + phone = COALESCE($5, phone), + notes = COALESCE($6, notes), + is_primary = COALESCE($7, is_primary), + is_active = COALESCE($8, is_active), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, account_id, first_name, last_name, + 
phone, email, is_active, is_primary, notes + "#, + ) + .bind(id) + .bind(&input.first_name) + .bind(&input.last_name) + .bind(&input.email) + .bind(&input.phone) + .bind(&input.notes) + .bind(input.is_primary) + .bind(input.is_active) + .fetch_one(&*pool) + .await?; + + Ok(AccountContactType::from(contact)) + } + + /// Delete an account contact + async fn delete_account_contact(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM account_contacts WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== ACCOUNT ADDRESS MUTATIONS ==================== + + /// Create a new address for an account + async fn create_account_address( + &self, + ctx: &Context<'_>, + account_id: Uuid, + input: CreateAccountAddressInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // If this address is primary, unset other primary addresses + if input.is_primary.unwrap_or(false) { + sqlx::query( + "UPDATE account_addresses SET is_primary = false WHERE account_id = $1 AND is_primary = true", + ) + .bind(account_id) + .execute(&*pool) + .await?; + } + + let address: AccountAddress = sqlx::query_as::<_, AccountAddress>( + r#" + INSERT INTO account_addresses ( + id, created_at, updated_at, account_id, street_address, city, state, + zip_code, is_active, is_primary, name, notes + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, true, $6, $7, $8 + ) + RETURNING id, created_at, updated_at, account_id, street_address, city, state, + zip_code, is_active, is_primary, name, notes + "#, + ) + .bind(account_id) + .bind(&input.street_address) + .bind(&input.city) + .bind(&input.state) + .bind(&input.zip_code) + .bind(input.is_primary.unwrap_or(false)) + .bind(&input.name) + .bind(&input.notes) + .fetch_one(&*pool) + .await?; + + Ok(AccountAddressType::from(address)) + } + + /// Delete an account 
address (cascades to labor, schedules, scopes) + async fn delete_account_address(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete labor rates + sqlx::query("DELETE FROM labor WHERE account_address_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete schedules + sqlx::query("DELETE FROM schedules WHERE account_address_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete service scope tasks and areas first + let scope_ids: Vec = sqlx::query_scalar( + "SELECT id FROM service_scopes WHERE account_address_id = $1", + ) + .bind(id) + .fetch_all(&*pool) + .await?; + + for scope_id in scope_ids { + let area_ids: Vec = sqlx::query_scalar( + "SELECT id FROM service_scope_areas WHERE scope_id = $1", + ) + .bind(scope_id) + .fetch_all(&*pool) + .await?; + + for area_id in area_ids { + sqlx::query("DELETE FROM service_scope_tasks WHERE area_id = $1") + .bind(area_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM service_scope_areas WHERE scope_id = $1") + .bind(scope_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM service_scopes WHERE account_address_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Finally delete the address + let result = sqlx::query("DELETE FROM account_addresses WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== REVENUE MUTATIONS ==================== + + /// Create a new revenue for an account (deactivates existing active revenues) + async fn create_revenue( + &self, + ctx: &Context<'_>, + account_id: Uuid, + input: CreateRevenueInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // End any existing active revenues for this account + let today = chrono::Utc::now().date_naive(); + sqlx::query( + r#" + UPDATE revenues + SET end_date = $2, updated_at = NOW() + WHERE account_id = $1 AND (end_date IS NULL OR end_date > 
$2) + "#, + ) + .bind(account_id) + .bind(today) + .execute(&*pool) + .await?; + + let revenue: Revenue = sqlx::query_as::<_, Revenue>( + r#" + INSERT INTO revenues (id, created_at, updated_at, account_id, amount, start_date, end_date, wave_service_id) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, NULL, $4) + RETURNING id, created_at, updated_at, account_id, amount, start_date, end_date, wave_service_id + "#, + ) + .bind(account_id) + .bind(input.amount) + .bind(input.start_date) + .bind(&input.wave_service_id) + .fetch_one(&*pool) + .await?; + + Ok(RevenueType::from(revenue)) + } + + /// Update a revenue + async fn update_revenue( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateRevenueInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let revenue: Revenue = sqlx::query_as::<_, Revenue>( + r#" + UPDATE revenues + SET + amount = COALESCE($2, amount), + start_date = COALESCE($3, start_date), + end_date = COALESCE($4, end_date), + wave_service_id = COALESCE($5, wave_service_id), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, account_id, amount, start_date, end_date, wave_service_id + "#, + ) + .bind(id) + .bind(input.amount) + .bind(input.start_date) + .bind(input.end_date) + .bind(&input.wave_service_id) + .fetch_one(&*pool) + .await?; + + Ok(RevenueType::from(revenue)) + } + + /// Delete a revenue + async fn delete_revenue(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM revenues WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } +} diff --git a/src/graphql/mutations/calendar.rs b/src/graphql/mutations/calendar.rs new file mode 100644 index 0000000..a457d43 --- /dev/null +++ b/src/graphql/mutations/calendar.rs @@ -0,0 +1,302 @@ +use async_graphql::{Context, Object, Result}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::db::Database; +use 
crate::graphql::types::{ + CalendarEventType, CreateCalendarEventInput, UpdateCalendarEventInput, +}; +use crate::models::{Project, Service}; +use crate::services::google_calendar::{CreateEventRequest, GoogleCalendarService, UpdateEventRequest}; + +#[derive(Default)] +pub struct CalendarMutation; + +#[Object] +impl CalendarMutation { + /// Create a new calendar event + async fn create_calendar_event( + &self, + ctx: &Context<'_>, + input: CreateCalendarEventInput, + ) -> Result { + let calendar = ctx.data::>()?; + + let request = CreateEventRequest::from(input); + let event = calendar.create_event(request).await?; + + Ok(CalendarEventType::from(event)) + } + + /// Update a calendar event + async fn update_calendar_event( + &self, + ctx: &Context<'_>, + event_id: String, + input: UpdateCalendarEventInput, + ) -> Result { + let calendar = ctx.data::>()?; + + let request = UpdateEventRequest::from(input); + let event = calendar.update_event(&event_id, request).await?; + + Ok(CalendarEventType::from(event)) + } + + /// Delete a calendar event + async fn delete_calendar_event( + &self, + ctx: &Context<'_>, + event_id: String, + ) -> Result { + let calendar = ctx.data::>()?; + + calendar.delete_event(&event_id).await?; + + Ok(true) + } + + /// Link a calendar event to a service + async fn link_calendar_event_to_service( + &self, + ctx: &Context<'_>, + service_id: Uuid, + event_id: String, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + sqlx::query( + r#" + UPDATE services + SET calendar_event_id = $2, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(service_id) + .bind(&event_id) + .execute(&*pool) + .await?; + + Ok(true) + } + + /// Unlink a calendar event from a service + async fn unlink_calendar_event_from_service( + &self, + ctx: &Context<'_>, + service_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + sqlx::query( + r#" + UPDATE services + SET calendar_event_id = NULL, updated_at = NOW() + WHERE id 
= $1 + "#, + ) + .bind(service_id) + .execute(&*pool) + .await?; + + Ok(true) + } + + /// Link a calendar event to a project + async fn link_calendar_event_to_project( + &self, + ctx: &Context<'_>, + project_id: Uuid, + event_id: String, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + sqlx::query( + r#" + UPDATE projects + SET calendar_event_id = $2, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(project_id) + .bind(&event_id) + .execute(&*pool) + .await?; + + Ok(true) + } + + /// Unlink a calendar event from a project + async fn unlink_calendar_event_from_project( + &self, + ctx: &Context<'_>, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + sqlx::query( + r#" + UPDATE projects + SET calendar_event_id = NULL, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(project_id) + .execute(&*pool) + .await?; + + Ok(true) + } + + /// Create a calendar event for a service and link it + async fn create_service_calendar_event( + &self, + ctx: &Context<'_>, + service_id: Uuid, + input: CreateCalendarEventInput, + ) -> Result { + let db = ctx.data::()?; + let calendar = ctx.data::>()?; + let pool = db.pool().await; + + // Create the event + let request = CreateEventRequest::from(input); + let event = calendar.create_event(request).await?; + + // Link to service + sqlx::query( + r#" + UPDATE services + SET calendar_event_id = $2, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(service_id) + .bind(&event.id) + .execute(&*pool) + .await?; + + Ok(CalendarEventType::from(event)) + } + + /// Create a calendar event for a project and link it + async fn create_project_calendar_event( + &self, + ctx: &Context<'_>, + project_id: Uuid, + input: CreateCalendarEventInput, + ) -> Result { + let db = ctx.data::()?; + let calendar = ctx.data::>()?; + let pool = db.pool().await; + + // Create the event + let request = CreateEventRequest::from(input); + let event = calendar.create_event(request).await?; + + 
// Link to project + sqlx::query( + r#" + UPDATE projects + SET calendar_event_id = $2, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(project_id) + .bind(&event.id) + .execute(&*pool) + .await?; + + Ok(CalendarEventType::from(event)) + } + + /// Delete a service's calendar event and unlink it + async fn delete_service_calendar_event( + &self, + ctx: &Context<'_>, + service_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let calendar = ctx.data::>()?; + let pool = db.pool().await; + + // Get the current calendar event ID + let service = sqlx::query_as::<_, Service>( + r#" + SELECT id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id + FROM services + WHERE id = $1 + "#, + ) + .bind(service_id) + .fetch_one(&*pool) + .await?; + + if let Some(event_id) = service.calendar_event_id { + // Delete from Google Calendar + calendar.delete_event(&event_id).await?; + + // Unlink from service + sqlx::query( + r#" + UPDATE services + SET calendar_event_id = NULL, updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(service_id) + .execute(&*pool) + .await?; + } + + Ok(true) + } + + /// Delete a project's calendar event and unlink it + async fn delete_project_calendar_event( + &self, + ctx: &Context<'_>, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let calendar = ctx.data::>()?; + let pool = db.pool().await; + + // Get the current calendar event ID + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(project_id) + .fetch_one(&*pool) + .await?; + + if let Some(event_id) = project.calendar_event_id { + // Delete from Google Calendar + calendar.delete_event(&event_id).await?; + + // Unlink from project + sqlx::query( + r#" + UPDATE projects + SET calendar_event_id = NULL, 
updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(project_id) + .execute(&*pool) + .await?; + } + + Ok(true) + } +} diff --git a/src/graphql/mutations/customer.rs b/src/graphql/mutations/customer.rs new file mode 100644 index 0000000..3ce456d --- /dev/null +++ b/src/graphql/mutations/customer.rs @@ -0,0 +1,432 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::auth::UserContext; +use crate::db::Database; +use crate::graphql::types::{ + CreateCustomerAddressInput, CreateCustomerContactInput, CreateCustomerInput, + CustomerAddressType, CustomerContactType, CustomerType, UpdateCustomerAddressInput, + UpdateCustomerContactInput, UpdateCustomerInput, +}; +use crate::models::{Customer, CustomerAddress, CustomerContact, EntityStatus, EventType}; +use crate::services::{EventPublisher, JobQueue}; + +#[derive(Default)] +pub struct CustomerMutation; + +#[Object] +impl CustomerMutation { + // ==================== CUSTOMER MUTATIONS ==================== + + /// Create a new customer + async fn create_customer( + &self, + ctx: &Context<'_>, + input: CreateCustomerInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer: Customer = sqlx::query_as::<_, Customer>( + r#" + INSERT INTO customers ( + id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7 + ) + RETURNING id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + "#, + ) + .bind(&input.name) + .bind(input.status.map(EntityStatus::from).unwrap_or(EntityStatus::Active)) + .bind(input.start_date) + .bind(input.end_date) + .bind(&input.billing_terms) + .bind(&input.billing_email) + .bind(&input.wave_customer_id) + .fetch_one(&*pool) + .await?; + + // Publish CustomerCreated event + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = 
EventPublisher::publish_and_queue(
                &pool,
                job_queue,
                EventType::CustomerCreated,
                "customer",
                customer.base.id,
                Some(("team_profile", user.user_id)),
                None,
            )
            .await;
        }

        Ok(CustomerType::from(customer))
    }

    /// Update a customer. Unset input fields keep their stored value via COALESCE
    /// (NOTE(review): this also means a field can never be reset to NULL here).
    async fn update_customer(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateCustomerInput,
    ) -> Result<CustomerType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Get old status for event tracking
        let old_status: Option<EntityStatus> = sqlx::query_scalar(
            "SELECT status FROM customers WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        let new_status = input.status.map(EntityStatus::from);

        // Build dynamic update query
        let customer: Customer = sqlx::query_as::<_, Customer>(
            r#"
            UPDATE customers
            SET
                name = COALESCE($2, name),
                status = COALESCE($3, status),
                billing_email = COALESCE($4, billing_email),
                billing_terms = COALESCE($5, billing_terms),
                wave_customer_id = COALESCE($6, wave_customer_id),
                start_date = COALESCE($7, start_date),
                end_date = COALESCE($8, end_date),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, name, status, start_date, end_date,
                      billing_terms, billing_email, wave_customer_id
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(new_status)
        .bind(&input.billing_email)
        .bind(&input.billing_terms)
        .bind(&input.wave_customer_id)
        .bind(input.start_date)
        .bind(input.end_date)
        .fetch_one(&*pool)
        .await?;

        // Publish event only when the status actually changed
        if let (Ok(user), Ok(job_queue)) = (ctx.data::<UserContext>(), ctx.data::<JobQueue>()) {
            if let (Some(old), Some(new)) = (old_status, new_status) {
                if old != new {
                    let _ = EventPublisher::publish_and_queue(
                        &pool,
                        job_queue,
                        EventType::CustomerStatusChanged,
                        "customer",
                        customer.base.id,
                        Some(("team_profile", user.user_id)),
                        Some(serde_json::json!({
                            "old_status": format!("{:?}", old),
                            "new_status": format!("{:?}", new)
                        })),
                    )
                    .await;
                }
            }
        }

        Ok(CustomerType::from(customer))
    }

    /// Delete a customer (cascades to contacts and addresses; accounts are unlinked).
    ///
    /// NOTE(review): the four statements below run outside a transaction, so a
    /// mid-sequence failure leaves partial deletes — consider wrapping in one.
    async fn delete_customer(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Delete contacts first
        sqlx::query("DELETE FROM customer_contacts WHERE customer_id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        // Delete addresses
        sqlx::query("DELETE FROM customer_addresses WHERE customer_id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        // Unlink accounts (set customer_id to NULL instead of deleting)
        sqlx::query("UPDATE accounts SET customer_id = NULL WHERE customer_id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        // Delete customer
        let result = sqlx::query("DELETE FROM customers WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        // Publish CustomerDeleted event (best-effort)
        if result.rows_affected() > 0 {
            if let (Ok(user), Ok(job_queue)) = (ctx.data::<UserContext>(), ctx.data::<JobQueue>()) {
                let _ = EventPublisher::publish_and_queue(
                    &pool,
                    job_queue,
                    EventType::CustomerDeleted,
                    "customer",
                    id,
                    Some(("team_profile", user.user_id)),
                    None,
                )
                .await;
            }
        }

        Ok(result.rows_affected() > 0)
    }

    // ==================== CONTACT MUTATIONS ====================

    /// Create a new contact for a customer; a primary contact demotes existing primaries.
    async fn create_customer_contact(
        &self,
        ctx: &Context<'_>,
        customer_id: Uuid,
        input: CreateCustomerContactInput,
    ) -> Result<CustomerContactType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // If this contact is primary, unset other primary contacts
        if input.is_primary.unwrap_or(false) {
            sqlx::query(
                "UPDATE customer_contacts SET is_primary = false WHERE customer_id = $1 AND is_primary = true",
            )
            .bind(customer_id)
            .execute(&*pool)
            .await?;
        }

        let contact: CustomerContact = sqlx::query_as::<_, CustomerContact>(
            r#"
            INSERT INTO customer_contacts (
                id, created_at, updated_at, customer_id, first_name, last_name,
                phone, email, is_active, is_primary, notes
            )
            VALUES (
                gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, true, $6, $7
            )
            RETURNING id, created_at, updated_at, customer_id, first_name, last_name,
                      phone, email, is_active, is_primary, notes
            "#,
        )
        .bind(customer_id)
        .bind(&input.first_name)
        .bind(&input.last_name)
        .bind(&input.phone)
        .bind(&input.email)
        .bind(input.is_primary.unwrap_or(false))
        .bind(&input.notes)
        .fetch_one(&*pool)
        .await?;

        Ok(CustomerContactType::from(contact))
    }

    /// Update a customer contact; promoting to primary demotes siblings.
    async fn update_customer_contact(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateCustomerContactInput,
    ) -> Result<CustomerContactType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // If setting this contact as primary, unset other primary contacts
        if input.is_primary == Some(true) {
            // First get the customer_id for this contact
            let customer_id: Uuid =
                sqlx::query_scalar("SELECT customer_id FROM customer_contacts WHERE id = $1")
                    .bind(id)
                    .fetch_one(&*pool)
                    .await?;

            sqlx::query(
                "UPDATE customer_contacts SET is_primary = false WHERE customer_id = $1 AND id != $2 AND is_primary = true",
            )
            .bind(customer_id)
            .bind(id)
            .execute(&*pool)
            .await?;
        }

        let contact: CustomerContact = sqlx::query_as::<_, CustomerContact>(
            r#"
            UPDATE customer_contacts
            SET
                first_name = COALESCE($2, first_name),
                last_name = COALESCE($3, last_name),
                email = COALESCE($4, email),
                phone = COALESCE($5, phone),
                notes = COALESCE($6, notes),
                is_primary = COALESCE($7, is_primary),
                is_active = COALESCE($8, is_active),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, customer_id, first_name, last_name,
                      phone, email, is_active, is_primary, notes
            "#,
        )
        .bind(id)
        .bind(&input.first_name)
        .bind(&input.last_name)
        .bind(&input.email)
        .bind(&input.phone)
        .bind(&input.notes)
        .bind(input.is_primary)
        .bind(input.is_active)
        .fetch_one(&*pool)
        .await?;

        Ok(CustomerContactType::from(contact))
    }

    /// Delete a customer contact. Returns true when a row was removed.
    async fn delete_customer_contact(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let result = sqlx::query("DELETE FROM customer_contacts WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== ADDRESS MUTATIONS ====================

    /// Create a new address for a customer; a primary address demotes existing primaries.
    async fn create_customer_address(
        &self,
        ctx: &Context<'_>,
        customer_id: Uuid,
        input: CreateCustomerAddressInput,
    ) -> Result<CustomerAddressType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // If this address is primary, unset other primary addresses
        if input.is_primary.unwrap_or(false) {
            sqlx::query(
                "UPDATE customer_addresses SET is_primary = false WHERE customer_id = $1 AND is_primary = true",
            )
            .bind(customer_id)
            .execute(&*pool)
            .await?;
        }

        let address: CustomerAddress = sqlx::query_as::<_, CustomerAddress>(
            r#"
            INSERT INTO customer_addresses (
                id, created_at, updated_at, customer_id, street_address, city, state,
                zip_code, is_active, is_primary, name, notes
            )
            VALUES (
                gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, true, $6, $7, $8
            )
            RETURNING id, created_at, updated_at, customer_id, street_address, city, state,
                      zip_code, is_active, is_primary, name, notes
            "#,
        )
        .bind(customer_id)
        .bind(&input.street_address)
        .bind(&input.city)
        .bind(&input.state)
        .bind(&input.zip_code)
        .bind(input.is_primary.unwrap_or(false))
        .bind(&input.name)
        .bind(&input.notes)
        .fetch_one(&*pool)
        .await?;

        Ok(CustomerAddressType::from(address))
    }

    /// Update a customer address; promoting to primary demotes siblings.
    async fn update_customer_address(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateCustomerAddressInput,
    ) -> Result<CustomerAddressType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // If setting this address as primary, unset other primary addresses
        if input.is_primary == Some(true) {
            let customer_id: Uuid =
                sqlx::query_scalar("SELECT customer_id FROM customer_addresses WHERE id = $1")
                    .bind(id)
                    .fetch_one(&*pool)
                    .await?;

            sqlx::query(
                "UPDATE customer_addresses SET is_primary = false WHERE customer_id = $1 AND id != $2 AND is_primary = true",
            )
            .bind(customer_id)
            .bind(id)
            .execute(&*pool)
            .await?;
        }

        let address: CustomerAddress = sqlx::query_as::<_, CustomerAddress>(
            r#"
            UPDATE customer_addresses
            SET
                name = COALESCE($2, name),
                street_address = COALESCE($3, street_address),
                city = COALESCE($4, city),
                state = COALESCE($5, state),
                zip_code = COALESCE($6, zip_code),
                notes = COALESCE($7, notes),
                is_primary = COALESCE($8, is_primary),
                is_active = COALESCE($9, is_active),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, customer_id, street_address, city, state,
                      zip_code, is_active, is_primary, name, notes
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(&input.street_address)
        .bind(&input.city)
        .bind(&input.state)
        .bind(&input.zip_code)
        .bind(&input.notes)
        .bind(input.is_primary)
        .bind(input.is_active)
        .fetch_one(&*pool)
        .await?;

        Ok(CustomerAddressType::from(address))
    }

    /// Delete a customer address. Returns true when a row was removed.
    async fn delete_customer_address(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let result = sqlx::query("DELETE FROM customer_addresses WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }
}
diff --git a/src/graphql/mutations/email.rs b/src/graphql/mutations/email.rs
new file mode 100644
index 0000000..3267a77
--- /dev/null
+++ b/src/graphql/mutations/email.rs
use async_graphql::{Context, Object, Result};
use std::collections::HashMap;
use std::sync::Arc;

use crate::graphql::types::{
    EmailTemplateType, SendEmailInput, SendEmailResult, SendTemplateEmailInput,
};
+use crate::services::email_templates::EmailTemplateRegistry; +use crate::services::gmail::{GmailService, SendEmailRequest}; + +#[derive(Default)] +pub struct EmailMutation; + +#[Object] +impl EmailMutation { + /// Send an email + async fn send_email( + &self, + ctx: &Context<'_>, + input: SendEmailInput, + ) -> Result { + let gmail = ctx.data::>()?; + + let request = SendEmailRequest { + to: input.to, + cc: input.cc, + bcc: input.bcc, + subject: input.subject, + body: input.body, + content_type: input.content_type, + from_name: input.from_name, + attachments: None, + }; + + match gmail.send_email(request).await { + Ok(response) => Ok(SendEmailResult { + success: true, + message_id: Some(response.id), + thread_id: Some(response.thread_id), + error: None, + }), + Err(e) => Ok(SendEmailResult { + success: false, + message_id: None, + thread_id: None, + error: Some(e.to_string()), + }), + } + } + + /// Send an email using a template + async fn send_template_email( + &self, + ctx: &Context<'_>, + input: SendTemplateEmailInput, + ) -> Result { + let gmail = ctx.data::>()?; + let registry = ctx.data::()?; + + // Get the template + let template = match registry.get(&input.template_id) { + Some(t) => t, + None => { + return Ok(SendEmailResult { + success: false, + message_id: None, + thread_id: None, + error: Some(format!("Template not found: {}", input.template_id)), + }); + } + }; + + // Convert variables to HashMap + let variables: HashMap = input + .variables + .into_iter() + .map(|v| (v.name, v.value)) + .collect(); + + // Render the template + let (subject, body) = match template.render(&variables) { + Ok(result) => result, + Err(e) => { + return Ok(SendEmailResult { + success: false, + message_id: None, + thread_id: None, + error: Some(e.to_string()), + }); + } + }; + + // Send the email + let request = SendEmailRequest { + to: input.to, + cc: input.cc, + bcc: input.bcc, + subject, + body, + content_type: Some(template.content_type.clone()), + from_name: 
input.from_name, + attachments: None, + }; + + match gmail.send_email(request).await { + Ok(response) => Ok(SendEmailResult { + success: true, + message_id: Some(response.id), + thread_id: Some(response.thread_id), + error: None, + }), + Err(e) => Ok(SendEmailResult { + success: false, + message_id: None, + thread_id: None, + error: Some(e.to_string()), + }), + } + } + + /// List all available email templates + async fn list_email_templates( + &self, + ctx: &Context<'_>, + ) -> Result> { + let registry = ctx.data::()?; + + let templates: Vec = registry + .list() + .into_iter() + .map(EmailTemplateType::from) + .collect(); + + Ok(templates) + } + + /// Get a specific email template by ID + async fn get_email_template( + &self, + ctx: &Context<'_>, + template_id: String, + ) -> Result> { + let registry = ctx.data::()?; + + Ok(registry.get(&template_id).map(EmailTemplateType::from)) + } +} diff --git a/src/graphql/mutations/invoice.rs b/src/graphql/mutations/invoice.rs new file mode 100644 index 0000000..b2a1611 --- /dev/null +++ b/src/graphql/mutations/invoice.rs @@ -0,0 +1,457 @@ +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{InvoiceProjectType, InvoiceRevenueType, InvoiceStatusType, InvoiceType}; +use crate::models::{EventType, Invoice, InvoiceProject, InvoiceRevenue, InvoiceStatus}; +use crate::services::EventPublisher; +use crate::auth::UserContext; + +/// Input for creating an invoice +#[derive(InputObject)] +pub struct CreateInvoiceInput { + pub customer_id: Uuid, + pub start_date: NaiveDate, + pub end_date: NaiveDate, +} + +/// Input for updating an invoice +#[derive(InputObject)] +pub struct UpdateInvoiceInput { + pub start_date: Option, + pub end_date: Option, + pub status: Option, + pub date_paid: Option, + pub wave_invoice_id: Option, +} + +#[derive(Default)] +pub struct InvoiceMutation; + +#[Object] +impl 
InvoiceMutation { + /// Create a new invoice for a customer + async fn create_invoice(&self, ctx: &Context<'_>, input: CreateInvoiceInput) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Validate date range + if input.end_date < input.start_date { + return Err("End date must be on or after start date".into()); + } + + let invoice: Invoice = sqlx::query_as::<_, Invoice>( + r#" + INSERT INTO invoices ( + id, created_at, updated_at, customer_id, start_date, end_date, status + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, 'DRAFT' + ) + RETURNING id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + "#, + ) + .bind(input.customer_id) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(user) = ctx.data::() { + let _ = EventPublisher::publish( + &pool, + EventType::InvoiceCreated, + "invoice", + invoice.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(InvoiceType::from(invoice)) + } + + /// Update an invoice + async fn update_invoice( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateInvoiceInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Validate date range if both provided + if let (Some(start), Some(end)) = (input.start_date, input.end_date) { + if end < start { + return Err("End date must be on or after start date".into()); + } + } + + // Get old status for event tracking + let old_status: Option = sqlx::query_scalar( + "SELECT status FROM invoices WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let new_status = input.status.map(InvoiceStatus::from); + + let invoice: Invoice = sqlx::query_as::<_, Invoice>( + r#" + UPDATE invoices + SET + start_date = COALESCE($2, start_date), + end_date = COALESCE($3, end_date), + status = COALESCE($4, status), + date_paid = COALESCE($5, date_paid), + 
wave_invoice_id = COALESCE($6, wave_invoice_id), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + "#, + ) + .bind(id) + .bind(input.start_date) + .bind(input.end_date) + .bind(new_status) + .bind(input.date_paid) + .bind(input.wave_invoice_id) + .fetch_one(&*pool) + .await?; + + // Publish events for status changes + if let Ok(user) = ctx.data::() { + if let (Some(old), Some(new)) = (old_status, new_status) { + if old != new { + let event_type = match new { + InvoiceStatus::Sent => Some(EventType::InvoiceSent), + InvoiceStatus::Paid => Some(EventType::InvoicePaid), + _ => None, + }; + + if let Some(event_type) = event_type { + let _ = EventPublisher::publish( + &pool, + event_type, + "invoice", + invoice.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + } + } + } + + Ok(InvoiceType::from(invoice)) + } + + /// Delete an invoice and all its entries + async fn delete_invoice(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete entries first (cascade should handle this, but be explicit) + sqlx::query("DELETE FROM invoice_revenues WHERE invoice_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + sqlx::query("DELETE FROM invoice_projects WHERE invoice_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + let result = sqlx::query("DELETE FROM invoices WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Add a revenue to an invoice with snapshot amount + async fn add_revenue_to_invoice( + &self, + ctx: &Context<'_>, + invoice_id: Uuid, + revenue_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the invoice to verify customer + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, 
payment_type, wave_invoice_id + FROM invoices WHERE id = $1 + "#, + ) + .bind(invoice_id) + .fetch_optional(&*pool) + .await? + .ok_or("Invoice not found")?; + + // Verify the revenue belongs to an account under this customer + let revenue_info: Option = sqlx::query_as::<_, RevenueInfo>( + r#" + SELECT r.amount, a.customer_id + FROM revenues r + JOIN accounts a ON a.id = r.account_id + WHERE r.id = $1 + "#, + ) + .bind(revenue_id) + .fetch_optional(&*pool) + .await?; + + let revenue_info = revenue_info.ok_or("Revenue not found")?; + + if revenue_info.customer_id != invoice.customer_id { + return Err("Revenue does not belong to an account under this customer".into()); + } + + // Insert the invoice revenue with snapshot amount + let entry: InvoiceRevenue = sqlx::query_as::<_, InvoiceRevenue>( + r#" + INSERT INTO invoice_revenues (id, invoice_id, revenue_id, amount, created_at) + VALUES (gen_random_uuid(), $1, $2, $3, NOW()) + RETURNING id, invoice_id, revenue_id, amount, created_at + "#, + ) + .bind(invoice_id) + .bind(revenue_id) + .bind(revenue_info.amount) + .fetch_one(&*pool) + .await?; + + Ok(InvoiceRevenueType::from(entry)) + } + + /// Add a project to an invoice with snapshot amount + async fn add_project_to_invoice( + &self, + ctx: &Context<'_>, + invoice_id: Uuid, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the invoice to verify customer + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + FROM invoices WHERE id = $1 + "#, + ) + .bind(invoice_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Invoice not found")?; + + // Verify the project belongs to this customer and is completed + let project_info: Option = sqlx::query_as::<_, ProjectInfo>( + r#" + SELECT customer_id, amount, status + FROM projects + WHERE id = $1 + "#, + ) + .bind(project_id) + .fetch_optional(&*pool) + .await?; + + let project_info = project_info.ok_or("Project not found")?; + + if project_info.customer_id != invoice.customer_id { + return Err("Project does not belong to this customer".into()); + } + + if project_info.status != "COMPLETED" { + return Err("Project must be completed before adding to invoice".into()); + } + + let amount = project_info.amount.unwrap_or(Decimal::ZERO); + + // Insert the invoice project with snapshot amount + let entry: InvoiceProject = sqlx::query_as::<_, InvoiceProject>( + r#" + INSERT INTO invoice_projects (id, invoice_id, project_id, amount, created_at) + VALUES (gen_random_uuid(), $1, $2, $3, NOW()) + RETURNING id, invoice_id, project_id, amount, created_at + "#, + ) + .bind(invoice_id) + .bind(project_id) + .bind(amount) + .fetch_one(&*pool) + .await?; + + Ok(InvoiceProjectType::from(entry)) + } + + /// Remove a revenue from an invoice + async fn remove_revenue_from_invoice(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM invoice_revenues WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Remove a project from an invoice + async fn remove_project_from_invoice(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM invoice_projects WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Add all eligible revenues to an invoice at once + /// Returns the number of revenues added + async fn add_all_eligible_revenues_to_invoice( + &self, + ctx: &Context<'_>, + 
invoice_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the invoice + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + FROM invoices WHERE id = $1 + "#, + ) + .bind(invoice_id) + .fetch_optional(&*pool) + .await? + .ok_or("Invoice not found")?; + + // Find all eligible revenues and insert them + // A revenue is eligible if it's active during the period and not already + // on an invoice with an overlapping period + let result = sqlx::query( + r#" + INSERT INTO invoice_revenues (id, invoice_id, revenue_id, amount, created_at) + SELECT + gen_random_uuid(), + $1, + r.id, + r.amount, + NOW() + FROM revenues r + JOIN accounts a ON a.id = r.account_id + WHERE a.customer_id = $2 + AND r.start_date <= $4 + AND (r.end_date IS NULL OR r.end_date >= $3) + AND NOT EXISTS ( + SELECT 1 FROM invoice_revenues ir + JOIN invoices i ON i.id = ir.invoice_id + WHERE ir.revenue_id = r.id + AND i.start_date <= $4 + AND i.end_date >= $3 + ) + "#, + ) + .bind(invoice_id) + .bind(invoice.customer_id) + .bind(invoice.start_date) + .bind(invoice.end_date) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() as i32) + } + + /// Add all eligible projects to an invoice at once + /// Returns the number of projects added + async fn add_all_eligible_projects_to_invoice( + &self, + ctx: &Context<'_>, + invoice_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the invoice + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + FROM invoices WHERE id = $1 + "#, + ) + .bind(invoice_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Invoice not found")?; + + // Find all eligible projects and insert them + let result = sqlx::query( + r#" + INSERT INTO invoice_projects (id, invoice_id, project_id, amount, created_at) + SELECT + gen_random_uuid(), + $1, + p.id, + COALESCE(p.amount, 0), + NOW() + FROM projects p + WHERE p.customer_id = $2 + AND p.status = 'COMPLETED' + AND p.date >= $3 + AND p.date <= $4 + AND NOT EXISTS ( + SELECT 1 FROM invoice_projects ip WHERE ip.project_id = p.id + ) + "#, + ) + .bind(invoice_id) + .bind(invoice.customer_id) + .bind(invoice.start_date) + .bind(invoice.end_date) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() as i32) + } +} + +/// Helper struct for revenue info query +#[derive(sqlx::FromRow)] +struct RevenueInfo { + amount: Decimal, + customer_id: Uuid, +} + +/// Helper struct for project info query +#[derive(sqlx::FromRow)] +struct ProjectInfo { + customer_id: Uuid, + amount: Option, + status: String, +} diff --git a/src/graphql/mutations/jobs.rs b/src/graphql/mutations/jobs.rs new file mode 100644 index 0000000..51e97be --- /dev/null +++ b/src/graphql/mutations/jobs.rs @@ -0,0 +1,199 @@ +//! Job GraphQL mutations +//! +//! Admin-only mutations for manually triggering background jobs. 
+ +use async_graphql::{Context, Object, Result, SimpleObject}; +use chrono::{DateTime, Utc}; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::jobs::{ + EventCleanupJob, EventCleanupResult as JobCleanupResult, + IncompleteWorkReminderJob, IncompleteWorkReminderResult as JobReminderResult, + NightlyAssignmentsJob, NightlyAssignmentsResult as JobAssignmentsResult, +}; +use crate::models::TeamRole; +use crate::services::GmailService; +use std::sync::Arc; + +// ==================== RESULT TYPES ==================== + +/// Result of running the event cleanup job +#[derive(SimpleObject)] +pub struct EventCleanupResultType { + /// Number of events deleted + pub deleted_count: i64, + /// Cutoff date used for cleanup + pub cutoff_date: DateTime, + /// Retention period in days + pub retention_days: i64, +} + +impl From for EventCleanupResultType { + fn from(result: JobCleanupResult) -> Self { + Self { + deleted_count: result.deleted_count, + cutoff_date: result.cutoff_date, + retention_days: result.retention_days, + } + } +} + +/// Result of running the incomplete work reminder job +#[derive(SimpleObject)] +pub struct IncompleteWorkReminderResultType { + /// Number of team members notified + pub team_members_notified: i32, + /// Number of incomplete services found + pub services_count: i32, + /// Number of incomplete projects found + pub projects_count: i32, + /// Number of emails successfully sent + pub emails_sent: i32, + /// Any errors that occurred + pub errors: Vec, +} + +impl From for IncompleteWorkReminderResultType { + fn from(result: JobReminderResult) -> Self { + Self { + team_members_notified: result.team_members_notified, + services_count: result.services_count, + projects_count: result.projects_count, + emails_sent: result.emails_sent, + errors: result.errors, + } + } +} + +/// Result of running the nightly assignments job +#[derive(SimpleObject)] +pub struct NightlyAssignmentsResultType { + /// Number of team members notified 
+ pub team_members_notified: i32, + /// Number of scheduled services + pub services_count: i32, + /// Number of scheduled projects + pub projects_count: i32, + /// Number of emails successfully sent + pub emails_sent: i32, + /// Any errors that occurred + pub errors: Vec, +} + +impl From for NightlyAssignmentsResultType { + fn from(result: JobAssignmentsResult) -> Self { + Self { + team_members_notified: result.team_members_notified, + services_count: result.services_count, + projects_count: result.projects_count, + emails_sent: result.emails_sent, + errors: result.errors, + } + } +} + +// ==================== MUTATION IMPLEMENTATION ==================== + +#[derive(Default)] +pub struct JobMutation; + +#[Object] +impl JobMutation { + /// Run the event cleanup job manually (ADMIN only) + /// + /// This job deletes non-critical events older than 90 days. + /// Mission-critical events (session, invoice, report events) are preserved. + async fn run_event_cleanup(&self, ctx: &Context<'_>) -> Result { + // Verify admin access + verify_admin_access(ctx).await?; + + let db = ctx.data::()?; + let pool = db.pool().await; + + tracing::info!("Manual event cleanup triggered via GraphQL"); + + let result = EventCleanupJob::execute(&pool).await?; + + Ok(result.into()) + } + + /// Run the incomplete work reminder job manually (ADMIN only) + /// + /// This job sends email reminders to team members about incomplete + /// services and projects from yesterday (or Friday for Monday runs). 
+ async fn run_incomplete_work_reminder( + &self, + ctx: &Context<'_>, + ) -> Result { + // Verify admin access + verify_admin_access(ctx).await?; + + let db = ctx.data::()?; + let pool = db.pool().await; + let gmail_service = ctx.data::>>().ok().and_then(|o| o.clone()); + + tracing::info!("Manual incomplete work reminder triggered via GraphQL"); + + let result = IncompleteWorkReminderJob::execute( + &pool, + gmail_service.as_ref().map(|s| s.as_ref()), + ) + .await?; + + Ok(result.into()) + } + + /// Run the nightly assignments job manually (ADMIN only) + /// + /// This job sends email notifications to team members about + /// their scheduled services and projects for today. + async fn run_nightly_assignments( + &self, + ctx: &Context<'_>, + ) -> Result { + // Verify admin access + verify_admin_access(ctx).await?; + + let db = ctx.data::()?; + let pool = db.pool().await; + let gmail_service = ctx.data::>>().ok().and_then(|o| o.clone()); + + tracing::info!("Manual nightly assignments triggered via GraphQL"); + + let result = NightlyAssignmentsJob::execute( + &pool, + gmail_service.as_ref().map(|s| s.as_ref()), + ) + .await?; + + Ok(result.into()) + } +} + +/// Verify that the current user is an admin +async fn verify_admin_access(ctx: &Context<'_>) -> Result<()> { + let user = ctx.data::()?; + + // Must be a team profile + if user.profile_type != ProfileType::Team { + return Err("Only team members can access this resource".into()); + } + + // Fetch role from database + let db = ctx.data::()?; + let pool = db.pool().await; + + let role: Option = sqlx::query_scalar( + "SELECT role FROM team_profiles WHERE id = $1", + ) + .bind(user.user_id) + .fetch_optional(&*pool) + .await?; + + match role { + Some(TeamRole::Admin) => Ok(()), + Some(_) => Err("Admin access required".into()), + None => Err("Team profile not found".into()), + } +} diff --git a/src/graphql/mutations/location.rs b/src/graphql/mutations/location.rs new file mode 100644 index 0000000..b5596e4 --- 
/dev/null +++ b/src/graphql/mutations/location.rs @@ -0,0 +1,736 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + AccountAddressType, CreateLaborInput, CreateScheduleInput, CreateServiceScopeInput, + CreateScopeAreaInput, CreateScopeTaskInput, LaborType, ScheduleType, ServiceScopeAreaType, + ServiceScopeTaskType, ServiceScopeType, TaskFrequencyType, UpdateAccountAddressInput, + UpdateLaborInput, UpdateScheduleInput, UpdateServiceScopeInput, UpdateScopeAreaInput, + UpdateScopeTaskInput, +}; +use crate::models::{ + AccountAddress, Labor, Schedule, ServiceScope, ServiceScopeArea, ServiceScopeTask, TaskFrequency, +}; + +#[derive(Default)] +pub struct LocationMutation; + +#[Object] +impl LocationMutation { + // ==================== LABOR MUTATIONS ==================== + + /// Create a new labor rate for an address (deactivates existing active labor) + async fn create_labor( + &self, + ctx: &Context<'_>, + account_address_id: Uuid, + input: CreateLaborInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // End any existing active labor rates for this address + let today = chrono::Utc::now().date_naive(); + sqlx::query( + r#" + UPDATE labor + SET end_date = $2, updated_at = NOW() + WHERE account_address_id = $1 AND (end_date IS NULL OR end_date > $2) + "#, + ) + .bind(account_address_id) + .bind(today) + .execute(&*pool) + .await?; + + let labor: Labor = sqlx::query_as::<_, Labor>( + r#" + INSERT INTO labor (id, created_at, updated_at, account_address_id, amount, start_date, end_date) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, NULL) + RETURNING id, created_at, updated_at, account_address_id, amount, start_date, end_date + "#, + ) + .bind(account_address_id) + .bind(input.amount) + .bind(input.start_date) + .fetch_one(&*pool) + .await?; + + Ok(LaborType::from(labor)) + } + + /// Update a labor rate + async fn update_labor( + &self, + ctx: &Context<'_>, + 
id: Uuid, + input: UpdateLaborInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let labor: Labor = sqlx::query_as::<_, Labor>( + r#" + UPDATE labor + SET + amount = COALESCE($2, amount), + start_date = COALESCE($3, start_date), + end_date = COALESCE($4, end_date), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, account_address_id, amount, start_date, end_date + "#, + ) + .bind(id) + .bind(input.amount) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + Ok(LaborType::from(labor)) + } + + /// Delete a labor rate + async fn delete_labor(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM labor WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== SCHEDULE MUTATIONS ==================== + + /// Create a new schedule for an address (ends existing active schedules) + async fn create_schedule( + &self, + ctx: &Context<'_>, + account_address_id: Uuid, + input: CreateScheduleInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // End any existing active schedules for this address + let today = chrono::Utc::now().date_naive(); + sqlx::query( + r#" + UPDATE schedules + SET end_date = $2, updated_at = NOW() + WHERE account_address_id = $1 AND (end_date IS NULL OR end_date > $2) + "#, + ) + .bind(account_address_id) + .bind(today) + .execute(&*pool) + .await?; + + let schedule: Schedule = sqlx::query_as::<_, Schedule>( + r#" + INSERT INTO schedules ( + id, created_at, updated_at, account_address_id, name, + monday_service, tuesday_service, wednesday_service, thursday_service, + friday_service, saturday_service, sunday_service, weekend_service, + schedule_exception, start_date, end_date + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, + $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, NULL 
+ ) + RETURNING id, created_at, updated_at, account_address_id, name, + monday_service as monday, tuesday_service as tuesday, wednesday_service as wednesday, + thursday_service as thursday, friday_service as friday, saturday_service as saturday, + sunday_service as sunday, weekend_service, schedule_exception, start_date, end_date + "#, + ) + .bind(account_address_id) + .bind(&input.name) + .bind(input.monday.unwrap_or(false)) + .bind(input.tuesday.unwrap_or(false)) + .bind(input.wednesday.unwrap_or(false)) + .bind(input.thursday.unwrap_or(false)) + .bind(input.friday.unwrap_or(false)) + .bind(input.saturday.unwrap_or(false)) + .bind(input.sunday.unwrap_or(false)) + .bind(input.weekend_service.unwrap_or(false)) + .bind(&input.schedule_exception) + .bind(input.start_date) + .fetch_one(&*pool) + .await?; + + Ok(ScheduleType::from(schedule)) + } + + /// Update a schedule + async fn update_schedule( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateScheduleInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let schedule: Schedule = sqlx::query_as::<_, Schedule>( + r#" + UPDATE schedules + SET + name = COALESCE($2, name), + monday_service = COALESCE($3, monday_service), + tuesday_service = COALESCE($4, tuesday_service), + wednesday_service = COALESCE($5, wednesday_service), + thursday_service = COALESCE($6, thursday_service), + friday_service = COALESCE($7, friday_service), + saturday_service = COALESCE($8, saturday_service), + sunday_service = COALESCE($9, sunday_service), + weekend_service = COALESCE($10, weekend_service), + schedule_exception = COALESCE($11, schedule_exception), + start_date = COALESCE($12, start_date), + end_date = COALESCE($13, end_date), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, account_address_id, name, + monday_service as monday, tuesday_service as tuesday, wednesday_service as wednesday, + thursday_service as thursday, friday_service as friday, saturday_service as saturday, 
+ sunday_service as sunday, weekend_service, schedule_exception, start_date, end_date + "#, + ) + .bind(id) + .bind(&input.name) + .bind(input.monday) + .bind(input.tuesday) + .bind(input.wednesday) + .bind(input.thursday) + .bind(input.friday) + .bind(input.saturday) + .bind(input.sunday) + .bind(input.weekend_service) + .bind(&input.schedule_exception) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + Ok(ScheduleType::from(schedule)) + } + + /// Delete a schedule + async fn delete_schedule(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM schedules WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== SERVICE SCOPE MUTATIONS ==================== + + /// Create a new service scope for an address (deactivates existing active scopes) + async fn create_service_scope( + &self, + ctx: &Context<'_>, + account_id: Uuid, + account_address_id: Uuid, + input: CreateServiceScopeInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Deactivate any existing active scopes for this address + sqlx::query( + r#" + UPDATE service_scopes + SET is_active = false, updated_at = NOW() + WHERE account_address_id = $1 AND is_active = true + "#, + ) + .bind(account_address_id) + .execute(&*pool) + .await?; + + // Create the scope + let scope: ServiceScope = sqlx::query_as::<_, ServiceScope>( + r#" + INSERT INTO service_scopes ( + id, created_at, updated_at, name, account_id, account_address_id, description, is_active + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, true) + RETURNING id, created_at, updated_at, name, account_id, account_address_id, description, is_active + "#, + ) + .bind(&input.name) + .bind(account_id) + .bind(account_address_id) + .bind(&input.description) + .fetch_one(&*pool) + .await?; + + // Create areas and tasks if 
provided + if let Some(areas) = input.areas { + for (area_order, area_input) in areas.into_iter().enumerate() { + let area: ServiceScopeArea = sqlx::query_as::<_, ServiceScopeArea>( + r#" + INSERT INTO service_scope_areas (id, created_at, updated_at, scope_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, scope_id, name, "order" + "#, + ) + .bind(scope.base.id) + .bind(&area_input.name) + .bind(area_input.order.unwrap_or(area_order as i32)) + .fetch_one(&*pool) + .await?; + + if let Some(tasks) = area_input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + let frequency = task_input + .frequency + .map(TaskFrequency::from) + .unwrap_or(TaskFrequency::Daily); + + sqlx::query( + r#" + INSERT INTO service_scope_tasks ( + id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7) + "#, + ) + .bind(area.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(frequency) + .bind(task_input.order.unwrap_or(task_order as i32)) + .bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + } + } + + Ok(ServiceScopeType::from(scope)) + } + + /// Update a service scope + async fn update_service_scope( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateServiceScopeInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // If activating this scope, deactivate others for same address + if input.is_active == Some(true) { + let address_id: Uuid = sqlx::query_scalar( + "SELECT account_address_id FROM service_scopes WHERE id = $1", + ) + .bind(id) + .fetch_one(&*pool) + .await?; + + sqlx::query( + r#" + UPDATE 
service_scopes + SET is_active = false, updated_at = NOW() + WHERE account_address_id = $1 AND id != $2 AND is_active = true + "#, + ) + .bind(address_id) + .bind(id) + .execute(&*pool) + .await?; + } + + let scope: ServiceScope = sqlx::query_as::<_, ServiceScope>( + r#" + UPDATE service_scopes + SET + name = COALESCE($2, name), + description = COALESCE($3, description), + is_active = COALESCE($4, is_active), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, name, account_id, account_address_id, description, is_active + "#, + ) + .bind(id) + .bind(&input.name) + .bind(&input.description) + .bind(input.is_active) + .fetch_one(&*pool) + .await?; + + Ok(ServiceScopeType::from(scope)) + } + + /// Delete a service scope (soft-delete if sessions exist, hard delete otherwise) + async fn delete_service_scope(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Check if any service_sessions reference this scope + let session_count: i64 = sqlx::query_scalar( + "SELECT COUNT(*) FROM service_sessions WHERE scope_id = $1", + ) + .bind(id) + .fetch_one(&*pool) + .await?; + + if session_count > 0 { + // Soft-delete: just mark as inactive to preserve historical session data + let result = sqlx::query( + "UPDATE service_scopes SET is_active = false, updated_at = NOW() WHERE id = $1", + ) + .bind(id) + .execute(&*pool) + .await?; + + return Ok(result.rows_affected() > 0); + } + + // Hard delete: no sessions reference this scope, safe to remove completely + // Get all area IDs for this scope + let area_ids: Vec = sqlx::query_scalar( + "SELECT id FROM service_scope_areas WHERE scope_id = $1", + ) + .bind(id) + .fetch_all(&*pool) + .await?; + + // Delete tasks for all areas + for area_id in &area_ids { + sqlx::query("DELETE FROM service_scope_tasks WHERE area_id = $1") + .bind(area_id) + .execute(&*pool) + .await?; + } + + // Delete areas + sqlx::query("DELETE FROM service_scope_areas WHERE scope_id = 
$1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete scope + let result = sqlx::query("DELETE FROM service_scopes WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== SCOPE AREA MUTATIONS ==================== + + /// Create a new area within a scope + async fn create_scope_area( + &self, + ctx: &Context<'_>, + scope_id: Uuid, + input: CreateScopeAreaInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get max order for existing areas + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM service_scope_areas WHERE scope_id = $1"#, + ) + .bind(scope_id) + .fetch_one(&*pool) + .await?; + + let area: ServiceScopeArea = sqlx::query_as::<_, ServiceScopeArea>( + r#" + INSERT INTO service_scope_areas (id, created_at, updated_at, scope_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, scope_id, name, "order" + "#, + ) + .bind(scope_id) + .bind(&input.name) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .fetch_one(&*pool) + .await?; + + // Create tasks if provided + if let Some(tasks) = input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + let frequency = task_input + .frequency + .map(TaskFrequency::from) + .unwrap_or(TaskFrequency::Daily); + + sqlx::query( + r#" + INSERT INTO service_scope_tasks ( + id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7) + "#, + ) + .bind(area.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(frequency) + .bind(task_input.order.unwrap_or(task_order as i32)) + 
.bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + + Ok(ServiceScopeAreaType::from(area)) + } + + /// Update a scope area + async fn update_scope_area( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateScopeAreaInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let area: ServiceScopeArea = sqlx::query_as::<_, ServiceScopeArea>( + r#" + UPDATE service_scope_areas + SET + name = COALESCE($2, name), + "order" = COALESCE($3, "order"), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, scope_id, name, "order" + "#, + ) + .bind(id) + .bind(&input.name) + .bind(input.order) + .fetch_one(&*pool) + .await?; + + Ok(ServiceScopeAreaType::from(area)) + } + + /// Delete a scope area (cascades to tasks) + async fn delete_scope_area(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete tasks first + sqlx::query("DELETE FROM service_scope_tasks WHERE area_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete area + let result = sqlx::query("DELETE FROM service_scope_areas WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== SCOPE TASK MUTATIONS ==================== + + /// Create a new task within an area + async fn create_scope_task( + &self, + ctx: &Context<'_>, + area_id: Uuid, + input: CreateScopeTaskInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get max order for existing tasks + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM service_scope_tasks WHERE area_id = $1"#, + ) + .bind(area_id) + .fetch_one(&*pool) + .await?; + + let frequency = input + .frequency + .map(TaskFrequency::from) + .unwrap_or(TaskFrequency::Daily); + + let task: ServiceScopeTask = sqlx::query_as::<_, ServiceScopeTask>( + r#" + INSERT INTO service_scope_tasks ( + id, created_at, updated_at, area_id, 
scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7) + RETURNING id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + "#, + ) + .bind(area_id) + .bind(&input.scope_description) + .bind(input.checklist_description.as_deref().unwrap_or(&input.scope_description)) + .bind(input.session_description.as_deref().unwrap_or(&input.scope_description)) + .bind(frequency) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ServiceScopeTaskType::from(task)) + } + + /// Update a scope task + async fn update_scope_task( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateScopeTaskInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let task: ServiceScopeTask = sqlx::query_as::<_, ServiceScopeTask>( + r#" + UPDATE service_scope_tasks + SET + scope_description = COALESCE($2, scope_description), + checklist_description = COALESCE($3, checklist_description), + session_description = COALESCE($4, session_description), + frequency = COALESCE($5, frequency), + "order" = COALESCE($6, "order"), + estimated_minutes = COALESCE($7, estimated_minutes), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + "#, + ) + .bind(id) + .bind(&input.scope_description) + .bind(&input.checklist_description) + .bind(&input.session_description) + .bind(input.frequency.map(TaskFrequency::from)) + .bind(input.order) + .bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ServiceScopeTaskType::from(task)) + } + + /// Delete a scope task + async fn delete_scope_task(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + 
let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM service_scope_tasks WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== ACCOUNT ADDRESS (LOCATION) MUTATIONS ==================== + + /// Update an account address (location) + async fn update_account_address( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateAccountAddressInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // If setting as primary, unset other addresses for this account + if input.is_primary == Some(true) { + let account_id: Uuid = sqlx::query_scalar( + "SELECT account_id FROM account_addresses WHERE id = $1", + ) + .bind(id) + .fetch_one(&*pool) + .await?; + + sqlx::query( + r#" + UPDATE account_addresses + SET is_primary = false, updated_at = NOW() + WHERE account_id = $1 AND id != $2 AND is_primary = true + "#, + ) + .bind(account_id) + .bind(id) + .execute(&*pool) + .await?; + } + + // For nullable fields (name, notes), use the value directly (allows clearing to null) + // For required fields, use COALESCE as a safety net + let address: AccountAddress = sqlx::query_as::<_, AccountAddress>( + r#" + UPDATE account_addresses + SET + name = $2, + street_address = COALESCE($3, street_address), + city = COALESCE($4, city), + state = COALESCE($5, state), + zip_code = COALESCE($6, zip_code), + notes = $7, + is_primary = COALESCE($8, is_primary), + is_active = COALESCE($9, is_active), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, account_id, name, street_address, city, state, zip_code, notes, is_primary, is_active + "#, + ) + .bind(id) + .bind(&input.name) + .bind(&input.street_address) + .bind(&input.city) + .bind(&input.state) + .bind(&input.zip_code) + .bind(&input.notes) + .bind(input.is_primary) + .bind(input.is_active) + .fetch_one(&*pool) + .await?; + + Ok(AccountAddressType::from(address)) + } +} + +// Helper conversion from GraphQL 
enum to model enum +impl From for TaskFrequency { + fn from(freq: TaskFrequencyType) -> Self { + match freq { + TaskFrequencyType::Daily => TaskFrequency::Daily, + TaskFrequencyType::Weekly => TaskFrequency::Weekly, + TaskFrequencyType::Monthly => TaskFrequency::Monthly, + TaskFrequencyType::Quarterly => TaskFrequency::Quarterly, + TaskFrequencyType::Triannual => TaskFrequency::Triannual, + TaskFrequencyType::Annual => TaskFrequency::Annual, + TaskFrequencyType::AsNeeded => TaskFrequency::AsNeeded, + } + } +} diff --git a/src/graphql/mutations/messaging.rs b/src/graphql/mutations/messaging.rs new file mode 100644 index 0000000..041358b --- /dev/null +++ b/src/graphql/mutations/messaging.rs @@ -0,0 +1,792 @@ +//! Messaging GraphQL mutations +//! +//! Mutations for conversations, messages, and participants. + +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::Utc; +use serde_json::Value as JsonValue; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::graphql::types::{ + ConversationParticipantType, ConversationType_, ConversationTypeEnum, MessageType, +}; +use crate::models::{Conversation, ConversationParticipant, ConversationType, Message}; + +// ==================== HELPER FUNCTIONS ==================== + +/// Get participant name based on type and ID +async fn get_participant_name( + pool: &PgPool, + participant_type: &str, + participant_id: Uuid, +) -> Result { + let name: Option = match participant_type { + "team_profile" => { + sqlx::query_scalar( + r#"SELECT first_name || ' ' || last_name FROM team_profiles WHERE id = $1"#, + ) + .bind(participant_id) + .fetch_optional(pool) + .await? + } + "customer_profile" => { + sqlx::query_scalar( + r#"SELECT first_name || ' ' || last_name FROM customer_profiles WHERE id = $1"#, + ) + .bind(participant_id) + .fetch_optional(pool) + .await? 
+ } + _ => None, + }; + + Ok(name.unwrap_or_else(|| "Unknown".to_string())) +} + +/// Insert a system message into a conversation +async fn insert_system_message( + pool: &PgPool, + conversation_id: Uuid, + content: &str, +) -> Result<()> { + let now = Utc::now(); + + sqlx::query( + r#" + INSERT INTO messages ( + id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + ) + VALUES (gen_random_uuid(), $1, $1, $2, NULL, NULL, $3, false, NULL, '[]'::jsonb, true, '{}'::jsonb) + "#, + ) + .bind(now) + .bind(conversation_id) + .bind(content) + .execute(pool) + .await?; + + // Update conversation's last_message_at + sqlx::query( + r#"UPDATE conversations SET last_message_at = $1, updated_at = $1 WHERE id = $2"#, + ) + .bind(now) + .bind(conversation_id) + .execute(pool) + .await?; + + Ok(()) +} + +/// Get the first admin team profile ID (dispatch) +async fn get_dispatch_profile_id(pool: &PgPool) -> Result> { + let id: Option = sqlx::query_scalar( + r#"SELECT id FROM team_profiles WHERE role = 'ADMIN' AND status = 'ACTIVE' ORDER BY created_at LIMIT 1"#, + ) + .fetch_optional(pool) + .await?; + + Ok(id) +} + +/// Check if conversation should be auto-archived (no active participants) +async fn check_and_archive_conversation(pool: &PgPool, conversation_id: Uuid) -> Result { + let remaining: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) FROM conversation_participants + WHERE conversation_id = $1 AND is_archived = false + "#, + ) + .bind(conversation_id) + .fetch_one(pool) + .await?; + + if remaining == 0 { + // Archive the conversation + sqlx::query( + r#"UPDATE conversations SET is_archived = true, updated_at = NOW() WHERE id = $1"#, + ) + .bind(conversation_id) + .execute(pool) + .await?; + + // Insert final system message + insert_system_message(pool, conversation_id, "Conversation archived - no remaining participants").await?; + + return Ok(true); + } + + Ok(false) +} + +// 
==================== INPUT TYPES ==================== + +/// Input for creating a new conversation +#[derive(InputObject)] +pub struct CreateConversationInput { + /// Optional subject line + pub subject: Option, + /// Type of conversation + pub conversation_type: ConversationTypeEnum, + /// Entity type this conversation is about (optional) + pub entity_type: Option, + /// Entity ID this conversation is about (optional) + pub entity_id: Option, + /// Initial participant IDs (will include creator automatically) + pub participant_ids: Vec, + /// Initial message content (optional) + pub initial_message: Option, +} + +/// Input for specifying a participant +#[derive(InputObject, Clone)] +pub struct ParticipantInput { + pub participant_type: String, + pub participant_id: Uuid, +} + +/// Input for sending a message +#[derive(InputObject)] +pub struct SendMessageInput { + pub conversation_id: Uuid, + pub content: String, + pub reply_to_id: Option, + pub attachments: Option, +} + +/// Input for updating a conversation +#[derive(InputObject)] +pub struct UpdateConversationInput { + pub subject: Option, +} + +// ==================== MUTATION IMPLEMENTATION ==================== + +#[derive(Default)] +pub struct MessagingMutation; + +#[Object] +impl MessagingMutation { + // ==================== CONVERSATION MUTATIONS ==================== + + /// Create a new conversation + async fn create_conversation( + &self, + ctx: &Context<'_>, + input: CreateConversationInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let created_by_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let conversation_type = match input.conversation_type { + ConversationTypeEnum::Direct => ConversationType::Direct, + ConversationTypeEnum::Group => ConversationType::Group, + ConversationTypeEnum::Support => ConversationType::Support, + }; + + let now = Utc::now(); + + // 
Create the conversation + let conversation: Conversation = sqlx::query_as( + r#" + INSERT INTO conversations ( + id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + ) + VALUES ( + gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $7, NULL, false, '{}'::jsonb + ) + RETURNING id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + "#, + ) + .bind(now) + .bind(&input.subject) + .bind(conversation_type) + .bind(&input.entity_type) + .bind(input.entity_id) + .bind(created_by_type) + .bind(user.user_id) + .fetch_one(&*pool) + .await?; + + // Add the creator as a participant + sqlx::query( + r#" + INSERT INTO conversation_participants ( + id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $1, 0, false, false, $1) + "#, + ) + .bind(now) + .bind(conversation.base.id) + .bind(created_by_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + // Add other participants + for participant in &input.participant_ids { + // Skip if this is the creator (already added) + if participant.participant_type == created_by_type + && participant.participant_id == user.user_id + { + continue; + } + + sqlx::query( + r#" + INSERT INTO conversation_participants ( + id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, NULL, 0, false, false, $1) + "#, + ) + .bind(now) + .bind(conversation.base.id) + .bind(&participant.participant_type) + .bind(participant.participant_id) + .execute(&*pool) + .await?; + } + + // For SUPPORT conversations, auto-add dispatch (admin) if not already included + if conversation_type == 
ConversationType::Support { + if let Some(dispatch_id) = get_dispatch_profile_id(&pool).await? { + // Check if dispatch is already a participant + let dispatch_already_added = input + .participant_ids + .iter() + .any(|p| p.participant_type == "team_profile" && p.participant_id == dispatch_id) + || (created_by_type == "team_profile" && user.user_id == dispatch_id); + + if !dispatch_already_added { + sqlx::query( + r#" + INSERT INTO conversation_participants ( + id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + ) + VALUES (gen_random_uuid(), $1, $1, $2, 'team_profile', $3, NULL, 0, false, false, $1) + "#, + ) + .bind(now) + .bind(conversation.base.id) + .bind(dispatch_id) + .execute(&*pool) + .await?; + } + } + } + + // If there's an initial message, send it + if let Some(content) = input.initial_message { + sqlx::query( + r#" + INSERT INTO messages ( + id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, false, NULL, '[]'::jsonb, false, '{}'::jsonb) + "#, + ) + .bind(now) + .bind(conversation.base.id) + .bind(created_by_type) + .bind(user.user_id) + .bind(&content) + .execute(&*pool) + .await?; + + // Update last_message_at and increment unread counts for other participants + sqlx::query( + r#" + UPDATE conversations SET last_message_at = $1, updated_at = $1 WHERE id = $2 + "#, + ) + .bind(now) + .bind(conversation.base.id) + .execute(&*pool) + .await?; + + sqlx::query( + r#" + UPDATE conversation_participants + SET unread_count = unread_count + 1, updated_at = $1 + WHERE conversation_id = $2 + AND NOT (participant_type = $3 AND participant_id = $4) + "#, + ) + .bind(now) + .bind(conversation.base.id) + .bind(created_by_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + } + + Ok(ConversationType_::from(conversation)) 
+ } + + /// Update a conversation + async fn update_conversation( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateConversationInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let conversation: Conversation = sqlx::query_as( + r#" + UPDATE conversations + SET subject = COALESCE($1, subject), + updated_at = NOW() + WHERE id = $2 + RETURNING id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + "#, + ) + .bind(&input.subject) + .bind(id) + .fetch_one(&*pool) + .await?; + + Ok(ConversationType_::from(conversation)) + } + + /// Archive or unarchive a conversation + async fn archive_conversation( + &self, + ctx: &Context<'_>, + id: Uuid, + is_archived: bool, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let conversation: Conversation = sqlx::query_as( + r#" + UPDATE conversations + SET is_archived = $1, updated_at = NOW() + WHERE id = $2 + RETURNING id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + "#, + ) + .bind(is_archived) + .bind(id) + .fetch_one(&*pool) + .await?; + + Ok(ConversationType_::from(conversation)) + } + + /// Mute or unmute a conversation for the current user + async fn mute_conversation( + &self, + ctx: &Context<'_>, + id: Uuid, + is_muted: bool, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let result = sqlx::query( + r#" + UPDATE conversation_participants + SET is_muted = $1, updated_at = NOW() + WHERE conversation_id = $2 AND participant_type = $3 AND participant_id = $4 + "#, + ) + .bind(is_muted) + .bind(id) + .bind(participant_type) + .bind(user.user_id) + .execute(&*pool) + 
.await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== MESSAGE MUTATIONS ==================== + + /// Send a message to a conversation + async fn send_message(&self, ctx: &Context<'_>, input: SendMessageInput) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let author_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let now = Utc::now(); + + // Verify user is a participant + let is_participant: bool = sqlx::query_scalar( + r#" + SELECT EXISTS( + SELECT 1 FROM conversation_participants + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 AND is_archived = false + ) + "#, + ) + .bind(input.conversation_id) + .bind(author_type) + .bind(user.user_id) + .fetch_one(&*pool) + .await?; + + if !is_participant { + return Err(async_graphql::Error::new( + "You are not a participant in this conversation", + )); + } + + // Create the message + let message: Message = sqlx::query_as( + r#" + INSERT INTO messages ( + id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, false, $6, COALESCE($7, '[]'::jsonb), false, '{}'::jsonb) + RETURNING id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + "#, + ) + .bind(now) + .bind(input.conversation_id) + .bind(author_type) + .bind(user.user_id) + .bind(&input.content) + .bind(input.reply_to_id) + .bind(&input.attachments) + .fetch_one(&*pool) + .await?; + + // Update conversation's last_message_at + sqlx::query( + r#" + UPDATE conversations SET last_message_at = $1, updated_at = $1 WHERE id = $2 + "#, + ) + .bind(now) + .bind(input.conversation_id) + .execute(&*pool) + .await?; + + // Increment unread count for other 
participants + sqlx::query( + r#" + UPDATE conversation_participants + SET unread_count = unread_count + 1, updated_at = $1 + WHERE conversation_id = $2 + AND is_archived = false + AND NOT (participant_type = $3 AND participant_id = $4) + "#, + ) + .bind(now) + .bind(input.conversation_id) + .bind(author_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + // Mark conversation as read for the sender + sqlx::query( + r#" + UPDATE conversation_participants + SET last_read_at = $1, unread_count = 0, updated_at = $1 + WHERE conversation_id = $2 AND participant_type = $3 AND participant_id = $4 + "#, + ) + .bind(now) + .bind(input.conversation_id) + .bind(author_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + Ok(MessageType::from(message)) + } + + /// Delete (soft-delete) a message + async fn delete_message(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let author_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + // Only the author can delete their own message + let result = sqlx::query( + r#" + UPDATE messages + SET is_deleted = true, updated_at = NOW() + WHERE id = $1 AND author_type = $2 AND author_id = $3 + "#, + ) + .bind(id) + .bind(author_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== READ TRACKING MUTATIONS ==================== + + /// Mark a conversation as read for the current user + async fn mark_conversation_as_read(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let now = Utc::now(); + + let result = sqlx::query( + r#" + UPDATE conversation_participants + SET 
last_read_at = $1, unread_count = 0, updated_at = $1 + WHERE conversation_id = $2 AND participant_type = $3 AND participant_id = $4 + "#, + ) + .bind(now) + .bind(id) + .bind(participant_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Mark all conversations as read for the current user + async fn mark_all_conversations_as_read(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let now = Utc::now(); + + let result = sqlx::query( + r#" + UPDATE conversation_participants + SET last_read_at = $1, unread_count = 0, updated_at = $1 + WHERE participant_type = $2 AND participant_id = $3 AND unread_count > 0 + "#, + ) + .bind(now) + .bind(participant_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() as i32) + } + + // ==================== PARTICIPANT MUTATIONS ==================== + + /// Add a participant to a conversation + async fn add_participant( + &self, + ctx: &Context<'_>, + conversation_id: Uuid, + participant: ParticipantInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let now = Utc::now(); + + // Check if participant already exists (even if archived) + let existing: Option = sqlx::query_scalar( + r#" + SELECT is_archived FROM conversation_participants + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 + "#, + ) + .bind(conversation_id) + .bind(&participant.participant_type) + .bind(participant.participant_id) + .fetch_optional(&*pool) + .await?; + + let cp: ConversationParticipant = sqlx::query_as( + r#" + INSERT INTO conversation_participants ( + id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + ) + 
VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, NULL, 0, false, false, $1) + ON CONFLICT (conversation_id, participant_type, participant_id) + DO UPDATE SET is_archived = false, updated_at = $1 + RETURNING id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + "#, + ) + .bind(now) + .bind(conversation_id) + .bind(&participant.participant_type) + .bind(participant.participant_id) + .fetch_one(&*pool) + .await?; + + // Create system message for join + // Only if this is a new participant OR was previously archived (rejoining) + let should_create_message = existing.is_none() || existing == Some(true); + if should_create_message { + let name = get_participant_name(&pool, &participant.participant_type, participant.participant_id).await?; + insert_system_message(&pool, conversation_id, &format!("{} joined the conversation", name)).await?; + } + + Ok(ConversationParticipantType::from(cp)) + } + + /// Remove a participant from a conversation (soft-remove) + async fn remove_participant( + &self, + ctx: &Context<'_>, + conversation_id: Uuid, + participant: ParticipantInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Check if participant is actually active + let is_active_participant: bool = sqlx::query_scalar( + r#" + SELECT EXISTS( + SELECT 1 FROM conversation_participants + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 AND is_archived = false + ) + "#, + ) + .bind(conversation_id) + .bind(&participant.participant_type) + .bind(participant.participant_id) + .fetch_one(&*pool) + .await?; + + if !is_active_participant { + return Ok(false); + } + + // Get participant name for system message + let name = get_participant_name(&pool, &participant.participant_type, participant.participant_id).await?; + + // Create system message + insert_system_message(&pool, conversation_id, &format!("{} was removed from the conversation", 
name)).await?; + + // Soft-remove the participant + let result = sqlx::query( + r#" + UPDATE conversation_participants + SET is_archived = true, updated_at = NOW() + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 + "#, + ) + .bind(conversation_id) + .bind(&participant.participant_type) + .bind(participant.participant_id) + .execute(&*pool) + .await?; + + // Check if conversation should be auto-archived + check_and_archive_conversation(&pool, conversation_id).await?; + + Ok(result.rows_affected() > 0) + } + + /// Leave a conversation (current user removes themselves) + async fn leave_conversation(&self, ctx: &Context<'_>, conversation_id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + // Check if user is actually a participant (and not already archived) + let is_active_participant: bool = sqlx::query_scalar( + r#" + SELECT EXISTS( + SELECT 1 FROM conversation_participants + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 AND is_archived = false + ) + "#, + ) + .bind(conversation_id) + .bind(participant_type) + .bind(user.user_id) + .fetch_one(&*pool) + .await?; + + if !is_active_participant { + return Ok(false); + } + + // Get participant name for system message + let name = get_participant_name(&pool, participant_type, user.user_id).await?; + + // Create system message before leaving + insert_system_message(&pool, conversation_id, &format!("{} left the conversation", name)).await?; + + // Soft-remove the participant + let result = sqlx::query( + r#" + UPDATE conversation_participants + SET is_archived = true, updated_at = NOW() + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 + "#, + ) + .bind(conversation_id) + .bind(participant_type) + .bind(user.user_id) + .execute(&*pool) + 
.await?; + + // Check if conversation should be auto-archived + check_and_archive_conversation(&pool, conversation_id).await?; + + Ok(result.rows_affected() > 0) + } +} diff --git a/src/graphql/mutations/mod.rs b/src/graphql/mutations/mod.rs new file mode 100644 index 0000000..4cadb2b --- /dev/null +++ b/src/graphql/mutations/mod.rs @@ -0,0 +1,58 @@ +mod account; +mod calendar; +mod customer; +mod email; +mod invoice; +mod jobs; +mod location; +mod messaging; +mod notification; +mod profile; +pub mod project; +mod project_scope_template; +mod report; +mod service; +mod service_scope_template; +mod session; +mod wave; + +use async_graphql::MergedObject; + +pub use account::AccountMutation; +pub use calendar::CalendarMutation; +pub use customer::CustomerMutation; +pub use email::EmailMutation; +pub use invoice::InvoiceMutation; +pub use jobs::JobMutation; +pub use location::LocationMutation; +pub use messaging::MessagingMutation; +pub use notification::NotificationMutation; +pub use profile::ProfileMutation; +pub use project::ProjectMutation; +pub use project_scope_template::ProjectScopeTemplateMutation; +pub use report::ReportMutation; +pub use service::ServiceMutation; +pub use service_scope_template::ServiceScopeTemplateMutation; +pub use session::SessionMutation; +pub use wave::WaveMutation; + +#[derive(MergedObject, Default)] +pub struct MutationRoot( + AccountMutation, + CalendarMutation, + CustomerMutation, + EmailMutation, + InvoiceMutation, + JobMutation, + LocationMutation, + MessagingMutation, + NotificationMutation, + ProfileMutation, + ProjectMutation, + ReportMutation, + ServiceMutation, + ServiceScopeTemplateMutation, + ProjectScopeTemplateMutation, + SessionMutation, + WaveMutation, +); diff --git a/src/graphql/mutations/notification.rs b/src/graphql/mutations/notification.rs new file mode 100644 index 0000000..764afd6 --- /dev/null +++ b/src/graphql/mutations/notification.rs @@ -0,0 +1,376 @@ +//! Notification GraphQL mutations +//! +//! 
Mutations for managing notifications and notification rules. + +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::Utc; +use serde_json::Value as JsonValue; +use uuid::Uuid; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::graphql::types::{NotificationRuleType, NotificationType}; +use crate::models::{Notification, NotificationRule, NotificationStatus}; + +// ==================== INPUT TYPES ==================== + +/// Input for creating a notification rule +#[derive(InputObject)] +pub struct CreateNotificationRuleInput { + pub name: String, + pub description: Option, + /// Event types that trigger this rule (array of event type strings) + pub event_types: JsonValue, + /// Channels to deliver through (array of channel strings, e.g., ["IN_APP", "EMAIL"]) + pub channels: JsonValue, + /// Target roles (array of role strings, e.g., ["ADMIN", "TEAM_LEADER"]) + pub target_roles: Option, + /// Custom conditions for triggering (JSON) + pub conditions: Option, + /// Notification subject template + pub subject_template: Option, + /// Notification body template + pub body_template: Option, + /// Specific team profiles to target + pub target_team_profile_ids: Option>, + /// Specific customer profiles to target + pub target_customer_profile_ids: Option>, +} + +/// Input for updating a notification rule +#[derive(InputObject)] +pub struct UpdateNotificationRuleInput { + pub name: Option, + pub description: Option, + pub event_types: Option, + pub channels: Option, + pub target_roles: Option, + pub conditions: Option, + pub subject_template: Option, + pub body_template: Option, + /// Specific team profiles to target (replaces existing) + pub target_team_profile_ids: Option>, + /// Specific customer profiles to target (replaces existing) + pub target_customer_profile_ids: Option>, +} + +// ==================== MUTATION IMPLEMENTATION ==================== + +#[derive(Default)] +pub struct NotificationMutation; + +#[Object] 
impl NotificationMutation {
    // ==================== USER NOTIFICATION MUTATIONS ====================

    /// Mark a single notification as read.
    ///
    /// The WHERE clause scopes the update to the caller's own inbox, so one
    /// user can never mark another user's notification as read. Errors if no
    /// matching row exists for this caller.
    async fn mark_notification_as_read(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result<NotificationType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;
        let user = ctx.data::<UserContext>()?;

        let recipient_type = if matches!(user.profile_type, ProfileType::Team) {
            "team_profile"
        } else {
            "customer_profile"
        };

        let ts = Utc::now();

        let row: Notification = sqlx::query_as(
            r#"
            UPDATE notifications
            SET read_at = $1, status = $2, updated_at = $1
            WHERE id = $3 AND recipient_type = $4 AND recipient_id = $5
            RETURNING id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id,
                      status, subject, body, action_url, read_at, metadata
            "#,
        )
        .bind(ts)
        .bind(NotificationStatus::Read)
        .bind(id)
        .bind(recipient_type)
        .bind(user.user_id)
        .fetch_one(&*pool)
        .await?;

        Ok(row.into())
    }

    /// Mark every unread notification as read for the current user.
    ///
    /// Returns the number of rows updated; only rows with `read_at IS NULL`
    /// are touched, so the count reflects notifications that actually changed.
    async fn mark_all_notifications_as_read(&self, ctx: &Context<'_>) -> Result<i32> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;
        let user = ctx.data::<UserContext>()?;

        let recipient_type = if matches!(user.profile_type, ProfileType::Team) {
            "team_profile"
        } else {
            "customer_profile"
        };

        let ts = Utc::now();

        let outcome = sqlx::query(
            r#"
            UPDATE notifications
            SET read_at = $1, status = $2, updated_at = $1
            WHERE recipient_type = $3 AND recipient_id = $4 AND read_at IS NULL
            "#,
        )
        .bind(ts)
        .bind(NotificationStatus::Read)
        .bind(recipient_type)
        .bind(user.user_id)
        .execute(&*pool)
        .await?;

        Ok(outcome.rows_affected() as i32)
    }

    /// Delete a notification
    async fn delete_notification(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;
        let user = ctx.data::<UserContext>()?;

        let recipient_type = match
user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let result = sqlx::query( + r#" + DELETE FROM notifications + WHERE id = $1 AND recipient_type = $2 AND recipient_id = $3 + "#, + ) + .bind(id) + .bind(recipient_type) + .bind(user.user_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== ADMIN NOTIFICATION RULE MUTATIONS ==================== + + /// Create a notification rule (admin only) + async fn create_notification_rule( + &self, + ctx: &Context<'_>, + input: CreateNotificationRuleInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let now = Utc::now(); + + // Create the rule + let rule: NotificationRule = sqlx::query_as( + r#" + INSERT INTO notification_rules ( + id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, true, $4, $5, $6, $7, $8, $9) + RETURNING id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + "#, + ) + .bind(now) + .bind(&input.name) + .bind(&input.description) + .bind(&input.event_types) + .bind(&input.channels) + .bind(&input.target_roles) + .bind(&input.conditions) + .bind(&input.subject_template) + .bind(&input.body_template) + .fetch_one(&*pool) + .await?; + + // Add team profile targets + if let Some(team_ids) = input.target_team_profile_ids { + for team_id in team_ids { + sqlx::query( + r#" + INSERT INTO notification_rule_team_profiles (id, rule_id, team_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, $3) + "#, + ) + .bind(rule.base.id) + .bind(team_id) + .bind(now) + .execute(&*pool) + .await?; + } + } + + // Add customer profile targets + if let Some(customer_ids) = input.target_customer_profile_ids { + for customer_id in customer_ids { + sqlx::query( + r#" + 
INSERT INTO notification_rule_customer_profiles (id, rule_id, customer_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, $3) + "#, + ) + .bind(rule.base.id) + .bind(customer_id) + .bind(now) + .execute(&*pool) + .await?; + } + } + + Ok(NotificationRuleType::from(rule)) + } + + /// Update a notification rule (admin only) + async fn update_notification_rule( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateNotificationRuleInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let now = Utc::now(); + + let rule: NotificationRule = sqlx::query_as( + r#" + UPDATE notification_rules + SET name = COALESCE($1, name), + description = COALESCE($2, description), + event_types = COALESCE($3, event_types), + channels = COALESCE($4, channels), + target_roles = COALESCE($5, target_roles), + conditions = COALESCE($6, conditions), + subject_template = COALESCE($7, subject_template), + body_template = COALESCE($8, body_template), + updated_at = $9 + WHERE id = $10 + RETURNING id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + "#, + ) + .bind(&input.name) + .bind(&input.description) + .bind(&input.event_types) + .bind(&input.channels) + .bind(&input.target_roles) + .bind(&input.conditions) + .bind(&input.subject_template) + .bind(&input.body_template) + .bind(now) + .bind(id) + .fetch_one(&*pool) + .await?; + + // Update team profile targets if provided + if let Some(team_ids) = input.target_team_profile_ids { + // Remove existing + sqlx::query("DELETE FROM notification_rule_team_profiles WHERE rule_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Add new + for team_id in team_ids { + sqlx::query( + r#" + INSERT INTO notification_rule_team_profiles (id, rule_id, team_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, $3) + "#, + ) + .bind(id) + .bind(team_id) + .bind(now) + .execute(&*pool) + .await?; + } + } + + // Update 
customer profile targets if provided + if let Some(customer_ids) = input.target_customer_profile_ids { + // Remove existing + sqlx::query("DELETE FROM notification_rule_customer_profiles WHERE rule_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Add new + for customer_id in customer_ids { + sqlx::query( + r#" + INSERT INTO notification_rule_customer_profiles (id, rule_id, customer_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, $3) + "#, + ) + .bind(id) + .bind(customer_id) + .bind(now) + .execute(&*pool) + .await?; + } + } + + Ok(NotificationRuleType::from(rule)) + } + + /// Delete a notification rule (admin only) + async fn delete_notification_rule(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete associated targets first (due to foreign keys) + sqlx::query("DELETE FROM notification_rule_team_profiles WHERE rule_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + sqlx::query("DELETE FROM notification_rule_customer_profiles WHERE rule_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + let result = sqlx::query("DELETE FROM notification_rules WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Toggle a notification rule's active status (admin only) + async fn toggle_notification_rule( + &self, + ctx: &Context<'_>, + id: Uuid, + is_active: bool, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let rule: NotificationRule = sqlx::query_as( + r#" + UPDATE notification_rules + SET is_active = $1, updated_at = NOW() + WHERE id = $2 + RETURNING id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + "#, + ) + .bind(is_active) + .bind(id) + .fetch_one(&*pool) + .await?; + + Ok(NotificationRuleType::from(rule)) + } +} diff --git a/src/graphql/mutations/profile.rs b/src/graphql/mutations/profile.rs new file mode 
100644 index 0000000..6869c07 --- /dev/null +++ b/src/graphql/mutations/profile.rs @@ -0,0 +1,427 @@ +//! Profile GraphQL mutations for TeamProfile and CustomerProfile + +use async_graphql::{Context, InputObject, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{CustomerProfileType, EntityStatusType, TeamProfileType, TeamRoleType}; +use crate::models::{CustomerProfile, EntityStatus, EventType, TeamProfile, TeamRole}; +use crate::services::EventPublisher; +use crate::auth::UserContext; + +// ==================== TEAM PROFILE INPUTS ==================== + +/// Input for creating a team profile +/// The ID must be the Kratos identity UUID +#[derive(InputObject)] +pub struct CreateTeamProfileInput { + /// The Kratos identity UUID - this becomes the profile ID + pub kratos_identity_id: Uuid, + pub first_name: String, + pub last_name: String, + pub email: String, + pub phone: String, + pub role: TeamRoleType, + pub status: Option, + pub notes: String, +} + +/// Input for updating a team profile +/// Note: The profile ID (Kratos identity UUID) cannot be changed +#[derive(InputObject)] +pub struct UpdateTeamProfileInput { + pub first_name: Option, + pub last_name: Option, + pub email: Option, + pub phone: Option, + pub role: Option, + pub status: Option, + pub notes: Option, +} + +// ==================== CUSTOMER PROFILE INPUTS ==================== + +/// Input for creating a customer profile +/// The ID must be the Kratos identity UUID +#[derive(InputObject)] +pub struct CreateCustomerProfileInput { + /// The Kratos identity UUID - this becomes the profile ID + pub kratos_identity_id: Uuid, + pub first_name: String, + pub last_name: String, + pub email: String, + pub phone: String, + pub status: Option, + pub notes: String, +} + +/// Input for updating a customer profile +#[derive(InputObject)] +pub struct UpdateCustomerProfileInput { + pub first_name: Option, + pub last_name: Option, + pub email: Option, + pub phone: Option, + pub 
status: Option, + pub notes: Option, +} + +#[derive(Default)] +pub struct ProfileMutation; + +#[Object] +impl ProfileMutation { + // ==================== TEAM PROFILE MUTATIONS ==================== + + /// Create a new team profile linked to a Kratos identity + async fn create_team_profile( + &self, + ctx: &Context<'_>, + input: CreateTeamProfileInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Check if profile already exists + let existing: Option<(Uuid,)> = sqlx::query_as( + "SELECT id FROM team_profiles WHERE id = $1" + ) + .bind(input.kratos_identity_id) + .fetch_optional(&*pool) + .await?; + + if existing.is_some() { + return Err(async_graphql::Error::new( + "A team profile already exists for this Kratos identity" + )); + } + + let status = input.status.map(EntityStatus::from).unwrap_or(EntityStatus::Active); + let role = TeamRole::from(input.role); + + let profile: TeamProfile = sqlx::query_as::<_, TeamProfile>( + r#" + INSERT INTO team_profiles ( + id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + ) + VALUES ($1, NOW(), NOW(), $2, $3, NULLIF($4, ''), NULLIF($5, ''), $6, $7, NULLIF($8, '')) + RETURNING id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + "#, + ) + .bind(input.kratos_identity_id) + .bind(&input.first_name) + .bind(&input.last_name) + .bind(&input.phone) + .bind(&input.email) + .bind(role) + .bind(status) + .bind(&input.notes) + .fetch_one(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(user) = ctx.data::() { + let _ = EventPublisher::publish( + &pool, + EventType::TeamProfileCreated, + "team_profile", + profile.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(TeamProfileType::from(profile)) + } + + /// Update a team profile + async fn update_team_profile( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateTeamProfileInput, + ) -> Result { + let db = ctx.data::()?; + let pool = 
db.pool().await; + + // Get old status and role for event tracking + let old_values: Option<(EntityStatus, TeamRole)> = sqlx::query_as( + "SELECT status, role FROM team_profiles WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let new_status = input.status.map(EntityStatus::from); + let new_role = input.role.map(TeamRole::from); + + // Empty string clears optional fields, NULL means don't update + let profile: TeamProfile = sqlx::query_as::<_, TeamProfile>( + r#" + UPDATE team_profiles + SET + first_name = COALESCE(NULLIF($2, ''), first_name), + last_name = COALESCE(NULLIF($3, ''), last_name), + email = CASE WHEN $4 IS NULL THEN email ELSE NULLIF($4, '') END, + phone = CASE WHEN $5 IS NULL THEN phone ELSE NULLIF($5, '') END, + role = COALESCE($6, role), + status = COALESCE($7, status), + notes = CASE WHEN $8 IS NULL THEN notes ELSE NULLIF($8, '') END, + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + "#, + ) + .bind(id) + .bind(&input.first_name) + .bind(&input.last_name) + .bind(&input.email) + .bind(&input.phone) + .bind(new_role) + .bind(new_status) + .bind(&input.notes) + .fetch_one(&*pool) + .await?; + + // Publish events for status/role changes + if let Ok(user) = ctx.data::() { + if let Some((old_status, old_role)) = old_values { + // Status change event + if let Some(new) = new_status { + if old_status != new { + let _ = EventPublisher::publish( + &pool, + EventType::TeamProfileStatusChanged, + "team_profile", + profile.base.id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "old_status": format!("{:?}", old_status), + "new_status": format!("{:?}", new) + })), + ) + .await; + } + } + + // Role change event + if let Some(new) = new_role { + if old_role != new { + let _ = EventPublisher::publish( + &pool, + EventType::TeamProfileRoleChanged, + "team_profile", + profile.base.id, + Some(("team_profile", user.user_id)), + 
Some(serde_json::json!({
                                "old_role": format!("{:?}", old_role),
                                "new_role": format!("{:?}", new)
                            })),
                        )
                        .await;
                    }
                }
            }
        }

        Ok(TeamProfileType::from(profile))
    }

    /// Delete a team profile by its Kratos identity UUID.
    ///
    /// Returns `true` when a row was actually removed. On success a
    /// `TeamProfileDeleted` audit event is published best-effort (the result
    /// is deliberately ignored so audit failures never fail the mutation).
    async fn delete_team_profile(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let deleted = sqlx::query("DELETE FROM team_profiles WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?
            .rows_affected()
            > 0;

        if deleted {
            if let Ok(user) = ctx.data::<UserContext>() {
                let _ = EventPublisher::publish(
                    &pool,
                    EventType::TeamProfileDeleted,
                    "team_profile",
                    id,
                    Some(("team_profile", user.user_id)),
                    None,
                )
                .await;
            }
        }

        Ok(deleted)
    }

    // ==================== CUSTOMER PROFILE MUTATIONS ====================

    /// Create a new customer profile linked to a Kratos identity.
    ///
    /// The Kratos identity UUID becomes the profile's primary key, so at most
    /// one profile can exist per identity. Empty-string contact/notes fields
    /// are stored as NULL via NULLIF. Status defaults to `Active`.
    async fn create_customer_profile(
        &self,
        ctx: &Context<'_>,
        input: CreateCustomerProfileInput,
    ) -> Result<CustomerProfileType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Friendlier error than a raw PK violation. NOTE(review): this
        // check-then-insert is racy; the primary-key constraint is the real
        // guarantee — a concurrent insert surfaces as a database error.
        let existing: Option<(Uuid,)> = sqlx::query_as(
            "SELECT id FROM customer_profiles WHERE id = $1"
        )
        .bind(input.kratos_identity_id)
        .fetch_optional(&*pool)
        .await?;

        if existing.is_some() {
            return Err(async_graphql::Error::new(
                "A customer profile already exists for this Kratos identity"
            ));
        }

        let status = input.status.map(EntityStatus::from).unwrap_or(EntityStatus::Active);

        let profile: CustomerProfile = sqlx::query_as::<_, CustomerProfile>(
            r#"
            INSERT INTO customer_profiles (
                id, created_at, updated_at, first_name, last_name, phone, email, status, notes
            )
            VALUES ($1, NOW(), NOW(), $2, $3, NULLIF($4, ''), NULLIF($5, ''), $6, NULLIF($7, ''))
            RETURNING id, created_at, updated_at, first_name, last_name, phone, email, status, notes
            "#,
        )
        .bind(input.kratos_identity_id)
        .bind(&input.first_name)
        .bind(&input.last_name)
        .bind(&input.phone)
        .bind(&input.email)
        .bind(status)
        .bind(&input.notes)
        .fetch_one(&*pool)
        .await?;

        // Audit trail; the actor is the authenticated team member.
        if let Ok(user) = ctx.data::<UserContext>() {
            let _ = EventPublisher::publish(
                &pool,
                EventType::CustomerProfileCreated,
                "customer_profile",
                profile.base.id,
                Some(("team_profile", user.user_id)),
                None,
            )
            .await;
        }

        Ok(CustomerProfileType::from(profile))
    }

    /// Update a customer profile.
    ///
    /// Field semantics follow the SQL below: a NULL input leaves the column
    /// unchanged; an empty string clears the nullable email/phone/notes
    /// columns; first/last name fall back to their current value when empty.
    /// A status transition is published as an audit event.
    async fn update_customer_profile(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateCustomerProfileInput,
    ) -> Result<CustomerProfileType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Snapshot the current status so a change can be audited below.
        let old_status: Option<EntityStatus> = sqlx::query_scalar(
            "SELECT status FROM customer_profiles WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        let new_status = input.status.map(EntityStatus::from);

        // Empty string clears optional fields, NULL means don't update
        let profile: CustomerProfile = sqlx::query_as::<_, CustomerProfile>(
            r#"
            UPDATE customer_profiles
            SET
                first_name = COALESCE(NULLIF($2, ''), first_name),
                last_name = COALESCE(NULLIF($3, ''), last_name),
                email = CASE WHEN $4 IS NULL THEN email ELSE NULLIF($4, '') END,
                phone = CASE WHEN $5 IS NULL THEN phone ELSE NULLIF($5, '') END,
                status = COALESCE($6, status),
                notes = CASE WHEN $7 IS NULL THEN notes ELSE NULLIF($7, '') END,
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, first_name, last_name, phone, email, status, notes
            "#,
        )
        .bind(id)
        .bind(&input.first_name)
        .bind(&input.last_name)
        .bind(&input.email)
        .bind(&input.phone)
        .bind(new_status)
        .bind(&input.notes)
        .fetch_one(&*pool)
        .await?;

        // Audit a status transition (best effort).
        if let Ok(user) = ctx.data::<UserContext>() {
            if let (Some(old), Some(new)) = (old_status, new_status) {
                if old != new {
                    let _ = EventPublisher::publish(
                        &pool,
                        EventType::CustomerProfileStatusChanged,
                        "customer_profile",
                        profile.base.id,
                        Some(("team_profile", user.user_id)),
                        Some(serde_json::json!({
                            "old_status": format!("{:?}", old),
                            "new_status": format!("{:?}", new)
                        })),
                    )
                    .await;
                }
            }
        }

        Ok(CustomerProfileType::from(profile))
    }

    /// Delete a customer profile together with its access associations.
    ///
    /// Both deletes run in a single transaction so a mid-way failure cannot
    /// remove a customer's access rows while leaving the profile behind.
    /// Publishes a `CustomerProfileDeleted` audit event on success.
    async fn delete_customer_profile(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let mut tx = pool.begin().await?;

        sqlx::query("DELETE FROM customer_profile_access WHERE customer_profile_id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        let result = sqlx::query("DELETE FROM customer_profiles WHERE id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        tx.commit().await?;

        let deleted = result.rows_affected() > 0;

        if deleted {
            if let Ok(user) = ctx.data::<UserContext>() {
                let _ = EventPublisher::publish(
                    &pool,
                    EventType::CustomerProfileDeleted,
                    "customer_profile",
                    id,
                    Some(("team_profile", user.user_id)),
                    None,
                )
                .await;
            }
        }

        Ok(deleted)
    }
}
diff --git a/src/graphql/mutations/project.rs b/src/graphql/mutations/project.rs
new file mode 100644
index 0000000..771a87d
--- /dev/null
+++ b/src/graphql/mutations/project.rs
@@ -0,0 +1,908 @@
use async_graphql::{Context, InputObject, Object, Result};
use chrono::NaiveDate;
use rust_decimal::Decimal;
use uuid::Uuid;

use crate::auth::UserContext;
use crate::db::Database;
use crate::graphql::types::{ProjectTeamMemberType, ProjectType, WorkStatusType};
use crate::models::{
    EventType, Project, ProjectScope, ProjectScopeCategory, ProjectScopeTask, ProjectTeamMember,
    WorkStatus,
};
use crate::services::{EventPublisher, JobQueue};

/// Input for creating a project
#[derive(InputObject)]
pub struct CreateProjectInput {
    pub customer_id: Uuid,
    pub name: String,
    pub date: NaiveDate,
    pub status: Option<WorkStatusType>,
    pub account_address_id: Option<Uuid>,
    pub street_address: Option<String>,
    pub city: Option<String>,
    pub state:
Option, + pub zip_code: Option, +} + +/// Input for updating a project +#[derive(InputObject)] +pub struct UpdateProjectInput { + pub name: Option, + pub date: Option, + pub status: Option, + pub labor: Option, + pub amount: Option, + pub wave_service_id: Option, + pub notes: Option, + pub calendar_event_id: Option, + pub account_address_id: Option, + pub street_address: Option, + pub city: Option, + pub state: Option, + pub zip_code: Option, +} + +/// Input for creating a project scope +#[derive(InputObject)] +pub struct CreateProjectScopeInput { + pub name: String, + pub description: Option, + pub categories: Option>, +} + +/// Input for updating a project scope +#[derive(InputObject)] +pub struct UpdateProjectScopeInput { + pub name: Option, + pub description: Option, + pub is_active: Option, +} + +/// Input for creating a project scope task (no frequency - project tasks are one-time) +#[derive(InputObject)] +pub struct CreateProjectScopeTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for updating a project scope task (no frequency - project tasks are one-time) +#[derive(InputObject)] +pub struct UpdateProjectScopeTaskInput { + pub scope_description: Option, + pub checklist_description: Option, + pub session_description: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for creating a project scope category +#[derive(InputObject)] +pub struct CreateProjectCategoryInput { + pub name: String, + pub order: Option, + pub tasks: Option>, +} + +/// Input for updating a project scope category +#[derive(InputObject)] +pub struct UpdateProjectCategoryInput { + pub name: Option, + pub order: Option, +} + +#[derive(Default)] +pub struct ProjectMutation; + +/// Project scope GraphQL type +#[derive(async_graphql::SimpleObject)] +#[graphql(complex)] +pub struct ProjectScopeType { + pub id: Uuid, + pub name: 
String, + pub description: Option, + pub is_active: bool, +} + +#[async_graphql::ComplexObject] +impl ProjectScopeType { + /// Get categories for this scope + async fn categories(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let categories: Vec = sqlx::query_as::<_, ProjectScopeCategory>( + r#" + SELECT id, created_at, updated_at, scope_id, name, "order" + FROM project_scope_categories + WHERE scope_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(categories.into_iter().map(ProjectScopeCategoryType::from).collect()) + } +} + +impl From for ProjectScopeType { + fn from(scope: ProjectScope) -> Self { + Self { + id: scope.base.id, + name: scope.name, + description: scope.description, + is_active: scope.is_active, + } + } +} + +/// Project scope category GraphQL type +#[derive(async_graphql::SimpleObject)] +#[graphql(complex)] +pub struct ProjectScopeCategoryType { + pub id: Uuid, + pub name: String, + pub order: i32, +} + +#[async_graphql::ComplexObject] +impl ProjectScopeCategoryType { + /// Get tasks for this category + async fn tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let tasks: Vec = sqlx::query_as::<_, ProjectScopeTask>( + r#" + SELECT id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + FROM project_scope_tasks + WHERE category_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(tasks.into_iter().map(ProjectScopeTaskType::from).collect()) + } +} + +impl From for ProjectScopeCategoryType { + fn from(category: ProjectScopeCategory) -> Self { + Self { + id: category.base.id, + name: category.name, + order: category.order, + } + } +} + +/// Project scope task GraphQL type +/// Note: Unlike service tasks, project tasks don't have frequency - they are one-time 
+#[derive(async_graphql::SimpleObject)] +pub struct ProjectScopeTaskType { + pub id: Uuid, + pub scope_description: String, + pub checklist_description: String, + pub session_description: String, + pub order: i32, + pub estimated_minutes: Option, +} + +impl From for ProjectScopeTaskType { + fn from(task: ProjectScopeTask) -> Self { + Self { + id: task.base.id, + scope_description: task.scope_description, + checklist_description: task.checklist_description, + session_description: task.session_description, + order: task.order, + estimated_minutes: task.estimated_minutes, + } + } +} + +#[Object] +impl ProjectMutation { + /// Create a new project + async fn create_project( + &self, + ctx: &Context<'_>, + input: CreateProjectInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let status = input.status.map(WorkStatus::from).unwrap_or(WorkStatus::Scheduled); + + let project: Project = sqlx::query_as::<_, Project>( + r#" + INSERT INTO projects ( + id, created_at, updated_at, customer_id, name, date, status, + account_address_id, street_address, city, state, zip_code + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7, $8, $9 + ) + RETURNING id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + "#, + ) + .bind(input.customer_id) + .bind(&input.name) + .bind(input.date) + .bind(status) + .bind(input.account_address_id) + .bind(&input.street_address) + .bind(&input.city) + .bind(&input.state) + .bind(&input.zip_code) + .fetch_one(&*pool) + .await?; + + // Publish ProjectCreated event + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectCreated, + "project", + project.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(ProjectType::from(project)) + } + + /// Update 
a project + async fn update_project( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateProjectInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get old status for event tracking + let old_status: Option = sqlx::query_scalar( + "SELECT status FROM projects WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let new_status = input.status.map(WorkStatus::from); + + let project: Project = sqlx::query_as::<_, Project>( + r#" + UPDATE projects + SET + name = COALESCE($2, name), + date = COALESCE($3, date), + status = COALESCE($4, status), + labor = COALESCE($5, labor), + amount = COALESCE($6, amount), + wave_service_id = COALESCE($7, wave_service_id), + notes = COALESCE($8, notes), + calendar_event_id = COALESCE($9, calendar_event_id), + account_address_id = COALESCE($10, account_address_id), + street_address = COALESCE($11, street_address), + city = COALESCE($12, city), + state = COALESCE($13, state), + zip_code = COALESCE($14, zip_code), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + "#, + ) + .bind(id) + .bind(&input.name) + .bind(input.date) + .bind(new_status) + .bind(input.labor) + .bind(input.amount) + .bind(&input.wave_service_id) + .bind(&input.notes) + .bind(&input.calendar_event_id) + .bind(input.account_address_id) + .bind(&input.street_address) + .bind(&input.city) + .bind(&input.state) + .bind(&input.zip_code) + .fetch_one(&*pool) + .await?; + + // Publish event for status change + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + if let (Some(old), Some(new)) = (old_status, new_status) { + if old != new { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectStatusChanged, + "project", + project.base.id, + Some(("team_profile", user.user_id)), + 
Some(serde_json::json!({ + "old_status": format!("{:?}", old), + "new_status": format!("{:?}", new) + })), + ) + .await; + } + } + } + + Ok(ProjectType::from(project)) + } + + /// Delete a project + async fn delete_project(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete team member assignments first + sqlx::query("DELETE FROM project_team_members WHERE project_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete project scope tasks, categories, and scopes + let scope_ids: Vec = sqlx::query_scalar( + "SELECT id FROM project_scopes WHERE project_id = $1", + ) + .bind(id) + .fetch_all(&*pool) + .await?; + + for scope_id in scope_ids { + let category_ids: Vec = sqlx::query_scalar( + "SELECT id FROM project_scope_categories WHERE scope_id = $1", + ) + .bind(scope_id) + .fetch_all(&*pool) + .await?; + + for category_id in category_ids { + sqlx::query("DELETE FROM project_scope_tasks WHERE category_id = $1") + .bind(category_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM project_scope_categories WHERE scope_id = $1") + .bind(scope_id) + .execute(&*pool) + .await?; + } + + sqlx::query("DELETE FROM project_scopes WHERE project_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Delete the project + let result = sqlx::query("DELETE FROM projects WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Publish ProjectDeleted event + if result.rows_affected() > 0 { + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectDeleted, + "project", + id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + } + + Ok(result.rows_affected() > 0) + } + + /// Assign a team member to a project + async fn assign_project_team_member( + &self, + ctx: &Context<'_>, + project_id: Uuid, + team_profile_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = 
db.pool().await; + + let member: ProjectTeamMember = sqlx::query_as::<_, ProjectTeamMember>( + r#" + INSERT INTO project_team_members (id, project_id, team_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, NOW()) + ON CONFLICT (project_id, team_profile_id) DO UPDATE SET project_id = EXCLUDED.project_id + RETURNING id, project_id, team_profile_id, created_at + "#, + ) + .bind(project_id) + .bind(team_profile_id) + .fetch_one(&*pool) + .await?; + + // Publish ProjectAssigned event + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectAssigned, + "project", + project_id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "assigned_team_profile_id": team_profile_id.to_string() + })), + ) + .await; + } + + Ok(ProjectTeamMemberType::from(member)) + } + + /// Remove a team member assignment from a project + async fn remove_project_team_member(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the project_id and team_profile_id before deleting + let assignment: Option<(Uuid, Uuid)> = sqlx::query_as( + "SELECT project_id, team_profile_id FROM project_team_members WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let result = sqlx::query("DELETE FROM project_team_members WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Publish ProjectUnassigned event + if result.rows_affected() > 0 { + if let Some((project_id, team_profile_id)) = assignment { + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectUnassigned, + "project", + project_id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "unassigned_team_profile_id": team_profile_id.to_string() + })), + ) + .await; + } + } + } + + Ok(result.rows_affected() > 0) + } + + // 
==================== PROJECT SCOPE MUTATIONS ==================== + + /// Create a new scope for a project (ONE scope per project - deactivates existing) + async fn create_project_scope( + &self, + ctx: &Context<'_>, + project_id: Uuid, + input: CreateProjectScopeInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get project's account_address_id and account_id if it has one + let project: Project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects WHERE id = $1 + "#, + ) + .bind(project_id) + .fetch_one(&*pool) + .await?; + + // Get account_id from account_address if present + let account_id: Option = if let Some(address_id) = project.account_address_id { + sqlx::query_scalar("SELECT account_id FROM account_addresses WHERE id = $1") + .bind(address_id) + .fetch_optional(&*pool) + .await? 
+ } else { + None + }; + + // Deactivate any existing active scopes for this project + sqlx::query( + r#" + UPDATE project_scopes + SET is_active = false, updated_at = NOW() + WHERE project_id = $1 AND is_active = true + "#, + ) + .bind(project_id) + .execute(&*pool) + .await?; + + // Create the scope + let scope: ProjectScope = sqlx::query_as::<_, ProjectScope>( + r#" + INSERT INTO project_scopes ( + id, created_at, updated_at, name, project_id, account_id, account_address_id, description, is_active + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, true) + RETURNING id, created_at, updated_at, name, project_id, account_id, account_address_id, description, is_active + "#, + ) + .bind(&input.name) + .bind(project_id) + .bind(account_id) + .bind(project.account_address_id) + .bind(&input.description) + .fetch_one(&*pool) + .await?; + + // Create categories and tasks if provided + if let Some(categories) = input.categories { + for (cat_order, cat_input) in categories.into_iter().enumerate() { + let category: ProjectScopeCategory = sqlx::query_as::<_, ProjectScopeCategory>( + r#" + INSERT INTO project_scope_categories (id, created_at, updated_at, scope_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, scope_id, name, "order" + "#, + ) + .bind(scope.base.id) + .bind(&cat_input.name) + .bind(cat_input.order.unwrap_or(cat_order as i32)) + .fetch_one(&*pool) + .await?; + + if let Some(tasks) = cat_input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + sqlx::query( + r#" + INSERT INTO project_scope_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + "#, + ) + .bind(category.base.id) + .bind(&task_input.scope_description) + 
.bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description))
                    .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description))
                    .bind(task_input.order.unwrap_or(task_order as i32))
                    .bind(task_input.estimated_minutes)
                    .execute(&*pool)
                    .await?;
                }
            }
        }

        Ok(ProjectScopeType::from(scope))
    }

    /// Update a project scope.
    ///
    /// When `is_active` is set to true, every other scope of the same project
    /// is deactivated first so at most one scope stays active per project.
    /// NULL inputs leave the corresponding column unchanged (COALESCE).
    async fn update_project_scope(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateProjectScopeInput,
    ) -> Result<ProjectScopeType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Enforce the one-active-scope-per-project invariant.
        if input.is_active == Some(true) {
            let project_id: Uuid = sqlx::query_scalar(
                "SELECT project_id FROM project_scopes WHERE id = $1",
            )
            .bind(id)
            .fetch_one(&*pool)
            .await?;

            sqlx::query(
                r#"
                UPDATE project_scopes
                SET is_active = false, updated_at = NOW()
                WHERE project_id = $1 AND id != $2 AND is_active = true
                "#,
            )
            .bind(project_id)
            .bind(id)
            .execute(&*pool)
            .await?;
        }

        let scope: ProjectScope = sqlx::query_as::<_, ProjectScope>(
            r#"
            UPDATE project_scopes
            SET
                name = COALESCE($2, name),
                description = COALESCE($3, description),
                is_active = COALESCE($4, is_active),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, name, project_id, account_id, account_address_id, description, is_active
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(&input.description)
        .bind(input.is_active)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectScopeType::from(scope))
    }

    /// Delete a project scope and its category/task tree (hard delete, since
    /// sessions are tracked separately).
    ///
    /// Tasks, categories, and the scope row are removed in one transaction
    /// using set-based deletes instead of one round trip per category.
    async fn delete_project_scope(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let mut tx = pool.begin().await?;

        sqlx::query(
            r#"
            DELETE FROM project_scope_tasks
            WHERE category_id IN (SELECT id FROM project_scope_categories WHERE scope_id = $1)
            "#,
        )
        .bind(id)
        .execute(&mut *tx)
        .await?;

        sqlx::query("DELETE FROM project_scope_categories WHERE scope_id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        let result = sqlx::query("DELETE FROM project_scopes WHERE id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        tx.commit().await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== SCOPE CATEGORY MUTATIONS ====================

    /// Create a new category within a project scope.
    ///
    /// Without an explicit order the category is appended after the current
    /// maximum order. Optional tasks are inserted in input order; their
    /// checklist/session descriptions default to the scope description.
    async fn create_project_scope_category(
        &self,
        ctx: &Context<'_>,
        scope_id: Uuid,
        input: CreateProjectCategoryInput,
    ) -> Result<ProjectScopeCategoryType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Get max order for existing categories (NULL when none exist yet).
        let max_order: Option<i32> = sqlx::query_scalar(
            r#"SELECT MAX("order") FROM project_scope_categories WHERE scope_id = $1"#,
        )
        .bind(scope_id)
        .fetch_one(&*pool)
        .await?;

        let category: ProjectScopeCategory = sqlx::query_as::<_, ProjectScopeCategory>(
            r#"
            INSERT INTO project_scope_categories (id, created_at, updated_at, scope_id, name, "order")
            VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3)
            RETURNING id, created_at, updated_at, scope_id, name, "order"
            "#,
        )
        .bind(scope_id)
        .bind(&input.name)
        .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1))
        .fetch_one(&*pool)
        .await?;

        if let Some(tasks) = input.tasks {
            for (task_order, task_input) in tasks.into_iter().enumerate() {
                sqlx::query(
                    r#"
                    INSERT INTO project_scope_tasks (
                        id, created_at, updated_at, category_id, scope_description,
                        checklist_description, session_description, "order", estimated_minutes
                    )
                    VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6)
                    "#,
                )
                .bind(category.base.id)
                .bind(&task_input.scope_description)
                .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description))
                .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description))
                .bind(task_input.order.unwrap_or(task_order as i32))
                .bind(task_input.estimated_minutes)
                .execute(&*pool)
                .await?;
            }
        }

        Ok(ProjectScopeCategoryType::from(category))
    }

    /// Update a project scope category's name and/or order.
    async fn update_project_scope_category(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateProjectCategoryInput,
    ) -> Result<ProjectScopeCategoryType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let category: ProjectScopeCategory = sqlx::query_as::<_, ProjectScopeCategory>(
            r#"
            UPDATE project_scope_categories
            SET
                name = COALESCE($2, name),
                "order" = COALESCE($3, "order"),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, scope_id, name, "order"
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(input.order)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectScopeCategoryType::from(category))
    }

    /// Delete a project scope category together with its tasks.
    ///
    /// Both deletes run in one transaction so a failure cannot orphan tasks.
    async fn delete_project_scope_category(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let mut tx = pool.begin().await?;

        sqlx::query("DELETE FROM project_scope_tasks WHERE category_id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        let result = sqlx::query("DELETE FROM project_scope_categories WHERE id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        tx.commit().await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== SCOPE TASK MUTATIONS ====================

    /// Create a new task within a project scope category
    /// Note: Project tasks don't have frequency - they are one-time tasks
    async fn create_project_scope_task(
        &self,
        ctx: &Context<'_>,
        category_id: Uuid,
        input: CreateProjectScopeTaskInput,
    ) -> Result<ProjectScopeTaskType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Get max order for
existing tasks + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM project_scope_tasks WHERE category_id = $1"#, + ) + .bind(category_id) + .fetch_one(&*pool) + .await?; + + let task: ProjectScopeTask = sqlx::query_as::<_, ProjectScopeTask>( + r#" + INSERT INTO project_scope_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + RETURNING id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + "#, + ) + .bind(category_id) + .bind(&input.scope_description) + .bind(input.checklist_description.as_deref().unwrap_or(&input.scope_description)) + .bind(input.session_description.as_deref().unwrap_or(&input.scope_description)) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTaskType::from(task)) + } + + /// Update a project scope task + async fn update_project_scope_task( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateProjectScopeTaskInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let task: ProjectScopeTask = sqlx::query_as::<_, ProjectScopeTask>( + r#" + UPDATE project_scope_tasks + SET + scope_description = COALESCE($2, scope_description), + checklist_description = COALESCE($3, checklist_description), + session_description = COALESCE($4, session_description), + "order" = COALESCE($5, "order"), + estimated_minutes = COALESCE($6, estimated_minutes), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + "#, + ) + .bind(id) + .bind(&input.scope_description) + .bind(&input.checklist_description) + .bind(&input.session_description) + .bind(input.order) + 
.bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTaskType::from(task)) + } + + /// Delete a project scope task + async fn delete_project_scope_task(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM project_scope_tasks WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } +} + diff --git a/src/graphql/mutations/project_scope_template.rs b/src/graphql/mutations/project_scope_template.rs new file mode 100644 index 0000000..10c89eb --- /dev/null +++ b/src/graphql/mutations/project_scope_template.rs @@ -0,0 +1,524 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + CreateProjectScopeTemplateCategoryInput, CreateProjectScopeTemplateInput, CreateProjectScopeTemplateTaskInput, + ImportProjectScopeTemplateInput, ProjectScopeTemplateCategoryType, ProjectScopeTemplateTaskType, ProjectScopeTemplateType, + UpdateProjectScopeTemplateCategoryInput, UpdateProjectScopeTemplateInput, UpdateProjectScopeTemplateTaskInput, +}; +use crate::models::{ProjectScopeTemplate, ProjectScopeTemplateCategory, ProjectScopeTemplateTask}; + +#[derive(Default)] +pub struct ProjectScopeTemplateMutation; + +#[Object] +impl ProjectScopeTemplateMutation { + // ==================== PROJECT SCOPE TEMPLATE MUTATIONS ==================== + + /// Create a new project scope template + async fn create_project_scope_template( + &self, + ctx: &Context<'_>, + input: CreateProjectScopeTemplateInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Create the template + let template: ProjectScopeTemplate = sqlx::query_as::<_, ProjectScopeTemplate>( + r#" + INSERT INTO project_scope_templates (id, created_at, updated_at, name, description, is_active) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, true) + RETURNING id, created_at, updated_at, 
name, description, is_active + "#, + ) + .bind(&input.name) + .bind(&input.description) + .fetch_one(&*pool) + .await?; + + // Create categories if provided + if let Some(categories) = input.categories { + for (cat_order, cat_input) in categories.into_iter().enumerate() { + let category: ProjectScopeTemplateCategory = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + INSERT INTO project_scope_template_categories (id, created_at, updated_at, template_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(template.base.id) + .bind(&cat_input.name) + .bind(cat_input.order.unwrap_or(cat_order as i32)) + .fetch_one(&*pool) + .await?; + + if let Some(tasks) = cat_input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + sqlx::query( + r#" + INSERT INTO project_scope_template_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + "#, + ) + .bind(category.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.order.unwrap_or(task_order as i32)) + .bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + } + } + + Ok(ProjectScopeTemplateType::from(template)) + } + + /// Update a project scope template + async fn update_project_scope_template( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateProjectScopeTemplateInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let template: ProjectScopeTemplate = sqlx::query_as::<_, ProjectScopeTemplate>( + r#" + UPDATE project_scope_templates + SET + name = COALESCE($2, name), + description = 
COALESCE($3, description), + is_active = COALESCE($4, is_active), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, name, description, is_active + "#, + ) + .bind(id) + .bind(&input.name) + .bind(&input.description) + .bind(input.is_active) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTemplateType::from(template)) + } + + /// Delete a project scope template (cascades to categories and tasks) + async fn delete_project_scope_template(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // CASCADE handles categories and tasks + let result = sqlx::query("DELETE FROM project_scope_templates WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Import a project scope template from JSON structure + async fn import_project_scope_template( + &self, + ctx: &Context<'_>, + input: ImportProjectScopeTemplateInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Check if template with same name exists + let existing: Option = sqlx::query_as::<_, ProjectScopeTemplate>( + "SELECT id, created_at, updated_at, name, description, is_active FROM project_scope_templates WHERE name = $1", + ) + .bind(&input.name) + .fetch_optional(&*pool) + .await?; + + if let Some(existing_template) = existing { + if input.replace.unwrap_or(false) { + // Delete existing template (cascade deletes categories/tasks) + sqlx::query("DELETE FROM project_scope_templates WHERE id = $1") + .bind(existing_template.base.id) + .execute(&*pool) + .await?; + } else { + return Err(async_graphql::Error::new(format!( + "Template with name '{}' already exists. 
Use replace: true to overwrite.", + input.name + ))); + } + } + + // Create the template + let template: ProjectScopeTemplate = sqlx::query_as::<_, ProjectScopeTemplate>( + r#" + INSERT INTO project_scope_templates (id, created_at, updated_at, name, description, is_active) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, true) + RETURNING id, created_at, updated_at, name, description, is_active + "#, + ) + .bind(&input.name) + .bind(&input.description) + .fetch_one(&*pool) + .await?; + + // Create categories and tasks + for cat_input in input.categories { + let category: ProjectScopeTemplateCategory = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + INSERT INTO project_scope_template_categories (id, created_at, updated_at, template_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(template.base.id) + .bind(&cat_input.name) + .bind(cat_input.order) + .fetch_one(&*pool) + .await?; + + for task_input in cat_input.tasks { + sqlx::query( + r#" + INSERT INTO project_scope_template_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + "#, + ) + .bind(category.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.order) + .bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + + Ok(ProjectScopeTemplateType::from(template)) + } + + // ==================== PROJECT SCOPE TEMPLATE CATEGORY MUTATIONS ==================== + + /// Create a new category in a project scope template + async fn create_project_scope_template_category( + &self, + ctx: &Context<'_>, + template_id: Uuid, + 
input: CreateProjectScopeTemplateCategoryInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get max order for existing categories + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM project_scope_template_categories WHERE template_id = $1"#, + ) + .bind(template_id) + .fetch_one(&*pool) + .await?; + + let category: ProjectScopeTemplateCategory = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + INSERT INTO project_scope_template_categories (id, created_at, updated_at, template_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(template_id) + .bind(&input.name) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .fetch_one(&*pool) + .await?; + + // Create tasks if provided + if let Some(tasks) = input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + sqlx::query( + r#" + INSERT INTO project_scope_template_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + "#, + ) + .bind(category.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.order.unwrap_or(task_order as i32)) + .bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + + Ok(ProjectScopeTemplateCategoryType::from(category)) + } + + /// Update a project scope template category + async fn update_project_scope_template_category( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateProjectScopeTemplateCategoryInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let category: 
ProjectScopeTemplateCategory = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + UPDATE project_scope_template_categories + SET + name = COALESCE($2, name), + "order" = COALESCE($3, "order"), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(id) + .bind(&input.name) + .bind(input.order) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTemplateCategoryType::from(category)) + } + + /// Delete a project scope template category (cascades to tasks) + async fn delete_project_scope_template_category(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // CASCADE handles tasks + let result = sqlx::query("DELETE FROM project_scope_template_categories WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== PROJECT SCOPE TEMPLATE TASK MUTATIONS ==================== + + /// Create a new task in a project scope template category + async fn create_project_scope_template_task( + &self, + ctx: &Context<'_>, + category_id: Uuid, + input: CreateProjectScopeTemplateTaskInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get max order for existing tasks + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM project_scope_template_tasks WHERE category_id = $1"#, + ) + .bind(category_id) + .fetch_one(&*pool) + .await?; + + let task: ProjectScopeTemplateTask = sqlx::query_as::<_, ProjectScopeTemplateTask>( + r#" + INSERT INTO project_scope_template_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + RETURNING id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + "#, + ) + .bind(category_id) 
+ .bind(&input.scope_description) + .bind(input.checklist_description.as_deref().unwrap_or(&input.scope_description)) + .bind(input.session_description.as_deref().unwrap_or(&input.scope_description)) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTemplateTaskType::from(task)) + } + + /// Update a project scope template task + async fn update_project_scope_template_task( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateProjectScopeTemplateTaskInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let task: ProjectScopeTemplateTask = sqlx::query_as::<_, ProjectScopeTemplateTask>( + r#" + UPDATE project_scope_template_tasks + SET + scope_description = COALESCE($2, scope_description), + checklist_description = COALESCE($3, checklist_description), + session_description = COALESCE($4, session_description), + "order" = COALESCE($5, "order"), + estimated_minutes = COALESCE($6, estimated_minutes), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + "#, + ) + .bind(id) + .bind(&input.scope_description) + .bind(&input.checklist_description) + .bind(&input.session_description) + .bind(input.order) + .bind(input.estimated_minutes) + .fetch_one(&*pool) + .await?; + + Ok(ProjectScopeTemplateTaskType::from(task)) + } + + /// Delete a project scope template task + async fn delete_project_scope_template_task(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM project_scope_template_tasks WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + // ==================== TEMPLATE INSTANTIATION ==================== + + /// Create a project scope from a project scope template + async fn 
create_project_scope_from_template( + &self, + ctx: &Context<'_>, + template_id: Uuid, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the template + let template: ProjectScopeTemplate = sqlx::query_as::<_, ProjectScopeTemplate>( + "SELECT id, created_at, updated_at, name, description, is_active FROM project_scope_templates WHERE id = $1", + ) + .bind(template_id) + .fetch_one(&*pool) + .await?; + + // Deactivate any existing active scopes for this project + sqlx::query( + r#" + UPDATE project_scopes + SET is_active = false, updated_at = NOW() + WHERE project_id = $1 AND is_active = true + "#, + ) + .bind(project_id) + .execute(&*pool) + .await?; + + // Create the project scope + let scope_id: Uuid = sqlx::query_scalar( + r#" + INSERT INTO project_scopes ( + id, created_at, updated_at, name, project_id, account_id, account_address_id, description, is_active + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, NULL, NULL, $3, true) + RETURNING id + "#, + ) + .bind(&template.name) + .bind(project_id) + .bind(&template.description) + .fetch_one(&*pool) + .await?; + + // Get template categories + let categories: Vec = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + SELECT id, created_at, updated_at, template_id, name, "order" + FROM project_scope_template_categories + WHERE template_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(template_id) + .fetch_all(&*pool) + .await?; + + // Copy categories and tasks + for category in categories { + let new_category_id: Uuid = sqlx::query_scalar( + r#" + INSERT INTO project_scope_categories (id, created_at, updated_at, scope_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id + "#, + ) + .bind(scope_id) + .bind(&category.name) + .bind(category.order) + .fetch_one(&*pool) + .await?; + + // Get tasks for this category + let tasks: Vec = sqlx::query_as::<_, ProjectScopeTemplateTask>( + r#" + SELECT id, created_at, updated_at, 
category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + FROM project_scope_template_tasks + WHERE category_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(category.base.id) + .fetch_all(&*pool) + .await?; + + // Copy tasks (project tasks don't have frequency) + for task in tasks { + sqlx::query( + r#" + INSERT INTO project_scope_tasks ( + id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6) + "#, + ) + .bind(new_category_id) + .bind(&task.scope_description) + .bind(&task.checklist_description) + .bind(&task.session_description) + .bind(task.order) + .bind(task.estimated_minutes) + .execute(&*pool) + .await?; + } + } + + Ok(true) + } +} diff --git a/src/graphql/mutations/report.rs b/src/graphql/mutations/report.rs new file mode 100644 index 0000000..2719f23 --- /dev/null +++ b/src/graphql/mutations/report.rs @@ -0,0 +1,546 @@ +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ReportProjectType, ReportServiceType, ReportStatusType, ReportType}; +use crate::models::{EventType, Report, ReportProject, ReportService, ReportStatus}; +use crate::services::EventPublisher; +use crate::auth::UserContext; + +/// Dispatch profile ID - excluded from labor share calculations +/// Replace with your dispatch profile UUID +const DISPATCH_PROFILE_ID: &str = "00000000-0000-0000-0000-000000000000"; + +/// Input for creating a report +#[derive(InputObject)] +pub struct CreateReportInput { + pub team_profile_id: Uuid, + pub start_date: NaiveDate, + pub end_date: NaiveDate, +} + +/// Input for updating a report +#[derive(InputObject)] +pub struct UpdateReportInput { + pub start_date: Option, + pub end_date: Option, + pub status: Option, +} + 
+#[derive(Default)] +pub struct ReportMutation; + +#[Object] +impl ReportMutation { + /// Create a new report for a team member + async fn create_report(&self, ctx: &Context<'_>, input: CreateReportInput) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Validate date range + if input.end_date < input.start_date { + return Err("End date must be on or after start date".into()); + } + + let report: Report = sqlx::query_as::<_, Report>( + r#" + INSERT INTO reports ( + id, created_at, updated_at, team_profile_id, start_date, end_date, status + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, 'DRAFT' + ) + RETURNING id, created_at, updated_at, team_profile_id, start_date, end_date, status + "#, + ) + .bind(input.team_profile_id) + .bind(input.start_date) + .bind(input.end_date) + .fetch_one(&*pool) + .await?; + + Ok(ReportType::from(report)) + } + + /// Update a report + async fn update_report( + &self, + ctx: &Context<'_>, + id: Uuid, + input: UpdateReportInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Validate date range if both provided + if let (Some(start), Some(end)) = (input.start_date, input.end_date) { + if end < start { + return Err("End date must be on or after start date".into()); + } + } + + // Get old status for event tracking + let old_status: Option = sqlx::query_scalar( + "SELECT status FROM reports WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let new_status = input.status.map(ReportStatus::from); + + let report: Report = sqlx::query_as::<_, Report>( + r#" + UPDATE reports + SET + start_date = COALESCE($2, start_date), + end_date = COALESCE($3, end_date), + status = COALESCE($4, status), + updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, team_profile_id, start_date, end_date, status + "#, + ) + .bind(id) + .bind(input.start_date) + .bind(input.end_date) + .bind(new_status) + .fetch_one(&*pool) + .await?; + + // Publish events 
for status changes + if let Ok(user) = ctx.data::() { + if let (Some(old), Some(new)) = (old_status, new_status) { + if old != new { + let event_type = match new { + ReportStatus::Finalized => Some(EventType::ReportSubmitted), + ReportStatus::Paid => Some(EventType::ReportApproved), + _ => None, + }; + + if let Some(event_type) = event_type { + let _ = EventPublisher::publish( + &pool, + event_type, + "report", + report.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + } + } + } + + Ok(ReportType::from(report)) + } + + /// Delete a report and all its entries + async fn delete_report(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Delete entries first (cascade should handle this, but be explicit) + sqlx::query("DELETE FROM report_services WHERE report_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + sqlx::query("DELETE FROM report_projects WHERE report_id = $1") + .bind(id) + .execute(&*pool) + .await?; + + let result = sqlx::query("DELETE FROM reports WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Add a service to a report with calculated labor share + /// The labor share is calculated and stored as a snapshot at this moment + async fn add_service_to_report( + &self, + ctx: &Context<'_>, + report_id: Uuid, + service_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Get the report to find the team profile + let report = sqlx::query_as::<_, Report>( + "SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status FROM reports WHERE id = $1", + ) + .bind(report_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Report not found")?; + + // Verify the team member is assigned to this service + let is_assigned: bool = sqlx::query_scalar( + r#" + SELECT EXISTS( + SELECT 1 FROM service_team_members + WHERE service_id = $1 AND team_profile_id = $2 + ) + "#, + ) + .bind(service_id) + .bind(report.team_profile_id) + .fetch_one(&*pool) + .await?; + + if !is_assigned { + return Err("Team member is not assigned to this service".into()); + } + + // Verify the service is completed + let is_completed: bool = sqlx::query_scalar( + "SELECT status = 'COMPLETED' FROM services WHERE id = $1", + ) + .bind(service_id) + .fetch_one(&*pool) + .await?; + + if !is_completed { + return Err("Service must be completed before adding to report".into()); + } + + // Calculate labor share + // 1. Get the service date and location + let service_info: ServiceInfo = sqlx::query_as::<_, ServiceInfo>( + "SELECT date, account_address_id FROM services WHERE id = $1", + ) + .bind(service_id) + .fetch_one(&*pool) + .await?; + + // 2. Get the labor rate for the location on the service date + let labor_amount: Option = sqlx::query_scalar( + r#" + SELECT amount + FROM labors + WHERE account_address_id = $1 + AND start_date <= $2 + AND (end_date IS NULL OR end_date >= $2) + ORDER BY start_date DESC + LIMIT 1 + "#, + ) + .bind(service_info.account_address_id) + .bind(service_info.date) + .fetch_optional(&*pool) + .await?; + + let labor_total = labor_amount.unwrap_or(Decimal::ZERO); + + // 3. Count team members (excluding dispatch) + let team_count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM service_team_members + WHERE service_id = $1 AND team_profile_id != $2 + "#, + ) + .bind(service_id) + .bind(dispatch_id) + .fetch_one(&*pool) + .await?; + + if team_count == 0 { + return Err("No eligible team members for labor calculation".into()); + } + + // 4. 
Calculate share + let labor_share = labor_total / Decimal::from(team_count); + + // Insert the report service with snapshot labor share + let entry: ReportService = sqlx::query_as::<_, ReportService>( + r#" + INSERT INTO report_services (id, report_id, service_id, labor_share, created_at) + VALUES (gen_random_uuid(), $1, $2, $3, NOW()) + RETURNING id, report_id, service_id, labor_share, created_at + "#, + ) + .bind(report_id) + .bind(service_id) + .bind(labor_share) + .fetch_one(&*pool) + .await?; + + Ok(ReportServiceType::from(entry)) + } + + /// Add a project to a report with calculated labor share + async fn add_project_to_report( + &self, + ctx: &Context<'_>, + report_id: Uuid, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Get the report to find the team profile + let report = sqlx::query_as::<_, Report>( + "SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status FROM reports WHERE id = $1", + ) + .bind(report_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Report not found")?; + + // Verify the team member is assigned to this project + let is_assigned: bool = sqlx::query_scalar( + r#" + SELECT EXISTS( + SELECT 1 FROM project_team_members + WHERE project_id = $1 AND team_profile_id = $2 + ) + "#, + ) + .bind(project_id) + .bind(report.team_profile_id) + .fetch_one(&*pool) + .await?; + + if !is_assigned { + return Err("Team member is not assigned to this project".into()); + } + + // Verify the project is completed + let is_completed: bool = sqlx::query_scalar( + "SELECT status = 'COMPLETED' FROM projects WHERE id = $1", + ) + .bind(project_id) + .fetch_one(&*pool) + .await?; + + if !is_completed { + return Err("Project must be completed before adding to report".into()); + } + + // Get project labor amount + let labor_total: Option = sqlx::query_scalar( + "SELECT labor FROM projects WHERE id = $1", + ) + .bind(project_id) + .fetch_one(&*pool) + .await?; + + let labor_total = labor_total.unwrap_or(Decimal::ZERO); + + // Count team members (excluding dispatch) + let team_count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM project_team_members + WHERE project_id = $1 AND team_profile_id != $2 + "#, + ) + .bind(project_id) + .bind(dispatch_id) + .fetch_one(&*pool) + .await?; + + if team_count == 0 { + return Err("No eligible team members for labor calculation".into()); + } + + // Calculate share + let labor_share = labor_total / Decimal::from(team_count); + + // Insert the report project with snapshot labor share + let entry: ReportProject = sqlx::query_as::<_, ReportProject>( + r#" + INSERT INTO report_projects (id, report_id, project_id, labor_share, created_at) + VALUES (gen_random_uuid(), $1, $2, $3, NOW()) + RETURNING id, report_id, project_id, labor_share, created_at + "#, + ) + .bind(report_id) + .bind(project_id) + .bind(labor_share) + .fetch_one(&*pool) + .await?; + + Ok(ReportProjectType::from(entry)) + } + + /// Remove a service from a report + async fn remove_service_from_report(&self, ctx: 
&Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM report_services WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Remove a project from a report + async fn remove_project_from_report(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let result = sqlx::query("DELETE FROM report_projects WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() > 0) + } + + /// Add all eligible services to a report at once + /// Returns the number of services added + async fn add_all_eligible_services_to_report( + &self, + ctx: &Context<'_>, + report_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Get the report + let report = sqlx::query_as::<_, Report>( + "SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status FROM reports WHERE id = $1", + ) + .bind(report_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Report not found")?; + + // Find all eligible services and insert them + let result = sqlx::query( + r#" + INSERT INTO report_services (id, report_id, service_id, labor_share, created_at) + SELECT + gen_random_uuid(), + $1, + s.id, + COALESCE(l.amount, 0) / GREATEST( + (SELECT COUNT(*) FROM service_team_members stm2 + WHERE stm2.service_id = s.id AND stm2.team_profile_id != $5), + 1 + ), + NOW() + FROM services s + JOIN service_team_members stm ON stm.service_id = s.id AND stm.team_profile_id = $2 + LEFT JOIN account_addresses aa ON aa.id = s.account_address_id + LEFT JOIN LATERAL ( + SELECT amount + FROM labors + WHERE account_address_id = aa.id + AND start_date <= s.date + AND (end_date IS NULL OR end_date >= s.date) + ORDER BY start_date DESC + LIMIT 1 + ) l ON true + WHERE s.status = 'COMPLETED' + AND s.date >= $3 + AND s.date <= $4 + AND NOT EXISTS ( + SELECT 1 + FROM report_services rs + JOIN reports r ON r.id = rs.report_id + WHERE rs.service_id = s.id + AND r.team_profile_id = $2 + ) + AND (SELECT COUNT(*) FROM service_team_members stm2 + WHERE stm2.service_id = s.id AND stm2.team_profile_id != $5) > 0 + "#, + ) + .bind(report_id) + .bind(report.team_profile_id) + .bind(report.start_date) + .bind(report.end_date) + .bind(dispatch_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() as i32) + } + + /// Add all eligible projects to a report at once + /// Returns the number of projects added + async fn add_all_eligible_projects_to_report( + &self, + ctx: &Context<'_>, + report_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Get the report + let report = sqlx::query_as::<_, Report>( + "SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status FROM reports WHERE id = $1", + ) + .bind(report_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Report not found")?; + + // Find all eligible projects and insert them + let result = sqlx::query( + r#" + INSERT INTO report_projects (id, report_id, project_id, labor_share, created_at) + SELECT + gen_random_uuid(), + $1, + p.id, + COALESCE(p.labor, 0) / GREATEST( + (SELECT COUNT(*) FROM project_team_members ptm2 + WHERE ptm2.project_id = p.id AND ptm2.team_profile_id != $5), + 1 + ), + NOW() + FROM projects p + JOIN project_team_members ptm ON ptm.project_id = p.id AND ptm.team_profile_id = $2 + WHERE p.status = 'COMPLETED' + AND p.date >= $3 + AND p.date <= $4 + AND NOT EXISTS ( + SELECT 1 + FROM report_projects rp + JOIN reports r ON r.id = rp.report_id + WHERE rp.project_id = p.id + AND r.team_profile_id = $2 + ) + AND (SELECT COUNT(*) FROM project_team_members ptm2 + WHERE ptm2.project_id = p.id AND ptm2.team_profile_id != $5) > 0 + "#, + ) + .bind(report_id) + .bind(report.team_profile_id) + .bind(report.start_date) + .bind(report.end_date) + .bind(dispatch_id) + .execute(&*pool) + .await?; + + Ok(result.rows_affected() as i32) + } +} + +/// Helper struct for service info query +#[derive(sqlx::FromRow)] +struct ServiceInfo { + date: NaiveDate, + account_address_id: Uuid, +} diff --git a/src/graphql/mutations/service.rs b/src/graphql/mutations/service.rs new file mode 100644 index 0000000..fdf1b45 --- /dev/null +++ b/src/graphql/mutations/service.rs @@ -0,0 +1,427 @@ +use async_graphql::{Context, InputObject, Object, Result}; +use chrono::{Datelike, NaiveDate}; +use uuid::Uuid; + +use crate::auth::UserContext; +use crate::db::Database; +use crate::graphql::types::{ServiceTeamMemberType, ServiceType, WorkStatusType}; +use crate::models::{EventType, Schedule, Service, ServiceTeamMember, WorkStatus}; +use crate::services::{EventPublisher, JobQueue}; + +/// Input for creating a service +#[derive(InputObject)] +pub struct CreateServiceInput { + pub account_id: Uuid, + pub account_address_id: Uuid, + pub date: NaiveDate, + pub status: Option, + pub 
notes: Option<String>,
}

/// Input for updating a service
#[derive(InputObject)]
pub struct UpdateServiceInput {
    pub date: Option<NaiveDate>,
    pub status: Option<WorkStatusType>,
    pub notes: Option<String>,
    // NOTE(review): external calendar event id — assumed to be a String
    // (generic params were stripped in the recovered dump); confirm.
    pub calendar_event_id: Option<String>,
}

/// Input for generating services from a schedule
#[derive(InputObject)]
pub struct GenerateServicesInput {
    pub account_address_id: Uuid,
    pub schedule_id: Uuid,
    pub month: i32,
    pub year: i32,
}

#[derive(Default)]
pub struct ServiceMutation;

#[Object]
impl ServiceMutation {
    /// Create a new service
    ///
    /// Defaults `status` to SCHEDULED when not supplied, then publishes a
    /// best-effort `ServiceCreated` event (event errors are ignored).
    async fn create_service(
        &self,
        ctx: &Context<'_>,
        input: CreateServiceInput,
    ) -> Result<ServiceType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let status = input.status.map(WorkStatus::from).unwrap_or(WorkStatus::Scheduled);

        let service: Service = sqlx::query_as::<_, Service>(
            r#"
            INSERT INTO services (
                id, created_at, updated_at, account_id, account_address_id, date, status, notes
            )
            VALUES (
                gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5
            )
            RETURNING id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id
            "#,
        )
        .bind(input.account_id)
        .bind(input.account_address_id)
        .bind(input.date)
        .bind(status)
        .bind(&input.notes)
        .fetch_one(&*pool)
        .await?;

        // Publish ServiceCreated event (best-effort: errors are dropped).
        if let (Ok(user), Ok(job_queue)) = (ctx.data::<UserContext>(), ctx.data::<JobQueue>()) {
            let _ = EventPublisher::publish_and_queue(
                &pool,
                job_queue,
                EventType::ServiceCreated,
                "service",
                service.base.id,
                Some(("team_profile", user.user_id)),
                None,
            )
            .await;
        }

        Ok(ServiceType::from(service))
    }

    /// Update a service
    ///
    /// Patch semantics via COALESCE: omitted fields keep their current value.
    /// NOTE(review): this also means a field can never be reset back to NULL
    /// through this mutation.
    async fn update_service(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateServiceInput,
    ) -> Result<ServiceType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Snapshot the old status so a change event can be emitted afterwards.
        let old_status: Option<WorkStatus> = sqlx::query_scalar(
            "SELECT status FROM services WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        let new_status = input.status.map(WorkStatus::from);

        let service: Service = sqlx::query_as::<_, Service>(
            r#"
            UPDATE services
            SET
                date = COALESCE($2, date),
                status = COALESCE($3, status),
                notes = COALESCE($4, notes),
                calendar_event_id = COALESCE($5, calendar_event_id),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id
            "#,
        )
        .bind(id)
        .bind(input.date)
        .bind(new_status)
        .bind(&input.notes)
        .bind(&input.calendar_event_id)
        .fetch_one(&*pool)
        .await?;

        // Publish event only when the status actually changed (best-effort).
        if let (Ok(user), Ok(job_queue)) = (ctx.data::<UserContext>(), ctx.data::<JobQueue>()) {
            if let (Some(old), Some(new)) = (old_status, new_status) {
                if old != new {
                    let _ = EventPublisher::publish_and_queue(
                        &pool,
                        job_queue,
                        EventType::ServiceStatusChanged,
                        "service",
                        service.base.id,
                        Some(("team_profile", user.user_id)),
                        Some(serde_json::json!({
                            "old_status": format!("{:?}", old),
                            "new_status": format!("{:?}", new)
                        })),
                    )
                    .await;
                }
            }
        }

        Ok(ServiceType::from(service))
    }

    /// Delete a service and its team-member assignments.
    ///
    /// FIX: the two deletes now run in a single transaction. Previously they
    /// executed as independent statements, so a failure after the first
    /// delete stripped the assignments while leaving the service row behind.
    async fn delete_service(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // NOTE(review): assumes the pool guard derefs to a sqlx PgPool;
        // confirm `begin()` resolves here.
        let mut tx = pool.begin().await?;

        // Delete team member assignments first (FK ordering).
        sqlx::query("DELETE FROM service_team_members WHERE service_id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        // Delete the service itself.
        let result = sqlx::query("DELETE FROM services WHERE id = $1")
            .bind(id)
            .execute(&mut *tx)
            .await?;

        tx.commit().await?;

        // Publish ServiceDeleted event after the commit (best-effort).
        if result.rows_affected() > 0 {
            if let (Ok(user), Ok(job_queue)) = (ctx.data::<UserContext>(), ctx.data::<JobQueue>()) {
                let _ = EventPublisher::publish_and_queue(
                    &pool,
                    job_queue,
                    EventType::ServiceDeleted,
                    "service",
                    id,
                    Some(("team_profile", user.user_id)),
                    None,
                )
                .await;
            }
        }

        Ok(result.rows_affected() > 0)
    }

    /// Assign a team member to a service
    async fn
assign_service_team_member( + &self, + ctx: &Context<'_>, + service_id: Uuid, + team_profile_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let member: ServiceTeamMember = sqlx::query_as::<_, ServiceTeamMember>( + r#" + INSERT INTO service_team_members (id, service_id, team_profile_id, created_at) + VALUES (gen_random_uuid(), $1, $2, NOW()) + ON CONFLICT (service_id, team_profile_id) DO UPDATE SET service_id = EXCLUDED.service_id + RETURNING id, service_id, team_profile_id, created_at + "#, + ) + .bind(service_id) + .bind(team_profile_id) + .fetch_one(&*pool) + .await?; + + // Publish ServiceAssigned event + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ServiceAssigned, + "service", + service_id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "assigned_team_profile_id": team_profile_id.to_string() + })), + ) + .await; + } + + Ok(ServiceTeamMemberType::from(member)) + } + + /// Remove a team member assignment from a service + async fn remove_service_team_member(&self, ctx: &Context<'_>, id: Uuid) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the service_id and team_profile_id before deleting + let assignment: Option<(Uuid, Uuid)> = sqlx::query_as( + "SELECT service_id, team_profile_id FROM service_team_members WHERE id = $1", + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + let result = sqlx::query("DELETE FROM service_team_members WHERE id = $1") + .bind(id) + .execute(&*pool) + .await?; + + // Publish ServiceUnassigned event + if result.rows_affected() > 0 { + if let Some((service_id, team_profile_id)) = assignment { + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ServiceUnassigned, + "service", + service_id, + Some(("team_profile", user.user_id)), + 
Some(serde_json::json!({ + "unassigned_team_profile_id": team_profile_id.to_string() + })), + ) + .await; + } + } + } + + Ok(result.rows_affected() > 0) + } + + /// Generate services for a month based on a schedule + /// + /// Creates services for all days in the specified month that match the schedule's day flags. + /// For weekend_service schedules, creates a Friday service with "Weekend service window (Fri-Sun)" note. + /// Fails if any service already exists for the generated dates (all-or-nothing). + async fn generate_services_by_month( + &self, + ctx: &Context<'_>, + input: GenerateServicesInput, + ) -> Result> { + // Validate month + if input.month < 1 || input.month > 12 { + return Err("Month must be between 1 and 12".into()); + } + + let db = ctx.data::()?; + let pool = db.pool().await; + + // Fetch schedule and verify ownership + let schedule: Schedule = sqlx::query_as::<_, Schedule>( + r#" + SELECT id, created_at, updated_at, account_address_id, name, + monday_service as monday, tuesday_service as tuesday, wednesday_service as wednesday, + thursday_service as thursday, friday_service as friday, saturday_service as saturday, + sunday_service as sunday, weekend_service, schedule_exception, start_date, end_date + FROM schedules + WHERE id = $1 + "#, + ) + .bind(input.schedule_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or("Schedule not found")?; + + // Verify schedule belongs to the address + if schedule.account_address_id != input.account_address_id { + return Err("Schedule does not belong to the specified address".into()); + } + + // Get account_id from address + let account_id: Uuid = sqlx::query_scalar( + "SELECT account_id FROM account_addresses WHERE id = $1", + ) + .bind(input.account_address_id) + .fetch_one(&*pool) + .await?; + + // Build list of target dates + let mut targets: Vec<(NaiveDate, Option)> = Vec::new(); + + // Get first and last day of month + let first_day = NaiveDate::from_ymd_opt(input.year, input.month as u32, 1) + .ok_or("Invalid date")?; + let last_day = if input.month == 12 { + NaiveDate::from_ymd_opt(input.year + 1, 1, 1) + } else { + NaiveDate::from_ymd_opt(input.year, input.month as u32 + 1, 1) + } + .ok_or("Invalid date")? + .pred_opt() + .ok_or("Invalid date")?; + + // Iterate through all days in the month + let mut current = first_day; + while current <= last_day { + // Check if schedule is active on this date + if schedule.is_active_on(current) { + // Get weekday (0=Monday, 6=Sunday in chrono) + let weekday = current.weekday().num_days_from_monday(); + let (should_generate, note) = schedule.should_generate_for_weekday(weekday); + + if should_generate { + targets.push((current, note.map(String::from))); + } + } + + current = current.succ_opt().ok_or("Date overflow")?; + } + + if targets.is_empty() { + return Ok(Vec::new()); + } + + // Check for existing services on any of these dates + let dates: Vec = targets.iter().map(|(d, _)| *d).collect(); + let existing_count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) FROM services + WHERE account_address_id = $1 AND date = ANY($2) + "#, + ) + .bind(input.account_address_id) + .bind(&dates) + .fetch_one(&*pool) + .await?; + + if existing_count > 0 { + return Err(format!( + "Cannot generate services: {} service(s) already exist for the selected dates", + existing_count + ) + .into()); + } + + 
// Bulk insert services + let mut created_services = Vec::new(); + + for (date, note) in &targets { + let service: Service = sqlx::query_as::<_, Service>( + r#" + INSERT INTO services ( + id, created_at, updated_at, account_id, account_address_id, date, status, notes + ) + VALUES ( + gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5 + ) + RETURNING id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id + "#, + ) + .bind(account_id) + .bind(input.account_address_id) + .bind(date) + .bind(WorkStatus::Scheduled) + .bind(note) + .fetch_one(&*pool) + .await?; + + created_services.push(service); + } + + // Publish events for created services + if let (Ok(user), Ok(job_queue)) = (ctx.data::(), ctx.data::()) { + for service in &created_services { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ServiceCreated, + "service", + service.base.id, + Some(("team_profile", user.user_id)), + Some(serde_json::json!({ + "generated_from_schedule": input.schedule_id.to_string(), + "month": input.month, + "year": input.year + })), + ) + .await; + } + } + + Ok(created_services.into_iter().map(ServiceType::from).collect()) + } +} diff --git a/src/graphql/mutations/service_scope_template.rs b/src/graphql/mutations/service_scope_template.rs new file mode 100644 index 0000000..9e8b5a5 --- /dev/null +++ b/src/graphql/mutations/service_scope_template.rs @@ -0,0 +1,553 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + CreateServiceScopeTemplateAreaInput, CreateServiceScopeTemplateInput, CreateServiceScopeTemplateTaskInput, + ImportServiceScopeTemplateInput, ServiceScopeTemplateAreaType, ServiceScopeTemplateTaskType, ServiceScopeTemplateType, + UpdateServiceScopeTemplateAreaInput, UpdateServiceScopeTemplateInput, UpdateServiceScopeTemplateTaskInput, +}; +use crate::models::{ServiceScopeTemplate, ServiceScopeTemplateArea, 
ServiceScopeTemplateTask, TaskFrequency};

#[derive(Default)]
pub struct ServiceScopeTemplateMutation;

#[Object]
impl ServiceScopeTemplateMutation {
    // ==================== SERVICE SCOPE TEMPLATE MUTATIONS ====================

    /// Create a new service scope template
    ///
    /// FIX: the template, its areas and their tasks are now created inside a
    /// single transaction; previously a failure while inserting an area or a
    /// task left a half-built template behind.
    async fn create_service_scope_template(
        &self,
        ctx: &Context<'_>,
        input: CreateServiceScopeTemplateInput,
    ) -> Result<ServiceScopeTemplateType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let mut tx = pool.begin().await?;

        // Create the template
        let template: ServiceScopeTemplate = sqlx::query_as::<_, ServiceScopeTemplate>(
            r#"
            INSERT INTO service_scope_templates (id, created_at, updated_at, name, description, is_active)
            VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, true)
            RETURNING id, created_at, updated_at, name, description, is_active
            "#,
        )
        .bind(&input.name)
        .bind(&input.description)
        .fetch_one(&mut *tx)
        .await?;

        // Create areas (and nested tasks) if provided; position in the input
        // list is the fallback "order" when none is given explicitly.
        if let Some(areas) = input.areas {
            for (area_order, area_input) in areas.into_iter().enumerate() {
                let area: ServiceScopeTemplateArea = sqlx::query_as::<_, ServiceScopeTemplateArea>(
                    r#"
                    INSERT INTO service_scope_template_areas (id, created_at, updated_at, template_id, name, "order")
                    VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3)
                    RETURNING id, created_at, updated_at, template_id, name, "order"
                    "#,
                )
                .bind(template.base.id)
                .bind(&area_input.name)
                .bind(area_input.order.unwrap_or(area_order as i32))
                .fetch_one(&mut *tx)
                .await?;

                if let Some(tasks) = area_input.tasks {
                    for (task_order, task_input) in tasks.into_iter().enumerate() {
                        let frequency = task_input
                            .frequency
                            .map(TaskFrequency::from)
                            .unwrap_or(TaskFrequency::Daily);

                        // checklist/session descriptions default to the scope text.
                        sqlx::query(
                            r#"
                            INSERT INTO service_scope_template_tasks (
                                id, created_at, updated_at, area_id, scope_description,
                                checklist_description, session_description, frequency, "order", estimated_minutes
                            )
                            VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7)
                            "#,
                        )
                        .bind(area.base.id)
                        .bind(&task_input.scope_description)
                        .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description))
                        .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description))
                        .bind(frequency)
                        .bind(task_input.order.unwrap_or(task_order as i32))
                        .bind(task_input.estimated_minutes)
                        .execute(&mut *tx)
                        .await?;
                    }
                }
            }
        }

        tx.commit().await?;

        Ok(ServiceScopeTemplateType::from(template))
    }

    /// Update a service scope template
    ///
    /// COALESCE patch semantics: omitted fields keep their current value.
    async fn update_service_scope_template(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateServiceScopeTemplateInput,
    ) -> Result<ServiceScopeTemplateType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let template: ServiceScopeTemplate = sqlx::query_as::<_, ServiceScopeTemplate>(
            r#"
            UPDATE service_scope_templates
            SET
                name = COALESCE($2, name),
                description = COALESCE($3, description),
                is_active = COALESCE($4, is_active),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, name, description, is_active
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(&input.description)
        .bind(input.is_active)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceScopeTemplateType::from(template))
    }

    /// Delete a service scope template (cascades to areas and tasks)
    async fn delete_service_scope_template(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // CASCADE handles areas and tasks
        let result = sqlx::query("DELETE FROM service_scope_templates WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    /// Import a service scope template from JSON structure
    async fn import_service_scope_template(
        &self,
        ctx: &Context<'_>,
        input: ImportServiceScopeTemplateInput,
    ) -> Result<ServiceScopeTemplateType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Check if a template with the same name already exists.
        let existing: Option<ServiceScopeTemplate> = sqlx::query_as::<_,
ServiceScopeTemplate>( + "SELECT id, created_at, updated_at, name, description, is_active FROM service_scope_templates WHERE name = $1", + ) + .bind(&input.name) + .fetch_optional(&*pool) + .await?; + + if let Some(existing_template) = existing { + if input.replace.unwrap_or(false) { + // Delete existing template (cascade deletes areas/tasks) + sqlx::query("DELETE FROM service_scope_templates WHERE id = $1") + .bind(existing_template.base.id) + .execute(&*pool) + .await?; + } else { + return Err(async_graphql::Error::new(format!( + "Template with name '{}' already exists. Use replace: true to overwrite.", + input.name + ))); + } + } + + // Create the template + let template: ServiceScopeTemplate = sqlx::query_as::<_, ServiceScopeTemplate>( + r#" + INSERT INTO service_scope_templates (id, created_at, updated_at, name, description, is_active) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, true) + RETURNING id, created_at, updated_at, name, description, is_active + "#, + ) + .bind(&input.name) + .bind(&input.description) + .fetch_one(&*pool) + .await?; + + // Create areas and tasks + for area_input in input.areas { + let area: ServiceScopeTemplateArea = sqlx::query_as::<_, ServiceScopeTemplateArea>( + r#" + INSERT INTO service_scope_template_areas (id, created_at, updated_at, template_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(template.base.id) + .bind(&area_input.name) + .bind(area_input.order) + .fetch_one(&*pool) + .await?; + + for task_input in area_input.tasks { + let frequency = task_input + .frequency + .map(TaskFrequency::from) + .unwrap_or(TaskFrequency::Daily); + + sqlx::query( + r#" + INSERT INTO service_scope_template_tasks ( + id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7) 
+ "#, + ) + .bind(area.base.id) + .bind(&task_input.scope_description) + .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description)) + .bind(frequency) + .bind(task_input.order) + .bind(task_input.estimated_minutes) + .execute(&*pool) + .await?; + } + } + + Ok(ServiceScopeTemplateType::from(template)) + } + + // ==================== SERVICE SCOPE TEMPLATE AREA MUTATIONS ==================== + + /// Create a new area in a service scope template + async fn create_service_scope_template_area( + &self, + ctx: &Context<'_>, + template_id: Uuid, + input: CreateServiceScopeTemplateAreaInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get max order for existing areas + let max_order: Option = sqlx::query_scalar( + r#"SELECT MAX("order") FROM service_scope_template_areas WHERE template_id = $1"#, + ) + .bind(template_id) + .fetch_one(&*pool) + .await?; + + let area: ServiceScopeTemplateArea = sqlx::query_as::<_, ServiceScopeTemplateArea>( + r#" + INSERT INTO service_scope_template_areas (id, created_at, updated_at, template_id, name, "order") + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3) + RETURNING id, created_at, updated_at, template_id, name, "order" + "#, + ) + .bind(template_id) + .bind(&input.name) + .bind(input.order.unwrap_or(max_order.unwrap_or(0) + 1)) + .fetch_one(&*pool) + .await?; + + // Create tasks if provided + if let Some(tasks) = input.tasks { + for (task_order, task_input) in tasks.into_iter().enumerate() { + let frequency = task_input + .frequency + .map(TaskFrequency::from) + .unwrap_or(TaskFrequency::Daily); + + sqlx::query( + r#" + INSERT INTO service_scope_template_tasks ( + id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + ) + VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, 
$7)
                    "#,
                )
                .bind(area.base.id)
                .bind(&task_input.scope_description)
                .bind(task_input.checklist_description.as_deref().unwrap_or(&task_input.scope_description))
                .bind(task_input.session_description.as_deref().unwrap_or(&task_input.scope_description))
                .bind(frequency)
                .bind(task_input.order.unwrap_or(task_order as i32))
                .bind(task_input.estimated_minutes)
                .execute(&*pool)
                .await?;
            }
        }

        Ok(ServiceScopeTemplateAreaType::from(area))
    }

    /// Update a service scope template area
    ///
    /// COALESCE patch semantics: omitted fields keep their current value.
    async fn update_service_scope_template_area(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateServiceScopeTemplateAreaInput,
    ) -> Result<ServiceScopeTemplateAreaType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let area: ServiceScopeTemplateArea = sqlx::query_as::<_, ServiceScopeTemplateArea>(
            r#"
            UPDATE service_scope_template_areas
            SET
                name = COALESCE($2, name),
                "order" = COALESCE($3, "order"),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, template_id, name, "order"
            "#,
        )
        .bind(id)
        .bind(&input.name)
        .bind(input.order)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceScopeTemplateAreaType::from(area))
    }

    /// Delete a service scope template area (cascades to tasks)
    async fn delete_service_scope_template_area(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // CASCADE handles tasks
        let deleted = sqlx::query("DELETE FROM service_scope_template_areas WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(deleted.rows_affected() > 0)
    }

    // ==================== SERVICE SCOPE TEMPLATE TASK MUTATIONS ====================

    /// Create a new task in a service scope template area; when no explicit
    /// "order" is given the task is appended after the current maximum.
    async fn create_service_scope_template_task(
        &self,
        ctx: &Context<'_>,
        area_id: Uuid,
        input: CreateServiceScopeTemplateTaskInput,
    ) -> Result<ServiceScopeTemplateTaskType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Highest existing "order" within the area (None when it is empty).
        let highest_order: Option<i32> = sqlx::query_scalar(
            r#"SELECT MAX("order") FROM service_scope_template_tasks WHERE area_id = $1"#,
        )
        .bind(area_id)
        .fetch_one(&*pool)
        .await?;

        let frequency = input
            .frequency
            .map(TaskFrequency::from)
            .unwrap_or(TaskFrequency::Daily);

        // checklist/session descriptions default to the scope text.
        let task: ServiceScopeTemplateTask = sqlx::query_as::<_, ServiceScopeTemplateTask>(
            r#"
            INSERT INTO service_scope_template_tasks (
                id, created_at, updated_at, area_id, scope_description,
                checklist_description, session_description, frequency, "order", estimated_minutes
            )
            VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7)
            RETURNING id, created_at, updated_at, area_id, scope_description,
                checklist_description, session_description, frequency, "order", estimated_minutes
            "#,
        )
        .bind(area_id)
        .bind(&input.scope_description)
        .bind(input.checklist_description.as_deref().unwrap_or(&input.scope_description))
        .bind(input.session_description.as_deref().unwrap_or(&input.scope_description))
        .bind(frequency)
        .bind(input.order.unwrap_or(highest_order.unwrap_or(0) + 1))
        .bind(input.estimated_minutes)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceScopeTemplateTaskType::from(task))
    }

    /// Update a service scope template task
    ///
    /// COALESCE patch semantics: omitted fields keep their current value.
    async fn update_service_scope_template_task(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateServiceScopeTemplateTaskInput,
    ) -> Result<ServiceScopeTemplateTaskType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let task: ServiceScopeTemplateTask = sqlx::query_as::<_, ServiceScopeTemplateTask>(
            r#"
            UPDATE service_scope_template_tasks
            SET
                scope_description = COALESCE($2, scope_description),
                checklist_description = COALESCE($3, checklist_description),
                session_description = COALESCE($4, session_description),
                frequency = COALESCE($5, frequency),
                "order" = COALESCE($6, "order"),
                estimated_minutes = COALESCE($7, estimated_minutes),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, area_id, scope_description,
checklist_description, session_description, frequency, "order", estimated_minutes
            "#,
        )
        .bind(id)
        .bind(&input.scope_description)
        .bind(&input.checklist_description)
        .bind(&input.session_description)
        .bind(input.frequency.map(TaskFrequency::from))
        .bind(input.order)
        .bind(input.estimated_minutes)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceScopeTemplateTaskType::from(task))
    }

    /// Delete a service scope template task
    async fn delete_service_scope_template_task(&self, ctx: &Context<'_>, id: Uuid) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let result = sqlx::query("DELETE FROM service_scope_template_tasks WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== TEMPLATE INSTANTIATION ====================

    /// Create a service scope from a service scope template
    ///
    /// FIX: deactivating the address's current scopes and copying the
    /// template now run inside one transaction. Previously each statement ran
    /// independently, so a failure after the deactivation (or mid-copy) left
    /// the address with no active scope, or a half-copied one.
    async fn create_service_scope_from_template(
        &self,
        ctx: &Context<'_>,
        template_id: Uuid,
        account_id: Uuid,
        account_address_id: Uuid,
    ) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Get the template (read-only; errors out before any mutation).
        let template: ServiceScopeTemplate = sqlx::query_as::<_, ServiceScopeTemplate>(
            "SELECT id, created_at, updated_at, name, description, is_active FROM service_scope_templates WHERE id = $1",
        )
        .bind(template_id)
        .fetch_one(&*pool)
        .await?;

        let mut tx = pool.begin().await?;

        // Deactivate any existing active scopes for this address
        sqlx::query(
            r#"
            UPDATE service_scopes
            SET is_active = false, updated_at = NOW()
            WHERE account_address_id = $1 AND is_active = true
            "#,
        )
        .bind(account_address_id)
        .execute(&mut *tx)
        .await?;

        // Create the service scope, named/described after the template.
        let scope_id: Uuid = sqlx::query_scalar(
            r#"
            INSERT INTO service_scopes (
                id, created_at, updated_at, name, account_id, account_address_id, description, is_active
            )
            VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, true)
            RETURNING id
            "#,
        )
        .bind(&template.name)
        .bind(account_id)
        .bind(account_address_id)
        .bind(&template.description)
        .fetch_one(&mut *tx)
        .await?;

        // Get template areas
        let areas: Vec<ServiceScopeTemplateArea> = sqlx::query_as::<_, ServiceScopeTemplateArea>(
            r#"
            SELECT id, created_at, updated_at, template_id, name, "order"
            FROM service_scope_template_areas
            WHERE template_id = $1
            ORDER BY "order" ASC
            "#,
        )
        .bind(template_id)
        .fetch_all(&mut *tx)
        .await?;

        // Copy areas and tasks
        for area in areas {
            let new_area_id: Uuid = sqlx::query_scalar(
                r#"
                INSERT INTO service_scope_areas (id, created_at, updated_at, scope_id, name, "order")
                VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3)
                RETURNING id
                "#,
            )
            .bind(scope_id)
            .bind(&area.name)
            .bind(area.order)
            .fetch_one(&mut *tx)
            .await?;

            // Get tasks for this area
            let tasks: Vec<ServiceScopeTemplateTask> = sqlx::query_as::<_, ServiceScopeTemplateTask>(
                r#"
                SELECT id, created_at, updated_at, area_id, scope_description,
                    checklist_description, session_description, frequency, "order", estimated_minutes
                FROM service_scope_template_tasks
                WHERE area_id = $1
                ORDER BY "order" ASC
                "#,
            )
            .bind(area.base.id)
            .fetch_all(&mut *tx)
            .await?;

            // Copy tasks
            for task in tasks {
                sqlx::query(
                    r#"
                    INSERT INTO service_scope_tasks (
                        id, created_at, updated_at, area_id, scope_description,
                        checklist_description, session_description, frequency, "order", estimated_minutes
                    )
                    VALUES (gen_random_uuid(), NOW(), NOW(), $1, $2, $3, $4, $5, $6, $7)
                    "#,
                )
                .bind(new_area_id)
                .bind(&task.scope_description)
                .bind(&task.checklist_description)
                .bind(&task.session_description)
                .bind(&task.frequency)
                .bind(task.order)
                .bind(task.estimated_minutes)
                .execute(&mut *tx)
                .await?;
            }
        }

        tx.commit().await?;

        Ok(true)
    }
}

// ==== src/graphql/mutations/session.rs ====

//! Session GraphQL mutations
//!
Mutations for managing service and project sessions, including +//! session lifecycle, task completions, notes, and media uploads. + +use std::sync::Arc; + +use async_graphql::{Context, InputObject, Object, Result, Upload}; +use bytes::Bytes; +use chrono::{Datelike, NaiveDate, Utc}; +use tempfile::NamedTempFile; +use uuid::Uuid; + +use crate::auth::UserContext; +use crate::db::Database; +use crate::models::EventType; +use crate::services::{EventPublisher, JobQueue}; +use crate::graphql::types::{ + ProjectSessionImageType, ProjectSessionNoteType, ProjectSessionType, ProjectSessionVideoType, + ProjectTaskCompletionType, ServiceSessionImageType, ServiceSessionNoteType, + ServiceSessionType, ServiceSessionVideoType, ServiceTaskCompletionType, +}; +use crate::models::{ + ProjectSession, ProjectSessionImage, ProjectSessionNote, ProjectSessionVideo, + ServiceSession, ServiceSessionImage, ServiceSessionNote, + ServiceSessionVideo, WorkStatus, +}; +use crate::services::{process_image, S3Service}; + +// ==================== INPUT TYPES ==================== + +/// Input for opening a service session +#[derive(InputObject)] +pub struct OpenServiceSessionInput { + pub service_id: Uuid, +} + +/// Input for closing a service session +#[derive(InputObject)] +pub struct CloseServiceSessionInput { + pub service_id: Uuid, + /// Task IDs to mark as completed when closing the session + pub task_ids: Vec, +} + +/// Input for opening a project session +#[derive(InputObject)] +pub struct OpenProjectSessionInput { + pub project_id: Uuid, +} + +/// Input for closing a project session +#[derive(InputObject)] +pub struct CloseProjectSessionInput { + pub project_id: Uuid, + /// Task IDs to mark as completed when closing the session + pub task_ids: Vec, +} + +/// Input for creating a session note +#[derive(InputObject)] +pub struct CreateSessionNoteInput { + pub session_id: Uuid, + pub content: String, + #[graphql(default = false)] + pub internal: bool, +} + +/// Input for updating a session 
note +#[derive(InputObject)] +pub struct UpdateSessionNoteInput { + pub content: Option, + pub internal: Option, +} + +/// Input for updating session media +#[derive(InputObject)] +pub struct UpdateSessionMediaInput { + pub title: Option, + pub notes: Option, + pub internal: Option, +} + +// ==================== MUTATION IMPLEMENTATION ==================== + +#[derive(Default)] +pub struct SessionMutation; + +#[Object] +impl SessionMutation { + // ==================== SERVICE SESSION LIFECYCLE ==================== + + /// Open a new service session + /// + /// - Validates service status is SCHEDULED + /// - Finds active scope for the service's account address + /// - Creates session with start=now, created_by=actor + /// - Sets service status to IN_PROGRESS + async fn open_service_session( + &self, + ctx: &Context<'_>, + input: OpenServiceSessionInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + // Get the service and validate status + let service: (Uuid, Uuid, Uuid, WorkStatus, NaiveDate) = sqlx::query_as( + r#" + SELECT s.account_id, s.account_address_id, a.customer_id, s.status, s.date + FROM services s + JOIN accounts a ON a.id = s.account_id + WHERE s.id = $1 + "#, + ) + .bind(input.service_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or_else(|| async_graphql::Error::new("Service not found"))?; + + let (account_id, account_address_id, customer_id, status, service_date) = service; + + if status != WorkStatus::Scheduled { + return Err(async_graphql::Error::new(format!( + "Service must be in SCHEDULED status to open a session (current: {:?})", + status + ))); + } + + // Find active scope for this account address + let scope_id: Option = sqlx::query_scalar( + r#" + SELECT id FROM service_scopes + WHERE account_address_id = $1 AND is_active = true + LIMIT 1 + "#, + ) + .bind(account_address_id) + .fetch_optional(&*pool) + .await?; + + // Create the session + let now = Utc::now(); + let session: ServiceSession = sqlx::query_as( + r#" + INSERT INTO service_sessions ( + id, created_at, updated_at, service_id, account_id, account_address_id, + customer_id, scope_id, start, created_by_id, date + ) + VALUES ( + gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $1, $7, $8 + ) + RETURNING id, created_at, updated_at, service_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + "#, + ) + .bind(now) + .bind(input.service_id) + .bind(account_id) + .bind(account_address_id) + .bind(customer_id) + .bind(scope_id) + .bind(user.user_id) + .bind(service_date) + .fetch_one(&*pool) + .await?; + + // Update service status to IN_PROGRESS + sqlx::query( + r#" + UPDATE services SET status = 'IN_PROGRESS', updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(input.service_id) + .execute(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(job_queue) = ctx.data::() { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ServiceSessionStarted, + "service_session", + session.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(ServiceSessionType::from(session)) + } + + /// Close a service session + /// + /// - Validates service status is IN_PROGRESS + /// - Processes task completions (deduplicate, 
validate scope ownership)
    /// - Sets session end=now, closed_by=actor
    /// - Sets service status to COMPLETED
    async fn close_service_session(
        &self,
        ctx: &Context<'_>,
        input: CloseServiceSessionInput,
    ) -> Result<ServiceSessionType> {
        // NOTE(review): turbofish type args below were reconstructed from usage
        // (extraction stripped generics) — confirm Database/AuthUser/JobQueue names.
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;
        let user = ctx.data::<AuthUser>()?;

        // Run every write in one transaction: without it, a failure between
        // "close session" and "mark service COMPLETED" (or mid-way through the
        // task-completion inserts) leaves the rows permanently inconsistent.
        // NOTE(review): assumes `pool` derefs to a sqlx PgPool so `begin()`
        // resolves via auto-deref — confirm.
        let mut tx = pool.begin().await?;

        // Get the active (not-yet-ended) session for this service.
        let session: ServiceSession = sqlx::query_as(
            r#"
            SELECT id, created_at, updated_at, service_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM service_sessions
            WHERE service_id = $1 AND "end" IS NULL
            "#,
        )
        .bind(input.service_id)
        .fetch_optional(&mut *tx)
        .await?
        .ok_or_else(|| async_graphql::Error::new("No active session found for this service"))?;

        // Verify the parent service is IN_PROGRESS before allowing the close.
        let status: WorkStatus = sqlx::query_scalar("SELECT status FROM services WHERE id = $1")
            .bind(input.service_id)
            .fetch_one(&mut *tx)
            .await?;

        if status != WorkStatus::InProgress {
            return Err(async_graphql::Error::new(format!(
                "Service must be IN_PROGRESS to close a session (current: {:?})",
                status
            )));
        }

        // Process task completions: reuse an existing completion row for this
        // service+task if present, otherwise create one, then link it to this
        // session (idempotent via ON CONFLICT DO NOTHING).
        let now = Utc::now();
        let year = session.date.year();
        let month = session.date.month() as i32;

        for task_id in input.task_ids {
            let existing: Option<Uuid> = sqlx::query_scalar(
                "SELECT id FROM service_task_completions WHERE service_id = $1 AND task_id = $2",
            )
            .bind(input.service_id)
            .bind(task_id)
            .fetch_optional(&mut *tx)
            .await?;

            let completion_id = if let Some(id) = existing {
                id
            } else {
                // Create a new completion attributed to the closing actor.
                sqlx::query_scalar(
                    r#"
                    INSERT INTO service_task_completions (
                        id, created_at, updated_at, service_id, task_id, account_address_id,
                        completed_by_id, completed_at, year, month
                    )
                    VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $1, $6, $7)
                    RETURNING id
                    "#,
                )
                .bind(now)
                .bind(input.service_id)
                .bind(task_id)
                .bind(session.account_address_id)
                .bind(user.user_id)
                .bind(year)
                .bind(month)
                .fetch_one(&mut *tx)
                .await?
            };

            // Link to session via junction table (ignore if already linked).
            sqlx::query(
                r#"
                INSERT INTO service_session_completed_tasks (id, created_at, session_id, task_completion_id)
                VALUES (gen_random_uuid(), $1, $2, $3)
                ON CONFLICT (session_id, task_completion_id) DO NOTHING
                "#,
            )
            .bind(now)
            .bind(session.base.id)
            .bind(completion_id)
            .execute(&mut *tx)
            .await?;
        }

        // Close the session: stamp "end", the closing actor, and updated_at.
        let closed_session: ServiceSession = sqlx::query_as(
            r#"
            UPDATE service_sessions
            SET "end" = $2, closed_by_id = $3, updated_at = $2
            WHERE id = $1
            RETURNING id, created_at, updated_at, service_id, account_id, account_address_id,
                      customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            "#,
        )
        .bind(session.base.id)
        .bind(now)
        .bind(user.user_id)
        .fetch_one(&mut *tx)
        .await?;

        // Update service status to COMPLETED.
        sqlx::query(
            r#"
            UPDATE services SET status = 'COMPLETED', updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(input.service_id)
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        // Publish event for audit trail (best-effort: errors are discarded).
        if let Ok(job_queue) = ctx.data::<JobQueue>() {
            let _ = EventPublisher::publish_and_queue(
                &pool,
                job_queue,
                EventType::ServiceSessionEnded,
                "service_session",
                closed_session.base.id,
                Some(("team_profile", user.user_id)),
                None,
            )
            .await;
        }

        Ok(ServiceSessionType::from(closed_session))
    }

    /// Revert a service session (admin only)
    ///
    /// - Deletes task completions linked to session
    /// - Deletes the session (notes, images, videos included; S3 cleanup is separate)
    /// - Sets service status back to SCHEDULED
    ///
    /// NOTE(review): this targets the MOST RECENT session by `start`, whether or
    /// not it is still open — confirm that is the intended semantics.
    async fn revert_service_session(
        &self,
        ctx: &Context<'_>,
        service_id: Uuid,
    ) -> Result<bool> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;
        let user = ctx.data::<AuthUser>()?;

        // The cascade of deletes plus the status reset must be atomic; a partial
        // failure otherwise strands orphaned completion/media rows.
        let mut tx = pool.begin().await?;

        // Find the most recent session.
        let session_id: Option<Uuid> = sqlx::query_scalar(
            r#"
            SELECT id FROM service_sessions
            WHERE service_id = $1
            ORDER BY start DESC
            LIMIT 1
            "#,
        )
        .bind(service_id)
        .fetch_optional(&mut *tx)
        .await?;

        let Some(session_id) = session_id else {
            return Err(async_graphql::Error::new("No session found to revert"));
        };

        // Collect the task completion IDs linked to this session before the
        // junction rows are removed.
        let completion_ids: Vec<Uuid> = sqlx::query_scalar(
            "SELECT task_completion_id FROM service_session_completed_tasks WHERE session_id = $1",
        )
        .bind(session_id)
        .fetch_all(&mut *tx)
        .await?;

        // Delete from junction table first (FK ordering).
        sqlx::query("DELETE FROM service_session_completed_tasks WHERE session_id = $1")
            .bind(session_id)
            .execute(&mut *tx)
            .await?;

        // Delete the task completions in a single statement instead of one
        // round trip per row.
        if !completion_ids.is_empty() {
            sqlx::query("DELETE FROM service_task_completions WHERE id = ANY($1)")
                .bind(&completion_ids)
                .execute(&mut *tx)
                .await?;
        }

        // Delete notes.
        sqlx::query("DELETE FROM service_session_notes WHERE session_id = $1")
            .bind(session_id)
            .execute(&mut *tx)
            .await?;

        // Delete images (S3 cleanup would happen separately).
        sqlx::query("DELETE FROM service_session_images WHERE session_id = $1")
            .bind(session_id)
            .execute(&mut *tx)
            .await?;

        // Delete videos.
        sqlx::query("DELETE FROM service_session_videos WHERE session_id = $1")
            .bind(session_id)
            .execute(&mut *tx)
            .await?;

        // Delete the session itself.
        sqlx::query("DELETE FROM service_sessions WHERE id = $1")
            .bind(session_id)
            .execute(&mut *tx)
            .await?;

        // Set service status back to SCHEDULED.
        sqlx::query(
            r#"
            UPDATE services SET status = 'SCHEDULED', updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(service_id)
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        // Publish event for audit trail (best-effort).
        if let Ok(job_queue) = ctx.data::<JobQueue>() {
            let _ = EventPublisher::publish_and_queue(
                &pool,
                job_queue,
                EventType::ServiceSessionReverted,
                "service",
                service_id,
                Some(("team_profile", user.user_id)),
                None,
            )
            .await;
        }

        Ok(true)
    }

    // 
==================== PROJECT SESSION LIFECYCLE ==================== + + /// Open a new project session + async fn open_project_session( + &self, + ctx: &Context<'_>, + input: OpenProjectSessionInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + // Get the project and validate status + let project: (Uuid, Option, Option, WorkStatus, NaiveDate) = sqlx::query_as( + r#" + SELECT p.customer_id, aa.account_id, p.account_address_id, p.status, p.date + FROM projects p + LEFT JOIN account_addresses aa ON aa.id = p.account_address_id + WHERE p.id = $1 + "#, + ) + .bind(input.project_id) + .fetch_optional(&*pool) + .await? + .ok_or_else(|| async_graphql::Error::new("Project not found"))?; + + let (customer_id, account_id, account_address_id, status, project_date) = project; + + if status != WorkStatus::Scheduled { + return Err(async_graphql::Error::new(format!( + "Project must be in SCHEDULED status to open a session (current: {:?})", + status + ))); + } + + // Find active scope for this project + let scope_id: Option = sqlx::query_scalar( + r#" + SELECT id FROM project_scopes + WHERE project_id = $1 AND is_active = true + LIMIT 1 + "#, + ) + .bind(input.project_id) + .fetch_optional(&*pool) + .await?; + + // Create the session + let now = Utc::now(); + let session: ProjectSession = sqlx::query_as( + r#" + INSERT INTO project_sessions ( + id, created_at, updated_at, project_id, account_id, account_address_id, + customer_id, scope_id, start, created_by_id, date + ) + VALUES ( + gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $1, $7, $8 + ) + RETURNING id, created_at, updated_at, project_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + "#, + ) + .bind(now) + .bind(input.project_id) + .bind(account_id) + .bind(account_address_id) + .bind(customer_id) + .bind(scope_id) + .bind(user.user_id) + .bind(project_date) + .fetch_one(&*pool) + .await?; + + // Update 
project status to IN_PROGRESS + sqlx::query( + r#" + UPDATE projects SET status = 'IN_PROGRESS', updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(input.project_id) + .execute(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(job_queue) = ctx.data::() { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectSessionStarted, + "project_session", + session.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(ProjectSessionType::from(session)) + } + + /// Close a project session + async fn close_project_session( + &self, + ctx: &Context<'_>, + input: CloseProjectSessionInput, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + // Get the active session + let session: ProjectSession = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, project_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + FROM project_sessions + WHERE project_id = $1 AND "end" IS NULL + "#, + ) + .bind(input.project_id) + .fetch_optional(&*pool) + .await? 
+ .ok_or_else(|| async_graphql::Error::new("No active session found for this project"))?; + + // Verify project is IN_PROGRESS + let status: WorkStatus = sqlx::query_scalar( + "SELECT status FROM projects WHERE id = $1", + ) + .bind(input.project_id) + .fetch_one(&*pool) + .await?; + + if status != WorkStatus::InProgress { + return Err(async_graphql::Error::new(format!( + "Project must be IN_PROGRESS to close a session (current: {:?})", + status + ))); + } + + // Process task completions + let now = Utc::now(); + + for task_id in input.task_ids { + // Skip if already completed for this project + let existing: Option = sqlx::query_scalar( + "SELECT id FROM project_task_completions WHERE project_id = $1 AND task_id = $2", + ) + .bind(input.project_id) + .bind(task_id) + .fetch_optional(&*pool) + .await?; + + let completion_id = if let Some(id) = existing { + id + } else { + // Create new completion + sqlx::query_scalar( + r#" + INSERT INTO project_task_completions ( + id, created_at, updated_at, project_id, task_id, account_id, account_address_id, + completed_by_id, completed_at + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $1) + RETURNING id + "#, + ) + .bind(now) + .bind(input.project_id) + .bind(task_id) + .bind(session.account_id) + .bind(session.account_address_id) + .bind(user.user_id) + .fetch_one(&*pool) + .await? 
+ }; + + // Link to session via junction table (ignore if already linked) + sqlx::query( + r#" + INSERT INTO project_session_completed_tasks (id, created_at, session_id, task_completion_id) + VALUES (gen_random_uuid(), $1, $2, $3) + ON CONFLICT (session_id, task_completion_id) DO NOTHING + "#, + ) + .bind(now) + .bind(session.base.id) + .bind(completion_id) + .execute(&*pool) + .await?; + } + + // Close the session + let closed_session: ProjectSession = sqlx::query_as( + r#" + UPDATE project_sessions + SET "end" = $2, closed_by_id = $3, updated_at = $2 + WHERE id = $1 + RETURNING id, created_at, updated_at, project_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + "#, + ) + .bind(session.base.id) + .bind(now) + .bind(user.user_id) + .fetch_one(&*pool) + .await?; + + // Update project status to COMPLETED + sqlx::query( + r#" + UPDATE projects SET status = 'COMPLETED', updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(input.project_id) + .execute(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(job_queue) = ctx.data::() { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectSessionEnded, + "project_session", + closed_session.base.id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(ProjectSessionType::from(closed_session)) + } + + /// Revert a project session (admin only) + async fn revert_project_session( + &self, + ctx: &Context<'_>, + project_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + // Find the most recent session + let session_id: Option = sqlx::query_scalar( + r#" + SELECT id FROM project_sessions + WHERE project_id = $1 + ORDER BY start DESC + LIMIT 1 + "#, + ) + .bind(project_id) + .fetch_optional(&*pool) + .await?; + + let Some(session_id) = session_id else { + return Err(async_graphql::Error::new("No session found to revert")); + }; + + // Get all 
task completion IDs linked to this session + let completion_ids: Vec = sqlx::query_scalar( + "SELECT task_completion_id FROM project_session_completed_tasks WHERE session_id = $1", + ) + .bind(session_id) + .fetch_all(&*pool) + .await?; + + // Delete from junction table first + sqlx::query("DELETE FROM project_session_completed_tasks WHERE session_id = $1") + .bind(session_id) + .execute(&*pool) + .await?; + + // Delete the task completions + for completion_id in completion_ids { + sqlx::query("DELETE FROM project_task_completions WHERE id = $1") + .bind(completion_id) + .execute(&*pool) + .await?; + } + + // Delete notes + sqlx::query("DELETE FROM project_session_notes WHERE session_id = $1") + .bind(session_id) + .execute(&*pool) + .await?; + + // Delete images + sqlx::query("DELETE FROM project_session_images WHERE session_id = $1") + .bind(session_id) + .execute(&*pool) + .await?; + + // Delete videos + sqlx::query("DELETE FROM project_session_videos WHERE session_id = $1") + .bind(session_id) + .execute(&*pool) + .await?; + + // Delete the session + sqlx::query("DELETE FROM project_sessions WHERE id = $1") + .bind(session_id) + .execute(&*pool) + .await?; + + // Set project status back to SCHEDULED + sqlx::query( + r#" + UPDATE projects SET status = 'SCHEDULED', updated_at = NOW() + WHERE id = $1 + "#, + ) + .bind(project_id) + .execute(&*pool) + .await?; + + // Publish event for audit trail + if let Ok(job_queue) = ctx.data::() { + let _ = EventPublisher::publish_and_queue( + &pool, + job_queue, + EventType::ProjectSessionReverted, + "project", + project_id, + Some(("team_profile", user.user_id)), + None, + ) + .await; + } + + Ok(true) + } + + // ==================== TASK COMPLETIONS ==================== + + /// Add a task completion to the active service session + async fn add_service_task_completion( + &self, + ctx: &Context<'_>, + service_id: Uuid, + task_id: Uuid, + notes: Option, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + 
        let user = ctx.data::()?;

        // Get the active (not-yet-ended) session with account_address_id and date
        let session: (Uuid, Uuid, NaiveDate) = sqlx::query_as(
            r#"
            SELECT id, account_address_id, date FROM service_sessions
            WHERE service_id = $1 AND "end" IS NULL
            "#,
        )
        .bind(service_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("No active session found"))?;

        let (session_id, account_address_id, date) = session;
        // year/month are denormalized from the session date
        let year = date.year();
        let month = date.month() as i32;
        let now = Utc::now();

        // Insert task completion (or update if already exists for this service+task);
        // on conflict, incoming NULL notes keep the existing notes (COALESCE)
        let completion_id: Uuid = sqlx::query_scalar(
            r#"
            INSERT INTO service_task_completions (
                id, created_at, updated_at, service_id, task_id, account_address_id,
                completed_by_id, completed_at, year, month, notes
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $1, $6, $7, $8)
            ON CONFLICT (service_id, task_id) DO UPDATE SET
                notes = COALESCE(EXCLUDED.notes, service_task_completions.notes),
                updated_at = $1
            RETURNING id
            "#,
        )
        .bind(now)
        .bind(service_id)
        .bind(task_id)
        .bind(account_address_id)
        .bind(user.user_id)
        .bind(year)
        .bind(month)
        .bind(¬es) // NOTE(review): `¬es` is entity-decoding mojibake for `&notes` — restore
        .fetch_one(&*pool)
        .await?;

        // Link to session via junction table (ignore if already linked)
        sqlx::query(
            r#"
            INSERT INTO service_session_completed_tasks (id, created_at, session_id, task_completion_id)
            VALUES (gen_random_uuid(), $1, $2, $3)
            ON CONFLICT (session_id, task_completion_id) DO NOTHING
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(completion_id)
        .execute(&*pool)
        .await?;

        Ok(ServiceTaskCompletionType {
            id: completion_id,
            session_id,
            task_id,
            completed_by_id: user.user_id,
            completed_at: now,
            notes,
        })
    }

    /// Remove a task completion from the active service session
    ///
    /// Returns false when there was no completion to remove.
    async fn remove_service_task_completion(
        &self,
        ctx: &Context<'_>,
        service_id: Uuid,
        task_id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Get the active session (errors if the service has no open session)
        let session_id: Uuid = sqlx::query_scalar(
            r#"
            SELECT id FROM service_sessions
            WHERE service_id = $1 AND "end" IS NULL
            "#,
        )
        .bind(service_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("No active session found"))?;

        // Find the completion for this service+task
        let completion_id: Option = sqlx::query_scalar(
            "SELECT id FROM service_task_completions WHERE service_id = $1 AND task_id = $2",
        )
        .bind(service_id)
        .bind(task_id)
        .fetch_optional(&*pool)
        .await?;

        let Some(completion_id) = completion_id else {
            // Nothing to remove
            return Ok(false);
        };

        // Remove from junction table
        sqlx::query(
            "DELETE FROM service_session_completed_tasks WHERE session_id = $1 AND task_completion_id = $2",
        )
        .bind(session_id)
        .bind(completion_id)
        .execute(&*pool)
        .await?;

        // Delete the completion record
        let result = sqlx::query("DELETE FROM service_task_completions WHERE id = $1")
            .bind(completion_id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    /// Add a task completion to the active project session
    async fn add_project_task_completion(
        &self,
        ctx: &Context<'_>,
        project_id: Uuid,
        task_id: Uuid,
        notes: Option,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;

        // Get the active session with account_id and account_address_id
        // (both nullable on project sessions, unlike service sessions)
        let session: (Uuid, Option, Option) = sqlx::query_as(
            r#"
            SELECT id, account_id, account_address_id FROM project_sessions
            WHERE project_id = $1 AND "end" IS NULL
            "#,
        )
        .bind(project_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("No active session found"))?;

        let (session_id, account_id, account_address_id) = session;
        let now = Utc::now();

        // Insert task completion (or update if already exists for this project+task)
        let completion_id: Uuid = sqlx::query_scalar(
            r#"
            INSERT INTO project_task_completions (
                id, created_at, updated_at, project_id, task_id, account_id, account_address_id,
                completed_by_id, completed_at, notes
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $1, $7)
            ON CONFLICT (project_id, task_id) DO UPDATE SET
                notes = COALESCE(EXCLUDED.notes, project_task_completions.notes),
                updated_at = $1
            RETURNING id
            "#,
        )
        .bind(now)
        .bind(project_id)
        .bind(task_id)
        .bind(account_id)
        .bind(account_address_id)
        .bind(user.user_id)
        .bind(¬es) // NOTE(review): mojibake for `&notes` — restore
        .fetch_one(&*pool)
        .await?;

        // Link to session via junction table (ignore if already linked)
        sqlx::query(
            r#"
            INSERT INTO project_session_completed_tasks (id, created_at, session_id, task_completion_id)
            VALUES (gen_random_uuid(), $1, $2, $3)
            ON CONFLICT (session_id, task_completion_id) DO NOTHING
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(completion_id)
        .execute(&*pool)
        .await?;

        Ok(ProjectTaskCompletionType {
            id: completion_id,
            session_id,
            task_id,
            completed_by_id: user.user_id,
            completed_at: now,
            notes,
        })
    }

    /// Remove a task completion from the active project session
    async fn remove_project_task_completion(
        &self,
        ctx: &Context<'_>,
        project_id: Uuid,
        task_id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Get the active session
        let session_id: Uuid = sqlx::query_scalar(
            r#"
            SELECT id FROM project_sessions
            WHERE project_id = $1 AND "end" IS NULL
            "#,
        )
        .bind(project_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("No active session found"))?;

        // Find the completion for this project+task
        let completion_id: Option = sqlx::query_scalar(
            "SELECT id FROM project_task_completions WHERE project_id = $1 AND task_id = $2",
        )
        .bind(project_id)
        .bind(task_id)
        .fetch_optional(&*pool)
        .await?;

        let Some(completion_id) = completion_id else {
            // Nothing to remove
            return Ok(false);
        };

        // Remove from junction table
        sqlx::query(
            "DELETE FROM project_session_completed_tasks WHERE session_id = $1 AND task_completion_id = $2",
        )
        .bind(session_id)
        .bind(completion_id)
        .execute(&*pool)
        .await?;

        // Delete the completion record
        let result = sqlx::query("DELETE FROM project_task_completions WHERE id = $1")
            .bind(completion_id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== SERVICE SESSION NOTES ====================

    /// Create a note for a service session
    ///
    /// NOTE(review): input.session_id is not validated to exist or be open;
    /// presumably the FK constraint rejects bad ids — confirm.
    async fn create_service_session_note(
        &self,
        ctx: &Context<'_>,
        input: CreateSessionNoteInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;

        let now = Utc::now();
        let note: ServiceSessionNote = sqlx::query_as(
            r#"
            INSERT INTO service_session_notes (
                id, created_at, updated_at, session_id, content, author_id, internal
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5)
            RETURNING id, created_at, updated_at, session_id, content, author_id, internal
            "#,
        )
        .bind(now)
        .bind(input.session_id)
        .bind(&input.content)
        .bind(user.user_id)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionNoteType::from(note))
    }

    /// Update a service session note
    ///
    /// NULL input fields keep their current values (COALESCE).
    /// NOTE(review): no author/ownership check is visible here — any caller
    /// who reaches this mutation can edit any note; confirm a guard exists.
    async fn update_service_session_note(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionNoteInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let note: ServiceSessionNote = sqlx::query_as(
            r#"
            UPDATE service_session_notes
            SET
                content = COALESCE($2, content),
                internal = COALESCE($3, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, content, author_id, internal
            "#,
        )
        .bind(id)
        .bind(&input.content)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionNoteType::from(note))
    }

    /// Delete a service session note
    ///
    /// Returns true iff a row was actually deleted.
    async fn delete_service_session_note(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let result = sqlx::query("DELETE FROM service_session_notes WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== PROJECT SESSION NOTES ====================

    /// Create a note for a project session
    async fn create_project_session_note(
        &self,
        ctx: &Context<'_>,
        input: CreateSessionNoteInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;

        let now = Utc::now();
        let note: ProjectSessionNote = sqlx::query_as(
            r#"
            INSERT INTO project_session_notes (
                id, created_at, updated_at, session_id, content, author_id, internal
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5)
            RETURNING id, created_at, updated_at, session_id, content, author_id, internal
            "#,
        )
        .bind(now)
        .bind(input.session_id)
        .bind(&input.content)
        .bind(user.user_id)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionNoteType::from(note))
    }

    /// Update a project session note
    ///
    /// NULL input fields keep their current values (COALESCE).
    async fn update_project_session_note(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionNoteInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let note: ProjectSessionNote = sqlx::query_as(
            r#"
            UPDATE project_session_notes
            SET
                content = COALESCE($2, content),
                internal = COALESCE($3, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, content, author_id, internal
            "#,
        )
        .bind(id)
        .bind(&input.content)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionNoteType::from(note))
    }

    /// Delete a project session note
    async fn delete_project_session_note(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let result = sqlx::query("DELETE FROM project_session_notes WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== SERVICE SESSION IMAGES ====================

    /// Upload an image to a service session
    ///
    /// Handles HEIC/HEIF conversion to JPEG, generates thumbnails,
    /// and uploads both to S3.
    async fn upload_service_session_image(
        &self,
        ctx: &Context<'_>,
        session_id: Uuid,
        file: Upload,
        title: Option,
        notes: Option,
        #[graphql(default = false)] internal: bool,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;
        let s3 = ctx.data::>()?;

        // Read the uploaded file
        // NOTE(review): reads the whole upload into memory; confirm an upload
        // size limit is enforced upstream.
        let upload = file.value(ctx)?;
        let filename = upload.filename.clone();
        let mut data = Vec::new();
        use std::io::Read;
        let mut content = upload.into_read();
        content.read_to_end(&mut data)?;

        // Use filename as default title if not provided
        let title = title.unwrap_or_else(|| filename.clone());
        let notes = notes.unwrap_or_default();

        // Process the image (handles HEIC conversion, gets dimensions)
        let processed = process_image(&data, &filename)
            .map_err(|e| async_graphql::Error::new(format!("Image processing failed: {}", e)))?;

        // Generate thumbnail (320px)
        let thumbnail = crate::services::generate_thumbnail(&processed.data, 320)
            .map_err(|e| async_graphql::Error::new(format!("Thumbnail generation failed: {}", e)))?;

        // Generate unique S3 paths keyed by a fresh UUID
        let image_id = Uuid::new_v4();
        let image_path =
        format!("sessions/service/{}/{}.jpg", session_id, image_id);
        let thumbnail_path = format!("sessions/service/{}/{}_thumb.jpg", session_id, image_id);

        // Upload to S3
        // NOTE(review): if the DB insert below fails, the uploaded objects are
        // orphaned in S3 — no compensating delete is attempted.
        s3.upload_file(&image_path, Bytes::from(processed.data.to_vec()), &processed.content_type)
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload image: {}", e)))?;

        s3.upload_file(&thumbnail_path, Bytes::from(thumbnail), "image/jpeg")
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload thumbnail: {}", e)))?;

        // Create database record
        let now = Utc::now();
        let image: ServiceSessionImage = sqlx::query_as(
            r#"
            INSERT INTO service_session_images (
                id, created_at, updated_at, session_id, title, image, thumbnail,
                content_type, width, height, uploaded_by_team_profile_id, notes, internal
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
            RETURNING id, created_at, updated_at, session_id, title, image, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(&title)
        .bind(&image_path)
        .bind(&thumbnail_path)
        .bind(&processed.content_type)
        .bind(processed.width as i32)
        .bind(processed.height as i32)
        .bind(user.user_id)
        .bind(¬es) // NOTE(review): mojibake for `&notes` — restore
        .bind(internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionImageType::from(image))
    }

    /// Update a service session image metadata
    ///
    /// NULL input fields keep their current values (COALESCE).
    async fn update_service_session_image(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionMediaInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let image: ServiceSessionImage = sqlx::query_as(
            r#"
            UPDATE service_session_images
            SET
                title = COALESCE($2, title),
                notes = COALESCE($3, notes),
                internal = COALESCE($4, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, title, image, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal
            "#,
        )
        .bind(id)
        .bind(&input.title)
        .bind(&input.notes)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionImageType::from(image))
    }

    /// Delete a service session image
    ///
    /// Removes the S3 objects (best-effort) before deleting the DB row.
    async fn delete_service_session_image(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let s3 = ctx.data::>()?;

        // Get paths before deleting
        let paths: Option<(String, Option)> = sqlx::query_as(
            "SELECT image, thumbnail FROM service_session_images WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        if let Some((image_path, thumbnail_path)) = paths {
            // Delete from S3 (failures deliberately ignored)
            let _ = s3.delete_file(&image_path).await;
            if let Some(thumb) = thumbnail_path {
                let _ = s3.delete_file(&thumb).await;
            }
        }

        let result = sqlx::query("DELETE FROM service_session_images WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== PROJECT SESSION IMAGES ====================

    /// Upload an image to a project session
    async fn upload_project_session_image(
        &self,
        ctx: &Context<'_>,
        session_id: Uuid,
        file: Upload,
        title: Option,
        notes: Option,
        #[graphql(default = false)] internal: bool,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;
        let s3 = ctx.data::>()?;

        // Read the uploaded file (fully into memory)
        let upload = file.value(ctx)?;
        let filename = upload.filename.clone();
        let mut data = Vec::new();
        use std::io::Read;
        let mut content = upload.into_read();
        content.read_to_end(&mut data)?;

        // Use filename as default title if not provided
        let title = title.unwrap_or_else(|| filename.clone());
        let notes = notes.unwrap_or_default();

        // Process the image
        let processed = process_image(&data, &filename)
            .map_err(|e| async_graphql::Error::new(format!("Image processing failed: {}", e)))?;

        // Generate thumbnail
        let thumbnail = crate::services::generate_thumbnail(&processed.data, 320)
            .map_err(|e| async_graphql::Error::new(format!("Thumbnail generation failed: {}", e)))?;

        // Generate unique paths
        let image_id = Uuid::new_v4();
        let image_path = format!("sessions/project/{}/{}.jpg", session_id, image_id);
        let thumbnail_path = format!("sessions/project/{}/{}_thumb.jpg", session_id, image_id);

        // Upload to S3
        s3.upload_file(&image_path, Bytes::from(processed.data.to_vec()), &processed.content_type)
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload image: {}", e)))?;

        s3.upload_file(&thumbnail_path, Bytes::from(thumbnail), "image/jpeg")
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload thumbnail: {}", e)))?;

        // Create database record
        let now = Utc::now();
        let image: ProjectSessionImage = sqlx::query_as(
            r#"
            INSERT INTO project_session_images (
                id, created_at, updated_at, session_id, title, image, thumbnail,
                content_type, width, height, uploaded_by_team_profile_id, notes, internal
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
            RETURNING id, created_at, updated_at, session_id, title, image, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(&title)
        .bind(&image_path)
        .bind(&thumbnail_path)
        .bind(&processed.content_type)
        .bind(processed.width as i32)
        .bind(processed.height as i32)
        .bind(user.user_id)
        .bind(¬es) // NOTE(review): mojibake for `&notes` — restore
        .bind(internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionImageType::from(image))
    }

    /// Update a project session image metadata
    async fn update_project_session_image(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionMediaInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let image: ProjectSessionImage = sqlx::query_as(
            r#"
            UPDATE project_session_images
            SET
                title = COALESCE($2, title),
                notes = COALESCE($3, notes),
                internal = COALESCE($4, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, title, image, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal
            "#,
        )
        .bind(id)
        .bind(&input.title)
        .bind(&input.notes)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionImageType::from(image))
    }

    /// Delete a project session image
    async fn delete_project_session_image(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let s3 = ctx.data::>()?;

        // Get paths before deleting
        let paths: Option<(String, Option)> = sqlx::query_as(
            "SELECT image, thumbnail FROM project_session_images WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        if let Some((image_path, thumbnail_path)) = paths {
            // Best-effort S3 cleanup; failures ignored
            let _ = s3.delete_file(&image_path).await;
            if let Some(thumb) = thumbnail_path {
                let _ = s3.delete_file(&thumb).await;
            }
        }

        let result = sqlx::query("DELETE FROM project_session_images WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== SERVICE SESSION VIDEOS ====================

    /// Upload a video to a service session
    ///
    /// Extracts metadata using ffprobe, generates thumbnail using ffmpeg,
    /// and uploads both to S3.
    async fn upload_service_session_video(
        &self,
        ctx: &Context<'_>,
        session_id: Uuid,
        file: Upload,
        title: Option,
        notes: Option,
        #[graphql(default = false)] internal: bool,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;
        let s3 = ctx.data::>()?;

        // Read the uploaded file (fully into memory — see size-limit caveat
        // on the image upload paths)
        let upload = file.value(ctx)?;
        let filename = upload.filename.clone();
        let mut data = Vec::new();
        use std::io::Read;
        let mut content = upload.into_read();
        content.read_to_end(&mut data)?;

        // Use filename as default title if not provided
        let title = title.unwrap_or_else(|| filename.clone());
        let notes = notes.unwrap_or_default();

        // Verify video format
        let content_type = crate::services::verify_video(&data, &filename)
            .map_err(|e| async_graphql::Error::new(format!("Invalid video: {}", e)))?;

        // Write to temp file for ffmpeg processing
        let temp_video = NamedTempFile::new()?;
        std::fs::write(temp_video.path(), &data)?;

        // Extract metadata
        let metadata = crate::services::extract_metadata(temp_video.path())
            .map_err(|e| async_graphql::Error::new(format!("Failed to extract metadata: {}", e)))?;

        // Generate thumbnail at 1 second
        // NOTE(review): `with_extension("jpg")` produces a sibling path NOT
        // owned by the NamedTempFile guard — the ffmpeg-written .jpg is never
        // cleaned up (temp-file leak) and the name is predictable. Prefer
        // tempfile::Builder::new().suffix(".jpg").
        // NOTE(review): a fixed 1.0s offset presumably fails for videos shorter
        // than one second — confirm generate_video_thumbnail's behavior.
        let temp_thumb = NamedTempFile::new()?;
        let thumb_path = temp_thumb.path().with_extension("jpg");
        crate::services::generate_video_thumbnail(temp_video.path(), &thumb_path, 1.0)
            .map_err(|e| async_graphql::Error::new(format!("Failed to generate thumbnail: {}", e)))?;

        let thumbnail_data = std::fs::read(&thumb_path)?;

        // Generate unique paths; extension derived from verified content type
        let video_id = Uuid::new_v4();
        let ext = crate::services::video::extension_for_content_type(&content_type);
        let video_path = format!("sessions/service/{}/{}.{}", session_id, video_id, ext);
        let thumbnail_path = format!("sessions/service/{}/{}_thumb.jpg", session_id, video_id);

        // Upload to S3
        // NOTE(review): `data.clone()` duplicates the whole video in memory;
        // only `data.len()` is needed afterwards — capture the length first
        // and move `data` into Bytes.
        s3.upload_file(&video_path, Bytes::from(data.clone()), &content_type)
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload video: {}", e)))?;

        s3.upload_file(&thumbnail_path, Bytes::from(thumbnail_data), "image/jpeg")
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload thumbnail: {}", e)))?;

        // Create database record
        let now = Utc::now();
        let video: ServiceSessionVideo = sqlx::query_as(
            r#"
            INSERT INTO service_session_videos (
                id, created_at, updated_at, session_id, title, video, thumbnail,
                content_type, width, height, uploaded_by_team_profile_id, notes, internal,
                duration_seconds, file_size_bytes
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            RETURNING id, created_at, updated_at, session_id, title, video, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal,
                      duration_seconds, file_size_bytes
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(&title)
        .bind(&video_path)
        .bind(&thumbnail_path)
        .bind(&content_type)
        .bind(metadata.width as i32)
        .bind(metadata.height as i32)
        .bind(user.user_id)
        .bind(¬es) // NOTE(review): mojibake for `&notes` — restore
        .bind(internal)
        .bind(metadata.duration_seconds)
        .bind(data.len() as i64)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionVideoType::from(video))
    }

    /// Update a service session video metadata
    ///
    /// NULL input fields keep their current values (COALESCE).
    async fn update_service_session_video(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionMediaInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let video: ServiceSessionVideo = sqlx::query_as(
            r#"
            UPDATE service_session_videos
            SET
                title = COALESCE($2, title),
                notes = COALESCE($3, notes),
                internal = COALESCE($4, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, title, video, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal,
                      duration_seconds, file_size_bytes
            "#,
        )
        .bind(id)
        .bind(&input.title)
        .bind(&input.notes)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ServiceSessionVideoType::from(video))
    }

    /// Delete a service session video
    ///
    /// Removes the S3 objects (best-effort) before deleting the DB row.
    async fn delete_service_session_video(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let s3 = ctx.data::>()?;

        // Get paths before deleting
        let paths: Option<(String, Option)> = sqlx::query_as(
            "SELECT video, thumbnail FROM service_session_videos WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        if let Some((video_path, thumbnail_path)) = paths {
            let _ = s3.delete_file(&video_path).await;
            if let Some(thumb) = thumbnail_path {
                let _ = s3.delete_file(&thumb).await;
            }
        }

        let result = sqlx::query("DELETE FROM service_session_videos WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }

    // ==================== PROJECT SESSION VIDEOS ====================

    /// Upload a video to a project session
    async fn upload_project_session_video(
        &self,
        ctx: &Context<'_>,
        session_id: Uuid,
        file: Upload,
        title: Option,
        notes: Option,
        #[graphql(default = false)] internal: bool,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let user = ctx.data::()?;
        let s3 = ctx.data::>()?;

        // Read the uploaded file
        let upload = file.value(ctx)?;
        let filename = upload.filename.clone();
        let mut data = Vec::new();
        use std::io::Read;
        let mut content = upload.into_read();
        content.read_to_end(&mut data)?;

        // Use filename as default title if not provided
        let title = title.unwrap_or_else(|| filename.clone());
        let notes = notes.unwrap_or_default();

        // Verify video format
        let content_type = crate::services::verify_video(&data, &filename)
            .map_err(|e| async_graphql::Error::new(format!("Invalid video: {}", e)))?;

        // Write to temp file for ffmpeg processing
        let temp_video = NamedTempFile::new()?;
        std::fs::write(temp_video.path(), &data)?;

        // Extract metadata
        let metadata = crate::services::extract_metadata(temp_video.path())
            .map_err(|e| async_graphql::Error::new(format!("Failed to extract metadata: {}", e)))?;

        // Generate thumbnail (same temp-file-leak caveat as the service upload)
        let temp_thumb = NamedTempFile::new()?;
        let thumb_path = temp_thumb.path().with_extension("jpg");
        crate::services::generate_video_thumbnail(temp_video.path(), &thumb_path, 1.0)
            .map_err(|e| async_graphql::Error::new(format!("Failed to generate thumbnail: {}", e)))?;

        let thumbnail_data = std::fs::read(&thumb_path)?;

        // Generate unique paths
        let video_id = Uuid::new_v4();
        let ext = crate::services::video::extension_for_content_type(&content_type);
        let video_path = format!("sessions/project/{}/{}.{}", session_id, video_id, ext);
        let thumbnail_path = format!("sessions/project/{}/{}_thumb.jpg", session_id, video_id);

        // Upload to S3 (same needless `data.clone()` as the service upload)
        s3.upload_file(&video_path, Bytes::from(data.clone()), &content_type)
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload video: {}", e)))?;

        s3.upload_file(&thumbnail_path, Bytes::from(thumbnail_data), "image/jpeg")
            .await
            .map_err(|e| async_graphql::Error::new(format!("Failed to upload thumbnail: {}", e)))?;

        // Create database record
        let now = Utc::now();
        let video: ProjectSessionVideo = sqlx::query_as(
            r#"
            INSERT INTO project_session_videos (
                id, created_at, updated_at, session_id, title, video, thumbnail,
                content_type, width, height, uploaded_by_team_profile_id, notes, internal,
                duration_seconds, file_size_bytes
            )
            VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            RETURNING id, created_at, updated_at, session_id, title, video, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal,
                      duration_seconds, file_size_bytes
            "#,
        )
        .bind(now)
        .bind(session_id)
        .bind(&title)
        .bind(&video_path)
        .bind(&thumbnail_path)
        .bind(&content_type)
        .bind(metadata.width as i32)
        .bind(metadata.height as i32)
        .bind(user.user_id)
        .bind(¬es) // NOTE(review): mojibake for `&notes` — restore
        .bind(internal)
        .bind(metadata.duration_seconds)
        .bind(data.len() as i64)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionVideoType::from(video))
    }

    /// Update a project session video metadata
    async fn update_project_session_video(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
        input: UpdateSessionMediaInput,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let video: ProjectSessionVideo = sqlx::query_as(
            r#"
            UPDATE project_session_videos
            SET
                title = COALESCE($2, title),
                notes = COALESCE($3, notes),
                internal = COALESCE($4, internal),
                updated_at = NOW()
            WHERE id = $1
            RETURNING id, created_at, updated_at, session_id, title, video, thumbnail,
                      content_type, width, height, uploaded_by_team_profile_id AS uploaded_by_id, notes, internal,
                      duration_seconds, file_size_bytes
            "#,
        )
        .bind(id)
        .bind(&input.title)
        .bind(&input.notes)
        .bind(input.internal)
        .fetch_one(&*pool)
        .await?;

        Ok(ProjectSessionVideoType::from(video))
    }

    /// Delete a project session video
    async fn delete_project_session_video(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;
        let s3 = ctx.data::>()?;

        // Get paths before deleting
        let paths: Option<(String, Option)> = sqlx::query_as(
            "SELECT video, thumbnail FROM project_session_videos WHERE id = $1",
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        if let Some((video_path, thumbnail_path)) = paths {
            let _ = s3.delete_file(&video_path).await;
            if let Some(thumb) = thumbnail_path {
                let _ = s3.delete_file(&thumb).await;
            }
        }

        let result = sqlx::query("DELETE FROM project_session_videos WHERE id = $1")
            .bind(id)
            .execute(&*pool)
            .await?;

        Ok(result.rows_affected() > 0)
    }
}
diff --git a/src/graphql/mutations/wave.rs new
file mode 100644 index 0000000..c6a7ea1 --- /dev/null +++ b/src/graphql/mutations/wave.rs @@ -0,0 +1,555 @@
//! Wave Accounting GraphQL mutations
//!
//! Mutations for creating Wave customers and invoices from Nexus data.

use async_graphql::{Context, Object, Result};
use std::sync::Arc;
use uuid::Uuid;

use crate::db::Database;
use crate::graphql::types::{
    CreateWaveCustomerInput, CreateWaveCustomerResult, CreateWaveInvoiceInput,
    CreateWaveInvoiceResult, CreateWaveProductInput, CustomerType, InvoiceType,
    UpdateWaveCustomerInput, UpdateWaveProductInput, WaveCustomerType, WaveInvoiceType,
    WaveMutationResult, WaveProductType,
};
use crate::models::{Customer, Invoice};
use crate::services::{
    WaveCreateCustomerInput, WaveCreateInvoiceInput, WaveCreateInvoiceItemInput,
    WaveCreateProductInput, WaveService, WaveUpdateCustomerInput, WaveUpdateProductInput,
};

#[derive(Default)]
pub struct WaveMutation;

#[Object]
impl WaveMutation {
    /// Create a Wave customer from a Nexus customer
    ///
    /// Links the Nexus customer to Wave by storing the wave_customer_id.
    ///
    /// NOTE(review): the already-linked check and the later UPDATE are not in
    /// one transaction — two concurrent calls can both pass the check and
    /// create duplicate Wave customers (check-then-act race).
    /// NOTE(review): if the UPDATE fails after the Wave customer is created,
    /// an unlinked customer is left behind in Wave.
    async fn create_wave_customer(
        &self,
        ctx: &Context<'_>,
        input: CreateWaveCustomerInput,
    ) -> Result {
        let wave = ctx.data::>()?;
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Get the Nexus customer
        let customer = sqlx::query_as::<_, Customer>(
            r#"
            SELECT id, created_at, updated_at, name, status, start_date, end_date,
                   billing_terms, billing_email, wave_customer_id
            FROM customers
            WHERE id = $1
            "#,
        )
        .bind(input.customer_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("Customer not found"))?;

        // Check if already linked (soft failure, not a GraphQL error)
        if customer.wave_customer_id.is_some() {
            return Ok(CreateWaveCustomerResult {
                success: false,
                error: Some("Customer is already linked to Wave".to_string()),
                customer: None,
                nexus_customer: Some(CustomerType::from(customer)),
            });
        }

        // Create Wave customer; explicit input email takes precedence over the
        // stored billing email
        let wave_input = WaveCreateCustomerInput {
            business_id: wave.business_id().to_string(),
            name: customer.name.clone(),
            email: input.email.or(customer.billing_email.clone()),
            address: None, // Could be enhanced to use customer address
            currency: input.currency,
        };

        let wave_customer = match wave.create_customer(wave_input).await {
            Ok(c) => c,
            Err(e) => {
                return Ok(CreateWaveCustomerResult {
                    success: false,
                    error: Some(format!("Failed to create Wave customer: {}", e)),
                    customer: None,
                    nexus_customer: Some(CustomerType::from(customer)),
                });
            }
        };

        // Update Nexus customer with wave_customer_id
        let updated_customer = sqlx::query_as::<_, Customer>(
            r#"
            UPDATE customers
            SET wave_customer_id = $1, updated_at = NOW()
            WHERE id = $2
            RETURNING id, created_at, updated_at, name, status, start_date, end_date,
                      billing_terms, billing_email, wave_customer_id
            "#,
        )
        .bind(&wave_customer.id)
        .bind(input.customer_id)
        .fetch_one(&*pool)
        .await?;

        Ok(CreateWaveCustomerResult {
            success: true,
            error: None,
            customer: Some(WaveCustomerType::from(wave_customer)),
            nexus_customer: Some(CustomerType::from(updated_customer)),
        })
    }

    /// Create a Wave invoice from a Nexus invoice
    ///
    /// Requires:
    /// - Customer must be linked to Wave (wave_customer_id set)
    /// - Invoice items (revenues/projects) must be linked to Wave products (wave_service_id set)
    ///
    /// The amounts from Nexus override the Wave product default prices.
+ async fn create_wave_invoice( + &self, + ctx: &Context<'_>, + input: CreateWaveInvoiceInput, + ) -> Result { + let wave = ctx.data::>()?; + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get the invoice + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, + status, date_paid, payment_type, wave_invoice_id + FROM invoices + WHERE id = $1 + "#, + ) + .bind(input.invoice_id) + .fetch_optional(&*pool) + .await? + .ok_or_else(|| async_graphql::Error::new("Invoice not found"))?; + + // Check if already synced + if invoice.wave_invoice_id.is_some() { + return Ok(CreateWaveInvoiceResult { + success: false, + error: Some("Invoice is already synced to Wave".to_string()), + invoice: None, + nexus_invoice: Some(InvoiceType::from(invoice)), + }); + } + + // Get customer's wave_customer_id + let customer = sqlx::query_as::<_, Customer>( + r#" + SELECT id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + FROM customers + WHERE id = $1 + "#, + ) + .bind(invoice.customer_id) + .fetch_one(&*pool) + .await?; + + let wave_customer_id = match &customer.wave_customer_id { + Some(id) => id.clone(), + None => { + return Ok(CreateWaveInvoiceResult { + success: false, + error: Some("Customer is not linked to Wave".to_string()), + invoice: None, + nexus_invoice: Some(InvoiceType::from(invoice)), + }); + } + }; + + // Get invoice revenues with wave_service_id + let revenues: Vec<(String, String, rust_decimal::Decimal)> = sqlx::query_as( + r#" + SELECT r.wave_service_id, a.name as description, ir.amount + FROM invoice_revenues ir + JOIN revenues r ON r.id = ir.revenue_id + JOIN accounts a ON a.id = r.account_id + WHERE ir.invoice_id = $1 AND r.wave_service_id IS NOT NULL + "#, + ) + .bind(input.invoice_id) + .fetch_all(&*pool) + .await?; + + // Get invoice projects with wave_service_id + let projects: Vec<(String, String, rust_decimal::Decimal)> = 
sqlx::query_as( + r#" + SELECT p.wave_service_id, p.name as description, ip.amount + FROM invoice_projects ip + JOIN projects p ON p.id = ip.project_id + WHERE ip.invoice_id = $1 AND p.wave_service_id IS NOT NULL + "#, + ) + .bind(input.invoice_id) + .fetch_all(&*pool) + .await?; + + // Build Wave invoice items + let mut items = Vec::new(); + + for (wave_service_id, description, amount) in revenues { + items.push(WaveCreateInvoiceItemInput { + product_id: wave_service_id, + description: Some(description), + quantity: 1.0, + unit_price: rust_decimal_to_f64(amount), + }); + } + + for (wave_service_id, description, amount) in projects { + items.push(WaveCreateInvoiceItemInput { + product_id: wave_service_id, + description: Some(description), + quantity: 1.0, + unit_price: rust_decimal_to_f64(amount), + }); + } + + if items.is_empty() { + return Ok(CreateWaveInvoiceResult { + success: false, + error: Some("No invoice items are linked to Wave products".to_string()), + invoice: None, + nexus_invoice: Some(InvoiceType::from(invoice)), + }); + } + + // Create Wave invoice + let wave_input = WaveCreateInvoiceInput { + business_id: wave.business_id().to_string(), + customer_id: wave_customer_id, + items, + invoice_date: input.invoice_date.unwrap_or_else(|| invoice.start_date.to_string()), + due_date: input.due_date, + memo: input.memo, + }; + + let wave_invoice = match wave.create_invoice(wave_input).await { + Ok(inv) => inv, + Err(e) => { + return Ok(CreateWaveInvoiceResult { + success: false, + error: Some(format!("Failed to create Wave invoice: {}", e)), + invoice: None, + nexus_invoice: Some(InvoiceType::from(invoice)), + }); + } + }; + + // Update Nexus invoice with wave_invoice_id + let updated_invoice = sqlx::query_as::<_, Invoice>( + r#" + UPDATE invoices + SET wave_invoice_id = $1, updated_at = NOW() + WHERE id = $2 + RETURNING id, created_at, updated_at, customer_id, start_date, end_date, + status, date_paid, payment_type, wave_invoice_id + "#, + ) + 
.bind(&wave_invoice.id) + .bind(input.invoice_id) + .fetch_one(&*pool) + .await?; + + Ok(CreateWaveInvoiceResult { + success: true, + error: None, + invoice: Some(WaveInvoiceType::from(wave_invoice)), + nexus_invoice: Some(InvoiceType::from(updated_invoice)), + }) + } + + /// Link a Nexus customer to an existing Wave customer + async fn link_customer_to_wave( + &self, + ctx: &Context<'_>, + customer_id: Uuid, + wave_customer_id: String, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer = sqlx::query_as::<_, Customer>( + r#" + UPDATE customers + SET wave_customer_id = $1, updated_at = NOW() + WHERE id = $2 + RETURNING id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + "#, + ) + .bind(wave_customer_id) + .bind(customer_id) + .fetch_one(&*pool) + .await?; + + Ok(CustomerType::from(customer)) + } + + /// Unlink a Nexus customer from Wave + async fn unlink_customer_from_wave( + &self, + ctx: &Context<'_>, + customer_id: Uuid, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer = sqlx::query_as::<_, Customer>( + r#" + UPDATE customers + SET wave_customer_id = NULL, updated_at = NOW() + WHERE id = $1 + RETURNING id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + "#, + ) + .bind(customer_id) + .fetch_one(&*pool) + .await?; + + Ok(CustomerType::from(customer)) + } + + // ========================================================================= + // Wave Invoice Lifecycle Mutations + // ========================================================================= + + /// Approve a Wave invoice (DRAFT -> SAVED) + /// + /// WARNING: This is a point of no return. Once approved, the invoice cannot be edited. 
+ async fn approve_wave_invoice( + &self, + ctx: &Context<'_>, + invoice_id: String, + ) -> Result> { + let wave = ctx.data::>()?; + + match wave.approve_invoice(&invoice_id).await { + Ok(invoice) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveInvoiceType::from(invoice)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + /// Send a Wave invoice to the customer (SAVED -> SENT) + /// + /// This will email the invoice to the customer. + async fn send_wave_invoice( + &self, + ctx: &Context<'_>, + invoice_id: String, + ) -> Result> { + let wave = ctx.data::>()?; + + match wave.send_invoice(&invoice_id).await { + Ok(invoice) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveInvoiceType::from(invoice)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + /// Delete a Wave invoice (only DRAFT invoices can be deleted) + async fn delete_wave_invoice( + &self, + ctx: &Context<'_>, + invoice_id: String, + ) -> Result> { + let wave = ctx.data::>()?; + + match wave.delete_invoice(&invoice_id).await { + Ok(_) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(true), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + // ========================================================================= + // Wave Product Mutations + // ========================================================================= + + /// Create a new product in Wave + async fn create_wave_product( + &self, + ctx: &Context<'_>, + input: CreateWaveProductInput, + ) -> Result> { + let wave = ctx.data::>()?; + + let wave_input = WaveCreateProductInput { + business_id: wave.business_id().to_string(), + name: input.name, + description: input.description, + unit_price: input.unit_price, + is_sold: input.is_sold.unwrap_or(true), + 
income_account_id: input.income_account_id, + }; + + match wave.create_product(wave_input).await { + Ok(product) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveProductType::from(product)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + /// Update an existing product in Wave + async fn update_wave_product( + &self, + ctx: &Context<'_>, + input: UpdateWaveProductInput, + ) -> Result> { + let wave = ctx.data::>()?; + + let wave_input = WaveUpdateProductInput { + id: input.id, + name: input.name, + description: input.description, + unit_price: input.unit_price, + income_account_id: input.income_account_id, + }; + + match wave.update_product(wave_input).await { + Ok(product) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveProductType::from(product)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + /// Archive a product in Wave (soft delete) + async fn archive_wave_product( + &self, + ctx: &Context<'_>, + product_id: String, + ) -> Result> { + let wave = ctx.data::>()?; + + match wave.archive_product(&product_id).await { + Ok(product) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveProductType::from(product)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + // ========================================================================= + // Wave Customer Mutations + // ========================================================================= + + /// Update an existing customer in Wave + async fn update_wave_customer( + &self, + ctx: &Context<'_>, + input: UpdateWaveCustomerInput, + ) -> Result> { + let wave = ctx.data::>()?; + + let wave_input = WaveUpdateCustomerInput { + id: input.id, + name: input.name, + email: input.email, + address: input.address.map(|a| 
crate::services::WaveCreateAddressInput { + address_line1: a.address_line1, + address_line2: a.address_line2, + city: a.city, + province_code: a.province_code, + postal_code: a.postal_code, + country_code: a.country_code, + }), + currency: input.currency, + }; + + match wave.update_customer(wave_input).await { + Ok(customer) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(WaveCustomerType::from(customer)), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } + + /// Delete a customer in Wave + async fn delete_wave_customer( + &self, + ctx: &Context<'_>, + customer_id: String, + ) -> Result> { + let wave = ctx.data::>()?; + + match wave.delete_customer(&customer_id).await { + Ok(_) => Ok(WaveMutationResult { + success: true, + error: None, + data: Some(true), + }), + Err(e) => Ok(WaveMutationResult { + success: false, + error: Some(e.to_string()), + data: None, + }), + } + } +} + +/// Convert Decimal to f64 for Wave API +fn rust_decimal_to_f64(d: rust_decimal::Decimal) -> f64 { + use std::str::FromStr; + f64::from_str(&d.to_string()).unwrap_or(0.0) +} diff --git a/src/graphql/queries/account.rs b/src/graphql/queries/account.rs new file mode 100644 index 0000000..dfe17b7 --- /dev/null +++ b/src/graphql/queries/account.rs @@ -0,0 +1,74 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{AccountFilter, AccountType}; +use crate::models::{Account, EntityStatus}; + +#[derive(Default)] +pub struct AccountQuery; + +#[Object] +impl AccountQuery { + /// Get all accounts with optional filtering + async fn accounts( + &self, + ctx: &Context<'_>, + #[graphql(desc = "Filter by customer ID")] customer_id: Option, + filter: Option, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let filter = filter.unwrap_or_default(); + + let name_pattern = filter + .name + .as_ref() + .map(|n| format!("%{}%", 
n.to_lowercase())); + let status: Option = filter.status.map(|s| s.into()); + + let accounts: Vec = sqlx::query_as::<_, Account>( + r#" + SELECT id, created_at, updated_at, customer_id, name, status, start_date, end_date + FROM accounts + WHERE ($1::uuid IS NULL OR customer_id = $1) + AND ($2::text IS NULL OR LOWER(name) LIKE $2) + AND ($3::entity_status IS NULL OR status = $3) + ORDER BY name ASC + "#, + ) + .bind(customer_id) + .bind(name_pattern) + .bind(status) + .fetch_all(&*pool) + .await?; + + let mut results: Vec = accounts.into_iter().map(AccountType::from).collect(); + + if let Some(is_active) = filter.is_active { + results.retain(|a| a.is_active == is_active); + } + + Ok(results) + } + + /// Get a single account by ID + async fn account(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let account = sqlx::query_as::<_, Account>( + r#" + SELECT id, created_at, updated_at, customer_id, name, status, start_date, end_date + FROM accounts + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(account.map(AccountType::from)) + } +} diff --git a/src/graphql/queries/calendar.rs b/src/graphql/queries/calendar.rs new file mode 100644 index 0000000..1b349f0 --- /dev/null +++ b/src/graphql/queries/calendar.rs @@ -0,0 +1,101 @@ +use async_graphql::{Context, Object, Result}; +use chrono::{DateTime, Utc}; +use std::sync::Arc; + +use crate::graphql::types::{CalendarEventFilterInput, CalendarEventType}; +use crate::services::google_calendar::{GoogleCalendarService, ListEventsQuery}; + +#[derive(Default)] +pub struct CalendarQuery; + +#[Object] +impl CalendarQuery { + /// Get a calendar event by ID + async fn calendar_event( + &self, + ctx: &Context<'_>, + event_id: String, + ) -> Result> { + let calendar = ctx.data::>()?; + + match calendar.get_event(&event_id).await { + Ok(event) => Ok(Some(CalendarEventType::from(event))), + 
Err(crate::services::google_calendar::CalendarError::NotFound) => Ok(None), + Err(e) => Err(e.into()), + } + } + + /// List calendar events + async fn calendar_events( + &self, + ctx: &Context<'_>, + filter: Option, + ) -> Result> { + let calendar = ctx.data::>()?; + + let filter = filter.unwrap_or_default(); + let query = ListEventsQuery { + time_min: filter.time_min, + time_max: filter.time_max, + max_results: filter.max_results.map(|v| v as u32), + q: filter.q, + single_events: Some(true), + order_by: Some("startTime".to_string()), + }; + + let events = calendar.list_events(query).await?; + + Ok(events.into_iter().map(CalendarEventType::from).collect()) + } + + /// Get calendar events for today + async fn todays_events( + &self, + ctx: &Context<'_>, + ) -> Result> { + let calendar = ctx.data::>()?; + + let now = Utc::now(); + let start_of_day = now.date_naive().and_hms_opt(0, 0, 0).unwrap(); + let end_of_day = now.date_naive().and_hms_opt(23, 59, 59).unwrap(); + + let query = ListEventsQuery { + time_min: Some(DateTime::from_naive_utc_and_offset(start_of_day, Utc)), + time_max: Some(DateTime::from_naive_utc_and_offset(end_of_day, Utc)), + max_results: Some(50), + q: None, + single_events: Some(true), + order_by: Some("startTime".to_string()), + }; + + let events = calendar.list_events(query).await?; + + Ok(events.into_iter().map(CalendarEventType::from).collect()) + } + + /// Get upcoming calendar events (next 7 days) + async fn upcoming_events( + &self, + ctx: &Context<'_>, + days: Option, + ) -> Result> { + let calendar = ctx.data::>()?; + + let days = days.unwrap_or(7) as i64; + let now = Utc::now(); + let end = now + chrono::Duration::days(days); + + let query = ListEventsQuery { + time_min: Some(now), + time_max: Some(end), + max_results: Some(100), + q: None, + single_events: Some(true), + order_by: Some("startTime".to_string()), + }; + + let events = calendar.list_events(query).await?; + + Ok(events.into_iter().map(CalendarEventType::from).collect()) + } 
}
diff --git a/src/graphql/queries/customer.rs b/src/graphql/queries/customer.rs
new file mode 100644 index 0000000..8138b4e --- /dev/null +++ b/src/graphql/queries/customer.rs @@ -0,0 +1,75 @@
use async_graphql::{Context, Object, Result};
use uuid::Uuid;

use crate::db::Database;
use crate::graphql::types::{CustomerFilter, CustomerType};
use crate::models::{Customer, EntityStatus};

#[derive(Default)]
pub struct CustomerQuery;

#[Object]
impl CustomerQuery {
    /// Get all customers with optional filtering.
    ///
    /// Name and status filters are applied in SQL; `is_active` is a field
    /// computed on `CustomerType`, so it is filtered in memory afterwards.
    async fn customers(
        &self,
        ctx: &Context<'_>,
        filter: Option<CustomerFilter>,
    ) -> Result<Vec<CustomerType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let filter = filter.unwrap_or_default();

        // Case-insensitive substring match on the customer name.
        let name_pattern = filter
            .name
            .as_ref()
            .map(|n| format!("%{}%", n.to_lowercase()));
        let status: Option<EntityStatus> = filter.status.map(|s| s.into());

        let customers: Vec<Customer> = sqlx::query_as::<_, Customer>(
            r#"
            SELECT id, created_at, updated_at, name, status, start_date, end_date,
                   billing_terms, billing_email, wave_customer_id
            FROM customers
            WHERE ($1::text IS NULL OR LOWER(name) LIKE $1)
              AND ($2::entity_status IS NULL OR status = $2)
            ORDER BY name ASC
            "#,
        )
        .bind(name_pattern)
        .bind(status)
        .fetch_all(&*pool)
        .await?;

        // Convert to GraphQL types and optionally filter by is_active (computed field)
        let mut results: Vec<CustomerType> =
            customers.into_iter().map(CustomerType::from).collect();

        if let Some(is_active) = filter.is_active {
            results.retain(|c| c.is_active == is_active);
        }

        Ok(results)
    }

    /// Get a single customer by ID
    async fn customer(&self, ctx: &Context<'_>, id: Uuid) -> Result<Option<CustomerType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let customer = sqlx::query_as::<_, Customer>(
            r#"
            SELECT id, created_at, updated_at, name, status, start_date, end_date,
                   billing_terms, billing_email, wave_customer_id
            FROM customers
            WHERE id = $1
            "#,
        )
        .bind(id)
.fetch_optional(&*pool) + .await?; + + Ok(customer.map(CustomerType::from)) + } +} diff --git a/src/graphql/queries/event.rs b/src/graphql/queries/event.rs new file mode 100644 index 0000000..fc64fa8 --- /dev/null +++ b/src/graphql/queries/event.rs @@ -0,0 +1,127 @@ +//! Event GraphQL queries +//! +//! Queries for the event audit trail system. + +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::EventType_; +use crate::models::Event; + +#[derive(Default)] +pub struct EventQuery; + +#[Object] +impl EventQuery { + /// Get a single event by ID + async fn event(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let event = sqlx::query_as::<_, Event>( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, actor_id, + metadata, timestamp, created_at + FROM events + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(event.map(EventType_::from)) + } + + /// Get events for an entity (audit trail) + async fn events_for_entity( + &self, + ctx: &Context<'_>, + entity_type: String, + entity_id: Uuid, + #[graphql(default = 50)] limit: i32, + #[graphql(default = 0)] offset: i32, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let events: Vec = sqlx::query_as( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, actor_id, + metadata, timestamp, created_at + FROM events + WHERE entity_type = $1 AND entity_id = $2 + ORDER BY timestamp DESC, created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(&entity_type) + .bind(entity_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + Ok(events.into_iter().map(EventType_::from).collect()) + } + + /// Get recent events across all entities (admin view) + async fn recent_events( + &self, + ctx: &Context<'_>, + #[graphql(default = 100)] limit: i32, + #[graphql(default = 0)] offset: i32, + ) -> Result> { + let db = 
ctx.data::()?; + let pool = db.pool().await; + + let events: Vec = sqlx::query_as( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, actor_id, + metadata, timestamp, created_at + FROM events + ORDER BY timestamp DESC, created_at DESC + LIMIT $1 OFFSET $2 + "#, + ) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + Ok(events.into_iter().map(EventType_::from).collect()) + } + + /// Get events by actor (user activity history) + async fn events_by_actor( + &self, + ctx: &Context<'_>, + actor_type: String, + actor_id: Uuid, + #[graphql(default = 50)] limit: i32, + #[graphql(default = 0)] offset: i32, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let events: Vec = sqlx::query_as( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, actor_id, + metadata, timestamp, created_at + FROM events + WHERE actor_type = $1 AND actor_id = $2 + ORDER BY timestamp DESC, created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(&actor_type) + .bind(actor_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + Ok(events.into_iter().map(EventType_::from).collect()) + } +} diff --git a/src/graphql/queries/invoice.rs b/src/graphql/queries/invoice.rs new file mode 100644 index 0000000..a115db7 --- /dev/null +++ b/src/graphql/queries/invoice.rs @@ -0,0 +1,238 @@ +use async_graphql::{Context, Object, Result}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + EligibleInvoiceProjectType, EligibleRevenueType, InvoiceConnection, InvoiceFilterInput, + InvoiceType, PaginationInput, +}; +use crate::models::{Invoice, InvoiceStatus}; + +#[derive(Default)] +pub struct InvoiceQuery; + +#[Object] +impl InvoiceQuery { + /// Get paginated invoices with optional filtering + async fn invoices( + &self, + ctx: &Context<'_>, + filter: Option, + pagination: Option, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let 
filter = filter.unwrap_or_default(); + let pagination = pagination.unwrap_or_default(); + let offset = pagination.offset(); + let limit = pagination.limit(); + + let status: Option = filter.status.map(InvoiceStatus::from); + + // Get total count + let total_count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM invoices + WHERE ($1::uuid IS NULL OR customer_id = $1) + AND ($2::invoice_status IS NULL OR status = $2) + AND ($3::date IS NULL OR (start_date <= $3 AND end_date >= $3)) + AND ($4::date IS NULL OR start_date >= $4) + AND ($5::date IS NULL OR end_date <= $5) + "#, + ) + .bind(filter.customer_id) + .bind(status) + .bind(filter.date) + .bind(filter.start_date_from) + .bind(filter.end_date_to) + .fetch_one(&*pool) + .await?; + + // Get paginated invoices + let invoices: Vec = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, status, + date_paid, payment_type, wave_invoice_id + FROM invoices + WHERE ($1::uuid IS NULL OR customer_id = $1) + AND ($2::invoice_status IS NULL OR status = $2) + AND ($3::date IS NULL OR (start_date <= $3 AND end_date >= $3)) + AND ($4::date IS NULL OR start_date >= $4) + AND ($5::date IS NULL OR end_date <= $5) + ORDER BY start_date DESC, id ASC + LIMIT $6 OFFSET $7 + "#, + ) + .bind(filter.customer_id) + .bind(status) + .bind(filter.date) + .bind(filter.start_date_from) + .bind(filter.end_date_to) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + let items: Vec = invoices.into_iter().map(InvoiceType::from).collect(); + let has_next_page = (offset + limit) < total_count; + + Ok(InvoiceConnection { + items, + total_count: total_count as i32, + has_next_page, + }) + } + + /// Get a single invoice by ID + async fn invoice(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let invoice = sqlx::query_as::<_, Invoice>( + r#" + SELECT id, created_at, updated_at, customer_id, start_date, end_date, 
status, + date_paid, payment_type, wave_invoice_id + FROM invoices + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(invoice.map(InvoiceType::from)) + } + + /// Get eligible revenues for a customer that can be added to an invoice + /// Returns revenues from accounts belonging to the customer that are not already on any invoice + async fn eligible_revenues_for_invoice( + &self, + ctx: &Context<'_>, + customer_id: Uuid, + date_from: NaiveDate, + date_to: NaiveDate, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Find revenues that: + // 1. Belong to accounts under this customer + // 2. Are active during the date range + // 3. Are not already on an invoice with an overlapping period + let rows: Vec = sqlx::query_as::<_, EligibleRevenueRow>( + r#" + SELECT + r.id as revenue_id, + r.account_id, + a.name as account_name, + r.amount + FROM revenues r + JOIN accounts a ON a.id = r.account_id + WHERE a.customer_id = $1 + AND r.start_date <= $3 + AND (r.end_date IS NULL OR r.end_date >= $2) + AND NOT EXISTS ( + SELECT 1 FROM invoice_revenues ir + JOIN invoices i ON i.id = ir.invoice_id + WHERE ir.revenue_id = r.id + AND i.start_date <= $3 + AND i.end_date >= $2 + ) + ORDER BY a.name, r.start_date + "#, + ) + .bind(customer_id) + .bind(date_from) + .bind(date_to) + .fetch_all(&*pool) + .await?; + + let items = rows + .into_iter() + .map(|r| EligibleRevenueType { + revenue_id: r.revenue_id, + account_id: r.account_id, + account_name: r.account_name, + amount: r.amount, + }) + .collect(); + + Ok(items) + } + + /// Get eligible projects for a customer that can be added to an invoice + /// Returns completed projects for the customer that are not already on any invoice + async fn eligible_projects_for_invoice( + &self, + ctx: &Context<'_>, + customer_id: Uuid, + date_from: NaiveDate, + date_to: NaiveDate, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Find projects that: + // 1. 
Belong to this customer + // 2. Are completed + // 3. Are within the date range + // 4. Are not already on any invoice + let rows: Vec = sqlx::query_as::<_, EligibleProjectRow>( + r#" + SELECT + p.id as project_id, + p.name, + p.date, + COALESCE(p.amount, 0) as amount + FROM projects p + WHERE p.customer_id = $1 + AND p.status = 'COMPLETED' + AND p.date >= $2 + AND p.date <= $3 + AND NOT EXISTS ( + SELECT 1 FROM invoice_projects ip WHERE ip.project_id = p.id + ) + ORDER BY p.date, p.name + "#, + ) + .bind(customer_id) + .bind(date_from) + .bind(date_to) + .fetch_all(&*pool) + .await?; + + let items = rows + .into_iter() + .map(|r| EligibleInvoiceProjectType { + project_id: r.project_id, + name: r.name, + date: r.date, + amount: r.amount, + }) + .collect(); + + Ok(items) + } +} + +/// Helper struct for eligible revenue query +#[derive(sqlx::FromRow)] +struct EligibleRevenueRow { + revenue_id: Uuid, + account_id: Uuid, + account_name: String, + amount: Decimal, +} + +/// Helper struct for eligible project query +#[derive(sqlx::FromRow)] +struct EligibleProjectRow { + project_id: Uuid, + name: String, + date: NaiveDate, + amount: Decimal, +} diff --git a/src/graphql/queries/messaging.rs b/src/graphql/queries/messaging.rs new file mode 100644 index 0000000..1adb2a3 --- /dev/null +++ b/src/graphql/queries/messaging.rs @@ -0,0 +1,273 @@ +//! Messaging GraphQL queries +//! +//! Queries for conversations, messages, and participants. 
+ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::graphql::types::{ConversationType_, MessageType}; +use crate::models::{Conversation, Message}; + +#[derive(Default)] +pub struct MessagingQuery; + +#[Object] +impl MessagingQuery { + // ==================== CONVERSATION QUERIES ==================== + + /// Get a single conversation by ID + async fn conversation( + &self, + ctx: &Context<'_>, + id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let conversation = sqlx::query_as::<_, Conversation>( + r#" + SELECT id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + FROM conversations + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(conversation.map(ConversationType_::from)) + } + + /// Get all conversations for the current user + async fn my_conversations( + &self, + ctx: &Context<'_>, + #[graphql(default = false)] include_archived: bool, + #[graphql(default = 50)] limit: i32, + #[graphql(default = 0)] offset: i32, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let conversations: Vec = if include_archived { + sqlx::query_as( + r#" + SELECT c.id, c.created_at, c.updated_at, c.subject, c.conversation_type, + c.entity_type, c.entity_id, c.created_by_type, c.created_by_id, + c.last_message_at, c.is_archived, c.metadata + FROM conversations c + INNER JOIN conversation_participants cp ON c.id = cp.conversation_id + WHERE cp.participant_type = $1 + AND cp.participant_id = $2 + ORDER BY c.last_message_at DESC NULLS LAST, c.created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(participant_type) + 
.bind(user.user_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? + } else { + sqlx::query_as( + r#" + SELECT c.id, c.created_at, c.updated_at, c.subject, c.conversation_type, + c.entity_type, c.entity_id, c.created_by_type, c.created_by_id, + c.last_message_at, c.is_archived, c.metadata + FROM conversations c + INNER JOIN conversation_participants cp ON c.id = cp.conversation_id + WHERE cp.participant_type = $1 + AND cp.participant_id = $2 + AND c.is_archived = false + AND cp.is_archived = false + ORDER BY c.last_message_at DESC NULLS LAST, c.created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(participant_type) + .bind(user.user_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? + }; + + Ok(conversations.into_iter().map(ConversationType_::from).collect()) + } + + /// Get conversations by entity (e.g., all conversations about a specific service) + async fn conversations_by_entity( + &self, + ctx: &Context<'_>, + entity_type: String, + entity_id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let conversations: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, subject, conversation_type, entity_type, entity_id, + created_by_type, created_by_id, last_message_at, is_archived, metadata + FROM conversations + WHERE entity_type = $1 AND entity_id = $2 AND is_archived = false + ORDER BY last_message_at DESC NULLS LAST, created_at DESC + "#, + ) + .bind(&entity_type) + .bind(entity_id) + .fetch_all(&*pool) + .await?; + + Ok(conversations.into_iter().map(ConversationType_::from).collect()) + } + + /// Get total unread message count for the current user across all conversations + async fn unread_message_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let count: Option 
= sqlx::query_scalar( + r#" + SELECT COALESCE(SUM(unread_count), 0) + FROM conversation_participants cp + INNER JOIN conversations c ON c.id = cp.conversation_id + WHERE cp.participant_type = $1 + AND cp.participant_id = $2 + AND cp.is_archived = false + AND c.is_archived = false + "#, + ) + .bind(participant_type) + .bind(user.user_id) + .fetch_optional(&*pool) + .await?; + + Ok(count.unwrap_or(0) as i32) + } + + // ==================== MESSAGE QUERIES ==================== + + /// Get a single message by ID + async fn message(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let message = sqlx::query_as::<_, Message>( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(message.map(MessageType::from)) + } + + /// Get messages in a conversation with pagination + async fn messages( + &self, + ctx: &Context<'_>, + conversation_id: Uuid, + #[graphql(default = 50)] limit: i32, + #[graphql(default = 0)] offset: i32, + #[graphql(default = false)] include_deleted: bool, + #[graphql(default = false)] include_system: bool, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let messages: Vec = match (include_deleted, include_system) { + (true, true) => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + "#, + ) + .bind(conversation_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? 
+ } + (true, false) => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 AND is_system_message = false + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + "#, + ) + .bind(conversation_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? + } + (false, true) => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 AND is_deleted = false + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + "#, + ) + .bind(conversation_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? + } + (false, false) => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 AND is_deleted = false AND is_system_message = false + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + "#, + ) + .bind(conversation_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? 
+ } + }; + + Ok(messages.into_iter().map(MessageType::from).collect()) + } +} diff --git a/src/graphql/queries/mod.rs b/src/graphql/queries/mod.rs new file mode 100644 index 0000000..0dbb2c8 --- /dev/null +++ b/src/graphql/queries/mod.rs @@ -0,0 +1,52 @@ +mod account; +mod calendar; +mod customer; +mod event; +mod invoice; +mod messaging; +mod notification; +mod profile; +mod project; +mod project_scope_template; +mod report; +mod service; +mod service_scope_template; +mod session; +mod wave; + +use async_graphql::{MergedObject, Object}; + +use account::AccountQuery; +use calendar::CalendarQuery; +use customer::CustomerQuery; +use event::EventQuery; +use invoice::InvoiceQuery; +use messaging::MessagingQuery; +use notification::NotificationQuery; +use profile::ProfileQuery; +use project::ProjectQuery; +use project_scope_template::ProjectScopeTemplateQuery; +use report::ReportQuery; +use service::ServiceQuery; +use service_scope_template::ServiceScopeTemplateQuery; +use session::SessionQuery; +use wave::WaveQuery; + +#[derive(Default)] +pub struct BaseQuery; + +#[Object] +impl BaseQuery { + /// API version + async fn version(&self) -> &str { + env!("CARGO_PKG_VERSION") + } + + /// Health check + async fn health(&self) -> &str { + "ok" + } +} + +#[derive(MergedObject, Default)] +pub struct QueryRoot(BaseQuery, ProfileQuery, CustomerQuery, AccountQuery, ServiceQuery, ProjectQuery, ServiceScopeTemplateQuery, ProjectScopeTemplateQuery, ReportQuery, InvoiceQuery, CalendarQuery, WaveQuery, SessionQuery, MessagingQuery, EventQuery, NotificationQuery); diff --git a/src/graphql/queries/notification.rs b/src/graphql/queries/notification.rs new file mode 100644 index 0000000..5539b8e --- /dev/null +++ b/src/graphql/queries/notification.rs @@ -0,0 +1,217 @@ +//! Notification GraphQL queries +//! +//! Queries for notifications and notification rules. 
+ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::graphql::types::{NotificationDeliveryType, NotificationRuleType, NotificationType}; +use crate::models::{Notification, NotificationDelivery, NotificationRule}; + +#[derive(Default)] +pub struct NotificationQuery; + +#[Object] +impl NotificationQuery { + // ==================== USER NOTIFICATION QUERIES ==================== + + /// Get a single notification by ID + async fn notification( + &self, + ctx: &Context<'_>, + id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let notification = sqlx::query_as::<_, Notification>( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(notification.map(NotificationType::from)) + } + + /// Get notifications for the current user + async fn my_notifications( + &self, + ctx: &Context<'_>, + #[graphql(default = false)] unread_only: bool, + #[graphql(default = 50)] limit: i32, + #[graphql(default = 0)] offset: i32, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let recipient_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let notifications: Vec = if unread_only { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE recipient_type = $1 AND recipient_id = $2 AND read_at IS NULL + ORDER BY created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(recipient_type) + .bind(user.user_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? 
+ } else { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE recipient_type = $1 AND recipient_id = $2 + ORDER BY created_at DESC + LIMIT $3 OFFSET $4 + "#, + ) + .bind(recipient_type) + .bind(user.user_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await? + }; + + Ok(notifications.into_iter().map(NotificationType::from).collect()) + } + + /// Get unread notification count for the current user + async fn my_unread_notification_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + let user = ctx.data::()?; + + let recipient_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM notifications + WHERE recipient_type = $1 AND recipient_id = $2 AND read_at IS NULL + "#, + ) + .bind(recipient_type) + .bind(user.user_id) + .fetch_one(&*pool) + .await?; + + Ok(count as i32) + } + + // ==================== ADMIN NOTIFICATION RULE QUERIES ==================== + + /// Get all notification rules (admin only) + async fn notification_rules( + &self, + ctx: &Context<'_>, + #[graphql(default)] is_active: Option, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let rules: Vec = match is_active { + Some(active) => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + FROM notification_rules + WHERE is_active = $1 + ORDER BY name + "#, + ) + .bind(active) + .fetch_all(&*pool) + .await? 
+ } + None => { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + FROM notification_rules + ORDER BY name + "#, + ) + .fetch_all(&*pool) + .await? + } + }; + + Ok(rules.into_iter().map(NotificationRuleType::from).collect()) + } + + /// Get a single notification rule by ID (admin only) + async fn notification_rule( + &self, + ctx: &Context<'_>, + id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let rule = sqlx::query_as::<_, NotificationRule>( + r#" + SELECT id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + FROM notification_rules + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(rule.map(NotificationRuleType::from)) + } + + /// Get delivery history for a notification (admin only) + async fn notification_deliveries( + &self, + ctx: &Context<'_>, + notification_id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let deliveries: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + FROM notification_deliveries + WHERE notification_id = $1 + ORDER BY created_at ASC + "#, + ) + .bind(notification_id) + .fetch_all(&*pool) + .await?; + + Ok(deliveries.into_iter().map(NotificationDeliveryType::from).collect()) + } +} diff --git a/src/graphql/queries/profile.rs b/src/graphql/queries/profile.rs new file mode 100644 index 0000000..e129e17 --- /dev/null +++ b/src/graphql/queries/profile.rs @@ -0,0 +1,177 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::auth::{ProfileType as AuthProfileType, UserContext}; +use crate::db::Database; +use crate::graphql::types::{CustomerInfo, CustomerProfileType, 
ProfileType, TeamProfileType};
use crate::models::{CustomerProfile, EntityStatus, TeamProfile};

// NOTE(review): generic type parameters in this file were stripped by the
// diff rendering; the `<…>` arguments below are reconstructed — verify
// against the original source.
#[derive(Default)]
pub struct ProfileQuery;

#[Object]
impl ProfileQuery {
    /// Get all non-inactive team profiles
    async fn team_profiles(&self, ctx: &Context<'_>) -> Result<Vec<TeamProfileType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        // Exclude only explicitly inactive profiles - includes ACTIVE, PENDING, and any other status
        let profiles = sqlx::query_as::<_, TeamProfile>(
            r#"
            SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes
            FROM team_profiles
            WHERE status != $1
            ORDER BY first_name, last_name
            "#,
        )
        .bind(EntityStatus::Inactive)
        .fetch_all(&*pool)
        .await?;

        Ok(profiles.into_iter().map(TeamProfileType::from).collect())
    }

    /// Get a single team profile by ID; errors if no such profile exists.
    async fn team_profile(&self, ctx: &Context<'_>, id: Uuid) -> Result<TeamProfileType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let profile = sqlx::query_as::<_, TeamProfile>(
            r#"
            SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes
            FROM team_profiles
            WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("Team profile not found"))?;

        Ok(TeamProfileType::from(profile))
    }

    /// Get all non-inactive customer profiles
    async fn customer_profiles(&self, ctx: &Context<'_>) -> Result<Vec<CustomerProfileType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let profiles = sqlx::query_as::<_, CustomerProfile>(
            r#"
            SELECT id, created_at, updated_at, first_name, last_name, phone, email, status, notes
            FROM customer_profiles
            WHERE status != $1
            ORDER BY first_name, last_name
            "#,
        )
        .bind(EntityStatus::Inactive)
        .fetch_all(&*pool)
        .await?;

        Ok(profiles.into_iter().map(CustomerProfileType::from).collect())
    }

    /// Get a single customer profile by ID, including the customers the
    /// profile has access to; errors if no such profile exists.
    async fn customer_profile(&self, ctx: &Context<'_>, id: Uuid) -> Result<CustomerProfileType> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let profile = sqlx::query_as::<_, CustomerProfile>(
            r#"
            SELECT id, created_at, updated_at, first_name, last_name, phone, email, status, notes
            FROM customer_profiles
            WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("Customer profile not found"))?;

        // Fetch associated customers
        let customers = sqlx::query_as::<_, (Uuid, String)>(
            r#"
            SELECT c.id, c.name
            FROM customers c
            INNER JOIN customer_profile_access cpa ON cpa.customer_id = c.id
            WHERE cpa.customer_profile_id = $1
            "#,
        )
        .bind(id)
        .fetch_all(&*pool)
        .await?
        .into_iter()
        .map(|(id, name)| CustomerInfo { id, name })
        .collect();

        Ok(CustomerProfileType::from_profile(profile, customers))
    }

    /// Get the currently authenticated user's profile.
    ///
    /// Returns `None` when the request carries no user context or the
    /// referenced profile row no longer exists.
    async fn me(&self, ctx: &Context<'_>) -> Result<Option<ProfileType>> {
        // Get user context from GraphQL context
        let user_ctx = match ctx.data_opt::<UserContext>() {
            Some(ctx) => ctx,
            None => return Ok(None), // Not authenticated
        };

        // Get database from GraphQL context
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        match user_ctx.profile_type {
            AuthProfileType::Team => {
                let profile = sqlx::query_as::<_, TeamProfile>(
                    r#"
                    SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes
                    FROM team_profiles
                    WHERE id = $1
                    "#,
                )
                .bind(user_ctx.user_id)
                .fetch_optional(&*pool)
                .await?;

                Ok(profile.map(|p| ProfileType::TeamProfile(p.into())))
            }
            AuthProfileType::Customer => {
                let profile = sqlx::query_as::<_, CustomerProfile>(
                    r#"
                    SELECT id, created_at, updated_at, first_name, last_name, phone, email, status, notes
                    FROM customer_profiles
                    WHERE id = $1
                    "#,
                )
                .bind(user_ctx.user_id)
                .fetch_optional(&*pool)
                .await?;

                match profile {
                    Some(p) => {
                        // Fetch associated customers
                        let customers = sqlx::query_as::<_, (uuid::Uuid, String)>(
                            r#"
                            SELECT c.id, c.name
                            FROM customers c
                            INNER JOIN customer_profile_access cpa ON cpa.customer_id = c.id
                            WHERE cpa.customer_profile_id = $1
                            "#,
                        )
                        .bind(user_ctx.user_id)
                        .fetch_all(&*pool)
                        .await?
                        .into_iter()
                        .map(|(id, name)| CustomerInfo { id, name })
                        .collect();

                        Ok(Some(ProfileType::CustomerProfile(
                            CustomerProfileType::from_profile(p, customers),
                        )))
                    }
                    None => Ok(None),
                }
            }
        }
    }
}
diff --git a/src/graphql/queries/project.rs b/src/graphql/queries/project.rs
new file mode 100644
index 0000000..f43bad2
--- /dev/null
+++ b/src/graphql/queries/project.rs
@@ -0,0 +1,186 @@
use async_graphql::{Context, Object, Result};
use uuid::Uuid;

use crate::db::Database;
use crate::graphql::types::{
    PaginationInput, ProjectConnection, ProjectFilterInput, ProjectType,
};
use crate::models::{Project, WorkStatus};

#[derive(Default)]
pub struct ProjectQuery;

#[Object]
impl ProjectQuery {
    /// Get paginated projects with optional filtering
    async fn projects(
        &self,
        ctx: &Context<'_>,
        filter: Option<ProjectFilterInput>,
        pagination: Option<PaginationInput>,
    ) -> Result<ProjectConnection> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let filter = filter.unwrap_or_default();
        let pagination = pagination.unwrap_or_default();
        let offset = pagination.offset();
        let limit = pagination.limit();

        // Convert filter status to model enum
        let status: Option<WorkStatus> = filter.status.map(WorkStatus::from);

        // Merge customer_id into customer_ids for backward compatibility
        let customer_ids: Option<Vec<Uuid>> = match (&filter.customer_ids, &filter.customer_id) {
            (Some(ids), _) if !ids.is_empty() => Some(ids.clone()),
            (_, Some(id)) => Some(vec![*id]),
            _ => None,
        };

        // Get total count first
        let total_count: i64 = sqlx::query_scalar(
            r#"
            SELECT COUNT(*)
            FROM projects
            WHERE ($1::date IS NULL OR date >= $1)
              AND ($2::date IS NULL OR date <= $2)
              AND ($3::work_status IS NULL OR status = $3)
              AND ($4::uuid[] IS NULL OR customer_id = ANY($4))
              AND ($5::uuid IS NULL OR account_address_id = $5)
              AND ($6::uuid IS NULL OR EXISTS (
                  SELECT 1 FROM project_team_members ptm
                  WHERE ptm.project_id = projects.id AND ptm.team_profile_id = $6
              ))
            "#,
        )
.bind(filter.date_from) + .bind(filter.date_to) + .bind(status) + .bind(&customer_ids) + .bind(filter.account_address_id) + .bind(filter.team_profile_id) + .fetch_one(&*pool) + .await?; + + // Get paginated projects + let projects: Vec = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, notes, + calendar_event_id, wave_service_id, account_address_id, street_address, city, state, zip_code + FROM projects + WHERE ($1::date IS NULL OR date >= $1) + AND ($2::date IS NULL OR date <= $2) + AND ($3::work_status IS NULL OR status = $3) + AND ($4::uuid[] IS NULL OR customer_id = ANY($4)) + AND ($5::uuid IS NULL OR account_address_id = $5) + AND ($6::uuid IS NULL OR EXISTS ( + SELECT 1 FROM project_team_members ptm + WHERE ptm.project_id = projects.id AND ptm.team_profile_id = $6 + )) + ORDER BY date ASC, name ASC + LIMIT $7 OFFSET $8 + "#, + ) + .bind(filter.date_from) + .bind(filter.date_to) + .bind(status) + .bind(&customer_ids) + .bind(filter.account_address_id) + .bind(filter.team_profile_id) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + let items: Vec = projects.into_iter().map(ProjectType::from).collect(); + let has_next_page = (offset + limit) < total_count; + + Ok(ProjectConnection { + items, + total_count: total_count as i32, + has_next_page, + }) + } + + /// Get a single project by ID + async fn project(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, notes, + calendar_event_id, wave_service_id, account_address_id, street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(project.map(ProjectType::from)) + } + + /// Get status counts for projects in a date range (for status tabs) + async fn 
project_status_counts( + &self, + ctx: &Context<'_>, + date_from: Option, + date_to: Option, + team_profile_id: Option, + customer_ids: Option>, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let counts = sqlx::query_as::<_, StatusCountRow>( + r#" + SELECT + COUNT(*) FILTER (WHERE status = 'SCHEDULED') as scheduled, + COUNT(*) FILTER (WHERE status = 'IN_PROGRESS') as in_progress, + COUNT(*) FILTER (WHERE status = 'COMPLETED') as completed, + COUNT(*) FILTER (WHERE status = 'CANCELLED') as cancelled + FROM projects + WHERE ($1::date IS NULL OR date >= $1) + AND ($2::date IS NULL OR date <= $2) + AND ($3::uuid IS NULL OR EXISTS ( + SELECT 1 FROM project_team_members ptm + WHERE ptm.project_id = projects.id AND ptm.team_profile_id = $3 + )) + AND ($4::uuid[] IS NULL OR customer_id = ANY($4)) + "#, + ) + .bind(date_from) + .bind(date_to) + .bind(team_profile_id) + .bind(&customer_ids) + .fetch_one(&*pool) + .await?; + + Ok(ProjectStatusCounts { + scheduled: counts.scheduled.unwrap_or(0) as i32, + in_progress: counts.in_progress.unwrap_or(0) as i32, + completed: counts.completed.unwrap_or(0) as i32, + cancelled: counts.cancelled.unwrap_or(0) as i32, + }) + } +} + +/// Status counts for the status tabs +#[derive(async_graphql::SimpleObject)] +pub struct ProjectStatusCounts { + pub scheduled: i32, + pub in_progress: i32, + pub completed: i32, + pub cancelled: i32, +} + +/// Helper struct for query results +#[derive(sqlx::FromRow)] +struct StatusCountRow { + scheduled: Option, + in_progress: Option, + completed: Option, + cancelled: Option, +} diff --git a/src/graphql/queries/project_scope_template.rs b/src/graphql/queries/project_scope_template.rs new file mode 100644 index 0000000..63ead54 --- /dev/null +++ b/src/graphql/queries/project_scope_template.rs @@ -0,0 +1,59 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::ProjectScopeTemplateType; +use 
crate::models::ProjectScopeTemplate; + +#[derive(Default)] +pub struct ProjectScopeTemplateQuery; + +#[Object] +impl ProjectScopeTemplateQuery { + /// Get all project scope templates + async fn project_scope_templates( + &self, + ctx: &Context<'_>, + #[graphql(desc = "Filter by active status")] is_active: Option, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let templates: Vec = sqlx::query_as::<_, ProjectScopeTemplate>( + r#" + SELECT id, created_at, updated_at, name, description, is_active + FROM project_scope_templates + WHERE ($1::boolean IS NULL OR is_active = $1) + ORDER BY name ASC + "#, + ) + .bind(is_active) + .fetch_all(&*pool) + .await?; + + Ok(templates + .into_iter() + .map(ProjectScopeTemplateType::from) + .collect()) + } + + /// Get a single project scope template by ID + async fn project_scope_template(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let template = sqlx::query_as::<_, ProjectScopeTemplate>( + r#" + SELECT id, created_at, updated_at, name, description, is_active + FROM project_scope_templates + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_one(&*pool) + .await + .ok(); + + Ok(template.map(ProjectScopeTemplateType::from)) + } +} diff --git a/src/graphql/queries/report.rs b/src/graphql/queries/report.rs new file mode 100644 index 0000000..0f7813c --- /dev/null +++ b/src/graphql/queries/report.rs @@ -0,0 +1,289 @@ +use async_graphql::{Context, Object, Result}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + EligibleProjectType, EligibleServiceType, PaginationInput, ReportConnection, ReportFilterInput, + ReportType, +}; +use crate::models::{Report, ReportStatus}; + +/// Dispatch profile ID - excluded from labor share calculations +/// Replace with your dispatch profile UUID +const DISPATCH_PROFILE_ID: &str = "00000000-0000-0000-0000-000000000000"; + +#[derive(Default)] 
+pub struct ReportQuery; + +#[Object] +impl ReportQuery { + /// Get paginated reports with optional filtering + async fn reports( + &self, + ctx: &Context<'_>, + filter: Option, + pagination: Option, + ) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let filter = filter.unwrap_or_default(); + let pagination = pagination.unwrap_or_default(); + let offset = pagination.offset(); + let limit = pagination.limit(); + + let status: Option = filter.status.map(ReportStatus::from); + + // Get total count + let total_count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM reports + WHERE ($1::uuid IS NULL OR team_profile_id = $1) + AND ($2::report_status IS NULL OR status = $2) + AND ($3::date IS NULL OR (start_date <= $3 AND end_date >= $3)) + AND ($4::date IS NULL OR start_date >= $4) + AND ($5::date IS NULL OR end_date <= $5) + "#, + ) + .bind(filter.team_profile_id) + .bind(status) + .bind(filter.date) + .bind(filter.start_date_from) + .bind(filter.end_date_to) + .fetch_one(&*pool) + .await?; + + // Get paginated reports + let reports: Vec = sqlx::query_as::<_, Report>( + r#" + SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status + FROM reports + WHERE ($1::uuid IS NULL OR team_profile_id = $1) + AND ($2::report_status IS NULL OR status = $2) + AND ($3::date IS NULL OR (start_date <= $3 AND end_date >= $3)) + AND ($4::date IS NULL OR start_date >= $4) + AND ($5::date IS NULL OR end_date <= $5) + ORDER BY start_date DESC, id ASC + LIMIT $6 OFFSET $7 + "#, + ) + .bind(filter.team_profile_id) + .bind(status) + .bind(filter.date) + .bind(filter.start_date_from) + .bind(filter.end_date_to) + .bind(limit) + .bind(offset) + .fetch_all(&*pool) + .await?; + + let items: Vec = reports.into_iter().map(ReportType::from).collect(); + let has_next_page = (offset + limit) < total_count; + + Ok(ReportConnection { + items, + total_count: total_count as i32, + has_next_page, + }) + } + + /// Get a single report by ID + async fn 
report(&self, ctx: &Context<'_>, id: Uuid) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let report = sqlx::query_as::<_, Report>( + r#" + SELECT id, created_at, updated_at, team_profile_id, start_date, end_date, status + FROM reports + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(report.map(ReportType::from)) + } + + /// Get eligible services for a team member that can be added to a report + /// Returns completed services where the team member is assigned and not already in a report + async fn eligible_services_for_report( + &self, + ctx: &Context<'_>, + team_profile_id: Uuid, + date_from: NaiveDate, + date_to: NaiveDate, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Find completed services where: + // 1. Team member is assigned + // 2. Service is within date range + // 3. Service is not already in a report for this team member + let rows: Vec = sqlx::query_as::<_, EligibleServiceRow>( + r#" + SELECT + s.id as service_id, + s.date, + COALESCE(l.amount, 0) as labor_total, + ( + SELECT COUNT(*) + FROM service_team_members stm2 + WHERE stm2.service_id = s.id + AND stm2.team_profile_id != $4 + ) as team_member_count + FROM services s + JOIN service_team_members stm ON stm.service_id = s.id AND stm.team_profile_id = $1 + LEFT JOIN account_addresses aa ON aa.id = s.account_address_id + LEFT JOIN LATERAL ( + SELECT amount + FROM labors + WHERE account_address_id = aa.id + AND start_date <= s.date + AND (end_date IS NULL OR end_date >= s.date) + ORDER BY start_date DESC + LIMIT 1 + ) l ON true + WHERE s.status = 'COMPLETED' + AND s.date >= $2 + AND s.date <= $3 + AND NOT EXISTS ( + SELECT 1 + FROM report_services rs + JOIN reports r ON r.id = rs.report_id + WHERE rs.service_id = s.id + AND r.team_profile_id = $1 + ) + ORDER BY s.date ASC + "#, + ) + .bind(team_profile_id) + .bind(date_from) + .bind(date_to) + 
.bind(dispatch_id) + .fetch_all(&*pool) + .await?; + + let items = rows + .into_iter() + .filter(|r| r.team_member_count > 0) + .map(|r| { + let labor_share = if r.team_member_count > 0 { + r.labor_total / Decimal::from(r.team_member_count) + } else { + Decimal::ZERO + }; + + EligibleServiceType { + service_id: r.service_id, + date: r.date, + labor_share, + labor_total: r.labor_total, + team_member_count: r.team_member_count as i32, + } + }) + .collect(); + + Ok(items) + } + + /// Get eligible projects for a team member that can be added to a report + /// Returns completed projects where the team member is assigned and not already in a report + async fn eligible_projects_for_report( + &self, + ctx: &Context<'_>, + team_profile_id: Uuid, + date_from: NaiveDate, + date_to: NaiveDate, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let dispatch_id = Uuid::parse_str(DISPATCH_PROFILE_ID)?; + + // Find completed projects where: + // 1. Team member is assigned + // 2. Project is within date range + // 3. 
Project is not already in a report for this team member + let rows: Vec = sqlx::query_as::<_, EligibleProjectRow>( + r#" + SELECT + p.id as project_id, + p.date, + COALESCE(p.labor, 0) as labor_total, + ( + SELECT COUNT(*) + FROM project_team_members ptm2 + WHERE ptm2.project_id = p.id + AND ptm2.team_profile_id != $4 + ) as team_member_count + FROM projects p + JOIN project_team_members ptm ON ptm.project_id = p.id AND ptm.team_profile_id = $1 + WHERE p.status = 'COMPLETED' + AND p.date >= $2 + AND p.date <= $3 + AND NOT EXISTS ( + SELECT 1 + FROM report_projects rp + JOIN reports r ON r.id = rp.report_id + WHERE rp.project_id = p.id + AND r.team_profile_id = $1 + ) + ORDER BY p.date ASC + "#, + ) + .bind(team_profile_id) + .bind(date_from) + .bind(date_to) + .bind(dispatch_id) + .fetch_all(&*pool) + .await?; + + let items = rows + .into_iter() + .filter(|r| r.team_member_count > 0) + .map(|r| { + let labor_share = if r.team_member_count > 0 { + r.labor_total / Decimal::from(r.team_member_count) + } else { + Decimal::ZERO + }; + + EligibleProjectType { + project_id: r.project_id, + date: r.date, + labor_share, + labor_total: r.labor_total, + team_member_count: r.team_member_count as i32, + } + }) + .collect(); + + Ok(items) + } +} + +/// Helper struct for eligible service query +#[derive(sqlx::FromRow)] +struct EligibleServiceRow { + service_id: Uuid, + date: NaiveDate, + labor_total: Decimal, + team_member_count: i64, +} + +/// Helper struct for eligible project query +#[derive(sqlx::FromRow)] +struct EligibleProjectRow { + project_id: Uuid, + date: NaiveDate, + labor_total: Decimal, + team_member_count: i64, +} diff --git a/src/graphql/queries/service.rs b/src/graphql/queries/service.rs new file mode 100644 index 0000000..16ea3a5 --- /dev/null +++ b/src/graphql/queries/service.rs @@ -0,0 +1,302 @@ +use async_graphql::{Context, Object, Result}; +use chrono::NaiveDate; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + PaginationInput, 
ServiceConnection, ServiceFilterInput, ServiceType, ServicesForAssignmentResult,
};
use crate::models::{Service, WorkStatus};

// NOTE(review): this dump appears to have had generic type parameters stripped
// by a markup sanitizer (`Option,`, `ctx.data::()`, `Result {` below are all
// missing their `<...>` arguments) — restore them before compiling.
#[derive(Default)]
pub struct ServiceQuery;

#[Object]
impl ServiceQuery {
    /// Get paginated services with optional filtering.
    ///
    /// Runs two statements: a COUNT(*) for the total, then a LIMIT/OFFSET page,
    /// both sharing identical optional-filter WHERE clauses.
    async fn services(
        &self,
        ctx: &Context<'_>,
        filter: Option,
        pagination: Option,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let filter = filter.unwrap_or_default();
        let pagination = pagination.unwrap_or_default();
        let offset = pagination.offset();
        let limit = pagination.limit();

        // Convert filter status to model enum
        let status: Option = filter.status.map(WorkStatus::from);

        // Merge customer_id into customer_ids for backward compatibility:
        // a non-empty `customer_ids` wins; otherwise a lone `customer_id` is
        // wrapped into a one-element vec.
        let customer_ids: Option> = match (&filter.customer_ids, &filter.customer_id) {
            (Some(ids), _) if !ids.is_empty() => Some(ids.clone()),
            (_, Some(id)) => Some(vec![*id]),
            _ => None,
        };

        // Get total count first.
        // Each `$n::type IS NULL OR ...` clause disables that filter when the
        // bound value is NULL, so one statement serves every filter combination.
        let total_count: i64 = sqlx::query_scalar(
            r#"
            SELECT COUNT(*)
            FROM services
            WHERE ($1::date IS NULL OR date >= $1)
            AND ($2::date IS NULL OR date <= $2)
            AND ($3::work_status IS NULL OR status = $3)
            AND ($4::uuid IS NULL OR account_id = $4)
            AND ($5::uuid IS NULL OR account_address_id = $5)
            AND ($6::uuid IS NULL OR EXISTS (
                SELECT 1 FROM service_team_members stm
                WHERE stm.service_id = services.id AND stm.team_profile_id = $6
            ))
            AND ($7::uuid[] IS NULL OR EXISTS (
                SELECT 1 FROM accounts a
                WHERE a.id = services.account_id AND a.customer_id = ANY($7)
            ))
            "#,
        )
        .bind(filter.date_from)
        .bind(filter.date_to)
        .bind(status)
        .bind(filter.account_id)
        .bind(filter.account_address_id)
        .bind(filter.team_profile_id)
        .bind(&customer_ids)
        .fetch_one(&*pool)
        .await?;

        // Get paginated services (same WHERE clauses as the count above —
        // keep the two statements in sync when editing).
        let services: Vec = sqlx::query_as::<_, Service>(
            r#"
            SELECT id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id
            FROM services
            WHERE ($1::date IS NULL OR date >= $1)
            AND ($2::date IS NULL OR date <= $2)
            AND ($3::work_status IS NULL OR status = $3)
            AND ($4::uuid IS NULL OR account_id = $4)
            AND ($5::uuid IS NULL OR account_address_id = $5)
            AND ($6::uuid IS NULL OR EXISTS (
                SELECT 1 FROM service_team_members stm
                WHERE stm.service_id = services.id AND stm.team_profile_id = $6
            ))
            AND ($7::uuid[] IS NULL OR EXISTS (
                SELECT 1 FROM accounts a
                WHERE a.id = services.account_id AND a.customer_id = ANY($7)
            ))
            ORDER BY date ASC, id ASC
            LIMIT $8 OFFSET $9
            "#,
        )
        .bind(filter.date_from)
        .bind(filter.date_to)
        .bind(status)
        .bind(filter.account_id)
        .bind(filter.account_address_id)
        .bind(filter.team_profile_id)
        .bind(&customer_ids)
        .bind(limit)
        .bind(offset)
        .fetch_all(&*pool)
        .await?;

        let items: Vec = services.into_iter().map(ServiceType::from).collect();
        let has_next_page = (offset + limit) < total_count;

        Ok(ServiceConnection {
            items,
            total_count: total_count as i32,
            has_next_page,
        })
    }

    /// Get a single service by ID
    async fn service(&self, ctx: &Context<'_>, id: Uuid) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // fetch_optional: a missing row maps to None; real DB errors propagate.
        let service = sqlx::query_as::<_, Service>(
            r#"
            SELECT id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id
            FROM services
            WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        Ok(service.map(ServiceType::from))
    }

    /// Get status counts for services in a date range (for status tabs)
    async fn service_status_counts(
        &self,
        ctx: &Context<'_>,
        date_from: Option,
        date_to: Option,
        team_profile_id: Option,
        customer_ids: Option>,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Single scan with per-status FILTERed aggregates instead of four queries.
        let counts = sqlx::query_as::<_, StatusCountRow>(
            r#"
            SELECT
                COUNT(*) FILTER (WHERE status = 'SCHEDULED') as scheduled,
                COUNT(*) FILTER (WHERE status = 'IN_PROGRESS') as in_progress,
                COUNT(*) FILTER (WHERE status = 'COMPLETED') as completed,
                COUNT(*) FILTER (WHERE status = 'CANCELLED') as cancelled
            FROM services
            WHERE ($1::date IS NULL OR date >= $1)
            AND ($2::date IS NULL OR date <= $2)
            AND ($3::uuid IS NULL OR EXISTS (
                SELECT 1 FROM service_team_members stm
                WHERE stm.service_id = services.id AND stm.team_profile_id = $3
            ))
            AND ($4::uuid[] IS NULL OR EXISTS (
                SELECT 1 FROM accounts a
                WHERE a.id = services.account_id AND a.customer_id = ANY($4)
            ))
            "#,
        )
        .bind(date_from)
        .bind(date_to)
        .bind(team_profile_id)
        .bind(&customer_ids)
        .fetch_one(&*pool)
        .await?;

        // COUNT never returns NULL for a non-grouped aggregate, but the row
        // struct models the columns as Option — default to 0 defensively.
        Ok(ServiceStatusCounts {
            scheduled: counts.scheduled.unwrap_or(0) as i32,
            in_progress: counts.in_progress.unwrap_or(0) as i32,
            completed: counts.completed.unwrap_or(0) as i32,
            cancelled: counts.cancelled.unwrap_or(0) as i32,
        })
    }

    /// Get services grouped by assignment status for bulk assignment page
    ///
    /// Groups services in a date range into three categories:
    /// - Unassigned: No team members
    /// - Ready to assign: Has dispatch (admin) but no other team members
    /// - Assigned: Has dispatch and at least one other team member
    async fn services_for_assignment(
        &self,
        ctx: &Context<'_>,
        date_from: NaiveDate,
        date_to: NaiveDate,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Get all services in the date range with their team member assignments
        // We need to know:
        // 1. Which services have no team members
        // 2. Which have only admin(s)
        // 3. Which have admin(s) + non-admins
        let rows: Vec = sqlx::query_as::<_, ServiceAssignmentRow>(
            r#"
            SELECT
                s.id, s.created_at, s.updated_at, s.account_id, s.account_address_id,
                s.date, s.status, s.notes, s.calendar_event_id,
                COALESCE(
                    (SELECT COUNT(*) FROM service_team_members stm
                     JOIN team_profiles tp ON tp.id = stm.team_profile_id
                     WHERE stm.service_id = s.id AND tp.role = 'ADMIN'), 0
                ) as admin_count,
                COALESCE(
                    (SELECT COUNT(*) FROM service_team_members stm
                     JOIN team_profiles tp ON tp.id = stm.team_profile_id
                     WHERE stm.service_id = s.id AND tp.role != 'ADMIN'), 0
                ) as non_admin_count
            FROM services s
            WHERE s.date >= $1 AND s.date <= $2
            AND s.status IN ('SCHEDULED', 'IN_PROGRESS')
            ORDER BY s.date ASC, s.id ASC
            "#,
        )
        .bind(date_from)
        .bind(date_to)
        .fetch_all(&*pool)
        .await?;

        let mut unassigned = Vec::new();
        let mut ready_to_assign = Vec::new();
        let mut assigned = Vec::new();

        for row in rows {
            // Rebuild the model from the flat row (the row additionally carries
            // the two aggregate counts, which `Service` itself does not).
            let service = Service {
                base: crate::models::BaseFields {
                    id: row.id,
                    created_at: row.created_at,
                    updated_at: row.updated_at,
                },
                account_id: row.account_id,
                account_address_id: row.account_address_id,
                date: row.date,
                status: row.status,
                notes: row.notes,
                calendar_event_id: row.calendar_event_id,
            };
            let service_type = ServiceType::from(service);

            if row.admin_count == 0 && row.non_admin_count == 0 {
                // No team members at all
                unassigned.push(service_type);
            } else if row.admin_count > 0 && row.non_admin_count == 0 {
                // Has admin(s) but no non-admins
                ready_to_assign.push(service_type);
            } else if row.admin_count > 0 && row.non_admin_count > 0 {
                // Has both admin and non-admin
                assigned.push(service_type);
            } else {
                // Edge case: has non-admins but no admin - treat as unassigned
                unassigned.push(service_type);
            }
        }

        Ok(ServicesForAssignmentResult {
            unassigned,
            ready_to_assign,
            assigned,
        })
    }
}

/// Status counts for the status tabs
+#[derive(async_graphql::SimpleObject)] +pub struct ServiceStatusCounts { + pub scheduled: i32, + pub in_progress: i32, + pub completed: i32, + pub cancelled: i32, +} + +/// Helper struct for query results +#[derive(sqlx::FromRow)] +struct StatusCountRow { + scheduled: Option, + in_progress: Option, + completed: Option, + cancelled: Option, +} + +/// Helper struct for services_for_assignment query +#[derive(sqlx::FromRow)] +struct ServiceAssignmentRow { + id: Uuid, + created_at: chrono::DateTime, + updated_at: chrono::DateTime, + account_id: Uuid, + account_address_id: Uuid, + date: NaiveDate, + status: WorkStatus, + notes: Option, + calendar_event_id: Option, + admin_count: i64, + non_admin_count: i64, +} diff --git a/src/graphql/queries/service_scope_template.rs b/src/graphql/queries/service_scope_template.rs new file mode 100644 index 0000000..05d1e0f --- /dev/null +++ b/src/graphql/queries/service_scope_template.rs @@ -0,0 +1,59 @@ +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::ServiceScopeTemplateType; +use crate::models::ServiceScopeTemplate; + +#[derive(Default)] +pub struct ServiceScopeTemplateQuery; + +#[Object] +impl ServiceScopeTemplateQuery { + /// Get all service scope templates + async fn service_scope_templates( + &self, + ctx: &Context<'_>, + #[graphql(desc = "Filter by active status")] is_active: Option, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let templates: Vec = sqlx::query_as::<_, ServiceScopeTemplate>( + r#" + SELECT id, created_at, updated_at, name, description, is_active + FROM service_scope_templates + WHERE ($1::boolean IS NULL OR is_active = $1) + ORDER BY name ASC + "#, + ) + .bind(is_active) + .fetch_all(&*pool) + .await?; + + Ok(templates + .into_iter() + .map(ServiceScopeTemplateType::from) + .collect()) + } + + /// Get a single service scope template by ID + async fn service_scope_template(&self, ctx: &Context<'_>, id: Uuid) 
-> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let template = sqlx::query_as::<_, ServiceScopeTemplate>( + r#" + SELECT id, created_at, updated_at, name, description, is_active + FROM service_scope_templates + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_one(&*pool) + .await + .ok(); + + Ok(template.map(ServiceScopeTemplateType::from)) + } +} diff --git a/src/graphql/queries/session.rs b/src/graphql/queries/session.rs new file mode 100644 index 0000000..560bf87 --- /dev/null +++ b/src/graphql/queries/session.rs @@ -0,0 +1,222 @@ +//! Session GraphQL queries +//! +//! Queries for service and project sessions. + +use async_graphql::{Context, Object, Result}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::types::{ + ProjectSessionType, ServiceSessionType, +}; +use crate::models::{ProjectSession, ServiceSession}; + +#[derive(Default)] +pub struct SessionQuery; + +#[Object] +impl SessionQuery { + // ==================== SERVICE SESSION QUERIES ==================== + + /// Get a single service session by ID + async fn service_session( + &self, + ctx: &Context<'_>, + id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let session = sqlx::query_as::<_, ServiceSession>( + r#" + SELECT id, created_at, updated_at, service_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + FROM service_sessions + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&*pool) + .await?; + + Ok(session.map(ServiceSessionType::from)) + } + + /// Get all sessions for a service (ordered by date descending) + async fn service_sessions( + &self, + ctx: &Context<'_>, + service_id: Uuid, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let sessions: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, service_id, account_id, account_address_id, + customer_id, scope_id, start, "end", created_by_id, closed_by_id, date + FROM 
service_sessions
            WHERE service_id = $1
            ORDER BY date DESC, start DESC
            "#,
        )
        .bind(service_id)
        .fetch_all(&*pool)
        .await?;

        Ok(sessions.into_iter().map(ServiceSessionType::from).collect())
    }

    /// Get the active (unclosed) session for a service, if one exists
    ///
    /// "Active" means the session's `"end"` timestamp has not been set.
    /// NOTE(review): no uniqueness guarantee is visible here — LIMIT 1 picks an
    /// arbitrary row if multiple open sessions exist; confirm a DB constraint.
    async fn active_service_session(
        &self,
        ctx: &Context<'_>,
        service_id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let session = sqlx::query_as::<_, ServiceSession>(
            r#"
            SELECT id, created_at, updated_at, service_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM service_sessions
            WHERE service_id = $1 AND "end" IS NULL
            LIMIT 1
            "#,
        )
        .bind(service_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(session.map(ServiceSessionType::from))
    }

    /// Get the most recent session for a service (active or closed)
    async fn latest_service_session(
        &self,
        ctx: &Context<'_>,
        service_id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let session = sqlx::query_as::<_, ServiceSession>(
            r#"
            SELECT id, created_at, updated_at, service_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM service_sessions
            WHERE service_id = $1
            ORDER BY date DESC, start DESC
            LIMIT 1
            "#,
        )
        .bind(service_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(session.map(ServiceSessionType::from))
    }

    // ==================== PROJECT SESSION QUERIES ====================
    // These mirror the service-session queries above against project_sessions.

    /// Get a single project session by ID
    async fn project_session(
        &self,
        ctx: &Context<'_>,
        id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let session = sqlx::query_as::<_, ProjectSession>(
            r#"
            SELECT id, created_at, updated_at, project_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM project_sessions
            WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(&*pool)
        .await?;

        Ok(session.map(ProjectSessionType::from))
    }

    /// Get all sessions for a project (ordered by date descending)
    async fn project_sessions(
        &self,
        ctx: &Context<'_>,
        project_id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let sessions: Vec = sqlx::query_as(
            r#"
            SELECT id, created_at, updated_at, project_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM project_sessions
            WHERE project_id = $1
            ORDER BY date DESC, start DESC
            "#,
        )
        .bind(project_id)
        .fetch_all(&*pool)
        .await?;

        Ok(sessions.into_iter().map(ProjectSessionType::from).collect())
    }

    /// Get the active (unclosed) session for a project, if one exists
    async fn active_project_session(
        &self,
        ctx: &Context<'_>,
        project_id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let session = sqlx::query_as::<_, ProjectSession>(
            r#"
            SELECT id, created_at, updated_at, project_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM project_sessions
            WHERE project_id = $1 AND "end" IS NULL
            LIMIT 1
            "#,
        )
        .bind(project_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(session.map(ProjectSessionType::from))
    }

    /// Get the most recent session for a project (active or closed)
    async fn latest_project_session(
        &self,
        ctx: &Context<'_>,
        project_id: Uuid,
    ) -> Result> {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        let session = sqlx::query_as::<_, ProjectSession>(
            r#"
            SELECT id, created_at, updated_at, project_id, account_id, account_address_id,
                   customer_id, scope_id, start, "end", created_by_id, closed_by_id, date
            FROM project_sessions
            WHERE project_id = $1
            ORDER BY date DESC, start DESC
            LIMIT 1
            "#,
        )
        .bind(project_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(session.map(ProjectSessionType::from))
    }
}
diff --git a/src/graphql/queries/wave.rs b/src/graphql/queries/wave.rs new file mode 100644 index 0000000..a5db16b --- /dev/null +++ b/src/graphql/queries/wave.rs @@ -0,0 +1,212 @@
//! Wave Accounting GraphQL queries
//!
//! Queries for fetching Wave products, customers, and invoices.

use async_graphql::{Context, Object, Result};
use std::sync::Arc;
use uuid::Uuid;

use crate::graphql::types::{
    WaveCustomerType, WaveInvoiceConnectionType, WaveInvoiceListItemType, WaveInvoiceReadiness,
    WaveInvoiceType, WavePageInfoType, WaveProductType,
};
use crate::services::WaveService;

// NOTE(review): `ctx.data::>()` below lost its generic argument to the
// sanitizer — presumably `Arc<WaveService>` given the `use` lines; confirm.
#[derive(Default)]
pub struct WaveQuery;

#[Object]
impl WaveQuery {
    /// List all products from Wave
    async fn wave_products(&self, ctx: &Context<'_>) -> Result> {
        let wave = ctx.data::>()?;
        let products = wave.list_products().await?;
        Ok(products.into_iter().map(WaveProductType::from).collect())
    }

    /// List all customers from Wave
    async fn wave_customers(&self, ctx: &Context<'_>) -> Result> {
        let wave = ctx.data::>()?;
        let customers = wave.list_customers().await?;
        Ok(customers.into_iter().map(WaveCustomerType::from).collect())
    }

    /// Get a single invoice from Wave by ID
    async fn wave_invoice(&self, ctx: &Context<'_>, invoice_id: String) -> Result {
        let wave = ctx.data::>()?;
        let invoice = wave.get_invoice(&invoice_id).await?;
        Ok(WaveInvoiceType::from(invoice))
    }

    /// List invoices from Wave with pagination
    async fn wave_invoices(
        &self,
        ctx: &Context<'_>,
        #[graphql(default = 1)] page: i32,
        #[graphql(default = 20)] page_size: i32,
    ) -> Result {
        let wave = ctx.data::>()?;
        let connection = wave.list_invoices(page, page_size).await?;
        Ok(WaveInvoiceConnectionType {
            page_info: WavePageInfoType {
                current_page: connection.page_info.current_page,
                total_pages: connection.page_info.total_pages,
                total_count: connection.page_info.total_count,
            },
            invoices: connection
.invoices
                .into_iter()
                .map(WaveInvoiceListItemType::from)
                .collect(),
        })
    }

    /// Get a single customer from Wave by ID
    async fn wave_customer(
        &self,
        ctx: &Context<'_>,
        customer_id: String,
    ) -> Result {
        let wave = ctx.data::>()?;
        let customer = wave.get_customer(&customer_id).await?;
        Ok(WaveCustomerType::from(customer))
    }

    /// Get a single product from Wave by ID
    async fn wave_product(&self, ctx: &Context<'_>, product_id: String) -> Result {
        let wave = ctx.data::>()?;
        let product = wave.get_product(&product_id).await?;
        Ok(WaveProductType::from(product))
    }

    /// Check if a Nexus invoice is ready to be synced to Wave
    ///
    /// Collects every blocking issue (already synced, customer unlinked,
    /// revenues/projects not linked to Wave products, no items) into `issues`;
    /// the invoice is "ready" when that list is empty.
    async fn wave_invoice_readiness(
        &self,
        ctx: &Context<'_>,
        invoice_id: Uuid,
    ) -> Result {
        let db = ctx.data::()?;
        let pool = db.pool().await;

        // Get the invoice
        let invoice = sqlx::query_as::<_, crate::models::Invoice>(
            r#"
            SELECT id, created_at, updated_at, customer_id, start_date, end_date,
                   status, date_paid, payment_type, wave_invoice_id
            FROM invoices
            WHERE id = $1
            "#,
        )
        .bind(invoice_id)
        .fetch_optional(&*pool)
        .await?
        .ok_or_else(|| async_graphql::Error::new("Invoice not found"))?;

        let mut issues = Vec::new();

        // Check if already synced
        if invoice.wave_invoice_id.is_some() {
            issues.push("Invoice is already synced to Wave".to_string());
        }

        // Get customer's wave_customer_id
        let customer = sqlx::query_as::<_, crate::models::Customer>(
            r#"
            SELECT id, created_at, updated_at, name, status, start_date, end_date,
                   billing_terms, billing_email, wave_customer_id
            FROM customers
            WHERE id = $1
            "#,
        )
        .bind(invoice.customer_id)
        .fetch_one(&*pool)
        .await?;

        // Empty string '' means unlinked (backend uses '' to clear fields, not NULL)
        let wave_customer_id = customer.wave_customer_id.clone();
        let has_wave_customer = wave_customer_id.as_ref().map(|id| !id.is_empty()).unwrap_or(false);
        if !has_wave_customer {
            issues.push("Customer is not linked to Wave".to_string());
        }

        // Count revenues with and without wave_service_id
        // Empty string '' means unlinked (backend uses '' to clear fields, not NULL)
        // NOTE(review): a NULL wave_service_id satisfies neither `!= ''` nor
        // `= ''` (SQL three-valued logic), so such rows land in *neither*
        // bucket — confirm the column is NOT NULL, or COALESCE it to ''.
        let revenue_stats: (i64, i64) = sqlx::query_as(
            r#"
            SELECT
                COUNT(*) FILTER (WHERE r.wave_service_id != '') as linked,
                COUNT(*) FILTER (WHERE r.wave_service_id = '') as unlinked
            FROM invoice_revenues ir
            JOIN revenues r ON r.id = ir.revenue_id
            WHERE ir.invoice_id = $1
            "#,
        )
        .bind(invoice_id)
        .fetch_one(&*pool)
        .await?;

        // Count projects with and without wave_service_id
        // Empty string '' means unlinked (backend uses '' to clear fields, not NULL)
        let project_stats: (i64, i64) = sqlx::query_as(
            r#"
            SELECT
                COUNT(*) FILTER (WHERE p.wave_service_id != '') as linked,
                COUNT(*) FILTER (WHERE p.wave_service_id = '') as unlinked
            FROM invoice_projects ip
            JOIN projects p ON p.id = ip.project_id
            WHERE ip.invoice_id = $1
            "#,
        )
        .bind(invoice_id)
        .fetch_one(&*pool)
        .await?;

        let ready_count = (revenue_stats.0 + project_stats.0) as i32;
        let unlinked_count = (revenue_stats.1 + project_stats.1) as i32;

        // Report unlinked revenues separately
        if revenue_stats.1 > 0 {
            issues.push(format!(
                "{} revenue(s) not linked to a Wave product",
                revenue_stats.1
            ));
        }

        // Report unlinked projects separately
        if project_stats.1 > 0 {
            issues.push(format!(
                "{} project(s) not linked to a Wave product",
                project_stats.1
            ));
        }

        if ready_count == 0 && unlinked_count == 0 {
            issues.push("Invoice has no items to sync".to_string());
        }

        // Get total amount: revenue line items plus project line items.
        let total: rust_decimal::Decimal = sqlx::query_scalar(
            r#"
            SELECT COALESCE(
                (SELECT SUM(amount) FROM invoice_revenues WHERE invoice_id = $1),
                0
            ) + COALESCE(
                (SELECT SUM(amount) FROM invoice_projects WHERE invoice_id = $1),
                0
            )
            "#,
        )
        .bind(invoice_id)
        .fetch_one(&*pool)
        .await?;

        Ok(WaveInvoiceReadiness {
            ready: issues.is_empty(),
            issues,
            wave_customer_id,
            ready_item_count: ready_count,
            missing_wave_link_count: unlinked_count,
            total_amount: total,
        })
    }
}
diff --git a/src/graphql/schema.rs b/src/graphql/schema.rs new file mode 100644 index 0000000..80948f4 --- /dev/null +++ b/src/graphql/schema.rs @@ -0,0 +1,53 @@
use async_graphql::{EmptySubscription, Schema};
use std::sync::Arc;

use super::mutations::MutationRoot;
use super::queries::QueryRoot;
use crate::db::Database;
use crate::services::email_templates::EmailTemplateRegistry;
use crate::services::gmail::GmailService;
use crate::services::google_calendar::GoogleCalendarService;
use crate::services::job_queue::JobQueue;
use crate::services::s3::S3Service;
use crate::services::wave::WaveService;

// NOTE(review): generic arguments stripped by the sanitizer here too
// (`Schema;`, `Option>,`) — restore before compiling.
pub type NexusSchema = Schema;

/// Options for building the schema with external services
/// (each `Option`-al service is injected into schema data only when present).
pub struct SchemaOptions {
    pub db: Database,
    pub calendar_service: Option>,
    pub gmail_service: Option>,
    pub wave_service: Option>,
    pub s3_service: Option>,
    pub job_queue: Option,
}

/// Assemble the executable schema, registering the database, the email
/// template registry, and whichever optional services are configured.
pub fn build_schema(options: SchemaOptions) -> NexusSchema {
    let mut builder =
Schema::build(QueryRoot::default(), MutationRoot::default(), EmptySubscription) + .data(options.db) + .data(EmailTemplateRegistry::new()); + + // Add optional services if they're configured + if let Some(calendar) = options.calendar_service { + builder = builder.data(calendar); + } + + if let Some(gmail) = options.gmail_service { + builder = builder.data(gmail); + } + + if let Some(wave) = options.wave_service { + builder = builder.data(wave); + } + + if let Some(s3) = options.s3_service { + builder = builder.data(s3); + } + + if let Some(job_queue) = options.job_queue { + builder = builder.data(job_queue); + } + + builder.finish() +} diff --git a/src/graphql/types/account.rs b/src/graphql/types/account.rs new file mode 100644 index 0000000..5c84640 --- /dev/null +++ b/src/graphql/types/account.rs @@ -0,0 +1,682 @@ +use std::sync::Arc; + +use async_graphql::{ComplexObject, Context, InputObject, Result, SimpleObject}; +use chrono::NaiveDate; +use uuid::Uuid; + +use rust_decimal::Decimal; + +use crate::db::Database; +use crate::models::{ + Account, AccountAddress, AccountContact, Customer, Labor, Revenue, Schedule, ServiceScope, + ServiceScopeArea, ServiceScopeTask, TaskFrequency, +}; +use crate::services::WaveService; + +use super::{CustomerType, EntityStatusType}; + +/// Account GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct AccountType { + pub id: Uuid, + pub customer_id: Uuid, + pub name: String, + pub status: EntityStatusType, + pub start_date: Option, + pub end_date: Option, + /// Computed: whether account is currently active based on status and date range + pub is_active: bool, +} + +#[ComplexObject] +impl AccountType { + /// Get all contacts for this account + async fn contacts(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let contacts: Vec = sqlx::query_as::<_, AccountContact>( + r#" + SELECT id, created_at, updated_at, account_id, first_name, last_name, phone, + email, is_active, 
is_primary, notes + FROM account_contacts + WHERE account_id = $1 AND is_active = true + ORDER BY is_primary DESC, last_name ASC, first_name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(contacts.into_iter().map(AccountContactType::from).collect()) + } + + /// Get all addresses for this account + async fn addresses(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let addresses: Vec = sqlx::query_as::<_, AccountAddress>( + r#" + SELECT id, created_at, updated_at, account_id, street_address, city, state, zip_code, + is_active, is_primary, name, notes + FROM account_addresses + WHERE account_id = $1 AND is_active = true + ORDER BY is_primary DESC, name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(addresses.into_iter().map(AccountAddressType::from).collect()) + } + + /// Get the parent customer for this account + async fn customer(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer = sqlx::query_as::<_, Customer>( + r#" + SELECT id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + FROM customers + WHERE id = $1 + "#, + ) + .bind(self.customer_id) + .fetch_optional(&*pool) + .await?; + + Ok(customer.map(CustomerType::from)) + } + + /// Get all revenues for this account (active and historical) + async fn revenues(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let revenues: Vec = sqlx::query_as::<_, Revenue>( + r#" + SELECT id, created_at, updated_at, account_id, amount, start_date, end_date, wave_service_id + FROM revenues + WHERE account_id = $1 + ORDER BY start_date DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(revenues.into_iter().map(RevenueType::from).collect()) + } +} + +impl From for AccountType { + fn from(account: Account) -> Self { + let is_active = 
account.is_active(); + Self { + id: account.base.id, + customer_id: account.customer_id, + name: account.name, + status: account.status.into(), + start_date: account.start_date, + end_date: account.end_date, + is_active, + } + } +} + +/// Filter input for account queries +#[derive(InputObject, Default)] +pub struct AccountFilter { + /// Filter by name (partial match, case-insensitive) + pub name: Option, + /// Filter by status + pub status: Option, + /// Filter by active state (computed from status + date range) + pub is_active: Option, +} + +/// Account contact GraphQL type +#[derive(SimpleObject)] +pub struct AccountContactType { + pub id: Uuid, + pub first_name: String, + pub last_name: String, + pub email: Option, + pub phone: Option, + pub is_active: bool, + pub is_primary: bool, + pub notes: Option, +} + +impl From for AccountContactType { + fn from(contact: AccountContact) -> Self { + Self { + id: contact.base.id, + first_name: contact.contact.first_name, + last_name: contact.contact.last_name, + email: contact.email, + phone: contact.contact.phone, + is_active: contact.is_active, + is_primary: contact.is_primary, + notes: contact.notes, + } + } +} + +/// Account address GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct AccountAddressType { + pub id: Uuid, + pub account_id: Uuid, + pub name: Option, + pub street_address: String, + pub city: String, + pub state: String, + pub zip_code: String, + pub is_active: bool, + pub is_primary: bool, + pub notes: Option, +} + +#[ComplexObject] +impl AccountAddressType { + /// Get labor rates for this address + async fn labor(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let labor: Vec = sqlx::query_as::<_, Labor>( + r#" + SELECT id, created_at, updated_at, account_address_id, amount, start_date, end_date + FROM labor + WHERE account_address_id = $1 + ORDER BY start_date DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + 
Ok(labor.into_iter().map(LaborType::from).collect()) + } + + /// Get schedules for this address + async fn schedules(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let schedules: Vec = sqlx::query_as::<_, Schedule>( + r#" + SELECT id, created_at, updated_at, account_address_id, name, + monday_service as monday, tuesday_service as tuesday, wednesday_service as wednesday, + thursday_service as thursday, friday_service as friday, saturday_service as saturday, + sunday_service as sunday, weekend_service, schedule_exception, start_date, end_date + FROM schedules + WHERE account_address_id = $1 + ORDER BY start_date DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(schedules.into_iter().map(ScheduleType::from).collect()) + } + + /// Get service scopes for this address + async fn scopes(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let scopes: Vec = sqlx::query_as::<_, ServiceScope>( + r#" + SELECT id, created_at, updated_at, name, account_id, account_address_id, description, is_active + FROM service_scopes + WHERE account_address_id = $1 + ORDER BY is_active DESC, name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(scopes.into_iter().map(ServiceScopeType::from).collect()) + } + + /// Get the active service scope for this address + async fn active_scope(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let scope: Option = sqlx::query_as::<_, ServiceScope>( + r#" + SELECT id, created_at, updated_at, name, account_id, account_address_id, description, is_active + FROM service_scopes + WHERE account_address_id = $1 AND is_active = true + LIMIT 1 + "#, + ) + .bind(self.id) + .fetch_optional(&*pool) + .await?; + + Ok(scope.map(ServiceScopeType::from)) + } + + /// Get the account this address belongs to + async fn account(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; 
+ let pool = db.pool().await; + + let account: Account = sqlx::query_as::<_, Account>( + r#" + SELECT a.id, a.created_at, a.updated_at, a.customer_id, a.name, a.status, a.start_date, a.end_date + FROM accounts a + WHERE a.id = $1 + "#, + ) + .bind(self.account_id) + .fetch_one(&*pool) + .await?; + + Ok(AccountType::from(account)) + } +} + +impl From for AccountAddressType { + fn from(address: AccountAddress) -> Self { + Self { + id: address.base.id, + account_id: address.account_id, + name: address.name, + street_address: address.address.street_address, + city: address.address.city, + state: address.address.state, + zip_code: address.address.zip_code, + is_active: address.is_active, + is_primary: address.is_primary, + notes: address.notes, + } + } +} + +/// Revenue GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct RevenueType { + pub id: Uuid, + pub amount: Decimal, + pub start_date: NaiveDate, + pub end_date: Option, + pub wave_service_id: Option, + /// Computed: whether revenue is currently active (no end_date or end_date >= today) + pub is_active: bool, +} + +#[ComplexObject] +impl RevenueType { + /// Get the linked Wave product name (if wave_service_id is set) + async fn wave_product_name(&self, ctx: &Context<'_>) -> Result> { + let Some(wave_service_id) = &self.wave_service_id else { + return Ok(None); + }; + + let Some(wave) = ctx.data_opt::>() else { + return Ok(None); + }; + + match wave.get_product(wave_service_id).await { + Ok(product) => Ok(Some(product.name)), + Err(_) => Ok(None), + } + } +} + +impl From for RevenueType { + fn from(revenue: Revenue) -> Self { + let today = chrono::Utc::now().date_naive(); + let is_active = revenue.is_active_on(today); + Self { + id: revenue.base.id, + amount: revenue.amount, + start_date: revenue.start_date, + end_date: revenue.end_date, + wave_service_id: revenue.wave_service_id, + is_active, + } + } +} + +/// Labor rate GraphQL type +#[derive(SimpleObject)] +pub struct LaborType { + pub id: Uuid, 
+ pub amount: Decimal, + pub start_date: NaiveDate, + pub end_date: Option, + /// Computed: whether labor rate is currently active + pub is_active: bool, +} + +impl From for LaborType { + fn from(labor: Labor) -> Self { + let today = chrono::Utc::now().date_naive(); + let is_active = labor.is_active_on(today); + Self { + id: labor.base.id, + amount: labor.amount, + start_date: labor.start_date, + end_date: labor.end_date, + is_active, + } + } +} + +/// Schedule GraphQL type +#[derive(SimpleObject)] +pub struct ScheduleType { + pub id: Uuid, + pub name: Option, + pub monday: bool, + pub tuesday: bool, + pub wednesday: bool, + pub thursday: bool, + pub friday: bool, + pub saturday: bool, + pub sunday: bool, + pub weekend_service: bool, + pub schedule_exception: Option, + pub start_date: Option, + pub end_date: Option, + /// Computed: whether schedule is currently active + pub is_active: bool, +} + +impl From for ScheduleType { + fn from(schedule: Schedule) -> Self { + let today = chrono::Utc::now().date_naive(); + let is_active = match (schedule.start_date, schedule.end_date) { + (Some(start), Some(end)) => today >= start && today <= end, + (Some(start), None) => today >= start, + (None, Some(end)) => today <= end, + (None, None) => true, + }; + Self { + id: schedule.base.id, + name: schedule.name, + monday: schedule.monday, + tuesday: schedule.tuesday, + wednesday: schedule.wednesday, + thursday: schedule.thursday, + friday: schedule.friday, + saturday: schedule.saturday, + sunday: schedule.sunday, + weekend_service: schedule.weekend_service, + schedule_exception: schedule.schedule_exception, + start_date: schedule.start_date, + end_date: schedule.end_date, + is_active, + } + } +} + +/// Task frequency enum for GraphQL +#[derive(async_graphql::Enum, Copy, Clone, Eq, PartialEq)] +pub enum TaskFrequencyType { + Daily, + Weekly, + Monthly, + Quarterly, + Triannual, + Annual, + AsNeeded, +} + +impl From for TaskFrequencyType { + fn from(freq: TaskFrequency) -> Self { + 
match freq { + TaskFrequency::Daily => TaskFrequencyType::Daily, + TaskFrequency::Weekly => TaskFrequencyType::Weekly, + TaskFrequency::Monthly => TaskFrequencyType::Monthly, + TaskFrequency::Quarterly => TaskFrequencyType::Quarterly, + TaskFrequency::Triannual => TaskFrequencyType::Triannual, + TaskFrequency::Annual => TaskFrequencyType::Annual, + TaskFrequency::AsNeeded => TaskFrequencyType::AsNeeded, + } + } +} + +/// Service scope GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceScopeType { + pub id: Uuid, + pub name: String, + pub description: Option, + pub is_active: bool, +} + +#[ComplexObject] +impl ServiceScopeType { + /// Get areas for this scope + async fn areas(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let areas: Vec = sqlx::query_as::<_, ServiceScopeArea>( + r#" + SELECT id, created_at, updated_at, scope_id, name, "order" + FROM service_scope_areas + WHERE scope_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(areas.into_iter().map(ServiceScopeAreaType::from).collect()) + } +} + +impl From for ServiceScopeType { + fn from(scope: ServiceScope) -> Self { + Self { + id: scope.base.id, + name: scope.name, + description: scope.description, + is_active: scope.is_active, + } + } +} + +/// Service scope area GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceScopeAreaType { + pub id: Uuid, + pub name: String, + pub order: i32, +} + +#[ComplexObject] +impl ServiceScopeAreaType { + /// Get tasks for this area + async fn tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let tasks: Vec = sqlx::query_as::<_, ServiceScopeTask>( + r#" + SELECT id, created_at, updated_at, area_id, scope_description, checklist_description, + session_description, frequency, "order", estimated_minutes + FROM service_scope_tasks + WHERE area_id = $1 + ORDER BY "order" ASC + 
"#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(tasks.into_iter().map(ServiceScopeTaskType::from).collect()) + } +} + +impl From for ServiceScopeAreaType { + fn from(area: ServiceScopeArea) -> Self { + Self { + id: area.base.id, + name: area.name, + order: area.order, + } + } +} + +/// Service scope task GraphQL type +#[derive(SimpleObject)] +pub struct ServiceScopeTaskType { + pub id: Uuid, + pub scope_description: String, + pub checklist_description: String, + pub session_description: String, + pub frequency: TaskFrequencyType, + pub order: i32, + pub estimated_minutes: Option, +} + +impl From for ServiceScopeTaskType { + fn from(task: ServiceScopeTask) -> Self { + Self { + id: task.base.id, + scope_description: task.scope_description, + checklist_description: task.checklist_description, + session_description: task.session_description, + frequency: task.frequency.into(), + order: task.order, + estimated_minutes: task.estimated_minutes, + } + } +} + +// ==================== MUTATION INPUT TYPES ==================== + +/// Input for creating a labor rate +#[derive(InputObject)] +pub struct CreateLaborInput { + pub amount: Decimal, + pub start_date: NaiveDate, +} + +/// Input for updating a labor rate +#[derive(InputObject)] +pub struct UpdateLaborInput { + pub amount: Option, + pub start_date: Option, + pub end_date: Option, +} + +/// Input for creating a schedule +#[derive(InputObject)] +pub struct CreateScheduleInput { + pub name: Option, + pub monday: Option, + pub tuesday: Option, + pub wednesday: Option, + pub thursday: Option, + pub friday: Option, + pub saturday: Option, + pub sunday: Option, + pub weekend_service: Option, + pub schedule_exception: Option, + pub start_date: Option, +} + +/// Input for updating a schedule +#[derive(InputObject)] +pub struct UpdateScheduleInput { + pub name: Option, + pub monday: Option, + pub tuesday: Option, + pub wednesday: Option, + pub thursday: Option, + pub friday: Option, + pub saturday: Option, + pub 
sunday: Option, + pub weekend_service: Option, + pub schedule_exception: Option, + pub start_date: Option, + pub end_date: Option, +} + +/// Input for creating a service scope +#[derive(InputObject)] +pub struct CreateServiceScopeInput { + pub name: String, + pub description: Option, + pub areas: Option>, +} + +/// Input for updating a service scope +#[derive(InputObject)] +pub struct UpdateServiceScopeInput { + pub name: Option, + pub description: Option, + pub is_active: Option, +} + +/// Input for creating a scope area +#[derive(InputObject)] +pub struct CreateScopeAreaInput { + pub name: String, + pub order: Option, + pub tasks: Option>, +} + +/// Input for updating a scope area +#[derive(InputObject)] +pub struct UpdateScopeAreaInput { + pub name: Option, + pub order: Option, +} + +/// Input for creating a scope task +#[derive(InputObject)] +pub struct CreateScopeTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub frequency: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for updating a scope task +#[derive(InputObject)] +pub struct UpdateScopeTaskInput { + pub scope_description: Option, + pub checklist_description: Option, + pub session_description: Option, + pub frequency: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for updating an account address (location) +#[derive(InputObject)] +pub struct UpdateAccountAddressInput { + pub name: Option, + pub street_address: Option, + pub city: Option, + pub state: Option, + pub zip_code: Option, + pub notes: Option, + pub is_primary: Option, + pub is_active: Option, +} diff --git a/src/graphql/types/calendar.rs b/src/graphql/types/calendar.rs new file mode 100644 index 0000000..a68f987 --- /dev/null +++ b/src/graphql/types/calendar.rs @@ -0,0 +1,266 @@ +use async_graphql::{InputObject, SimpleObject}; +use chrono::{DateTime, Utc}; + +use crate::services::google_calendar::{ + Attendee, 
CalendarEvent, CreateEventRequest, EventDateTime, EventReminder, EventReminders, + UpdateEventRequest, +}; + +/// Date/time for a calendar event +#[derive(SimpleObject)] +pub struct EventDateTimeType { + /// For timed events (RFC3339 format) + pub date_time: Option>, + /// For all-day events (YYYY-MM-DD format) + pub date: Option, + /// Timezone (e.g., "America/New_York") + pub time_zone: Option, +} + +impl From for EventDateTimeType { + fn from(dt: EventDateTime) -> Self { + Self { + date_time: dt.date_time, + date: dt.date, + time_zone: dt.time_zone, + } + } +} + +/// Input for event date/time +#[derive(InputObject)] +pub struct EventDateTimeInput { + /// For timed events (RFC3339 format) + pub date_time: Option>, + /// For all-day events (YYYY-MM-DD format) + pub date: Option, + /// Timezone (e.g., "America/New_York") + pub time_zone: Option, +} + +impl From for EventDateTime { + fn from(input: EventDateTimeInput) -> Self { + Self { + date_time: input.date_time, + date: input.date, + time_zone: input.time_zone, + } + } +} + +/// Calendar event attendee +#[derive(SimpleObject)] +pub struct AttendeeType { + pub email: String, + pub display_name: Option, + pub optional: Option, + pub response_status: Option, +} + +impl From for AttendeeType { + fn from(a: Attendee) -> Self { + Self { + email: a.email, + display_name: a.display_name, + optional: a.optional, + response_status: a.response_status, + } + } +} + +/// Input for attendee +#[derive(InputObject)] +pub struct AttendeeInput { + pub email: String, + pub display_name: Option, + pub optional: Option, +} + +impl From for Attendee { + fn from(input: AttendeeInput) -> Self { + Self { + email: input.email, + display_name: input.display_name, + optional: input.optional, + response_status: None, + } + } +} + +/// Event reminder +#[derive(SimpleObject)] +pub struct EventReminderType { + /// Method: "email" or "popup" + pub method: String, + /// Minutes before the event + pub minutes: i32, +} + +impl From for 
EventReminderType { + fn from(r: EventReminder) -> Self { + Self { + method: r.method, + minutes: r.minutes, + } + } +} + +/// Input for event reminder +#[derive(InputObject)] +pub struct EventReminderInput { + /// Method: "email" or "popup" + pub method: String, + /// Minutes before the event + pub minutes: i32, +} + +impl From for EventReminder { + fn from(input: EventReminderInput) -> Self { + Self { + method: input.method, + minutes: input.minutes, + } + } +} + +/// Reminders settings for an event +#[derive(SimpleObject)] +pub struct EventRemindersType { + pub use_default: bool, + pub overrides: Option>, +} + +impl From for EventRemindersType { + fn from(r: EventReminders) -> Self { + Self { + use_default: r.use_default, + overrides: r.overrides.map(|v| v.into_iter().map(EventReminderType::from).collect()), + } + } +} + +/// Input for reminders settings +#[derive(InputObject)] +pub struct EventRemindersInput { + pub use_default: bool, + pub overrides: Option>, +} + +impl From for EventReminders { + fn from(input: EventRemindersInput) -> Self { + Self { + use_default: input.use_default, + overrides: input.overrides.map(|v| v.into_iter().map(EventReminder::from).collect()), + } + } +} + +/// Calendar event +#[derive(SimpleObject)] +pub struct CalendarEventType { + pub id: String, + pub summary: String, + pub description: Option, + pub location: Option, + pub start: EventDateTimeType, + pub end: EventDateTimeType, + pub attendees: Option>, + pub reminders: Option, + pub color_id: Option, + pub html_link: Option, + pub created: Option>, + pub updated: Option>, + pub status: Option, +} + +impl From for CalendarEventType { + fn from(e: CalendarEvent) -> Self { + Self { + id: e.id, + summary: e.summary, + description: e.description, + location: e.location, + start: EventDateTimeType::from(e.start), + end: EventDateTimeType::from(e.end), + attendees: e.attendees.map(|v| v.into_iter().map(AttendeeType::from).collect()), + reminders: 
e.reminders.map(EventRemindersType::from), + color_id: e.color_id, + html_link: e.html_link, + created: e.created, + updated: e.updated, + status: e.status, + } + } +} + +/// Input for creating a calendar event +#[derive(InputObject)] +pub struct CreateCalendarEventInput { + /// Custom event ID (optional, must be 5-1024 chars, a-v and 0-9 only) + pub id: Option, + pub summary: String, + pub description: Option, + pub location: Option, + pub start: EventDateTimeInput, + pub end: EventDateTimeInput, + pub attendees: Option>, + pub reminders: Option, + pub color_id: Option, +} + +impl From for CreateEventRequest { + fn from(input: CreateCalendarEventInput) -> Self { + Self { + id: input.id, + summary: input.summary, + description: input.description, + location: input.location, + start: EventDateTime::from(input.start), + end: EventDateTime::from(input.end), + attendees: input.attendees.map(|v| v.into_iter().map(Attendee::from).collect()), + reminders: input.reminders.map(EventReminders::from), + color_id: input.color_id, + } + } +} + +/// Input for updating a calendar event +#[derive(InputObject)] +pub struct UpdateCalendarEventInput { + pub summary: Option, + pub description: Option, + pub location: Option, + pub start: Option, + pub end: Option, + pub attendees: Option>, + pub reminders: Option, + pub color_id: Option, +} + +impl From for UpdateEventRequest { + fn from(input: UpdateCalendarEventInput) -> Self { + Self { + summary: input.summary, + description: input.description, + location: input.location, + start: input.start.map(EventDateTime::from), + end: input.end.map(EventDateTime::from), + attendees: input.attendees.map(|v| v.into_iter().map(Attendee::from).collect()), + reminders: input.reminders.map(EventReminders::from), + color_id: input.color_id, + } + } +} + +/// Filter input for listing calendar events +#[derive(InputObject, Default)] +pub struct CalendarEventFilterInput { + /// Filter events starting from this time + pub time_min: Option>, + /// 
Filter events ending before this time + pub time_max: Option>, + /// Maximum number of events to return + pub max_results: Option, + /// Search query + pub q: Option, +} diff --git a/src/graphql/types/customer.rs b/src/graphql/types/customer.rs new file mode 100644 index 0000000..dad5b1e --- /dev/null +++ b/src/graphql/types/customer.rs @@ -0,0 +1,296 @@ +use std::sync::Arc; + +use async_graphql::{ComplexObject, Context, Enum, InputObject, Result, SimpleObject}; +use chrono::NaiveDate; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{Account, Customer, CustomerAddress, CustomerContact, EntityStatus}; +use crate::services::WaveService; + +use super::AccountType; + +/// GraphQL enum for entity status +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum EntityStatusType { + Active, + Inactive, + Pending, +} + +impl From for EntityStatusType { + fn from(status: EntityStatus) -> Self { + match status { + EntityStatus::Active => EntityStatusType::Active, + EntityStatus::Inactive => EntityStatusType::Inactive, + EntityStatus::Pending => EntityStatusType::Pending, + } + } +} + +impl From for EntityStatus { + fn from(status: EntityStatusType) -> Self { + match status { + EntityStatusType::Active => EntityStatus::Active, + EntityStatusType::Inactive => EntityStatus::Inactive, + EntityStatusType::Pending => EntityStatus::Pending, + } + } +} + +/// Customer GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct CustomerType { + pub id: Uuid, + pub name: String, + pub status: EntityStatusType, + pub start_date: Option, + pub end_date: Option, + pub billing_terms: Option, + pub billing_email: Option, + pub wave_customer_id: Option, + /// Computed: whether customer is currently active based on status and date range + pub is_active: bool, +} + +#[ComplexObject] +impl CustomerType { + /// Get all contacts for this customer + async fn contacts(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let 
contacts: Vec = sqlx::query_as::<_, CustomerContact>( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, phone, + email, is_active, is_primary, notes + FROM customer_contacts + WHERE customer_id = $1 AND is_active = true + ORDER BY is_primary DESC, last_name ASC, first_name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(contacts.into_iter().map(CustomerContactType::from).collect()) + } + + /// Get all addresses for this customer (active and inactive for history) + async fn addresses(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let addresses: Vec = sqlx::query_as::<_, CustomerAddress>( + r#" + SELECT id, created_at, updated_at, customer_id, street_address, city, state, zip_code, + is_active, is_primary, name, notes + FROM customer_addresses + WHERE customer_id = $1 + ORDER BY is_active DESC, is_primary DESC, name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(addresses.into_iter().map(CustomerAddressType::from).collect()) + } + + /// Get all accounts linked to this customer + async fn accounts(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let accounts: Vec = sqlx::query_as::<_, Account>( + r#" + SELECT id, created_at, updated_at, customer_id, name, status, start_date, end_date + FROM accounts + WHERE customer_id = $1 + ORDER BY name ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(accounts.into_iter().map(AccountType::from).collect()) + } + + /// Get the linked Wave customer name (if wave_customer_id is set) + async fn wave_customer_name(&self, ctx: &Context<'_>) -> Result> { + let Some(wave_customer_id) = &self.wave_customer_id else { + return Ok(None); + }; + + let Some(wave) = ctx.data_opt::>() else { + return Ok(None); + }; + + match wave.get_customer(wave_customer_id).await { + Ok(customer) => Ok(Some(customer.name)), + Err(_) => Ok(None), + } + } +} + +impl From 
for CustomerType { + fn from(customer: Customer) -> Self { + let is_active = customer.is_active(); + Self { + id: customer.base.id, + name: customer.name, + status: customer.status.into(), + start_date: customer.start_date, + end_date: customer.end_date, + billing_terms: customer.billing_terms, + billing_email: customer.billing_email, + wave_customer_id: customer.wave_customer_id, + is_active, + } + } +} + +/// Filter input for customer queries +#[derive(InputObject, Default)] +pub struct CustomerFilter { + /// Filter by name (partial match, case-insensitive) + pub name: Option, + /// Filter by status + pub status: Option, + /// Filter by active state (computed from status + date range) + pub is_active: Option, +} + +// ==================== MUTATION INPUT TYPES ==================== + +/// Input for creating a customer +#[derive(InputObject)] +pub struct CreateCustomerInput { + pub name: String, + pub status: Option, + pub billing_email: Option, + pub billing_terms: Option, + pub wave_customer_id: Option, + pub start_date: Option, + pub end_date: Option, +} + +/// Input for updating a customer +#[derive(InputObject)] +pub struct UpdateCustomerInput { + pub name: Option, + pub status: Option, + pub billing_email: Option, + pub billing_terms: Option, + pub wave_customer_id: Option, + pub start_date: Option, + pub end_date: Option, +} + +/// Input for creating a customer contact +#[derive(InputObject)] +pub struct CreateCustomerContactInput { + pub first_name: String, + pub last_name: String, + pub email: Option, + pub phone: Option, + pub notes: Option, + pub is_primary: Option, +} + +/// Input for updating a customer contact +#[derive(InputObject)] +pub struct UpdateCustomerContactInput { + pub first_name: Option, + pub last_name: Option, + pub email: Option, + pub phone: Option, + pub notes: Option, + pub is_primary: Option, + pub is_active: Option, +} + +/// Input for creating a customer address +#[derive(InputObject)] +pub struct CreateCustomerAddressInput { + pub 
name: Option, + pub street_address: String, + pub city: String, + pub state: String, + pub zip_code: String, + pub notes: Option, + pub is_primary: Option, +} + +/// Input for updating a customer address +#[derive(InputObject)] +pub struct UpdateCustomerAddressInput { + pub name: Option, + pub street_address: Option, + pub city: Option, + pub state: Option, + pub zip_code: Option, + pub notes: Option, + pub is_primary: Option, + pub is_active: Option, +} + +/// Customer contact GraphQL type +#[derive(SimpleObject)] +pub struct CustomerContactType { + pub id: Uuid, + pub first_name: String, + pub last_name: String, + pub email: Option, + pub phone: Option, + pub is_active: bool, + pub is_primary: bool, + pub notes: Option, +} + +impl From for CustomerContactType { + fn from(contact: CustomerContact) -> Self { + Self { + id: contact.base.id, + first_name: contact.contact.first_name, + last_name: contact.contact.last_name, + email: contact.email, + phone: contact.contact.phone, + is_active: contact.is_active, + is_primary: contact.is_primary, + notes: contact.notes, + } + } +} + +/// Customer address GraphQL type +#[derive(SimpleObject)] +pub struct CustomerAddressType { + pub id: Uuid, + pub name: Option, + pub street_address: String, + pub city: String, + pub state: String, + pub zip_code: String, + pub is_active: bool, + pub is_primary: bool, + pub notes: Option, +} + +impl From for CustomerAddressType { + fn from(address: CustomerAddress) -> Self { + Self { + id: address.base.id, + name: address.name, + street_address: address.address.street_address, + city: address.address.city, + state: address.address.state, + zip_code: address.address.zip_code, + is_active: address.is_active, + is_primary: address.is_primary, + notes: address.notes, + } + } +} diff --git a/src/graphql/types/email.rs b/src/graphql/types/email.rs new file mode 100644 index 0000000..c9be8b4 --- /dev/null +++ b/src/graphql/types/email.rs @@ -0,0 +1,87 @@ +use async_graphql::{InputObject, 
SimpleObject}; + +use crate::services::email_templates::EmailTemplate; + +/// Email template type +#[derive(SimpleObject)] +pub struct EmailTemplateType { + pub id: String, + pub name: String, + pub description: String, + pub subject_template: String, + pub body_template: String, + /// Variable names used in this template + pub variables: Vec, + /// Content type (text/html or text/plain) + pub content_type: String, +} + +impl From<&EmailTemplate> for EmailTemplateType { + fn from(t: &EmailTemplate) -> Self { + Self { + id: t.id.clone(), + name: t.name.clone(), + description: t.description.clone(), + subject_template: t.subject_template.clone(), + body_template: t.body_template.clone(), + variables: t.variable_names(), + content_type: t.content_type.clone(), + } + } +} + +/// Input for sending an email +#[derive(InputObject)] +pub struct SendEmailInput { + /// Recipient email addresses + pub to: Vec, + /// CC recipients + pub cc: Option>, + /// BCC recipients + pub bcc: Option>, + /// Email subject + pub subject: String, + /// Email body + pub body: String, + /// Content type (defaults to "text/html") + pub content_type: Option, + /// Display name for the sender + pub from_name: Option, +} + +/// Template variable input +#[derive(InputObject)] +pub struct TemplateVariableInput { + pub name: String, + pub value: String, +} + +/// Input for sending a template email +#[derive(InputObject)] +pub struct SendTemplateEmailInput { + /// Recipient email addresses + pub to: Vec, + /// CC recipients + pub cc: Option>, + /// BCC recipients + pub bcc: Option>, + /// Template ID to use + pub template_id: String, + /// Template variables + pub variables: Vec, + /// Display name for the sender + pub from_name: Option, +} + +/// Result of sending an email +#[derive(SimpleObject)] +pub struct SendEmailResult { + /// Whether the email was sent successfully + pub success: bool, + /// Gmail message ID (if successful) + pub message_id: Option, + /// Gmail thread ID (if successful) + pub 
thread_id: Option, + /// Error message (if failed) + pub error: Option, +} diff --git a/src/graphql/types/event.rs b/src/graphql/types/event.rs new file mode 100644 index 0000000..f2e1109 --- /dev/null +++ b/src/graphql/types/event.rs @@ -0,0 +1,589 @@ +//! Event GraphQL types +//! +//! Types for the event audit trail system. + +use async_graphql::{ComplexObject, Context, Enum, Result, SimpleObject}; +use chrono::{DateTime, Utc}; +use serde_json::Value as JsonValue; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{Event, EventType, TeamProfile, CustomerProfile}; + +use super::{TeamProfileType, CustomerProfileType, ProfileUnion}; + +// ==================== EVENT TYPE ENUM ==================== + +/// Event type enum for GraphQL +/// Mirrors the EventType enum from models but exposed to GraphQL +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum EventTypeEnum { + // Customer events + CustomerCreated, + CustomerUpdated, + CustomerDeleted, + CustomerStatusChanged, + CustomerAddressCreated, + CustomerAddressUpdated, + CustomerAddressDeleted, + CustomerContactCreated, + CustomerContactUpdated, + CustomerContactDeleted, + + // Account events + AccountCreated, + AccountUpdated, + AccountDeleted, + AccountStatusChanged, + AccountAddressCreated, + AccountAddressUpdated, + AccountAddressDeleted, + AccountAddressPrimaryChanged, + AccountContactCreated, + AccountContactUpdated, + AccountContactDeleted, + AccountContactPrimaryChanged, + + // Service events + ServiceCreated, + ServiceUpdated, + ServiceDeleted, + ServiceStatusChanged, + ServiceAssigned, + ServiceUnassigned, + ServiceRescheduled, + + // Project events + ProjectCreated, + ProjectUpdated, + ProjectDeleted, + ProjectStatusChanged, + ProjectAssigned, + ProjectUnassigned, + ProjectRescheduled, + ProjectAmountChanged, + + // Schedule events + ScheduleCreated, + ScheduleUpdated, + ScheduleDeleted, + ScheduleFrequencyChanged, + + // Session events + ServiceSessionStarted, + ServiceSessionEnded, + 
ServiceSessionReverted, + ProjectSessionStarted, + ProjectSessionEnded, + ProjectSessionReverted, + + // Session media events + SessionNoteCreated, + SessionNoteUpdated, + SessionNoteDeleted, + SessionImageUploaded, + SessionImageUpdated, + SessionImageDeleted, + SessionVideoUploaded, + SessionVideoUpdated, + SessionVideoDeleted, + SessionMediaInternalFlagged, + + // Task events + ServiceTaskCompleted, + ServiceTaskUncompleted, + ProjectTaskCompleted, + ProjectTaskUncompleted, + + // Service scope events + ServiceScopeCreated, + ServiceScopeUpdated, + ServiceScopeDeleted, + ServiceScopeActivated, + ServiceScopeDeactivated, + ServiceScopeAreaCreated, + ServiceScopeAreaUpdated, + ServiceScopeAreaDeleted, + ServiceScopeTaskCreated, + ServiceScopeTaskUpdated, + ServiceScopeTaskDeleted, + + // Project scope events + ProjectScopeCreated, + ProjectScopeUpdated, + ProjectScopeDeleted, + ProjectScopeActivated, + ProjectScopeDeactivated, + ProjectScopeCategoryCreated, + ProjectScopeCategoryUpdated, + ProjectScopeCategoryDeleted, + ProjectScopeTaskCreated, + ProjectScopeTaskUpdated, + ProjectScopeTaskDeleted, + + // Scope template events + ScopeTemplateCreated, + ScopeTemplateUpdated, + ScopeTemplateDeleted, + ScopeTemplateActivated, + ScopeTemplateDeactivated, + ScopeTemplateInstantiated, + + // Profile events + TeamProfileCreated, + TeamProfileUpdated, + TeamProfileDeleted, + TeamProfileRoleChanged, + TeamProfileStatusChanged, + CustomerProfileCreated, + CustomerProfileUpdated, + CustomerProfileDeleted, + CustomerProfileStatusChanged, + CustomerProfileAccessGranted, + CustomerProfileAccessRevoked, + + // Financial events + LaborCreated, + LaborUpdated, + LaborDeleted, + LaborRateChanged, + RevenueCreated, + RevenueUpdated, + RevenueDeleted, + RevenueAmountChanged, + InvoiceCreated, + InvoiceUpdated, + InvoiceDeleted, + InvoiceStatusChanged, + InvoiceSent, + InvoicePaid, + InvoiceOverdue, + + // Report events + ReportCreated, + ReportUpdated, + ReportDeleted, + 
ReportSubmitted, + ReportApproved, + + // Conversation events + ConversationCreated, + ConversationUpdated, + ConversationArchived, + ConversationUnarchived, + ConversationParticipantAdded, + ConversationParticipantRemoved, + + // Message events + MessageSent, + MessageUpdated, + MessageDeleted, + MessageRead, + + // Notification events + NotificationRuleCreated, + NotificationRuleUpdated, + NotificationRuleDeleted, + NotificationRuleActivated, + NotificationRuleDeactivated, + NotificationCreated, + NotificationSent, + NotificationRead, + NotificationFailed, + NotificationDeliveryAttempted, + NotificationDeliverySucceeded, + NotificationDeliveryFailed, + + // System events + SystemStartup, + SystemShutdown, + MonitoringTaskRun, + MonitoringAlertTriggered, + BackgroundJobStarted, + BackgroundJobCompleted, + BackgroundJobFailed, +} + +impl From for EventTypeEnum { + fn from(et: EventType) -> Self { + match et { + EventType::CustomerCreated => Self::CustomerCreated, + EventType::CustomerUpdated => Self::CustomerUpdated, + EventType::CustomerDeleted => Self::CustomerDeleted, + EventType::CustomerStatusChanged => Self::CustomerStatusChanged, + EventType::CustomerAddressCreated => Self::CustomerAddressCreated, + EventType::CustomerAddressUpdated => Self::CustomerAddressUpdated, + EventType::CustomerAddressDeleted => Self::CustomerAddressDeleted, + EventType::CustomerContactCreated => Self::CustomerContactCreated, + EventType::CustomerContactUpdated => Self::CustomerContactUpdated, + EventType::CustomerContactDeleted => Self::CustomerContactDeleted, + + EventType::AccountCreated => Self::AccountCreated, + EventType::AccountUpdated => Self::AccountUpdated, + EventType::AccountDeleted => Self::AccountDeleted, + EventType::AccountStatusChanged => Self::AccountStatusChanged, + EventType::AccountAddressCreated => Self::AccountAddressCreated, + EventType::AccountAddressUpdated => Self::AccountAddressUpdated, + EventType::AccountAddressDeleted => Self::AccountAddressDeleted, + 
EventType::AccountAddressPrimaryChanged => Self::AccountAddressPrimaryChanged, + EventType::AccountContactCreated => Self::AccountContactCreated, + EventType::AccountContactUpdated => Self::AccountContactUpdated, + EventType::AccountContactDeleted => Self::AccountContactDeleted, + EventType::AccountContactPrimaryChanged => Self::AccountContactPrimaryChanged, + + EventType::ServiceCreated => Self::ServiceCreated, + EventType::ServiceUpdated => Self::ServiceUpdated, + EventType::ServiceDeleted => Self::ServiceDeleted, + EventType::ServiceStatusChanged => Self::ServiceStatusChanged, + EventType::ServiceAssigned => Self::ServiceAssigned, + EventType::ServiceUnassigned => Self::ServiceUnassigned, + EventType::ServiceRescheduled => Self::ServiceRescheduled, + + EventType::ProjectCreated => Self::ProjectCreated, + EventType::ProjectUpdated => Self::ProjectUpdated, + EventType::ProjectDeleted => Self::ProjectDeleted, + EventType::ProjectStatusChanged => Self::ProjectStatusChanged, + EventType::ProjectAssigned => Self::ProjectAssigned, + EventType::ProjectUnassigned => Self::ProjectUnassigned, + EventType::ProjectRescheduled => Self::ProjectRescheduled, + EventType::ProjectAmountChanged => Self::ProjectAmountChanged, + + EventType::ScheduleCreated => Self::ScheduleCreated, + EventType::ScheduleUpdated => Self::ScheduleUpdated, + EventType::ScheduleDeleted => Self::ScheduleDeleted, + EventType::ScheduleFrequencyChanged => Self::ScheduleFrequencyChanged, + + EventType::ServiceSessionStarted => Self::ServiceSessionStarted, + EventType::ServiceSessionEnded => Self::ServiceSessionEnded, + EventType::ServiceSessionReverted => Self::ServiceSessionReverted, + EventType::ProjectSessionStarted => Self::ProjectSessionStarted, + EventType::ProjectSessionEnded => Self::ProjectSessionEnded, + EventType::ProjectSessionReverted => Self::ProjectSessionReverted, + + EventType::SessionNoteCreated => Self::SessionNoteCreated, + EventType::SessionNoteUpdated => Self::SessionNoteUpdated, + 
EventType::SessionNoteDeleted => Self::SessionNoteDeleted, + EventType::SessionImageUploaded => Self::SessionImageUploaded, + EventType::SessionImageUpdated => Self::SessionImageUpdated, + EventType::SessionImageDeleted => Self::SessionImageDeleted, + EventType::SessionVideoUploaded => Self::SessionVideoUploaded, + EventType::SessionVideoUpdated => Self::SessionVideoUpdated, + EventType::SessionVideoDeleted => Self::SessionVideoDeleted, + EventType::SessionMediaInternalFlagged => Self::SessionMediaInternalFlagged, + + EventType::ServiceTaskCompleted => Self::ServiceTaskCompleted, + EventType::ServiceTaskUncompleted => Self::ServiceTaskUncompleted, + EventType::ProjectTaskCompleted => Self::ProjectTaskCompleted, + EventType::ProjectTaskUncompleted => Self::ProjectTaskUncompleted, + + EventType::ServiceScopeCreated => Self::ServiceScopeCreated, + EventType::ServiceScopeUpdated => Self::ServiceScopeUpdated, + EventType::ServiceScopeDeleted => Self::ServiceScopeDeleted, + EventType::ServiceScopeActivated => Self::ServiceScopeActivated, + EventType::ServiceScopeDeactivated => Self::ServiceScopeDeactivated, + EventType::ServiceScopeAreaCreated => Self::ServiceScopeAreaCreated, + EventType::ServiceScopeAreaUpdated => Self::ServiceScopeAreaUpdated, + EventType::ServiceScopeAreaDeleted => Self::ServiceScopeAreaDeleted, + EventType::ServiceScopeTaskCreated => Self::ServiceScopeTaskCreated, + EventType::ServiceScopeTaskUpdated => Self::ServiceScopeTaskUpdated, + EventType::ServiceScopeTaskDeleted => Self::ServiceScopeTaskDeleted, + + EventType::ProjectScopeCreated => Self::ProjectScopeCreated, + EventType::ProjectScopeUpdated => Self::ProjectScopeUpdated, + EventType::ProjectScopeDeleted => Self::ProjectScopeDeleted, + EventType::ProjectScopeActivated => Self::ProjectScopeActivated, + EventType::ProjectScopeDeactivated => Self::ProjectScopeDeactivated, + EventType::ProjectScopeCategoryCreated => Self::ProjectScopeCategoryCreated, + EventType::ProjectScopeCategoryUpdated => 
Self::ProjectScopeCategoryUpdated, + EventType::ProjectScopeCategoryDeleted => Self::ProjectScopeCategoryDeleted, + EventType::ProjectScopeTaskCreated => Self::ProjectScopeTaskCreated, + EventType::ProjectScopeTaskUpdated => Self::ProjectScopeTaskUpdated, + EventType::ProjectScopeTaskDeleted => Self::ProjectScopeTaskDeleted, + + EventType::ScopeTemplateCreated => Self::ScopeTemplateCreated, + EventType::ScopeTemplateUpdated => Self::ScopeTemplateUpdated, + EventType::ScopeTemplateDeleted => Self::ScopeTemplateDeleted, + EventType::ScopeTemplateActivated => Self::ScopeTemplateActivated, + EventType::ScopeTemplateDeactivated => Self::ScopeTemplateDeactivated, + EventType::ScopeTemplateInstantiated => Self::ScopeTemplateInstantiated, + + EventType::TeamProfileCreated => Self::TeamProfileCreated, + EventType::TeamProfileUpdated => Self::TeamProfileUpdated, + EventType::TeamProfileDeleted => Self::TeamProfileDeleted, + EventType::TeamProfileRoleChanged => Self::TeamProfileRoleChanged, + EventType::TeamProfileStatusChanged => Self::TeamProfileStatusChanged, + EventType::CustomerProfileCreated => Self::CustomerProfileCreated, + EventType::CustomerProfileUpdated => Self::CustomerProfileUpdated, + EventType::CustomerProfileDeleted => Self::CustomerProfileDeleted, + EventType::CustomerProfileStatusChanged => Self::CustomerProfileStatusChanged, + EventType::CustomerProfileAccessGranted => Self::CustomerProfileAccessGranted, + EventType::CustomerProfileAccessRevoked => Self::CustomerProfileAccessRevoked, + + EventType::LaborCreated => Self::LaborCreated, + EventType::LaborUpdated => Self::LaborUpdated, + EventType::LaborDeleted => Self::LaborDeleted, + EventType::LaborRateChanged => Self::LaborRateChanged, + EventType::RevenueCreated => Self::RevenueCreated, + EventType::RevenueUpdated => Self::RevenueUpdated, + EventType::RevenueDeleted => Self::RevenueDeleted, + EventType::RevenueAmountChanged => Self::RevenueAmountChanged, + EventType::InvoiceCreated => 
Self::InvoiceCreated, + EventType::InvoiceUpdated => Self::InvoiceUpdated, + EventType::InvoiceDeleted => Self::InvoiceDeleted, + EventType::InvoiceStatusChanged => Self::InvoiceStatusChanged, + EventType::InvoiceSent => Self::InvoiceSent, + EventType::InvoicePaid => Self::InvoicePaid, + EventType::InvoiceOverdue => Self::InvoiceOverdue, + + EventType::ReportCreated => Self::ReportCreated, + EventType::ReportUpdated => Self::ReportUpdated, + EventType::ReportDeleted => Self::ReportDeleted, + EventType::ReportSubmitted => Self::ReportSubmitted, + EventType::ReportApproved => Self::ReportApproved, + + EventType::ConversationCreated => Self::ConversationCreated, + EventType::ConversationUpdated => Self::ConversationUpdated, + EventType::ConversationArchived => Self::ConversationArchived, + EventType::ConversationUnarchived => Self::ConversationUnarchived, + EventType::ConversationParticipantAdded => Self::ConversationParticipantAdded, + EventType::ConversationParticipantRemoved => Self::ConversationParticipantRemoved, + + EventType::MessageSent => Self::MessageSent, + EventType::MessageUpdated => Self::MessageUpdated, + EventType::MessageDeleted => Self::MessageDeleted, + EventType::MessageRead => Self::MessageRead, + + EventType::NotificationRuleCreated => Self::NotificationRuleCreated, + EventType::NotificationRuleUpdated => Self::NotificationRuleUpdated, + EventType::NotificationRuleDeleted => Self::NotificationRuleDeleted, + EventType::NotificationRuleActivated => Self::NotificationRuleActivated, + EventType::NotificationRuleDeactivated => Self::NotificationRuleDeactivated, + EventType::NotificationCreated => Self::NotificationCreated, + EventType::NotificationSent => Self::NotificationSent, + EventType::NotificationRead => Self::NotificationRead, + EventType::NotificationFailed => Self::NotificationFailed, + EventType::NotificationDeliveryAttempted => Self::NotificationDeliveryAttempted, + EventType::NotificationDeliverySucceeded => 
Self::NotificationDeliverySucceeded, + EventType::NotificationDeliveryFailed => Self::NotificationDeliveryFailed, + + EventType::SystemStartup => Self::SystemStartup, + EventType::SystemShutdown => Self::SystemShutdown, + EventType::MonitoringTaskRun => Self::MonitoringTaskRun, + EventType::MonitoringAlertTriggered => Self::MonitoringAlertTriggered, + EventType::BackgroundJobStarted => Self::BackgroundJobStarted, + EventType::BackgroundJobCompleted => Self::BackgroundJobCompleted, + EventType::BackgroundJobFailed => Self::BackgroundJobFailed, + } + } +} + +// ==================== EVENT TYPE ==================== + +/// Audit trail event +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct EventType_ { + pub id: Uuid, + pub event_type: EventTypeEnum, + /// Entity type (e.g., "customer", "service", "project") + pub entity_type: String, + pub entity_id: Uuid, + /// Actor type ("team_profile", "customer_profile", or "system") + pub actor_type: Option, + pub actor_id: Option, + /// Additional context (old_status, new_status, changed_fields, etc.) 
+ pub metadata: Option, + /// When the event occurred (business timestamp) + pub timestamp: DateTime, + pub created_at: DateTime, +} + +#[ComplexObject] +impl EventType_ { + /// The actor's profile (polymorphic - team or customer) + async fn actor_profile(&self, ctx: &Context<'_>) -> Result> { + let Some(ref actor_type) = self.actor_type else { + return Ok(None); + }; + let Some(actor_id) = self.actor_id else { + return Ok(None); + }; + + // System actors don't have profiles + if actor_type == "system" { + return Ok(None); + } + + let db = ctx.data::()?; + let pool = db.pool().await; + + match actor_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(actor_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(actor_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } + + /// Human-readable description of the event + async fn description(&self) -> String { + format_event_description(&self.event_type, &self.entity_type, self.metadata.as_ref()) + } +} + +impl From for EventType_ { + fn from(e: Event) -> Self { + Self { + id: e.id, + event_type: e.event_type.into(), + entity_type: e.entity_type, + entity_id: e.entity_id, + actor_type: e.actor_type, + actor_id: e.actor_id, + metadata: e.metadata, + timestamp: e.timestamp, + created_at: e.created_at, + } + } +} + +/// Format a human-readable description for an event +fn format_event_description( + event_type: &EventTypeEnum, + 
entity_type: &str, + metadata: Option<&JsonValue>, +) -> String { + let action = match event_type { + // Create events + EventTypeEnum::CustomerCreated + | EventTypeEnum::AccountCreated + | EventTypeEnum::ServiceCreated + | EventTypeEnum::ProjectCreated + | EventTypeEnum::ScheduleCreated + | EventTypeEnum::TeamProfileCreated + | EventTypeEnum::CustomerProfileCreated + | EventTypeEnum::LaborCreated + | EventTypeEnum::RevenueCreated + | EventTypeEnum::InvoiceCreated + | EventTypeEnum::ReportCreated + | EventTypeEnum::ConversationCreated + | EventTypeEnum::NotificationRuleCreated + | EventTypeEnum::NotificationCreated => "created", + + // Update events + EventTypeEnum::CustomerUpdated + | EventTypeEnum::AccountUpdated + | EventTypeEnum::ServiceUpdated + | EventTypeEnum::ProjectUpdated + | EventTypeEnum::ScheduleUpdated + | EventTypeEnum::TeamProfileUpdated + | EventTypeEnum::CustomerProfileUpdated + | EventTypeEnum::LaborUpdated + | EventTypeEnum::RevenueUpdated + | EventTypeEnum::InvoiceUpdated + | EventTypeEnum::ReportUpdated + | EventTypeEnum::ConversationUpdated + | EventTypeEnum::NotificationRuleUpdated + | EventTypeEnum::MessageUpdated => "updated", + + // Delete events + EventTypeEnum::CustomerDeleted + | EventTypeEnum::AccountDeleted + | EventTypeEnum::ServiceDeleted + | EventTypeEnum::ProjectDeleted + | EventTypeEnum::ScheduleDeleted + | EventTypeEnum::TeamProfileDeleted + | EventTypeEnum::CustomerProfileDeleted + | EventTypeEnum::LaborDeleted + | EventTypeEnum::RevenueDeleted + | EventTypeEnum::InvoiceDeleted + | EventTypeEnum::ReportDeleted + | EventTypeEnum::NotificationRuleDeleted + | EventTypeEnum::MessageDeleted => "deleted", + + // Status changes + EventTypeEnum::CustomerStatusChanged + | EventTypeEnum::AccountStatusChanged + | EventTypeEnum::ServiceStatusChanged + | EventTypeEnum::ProjectStatusChanged + | EventTypeEnum::TeamProfileStatusChanged + | EventTypeEnum::CustomerProfileStatusChanged + | EventTypeEnum::InvoiceStatusChanged => { + if let 
Some(meta) = metadata { + if let (Some(old), Some(new)) = ( + meta.get("old_status").and_then(|v| v.as_str()), + meta.get("new_status").and_then(|v| v.as_str()), + ) { + return format!("{} status changed from {} to {}", entity_type, old, new); + } + } + "status changed" + } + + // Session events + EventTypeEnum::ServiceSessionStarted | EventTypeEnum::ProjectSessionStarted => { + "session started" + } + EventTypeEnum::ServiceSessionEnded | EventTypeEnum::ProjectSessionEnded => "session ended", + EventTypeEnum::ServiceSessionReverted | EventTypeEnum::ProjectSessionReverted => { + "session reverted" + } + + // Task events + EventTypeEnum::ServiceTaskCompleted | EventTypeEnum::ProjectTaskCompleted => { + "task completed" + } + EventTypeEnum::ServiceTaskUncompleted | EventTypeEnum::ProjectTaskUncompleted => { + "task uncompleted" + } + + // Assignment events + EventTypeEnum::ServiceAssigned | EventTypeEnum::ProjectAssigned => "assigned", + EventTypeEnum::ServiceUnassigned | EventTypeEnum::ProjectUnassigned => "unassigned", + EventTypeEnum::ServiceRescheduled | EventTypeEnum::ProjectRescheduled => "rescheduled", + + // Invoice events + EventTypeEnum::InvoiceSent => "sent", + EventTypeEnum::InvoicePaid => "paid", + EventTypeEnum::InvoiceOverdue => "marked overdue", + + // Message events + EventTypeEnum::MessageSent => "message sent", + EventTypeEnum::MessageRead => "message read", + + // Notification events + EventTypeEnum::NotificationSent => "sent", + EventTypeEnum::NotificationRead => "read", + EventTypeEnum::NotificationFailed => "failed", + + // Default + _ => "event occurred", + }; + + format!("{} {}", entity_type, action) +} diff --git a/src/graphql/types/invoice.rs b/src/graphql/types/invoice.rs new file mode 100644 index 0000000..785a3ed --- /dev/null +++ b/src/graphql/types/invoice.rs @@ -0,0 +1,452 @@ +use std::sync::Arc; + +use async_graphql::{ComplexObject, Context, Enum, InputObject, Result, SimpleObject}; +use chrono::NaiveDate; +use rust_decimal::Decimal; 
+use uuid::Uuid; + +use crate::db::Database; +use crate::models::{Account, Customer, Invoice, InvoiceProject, InvoiceRevenue, InvoiceStatus, Project, Revenue}; +use crate::services::wave::WaveService; + +use super::{AccountType, CustomerType, ProjectType, RevenueType}; + +/// GraphQL enum for invoice status +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum InvoiceStatusType { + Draft, + Sent, + Paid, + Overdue, + Cancelled, +} + +impl From for InvoiceStatusType { + fn from(status: InvoiceStatus) -> Self { + match status { + InvoiceStatus::Draft => InvoiceStatusType::Draft, + InvoiceStatus::Sent => InvoiceStatusType::Sent, + InvoiceStatus::Paid => InvoiceStatusType::Paid, + InvoiceStatus::Overdue => InvoiceStatusType::Overdue, + InvoiceStatus::Cancelled => InvoiceStatusType::Cancelled, + } + } +} + +impl From for InvoiceStatus { + fn from(status: InvoiceStatusType) -> Self { + match status { + InvoiceStatusType::Draft => InvoiceStatus::Draft, + InvoiceStatusType::Sent => InvoiceStatus::Sent, + InvoiceStatusType::Paid => InvoiceStatus::Paid, + InvoiceStatusType::Overdue => InvoiceStatus::Overdue, + InvoiceStatusType::Cancelled => InvoiceStatus::Cancelled, + } + } +} + +/// Invoice GraphQL type - customer billing invoice +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct InvoiceType { + pub id: Uuid, + pub customer_id: Uuid, + pub start_date: NaiveDate, + pub end_date: NaiveDate, + pub status: InvoiceStatusType, + pub date_paid: Option, + pub wave_invoice_id: Option, +} + +#[ComplexObject] +impl InvoiceType { + /// Get the customer this invoice belongs to + async fn customer(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer = sqlx::query_as::<_, Customer>( + r#" + SELECT id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + FROM customers + WHERE id = $1 + "#, + ) + .bind(self.customer_id) + .fetch_optional(&*pool) + .await?; + + 
Ok(customer.map(CustomerType::from)) + } + + /// Get all revenues in this invoice with their amounts + async fn revenues(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let revenues: Vec = sqlx::query_as::<_, InvoiceRevenue>( + r#" + SELECT id, invoice_id, revenue_id, amount, created_at + FROM invoice_revenues + WHERE invoice_id = $1 + ORDER BY created_at + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(revenues.into_iter().map(InvoiceRevenueType::from).collect()) + } + + /// Get all projects in this invoice with their amounts + async fn projects(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let projects: Vec = sqlx::query_as::<_, InvoiceProject>( + r#" + SELECT id, invoice_id, project_id, amount, created_at + FROM invoice_projects + WHERE invoice_id = $1 + ORDER BY created_at + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(projects.into_iter().map(InvoiceProjectType::from).collect()) + } + + /// Total amount from all revenues in this invoice + async fn revenues_total(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let total: Option = sqlx::query_scalar( + r#" + SELECT COALESCE(SUM(amount), 0) + FROM invoice_revenues + WHERE invoice_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(total.unwrap_or_default()) + } + + /// Total amount from all projects in this invoice + async fn projects_total(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let total: Option = sqlx::query_scalar( + r#" + SELECT COALESCE(SUM(amount), 0) + FROM invoice_projects + WHERE invoice_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(total.unwrap_or_default()) + } + + /// Grand total amount (revenues + projects) + async fn total_amount(&self, ctx: &Context<'_>) -> Result { + let revenues = 
self.revenues_total(ctx).await?; + let projects = self.projects_total(ctx).await?; + Ok(revenues + projects) + } + + /// Count of revenues in this invoice + async fn revenue_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#"SELECT COUNT(*) FROM invoice_revenues WHERE invoice_id = $1"#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count as i32) + } + + /// Count of projects in this invoice + async fn project_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#"SELECT COUNT(*) FROM invoice_projects WHERE invoice_id = $1"#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count as i32) + } + + /// Get the Wave invoice number if this invoice is synced to Wave + async fn wave_invoice_number(&self, ctx: &Context<'_>) -> Result> { + let Some(wave_invoice_id) = &self.wave_invoice_id else { + return Ok(None); + }; + + let Some(wave) = ctx.data_opt::>() else { + return Ok(None); + }; + + match wave.get_invoice(wave_invoice_id).await { + Ok(invoice) => Ok(Some(invoice.invoice_number)), + Err(_) => Ok(None), + } + } +} + +impl From for InvoiceType { + fn from(invoice: Invoice) -> Self { + Self { + id: invoice.base.id, + customer_id: invoice.customer_id, + start_date: invoice.start_date, + end_date: invoice.end_date, + status: invoice.status.into(), + date_paid: invoice.date_paid, + wave_invoice_id: invoice.wave_invoice_id, + } + } +} + +/// Invoice-Revenue entry with snapshot amount +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct InvoiceRevenueType { + pub id: Uuid, + pub invoice_id: Uuid, + pub revenue_id: Uuid, + /// Snapshot amount - stored when added to invoice + pub amount: Decimal, +} + +#[ComplexObject] +impl InvoiceRevenueType { + /// Get the full revenue details + async fn revenue(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + 
let pool = db.pool().await; + + let revenue = sqlx::query_as::<_, Revenue>( + r#" + SELECT id, created_at, updated_at, account_id, amount, start_date, end_date, wave_service_id + FROM revenues + WHERE id = $1 + "#, + ) + .bind(self.revenue_id) + .fetch_optional(&*pool) + .await?; + + Ok(revenue.map(RevenueType::from)) + } + + /// Get the account this revenue belongs to + async fn account(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let account = sqlx::query_as::<_, Account>( + r#" + SELECT a.id, a.created_at, a.updated_at, a.customer_id, a.name, a.status, a.start_date, a.end_date + FROM accounts a + JOIN revenues r ON r.account_id = a.id + WHERE r.id = $1 + "#, + ) + .bind(self.revenue_id) + .fetch_optional(&*pool) + .await?; + + Ok(account.map(AccountType::from)) + } +} + +impl From for InvoiceRevenueType { + fn from(ir: InvoiceRevenue) -> Self { + Self { + id: ir.id, + invoice_id: ir.invoice_id, + revenue_id: ir.revenue_id, + amount: ir.amount, + } + } +} + +/// Invoice-Project entry with snapshot amount +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct InvoiceProjectType { + pub id: Uuid, + pub invoice_id: Uuid, + pub project_id: Uuid, + /// Snapshot amount - stored when added to invoice + pub amount: Decimal, +} + +#[ComplexObject] +impl InvoiceProjectType { + /// Get the full project details + async fn project(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(self.project_id) + .fetch_optional(&*pool) + .await?; + + Ok(project.map(ProjectType::from)) + } + + /// Get the account this project is linked to (via account_address_id), if any + async fn account(&self, ctx: 
&Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let account = sqlx::query_as::<_, Account>( + r#" + SELECT a.id, a.created_at, a.updated_at, a.customer_id, a.name, a.status, a.start_date, a.end_date + FROM accounts a + JOIN account_addresses aa ON aa.account_id = a.id + JOIN projects p ON p.account_address_id = aa.id + WHERE p.id = $1 + "#, + ) + .bind(self.project_id) + .fetch_optional(&*pool) + .await?; + + Ok(account.map(AccountType::from)) + } +} + +impl From for InvoiceProjectType { + fn from(ip: InvoiceProject) -> Self { + Self { + id: ip.id, + invoice_id: ip.invoice_id, + project_id: ip.project_id, + amount: ip.amount, + } + } +} + +/// Filter input for invoice queries +#[derive(InputObject, Default)] +pub struct InvoiceFilterInput { + /// Filter by customer ID + pub customer_id: Option, + /// Filter by status + pub status: Option, + /// Filter by invoices that overlap with this date + pub date: Option, + /// Filter by invoices starting on or after this date + pub start_date_from: Option, + /// Filter by invoices ending on or before this date + pub end_date_to: Option, +} + +/// Paginated invoice results +#[derive(SimpleObject)] +pub struct InvoiceConnection { + /// List of invoices + pub items: Vec, + /// Total count matching the filter + pub total_count: i32, + /// Whether there are more items after this page + pub has_next_page: bool, +} + +/// Eligible revenue for adding to an invoice +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct EligibleRevenueType { + pub revenue_id: Uuid, + pub account_id: Uuid, + pub account_name: String, + /// Revenue amount + pub amount: Decimal, +} + +#[ComplexObject] +impl EligibleRevenueType { + /// Get the full revenue details + async fn revenue(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let revenue = sqlx::query_as::<_, Revenue>( + r#" + SELECT id, created_at, updated_at, account_id, amount, start_date, end_date, 
wave_service_id + FROM revenues + WHERE id = $1 + "#, + ) + .bind(self.revenue_id) + .fetch_optional(&*pool) + .await?; + + Ok(revenue.map(RevenueType::from)) + } +} + +/// Eligible project for adding to an invoice +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct EligibleInvoiceProjectType { + pub project_id: Uuid, + pub name: String, + pub date: NaiveDate, + /// Project amount + pub amount: Decimal, +} + +#[ComplexObject] +impl EligibleInvoiceProjectType { + /// Get the full project details + async fn project(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(self.project_id) + .fetch_optional(&*pool) + .await?; + + Ok(project.map(ProjectType::from)) + } +} diff --git a/src/graphql/types/messaging.rs b/src/graphql/types/messaging.rs new file mode 100644 index 0000000..cbf69ff --- /dev/null +++ b/src/graphql/types/messaging.rs @@ -0,0 +1,508 @@ +//! Messaging GraphQL types +//! +//! Types for conversations, messages, participants, and read receipts. 
+ +use async_graphql::{ComplexObject, Context, Enum, Result, SimpleObject}; +use chrono::{DateTime, Utc}; +use serde_json::Value as JsonValue; +use uuid::Uuid; + +use crate::auth::{ProfileType, UserContext}; +use crate::db::Database; +use crate::models::{ + Conversation, ConversationParticipant, ConversationType, Message, MessageReadReceipt, + TeamProfile, CustomerProfile, +}; + +use super::{TeamProfileType, CustomerProfileType}; + +// ==================== ENUMS ==================== + +/// Conversation type enum for GraphQL +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum ConversationTypeEnum { + Direct, + Group, + Support, +} + +impl From for ConversationTypeEnum { + fn from(ct: ConversationType) -> Self { + match ct { + ConversationType::Direct => Self::Direct, + ConversationType::Group => Self::Group, + ConversationType::Support => Self::Support, + } + } +} + +// ==================== CONVERSATION ==================== + +/// Message thread/conversation +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ConversationType_ { + pub id: Uuid, + pub subject: Option, + pub conversation_type: ConversationTypeEnum, + /// Entity this conversation is about (e.g., "service", "project", "account") + pub entity_type: Option, + pub entity_id: Option, + /// Who created the conversation (polymorphic type) + pub created_by_type: Option, + pub created_by_id: Option, + pub last_message_at: Option>, + pub is_archived: bool, + pub metadata: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ConversationType_ { + /// Participants in this conversation + async fn participants(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let participants: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, participant_type, participant_id, + last_read_at, unread_count, is_muted, is_archived, joined_at + FROM conversation_participants + WHERE conversation_id = $1 AND 
is_archived = false + ORDER BY joined_at ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(participants.into_iter().map(ConversationParticipantType::from).collect()) + } + + /// Messages in this conversation (most recent first) + async fn messages( + &self, + ctx: &Context<'_>, + #[graphql(default = 50)] limit: i32, + #[graphql(default)] include_deleted: bool, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let messages: Vec = if include_deleted { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 + ORDER BY created_at DESC + LIMIT $2 + "#, + ) + .bind(self.id) + .bind(limit) + .fetch_all(&*pool) + .await? + } else { + sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE conversation_id = $1 AND is_deleted = false + ORDER BY created_at DESC + LIMIT $2 + "#, + ) + .bind(self.id) + .bind(limit) + .fetch_all(&*pool) + .await? 
+ }; + + Ok(messages.into_iter().map(MessageType::from).collect()) + } + + /// Unread count for the current user + async fn unread_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Get current user context - if not authenticated, return 0 + let user = match ctx.data::() { + Ok(u) => u, + Err(_) => return Ok(0), + }; + + let participant_type = match user.profile_type { + ProfileType::Team => "team_profile", + ProfileType::Customer => "customer_profile", + }; + + let count: Option = sqlx::query_scalar( + r#" + SELECT unread_count + FROM conversation_participants + WHERE conversation_id = $1 AND participant_type = $2 AND participant_id = $3 + "#, + ) + .bind(self.id) + .bind(participant_type) + .bind(user.user_id) + .fetch_optional(&*pool) + .await?; + + Ok(count.unwrap_or(0)) + } + + /// The profile that created this conversation (polymorphic) + async fn created_by_profile(&self, ctx: &Context<'_>) -> Result> { + let Some(ref created_by_type) = self.created_by_type else { + return Ok(None); + }; + let Some(created_by_id) = self.created_by_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + match created_by_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(created_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(created_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } 
+} + +impl From for ConversationType_ { + fn from(c: Conversation) -> Self { + Self { + id: c.base.id, + subject: c.subject, + conversation_type: c.conversation_type.into(), + entity_type: c.entity_type, + entity_id: c.entity_id, + created_by_type: c.created_by_type, + created_by_id: c.created_by_id, + last_message_at: c.last_message_at, + is_archived: c.is_archived, + metadata: c.metadata, + created_at: c.base.created_at, + updated_at: c.base.updated_at, + } + } +} + +// ==================== CONVERSATION PARTICIPANT ==================== + +/// User participation in a conversation +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ConversationParticipantType { + pub id: Uuid, + pub conversation_id: Uuid, + /// Participant type ("team_profile" or "customer_profile") + pub participant_type: String, + pub participant_id: Uuid, + pub last_read_at: Option>, + pub unread_count: i32, + pub is_muted: bool, + pub is_archived: bool, + pub joined_at: DateTime, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ConversationParticipantType { + /// The participant's profile (polymorphic) + async fn participant_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + match self.participant_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.participant_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(self.participant_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| 
ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } +} + +impl From for ConversationParticipantType { + fn from(p: ConversationParticipant) -> Self { + Self { + id: p.base.id, + conversation_id: p.conversation_id, + participant_type: p.participant_type, + participant_id: p.participant_id, + last_read_at: p.last_read_at, + unread_count: p.unread_count, + is_muted: p.is_muted, + is_archived: p.is_archived, + joined_at: p.joined_at, + created_at: p.base.created_at, + updated_at: p.base.updated_at, + } + } +} + +// ==================== MESSAGE ==================== + +/// Individual message in a conversation +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct MessageType { + pub id: Uuid, + pub conversation_id: Uuid, + /// Author type ("team_profile" or "customer_profile") + pub author_type: String, + pub author_id: Uuid, + pub content: String, + pub is_deleted: bool, + pub reply_to_id: Option, + pub attachments: Option, + pub is_system_message: bool, + pub metadata: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl MessageType { + /// The author's profile (polymorphic) + async fn author_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + match self.author_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.author_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(self.author_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| 
ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } + + /// The message this is a reply to (if any) + async fn reply_to(&self, ctx: &Context<'_>) -> Result> { + let Some(reply_to_id) = self.reply_to_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let message: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, conversation_id, author_type, author_id, + content, is_deleted, reply_to_id, attachments, is_system_message, metadata + FROM messages + WHERE id = $1 + "#, + ) + .bind(reply_to_id) + .fetch_optional(&*pool) + .await?; + + Ok(message.map(MessageType::from)) + } + + /// Read receipts for this message + async fn read_receipts(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let receipts: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, message_id, reader_type, reader_id, read_at + FROM message_read_receipts + WHERE message_id = $1 + ORDER BY read_at ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(receipts.into_iter().map(MessageReadReceiptType::from).collect()) + } +} + +impl From for MessageType { + fn from(m: Message) -> Self { + Self { + id: m.base.id, + conversation_id: m.conversation_id, + author_type: m.author_type, + author_id: m.author_id, + content: m.content, + is_deleted: m.is_deleted, + reply_to_id: m.reply_to_id, + attachments: m.attachments, + is_system_message: m.is_system_message, + metadata: m.metadata, + created_at: m.base.created_at, + updated_at: m.base.updated_at, + } + } +} + +// ==================== MESSAGE READ RECEIPT ==================== + +/// Tracks when a message was read by a participant +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct MessageReadReceiptType { + pub id: Uuid, + pub message_id: Uuid, + /// Reader type ("team_profile" or "customer_profile") + pub reader_type: String, + pub reader_id: Uuid, + pub read_at: DateTime, + pub 
created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl MessageReadReceiptType { + /// The reader's profile (polymorphic) + async fn reader_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + match self.reader_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.reader_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(self.reader_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } +} + +impl From for MessageReadReceiptType { + fn from(r: MessageReadReceipt) -> Self { + Self { + id: r.base.id, + message_id: r.message_id, + reader_type: r.reader_type, + reader_id: r.reader_id, + read_at: r.read_at, + created_at: r.base.created_at, + updated_at: r.base.updated_at, + } + } +} + +// ==================== PROFILE UNION ==================== + +/// Union type for polymorphic profile references +#[derive(async_graphql::Union)] +pub enum ProfileUnion { + TeamProfile(TeamProfileType), + CustomerProfile(CustomerProfileType), +} diff --git a/src/graphql/types/mod.rs b/src/graphql/types/mod.rs new file mode 100644 index 0000000..e28ff2a --- /dev/null +++ b/src/graphql/types/mod.rs @@ -0,0 +1,33 @@ +mod account; +mod calendar; +mod customer; +mod email; +mod event; +mod invoice; +mod messaging; +mod notification; +mod profile; +mod project; +mod project_scope_template; +mod report; +mod service; +mod 
service_scope_template; +mod session; +mod wave; + +pub use account::*; +pub use calendar::*; +pub use customer::*; +pub use email::*; +pub use event::*; +pub use invoice::*; +pub use messaging::*; +pub use notification::*; +pub use profile::*; +pub use project::*; +pub use project_scope_template::*; +pub use report::*; +pub use service::*; +pub use service_scope_template::*; +pub use session::*; +pub use wave::*; diff --git a/src/graphql/types/notification.rs b/src/graphql/types/notification.rs new file mode 100644 index 0000000..6798dcf --- /dev/null +++ b/src/graphql/types/notification.rs @@ -0,0 +1,439 @@ +//! Notification GraphQL types +//! +//! Types for notification rules, notifications, and deliveries. + +use async_graphql::{ComplexObject, Context, Enum, Result, SimpleObject}; +use chrono::{DateTime, Utc}; +use serde_json::Value as JsonValue; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{ + DeliveryStatus, Event, Notification, NotificationChannel, NotificationDelivery, + NotificationRule, NotificationStatus, TeamProfile, CustomerProfile, +}; + +use super::{TeamProfileType, CustomerProfileType, ProfileUnion, EventType_}; + +// ==================== ENUMS ==================== + +/// Notification channel enum for GraphQL +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum NotificationChannelEnum { + InApp, + Email, + Sms, +} + +impl From for NotificationChannelEnum { + fn from(nc: NotificationChannel) -> Self { + match nc { + NotificationChannel::InApp => Self::InApp, + NotificationChannel::Email => Self::Email, + NotificationChannel::Sms => Self::Sms, + } + } +} + +/// Notification status enum for GraphQL +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum NotificationStatusEnum { + Pending, + Sent, + Read, + Failed, +} + +impl From for NotificationStatusEnum { + fn from(ns: NotificationStatus) -> Self { + match ns { + NotificationStatus::Pending => Self::Pending, + NotificationStatus::Sent => Self::Sent, + NotificationStatus::Read => 
Self::Read, + NotificationStatus::Failed => Self::Failed, + } + } +} + +/// Delivery status enum for GraphQL +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum DeliveryStatusEnum { + Pending, + Queued, + Sending, + Sent, + Delivered, + Failed, + Bounced, +} + +impl From for DeliveryStatusEnum { + fn from(ds: DeliveryStatus) -> Self { + match ds { + DeliveryStatus::Pending => Self::Pending, + DeliveryStatus::Queued => Self::Queued, + DeliveryStatus::Sending => Self::Sending, + DeliveryStatus::Sent => Self::Sent, + DeliveryStatus::Delivered => Self::Delivered, + DeliveryStatus::Failed => Self::Failed, + DeliveryStatus::Bounced => Self::Bounced, + } + } +} + +// ==================== NOTIFICATION RULE ==================== + +/// Admin-defined rule for triggering notifications +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct NotificationRuleType { + pub id: Uuid, + pub name: String, + pub description: Option, + pub is_active: bool, + /// Event types that trigger this rule (array of event type strings) + pub event_types: JsonValue, + /// Channels to deliver through (array of channel strings) + pub channels: JsonValue, + /// Target roles (array of role strings) + pub target_roles: Option, + /// Custom conditions for triggering + pub conditions: Option, + /// Notification subject template + pub subject_template: Option, + /// Notification body template + pub body_template: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl NotificationRuleType { + /// Team profiles specifically targeted by this rule + async fn target_team_profiles(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profiles: Vec = sqlx::query_as( + r#" + SELECT tp.id, tp.created_at, tp.updated_at, tp.first_name, tp.last_name, + tp.phone, tp.email, tp.role, tp.status, tp.notes + FROM team_profiles tp + INNER JOIN notification_rule_team_profiles nrtp ON tp.id = nrtp.team_profile_id + WHERE nrtp.rule_id = 
$1 + ORDER BY tp.last_name, tp.first_name + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(profiles.into_iter().map(TeamProfileType::from).collect()) + } + + /// Customer profiles specifically targeted by this rule + async fn target_customer_profiles(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profiles: Vec = sqlx::query_as( + r#" + SELECT cp.id, cp.created_at, cp.updated_at, cp.customer_id, cp.first_name, + cp.last_name, cp.email, cp.phone, cp.status, cp.notes, cp.is_primary + FROM customer_profiles cp + INNER JOIN notification_rule_customer_profiles nrcp ON cp.id = nrcp.customer_profile_id + WHERE nrcp.rule_id = $1 + ORDER BY cp.last_name, cp.first_name + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(profiles.into_iter().map(CustomerProfileType::from).collect()) + } + + /// Notifications generated by this rule + async fn notifications( + &self, + ctx: &Context<'_>, + #[graphql(default = 50)] limit: i32, + ) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let notifications: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE rule_id = $1 + ORDER BY created_at DESC + LIMIT $2 + "#, + ) + .bind(self.id) + .bind(limit) + .fetch_all(&*pool) + .await?; + + Ok(notifications.into_iter().map(NotificationType::from).collect()) + } +} + +impl From for NotificationRuleType { + fn from(r: NotificationRule) -> Self { + Self { + id: r.base.id, + name: r.name, + description: r.description, + is_active: r.is_active, + event_types: r.event_types, + channels: r.channels, + target_roles: r.target_roles, + conditions: r.conditions, + subject_template: r.subject_template, + body_template: r.body_template, + created_at: r.base.created_at, + updated_at: r.base.updated_at, + } + } +} + +// ==================== NOTIFICATION 
==================== + +/// Individual notification instance +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct NotificationType { + pub id: Uuid, + /// Recipient type ("team_profile" or "customer_profile") + pub recipient_type: String, + pub recipient_id: Uuid, + pub rule_id: Option, + pub event_id: Option, + pub status: NotificationStatusEnum, + pub subject: String, + pub body: String, + /// URL to navigate to when notification is clicked + pub action_url: Option, + pub read_at: Option>, + pub metadata: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl NotificationType { + /// The recipient's profile (polymorphic) + async fn recipient_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + match self.recipient_type.as_str() { + "team_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.recipient_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::TeamProfile(TeamProfileType::from(p)))) + } + "customer_profile" => { + let profile: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, customer_id, first_name, last_name, email, phone, status, notes, is_primary + FROM customer_profiles + WHERE id = $1 + "#, + ) + .bind(self.recipient_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(|p| ProfileUnion::CustomerProfile(CustomerProfileType::from(p)))) + } + _ => Ok(None), + } + } + + /// The rule that triggered this notification + async fn rule(&self, ctx: &Context<'_>) -> Result> { + let Some(rule_id) = self.rule_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let rule: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, 
subject_template, body_template + FROM notification_rules + WHERE id = $1 + "#, + ) + .bind(rule_id) + .fetch_optional(&*pool) + .await?; + + Ok(rule.map(NotificationRuleType::from)) + } + + /// The event that triggered this notification + async fn event(&self, ctx: &Context<'_>) -> Result> { + let Some(event_id) = self.event_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let event: Option = sqlx::query_as( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, actor_id, metadata, timestamp, created_at + FROM events + WHERE id = $1 + "#, + ) + .bind(event_id) + .fetch_optional(&*pool) + .await?; + + Ok(event.map(EventType_::from)) + } + + /// Delivery attempts for this notification + async fn deliveries(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let deliveries: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + FROM notification_deliveries + WHERE notification_id = $1 + ORDER BY created_at ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(deliveries.into_iter().map(NotificationDeliveryType::from).collect()) + } + + /// Whether this notification has been read + async fn is_read(&self) -> bool { + self.read_at.is_some() || self.status == NotificationStatusEnum::Read + } +} + +impl From for NotificationType { + fn from(n: Notification) -> Self { + Self { + id: n.base.id, + recipient_type: n.recipient_type, + recipient_id: n.recipient_id, + rule_id: n.rule_id, + event_id: n.event_id, + status: n.status.into(), + subject: n.subject, + body: n.body, + action_url: n.action_url, + read_at: n.read_at, + metadata: n.metadata, + created_at: n.base.created_at, + updated_at: n.base.updated_at, + } + } +} + +// ==================== NOTIFICATION DELIVERY ==================== + +/// Tracks delivery attempts 
per channel +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct NotificationDeliveryType { + pub id: Uuid, + pub notification_id: Uuid, + pub channel: NotificationChannelEnum, + pub status: DeliveryStatusEnum, + pub attempts: i32, + pub last_attempt_at: Option>, + pub sent_at: Option>, + pub delivered_at: Option>, + pub error_message: Option, + /// External service ID (e.g., email provider message ID) + pub external_id: Option, + pub metadata: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl NotificationDeliveryType { + /// The notification this delivery is for + async fn notification(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let notification: Option = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE id = $1 + "#, + ) + .bind(self.notification_id) + .fetch_optional(&*pool) + .await?; + + Ok(notification.map(NotificationType::from)) + } + + /// Whether delivery was successful + async fn is_successful(&self) -> bool { + matches!( + self.status, + DeliveryStatusEnum::Sent | DeliveryStatusEnum::Delivered + ) + } + + /// Whether delivery has permanently failed + async fn is_failed(&self) -> bool { + matches!( + self.status, + DeliveryStatusEnum::Failed | DeliveryStatusEnum::Bounced + ) + } +} + +impl From for NotificationDeliveryType { + fn from(d: NotificationDelivery) -> Self { + Self { + id: d.base.id, + notification_id: d.notification_id, + channel: d.channel.into(), + status: d.status.into(), + attempts: d.attempts, + last_attempt_at: d.last_attempt_at, + sent_at: d.sent_at, + delivered_at: d.delivered_at, + error_message: d.error_message, + external_id: d.external_id, + metadata: d.metadata, + created_at: d.base.created_at, + updated_at: d.base.updated_at, + } + } +} diff --git a/src/graphql/types/profile.rs 
b/src/graphql/types/profile.rs new file mode 100644 index 0000000..7e8e939 --- /dev/null +++ b/src/graphql/types/profile.rs @@ -0,0 +1,134 @@ +use async_graphql::{Enum, SimpleObject, Union}; +use chrono::{DateTime, Utc}; +use uuid::Uuid; + +use crate::graphql::types::EntityStatusType; +use crate::models::{CustomerProfile, TeamProfile, TeamRole}; + +/// GraphQL enum for team member roles +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum TeamRoleType { + Admin, + TeamLeader, + TeamMember, +} + +impl From for TeamRoleType { + fn from(role: TeamRole) -> Self { + match role { + TeamRole::Admin => TeamRoleType::Admin, + TeamRole::TeamLeader => TeamRoleType::TeamLeader, + TeamRole::TeamMember => TeamRoleType::TeamMember, + } + } +} + +impl From for TeamRole { + fn from(role: TeamRoleType) -> Self { + match role { + TeamRoleType::Admin => TeamRole::Admin, + TeamRoleType::TeamLeader => TeamRole::TeamLeader, + TeamRoleType::TeamMember => TeamRole::TeamMember, + } + } +} + +/// Team member profile (internal user) +#[derive(SimpleObject)] +pub struct TeamProfileType { + pub id: Uuid, + pub first_name: String, + pub last_name: String, + pub full_name: String, + pub email: Option, + pub phone: Option, + pub role: TeamRoleType, + pub status: EntityStatusType, + pub notes: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +impl From for TeamProfileType { + fn from(profile: TeamProfile) -> Self { + Self { + id: profile.base.id, + first_name: profile.contact.first_name.clone(), + last_name: profile.contact.last_name.clone(), + full_name: profile.full_name(), + email: profile.email, + phone: profile.contact.phone, + role: profile.role.into(), + status: profile.status.into(), + notes: profile.notes, + created_at: profile.base.created_at, + updated_at: profile.base.updated_at, + } + } +} + +/// Customer info for CustomerProfileType +#[derive(SimpleObject)] +pub struct CustomerInfo { + pub id: Uuid, + pub name: String, +} + +/// Customer profile (external user) 
+#[derive(SimpleObject)] +pub struct CustomerProfileType { + pub id: Uuid, + pub first_name: String, + pub last_name: String, + pub full_name: String, + pub email: Option, + pub phone: Option, + pub status: EntityStatusType, + pub notes: Option, + pub created_at: DateTime, + pub updated_at: DateTime, + pub customers: Vec, +} + +impl CustomerProfileType { + pub fn from_profile(profile: CustomerProfile, customers: Vec) -> Self { + Self { + id: profile.base.id, + first_name: profile.contact.first_name.clone(), + last_name: profile.contact.last_name.clone(), + full_name: profile.full_name(), + email: profile.email, + phone: profile.contact.phone, + status: profile.status.into(), + notes: profile.notes, + created_at: profile.base.created_at, + updated_at: profile.base.updated_at, + customers, + } + } +} + +impl From for CustomerProfileType { + fn from(profile: CustomerProfile) -> Self { + Self { + id: profile.base.id, + first_name: profile.contact.first_name.clone(), + last_name: profile.contact.last_name.clone(), + full_name: profile.full_name(), + email: profile.email, + phone: profile.contact.phone, + status: profile.status.into(), + notes: profile.notes, + created_at: profile.base.created_at, + updated_at: profile.base.updated_at, + customers: vec![], + } + } +} + +/// Union type for the `me` query - can be either team or customer profile +#[derive(Union)] +pub enum ProfileType { + TeamProfile(TeamProfileType), + CustomerProfile(CustomerProfileType), +} diff --git a/src/graphql/types/project.rs b/src/graphql/types/project.rs new file mode 100644 index 0000000..8592c4a --- /dev/null +++ b/src/graphql/types/project.rs @@ -0,0 +1,271 @@ +use std::sync::Arc; + +use async_graphql::{ComplexObject, Context, InputObject, Result, SimpleObject}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::mutations::project::ProjectScopeType; +use crate::models::{AccountAddress, Customer, Project, ProjectScope, 
ProjectTeamMember, TeamProfile}; +use crate::services::WaveService; + +use super::{AccountAddressType, CustomerType, TeamProfileType, WorkStatusType}; + +/// Project GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectType { + pub id: Uuid, + pub customer_id: Uuid, + pub name: String, + pub date: NaiveDate, + pub status: WorkStatusType, + pub labor: Option, + pub amount: Option, + pub notes: Option, + pub calendar_event_id: Option, + pub wave_service_id: Option, + /// Account address ID (if using tracked location) + pub account_address_id: Option, + /// Freeform address fields (if not using tracked location) + pub street_address: Option, + pub city: Option, + pub state: Option, + pub zip_code: Option, +} + +#[ComplexObject] +impl ProjectType { + /// Get the customer for this project + async fn customer(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let customer = sqlx::query_as::<_, Customer>( + r#" + SELECT id, created_at, updated_at, name, status, start_date, end_date, + billing_terms, billing_email, wave_customer_id + FROM customers + WHERE id = $1 + "#, + ) + .bind(self.customer_id) + .fetch_optional(&*pool) + .await?; + + Ok(customer.map(CustomerType::from)) + } + + /// Get the account address/location for this project (if tracked) + async fn account_address(&self, ctx: &Context<'_>) -> Result> { + let Some(address_id) = self.account_address_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let address = sqlx::query_as::<_, AccountAddress>( + r#" + SELECT id, created_at, updated_at, account_id, street_address, city, state, zip_code, + is_active, is_primary, name, notes + FROM account_addresses + WHERE id = $1 + "#, + ) + .bind(address_id) + .fetch_optional(&*pool) + .await?; + + Ok(address.map(AccountAddressType::from)) + } + + /// Get team members assigned to this project + async fn team_members(&self, ctx: &Context<'_>) -> Result> { + 
let db = ctx.data::()?; + let pool = db.pool().await; + + let members: Vec = sqlx::query_as::<_, ProjectTeamMember>( + r#" + SELECT id, project_id, team_profile_id, created_at + FROM project_team_members + WHERE project_id = $1 + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(members.into_iter().map(ProjectTeamMemberType::from).collect()) + } + + /// Get scopes for this project (typically ONE active scope) + async fn scopes(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let scopes: Vec = sqlx::query_as::<_, ProjectScope>( + r#" + SELECT id, created_at, updated_at, name, project_id, account_id, account_address_id, description, is_active + FROM project_scopes + WHERE project_id = $1 + ORDER BY is_active DESC, created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(scopes.into_iter().map(ProjectScopeType::from).collect()) + } + + /// Formatted address string (either from account_address or freeform) + async fn formatted_address(&self, ctx: &Context<'_>) -> Result> { + // If we have a tracked account address, fetch and format it + if let Some(address_id) = self.account_address_id { + let db = ctx.data::()?; + let pool = db.pool().await; + + let address = sqlx::query_as::<_, AccountAddress>( + r#" + SELECT id, created_at, updated_at, account_id, street_address, city, state, zip_code, + is_active, is_primary, name, notes + FROM account_addresses + WHERE id = $1 + "#, + ) + .bind(address_id) + .fetch_optional(&*pool) + .await?; + + if let Some(addr) = address { + return Ok(Some(format!( + "{}, {}, {} {}", + addr.address.street_address, addr.address.city, addr.address.state, addr.address.zip_code + ))); + } + } + + // Otherwise use freeform address if available + if let (Some(street), Some(city), Some(state), Some(zip)) = ( + &self.street_address, + &self.city, + &self.state, + &self.zip_code, + ) { + return Ok(Some(format!("{}, {}, {} {}", street, city, state, zip))); + } + + 
Ok(None) + } + + /// Get the linked Wave product name (if wave_service_id is set) + async fn wave_product_name(&self, ctx: &Context<'_>) -> Result> { + let Some(wave_service_id) = &self.wave_service_id else { + return Ok(None); + }; + + let Some(wave) = ctx.data_opt::>() else { + return Ok(None); + }; + + match wave.get_product(wave_service_id).await { + Ok(product) => Ok(Some(product.name)), + Err(_) => Ok(None), + } + } +} + +impl From for ProjectType { + fn from(project: Project) -> Self { + Self { + id: project.base.id, + customer_id: project.customer_id, + name: project.name, + date: project.date, + status: project.status.into(), + labor: project.labor, + amount: project.amount, + notes: project.notes, + calendar_event_id: project.calendar_event_id, + wave_service_id: project.wave_service_id, + account_address_id: project.account_address_id, + street_address: project.street_address, + city: project.city, + state: project.state, + zip_code: project.zip_code, + } + } +} + +/// Project team member assignment type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectTeamMemberType { + pub id: Uuid, + pub project_id: Uuid, + pub team_profile_id: Uuid, +} + +#[ComplexObject] +impl ProjectTeamMemberType { + /// Get the team profile for this assignment + async fn team_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.team_profile_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ProjectTeamMemberType { + fn from(member: ProjectTeamMember) -> Self { + Self { + id: member.id, + project_id: member.project_id, + team_profile_id: member.team_profile_id, + } + } +} + +/// Filter input for project queries +#[derive(InputObject, Default)] +pub 
struct ProjectFilterInput { + /// Filter by date range start (inclusive) + pub date_from: Option, + /// Filter by date range end (inclusive) + pub date_to: Option, + /// Filter by status + pub status: Option, + /// Filter by customer ID - DEPRECATED: use customer_ids + pub customer_id: Option, + /// Filter by multiple customer IDs (for customer portal with multiple assigned customers) + pub customer_ids: Option>, + /// Filter by account address ID + pub account_address_id: Option, + /// Filter by assigned team profile ID (for team member portal) + pub team_profile_id: Option, +} + +/// Paginated project results +#[derive(SimpleObject)] +pub struct ProjectConnection { + /// List of projects + pub items: Vec, + /// Total count of projects matching the filter + pub total_count: i32, + /// Whether there are more items after this page + pub has_next_page: bool, +} diff --git a/src/graphql/types/project_scope_template.rs b/src/graphql/types/project_scope_template.rs new file mode 100644 index 0000000..28d546c --- /dev/null +++ b/src/graphql/types/project_scope_template.rs @@ -0,0 +1,258 @@ +use async_graphql::{ComplexObject, Context, InputObject, Result, SimpleObject}; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{ProjectScopeTemplate, ProjectScopeTemplateCategory, ProjectScopeTemplateTask}; + +/// Project scope template GraphQL type - reusable template for creating project scopes +/// Unlike service templates, project templates don't have frequency on tasks +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectScopeTemplateType { + pub id: Uuid, + pub name: String, + pub description: Option, + pub is_active: bool, +} + +#[ComplexObject] +impl ProjectScopeTemplateType { + /// Get all categories in this template + async fn categories(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let categories: Vec = sqlx::query_as::<_, ProjectScopeTemplateCategory>( + r#" + SELECT id, created_at, 
updated_at, template_id, name, "order" + FROM project_scope_template_categories + WHERE template_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(categories.into_iter().map(ProjectScopeTemplateCategoryType::from).collect()) + } + + /// Count of categories in this template + async fn category_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + "SELECT COUNT(*) FROM project_scope_template_categories WHERE template_id = $1", + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } + + /// Total count of tasks across all categories in this template + async fn task_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM project_scope_template_tasks t + JOIN project_scope_template_categories c ON t.category_id = c.id + WHERE c.template_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } +} + +impl From for ProjectScopeTemplateType { + fn from(template: ProjectScopeTemplate) -> Self { + Self { + id: template.base.id, + name: template.name, + description: template.description, + is_active: template.is_active, + } + } +} + +/// Category within a project scope template (equivalent to Area) +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectScopeTemplateCategoryType { + pub id: Uuid, + pub template_id: Uuid, + pub name: String, + pub order: i32, +} + +#[ComplexObject] +impl ProjectScopeTemplateCategoryType { + /// Get all tasks in this category + async fn tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let tasks: Vec = sqlx::query_as::<_, ProjectScopeTemplateTask>( + r#" + SELECT id, created_at, updated_at, category_id, scope_description, + checklist_description, session_description, "order", estimated_minutes 
+ FROM project_scope_template_tasks + WHERE category_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(tasks.into_iter().map(ProjectScopeTemplateTaskType::from).collect()) + } + + /// Count of tasks in this category + async fn task_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + "SELECT COUNT(*) FROM project_scope_template_tasks WHERE category_id = $1", + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } +} + +impl From for ProjectScopeTemplateCategoryType { + fn from(category: ProjectScopeTemplateCategory) -> Self { + Self { + id: category.base.id, + template_id: category.template_id, + name: category.name, + order: category.order, + } + } +} + +/// Task within a project scope template category +/// Note: No frequency field - project tasks are one-time +#[derive(SimpleObject)] +pub struct ProjectScopeTemplateTaskType { + pub id: Uuid, + pub category_id: Uuid, + /// Customer-facing description + pub scope_description: String, + /// QA/punchlist formatted description + pub checklist_description: String, + /// Team member work instructions + pub session_description: String, + pub order: i32, + pub estimated_minutes: Option, +} + +impl From for ProjectScopeTemplateTaskType { + fn from(task: ProjectScopeTemplateTask) -> Self { + Self { + id: task.base.id, + category_id: task.category_id, + scope_description: task.scope_description, + checklist_description: task.checklist_description, + session_description: task.session_description, + order: task.order, + estimated_minutes: task.estimated_minutes, + } + } +} + +// ==================== INPUT TYPES ==================== + +/// Input for creating a project scope template +#[derive(InputObject)] +pub struct CreateProjectScopeTemplateInput { + pub name: String, + pub description: Option, + /// Optional: create categories with the template + pub categories: Option>, +} 
+ +/// Input for updating a project scope template +#[derive(InputObject)] +pub struct UpdateProjectScopeTemplateInput { + pub name: Option, + pub description: Option, + pub is_active: Option, +} + +/// Input for creating a project scope template category +#[derive(InputObject)] +pub struct CreateProjectScopeTemplateCategoryInput { + pub name: String, + pub order: Option, + /// Optional: create tasks with the category + pub tasks: Option>, +} + +/// Input for updating a project scope template category +#[derive(InputObject)] +pub struct UpdateProjectScopeTemplateCategoryInput { + pub name: Option, + pub order: Option, +} + +/// Input for creating a project scope template task +#[derive(InputObject)] +pub struct CreateProjectScopeTemplateTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for updating a project scope template task +#[derive(InputObject)] +pub struct UpdateProjectScopeTemplateTaskInput { + pub scope_description: Option, + pub checklist_description: Option, + pub session_description: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for importing a project scope template from JSON +#[derive(InputObject)] +pub struct ImportProjectScopeTemplateInput { + pub name: String, + pub description: Option, + pub categories: Vec, + /// If true, replace existing template with same name + pub replace: Option, +} + +/// Category structure for import +#[derive(InputObject)] +pub struct ImportProjectScopeTemplateCategoryInput { + pub name: String, + pub order: i32, + pub tasks: Vec, +} + +/// Task structure for import +#[derive(InputObject)] +pub struct ImportProjectScopeTemplateTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub order: i32, + pub estimated_minutes: Option, +} diff --git a/src/graphql/types/report.rs 
b/src/graphql/types/report.rs new file mode 100644 index 0000000..07fbec8 --- /dev/null +++ b/src/graphql/types/report.rs @@ -0,0 +1,389 @@ +use async_graphql::{ComplexObject, Context, Enum, InputObject, Result, SimpleObject}; +use chrono::NaiveDate; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{Project, Report, ReportProject, ReportService, ReportStatus, Service, TeamProfile}; + +use super::{ProjectType, ServiceType, TeamProfileType}; + +/// GraphQL enum for report status +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum ReportStatusType { + Draft, + Finalized, + Paid, +} + +impl From for ReportStatusType { + fn from(status: ReportStatus) -> Self { + match status { + ReportStatus::Draft => ReportStatusType::Draft, + ReportStatus::Finalized => ReportStatusType::Finalized, + ReportStatus::Paid => ReportStatusType::Paid, + } + } +} + +impl From for ReportStatus { + fn from(status: ReportStatusType) -> Self { + match status { + ReportStatusType::Draft => ReportStatus::Draft, + ReportStatusType::Finalized => ReportStatus::Finalized, + ReportStatusType::Paid => ReportStatus::Paid, + } + } +} + +/// Report GraphQL type - team member pay period report +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ReportType { + pub id: Uuid, + pub team_profile_id: Uuid, + pub start_date: NaiveDate, + pub end_date: NaiveDate, + pub status: ReportStatusType, +} + +#[ComplexObject] +impl ReportType { + /// Get the team profile this report belongs to + async fn team_profile(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.team_profile_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } + + /// Get all services in this report with their 
labor shares + async fn services(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let services: Vec = sqlx::query_as::<_, ReportService>( + r#" + SELECT id, report_id, service_id, labor_share, created_at + FROM report_services + WHERE report_id = $1 + ORDER BY created_at + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(services.into_iter().map(ReportServiceType::from).collect()) + } + + /// Get all projects in this report with their labor shares + async fn projects(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let projects: Vec = sqlx::query_as::<_, ReportProject>( + r#" + SELECT id, report_id, project_id, labor_share, created_at + FROM report_projects + WHERE report_id = $1 + ORDER BY created_at + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(projects.into_iter().map(ReportProjectType::from).collect()) + } + + /// Total labor from all services in this report + async fn services_total(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let total: Option = sqlx::query_scalar( + r#" + SELECT COALESCE(SUM(labor_share), 0) + FROM report_services + WHERE report_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(total.unwrap_or_default()) + } + + /// Total labor from all projects in this report + async fn projects_total(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let total: Option = sqlx::query_scalar( + r#" + SELECT COALESCE(SUM(labor_share), 0) + FROM report_projects + WHERE report_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(total.unwrap_or_default()) + } + + /// Grand total labor (services + projects) + async fn total_labor(&self, ctx: &Context<'_>) -> Result { + let services = self.services_total(ctx).await?; + let projects = self.projects_total(ctx).await?; + Ok(services + 
projects) + } + + /// Count of services in this report + async fn service_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#"SELECT COUNT(*) FROM report_services WHERE report_id = $1"#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count as i32) + } + + /// Count of projects in this report + async fn project_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#"SELECT COUNT(*) FROM report_projects WHERE report_id = $1"#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count as i32) + } +} + +impl From for ReportType { + fn from(report: Report) -> Self { + Self { + id: report.base.id, + team_profile_id: report.team_profile_id, + start_date: report.start_date, + end_date: report.end_date, + status: report.status.into(), + } + } +} + +/// Report-Service entry with snapshot labor share +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ReportServiceType { + pub id: Uuid, + pub report_id: Uuid, + pub service_id: Uuid, + /// Snapshot labor share - calculated and stored when added + pub labor_share: Decimal, +} + +#[ComplexObject] +impl ReportServiceType { + /// Get the full service details + async fn service(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let service = sqlx::query_as::<_, Service>( + r#" + SELECT id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id + FROM services + WHERE id = $1 + "#, + ) + .bind(self.service_id) + .fetch_optional(&*pool) + .await?; + + Ok(service.map(ServiceType::from)) + } +} + +impl From for ReportServiceType { + fn from(rs: ReportService) -> Self { + Self { + id: rs.id, + report_id: rs.report_id, + service_id: rs.service_id, + labor_share: rs.labor_share, + } + } +} + +/// Report-Project entry with snapshot labor share 
+#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ReportProjectType { + pub id: Uuid, + pub report_id: Uuid, + pub project_id: Uuid, + /// Snapshot labor share - calculated and stored when added + pub labor_share: Decimal, +} + +#[ComplexObject] +impl ReportProjectType { + /// Get the full project details + async fn project(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(self.project_id) + .fetch_optional(&*pool) + .await?; + + Ok(project.map(ProjectType::from)) + } +} + +impl From for ReportProjectType { + fn from(rp: ReportProject) -> Self { + Self { + id: rp.id, + report_id: rp.report_id, + project_id: rp.project_id, + labor_share: rp.labor_share, + } + } +} + +/// Filter input for report queries +#[derive(InputObject, Default)] +pub struct ReportFilterInput { + /// Filter by team profile ID + pub team_profile_id: Option, + /// Filter by status + pub status: Option, + /// Filter by reports that overlap with this date + pub date: Option, + /// Filter by reports starting on or after this date + pub start_date_from: Option, + /// Filter by reports ending on or before this date + pub end_date_to: Option, +} + +/// Paginated report results +#[derive(SimpleObject)] +pub struct ReportConnection { + /// List of reports + pub items: Vec, + /// Total count matching the filter + pub total_count: i32, + /// Whether there are more items after this page + pub has_next_page: bool, +} + +/// Eligible service for adding to a report +/// Includes calculated labor share preview +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct EligibleServiceType { + pub service_id: Uuid, + pub date: NaiveDate, + /// Calculated labor share for the 
team member + pub labor_share: Decimal, + /// Total labor rate for the location + pub labor_total: Decimal, + /// Number of eligible team members (excludes dispatch) + pub team_member_count: i32, +} + +#[ComplexObject] +impl EligibleServiceType { + /// Get the full service details + async fn service(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let service = sqlx::query_as::<_, Service>( + r#" + SELECT id, created_at, updated_at, account_id, account_address_id, date, status, notes, calendar_event_id + FROM services + WHERE id = $1 + "#, + ) + .bind(self.service_id) + .fetch_optional(&*pool) + .await?; + + Ok(service.map(ServiceType::from)) + } +} + +/// Eligible project for adding to a report +/// Includes calculated labor share preview +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct EligibleProjectType { + pub project_id: Uuid, + pub date: NaiveDate, + /// Calculated labor share for the team member + pub labor_share: Decimal, + /// Total labor from project + pub labor_total: Decimal, + /// Number of eligible team members (excludes dispatch) + pub team_member_count: i32, +} + +#[ComplexObject] +impl EligibleProjectType { + /// Get the full project details + async fn project(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let project = sqlx::query_as::<_, Project>( + r#" + SELECT id, created_at, updated_at, customer_id, name, date, status, labor, amount, + notes, calendar_event_id, wave_service_id, account_address_id, + street_address, city, state, zip_code + FROM projects + WHERE id = $1 + "#, + ) + .bind(self.project_id) + .fetch_optional(&*pool) + .await?; + + Ok(project.map(ProjectType::from)) + } +} diff --git a/src/graphql/types/service.rs b/src/graphql/types/service.rs new file mode 100644 index 0000000..d847902 --- /dev/null +++ b/src/graphql/types/service.rs @@ -0,0 +1,230 @@ +use async_graphql::{ComplexObject, Context, Enum, InputObject, 
Result, SimpleObject}; +use chrono::NaiveDate; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{Account, AccountAddress, Service, ServiceTeamMember, TeamProfile, WorkStatus}; + +use super::{AccountAddressType, AccountType, TeamProfileType}; + +/// GraphQL enum for work status (services and projects) +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum WorkStatusType { + Scheduled, + InProgress, + Completed, + Cancelled, +} + +impl From for WorkStatusType { + fn from(status: WorkStatus) -> Self { + match status { + WorkStatus::Scheduled => WorkStatusType::Scheduled, + WorkStatus::InProgress => WorkStatusType::InProgress, + WorkStatus::Completed => WorkStatusType::Completed, + WorkStatus::Cancelled => WorkStatusType::Cancelled, + } + } +} + +impl From for WorkStatus { + fn from(status: WorkStatusType) -> Self { + match status { + WorkStatusType::Scheduled => WorkStatus::Scheduled, + WorkStatusType::InProgress => WorkStatus::InProgress, + WorkStatusType::Completed => WorkStatus::Completed, + WorkStatusType::Cancelled => WorkStatus::Cancelled, + } + } +} + +/// Reusable pagination input +#[derive(InputObject, Default)] +pub struct PaginationInput { + /// Number of items to skip (default: 0) + pub offset: Option, + /// Number of items to return (default: 50, max: 100) + pub limit: Option, +} + +impl PaginationInput { + pub fn offset(&self) -> i64 { + self.offset.unwrap_or(0).max(0) as i64 + } + + pub fn limit(&self) -> i64 { + self.limit.unwrap_or(50).clamp(1, 100) as i64 + } +} + +/// Service GraphQL type +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceType { + pub id: Uuid, + pub account_id: Uuid, + pub account_address_id: Uuid, + pub date: NaiveDate, + pub status: WorkStatusType, + pub notes: Option, + pub calendar_event_id: Option, +} + +#[ComplexObject] +impl ServiceType { + /// Get the account for this service + async fn account(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + 
let account = sqlx::query_as::<_, Account>(
            r#"
            SELECT id, created_at, updated_at, customer_id, name, status, start_date, end_date
            FROM accounts
            WHERE id = $1
            "#,
        )
        .bind(self.account_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(account.map(AccountType::from))
    }

    /// Get the address/location for this service
    async fn account_address(&self, ctx: &Context<'_>) -> Result<Option<AccountAddressType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let address = sqlx::query_as::<_, AccountAddress>(
            r#"
            SELECT id, created_at, updated_at, account_id, street_address, city, state, zip_code,
                   is_active, is_primary, name, notes
            FROM account_addresses
            WHERE id = $1
            "#,
        )
        .bind(self.account_address_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(address.map(AccountAddressType::from))
    }

    /// Get team members assigned to this service
    async fn team_members(&self, ctx: &Context<'_>) -> Result<Vec<ServiceTeamMemberType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let members: Vec<ServiceTeamMember> = sqlx::query_as::<_, ServiceTeamMember>(
            r#"
            SELECT id, service_id, team_profile_id, created_at
            FROM service_team_members
            WHERE service_id = $1
            "#,
        )
        .bind(self.id)
        .fetch_all(&*pool)
        .await?;

        Ok(members.into_iter().map(ServiceTeamMemberType::from).collect())
    }
}

impl From<Service> for ServiceType {
    fn from(service: Service) -> Self {
        Self {
            id: service.base.id,
            account_id: service.account_id,
            account_address_id: service.account_address_id,
            date: service.date,
            status: service.status.into(),
            notes: service.notes,
            calendar_event_id: service.calendar_event_id,
        }
    }
}

/// Service team member assignment type
#[derive(SimpleObject)]
#[graphql(complex)]
pub struct ServiceTeamMemberType {
    pub id: Uuid,
    pub service_id: Uuid,
    pub team_profile_id: Uuid,
}

#[ComplexObject]
impl ServiceTeamMemberType {
    /// Get the team profile for this assignment
    async fn team_profile(&self, ctx: &Context<'_>) -> Result<Option<TeamProfileType>> {
        let db = ctx.data::<Database>()?;
        let pool = db.pool().await;

        let profile = sqlx::query_as::<_, TeamProfile>(
            r#"
            SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes
            FROM team_profiles
            WHERE id = $1
            "#,
        )
        .bind(self.team_profile_id)
        .fetch_optional(&*pool)
        .await?;

        Ok(profile.map(TeamProfileType::from))
    }
}

impl From<ServiceTeamMember> for ServiceTeamMemberType {
    fn from(member: ServiceTeamMember) -> Self {
        Self {
            id: member.id,
            service_id: member.service_id,
            team_profile_id: member.team_profile_id,
        }
    }
}

/// Filter input for service queries
#[derive(InputObject, Default)]
pub struct ServiceFilterInput {
    /// Filter by date range start (inclusive)
    pub date_from: Option<NaiveDate>,
    /// Filter by date range end (inclusive)
    pub date_to: Option<NaiveDate>,
    /// Filter by status
    // NOTE(review): the generic parameter was lost in extraction; reconstructed as the
    // GraphQL status enum type — confirm against the enum defined for Service.status.
    pub status: Option<ServiceStatusType>,
    /// Filter by account ID
    pub account_id: Option<Uuid>,
    /// Filter by account address ID
    pub account_address_id: Option<Uuid>,
    /// Filter by assigned team profile ID (for team member portal)
    pub team_profile_id: Option<Uuid>,
    /// Filter by customer ID (for customer portal, via account) - DEPRECATED: use customer_ids
    pub customer_id: Option<Uuid>,
    /// Filter by multiple customer IDs (for customer portal with multiple assigned customers)
    pub customer_ids: Option<Vec<Uuid>>,
}

/// Paginated service results
#[derive(SimpleObject)]
pub struct ServiceConnection {
    /// List of services
    pub items: Vec<ServiceType>,
    /// Total count of services matching the filter
    pub total_count: i32,
    /// Whether there are more items after this page
    pub has_next_page: bool,
}

/// Services grouped by assignment status for bulk assignment page
#[derive(SimpleObject)]
pub struct ServicesForAssignmentResult {
    /// Services with no team members assigned
    pub unassigned: Vec<ServiceType>,
    /// Services with dispatch (admin) assigned but no other team members
    pub ready_to_assign: Vec<ServiceType>,
    /// Services with dispatch and other team members assigned
    pub assigned: Vec<ServiceType>,
}

diff --git
a/src/graphql/types/service_scope_template.rs b/src/graphql/types/service_scope_template.rs new file mode 100644 index 0000000..eb90579 --- /dev/null +++ b/src/graphql/types/service_scope_template.rs @@ -0,0 +1,263 @@ +use async_graphql::{ComplexObject, Context, InputObject, Result, SimpleObject}; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{ServiceScopeTemplate, ServiceScopeTemplateArea, ServiceScopeTemplateTask}; + +use super::TaskFrequencyType; + +/// Service scope template GraphQL type - reusable template for creating service scopes +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceScopeTemplateType { + pub id: Uuid, + pub name: String, + pub description: Option, + pub is_active: bool, +} + +#[ComplexObject] +impl ServiceScopeTemplateType { + /// Get all areas in this template + async fn areas(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let areas: Vec = sqlx::query_as::<_, ServiceScopeTemplateArea>( + r#" + SELECT id, created_at, updated_at, template_id, name, "order" + FROM service_scope_template_areas + WHERE template_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(areas.into_iter().map(ServiceScopeTemplateAreaType::from).collect()) + } + + /// Count of areas in this template + async fn area_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + "SELECT COUNT(*) FROM service_scope_template_areas WHERE template_id = $1", + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } + + /// Total count of tasks across all areas in this template + async fn task_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + r#" + SELECT COUNT(*) + FROM service_scope_template_tasks t + JOIN service_scope_template_areas a ON t.area_id = a.id + WHERE 
a.template_id = $1 + "#, + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } +} + +impl From for ServiceScopeTemplateType { + fn from(template: ServiceScopeTemplate) -> Self { + Self { + id: template.base.id, + name: template.name, + description: template.description, + is_active: template.is_active, + } + } +} + +/// Area within a service scope template +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceScopeTemplateAreaType { + pub id: Uuid, + pub template_id: Uuid, + pub name: String, + pub order: i32, +} + +#[ComplexObject] +impl ServiceScopeTemplateAreaType { + /// Get all tasks in this area + async fn tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let tasks: Vec = sqlx::query_as::<_, ServiceScopeTemplateTask>( + r#" + SELECT id, created_at, updated_at, area_id, scope_description, + checklist_description, session_description, frequency, "order", estimated_minutes + FROM service_scope_template_tasks + WHERE area_id = $1 + ORDER BY "order" ASC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(tasks.into_iter().map(ServiceScopeTemplateTaskType::from).collect()) + } + + /// Count of tasks in this area + async fn task_count(&self, ctx: &Context<'_>) -> Result { + let db = ctx.data::()?; + let pool = db.pool().await; + + let count: i64 = sqlx::query_scalar( + "SELECT COUNT(*) FROM service_scope_template_tasks WHERE area_id = $1", + ) + .bind(self.id) + .fetch_one(&*pool) + .await?; + + Ok(count) + } +} + +impl From for ServiceScopeTemplateAreaType { + fn from(area: ServiceScopeTemplateArea) -> Self { + Self { + id: area.base.id, + template_id: area.template_id, + name: area.name, + order: area.order, + } + } +} + +/// Task within a service scope template area +#[derive(SimpleObject)] +pub struct ServiceScopeTemplateTaskType { + pub id: Uuid, + pub area_id: Uuid, + /// Customer-facing description + pub scope_description: String, + /// QA/punchlist formatted 
description + pub checklist_description: String, + /// Team member work instructions + pub session_description: String, + pub frequency: TaskFrequencyType, + pub order: i32, + pub estimated_minutes: Option, +} + +impl From for ServiceScopeTemplateTaskType { + fn from(task: ServiceScopeTemplateTask) -> Self { + Self { + id: task.base.id, + area_id: task.area_id, + scope_description: task.scope_description, + checklist_description: task.checklist_description, + session_description: task.session_description, + frequency: task.frequency.into(), + order: task.order, + estimated_minutes: task.estimated_minutes, + } + } +} + +// ==================== INPUT TYPES ==================== + +/// Input for creating a service scope template +#[derive(InputObject)] +pub struct CreateServiceScopeTemplateInput { + pub name: String, + pub description: Option, + /// Optional: create areas with the template + pub areas: Option>, +} + +/// Input for updating a service scope template +#[derive(InputObject)] +pub struct UpdateServiceScopeTemplateInput { + pub name: Option, + pub description: Option, + pub is_active: Option, +} + +/// Input for creating a service scope template area +#[derive(InputObject)] +pub struct CreateServiceScopeTemplateAreaInput { + pub name: String, + pub order: Option, + /// Optional: create tasks with the area + pub tasks: Option>, +} + +/// Input for updating a service scope template area +#[derive(InputObject)] +pub struct UpdateServiceScopeTemplateAreaInput { + pub name: Option, + pub order: Option, +} + +/// Input for creating a service scope template task +#[derive(InputObject)] +pub struct CreateServiceScopeTemplateTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub frequency: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for updating a service scope template task +#[derive(InputObject)] +pub struct UpdateServiceScopeTemplateTaskInput { + pub 
scope_description: Option, + pub checklist_description: Option, + pub session_description: Option, + pub frequency: Option, + pub order: Option, + pub estimated_minutes: Option, +} + +/// Input for importing a service scope template from JSON +#[derive(InputObject)] +pub struct ImportServiceScopeTemplateInput { + pub name: String, + pub description: Option, + pub areas: Vec, + /// If true, replace existing template with same name + pub replace: Option, +} + +/// Area structure for import +#[derive(InputObject)] +pub struct ImportServiceScopeTemplateAreaInput { + pub name: String, + pub order: i32, + pub tasks: Vec, +} + +/// Task structure for import +#[derive(InputObject)] +pub struct ImportServiceScopeTemplateTaskInput { + pub scope_description: String, + pub checklist_description: Option, + pub session_description: Option, + pub frequency: Option, + pub order: i32, + pub estimated_minutes: Option, +} diff --git a/src/graphql/types/session.rs b/src/graphql/types/session.rs new file mode 100644 index 0000000..4719ac0 --- /dev/null +++ b/src/graphql/types/session.rs @@ -0,0 +1,873 @@ +//! Session GraphQL types +//! +//! Types for service and project sessions, including notes, images, videos, +//! and task completions. 
+ +use async_graphql::{ComplexObject, Context, Result, SimpleObject}; +use chrono::{DateTime, NaiveDate, Utc}; +use uuid::Uuid; + +use crate::db::Database; +use crate::graphql::mutations::project::ProjectScopeTaskType; +use crate::models::{ + ProjectSession, ProjectSessionImage, ProjectSessionNote, ProjectSessionVideo, + ProjectScopeTask, ServiceSession, ServiceSessionImage, + ServiceSessionNote, ServiceSessionVideo, ServiceScopeTask, + TeamProfile, +}; + +use super::{ServiceScopeTaskType, TeamProfileType}; + +// ==================== SERVICE SESSION TYPES ==================== + +/// Service session - work session for a scheduled service +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceSessionType { + pub id: Uuid, + pub service_id: Uuid, + pub account_id: Uuid, + pub account_address_id: Uuid, + pub customer_id: Uuid, + pub scope_id: Option, + pub start: DateTime, + pub end: Option>, + pub date: NaiveDate, + pub created_by_id: Uuid, + pub closed_by_id: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ServiceSessionType { + /// Whether the session is still active (not closed) + async fn is_active(&self) -> bool { + self.end.is_none() + } + + /// Duration in seconds (if session is closed) + async fn duration_seconds(&self) -> Option { + self.end.map(|end| (end - self.start).num_seconds()) + } + + /// Team profile who created/started the session + async fn created_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.created_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } + + /// Team profile who closed the session (if closed) + async fn closed_by(&self, ctx: &Context<'_>) -> Result> { + let Some(closed_by_id) = 
self.closed_by_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(closed_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } + + /// Notes attached to this session + async fn notes(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let notes: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, content, author_id, internal + FROM service_session_notes + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(notes.into_iter().map(ServiceSessionNoteType::from).collect()) + } + + /// Images attached to this session + async fn images(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let images: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, title, image, thumbnail, + content_type, width, height, + uploaded_by_team_profile_id AS uploaded_by_id, + notes, internal + FROM service_session_images + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(images.into_iter().map(ServiceSessionImageType::from).collect()) + } + + /// Videos attached to this session + async fn videos(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let videos: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, title, video, thumbnail, + content_type, width, height, + uploaded_by_team_profile_id AS uploaded_by_id, + notes, internal, duration_seconds, file_size_bytes + FROM service_session_videos + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + 
.fetch_all(&*pool) + .await?; + + Ok(videos.into_iter().map(ServiceSessionVideoType::from).collect()) + } + + /// Task completions recorded in this session + async fn completed_tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Task completions are linked via service_session_completed_tasks join table + // Query directly into a tuple since we need session_id from the join + let rows: Vec<(Uuid, Uuid, Uuid, DateTime, Option)> = sqlx::query_as( + r#" + SELECT tc.id, tc.task_id, tc.completed_by_id, tc.completed_at, tc.notes + FROM service_task_completions tc + JOIN service_session_completed_tasks sct ON sct.task_completion_id = tc.id + WHERE sct.session_id = $1 + ORDER BY tc.completed_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(rows + .into_iter() + .map(|(id, task_id, completed_by_id, completed_at, notes)| { + ServiceTaskCompletionType { + id, + session_id: self.id, + task_id, + completed_by_id, + completed_at, + notes, + } + }) + .collect()) + } +} + +impl From for ServiceSessionType { + fn from(session: ServiceSession) -> Self { + Self { + id: session.base.id, + service_id: session.service_id, + account_id: session.account_id, + account_address_id: session.account_address_id, + customer_id: session.customer_id, + scope_id: session.scope_id, + start: session.start, + end: session.end, + date: session.date, + created_by_id: session.created_by_id, + closed_by_id: session.closed_by_id, + created_at: session.base.created_at, + updated_at: session.base.updated_at, + } + } +} + +/// Note attached to a service session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceSessionNoteType { + pub id: Uuid, + pub session_id: Uuid, + pub content: String, + pub author_id: Uuid, + pub internal: bool, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ServiceSessionNoteType { + /// Author of the note + async fn author(&self, ctx: &Context<'_>) -> 
Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.author_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ServiceSessionNoteType { + fn from(note: ServiceSessionNote) -> Self { + Self { + id: note.base.id, + session_id: note.session_id, + content: note.note.content, + author_id: note.note.author_id, + internal: note.note.internal, + created_at: note.base.created_at, + updated_at: note.base.updated_at, + } + } +} + +/// Image attached to a service session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceSessionImageType { + pub id: Uuid, + pub session_id: Uuid, + pub title: Option, + /// Path to original image (use with /api/media/ prefix) + pub image: String, + /// Path to thumbnail (use with /api/media/ prefix) + pub thumbnail: Option, + pub content_type: String, + pub width: Option, + pub height: Option, + pub uploaded_by_id: Uuid, + pub notes: Option, + pub internal: bool, + pub created_at: DateTime, +} + +#[ComplexObject] +impl ServiceSessionImageType { + /// User who uploaded the image + async fn uploaded_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.uploaded_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ServiceSessionImageType { + fn from(image: ServiceSessionImage) -> Self { + Self { + id: image.base.id, + session_id: image.session_id, + title: image.title, + image: image.image, + thumbnail: image.thumbnail, + content_type: 
image.media.content_type, + width: image.media.width, + height: image.media.height, + uploaded_by_id: image.media.uploaded_by_id, + notes: image.media.notes, + internal: image.media.internal, + created_at: image.base.created_at, + } + } +} + +/// Video attached to a service session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ServiceSessionVideoType { + pub id: Uuid, + pub session_id: Uuid, + pub title: Option, + /// Path to video file (use with /api/media/ prefix) + pub video: String, + /// Path to thumbnail (use with /api/media/ prefix) + pub thumbnail: Option, + pub content_type: String, + pub width: Option, + pub height: Option, + pub duration_seconds: Option, + pub file_size_bytes: Option, + pub uploaded_by_id: Uuid, + pub notes: Option, + pub internal: bool, + pub created_at: DateTime, +} + +#[ComplexObject] +impl ServiceSessionVideoType { + /// User who uploaded the video + async fn uploaded_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.uploaded_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ServiceSessionVideoType { + fn from(video: ServiceSessionVideo) -> Self { + Self { + id: video.base.id, + session_id: video.session_id, + title: video.title, + video: video.video, + thumbnail: video.thumbnail, + content_type: video.media.content_type, + width: video.media.width, + height: video.media.height, + duration_seconds: video.duration_seconds, + file_size_bytes: video.file_size_bytes, + uploaded_by_id: video.media.uploaded_by_id, + notes: video.media.notes, + internal: video.media.internal, + created_at: video.base.created_at, + } + } +} + +/// Task completion record for a service session +#[derive(SimpleObject)] 
+#[graphql(complex)] +pub struct ServiceTaskCompletionType { + pub id: Uuid, + pub session_id: Uuid, + pub task_id: Uuid, + pub completed_by_id: Uuid, + pub completed_at: DateTime, + pub notes: Option, +} + +#[ComplexObject] +impl ServiceTaskCompletionType { + /// The task that was completed + async fn task(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let task = sqlx::query_as::<_, ServiceScopeTask>( + r#" + SELECT id, created_at, updated_at, area_id, scope_description, checklist_description, + session_description, frequency, "order", estimated_minutes + FROM service_scope_tasks + WHERE id = $1 + "#, + ) + .bind(self.task_id) + .fetch_optional(&*pool) + .await?; + + Ok(task.map(ServiceScopeTaskType::from)) + } + + /// Team member who completed the task + async fn completed_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.completed_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + + +// ==================== PROJECT SESSION TYPES ==================== + +/// Project session - work session for a scheduled project +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectSessionType { + pub id: Uuid, + pub project_id: Uuid, + pub account_id: Option, + pub account_address_id: Option, + pub customer_id: Uuid, + pub scope_id: Option, + pub start: DateTime, + pub end: Option>, + pub date: NaiveDate, + pub created_by_id: Uuid, + pub closed_by_id: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ProjectSessionType { + /// Whether the session is still active (not closed) + async fn is_active(&self) -> bool { + self.end.is_none() + } + + /// Duration in seconds (if session 
is closed) + async fn duration_seconds(&self) -> Option { + self.end.map(|end| (end - self.start).num_seconds()) + } + + /// Team profile who created/started the session + async fn created_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.created_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } + + /// Team profile who closed the session (if closed) + async fn closed_by(&self, ctx: &Context<'_>) -> Result> { + let Some(closed_by_id) = self.closed_by_id else { + return Ok(None); + }; + + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(closed_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } + + /// Notes attached to this session + async fn notes(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let notes: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, content, author_id, internal + FROM project_session_notes + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(notes.into_iter().map(ProjectSessionNoteType::from).collect()) + } + + /// Images attached to this session + async fn images(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let images: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, title, image, thumbnail, + content_type, width, height, + uploaded_by_team_profile_id AS uploaded_by_id, + notes, 
internal + FROM project_session_images + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(images.into_iter().map(ProjectSessionImageType::from).collect()) + } + + /// Videos attached to this session + async fn videos(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let videos: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, session_id, title, video, thumbnail, + content_type, width, height, + uploaded_by_team_profile_id AS uploaded_by_id, + notes, internal, duration_seconds, file_size_bytes + FROM project_session_videos + WHERE session_id = $1 + ORDER BY created_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(videos.into_iter().map(ProjectSessionVideoType::from).collect()) + } + + /// Task completions recorded in this session + async fn completed_tasks(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + // Task completions are linked via project_session_completed_tasks join table + // Query directly into a tuple since we need session_id from the join + let rows: Vec<(Uuid, Uuid, Uuid, DateTime, Option)> = sqlx::query_as( + r#" + SELECT tc.id, tc.task_id, tc.completed_by_id, tc.completed_at, tc.notes + FROM project_task_completions tc + JOIN project_session_completed_tasks pct ON pct.task_completion_id = tc.id + WHERE pct.session_id = $1 + ORDER BY tc.completed_at DESC + "#, + ) + .bind(self.id) + .fetch_all(&*pool) + .await?; + + Ok(rows + .into_iter() + .map(|(id, task_id, completed_by_id, completed_at, notes)| { + ProjectTaskCompletionType { + id, + session_id: self.id, + task_id, + completed_by_id, + completed_at, + notes, + } + }) + .collect()) + } +} + +impl From for ProjectSessionType { + fn from(session: ProjectSession) -> Self { + Self { + id: session.base.id, + project_id: session.project_id, + account_id: session.account_id, + account_address_id: 
session.account_address_id, + customer_id: session.customer_id, + scope_id: session.scope_id, + start: session.start, + end: session.end, + date: session.date, + created_by_id: session.created_by_id, + closed_by_id: session.closed_by_id, + created_at: session.base.created_at, + updated_at: session.base.updated_at, + } + } +} + +/// Note attached to a project session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectSessionNoteType { + pub id: Uuid, + pub session_id: Uuid, + pub content: String, + pub author_id: Uuid, + pub internal: bool, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[ComplexObject] +impl ProjectSessionNoteType { + /// Author of the note + async fn author(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.author_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ProjectSessionNoteType { + fn from(note: ProjectSessionNote) -> Self { + Self { + id: note.base.id, + session_id: note.session_id, + content: note.note.content, + author_id: note.note.author_id, + internal: note.note.internal, + created_at: note.base.created_at, + updated_at: note.base.updated_at, + } + } +} + +/// Image attached to a project session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectSessionImageType { + pub id: Uuid, + pub session_id: Uuid, + pub title: Option, + /// Path to original image (use with /api/media/ prefix) + pub image: String, + /// Path to thumbnail (use with /api/media/ prefix) + pub thumbnail: Option, + pub content_type: String, + pub width: Option, + pub height: Option, + pub uploaded_by_id: Uuid, + pub notes: Option, + pub internal: bool, + pub created_at: DateTime, +} + +#[ComplexObject] +impl 
ProjectSessionImageType { + /// User who uploaded the image + async fn uploaded_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.uploaded_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ProjectSessionImageType { + fn from(image: ProjectSessionImage) -> Self { + Self { + id: image.base.id, + session_id: image.session_id, + title: image.title, + image: image.image, + thumbnail: image.thumbnail, + content_type: image.media.content_type, + width: image.media.width, + height: image.media.height, + uploaded_by_id: image.media.uploaded_by_id, + notes: image.media.notes, + internal: image.media.internal, + created_at: image.base.created_at, + } + } +} + +/// Video attached to a project session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectSessionVideoType { + pub id: Uuid, + pub session_id: Uuid, + pub title: Option, + /// Path to video file (use with /api/media/ prefix) + pub video: String, + /// Path to thumbnail (use with /api/media/ prefix) + pub thumbnail: Option, + pub content_type: String, + pub width: Option, + pub height: Option, + pub duration_seconds: Option, + pub file_size_bytes: Option, + pub uploaded_by_id: Uuid, + pub notes: Option, + pub internal: bool, + pub created_at: DateTime, +} + +#[ComplexObject] +impl ProjectSessionVideoType { + /// User who uploaded the video + async fn uploaded_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.uploaded_by_id) + 
.fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + +impl From for ProjectSessionVideoType { + fn from(video: ProjectSessionVideo) -> Self { + Self { + id: video.base.id, + session_id: video.session_id, + title: video.title, + video: video.video, + thumbnail: video.thumbnail, + content_type: video.media.content_type, + width: video.media.width, + height: video.media.height, + duration_seconds: video.duration_seconds, + file_size_bytes: video.file_size_bytes, + uploaded_by_id: video.media.uploaded_by_id, + notes: video.media.notes, + internal: video.media.internal, + created_at: video.base.created_at, + } + } +} + +/// Task completion record for a project session +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct ProjectTaskCompletionType { + pub id: Uuid, + pub session_id: Uuid, + pub task_id: Uuid, + pub completed_by_id: Uuid, + pub completed_at: DateTime, + pub notes: Option, +} + +#[ComplexObject] +impl ProjectTaskCompletionType { + /// The task that was completed + async fn task(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let task = sqlx::query_as::<_, ProjectScopeTask>( + r#" + SELECT id, created_at, updated_at, category_id, scope_description, checklist_description, + session_description, "order", estimated_minutes + FROM project_scope_tasks + WHERE id = $1 + "#, + ) + .bind(self.task_id) + .fetch_optional(&*pool) + .await?; + + Ok(task.map(ProjectScopeTaskType::from)) + } + + /// Team member who completed the task + async fn completed_by(&self, ctx: &Context<'_>) -> Result> { + let db = ctx.data::()?; + let pool = db.pool().await; + + let profile = sqlx::query_as::<_, TeamProfile>( + r#" + SELECT id, created_at, updated_at, first_name, last_name, phone, email, role, status, notes + FROM team_profiles + WHERE id = $1 + "#, + ) + .bind(self.completed_by_id) + .fetch_optional(&*pool) + .await?; + + Ok(profile.map(TeamProfileType::from)) + } +} + diff --git 
a/src/graphql/types/wave.rs b/src/graphql/types/wave.rs new file mode 100644 index 0000000..4c56032 --- /dev/null +++ b/src/graphql/types/wave.rs @@ -0,0 +1,451 @@ +//! Wave Accounting GraphQL types +//! +//! Types for interacting with Wave's products, customers, and invoices. + +use async_graphql::{InputObject, SimpleObject}; +use rust_decimal::Decimal; + +use crate::services::wave; + +// ============================================================================ +// Wave Product Types +// ============================================================================ + +/// Wave product from the accounting system +#[derive(SimpleObject, Clone)] +pub struct WaveProductType { + /// Wave product ID + pub id: String, + /// Product name + pub name: String, + /// Product description + pub description: Option, + /// Default unit price + pub unit_price: f64, + /// Whether this product is sold + pub is_sold: bool, + /// Whether this product is archived + pub is_archived: bool, + /// Default sales taxes applied to this product + pub taxes: Vec, + /// Income account this product is linked to + pub income_account: Option, +} + +impl From for WaveProductType { + fn from(p: wave::WaveProduct) -> Self { + Self { + id: p.id, + name: p.name, + description: p.description, + unit_price: p.unit_price, + is_sold: p.is_sold, + is_archived: p.is_archived, + taxes: p.default_sales_taxes.into_iter().map(WaveTaxType::from).collect(), + income_account: p.income_account.map(WaveAccountType::from), + } + } +} + +/// Wave tax rate +#[derive(SimpleObject, Clone)] +pub struct WaveTaxType { + pub id: String, + pub name: String, + pub rate: f64, +} + +impl From for WaveTaxType { + fn from(t: wave::WaveTax) -> Self { + Self { + id: t.id, + name: t.name, + rate: t.rate, + } + } +} + +/// Wave account (for income account references) +#[derive(SimpleObject, Clone)] +pub struct WaveAccountType { + pub id: String, + pub name: String, +} + +impl From for WaveAccountType { + fn from(a: wave::WaveAccount) 
-> Self { + Self { + id: a.id, + name: a.name, + } + } +} + +// ============================================================================ +// Wave Customer Types +// ============================================================================ + +/// Wave customer from the accounting system +#[derive(SimpleObject, Clone)] +pub struct WaveCustomerType { + /// Wave customer ID + pub id: String, + /// Customer name + pub name: String, + /// Customer email + pub email: Option, + /// Customer address + pub address: Option, + /// Currency code + pub currency_code: Option, +} + +impl From for WaveCustomerType { + fn from(c: wave::WaveCustomer) -> Self { + Self { + id: c.id, + name: c.name, + email: c.email, + address: c.address.map(WaveAddressType::from), + currency_code: c.currency.map(|cur| cur.code), + } + } +} + +/// Wave customer address +#[derive(SimpleObject, Clone)] +pub struct WaveAddressType { + pub address_line1: Option, + pub address_line2: Option, + pub city: Option, + pub province_code: Option, + pub province_name: Option, + pub postal_code: Option, +} + +impl From for WaveAddressType { + fn from(a: wave::WaveAddress) -> Self { + Self { + address_line1: a.address_line1, + address_line2: a.address_line2, + city: a.city, + province_code: a.province.as_ref().map(|p| p.code.clone()), + province_name: a.province.map(|p| p.name), + postal_code: a.postal_code, + } + } +} + +// ============================================================================ +// Wave Invoice Types +// ============================================================================ + +/// Wave invoice from the accounting system +#[derive(SimpleObject, Clone)] +pub struct WaveInvoiceType { + /// Wave invoice ID + pub id: String, + /// Invoice number + pub invoice_number: String, + /// Invoice date (YYYY-MM-DD) + pub invoice_date: String, + /// Due date (YYYY-MM-DD) + pub due_date: Option, + /// Status (DRAFT, SENT, VIEWED, PAID, OVERDUE) + pub status: String, + /// Customer info + pub 
customer: WaveInvoiceCustomerType, + /// Invoice line items + pub items: Vec, + /// Subtotal before taxes + pub subtotal: WaveMoneyType, + /// Total including taxes + pub total: WaveMoneyType, + /// Amount still due + pub amount_due: WaveMoneyType, + /// Amount already paid + pub amount_paid: WaveMoneyType, + /// URL to download the PDF + pub pdf_url: Option, +} + +impl From for WaveInvoiceType { + fn from(i: wave::WaveInvoice) -> Self { + Self { + id: i.id, + invoice_number: i.invoice_number, + invoice_date: i.invoice_date, + due_date: i.due_date, + status: i.status, + customer: WaveInvoiceCustomerType::from(i.customer), + items: i.items.into_iter().map(WaveInvoiceItemType::from).collect(), + subtotal: WaveMoneyType::from(i.subtotal), + total: WaveMoneyType::from(i.total), + amount_due: WaveMoneyType::from(i.amount_due), + amount_paid: WaveMoneyType::from(i.amount_paid), + pdf_url: i.pdf_url, + } + } +} + +/// Wave invoice customer reference +#[derive(SimpleObject, Clone)] +pub struct WaveInvoiceCustomerType { + pub id: String, + pub name: String, +} + +impl From for WaveInvoiceCustomerType { + fn from(c: wave::WaveInvoiceCustomer) -> Self { + Self { + id: c.id, + name: c.name, + } + } +} + +/// Wave invoice line item +#[derive(SimpleObject, Clone)] +pub struct WaveInvoiceItemType { + /// Item description + pub description: String, + /// Quantity + pub quantity: f64, + /// Unit price + pub unit_price: f64, + /// Total for this line item + pub total: WaveMoneyType, +} + +impl From for WaveInvoiceItemType { + fn from(item: wave::WaveInvoiceItem) -> Self { + Self { + description: item.description, + quantity: item.quantity, + unit_price: item.unit_price, + total: WaveMoneyType::from(item.total), + } + } +} + +/// Wave money type with value and currency +#[derive(SimpleObject, Clone)] +pub struct WaveMoneyType { + /// Amount as string (for precision) + pub value: String, + /// Currency code (e.g., "USD", "CAD") + pub currency_code: String, + /// Currency symbol (e.g., 
"$") + pub currency_symbol: String, +} + +impl From for WaveMoneyType { + fn from(m: wave::WaveMoney) -> Self { + Self { + value: m.value, + currency_code: m.currency.code, + currency_symbol: m.currency.symbol, + } + } +} + +// ============================================================================ +// Input Types for Wave Mutations +// ============================================================================ + +/// Input for creating a Wave invoice from a Nexus invoice +#[derive(InputObject)] +pub struct CreateWaveInvoiceInput { + /// Nexus invoice ID to sync to Wave + pub invoice_id: uuid::Uuid, + /// Optional memo/notes for the invoice + pub memo: Option, + /// Invoice date (YYYY-MM-DD). Defaults to Nexus invoice start_date. + pub invoice_date: Option, + /// Due date (YYYY-MM-DD). If not set, Wave uses customer's default terms. + pub due_date: Option, +} + +/// Input for creating a Wave customer from a Nexus customer +#[derive(InputObject)] +pub struct CreateWaveCustomerInput { + /// Nexus customer ID to sync to Wave + pub customer_id: uuid::Uuid, + /// Override email (uses Nexus billing_email if not set) + pub email: Option, + /// Currency code (defaults to "USD") + pub currency: Option, +} + +/// Result of Wave invoice creation +#[derive(SimpleObject)] +pub struct CreateWaveInvoiceResult { + /// Whether the operation succeeded + pub success: bool, + /// Error message if failed + pub error: Option, + /// The created Wave invoice + pub invoice: Option, + /// Updated Nexus invoice with wave_invoice_id + pub nexus_invoice: Option, +} + +/// Result of Wave customer creation +#[derive(SimpleObject)] +pub struct CreateWaveCustomerResult { + /// Whether the operation succeeded + pub success: bool, + /// Error message if failed + pub error: Option, + /// The created Wave customer + pub customer: Option, + /// Updated Nexus customer with wave_customer_id + pub nexus_customer: Option, +} + +/// Readiness check for creating a Wave invoice +#[derive(SimpleObject)] 
+pub struct WaveInvoiceReadiness { + /// Whether the invoice can be synced to Wave + pub ready: bool, + /// List of issues preventing sync (empty if ready) + pub issues: Vec, + /// The customer's Wave customer ID (if linked) + pub wave_customer_id: Option, + /// Items that can be synced (have wave_service_id) + pub ready_item_count: i32, + /// Items missing wave_service_id + pub missing_wave_link_count: i32, + /// Total expected amount + pub total_amount: Decimal, +} + +// ============================================================================ +// Pagination Types +// ============================================================================ + +/// Pagination info for Wave queries +#[derive(SimpleObject, Clone)] +pub struct WavePageInfoType { + pub current_page: i32, + pub total_pages: i32, + pub total_count: i32, +} + +/// Paginated list of Wave invoices +#[derive(SimpleObject, Clone)] +pub struct WaveInvoiceConnectionType { + pub page_info: WavePageInfoType, + pub invoices: Vec, +} + +/// Lighter Wave invoice type for list views (no items array) +#[derive(SimpleObject, Clone)] +pub struct WaveInvoiceListItemType { + pub id: String, + pub invoice_number: String, + pub invoice_date: String, + pub due_date: Option, + pub status: String, + pub customer: WaveInvoiceCustomerType, + pub subtotal: WaveMoneyType, + pub total: WaveMoneyType, + pub amount_due: WaveMoneyType, + pub amount_paid: WaveMoneyType, +} + +impl From for WaveInvoiceListItemType { + fn from(i: wave::WaveInvoiceListItem) -> Self { + Self { + id: i.id, + invoice_number: i.invoice_number, + invoice_date: i.invoice_date, + due_date: i.due_date, + status: i.status, + customer: WaveInvoiceCustomerType { + id: i.customer.id, + name: i.customer.name, + }, + subtotal: WaveMoneyType::from(i.subtotal), + total: WaveMoneyType::from(i.total), + amount_due: WaveMoneyType::from(i.amount_due), + amount_paid: WaveMoneyType::from(i.amount_paid), + } + } +} + +// 
============================================================================ +// Generic Mutation Result +// ============================================================================ + +/// Generic result for Wave mutations +#[derive(SimpleObject)] +#[graphql(concrete(name = "WaveInvoiceMutationResult", params(WaveInvoiceType)))] +#[graphql(concrete(name = "WaveProductMutationResult", params(WaveProductType)))] +#[graphql(concrete(name = "WaveCustomerMutationResult", params(WaveCustomerType)))] +#[graphql(concrete(name = "WaveBoolMutationResult", params(bool)))] +pub struct WaveMutationResult { + pub success: bool, + pub error: Option, + pub data: Option, +} + +// ============================================================================ +// Additional Input Types +// ============================================================================ + +/// Input for creating a Wave product +#[derive(InputObject)] +pub struct CreateWaveProductInput { + /// Product name + pub name: String, + /// Product description + pub description: Option, + /// Default unit price + pub unit_price: f64, + /// Whether this product is sold (default true) + pub is_sold: Option, + /// Income account ID to link this product to + pub income_account_id: Option, +} + +/// Input for updating a Wave product +#[derive(InputObject)] +pub struct UpdateWaveProductInput { + /// Wave product ID to update + pub id: String, + /// Product name + pub name: Option, + /// Product description + pub description: Option, + /// Default unit price + pub unit_price: Option, + /// Income account ID to link this product to + pub income_account_id: Option, +} + +/// Input for updating a Wave customer +#[derive(InputObject)] +pub struct UpdateWaveCustomerInput { + /// Wave customer ID to update + pub id: String, + /// Customer name + pub name: Option, + /// Customer email + pub email: Option, + /// Customer address + pub address: Option, + /// Currency code + pub currency: Option, +} + +/// Input for Wave address 
+#[derive(InputObject, Clone)] +pub struct WaveAddressInput { + pub address_line1: Option, + pub address_line2: Option, + pub city: Option, + pub province_code: Option, + pub postal_code: Option, + pub country_code: Option, +} diff --git a/src/jobs/mod.rs b/src/jobs/mod.rs new file mode 100644 index 0000000..90b7bd9 --- /dev/null +++ b/src/jobs/mod.rs @@ -0,0 +1,29 @@ +//! Background Jobs (Apalis) +//! +//! This module contains background job definitions and handlers using Apalis. +//! Jobs are stored in Valkey/Redis and processed asynchronously. +//! +//! ## Job Types +//! +//! ### On-Demand Jobs (Redis-backed) +//! - `ProcessEventJob` - Process an event and create notifications +//! - `DeliverNotificationJob` - Deliver a notification via Email/SMS/In-App +//! +//! ### Scheduled Jobs (Cron-based) +//! - `EventCleanupJob` - Daily cleanup of old non-critical events +//! - `IncompleteWorkReminderJob` - Daily reminder about incomplete work +//! - `NightlyAssignmentsJob` - Daily notification of scheduled work + +pub mod notifications; +pub mod scheduled; + +pub use notifications::{ + DeliverNotificationJob, NotificationJobContext, ProcessEventJob, + DELIVER_NOTIFICATION_QUEUE, PROCESS_EVENT_QUEUE, +}; + +pub use scheduled::{ + EventCleanupJob, EventCleanupResult, + IncompleteWorkReminderJob, IncompleteWorkReminderResult, + NightlyAssignmentsJob, NightlyAssignmentsResult, +}; diff --git a/src/jobs/notifications.rs b/src/jobs/notifications.rs new file mode 100644 index 0000000..1ed572b --- /dev/null +++ b/src/jobs/notifications.rs @@ -0,0 +1,215 @@ +//! Notification Background Jobs +//! +//! Apalis jobs for processing events and delivering notifications. +//! These jobs are registered with the Apalis worker and executed asynchronously. 
+ +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::Event; +use crate::services::gmail::GmailService; +use crate::services::job_queue::JobQueue; +use crate::services::{NotificationDeliveryService, NotificationProcessor, MAX_DELIVERY_ATTEMPTS}; + +// ==================== JOB DEFINITIONS ==================== + +/// Job to process an event and create notifications +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProcessEventJob { + pub event_id: Uuid, +} + +impl ProcessEventJob { + pub fn new(event_id: Uuid) -> Self { + Self { event_id } + } +} + +/// Job to deliver a notification through a specific channel +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DeliverNotificationJob { + pub delivery_id: Uuid, + pub attempt: u32, +} + +impl DeliverNotificationJob { + pub fn new(delivery_id: Uuid) -> Self { + Self { + delivery_id, + attempt: 1, + } + } + + pub fn with_attempt(delivery_id: Uuid, attempt: u32) -> Self { + Self { + delivery_id, + attempt, + } + } +} + +// ==================== JOB CONTEXT ==================== + +/// Shared context for notification jobs +pub struct NotificationJobContext { + pub db: Database, + pub gmail_service: Option>, + pub job_queue: JobQueue, +} + +impl NotificationJobContext { + pub fn new(db: Database, gmail_service: Option>, job_queue: JobQueue) -> Self { + Self { + db, + gmail_service, + job_queue, + } + } +} + +// ==================== JOB EXECUTION ==================== + +impl ProcessEventJob { + /// Execute the job - process an event and create notifications + pub async fn execute(&self, ctx: &NotificationJobContext) -> anyhow::Result<()> { + tracing::info!(event_id = %self.event_id, "Processing event for notifications"); + + // Get current pool (supports credential refresh) + let pool = ctx.db.pool().await; + + // Fetch the event + let event: Option = sqlx::query_as( + r#" + SELECT id, event_type, entity_type, entity_id, actor_type, 
actor_id, + metadata, timestamp, created_at + FROM events + WHERE id = $1 + "#, + ) + .bind(self.event_id) + .fetch_optional(&*pool) + .await?; + + let Some(event) = event else { + tracing::warn!(event_id = %self.event_id, "Event not found, skipping notification processing"); + return Ok(()); + }; + + // Process the event and create notifications + let notifications = NotificationProcessor::process_event(&*pool, &event).await?; + + tracing::info!( + event_id = %self.event_id, + notification_count = notifications.len(), + "Created notifications for event" + ); + + // Queue delivery jobs for each notification's deliveries + for notification in ¬ifications { + let delivery_ids: Vec = sqlx::query_scalar( + "SELECT id FROM notification_deliveries WHERE notification_id = $1", + ) + .bind(notification.base.id) + .fetch_all(&*pool) + .await?; + + for delivery_id in delivery_ids { + if let Err(e) = ctx.job_queue.queue_deliver_notification(delivery_id).await { + tracing::error!( + delivery_id = %delivery_id, + error = %e, + "Failed to queue notification delivery job" + ); + } + } + } + + Ok(()) + } +} + +impl DeliverNotificationJob { + /// Execute the job - deliver a notification + pub async fn execute(&self, ctx: &NotificationJobContext) -> anyhow::Result<()> { + tracing::info!( + delivery_id = %self.delivery_id, + attempt = self.attempt, + "Delivering notification" + ); + + // Check if we've exceeded max attempts + if self.attempt > MAX_DELIVERY_ATTEMPTS as u32 { + tracing::warn!( + delivery_id = %self.delivery_id, + "Max delivery attempts exceeded" + ); + return Ok(()); // Don't retry further + } + + // Get current pool (supports credential refresh) + let pool = ctx.db.pool().await; + + // Attempt delivery + let result = NotificationDeliveryService::deliver( + &*pool, + ctx.gmail_service.as_ref().map(|s| s.as_ref()), + self.delivery_id, + ) + .await; + + match result { + Ok(_) => { + tracing::info!(delivery_id = %self.delivery_id, "Notification delivered successfully"); + 
Ok(()) + } + Err(e) => { + tracing::error!( + delivery_id = %self.delivery_id, + attempt = self.attempt, + error = %e, + "Notification delivery failed" + ); + + // Queue retry with incremented attempt + if self.attempt < MAX_DELIVERY_ATTEMPTS as u32 { + if let Err(queue_err) = ctx.job_queue.queue_delivery_retry(self.delivery_id, self.attempt + 1).await { + tracing::error!( + delivery_id = %self.delivery_id, + error = %queue_err, + "Failed to queue delivery retry" + ); + } + } + + // Return Ok so Apalis doesn't automatically retry + Ok(()) + } + } + } +} + +// ==================== QUEUE NAMES ==================== + +/// Queue name for event processing jobs +pub const PROCESS_EVENT_QUEUE: &str = "nexus:notifications:process_event"; + +/// Queue name for notification delivery jobs +pub const DELIVER_NOTIFICATION_QUEUE: &str = "nexus:notifications:deliver"; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_job_serialization() { + let event_job = ProcessEventJob::new(Uuid::new_v4()); + let json = serde_json::to_string(&event_job).unwrap(); + let _: ProcessEventJob = serde_json::from_str(&json).unwrap(); + + let delivery_job = DeliverNotificationJob::new(Uuid::new_v4()); + let json = serde_json::to_string(&delivery_job).unwrap(); + let _: DeliverNotificationJob = serde_json::from_str(&json).unwrap(); + } +} diff --git a/src/jobs/scheduled.rs b/src/jobs/scheduled.rs new file mode 100644 index 0000000..9edeb5b --- /dev/null +++ b/src/jobs/scheduled.rs @@ -0,0 +1,1314 @@ +//! Scheduled Background Jobs +//! +//! Cron-based jobs that run on a schedule: +//! - EventCleanupJob: Daily at 2 AM - cleans up old non-critical events +//! - IncompleteWorkReminderJob: Daily at 8 AM - emails about incomplete work +//! 
- NightlyAssignmentsJob: Daily at 6 PM - emails about tonight's schedule + +use chrono::{DateTime, Datelike, Duration, NaiveDate, Utc, Weekday}; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::models::EventType; +use crate::services::gmail::{GmailService, SendEmailRequest}; + +// ==================== RESULT TYPES ==================== + +/// Result of event cleanup job +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventCleanupResult { + pub deleted_count: i64, + pub cutoff_date: DateTime, + pub retention_days: i64, +} + +/// Result of incomplete work reminder job +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct IncompleteWorkReminderResult { + pub team_members_notified: i32, + pub services_count: i32, + pub projects_count: i32, + pub emails_sent: i32, + pub errors: Vec, +} + +/// Result of nightly assignments job +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct NightlyAssignmentsResult { + pub team_members_notified: i32, + pub services_count: i32, + pub projects_count: i32, + pub emails_sent: i32, + pub errors: Vec, +} + +// ==================== MISSION CRITICAL EVENTS ==================== + +/// Event types that should never be deleted (audit trail) +const MISSION_CRITICAL_EVENTS: &[EventType] = &[ + // Session events + EventType::ServiceSessionStarted, + EventType::ServiceSessionEnded, + EventType::ServiceSessionReverted, + EventType::ProjectSessionStarted, + EventType::ProjectSessionEnded, + EventType::ProjectSessionReverted, + // Task completions + EventType::ServiceTaskCompleted, + EventType::ServiceTaskUncompleted, + EventType::ProjectTaskCompleted, + EventType::ProjectTaskUncompleted, + // Invoice events + EventType::InvoiceCreated, + EventType::InvoiceStatusChanged, + EventType::InvoiceSent, + EventType::InvoicePaid, + EventType::InvoiceOverdue, + EventType::InvoiceDeleted, + // Report events + EventType::ReportCreated, + EventType::ReportSubmitted, + 
EventType::ReportApproved, + // Profile access events + EventType::CustomerProfileAccessGranted, + EventType::CustomerProfileAccessRevoked, + EventType::TeamProfileCreated, + EventType::TeamProfileDeleted, + EventType::TeamProfileRoleChanged, + // Financial events + EventType::LaborCreated, + EventType::LaborDeleted, + EventType::LaborRateChanged, + EventType::RevenueCreated, + EventType::RevenueDeleted, + EventType::RevenueAmountChanged, + // Status changes for key entities + EventType::ServiceStatusChanged, + EventType::ProjectStatusChanged, + EventType::CustomerStatusChanged, + EventType::AccountStatusChanged, +]; + +// ==================== EVENT CLEANUP JOB ==================== + +/// Job to clean up old non-critical events +pub struct EventCleanupJob; + +impl EventCleanupJob { + /// Default retention period in days + const RETENTION_DAYS: i64 = 90; + + /// Execute the cleanup job + pub async fn execute(pool: &PgPool) -> anyhow::Result { + let retention_days = Self::RETENTION_DAYS; + let cutoff_date = Utc::now() - Duration::days(retention_days); + + tracing::info!( + cutoff_date = %cutoff_date, + retention_days = retention_days, + "Starting event cleanup job" + ); + + // Build the list of event types to preserve + let preserved_types: Vec = MISSION_CRITICAL_EVENTS + .iter() + .map(|et| format!("{:?}", et).to_uppercase()) + .collect(); + + // Delete old non-critical events + let result = sqlx::query_scalar::<_, i64>( + r#" + WITH deleted AS ( + DELETE FROM events + WHERE created_at < $1 + AND event_type::text NOT IN (SELECT unnest($2::text[])) + RETURNING id + ) + SELECT COUNT(*) FROM deleted + "#, + ) + .bind(cutoff_date) + .bind(&preserved_types) + .fetch_one(pool) + .await?; + + let deleted_count = result; + + tracing::info!( + deleted_count = deleted_count, + cutoff_date = %cutoff_date, + "Event cleanup completed" + ); + + Ok(EventCleanupResult { + deleted_count, + cutoff_date, + retention_days, + }) + } +} + +// ==================== INCOMPLETE WORK REMINDER 
JOB ==================== + +/// Team member with incomplete work +#[derive(Debug, Clone, sqlx::FromRow)] +struct TeamMemberWork { + team_profile_id: Uuid, + email: String, + full_name: Option, +} + +/// Incomplete service info +#[derive(Debug, Clone, sqlx::FromRow)] +#[allow(dead_code)] +struct IncompleteService { + id: Uuid, + date: NaiveDate, + status: String, + account_name: Option, + address_line: Option, + notes: Option, + team_profile_id: Uuid, +} + +/// Incomplete project info +#[derive(Debug, Clone, sqlx::FromRow)] +#[allow(dead_code)] +struct IncompleteProject { + id: Uuid, + name: String, + date: Option, + status: String, + customer_name: Option, + address_line: Option, + team_profile_id: Uuid, +} + +/// Job to send reminders about incomplete work +pub struct IncompleteWorkReminderJob; + +impl IncompleteWorkReminderJob { + /// Execute the reminder job + pub async fn execute( + pool: &PgPool, + gmail_service: Option<&GmailService>, + ) -> anyhow::Result { + let today = Utc::now().date_naive(); + let query_date = Self::get_query_date(today); + + tracing::info!( + query_date = %query_date, + today = %today, + "Starting incomplete work reminder job" + ); + + let mut result = IncompleteWorkReminderResult { + team_members_notified: 0, + services_count: 0, + projects_count: 0, + emails_sent: 0, + errors: Vec::new(), + }; + + // Query incomplete services + let services: Vec = sqlx::query_as( + r#" + SELECT + s.id, + s.date, + s.status::text as status, + a.name as account_name, + CONCAT_WS(', ', + COALESCE(NULLIF(aa.name, ''), 'Primary Service Address'), + aa.street_address, + aa.city, + aa.state + ) as address_line, + s.notes, + stm.team_profile_id + FROM services s + JOIN service_team_members stm ON s.id = stm.service_id + JOIN team_profiles tp ON stm.team_profile_id = tp.id + LEFT JOIN accounts a ON s.account_id = a.id + LEFT JOIN account_addresses aa ON s.account_address_id = aa.id + WHERE s.date = $1 + AND s.status IN ('SCHEDULED', 'IN_PROGRESS') + AND 
tp.role != 'ADMIN' + AND tp.status = 'ACTIVE' + ORDER BY stm.team_profile_id, s.date + "#, + ) + .bind(query_date) + .fetch_all(pool) + .await?; + + // Query incomplete projects + let projects: Vec = sqlx::query_as( + r#" + SELECT + p.id, + p.name, + p.date, + p.status::text as status, + c.name as customer_name, + CONCAT_WS(', ', + COALESCE(NULLIF(aa.name, ''), 'Primary Service Address'), + aa.street_address, + aa.city, + aa.state + ) as address_line, + ptm.team_profile_id + FROM projects p + JOIN project_team_members ptm ON p.id = ptm.project_id + JOIN team_profiles tp ON ptm.team_profile_id = tp.id + LEFT JOIN customers c ON p.customer_id = c.id + LEFT JOIN account_addresses aa ON p.account_address_id = aa.id + WHERE p.date = $1 + AND p.status IN ('SCHEDULED', 'IN_PROGRESS') + AND tp.role != 'ADMIN' + AND tp.status = 'ACTIVE' + ORDER BY ptm.team_profile_id, p.date + "#, + ) + .bind(query_date) + .fetch_all(pool) + .await?; + + result.services_count = services.len() as i32; + result.projects_count = projects.len() as i32; + + if services.is_empty() && projects.is_empty() { + tracing::info!("No incomplete work found, skipping email notifications"); + return Ok(result); + } + + // Get unique team members + let team_member_ids: Vec = { + let mut ids: Vec = services.iter().map(|s| s.team_profile_id).collect(); + ids.extend(projects.iter().map(|p| p.team_profile_id)); + ids.sort(); + ids.dedup(); + ids + }; + + // Fetch team member details + let team_members: Vec = sqlx::query_as( + r#" + SELECT id as team_profile_id, email, + CONCAT(first_name, ' ', last_name) as full_name + FROM team_profiles + WHERE id = ANY($1) + "#, + ) + .bind(&team_member_ids) + .fetch_all(pool) + .await?; + + result.team_members_notified = team_members.len() as i32; + + // Send emails + let Some(gmail) = gmail_service else { + tracing::warn!("Gmail service not available, skipping email notifications"); + result.errors.push("Gmail service not available".to_string()); + return Ok(result); + }; + 
+ for member in team_members { + let member_services: Vec<&IncompleteService> = services + .iter() + .filter(|s| s.team_profile_id == member.team_profile_id) + .collect(); + let member_projects: Vec<&IncompleteProject> = projects + .iter() + .filter(|p| p.team_profile_id == member.team_profile_id) + .collect(); + + if member_services.is_empty() && member_projects.is_empty() { + continue; + } + + let recipient_name = member + .full_name + .as_deref() + .and_then(|n| n.split_whitespace().next()) + .unwrap_or("Team Member"); + + // Check for weekend services (notes contain "Weekend service window") + let has_weekend_services = member_services + .iter() + .any(|s| s.notes.as_ref().map_or(false, |n| n.contains("Weekend"))); + + let html = Self::render_email( + recipient_name, + &member.email, + &query_date, + &member_services, + &member_projects, + has_weekend_services, + ); + + let date_str = query_date.format("%B %d, %Y").to_string(); + match gmail + .send_email(SendEmailRequest { + to: vec![member.email.clone()], + cc: None, + bcc: None, + subject: format!("Incomplete Work Reminder - {}", date_str), + body: html, + content_type: Some("text/html".to_string()), + from_name: Some("Nexus".to_string()), + attachments: None, + }) + .await + { + Ok(_) => { + result.emails_sent += 1; + tracing::info!( + email = %member.email, + services = member_services.len(), + projects = member_projects.len(), + "Sent incomplete work reminder" + ); + } + Err(e) => { + let error = format!("Failed to send to {}: {}", member.email, e); + tracing::error!("{}", error); + result.errors.push(error); + } + } + } + + // Publish audit event + Self::publish_audit_event(pool, &result).await?; + + tracing::info!( + emails_sent = result.emails_sent, + errors = result.errors.len(), + "Incomplete work reminder job completed" + ); + + Ok(result) + } + + /// Get the date to query for incomplete work + /// - Monday: Query Friday (to catch weekend services) + /// - Saturday: Skip (don't remind on Saturday) + 
/// - Other days: Query yesterday + fn get_query_date(today: NaiveDate) -> NaiveDate { + match today.weekday() { + Weekday::Mon => today - Duration::days(3), // Friday + Weekday::Sat => today - Duration::days(1), // Friday (but we typically skip Saturday runs) + _ => today - Duration::days(1), // Yesterday + } + } + + fn render_email( + recipient_name: &str, + recipient_email: &str, + query_date: &NaiveDate, + services: &[&IncompleteService], + projects: &[&IncompleteProject], + has_weekend_services: bool, + ) -> String { + let date_str = query_date.format("%B %d, %Y").to_string(); + let current_year = Utc::now().year(); + + // Build services HTML + let services_html = if services.is_empty() { + String::new() + } else { + let items: String = services + .iter() + .enumerate() + .map(|(i, s)| { + let account_name = s.account_name.as_deref().unwrap_or("Unknown"); + let address = s.address_line.as_deref().unwrap_or(""); + let is_weekend = s.notes.as_ref().map_or(false, |n| n.contains("Weekend")); + let weekend_badge = if is_weekend { + r#"WEEKEND"# + } else { + "" + }; + let (status_bg, status_color, status_label) = if s.status == "IN_PROGRESS" { + ("#d8a01d20", "#d8a01d", "In Progress") + } else { + ("#3b78c420", "#3b78c4", "Scheduled") + }; + let border = if i < services.len() - 1 { " border-bottom: 1px solid #e2e8f0;" } else { "" }; + format!( + r#" + + + + + + + + + +
+ {account_name} + {weekend_badge} + + {status_label} +
+ {address} +
+ + "# + ) + }) + .collect(); + + format!( + r#" + + + + + +
+ Services ({count}) +
+ + {items} +
+ + "#, + count = services.len(), + items = items + ) + }; + + // Build projects HTML + let projects_html = if projects.is_empty() { + String::new() + } else { + let items: String = projects + .iter() + .enumerate() + .map(|(i, p)| { + let customer = p.customer_name.as_deref().unwrap_or("Unknown"); + let address = p.address_line.as_deref().unwrap_or(""); + let address_part = if !address.is_empty() { + format!(" • {}", address) + } else { + String::new() + }; + let (status_bg, status_color, status_label) = if p.status == "IN_PROGRESS" { + ("#d8a01d20", "#d8a01d", "In Progress") + } else { + ("#3b78c420", "#3b78c4", "Scheduled") + }; + let border = if i < projects.len() - 1 { " border-bottom: 1px solid #e2e8f0;" } else { "" }; + format!( + r#" + + + + + + + + + +
+ {name} + + {status_label} +
+ {customer}{address_part} +
+ + "#, + name = p.name, + customer = customer, + address_part = address_part, + border = border, + status_bg = status_bg, + status_color = status_color, + status_label = status_label + ) + }) + .collect(); + + format!( + r#" + + + + + +
+ Projects ({count}) +
+ + {items} +
+ + "#, + count = projects.len(), + items = items + ) + }; + + let body_text = if has_weekend_services { + "The following work items are still incomplete and may need your attention. This includes weekend services from Friday that were not completed.".to_string() + } else { + format!( + "The following work items from {} are still incomplete and may need your attention.", + date_str + ) + }; + + format!( + r##" + + + + + + Incomplete Work Reminder - {date_str} + + + + + + + +
+ + + + + + + + + + + + + + + +
+ + + + +
+ + + + + +
+ + + Nexus +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + {services_html} + + + {projects_html} + + + + + + + + + + +
+ + + + +
+ Action Required +
+
+

Incomplete Work Reminder

+
+

Hi {recipient_name},

+
+

{body_text}

+
+

Please visit your dashboard to update the status of these items or contact your team lead if assistance is needed.

+
+ + + + +
+ Open Dashboard +
+
+
+ + + + + + + + + + +
+

+ Visit Nexus +  •  + Contact Us +

+
+

© {current_year} Your Company. All rights reserved.

+
+

+ This is an automated daily reminder sent to {recipient_email}.
+ You received this because you have incomplete work items assigned to you. +

+
+
+
+ +"##, + date_str = date_str, + recipient_name = recipient_name, + recipient_email = recipient_email, + body_text = body_text, + services_html = services_html, + projects_html = projects_html, + current_year = current_year + ) + } + + async fn publish_audit_event( + pool: &PgPool, + result: &IncompleteWorkReminderResult, + ) -> anyhow::Result<()> { + let event_id = Uuid::new_v4(); + let now = Utc::now(); + + sqlx::query( + r#" + INSERT INTO events (id, event_type, entity_type, entity_id, actor_type, metadata, timestamp, created_at) + VALUES ($1, 'MONITORING_TASK_RUN', 'system', $1, 'system', $2, $3, $3) + "#, + ) + .bind(event_id) + .bind(json!({ + "job": "incomplete_work_reminder", + "team_members_notified": result.team_members_notified, + "services_count": result.services_count, + "projects_count": result.projects_count, + "emails_sent": result.emails_sent, + "errors": result.errors, + })) + .bind(now) + .execute(pool) + .await?; + + Ok(()) + } +} + +// ==================== NIGHTLY ASSIGNMENTS JOB ==================== + +/// Scheduled service info +#[derive(Debug, Clone, sqlx::FromRow)] +#[allow(dead_code)] +struct ScheduledService { + id: Uuid, + date: NaiveDate, + account_name: Option, + address_line: Option, + notes: Option, + team_profile_id: Uuid, +} + +/// Scheduled project info +#[derive(Debug, Clone, sqlx::FromRow)] +#[allow(dead_code)] +struct ScheduledProject { + id: Uuid, + name: String, + date: Option, + customer_name: Option, + address_line: Option, + team_profile_id: Uuid, +} + +/// Job to send nightly assignment notifications +pub struct NightlyAssignmentsJob; + +impl NightlyAssignmentsJob { + /// Execute the assignments job + pub async fn execute( + pool: &PgPool, + gmail_service: Option<&GmailService>, + ) -> anyhow::Result { + let today = Utc::now().date_naive(); + + tracing::info!( + date = %today, + "Starting nightly assignments job" + ); + + let mut result = NightlyAssignmentsResult { + team_members_notified: 0, + services_count: 0, + 
projects_count: 0, + emails_sent: 0, + errors: Vec::new(), + }; + + // Query scheduled services for today + let services: Vec = sqlx::query_as( + r#" + SELECT + s.id, + s.date, + a.name as account_name, + CONCAT_WS(', ', + COALESCE(NULLIF(aa.name, ''), 'Primary Service Address'), + aa.street_address, + aa.city, + aa.state + ) as address_line, + s.notes, + stm.team_profile_id + FROM services s + JOIN service_team_members stm ON s.id = stm.service_id + JOIN team_profiles tp ON stm.team_profile_id = tp.id + LEFT JOIN accounts a ON s.account_id = a.id + LEFT JOIN account_addresses aa ON s.account_address_id = aa.id + WHERE s.date = $1 + AND s.status = 'SCHEDULED' + AND tp.role != 'ADMIN' + AND tp.status = 'ACTIVE' + ORDER BY stm.team_profile_id, s.date + "#, + ) + .bind(today) + .fetch_all(pool) + .await?; + + // Query scheduled projects for today + let projects: Vec = sqlx::query_as( + r#" + SELECT + p.id, + p.name, + p.date, + c.name as customer_name, + CONCAT_WS(', ', + COALESCE(NULLIF(aa.name, ''), 'Primary Service Address'), + aa.street_address, + aa.city, + aa.state + ) as address_line, + ptm.team_profile_id + FROM projects p + JOIN project_team_members ptm ON p.id = ptm.project_id + JOIN team_profiles tp ON ptm.team_profile_id = tp.id + LEFT JOIN customers c ON p.customer_id = c.id + LEFT JOIN account_addresses aa ON p.account_address_id = aa.id + WHERE p.date = $1 + AND p.status = 'SCHEDULED' + AND tp.role != 'ADMIN' + AND tp.status = 'ACTIVE' + ORDER BY ptm.team_profile_id, p.date + "#, + ) + .bind(today) + .fetch_all(pool) + .await?; + + result.services_count = services.len() as i32; + result.projects_count = projects.len() as i32; + + if services.is_empty() && projects.is_empty() { + tracing::info!("No scheduled work found, skipping email notifications"); + return Ok(result); + } + + // Get unique team members + let team_member_ids: Vec = { + let mut ids: Vec = services.iter().map(|s| s.team_profile_id).collect(); + ids.extend(projects.iter().map(|p| 
p.team_profile_id)); + ids.sort(); + ids.dedup(); + ids + }; + + // Fetch team member details + let team_members: Vec = sqlx::query_as( + r#" + SELECT id as team_profile_id, email, + CONCAT(first_name, ' ', last_name) as full_name + FROM team_profiles + WHERE id = ANY($1) + "#, + ) + .bind(&team_member_ids) + .fetch_all(pool) + .await?; + + result.team_members_notified = team_members.len() as i32; + + // Send emails + let Some(gmail) = gmail_service else { + tracing::warn!("Gmail service not available, skipping email notifications"); + result.errors.push("Gmail service not available".to_string()); + return Ok(result); + }; + + for member in team_members { + let member_services: Vec<&ScheduledService> = services + .iter() + .filter(|s| s.team_profile_id == member.team_profile_id) + .collect(); + let member_projects: Vec<&ScheduledProject> = projects + .iter() + .filter(|p| p.team_profile_id == member.team_profile_id) + .collect(); + + if member_services.is_empty() && member_projects.is_empty() { + continue; + } + + let recipient_name = member + .full_name + .as_deref() + .and_then(|n| n.split_whitespace().next()) + .unwrap_or("Team Member"); + + // Check for weekend services (notes contain "Weekend service window") + let has_weekend_services = member_services + .iter() + .any(|s| s.notes.as_ref().map_or(false, |n| n.contains("Weekend"))); + + let html = Self::render_email( + recipient_name, + &member.email, + &today, + &member_services, + &member_projects, + has_weekend_services, + ); + + let date_str = today.format("%B %d, %Y").to_string(); + match gmail + .send_email(SendEmailRequest { + to: vec![member.email.clone()], + cc: None, + bcc: None, + subject: format!("Tonight's Assignments - {}", date_str), + body: html, + content_type: Some("text/html".to_string()), + from_name: Some("Nexus".to_string()), + attachments: None, + }) + .await + { + Ok(_) => { + result.emails_sent += 1; + tracing::info!( + email = %member.email, + services = member_services.len(), + 
projects = member_projects.len(), + "Sent nightly assignments" + ); + } + Err(e) => { + let error = format!("Failed to send to {}: {}", member.email, e); + tracing::error!("{}", error); + result.errors.push(error); + } + } + } + + // Publish audit event + Self::publish_audit_event(pool, &result).await?; + + tracing::info!( + emails_sent = result.emails_sent, + errors = result.errors.len(), + "Nightly assignments job completed" + ); + + Ok(result) + } + + fn render_email( + recipient_name: &str, + recipient_email: &str, + date: &NaiveDate, + services: &[&ScheduledService], + projects: &[&ScheduledProject], + has_weekend_services: bool, + ) -> String { + let date_str = date.format("%B %d, %Y").to_string(); + let current_year = Utc::now().year(); + + // Build services HTML + let services_html = if services.is_empty() { + String::new() + } else { + let items: String = services + .iter() + .enumerate() + .map(|(i, s)| { + let account_name = s.account_name.as_deref().unwrap_or("Unknown"); + let address = s.address_line.as_deref().unwrap_or(""); + let is_weekend = s.notes.as_ref().map_or(false, |n| n.contains("Weekend")); + let weekend_badge = if is_weekend { + r#"WEEKEND"# + } else { + "" + }; + let border = if i < services.len() - 1 { " border-bottom: 1px solid #e2e8f0;" } else { "" }; + format!( + r#" + + + + + + + + +
+ {account_name} + {weekend_badge} +
+ {address} +
+ + "# + ) + }) + .collect(); + + format!( + r#" + + + + + +
+ Services ({count}) +
+ + {items} +
+ + "#, + count = services.len(), + items = items + ) + }; + + // Build projects HTML + let projects_html = if projects.is_empty() { + String::new() + } else { + let items: String = projects + .iter() + .enumerate() + .map(|(i, p)| { + let customer = p.customer_name.as_deref().unwrap_or("Unknown"); + let address = p.address_line.as_deref().unwrap_or(""); + let address_part = if !address.is_empty() { + format!(" • {}", address) + } else { + String::new() + }; + let border = if i < projects.len() - 1 { " border-bottom: 1px solid #e2e8f0;" } else { "" }; + format!( + r#" + + + + + + + + +
+ {name} +
+ {customer}{address_part} +
+ + "#, + name = p.name, + customer = customer, + address_part = address_part, + border = border + ) + }) + .collect(); + + format!( + r#" + + + + + +
+ Projects ({count}) +
+ + {items} +
+ + "#, + count = projects.len(), + items = items + ) + }; + + let body_text = if has_weekend_services { + format!( + "Here's your scheduled work for {}, including weekend services that can be completed through Sunday.", + date_str + ) + } else { + format!("Here's your scheduled work for {}.", date_str) + }; + + let deadline_text = if has_weekend_services { + "Regular services are due by 8:00am tomorrow morning. Weekend services are due by Monday morning." + } else { + "All of your completed work is due by 8:00am tomorrow morning." + }; + + format!( + r##" + + + + + + Tonight's Assignments - {date_str} + + + + + + + +
+ + + + + + + + + + + + + + + +
+ + + + +
+ + + + + +
+ + + Nexus +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + {services_html} + + + {projects_html} + + + + + + + + + + +
+ + + + +
+ Tonight's Schedule +
+
+

Your Assignments for Tonight

+
+

Hi {recipient_name},

+
+

{body_text}

+
+

{deadline_text}

+

Please visit your dashboard to get started.

+
+ + + + +
+ View Dashboard +
+
+
+ + + + + + + + + + +
+

+ Visit Nexus +  •  + Contact Us +

+
+

© {current_year} Your Company. All rights reserved.

+
+

+ This is an automated notification sent to {recipient_email}.
+ You received this because you have work scheduled for tonight. +

+
+
+
+ +"##, + date_str = date_str, + recipient_name = recipient_name, + recipient_email = recipient_email, + body_text = body_text, + deadline_text = deadline_text, + services_html = services_html, + projects_html = projects_html, + current_year = current_year + ) + } + + async fn publish_audit_event( + pool: &PgPool, + result: &NightlyAssignmentsResult, + ) -> anyhow::Result<()> { + let event_id = Uuid::new_v4(); + let now = Utc::now(); + + sqlx::query( + r#" + INSERT INTO events (id, event_type, entity_type, entity_id, actor_type, metadata, timestamp, created_at) + VALUES ($1, 'MONITORING_TASK_RUN', 'system', $1, 'system', $2, $3, $3) + "#, + ) + .bind(event_id) + .bind(json!({ + "job": "nightly_assignments", + "team_members_notified": result.team_members_notified, + "services_count": result.services_count, + "projects_count": result.projects_count, + "emails_sent": result.emails_sent, + "errors": result.errors, + })) + .bind(now) + .execute(pool) + .await?; + + Ok(()) + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..1b04876 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,9 @@ +pub mod auth; +pub mod config; +pub mod db; +pub mod graphql; +pub mod jobs; +pub mod models; +pub mod routes; +pub mod services; +pub mod signals; diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..fc06267 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,442 @@ +use std::net::SocketAddr; +use std::sync::Arc; + +use apalis::prelude::*; +use apalis_cron::CronStream; +use apalis_redis::{RedisConfig, RedisStorage}; +use cron::Schedule; +use std::str::FromStr; +use axum::{middleware, routing::get, Router}; +use tower_http::services::ServeDir; +use tower_http::trace::TraceLayer; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +use nexus::{ + auth::ory_auth_middleware, + config::Config, + db::Database, + graphql::{build_schema, SchemaOptions}, + jobs::{ + DeliverNotificationJob, NotificationJobContext, ProcessEventJob, + 
EventCleanupJob, IncompleteWorkReminderJob, NightlyAssignmentsJob, + DELIVER_NOTIFICATION_QUEUE, PROCESS_EVENT_QUEUE, + }, + routes::{self, graphql_handler, graphql_playground, MediaState}, + services::{GmailService, GoogleAuthService, GoogleCalendarService, JobQueue, S3Service, WaveService}, + signals, +}; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + // Load .env file if present (for local development) + dotenvy::dotenv().ok(); + + // Initialize tracing + tracing_subscriber::registry() + .with( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| "nexus=debug,tower_http=debug".into()), + ) + .with(tracing_subscriber::fmt::layer()) + .init(); + + // Load configuration + let config = Config::from_env().expect("Failed to load configuration"); + + tracing::info!("Starting Nexus server on {}", config.bind_addr()); + + // Connect to database + let db = Database::connect().await?; + + // Spawn SIGHUP handler for credential refresh + let db_clone = db.clone(); + tokio::spawn(async move { + signals::handle_sighup(db_clone).await; + }); + + // Spawn file watcher as fallback for credential refresh + // (handles cases where SIGHUP signaling fails) + let db_clone = db.clone(); + tokio::spawn(async move { + signals::watch_secrets_file(db_clone).await; + }); + + // Initialize Google services if configured + let (calendar_service, gmail_service) = initialize_google_services(&config); + + // Initialize Wave service if configured + let wave_service = initialize_wave_service(&config); + + // Initialize S3 service if configured + let s3_service = initialize_s3_service(&config).await; + + // Initialize job queue + let job_queue = initialize_job_queue(&config).await; + + // Build GraphQL schema with database and services + let schema = build_schema(SchemaOptions { + db: db.clone(), + calendar_service, + gmail_service: gmail_service.clone(), + wave_service, + s3_service: s3_service.clone(), + job_queue: job_queue.clone(), + }); + + // Build 
authenticated routes (behind Ory middleware) + // Playground (GET) needs Database for admin check + // Handler (POST) needs Schema for query execution + let graphql_routes = Router::new() + .route("/graphql", get(graphql_playground)) + .with_state(db.clone()) + .merge( + Router::new() + .route("/graphql", axum::routing::post(graphql_handler)) + .with_state(schema), + ); + + // Build media proxy routes (if S3 is configured) + let media_routes = if let Some(s3) = s3_service { + let media_state = MediaState { s3 }; + Some( + Router::new() + .route("/api/media/{*path}", get(routes::get_media).head(routes::head_media)) + .with_state(media_state), + ) + } else { + None + }; + + // Combine authenticated routes + let mut authenticated_routes = graphql_routes; + if let Some(media) = media_routes { + authenticated_routes = authenticated_routes.merge(media); + } + let authenticated_routes = authenticated_routes.layer(middleware::from_fn(ory_auth_middleware)); + + // Build main router - health routes are public (no auth required) + // Note: CORS is handled by Oathkeeper, not here + let app = Router::new() + .merge(routes::router(db.clone())) // Public routes (/health, /health/ready) + .nest_service("/static", ServeDir::new("static")) // Static files (logo, etc.) 
+ .merge(authenticated_routes) + .layer(TraceLayer::new_for_http()); + + // Parse bind address + let addr: SocketAddr = config.bind_addr().parse()?; + + // Start background workers if job queue is available + if let Some(ref jq) = job_queue { + let worker_handle = spawn_workers( + db.clone(), + gmail_service, + jq.clone(), + &config, + ).await; + + // Spawn worker runner in background + tokio::spawn(async move { + if let Err(e) = worker_handle.await { + tracing::error!("Worker error: {}", e); + } + }); + } + + // Start server + tracing::info!("Nexus GraphQL API listening on http://{}", addr); + let listener = tokio::net::TcpListener::bind(addr).await?; + axum::serve(listener, app).await?; + + Ok(()) +} + +/// Initialize Google services if configured +fn initialize_google_services( + config: &Config, +) -> ( + Option>, + Option>, +) { + // Check if Google service account key is configured + let service_account_key = match &config.google_service_account_key { + Some(key) if !key.is_empty() => key, + _ => { + tracing::info!("Google services not configured (GOOGLE_SERVICE_ACCOUNT_KEY not set)"); + return (None, None); + } + }; + + // Create shared auth service + let auth = match GoogleAuthService::new(service_account_key) { + Ok(auth) => Arc::new(auth), + Err(e) => { + tracing::warn!("Failed to initialize Google auth service: {}", e); + return (None, None); + } + }; + + // Initialize Calendar service if calendar ID is configured + let calendar_service = config.google_calendar_id.as_ref().and_then(|calendar_id| { + if calendar_id.is_empty() { + tracing::info!("Google Calendar not configured (GOOGLE_CALENDAR_ID not set)"); + None + } else { + tracing::info!("Google Calendar service initialized for calendar: {}", calendar_id); + Some(Arc::new(GoogleCalendarService::new(auth.clone(), calendar_id.clone(), calendar_id.clone()))) + } + }); + + // Initialize Gmail service if user email is configured + let gmail_service = config.google_gmail_user.as_ref().and_then(|user_email| 
{ + if user_email.is_empty() { + tracing::info!("Gmail service not configured (GOOGLE_GMAIL_USER not set)"); + None + } else { + tracing::info!("Gmail service initialized for user: {}", user_email); + Some(Arc::new(GmailService::new(auth.clone(), user_email.clone()))) + } + }); + + (calendar_service, gmail_service) +} + +/// Initialize Wave service if configured +fn initialize_wave_service(config: &Config) -> Option> { + let access_token = match &config.wave_access_token { + Some(token) if !token.is_empty() => token, + _ => { + tracing::info!("Wave service not configured (WAVE_ACCESS_TOKEN not set)"); + return None; + } + }; + + let business_id = match &config.wave_business_id { + Some(id) if !id.is_empty() => id, + _ => { + tracing::info!("Wave service not configured (WAVE_BUSINESS_ID not set)"); + return None; + } + }; + + tracing::info!("Wave service initialized for business: {}", business_id); + Some(Arc::new(WaveService::new( + access_token.clone(), + business_id.clone(), + ))) +} + +/// Initialize S3 service if configured +async fn initialize_s3_service(config: &Config) -> Option> { + if config.s3_endpoint.is_empty() || config.s3_access_key.is_empty() { + tracing::info!("S3 service not configured (S3_ENDPOINT or S3_ACCESS_KEY not set)"); + return None; + } + + match S3Service::new(config).await { + Ok(s3) => { + tracing::info!( + "S3 service initialized (endpoint: {}, bucket: {})", + config.s3_endpoint, + config.s3_bucket + ); + Some(Arc::new(s3)) + } + Err(e) => { + tracing::warn!("Failed to initialize S3 service: {}", e); + None + } + } +} + +/// Initialize job queue if Valkey is configured +async fn initialize_job_queue(config: &Config) -> Option { + if config.valkey_url.is_empty() { + tracing::info!("Job queue not configured (VALKEY_URL not set)"); + return None; + } + + match JobQueue::new(&config.valkey_url).await { + Ok(queue) => { + tracing::info!("Job queue initialized ({})", config.valkey_url); + Some(queue) + } + Err(e) => { + 
tracing::warn!("Failed to initialize job queue: {}", e); + None + } + } +} + +/// Context for scheduled jobs (simpler than NotificationJobContext) +struct ScheduledJobContext { + db: Database, + gmail_service: Option>, +} + +/// Spawn background workers for notification processing and scheduled jobs +async fn spawn_workers( + db: Database, + gmail_service: Option>, + job_queue: JobQueue, + config: &Config, +) -> tokio::task::JoinHandle<()> { + let ctx = Arc::new(NotificationJobContext::new(db.clone(), gmail_service.clone(), job_queue.clone())); + let scheduled_ctx = Arc::new(ScheduledJobContext { + db: db.clone(), + gmail_service: gmail_service.clone(), + }); + + // Connect to Redis for workers + let conn = apalis_redis::connect(config.valkey_url.clone()) + .await + .expect("Failed to connect to Valkey for workers"); + + // Create storage for process event jobs + let event_storage: RedisStorage = RedisStorage::new_with_config( + conn.clone(), + RedisConfig::default().set_namespace(PROCESS_EVENT_QUEUE), + ); + + // Create storage for delivery jobs + let delivery_storage: RedisStorage = RedisStorage::new_with_config( + conn, + RedisConfig::default().set_namespace(DELIVER_NOTIFICATION_QUEUE), + ); + + // Create cron schedules (UTC times) + // Format: sec min hour day-of-month month day-of-week year + // 2 AM Eastern = 7 AM UTC (EST) / 6 AM UTC (EDT) + // 8 AM Eastern = 1 PM UTC (EST) / 12 PM UTC (EDT) + // 6 PM Eastern = 11 PM UTC (EST) / 10 PM UTC (EDT) + // Using EST offsets (5 hours) - will be off by 1 hour during DST + let cleanup_schedule = Schedule::from_str("0 0 7 * * * *").expect("Invalid cleanup schedule"); + let reminder_schedule = Schedule::from_str("0 0 13 * * * *").expect("Invalid reminder schedule"); + let assignments_schedule = Schedule::from_str("0 0 23 * * * *").expect("Invalid assignments schedule"); + + // Create worker functions + let ctx_event = Arc::clone(&ctx); + let ctx_delivery = Arc::clone(&ctx); + let ctx_cleanup = 
Arc::clone(&scheduled_ctx); + let ctx_reminder = Arc::clone(&scheduled_ctx); + let ctx_assignments = Arc::clone(&scheduled_ctx); + + // Build and run the monitor with workers + tokio::spawn(async move { + let monitor = Monitor::new() + // On-demand notification workers + .register(move |_instance| { + WorkerBuilder::new(PROCESS_EVENT_QUEUE) + .backend(event_storage.clone()) + .data(ctx_event.clone()) + .build(|job: ProcessEventJob, ctx: Data>| async move { + job.execute(&ctx).await + }) + }) + .register(move |_instance| { + WorkerBuilder::new(DELIVER_NOTIFICATION_QUEUE) + .backend(delivery_storage.clone()) + .data(ctx_delivery.clone()) + .build(|job: DeliverNotificationJob, ctx: Data>| async move { + job.execute(&ctx).await + }) + }) + // Scheduled cron workers + .register({ + let schedule = cleanup_schedule.clone(); + let ctx = ctx_cleanup.clone(); + move |_instance| { + let ctx = ctx.clone(); + WorkerBuilder::new("event-cleanup") + .backend(CronStream::new(schedule.clone())) + .data(ctx) + .build(|_tick, ctx: Data>| async move { + tracing::info!("Running event cleanup job"); + let pool = ctx.db.pool().await; + match EventCleanupJob::execute(&*pool).await { + Ok(result) => { + tracing::info!( + deleted = result.deleted_count, + cutoff = %result.cutoff_date, + "Event cleanup completed" + ); + Ok(()) + } + Err(e) => { + tracing::error!("Event cleanup failed: {}", e); + Err(e) + } + } + }) + } + }) + .register({ + let schedule = reminder_schedule.clone(); + let ctx = ctx_reminder.clone(); + move |_instance| { + let ctx = ctx.clone(); + WorkerBuilder::new("incomplete-work-reminder") + .backend(CronStream::new(schedule.clone())) + .data(ctx) + .build(|_tick, ctx: Data>| async move { + tracing::info!("Running incomplete work reminder job"); + let pool = ctx.db.pool().await; + match IncompleteWorkReminderJob::execute( + &*pool, + ctx.gmail_service.as_ref().map(|s| s.as_ref()), + ).await { + Ok(result) => { + tracing::info!( + emails = result.emails_sent, + services = 
result.services_count, + projects = result.projects_count, + "Incomplete work reminder completed" + ); + Ok(()) + } + Err(e) => { + tracing::error!("Incomplete work reminder failed: {}", e); + Err(e) + } + } + }) + } + }) + .register({ + let schedule = assignments_schedule.clone(); + let ctx = ctx_assignments.clone(); + move |_instance| { + let ctx = ctx.clone(); + WorkerBuilder::new("nightly-assignments") + .backend(CronStream::new(schedule.clone())) + .data(ctx) + .build(|_tick, ctx: Data>| async move { + tracing::info!("Running nightly assignments job"); + let pool = ctx.db.pool().await; + match NightlyAssignmentsJob::execute( + &*pool, + ctx.gmail_service.as_ref().map(|s| s.as_ref()), + ).await { + Ok(result) => { + tracing::info!( + emails = result.emails_sent, + services = result.services_count, + projects = result.projects_count, + "Nightly assignments completed" + ); + Ok(()) + } + Err(e) => { + tracing::error!("Nightly assignments failed: {}", e); + Err(e) + } + } + }) + } + }); + + tracing::info!("Starting notification and scheduled workers"); + if let Err(e) = monitor.run().await { + tracing::error!("Worker monitor error: {}", e); + } + }) +} diff --git a/src/models/account.rs b/src/models/account.rs new file mode 100644 index 0000000..f2bef95 --- /dev/null +++ b/src/models/account.rs @@ -0,0 +1,82 @@ +use chrono::NaiveDate; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use uuid::Uuid; + +use super::{AddressFields, BaseFields, ContactFields, EntityStatus}; + +/// Customer's business location or division +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Account { + #[sqlx(flatten)] + #[serde(flatten)] + pub base: BaseFields, + pub customer_id: Uuid, + pub name: String, + pub status: EntityStatus, + pub start_date: Option, + pub end_date: Option, +} + +impl Account { + /// Check if account is currently active based on dates + pub fn is_active(&self) -> bool { + if self.status != EntityStatus::Active { + return false; + } + 
+ let today = chrono::Utc::now().date_naive(); + + if let Some(start) = self.start_date { + if today < start { + return false; + } + } + + if let Some(end) = self.end_date { + if today > end { + return false; + } + } + + true + } +} + +/// Physical location for an account +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct AccountAddress { + #[sqlx(flatten)] + #[serde(flatten)] + pub base: BaseFields, + pub account_id: Uuid, + #[sqlx(flatten)] + #[serde(flatten)] + pub address: AddressFields, + pub is_active: bool, + pub is_primary: bool, + pub name: Option, + pub notes: Option, +} + +/// Contact person at an account +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct AccountContact { + #[sqlx(flatten)] + #[serde(flatten)] + pub base: BaseFields, + pub account_id: Uuid, + #[sqlx(flatten)] + #[serde(flatten)] + pub contact: ContactFields, + pub email: Option, + pub is_active: bool, + pub is_primary: bool, + pub notes: Option, +} + +impl AccountContact { + pub fn full_name(&self) -> String { + self.contact.full_name() + } +} diff --git a/src/models/base.rs b/src/models/base.rs new file mode 100644 index 0000000..42f9cc5 --- /dev/null +++ b/src/models/base.rs @@ -0,0 +1,253 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use uuid::Uuid; + +// ==================== BASE FIELD GROUPS ==================== + +/// Base fields shared by all models (id + timestamps) +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct BaseFields { + pub id: Uuid, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +impl Default for BaseFields { + fn default() -> Self { + let now = Utc::now(); + Self { + id: Uuid::new_v4(), + created_at: now, + updated_at: now, + } + } +} + +/// Address fields (street, city, state, zip) +#[derive(Debug, Clone, Serialize, Deserialize, FromRow, Default)] +pub struct AddressFields { + pub street_address: String, + pub city: String, + pub state: String, + pub 
zip_code: String, +} + +impl AddressFields { + /// Format as single-line address + pub fn format_inline(&self) -> String { + format!( + "{}, {}, {} {}", + self.street_address, self.city, self.state, self.zip_code + ) + } + + /// Format as multi-line address + pub fn format_multiline(&self) -> String { + format!( + "{}\n{}, {} {}", + self.street_address, self.city, self.state, self.zip_code + ) + } +} + +/// Contact fields (first_name, last_name, phone) +#[derive(Debug, Clone, Serialize, Deserialize, FromRow, Default)] +pub struct ContactFields { + pub first_name: String, + pub last_name: String, + pub phone: Option, +} + +impl ContactFields { + pub fn full_name(&self) -> String { + format!("{} {}", self.first_name, self.last_name) + } +} + +/// Media metadata fields (dimensions, content type, uploader) +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct MediaFields { + pub content_type: String, + pub width: Option, + pub height: Option, + pub uploaded_by_id: Uuid, + pub notes: Option, + pub internal: bool, +} + +/// Note fields (content, author, internal flag) +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct NoteFields { + pub content: String, + pub author_id: Uuid, + pub internal: bool, +} + +/// Status for entities with lifecycle +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "entity_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum EntityStatus { + Active, + Inactive, + Pending, +} + +impl Default for EntityStatus { + fn default() -> Self { + Self::Active + } +} + +/// Work status for services and projects +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "work_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum WorkStatus { + Scheduled, + InProgress, + Completed, + Cancelled, +} + +impl Default for WorkStatus { + fn default() -> Self { + Self::Scheduled + } +} + +/// Invoice status +#[derive(Debug, Clone, 
Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "invoice_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum InvoiceStatus { + Draft, + Sent, + Paid, + Overdue, + Cancelled, +} + +impl Default for InvoiceStatus { + fn default() -> Self { + Self::Draft + } +} + +/// Report status for payroll workflow +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "report_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ReportStatus { + Draft, + Finalized, + Paid, +} + +impl Default for ReportStatus { + fn default() -> Self { + Self::Draft + } +} + +/// Payment type for invoices +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "payment_type", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PaymentType { + Check, + CreditCard, + BankTransfer, + Cash, +} + +/// Task frequency +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "task_frequency", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum TaskFrequency { + Daily, + Weekly, + Monthly, + Quarterly, + Triannual, + Annual, + AsNeeded, +} + +impl Default for TaskFrequency { + fn default() -> Self { + Self::Daily + } +} + +/// Team member role +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "team_role", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum TeamRole { + Admin, + TeamLeader, + TeamMember, +} + +impl Default for TeamRole { + fn default() -> Self { + Self::TeamMember + } +} + +/// Conversation type +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "conversation_type", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ConversationType { + Direct, + Group, + Support, +} + +impl Default for ConversationType { + fn default() -> Self { + Self::Direct + } +} + +// ==================== NOTIFICATION ENUMS ==================== + +/// 
Notification delivery channel +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "notification_channel", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum NotificationChannel { + InApp, + Email, + Sms, +} + +/// Notification status +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "notification_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum NotificationStatus { + Pending, + Sent, + Read, + Failed, +} + +impl Default for NotificationStatus { + fn default() -> Self { + Self::Pending + } +} + +/// Delivery attempt status +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "delivery_status", rename_all = "SCREAMING_SNAKE_CASE")] +pub enum DeliveryStatus { + Pending, + Queued, + Sending, + Sent, + Delivered, + Failed, + Bounced, +} + +impl Default for DeliveryStatus { + fn default() -> Self { + Self::Pending + } +} diff --git a/src/models/chat.rs b/src/models/chat.rs new file mode 100644 index 0000000..556b9c8 --- /dev/null +++ b/src/models/chat.rs @@ -0,0 +1,53 @@ +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use uuid::Uuid; + +use super::BaseFields; + +// ==================== CHAT (AI ASSISTANT) ==================== + +/// Chat message role +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ChatRole { + User, + Assistant, + Tool, +} + +impl ChatRole { + pub fn as_str(&self) -> &'static str { + match self { + Self::User => "user", + Self::Assistant => "assistant", + Self::Tool => "tool", + } + } +} + +/// Chat conversation with AI assistant +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct ChatConversation { + #[sqlx(flatten)] + #[serde(flatten)] + pub base: BaseFields, + pub team_profile_id: Uuid, + pub title: String, + pub is_active: bool, +} + +/// Chat message within a conversation +#[derive(Debug, Clone, 
Serialize, Deserialize, FromRow)]
+pub struct ChatMessage {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub conversation_id: Uuid,
+    /// Role: 'user', 'assistant', 'tool'
+    pub role: String,
+    pub content: Option<String>,
+    /// Tool calls made by assistant (JSON array)
+    pub tool_calls: serde_json::Value,
+    /// Tool results returned (JSON array)
+    pub tool_results: serde_json::Value,
+}
diff --git a/src/models/customer.rs b/src/models/customer.rs
new file mode 100644
index 0000000..0a77e4a
--- /dev/null
+++ b/src/models/customer.rs
@@ -0,0 +1,84 @@
+use chrono::NaiveDate;
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{AddressFields, BaseFields, ContactFields, EntityStatus};
+
+/// Top-level business entity
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Customer {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub status: EntityStatus,
+    pub start_date: Option<NaiveDate>,
+    pub end_date: Option<NaiveDate>,
+    pub billing_terms: Option<String>,
+    pub billing_email: Option<String>,
+    pub wave_customer_id: Option<String>,
+}
+
+impl Customer {
+    /// Check if customer is currently active based on dates
+    pub fn is_active(&self) -> bool {
+        if self.status != EntityStatus::Active {
+            return false;
+        }
+
+        let today = chrono::Utc::now().date_naive();
+
+        if let Some(start) = self.start_date {
+            if today < start {
+                return false;
+            }
+        }
+
+        if let Some(end) = self.end_date {
+            if today > end {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+/// Customer address (for customers without accounts)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct CustomerAddress {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub customer_id: Uuid,
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub address: AddressFields,
+    pub is_active: bool,
+    pub is_primary: bool,
+    pub name: Option<String>,
+    pub notes: Option<String>,
+}
+
+/// Customer contact
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct CustomerContact {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub customer_id: Uuid,
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub contact: ContactFields,
+    pub email: Option<String>,
+    pub is_active: bool,
+    pub is_primary: bool,
+    pub notes: Option<String>,
+}
+
+impl CustomerContact {
+    pub fn full_name(&self) -> String {
+        self.contact.full_name()
+    }
+}
diff --git a/src/models/event.rs b/src/models/event.rs
new file mode 100644
index 0000000..9a87a56
--- /dev/null
+++ b/src/models/event.rs
@@ -0,0 +1,258 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use serde_json::Value as JsonValue;
+use sqlx::FromRow;
+use uuid::Uuid;
+
+/// Event types for the audit trail system
+/// Organized by entity category for clarity
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)]
+#[sqlx(type_name = "event_type", rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum EventType {
+    // ==================== CUSTOMER EVENTS ====================
+    CustomerCreated,
+    CustomerUpdated,
+    CustomerDeleted,
+    CustomerStatusChanged,
+    CustomerAddressCreated,
+    CustomerAddressUpdated,
+    CustomerAddressDeleted,
+    CustomerContactCreated,
+    CustomerContactUpdated,
+    CustomerContactDeleted,
+
+    // ==================== ACCOUNT EVENTS ====================
+    AccountCreated,
+    AccountUpdated,
+    AccountDeleted,
+    AccountStatusChanged,
+    AccountAddressCreated,
+    AccountAddressUpdated,
+    AccountAddressDeleted,
+    AccountAddressPrimaryChanged,
+    AccountContactCreated,
+    AccountContactUpdated,
+    AccountContactDeleted,
+    AccountContactPrimaryChanged,
+
+    // ==================== SERVICE EVENTS ====================
+    ServiceCreated,
+    ServiceUpdated,
+    ServiceDeleted,
+    ServiceStatusChanged,
+    ServiceAssigned,
+    ServiceUnassigned,
+    ServiceRescheduled,
+
+    // ==================== PROJECT EVENTS ====================
+    ProjectCreated,
+    ProjectUpdated,
+    ProjectDeleted,
+    ProjectStatusChanged,
+    ProjectAssigned,
+    ProjectUnassigned,
+    ProjectRescheduled,
+    ProjectAmountChanged,
+
+    // ==================== SCHEDULE EVENTS ====================
+    ScheduleCreated,
+    ScheduleUpdated,
+    ScheduleDeleted,
+    ScheduleFrequencyChanged,
+
+    // ==================== SESSION EVENTS ====================
+    ServiceSessionStarted,
+    ServiceSessionEnded,
+    ServiceSessionReverted,
+    ProjectSessionStarted,
+    ProjectSessionEnded,
+    ProjectSessionReverted,
+
+    // ==================== SESSION MEDIA EVENTS ====================
+    SessionNoteCreated,
+    SessionNoteUpdated,
+    SessionNoteDeleted,
+    SessionImageUploaded,
+    SessionImageUpdated,
+    SessionImageDeleted,
+    SessionVideoUploaded,
+    SessionVideoUpdated,
+    SessionVideoDeleted,
+    SessionMediaInternalFlagged,
+
+    // ==================== TASK EVENTS ====================
+    ServiceTaskCompleted,
+    ServiceTaskUncompleted,
+    ProjectTaskCompleted,
+    ProjectTaskUncompleted,
+
+    // ==================== SCOPE EVENTS ====================
+    ServiceScopeCreated,
+    ServiceScopeUpdated,
+    ServiceScopeDeleted,
+    ServiceScopeActivated,
+    ServiceScopeDeactivated,
+    ServiceScopeAreaCreated,
+    ServiceScopeAreaUpdated,
+    ServiceScopeAreaDeleted,
+    ServiceScopeTaskCreated,
+    ServiceScopeTaskUpdated,
+    ServiceScopeTaskDeleted,
+
+    ProjectScopeCreated,
+    ProjectScopeUpdated,
+    ProjectScopeDeleted,
+    ProjectScopeActivated,
+    ProjectScopeDeactivated,
+    ProjectScopeCategoryCreated,
+    ProjectScopeCategoryUpdated,
+    ProjectScopeCategoryDeleted,
+    ProjectScopeTaskCreated,
+    ProjectScopeTaskUpdated,
+    ProjectScopeTaskDeleted,
+
+    // ==================== SCOPE TEMPLATE EVENTS ====================
+    ScopeTemplateCreated,
+    ScopeTemplateUpdated,
+    ScopeTemplateDeleted,
+    ScopeTemplateActivated,
+    ScopeTemplateDeactivated,
+    ScopeTemplateInstantiated,
+
+    // ==================== PROFILE EVENTS ====================
+    TeamProfileCreated,
+    TeamProfileUpdated,
+    TeamProfileDeleted,
+    TeamProfileRoleChanged,
+    TeamProfileStatusChanged,
+    CustomerProfileCreated,
+    CustomerProfileUpdated,
+    CustomerProfileDeleted,
+    CustomerProfileStatusChanged,
+    CustomerProfileAccessGranted,
+    CustomerProfileAccessRevoked,
+
+    // ==================== FINANCIAL EVENTS ====================
+    LaborCreated,
+    LaborUpdated,
+    LaborDeleted,
+    LaborRateChanged,
+    RevenueCreated,
+    RevenueUpdated,
+    RevenueDeleted,
+    RevenueAmountChanged,
+    InvoiceCreated,
+    InvoiceUpdated,
+    InvoiceDeleted,
+    InvoiceStatusChanged,
+    InvoiceSent,
+    InvoicePaid,
+    InvoiceOverdue,
+
+    // ==================== REPORT EVENTS ====================
+    ReportCreated,
+    ReportUpdated,
+    ReportDeleted,
+    ReportSubmitted,
+    ReportApproved,
+
+    // ==================== CONVERSATION EVENTS ====================
+    ConversationCreated,
+    ConversationUpdated,
+    ConversationArchived,
+    ConversationUnarchived,
+    ConversationParticipantAdded,
+    ConversationParticipantRemoved,
+
+    // ==================== MESSAGE EVENTS ====================
+    MessageSent,
+    MessageUpdated,
+    MessageDeleted,
+    MessageRead,
+
+    // ==================== NOTIFICATION EVENTS ====================
+    NotificationRuleCreated,
+    NotificationRuleUpdated,
+    NotificationRuleDeleted,
+    NotificationRuleActivated,
+    NotificationRuleDeactivated,
+    NotificationCreated,
+    NotificationSent,
+    NotificationRead,
+    NotificationFailed,
+    NotificationDeliveryAttempted,
+    NotificationDeliverySucceeded,
+    NotificationDeliveryFailed,
+
+    // ==================== SYSTEM EVENTS ====================
+    SystemStartup,
+    SystemShutdown,
+    MonitoringTaskRun,
+    MonitoringAlertTriggered,
+    BackgroundJobStarted,
+    BackgroundJobCompleted,
+    BackgroundJobFailed,
+}
+
+/// Comprehensive audit trail event
+/// Note: Event uses explicit fields instead of BaseFields since it has
+/// a `timestamp` field that serves a different purpose than `created_at`
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Event {
+    pub id: Uuid,
+    pub event_type: EventType,
+    /// Entity type (e.g., "customer", "service", "project")
+    pub entity_type: String,
+    pub entity_id: Uuid,
+    /// Actor type (team_profile or customer_profile, or "system")
+    pub actor_type: Option<String>,
+    pub actor_id: Option<Uuid>,
+    /// Additional context (old_status, new_status, changed_fields, etc.)
+    pub metadata: Option<JsonValue>,
+    /// When the event occurred (business timestamp)
+    pub timestamp: DateTime<Utc>,
+    /// When the record was created (system timestamp)
+    pub created_at: DateTime<Utc>,
+}
+
+impl Event {
+    /// Create a new event
+    pub fn new(
+        event_type: EventType,
+        entity_type: impl Into<String>,
+        entity_id: Uuid,
+        actor_type: Option<String>,
+        actor_id: Option<Uuid>,
+        metadata: Option<JsonValue>,
+    ) -> Self {
+        let now = Utc::now();
+        Self {
+            id: Uuid::new_v4(),
+            event_type,
+            entity_type: entity_type.into(),
+            entity_id,
+            actor_type,
+            actor_id,
+            metadata,
+            timestamp: now,
+            created_at: now,
+        }
+    }
+
+    /// Create a system event (no actor)
+    pub fn system(
+        event_type: EventType,
+        entity_type: impl Into<String>,
+        entity_id: Uuid,
+        metadata: Option<JsonValue>,
+    ) -> Self {
+        Self::new(
+            event_type,
+            entity_type,
+            entity_id,
+            Some("system".to_string()),
+            None,
+            metadata,
+        )
+    }
+}
diff --git a/src/models/financial.rs b/src/models/financial.rs
new file mode 100644
index 0000000..38e1130
--- /dev/null
+++ b/src/models/financial.rs
@@ -0,0 +1,101 @@
+use chrono::{DateTime, NaiveDate, Utc};
+use rust_decimal::Decimal;
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, InvoiceStatus, PaymentType};
+
+/// Labor rate for an account address
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Labor {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub account_address_id: Uuid,
+    pub amount: Decimal,
+    pub start_date: NaiveDate,
+    pub end_date: Option<NaiveDate>,
+}
+
+impl Labor {
+    /// Check if labor rate is active on a given date
+    pub fn is_active_on(&self, date: NaiveDate) -> bool {
+        if date < self.start_date {
+            return false;
+        }
+
+        if let Some(end) = self.end_date {
+            if date > end {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+/// Revenue expectation for an account
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Revenue {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub account_id: Uuid,
+    pub amount: Decimal,
+    pub start_date: NaiveDate,
+    pub end_date: Option<NaiveDate>,
+    pub wave_service_id: Option<String>,
+}
+
+impl Revenue {
+    /// Check if revenue is active on a given date
+    pub fn is_active_on(&self, date: NaiveDate) -> bool {
+        if date < self.start_date {
+            return false;
+        }
+
+        if let Some(end) = self.end_date {
+            if date > end {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+/// Customer invoice
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Invoice {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub customer_id: Uuid,
+    pub start_date: NaiveDate,
+    pub end_date: NaiveDate,
+    pub status: InvoiceStatus,
+    pub date_paid: Option<NaiveDate>,
+    pub payment_type: Option<PaymentType>,
+    pub wave_invoice_id: Option<String>,
+}
+
+/// Many-to-many: Invoice to projects (join table with amount snapshot)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct InvoiceProject {
+    pub id: Uuid,
+    pub invoice_id: Uuid,
+    pub project_id: Uuid,
+    pub amount: Decimal,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Many-to-many: Invoice to revenues (join table with amount snapshot)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct InvoiceRevenue {
+    pub id: Uuid,
+    pub invoice_id: Uuid,
+    pub revenue_id: Uuid,
+    pub amount: Decimal,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/src/models/mcp.rs b/src/models/mcp.rs
new file mode 100644
index 0000000..44edef5
--- /dev/null
+++ b/src/models/mcp.rs
@@ -0,0 +1,128 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, TeamRole};
+
+// ==================== MCP SERVER REGISTRY ====================
+
+/// MCP transport type for server connections
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum McpTransportType {
+    Stdio,
+    Sse,
+    Websocket,
+}
+
+impl McpTransportType {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            Self::Stdio => "stdio",
+            Self::Sse => "sse",
+            Self::Websocket => "websocket",
+        }
+    }
+}
+
+/// MCP tool execution status
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum McpExecutionStatus {
+    Pending,
+    Running,
+    Success,
+    Error,
+}
+
+impl McpExecutionStatus {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            Self::Pending => "pending",
+            Self::Running => "running",
+            Self::Success => "success",
+            Self::Error => "error",
+        }
+    }
+}
+
+/// MCP Server definition
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct McpServer {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub description: Option<String>,
+    /// Transport type: 'stdio', 'sse', 'websocket'
+    pub transport_type: String,
+    /// For stdio: command to run
+    pub command: Option<String>,
+    /// Command arguments (JSON array)
+    pub args: serde_json::Value,
+    /// Environment variables (JSON object)
+    pub env: serde_json::Value,
+    /// For sse/websocket: URL to connect to
+    pub url: Option<String>,
+    pub is_active: bool,
+    /// Rate limit: max calls per minute
+    pub rate_limit_per_minute: Option<i32>,
+    /// Rate limit: max calls per hour
+    pub rate_limit_per_hour: Option<i32>,
+}
+
+/// MCP Tool provided by a server
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct McpTool {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub server_id: Uuid,
+    pub name: String,
+    pub description: Option<String>,
+    /// JSON Schema for tool input
+    pub input_schema: serde_json::Value,
+    pub is_active: bool,
+}
+
+/// Role-based permission for an MCP tool
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct McpToolPermission {
+    pub id: Uuid,
+    pub created_at: DateTime<Utc>,
+    pub tool_id: Uuid,
+    pub role: TeamRole,
+}
+
+/// User-specific permission override for an MCP tool
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct McpToolUserPermission {
+    pub id: Uuid,
+    pub created_at: DateTime<Utc>,
+    pub tool_id: Uuid,
+    pub team_profile_id: Uuid,
+    /// Explicit allow/deny (overrides role-based permission)
+    pub is_allowed: bool,
+}
+
+/// MCP Tool execution log entry
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct McpToolExecution {
+    pub id: Uuid,
+    pub created_at: DateTime<Utc>,
+    pub tool_id: Uuid,
+    pub team_profile_id: Uuid,
+    pub chat_message_id: Option<Uuid>,
+    /// Input provided to the tool (JSON)
+    pub input: serde_json::Value,
+    /// Output from the tool (JSON)
+    pub output: Option<serde_json::Value>,
+    /// Error message if execution failed
+    pub error: Option<String>,
+    pub started_at: DateTime<Utc>,
+    pub completed_at: Option<DateTime<Utc>>,
+    pub duration_ms: Option<i64>,
+    /// Status: 'pending', 'running', 'success', 'error'
+    pub status: String,
+}
diff --git a/src/models/messaging.rs b/src/models/messaging.rs
new file mode 100644
index 0000000..b25ea6f
--- /dev/null
+++ b/src/models/messaging.rs
@@ -0,0 +1,89 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use serde_json::Value as JsonValue;
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, ConversationType};
+
+/// Message thread/conversation
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Conversation {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub subject: Option<String>,
+    pub conversation_type: ConversationType,
+    /// Entity this conversation is about (polymorphic)
+    pub entity_type: Option<String>,
+    pub entity_id: Option<Uuid>,
+    /// Who created the conversation (polymorphic)
+    pub created_by_type: Option<String>,
+    pub created_by_id: Option<Uuid>,
+    pub last_message_at: Option<DateTime<Utc>>,
+    pub is_archived: bool,
+    pub metadata: Option<JsonValue>,
+}
+
+/// User participation in a conversation
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ConversationParticipant {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub conversation_id: Uuid,
+    /// Participant type (team_profile or customer_profile)
+    pub participant_type: String,
+    pub participant_id: Uuid,
+    pub last_read_at: Option<DateTime<Utc>>,
+    /// Count of unread messages for this participant
+    pub unread_count: i32,
+    /// Whether notifications are muted
+    pub is_muted: bool,
+    /// User-specific archive (separate from conversation.is_archived)
+    pub is_archived: bool,
+    /// When the participant joined the conversation
+    pub joined_at: DateTime<Utc>,
+}
+
+impl ConversationParticipant {
+    /// Check if participant has unread messages
+    pub fn has_unread(&self) -> bool {
+        self.unread_count > 0
+    }
+}
+
+/// Individual message in a conversation
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Message {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub conversation_id: Uuid,
+    /// Author type (team_profile or customer_profile)
+    pub author_type: String,
+    pub author_id: Uuid,
+    pub content: String,
+    pub is_deleted: bool,
+    /// For threaded replies
+    pub reply_to_id: Option<Uuid>,
+    /// File attachments (JSONB array of attachment metadata)
+    pub attachments: Option<JsonValue>,
+    /// System-generated message (e.g., "User joined conversation")
+    pub is_system_message: bool,
+    /// Additional metadata (formatting, mentions, etc.)
+    pub metadata: Option<JsonValue>,
+}
+
+/// Tracks when specific messages are read by specific participants
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct MessageReadReceipt {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub message_id: Uuid,
+    /// Reader type (team_profile or customer_profile)
+    pub reader_type: String,
+    pub reader_id: Uuid,
+    pub read_at: DateTime<Utc>,
+}
diff --git a/src/models/mod.rs b/src/models/mod.rs
new file mode 100644
index 0000000..5480e22
--- /dev/null
+++ b/src/models/mod.rs
@@ -0,0 +1,31 @@
+mod base;
+mod customer;
+mod account;
+mod service;
+mod project;
+mod scope;
+mod session;
+mod profile;
+mod financial;
+mod messaging;
+mod notification;
+mod event;
+mod mcp;
+mod chat;
+mod punchlist;
+
+pub use base::*;
+pub use customer::*;
+pub use account::*;
+pub use service::*;
+pub use project::*;
+pub use scope::*;
+pub use session::*;
+pub use profile::*;
+pub use financial::*;
+pub use messaging::*;
+pub use notification::*;
+pub use event::*;
+pub use mcp::*;
+pub use chat::*;
+pub use punchlist::*;
diff --git a/src/models/notification.rs b/src/models/notification.rs
new file mode 100644
index 0000000..a5bd394
--- /dev/null
+++ b/src/models/notification.rs
@@ -0,0 +1,89 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use serde_json::Value as JsonValue;
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, DeliveryStatus, NotificationChannel, NotificationStatus};
+
+/// Admin-defined rule for triggering notifications
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct NotificationRule {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub description: Option<String>,
+    pub is_active: bool,
+    /// Event types that trigger this rule (stored as JSONB array of strings)
+    pub event_types: JsonValue,
+    /// Channels to deliver through (stored as JSONB array of strings)
+    pub channels: JsonValue,
+    /// Target roles (stored as JSONB array of strings)
+    pub target_roles: Option<JsonValue>,
+    /// Custom conditions for triggering (JSONB)
+    pub conditions: Option<JsonValue>,
+    /// Notification subject template
+    pub subject_template: Option<String>,
+    /// Notification body template
+    pub body_template: Option<String>,
+}
+
+/// Individual notification instance
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Notification {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    /// Recipient type (team_profile or customer_profile)
+    pub recipient_type: String,
+    pub recipient_id: Uuid,
+    /// The rule that triggered this notification (optional for system notifications)
+    pub rule_id: Option<Uuid>,
+    /// The event that triggered this notification
+    pub event_id: Option<Uuid>,
+    pub status: NotificationStatus,
+    pub subject: String,
+    pub body: String,
+    /// URL to navigate to when notification is clicked
+    pub action_url: Option<String>,
+    pub read_at: Option<DateTime<Utc>>,
+    pub metadata: Option<JsonValue>,
+}
+
+/// Tracks delivery attempts per channel
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct NotificationDelivery {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub notification_id: Uuid,
+    pub channel: NotificationChannel,
+    pub status: DeliveryStatus,
+    pub attempts: i32,
+    pub last_attempt_at: Option<DateTime<Utc>>,
+    pub sent_at: Option<DateTime<Utc>>,
+    pub delivered_at: Option<DateTime<Utc>>,
+    pub error_message: Option<String>,
+    /// External service ID (e.g., email provider message ID)
+    pub external_id: Option<String>,
+    pub metadata: Option<JsonValue>,
+}
+
+/// Many-to-many: NotificationRule targets specific TeamProfiles
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct NotificationRuleTeamProfile {
+    pub id: Uuid,
+    pub rule_id: Uuid,
+    pub team_profile_id: Uuid,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Many-to-many: NotificationRule targets specific CustomerProfiles
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct NotificationRuleCustomerProfile {
+    pub id: Uuid,
+    pub rule_id: Uuid,
+    pub customer_profile_id: Uuid,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/src/models/profile.rs b/src/models/profile.rs
new file mode 100644
index 0000000..104491f
--- /dev/null
+++ b/src/models/profile.rs
@@ -0,0 +1,113 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use rust_decimal::Decimal;
+
+use super::{BaseFields, ContactFields, EntityStatus, ReportStatus, TeamRole};
+
+/// Internal team member profile
+/// Note: base.id IS the Kratos identity UUID - no separate ory_kratos_id
+/// This allows Oathkeeper's X-User-ID header to be used directly for profile lookup
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct TeamProfile {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub contact: ContactFields,
+    pub email: Option<String>,
+    pub role: TeamRole,
+    pub status: EntityStatus,
+    pub notes: Option<String>,
+}
+
+impl TeamProfile {
+    pub fn full_name(&self) -> String {
+        self.contact.full_name()
+    }
+
+    /// Check if this profile has at least the given role level
+    pub fn has_minimum_role(&self, role: TeamRole) -> bool {
+        let self_level = match self.role {
+            TeamRole::Admin => 3,
+            TeamRole::TeamLeader => 2,
+            TeamRole::TeamMember => 1,
+        };
+
+        let required_level = match role {
+            TeamRole::Admin => 3,
+            TeamRole::TeamLeader => 2,
+            TeamRole::TeamMember => 1,
+        };
+
+        self_level >= required_level
+    }
+}
+
+/// External customer profile
+/// Note: base.id IS the Kratos identity UUID - no separate ory_kratos_id
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct CustomerProfile {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub contact: ContactFields,
+    pub email: Option<String>,
+    pub status: EntityStatus,
+    pub notes: Option<String>,
+}
+
+impl CustomerProfile {
+    pub fn full_name(&self) -> String {
+        self.contact.full_name()
+    }
+}
+
+/// Many-to-many: CustomerProfile access to Customer (join table)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct CustomerProfileAccess {
+    pub id: Uuid,
+    pub customer_profile_id: Uuid,
+    pub customer_id: Uuid,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Team member work report for a pay period
+/// Contains services and projects worked, with snapshot labor amounts for payroll
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Report {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub team_profile_id: Uuid,
+    pub start_date: chrono::NaiveDate,
+    pub end_date: chrono::NaiveDate,
+    pub status: ReportStatus,
+}
+
+/// Report-Service association with snapshot labor share
+/// labor_share is calculated and stored when added - immutable for payroll accuracy
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ReportService {
+    pub id: Uuid,
+    pub report_id: Uuid,
+    pub service_id: Uuid,
+    pub labor_share: Decimal,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Report-Project association with snapshot labor share
+/// labor_share is calculated and stored when added - immutable for payroll accuracy
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ReportProject {
+    pub id: Uuid,
+    pub report_id: Uuid,
+    pub project_id: Uuid,
+    pub labor_share: Decimal,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/src/models/project.rs b/src/models/project.rs
new file mode 100644
index 0000000..cf121c8
--- /dev/null
+++ b/src/models/project.rs
@@ -0,0 +1,58 @@
+use chrono::{DateTime, NaiveDate, Utc};
+use rust_decimal::Decimal;
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, WorkStatus};
+
+/// One-time project or job
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Project {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub customer_id: Uuid,
+    pub name: String,
+    pub date: NaiveDate,
+    pub status: WorkStatus,
+    pub labor: Option<Decimal>,
+    pub amount: Option<Decimal>,
+    pub notes: Option<String>,
+    pub calendar_event_id: Option<String>,
+    pub wave_service_id: Option<String>,
+
+    // Either account_address_id OR freeform address fields (mutually exclusive)
+    // Note: Cannot use AddressFields here as these are all Option
+    pub account_address_id: Option<Uuid>,
+    pub street_address: Option<String>,
+    pub city: Option<String>,
+    pub state: Option<String>,
+    pub zip_code: Option<String>,
+}
+
+impl Project {
+    /// Validate that either account_address_id OR freeform address is provided, not both
+    pub fn validate_address(&self) -> Result<(), &'static str> {
+        let has_account_address = self.account_address_id.is_some();
+        let has_freeform = self.street_address.is_some()
+            || self.city.is_some()
+            || self.state.is_some()
+            || self.zip_code.is_some();
+
+        match (has_account_address, has_freeform) {
+            (true, true) => Err("Cannot have both account_address_id and freeform address"),
+            (false, false) => Err("Must have either account_address_id or freeform address"),
+            _ => Ok(()),
+        }
+    }
+}
+
+/// Team member assignment to a project (many-to-many join table)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectTeamMember {
+    pub id: Uuid,
+    pub project_id: Uuid,
+    pub team_profile_id: Uuid,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/src/models/punchlist.rs b/src/models/punchlist.rs
new file mode 100644
index 0000000..3e90533
--- /dev/null
+++ b/src/models/punchlist.rs
@@ -0,0 +1,79 @@
+use chrono::{DateTime, NaiveDate, Utc};
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::BaseFields;
+
+// ==================== SERVICE PUNCHLISTS ====================
+
+/// Service punchlist (customer-facing snapshot from completed session)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServicePunchlist {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub session_id: Uuid,
+    /// Denormalized for easy access
+    pub customer_id: Uuid,
+    pub account_id: Uuid,
+    pub account_address_id: Uuid,
+    pub date: NaiveDate,
+    /// URL to exported PDF
+    pub pdf_url: Option<String>,
+    /// When the punchlist was exported to PDF
+    pub exported_at: Option<DateTime<Utc>>,
+    pub notes: Option<String>,
+}
+
+/// Individual task entry on a service punchlist (snapshot of scope task)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServicePunchlistItem {
+    pub id: Uuid,
+    pub created_at: DateTime<Utc>,
+    pub punchlist_id: Uuid,
+    /// Reference to original task (nullable if task was deleted)
+    pub task_id: Option<Uuid>,
+    /// Snapshot of checklist_description at time of punchlist creation
+    pub checklist_description: String,
+    pub order: i32,
+    pub is_completed: bool,
+    pub completed_at: Option<DateTime<Utc>>,
+    pub completed_by_id: Option<Uuid>,
+}
+
+// ==================== PROJECT PUNCHLISTS ====================
+
+/// Project punchlist (customer-facing snapshot from completed session)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectPunchlist {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub session_id: Uuid,
+    /// Denormalized for easy access
+    pub customer_id: Uuid,
+    pub project_id: Uuid,
+    pub date: NaiveDate,
+    /// URL to exported PDF
+    pub pdf_url: Option<String>,
+    /// When the punchlist was exported to PDF
+    pub exported_at: Option<DateTime<Utc>>,
+    pub notes: Option<String>,
+}
+
+/// Individual task entry on a project punchlist (snapshot of scope task)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectPunchlistItem {
+    pub id: Uuid,
+    pub created_at: DateTime<Utc>,
+    pub punchlist_id: Uuid,
+    /// Reference to original task (nullable if task was deleted)
+    pub task_id: Option<Uuid>,
+    /// Snapshot of checklist_description at time of punchlist creation
+    pub checklist_description: String,
+    pub order: i32,
+    pub is_completed: bool,
+    pub completed_at: Option<DateTime<Utc>>,
+    pub completed_by_id: Option<Uuid>,
+}
diff --git a/src/models/scope.rs b/src/models/scope.rs
new file mode 100644
index 0000000..758417f
--- /dev/null
+++ b/src/models/scope.rs
@@ -0,0 +1,181 @@
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, TaskFrequency};
+
+// ==================== SERVICE SCOPES ====================
+
+/// Service scope assigned to an account address (location)
+/// Exactly ONE active scope per account address
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScope {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub account_id: Uuid,
+    pub account_address_id: Uuid,
+    pub description: Option<String>,
+    pub is_active: bool,
+}
+
+/// Area within a service scope (sub-division)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScopeArea {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub scope_id: Uuid,
+    pub name: String,
+    pub order: i32,
+}
+
+/// Task within a service scope area
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScopeTask {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub area_id: Uuid,
+    /// Customer-facing description
+    pub scope_description: String,
+    /// QA/punchlist formatted description
+    pub checklist_description: String,
+    /// Team member work instructions (shown during sessions)
+    pub session_description: String,
+    pub frequency: TaskFrequency,
+    pub order: i32,
+    pub estimated_minutes: Option<i32>,
+}
+
+// ==================== PROJECT SCOPES ====================
+
+/// Scope for project work (assigned directly to project, not location-based)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScope {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub project_id: Uuid,
+    /// Optional: if project is at a specific location
+    pub account_id: Option<Uuid>,
+    pub account_address_id: Option<Uuid>,
+    pub description: Option<String>,
+    pub is_active: bool,
+}
+
+/// Category within a project scope (equivalent to Area for services)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScopeCategory {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub scope_id: Uuid,
+    pub name: String,
+    pub order: i32,
+}
+
+/// Task within a project scope category
+/// Note: Unlike service tasks, project tasks don't have frequency - they are one-time
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScopeTask {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub category_id: Uuid,
+    /// Customer-facing description
+    pub scope_description: String,
+    /// QA/punchlist formatted description
+    pub checklist_description: String,
+    /// Team member work instructions (shown during sessions)
+    pub session_description: String,
+    pub order: i32,
+    pub estimated_minutes: Option<i32>,
+}
+
+// ==================== SERVICE SCOPE TEMPLATES ====================
+
+/// Reusable service scope template (instantiated as ServiceScope)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScopeTemplate {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub description: Option<String>,
+    pub is_active: bool,
+}
+
+/// Area within a service scope template
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScopeTemplateArea {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub template_id: Uuid,
+    pub name: String,
+    pub order: i32,
+}
+
+/// Task within a service scope template area
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ServiceScopeTemplateTask {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub area_id: Uuid,
+    /// Customer-facing description
+    pub scope_description: String,
+    /// QA/punchlist formatted description
+    pub checklist_description: String,
+    /// Team member work instructions (shown during sessions)
+    pub session_description: String,
+    pub frequency: TaskFrequency,
+    pub order: i32,
+    pub estimated_minutes: Option<i32>,
+}
+
+// ==================== PROJECT SCOPE TEMPLATES ====================
+
+/// Reusable project scope template (instantiated as ProjectScope)
+/// Unlike service templates, project templates don't have frequency on tasks
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScopeTemplate {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub name: String,
+    pub description: Option<String>,
+    pub is_active: bool,
+}
+
+/// Category within a project scope template (equivalent to Area)
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScopeTemplateCategory {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub template_id: Uuid,
+    pub name: String,
+    pub order: i32,
+}
+
+/// Task within a project scope template category
+/// Note: No frequency field - project tasks are one-time
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct ProjectScopeTemplateTask {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub category_id: Uuid,
+    /// Customer-facing description
+    pub scope_description: String,
+    /// QA/punchlist formatted description
+    pub checklist_description: String,
+    /// Team member work instructions (shown during sessions)
+    pub session_description: String,
+    pub order: i32,
+    pub estimated_minutes: Option<i32>,
+}
diff --git a/src/models/service.rs b/src/models/service.rs
new file mode 100644
index 0000000..94d3585
--- /dev/null
+++ b/src/models/service.rs
@@ -0,0 +1,126 @@
+use chrono::{DateTime, NaiveDate, Utc};
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+use uuid::Uuid;
+
+use super::{BaseFields, WorkStatus};
+
+/// Recurring cleaning service at an account address
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Service {
+    #[sqlx(flatten)]
+    #[serde(flatten)]
+    pub base: BaseFields,
+    pub account_id: Uuid,
+    pub account_address_id: Uuid,
+    pub date: NaiveDate,
+    pub status: WorkStatus,
+    pub notes: Option<String>,
+    pub calendar_event_id: Option<String>,
+}
+
+/// Team member assignment to a service (many-to-many join
/// Team member assignment to a service (many-to-many join table)
///
/// NOTE(review): this chunk's extraction stripped generic parameters;
/// `DateTime` is reconstructed as `DateTime<Utc>` per this file's imports -
/// TODO confirm.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceTeamMember {
    // Plain join row - does not embed BaseFields like most models here.
    pub id: Uuid,
    pub service_id: Uuid,
    pub team_profile_id: Uuid,
    pub created_at: DateTime<Utc>,
}

/// Recurring service schedule for an account address
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Schedule {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub account_address_id: Uuid,
    pub name: Option<String>,
    // One flag per weekday; consulted by is_scheduled_for_weekday /
    // should_generate_for_weekday below.
    pub monday: bool,
    pub tuesday: bool,
    pub wednesday: bool,
    pub thursday: bool,
    pub friday: bool,
    pub saturday: bool,
    pub sunday: bool,
    // When true, a Fri-Sun "weekend window" overrides the individual
    // friday/saturday/sunday flags (see should_generate_for_weekday).
    pub weekend_service: bool,
    pub schedule_exception: Option<String>,
    // Inclusive activity window; a missing bound is unbounded on that side
    // (see is_active_on).
    pub start_date: Option<NaiveDate>,
    pub end_date: Option<NaiveDate>,
}

impl Schedule {
    /// Check if service is scheduled for a given weekday (0=Monday, 6=Sunday)
    ///
    /// With `weekend_service` set, Saturday/Sunday (weekday >= 5) report as
    /// scheduled. Note this intentionally differs from generation, where the
    /// weekend window generates on Friday instead - see
    /// `should_generate_for_weekday`.
    pub fn is_scheduled_for_weekday(&self, weekday: u32) -> bool {
        if self.weekend_service {
            return weekday >= 5; // Saturday or Sunday
        }

        match weekday {
            0 => self.monday,
            1 => self.tuesday,
            2 => self.wednesday,
            3 => self.thursday,
            4 => self.friday,
            5 => self.saturday,
            6 => self.sunday,
            // Out-of-range weekday values are never scheduled.
            _ => false,
        }
    }

    /// Check if a service should be generated for a given weekday
    /// Returns (should_generate, optional_note)
    ///
    /// Logic mirrors Django backend:
    /// - Mon-Thu (0-3): Use individual day flags
    /// - Friday (4): If weekend_service, generate with note; otherwise use friday flag
    /// - Sat-Sun (5-6): If weekend_service, skip (Friday covers weekend); otherwise use day flags
    pub fn should_generate_for_weekday(&self, weekday: u32) -> (bool, Option<&'static str>) {
        match weekday {
            // Mon-Thu: always use individual day flags
            0 => (self.monday, None),
            1 => (self.tuesday, None),
            2 => (self.wednesday, None),
            3 => (self.thursday, None),
            // Friday: weekend_service takes precedence, otherwise use friday flag
            4 => {
                if self.weekend_service {
                    (true, Some("Weekend service window (Fri-Sun)"))
                } else {
                    (self.friday, None)
                }
            }
            // Sat-Sun: only use day flags if weekend_service is OFF
            5 => {
                if self.weekend_service {
                    (false, None)
                } else {
                    (self.saturday, None)
                }
            }
            6 => {
                if self.weekend_service {
                    (false, None)
                } else {
                    (self.sunday, None)
                }
            }
            // Out-of-range weekday values never generate.
            _ => (false, None),
        }
    }

    /// Check if schedule is active for a given date.
    /// Bounds are inclusive; a `None` bound is unbounded on that side.
    pub fn is_active_on(&self, date: NaiveDate) -> bool {
        if let Some(start) = self.start_date {
            if date < start {
                return false;
            }
        }
        if let Some(end) = self.end_date {
            if date > end {
                return false;
            }
        }
        true
    }
}

// --- file boundary (per diff header): src/models/session.rs ---

use chrono::{DateTime, NaiveDate, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use uuid::Uuid;

use super::{BaseFields, MediaFields, NoteFields};

// ==================== SERVICE SESSIONS ====================

/// Active work session during a service
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceSession {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub service_id: Uuid,
    pub account_id: Uuid,
    pub account_address_id: Uuid,
    pub customer_id: Uuid,
    pub scope_id: Option<Uuid>,
    // Session start; `end` stays None while the session is open.
    pub start: DateTime<Utc>,
    pub end: Option<DateTime<Utc>>,
    pub created_by_id: Uuid,
    // Set when the session is closed.
    pub closed_by_id: Option<Uuid>,
    pub date: NaiveDate,
}

impl ServiceSession {
    /// Check if session is still active (not ended)
    pub fn is_active(&self) -> bool {
        self.end.is_none()
    }

    /// Calculate duration in seconds.
    /// Returns None while the session is still open.
    pub fn duration_seconds(&self) -> Option<i64> {
        self.end.map(|end| (end - self.start).num_seconds())
    }
}

/// Note attached to a service session
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceSessionNote {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    // Shared note columns (author, text, ...) flattened into this row.
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub note: NoteFields,
}
/// Image attached to a service session
///
/// NOTE(review): generic parameters were stripped during extraction; `Option`
/// inner types in this section are reconstructed - TODO confirm.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceSessionImage {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    pub title: Option<String>,
    /// S3 path to the original image
    pub image: String,
    /// S3 path to the thumbnail
    pub thumbnail: Option<String>,
    // Shared media columns flattened into this row.
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub media: MediaFields,
}

/// Video attached to a service session
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceSessionVideo {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    pub title: Option<String>,
    /// S3 path to the video file
    pub video: String,
    /// S3 path to the thumbnail
    pub thumbnail: Option<String>,
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub media: MediaFields,
    // Playback length / storage size; presumably Option<i32>/Option<i64> - TODO confirm.
    pub duration_seconds: Option<i32>,
    pub file_size_bytes: Option<i64>,
}

/// Task completion record for a service
/// Note: Linked to sessions via service_session_completed_tasks junction table
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ServiceTaskCompletion {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub service_id: Uuid,
    pub task_id: Uuid,
    pub account_address_id: Option<Uuid>,
    pub completed_by_id: Uuid,
    pub completed_at: DateTime<Utc>,
    // Denormalized completion period, stored alongside the timestamp.
    pub year: i32,
    pub month: i32,
    pub notes: Option<String>,
}

// ==================== PROJECT SESSIONS ====================

/// Active work session during a project
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ProjectSession {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub project_id: Uuid,
    // Unlike ServiceSession, the account linkage is optional here.
    pub account_id: Option<Uuid>,
    pub account_address_id: Option<Uuid>,
    pub customer_id: Uuid,
    pub scope_id: Option<Uuid>,
    // Session start; `end` stays None while the session is open.
    pub start: DateTime<Utc>,
    pub end: Option<DateTime<Utc>>,
    pub created_by_id: Uuid,
    pub closed_by_id: Option<Uuid>,
    pub date: NaiveDate,
}

impl ProjectSession {
    /// Check if session is still active (not ended)
    pub fn is_active(&self) -> bool {
        self.end.is_none()
    }

    /// Calculate duration in seconds.
    /// Returns None while the session is still open.
    pub fn duration_seconds(&self) -> Option<i64> {
        self.end.map(|end| (end - self.start).num_seconds())
    }
}

/// Note attached to a project session
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ProjectSessionNote {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub note: NoteFields,
}

/// Image attached to a project session
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ProjectSessionImage {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    pub title: Option<String>,
    /// S3 path to the original image
    pub image: String,
    /// S3 path to the thumbnail
    pub thumbnail: Option<String>,
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub media: MediaFields,
}

/// Video attached to a project session
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ProjectSessionVideo {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub session_id: Uuid,
    pub title: Option<String>,
    /// S3 path to the video file
    pub video: String,
    /// S3 path to the thumbnail
    pub thumbnail: Option<String>,
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub media: MediaFields,
    pub duration_seconds: Option<i32>,
    pub file_size_bytes: Option<i64>,
}

/// Task completion record for a project
/// Note: Linked to sessions via project_session_completed_tasks junction table
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct ProjectTaskCompletion {
    #[sqlx(flatten)]
    #[serde(flatten)]
    pub base: BaseFields,
    pub project_id: Uuid,
    pub task_id: Uuid,
    pub account_id: Option<Uuid>,
    pub account_address_id: Option<Uuid>,
    pub completed_by_id: Uuid,
    pub completed_at: DateTime<Utc>,
    // Note: no year/month denormalization here, unlike ServiceTaskCompletion.
    pub notes: Option<String>,
}
//! GraphQL route handlers

use axum::{
    body::Body,
    extract::State,
    http::{header::CONTENT_TYPE, Request, StatusCode},
    response::IntoResponse,
};
use async_graphql::{futures_util::TryStreamExt, http::MultipartOptions};
use async_graphql_axum::GraphQLResponse;
use tokio_util::compat::TokioAsyncReadCompatExt;

use crate::auth::{OptionalUser, ProfileType, RequiredUser};
use crate::db::Database;
use crate::graphql::NexusSchema;
use crate::models::TeamRole;

/// GraphQL handler - supports both JSON and multipart requests (for file uploads)
///
/// Steps: capture the Content-Type, adapt the axum body stream into a
/// futures-compatible `AsyncRead`, let async-graphql parse it (JSON or
/// multipart), attach the authenticated user context if present, and execute.
/// Parse failures are returned as GraphQL errors, not HTTP errors.
///
/// NOTE(review): generic parameters were stripped during extraction;
/// `Request<Body>` is reconstructed from the imports - TODO confirm.
pub async fn graphql_handler(
    State(schema): State<NexusSchema>,
    user: OptionalUser,
    req: Request<Body>,
) -> GraphQLResponse {
    // Get content type for parsing
    let content_type = req
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .map(|s| s.to_string());

    // Convert axum body stream to AsyncRead (futures-compatible)
    let body_stream = req
        .into_body()
        .into_data_stream()
        .map_err(|e| std::io::Error::other(e.to_string()));
    let body_reader = tokio_util::io::StreamReader::new(body_stream).compat();

    // Configure multipart options (50MB max file, 10 files max)
    let opts = MultipartOptions::default()
        .max_file_size(50 * 1024 * 1024)
        .max_num_files(10);

    // Parse request (handles both JSON and multipart automatically)
    let mut request = match async_graphql::http::receive_body(content_type, body_reader, opts).await
    {
        Ok(req) => req,
        Err(e) => {
            // Surface the parse failure as a GraphQL error response.
            return async_graphql::Response::from_errors(vec![async_graphql::ServerError::new(
                format!("Failed to parse GraphQL request: {}", e),
                None,
            )])
            .into();
        }
    };

    // Add user context to GraphQL context if present
    if let Some(ctx) = user.0 {
        request = request.data(ctx);
    }

    schema.execute(request).await.into()
}

/// GraphQL Playground handler - admin only
///
/// Authorization is two-stage: the caller must (1) be a Team profile and
/// (2) have the Admin role in `team_profiles`. All denial paths log a
/// warning and return 403; DB errors return 500.
pub async fn graphql_playground(
    State(db): State<Database>,
    user: RequiredUser,
) -> Result<impl IntoResponse, StatusCode> {
    // Must be a team profile
    if user.0.profile_type != ProfileType::Team {
        tracing::warn!(
            user_id = %user.0.user_id,
            "Non-team user attempted to access GraphQL playground"
        );
        return Err(StatusCode::FORBIDDEN);
    }

    // Check if user is admin
    // NOTE(review): binds the auth user_id as team_profiles.id - assumes the
    // profile's primary key equals the auth user id; confirm against schema.
    let pool = db.pool().await;
    let role: Option<TeamRole> = sqlx::query_scalar(
        "SELECT role FROM team_profiles WHERE id = $1",
    )
    .bind(user.0.user_id)
    .fetch_optional(&*pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Failed to fetch team profile role");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;

    match role {
        Some(TeamRole::Admin) => {
            // Serve the playground, pointed at the GraphQL endpoint and
            // configured to send cookies with requests.
            Ok(axum::response::Html(async_graphql::http::playground_source(
                async_graphql::http::GraphQLPlaygroundConfig::new("/v2/graphql")
                    .with_setting("request.credentials", "include"),
            )))
        }
        Some(_) => {
            tracing::warn!(
                user_id = %user.0.user_id,
                "Non-admin user attempted to access GraphQL playground"
            );
            Err(StatusCode::FORBIDDEN)
        }
        None => {
            tracing::warn!(
                user_id = %user.0.user_id,
                "Team profile not found for playground access"
            );
            Err(StatusCode::FORBIDDEN)
        }
    }
}

// --- file boundary (per diff header): src/routes/media.rs ---

//! Media proxy route for authenticated S3 access
//!
//! Proxies requests to S3/Garage with authentication, allowing
//! session media to be accessed only by authenticated users.

use std::sync::Arc;

use axum::{
    body::Body,
    extract::{Path, State},
    http::{header, Response, StatusCode},
    response::IntoResponse,
};

use crate::services::S3Service;

/// Shared state for media routes
#[derive(Clone)]
pub struct MediaState {
    // Shared S3 client used by get_media/head_media.
    pub s3: Arc<S3Service>,
}
/// Proxy a file from S3
///
/// GET /api/media/{path}
///
/// This endpoint requires authentication (handled by middleware).
/// It fetches the file from S3 and streams it to the client.
/// On fetch failure it logs a warning and returns 404 with a JSON error body
/// (errors are not distinguished - any S3 failure maps to "not found").
pub async fn get_media(
    State(state): State<MediaState>,
    Path(path): Path<String>,
) -> impl IntoResponse {
    match state.s3.get_file(&path).await {
        Ok((data, content_type)) => {
            // Build response with proper headers
            // (unwrap is on a builder with only valid header values).
            Response::builder()
                .status(StatusCode::OK)
                .header(header::CONTENT_TYPE, content_type)
                .header(header::CONTENT_LENGTH, data.len())
                .header(header::CACHE_CONTROL, "private, max-age=86400") // Cache for 1 day
                .body(Body::from(data))
                .unwrap()
        }
        Err(e) => {
            tracing::warn!(path = %path, error = %e, "Failed to fetch media from S3");
            Response::builder()
                .status(StatusCode::NOT_FOUND)
                .header(header::CONTENT_TYPE, "application/json")
                .body(Body::from(r#"{"error": "Media not found"}"#))
                .unwrap()
        }
    }
}

/// Check if a file exists in S3 (HEAD request).
/// Returns a bare 200 or 404 status with no body.
pub async fn head_media(
    State(state): State<MediaState>,
    Path(path): Path<String>,
) -> impl IntoResponse {
    if state.s3.file_exists(&path).await {
        StatusCode::OK
    } else {
        StatusCode::NOT_FOUND
    }
}

// --- file boundary (per diff header): src/routes/mod.rs ---

pub mod graphql;
pub mod media;

use axum::{
    extract::State,
    http::StatusCode,
    response::IntoResponse,
    routing::get,
    Json, Router,
};
use serde::Serialize;

use crate::db::Database;

pub use graphql::{graphql_handler, graphql_playground};
pub use media::{get_media, head_media, MediaState};

/// JSON body for the liveness endpoint.
#[derive(Serialize)]
struct HealthResponse {
    status: &'static str,
    version: &'static str,
}

/// JSON body for the readiness endpoint; adds database connectivity state.
#[derive(Serialize)]
struct ReadyResponse {
    status: &'static str,
    version: &'static str,
    database: &'static str,
}

/// Basic liveness check - is the process running?
/// Always 200; version comes from Cargo at compile time.
async fn health() -> impl IntoResponse {
    Json(HealthResponse {
        status: "ok",
        version: env!("CARGO_PKG_VERSION"),
    })
}
/// Readiness check - can we actually serve requests?
/// Tests database connectivity to catch stale credentials.
/// Returns 200/"connected" on a successful `SELECT 1` round-trip,
/// 503/"disconnected" otherwise.
async fn ready(State(db): State<Database>) -> impl IntoResponse {
    let pool = db.pool().await;

    match sqlx::query("SELECT 1").execute(&*pool).await {
        Ok(_) => (
            StatusCode::OK,
            Json(ReadyResponse {
                status: "ok",
                version: env!("CARGO_PKG_VERSION"),
                database: "connected",
            }),
        ),
        Err(e) => {
            tracing::error!(error = %e, "Health check failed: database connection error");
            (
                StatusCode::SERVICE_UNAVAILABLE,
                Json(ReadyResponse {
                    status: "error",
                    version: env!("CARGO_PKG_VERSION"),
                    database: "disconnected",
                }),
            )
        }
    }
}

/// Build the health-check router.
/// NOTE(review): only /health and /health/ready are mounted here; the
/// GraphQL and media handlers re-exported above are presumably wired up
/// elsewhere - confirm.
pub fn router(db: Database) -> Router {
    Router::new()
        .route("/health", get(health))
        .route("/health/ready", get(ready))
        .with_state(db)
}

// --- file boundary (per diff header): src/services/email_templates.rs ---

//! Email template system for pre-defined email formats

use std::collections::HashMap;
use thiserror::Error;

/// Errors from template lookup/rendering.
#[derive(Debug, Error)]
pub enum TemplateError {
    #[error("Template not found: {0}")]
    NotFound(String),
    // NOTE(review): this variant is never produced by substitute_variables
    // below - unknown placeholders are left verbatim rather than erroring.
    #[error("Missing required variable: {0}")]
    MissingVariable(String),
}

/// Email template definition
#[derive(Debug, Clone)]
pub struct EmailTemplate {
    pub id: String,
    pub name: String,
    pub description: String,
    // Both templates use {{name}} placeholders (see substitute_variables).
    pub subject_template: String,
    pub body_template: String,
    /// Default values for template variables
    pub default_variables: HashMap<String, String>,
    /// Content type (text/html or text/plain)
    pub content_type: String,
}

impl EmailTemplate {
    /// Render the template with provided variables.
    /// Returns `(subject, body)` with placeholders substituted.
    pub fn render(&self, variables: &HashMap<String, String>) -> Result<(String, String), TemplateError> {
        let subject = self.substitute_variables(&self.subject_template, variables)?;
        let body = self.substitute_variables(&self.body_template, variables)?;
        Ok((subject, body))
    }

    /// Get list of variable names in this template.
    /// Derived from the default map's keys, so only variables that have a
    /// default are listed.
    pub fn variable_names(&self) -> Vec<String> {
        self.default_variables.keys().cloned().collect()
    }

    /// Substitute `{{name}}` placeholders in `template`.
    ///
    /// Caller-provided values win; defaults fill any placeholder that
    /// remains. Placeholders with neither a value nor a default are left
    /// in the output verbatim, and this function always returns Ok.
    fn substitute_variables(&self, template: &str, variables: &HashMap<String, String>) -> Result<String, TemplateError> {
        let mut result = template.to_string();

        // First pass: substitute provided variables
        for (key, value) in variables {
            // {{{{{}}}}} renders as "{{key}}" - braces are escaped in format!.
            let placeholder = format!("{{{{{}}}}}", key);
            result = result.replace(&placeholder, value);
        }

        // Second pass: substitute defaults for any remaining placeholders
        for (key, default_value) in &self.default_variables {
            let placeholder = format!("{{{{{}}}}}", key);
            if result.contains(&placeholder) {
                result = result.replace(&placeholder, default_value);
            }
        }

        Ok(result)
    }
}

/// Registry of available email templates
pub struct EmailTemplateRegistry {
    // Keyed by EmailTemplate::id.
    templates: HashMap<String, EmailTemplate>,
}

impl Default for EmailTemplateRegistry {
    fn default() -> Self {
        Self::new()
    }
}

impl EmailTemplateRegistry {
    /// Create a new registry with built-in templates
    pub fn new() -> Self {
        let mut registry = Self {
            templates: HashMap::new(),
        };

        // Register built-in templates
        // NOTE(review): `Nexus_notification` is a non-snake-case fn name and
        // will trigger a rustc naming warning unless #[allow]ed - consider
        // renaming the builder fn (the "Nexus_notification" template id
        // string can stay as-is).
        registry.register(Self::Nexus_notification());
        registry.register(Self::service_scheduled());
        registry.register(Self::project_update());
        registry.register(Self::invoice_ready());
        registry.register(Self::payment_received());

        registry
    }

    /// Register a custom template.
    /// A template with an existing id silently replaces the previous one.
    pub fn register(&mut self, template: EmailTemplate) {
        self.templates.insert(template.id.clone(), template);
    }

    /// Get a template by ID
    pub fn get(&self, template_id: &str) -> Option<&EmailTemplate> {
        self.templates.get(template_id)
    }

    /// List all available templates (unordered - HashMap iteration).
    pub fn list(&self) -> Vec<&EmailTemplate> {
        self.templates.values().collect()
    }

    /// List template IDs (unordered - HashMap iteration).
    pub fn list_ids(&self) -> Vec<String> {
        self.templates.keys().cloned().collect()
    }
}
+ description: "General notification from Nexus".to_string(), + subject_template: "{{subject}}".to_string(), + body_template: r#" + + +

Hi {{recipient_name}},

+ +

{{message}}

+ +

Thanks,
+Nexus Team

+ +"#.to_string(), + default_variables: HashMap::from([ + ("subject".to_string(), "Notification from Nexus".to_string()), + ("recipient_name".to_string(), "Team Member".to_string()), + ("message".to_string(), "This is a notification message.".to_string()), + ]), + content_type: "text/html".to_string(), + } + } + + fn service_scheduled() -> EmailTemplate { + EmailTemplate { + id: "service_scheduled".to_string(), + name: "Service Scheduled".to_string(), + description: "Notification when a service is scheduled".to_string(), + subject_template: "Service Scheduled - {{customer_name}}".to_string(), + body_template: r#" + + +

Hi {{recipient_name}},

+ +

A new service has been scheduled:

+ + + + + + + + + + + + + + +
Customer:{{customer_name}}
Date:{{service_date}}
Address:{{service_address}}
+ +

Please check the Nexus system for full details.

+ +

Thanks,
+Nexus Dispatch Team

+ +"#.to_string(), + default_variables: HashMap::from([ + ("recipient_name".to_string(), "Team Member".to_string()), + ("customer_name".to_string(), "Customer".to_string()), + ("service_date".to_string(), "TBD".to_string()), + ("service_address".to_string(), "TBD".to_string()), + ]), + content_type: "text/html".to_string(), + } + } + + fn project_update() -> EmailTemplate { + EmailTemplate { + id: "project_update".to_string(), + name: "Project Update".to_string(), + description: "Notification about project status changes".to_string(), + subject_template: "Project Update - {{project_name}}".to_string(), + body_template: r#" + + +

Hi {{recipient_name}},

+ +

Project Status Update

+ + + + + + + + + + +
Project:{{project_name}}
Status:{{project_status}}
+ +

{{message}}

+ +

View details in the Nexus system.

+ +

Thanks,
+Nexus Team

+ +"#.to_string(), + default_variables: HashMap::from([ + ("recipient_name".to_string(), "Team Member".to_string()), + ("project_name".to_string(), "Project".to_string()), + ("project_status".to_string(), "Updated".to_string()), + ("message".to_string(), "".to_string()), + ]), + content_type: "text/html".to_string(), + } + } + + fn invoice_ready() -> EmailTemplate { + EmailTemplate { + id: "invoice_ready".to_string(), + name: "Invoice Ready".to_string(), + description: "Notification when an invoice is ready for a customer".to_string(), + subject_template: "Invoice Ready - {{invoice_period}}".to_string(), + body_template: r#" + + +

Hi {{recipient_name}},

+ +

Your invoice for {{invoice_period}} is now available.

+ + + + + + + + + + +
Period:{{invoice_period}}
Amount:{{invoice_amount}}
+ +

{{message}}

+ +

Thanks for your business,
+Acme Services

+ +"#.to_string(), + default_variables: HashMap::from([ + ("recipient_name".to_string(), "Valued Customer".to_string()), + ("invoice_period".to_string(), "Current Period".to_string()), + ("invoice_amount".to_string(), "$0.00".to_string()), + ("message".to_string(), "Please let us know if you have any questions.".to_string()), + ]), + content_type: "text/html".to_string(), + } + } + + fn payment_received() -> EmailTemplate { + EmailTemplate { + id: "payment_received".to_string(), + name: "Payment Received".to_string(), + description: "Confirmation when a payment is received".to_string(), + subject_template: "Payment Received - Thank You!".to_string(), + body_template: r#" + + +

Hi {{recipient_name}},

+ +

We've received your payment. Thank you!

+ + + + + + + + + + +
Amount:{{payment_amount}}
Date:{{payment_date}}
+ +

{{message}}

+ +

Thanks for your business,
+Acme Services

+ +"#.to_string(), + default_variables: HashMap::from([ + ("recipient_name".to_string(), "Valued Customer".to_string()), + ("payment_amount".to_string(), "$0.00".to_string()), + ("payment_date".to_string(), "Today".to_string()), + ("message".to_string(), "".to_string()), + ]), + content_type: "text/html".to_string(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_template_render() { + let template = EmailTemplateRegistry::new(); + let notif = template.get("Nexus_notification").unwrap(); + + let vars = HashMap::from([ + ("subject".to_string(), "Test Subject".to_string()), + ("recipient_name".to_string(), "John".to_string()), + ("message".to_string(), "Hello, world!".to_string()), + ]); + + let (subject, body) = notif.render(&vars).unwrap(); + assert_eq!(subject, "Test Subject"); + assert!(body.contains("Hi John,")); + assert!(body.contains("Hello, world!")); + } + + #[test] + fn test_template_defaults() { + let template = EmailTemplateRegistry::new(); + let notif = template.get("Nexus_notification").unwrap(); + + let vars = HashMap::new(); + let (subject, body) = notif.render(&vars).unwrap(); + + assert_eq!(subject, "Notification from Nexus"); + assert!(body.contains("Hi Team Member,")); + } +} diff --git a/src/services/events.rs b/src/services/events.rs new file mode 100644 index 0000000..64040ef --- /dev/null +++ b/src/services/events.rs @@ -0,0 +1,260 @@ +//! Event Publisher Service +//! +//! Publishes events to the audit trail with automatic metadata enrichment. +//! Handles the logic for whether events should be created based on their +//! criticality and matching notification rules. 
use anyhow::Result;
use serde_json::Value as JsonValue;
use sqlx::PgPool;
use uuid::Uuid;

use crate::models::{Event, EventType};
use crate::services::job_queue::JobQueue;
use crate::services::metadata::MetadataEnricher;

/// Event publisher service.
/// Stateless - all methods are associated functions taking the pool.
pub struct EventPublisher;

impl EventPublisher {
    /// Publish an event with automatic metadata enrichment
    ///
    /// # Arguments
    /// * `pool` - Database connection pool
    /// * `event_type` - The type of event
    /// * `entity_type` - Entity type string (e.g., "service", "project", "account")
    /// * `entity_id` - UUID of the entity
    /// * `actor` - Optional tuple of (actor_type, actor_id)
    /// * `metadata` - Optional additional metadata (will be merged with enriched data)
    ///
    /// # Returns
    /// * `Ok(Some(Event))` - Event was created
    /// * `Ok(None)` - Event was skipped (non-critical with no matching rules)
    /// * `Err(...)` - Database error
    pub async fn publish(
        pool: &PgPool,
        event_type: EventType,
        entity_type: &str,
        entity_id: Uuid,
        actor: Option<(&str, Uuid)>,
        metadata: Option<JsonValue>,
    ) -> Result<Option<Event>> {
        // Check if this event should be created: mission-critical events are
        // always recorded; others only when an active rule matches.
        if !Self::is_mission_critical(&event_type) {
            let has_rules = Self::has_matching_rules(pool, &event_type).await?;
            if !has_rules {
                tracing::debug!(
                    event_type = ?event_type,
                    "Skipping non-critical event with no matching notification rules"
                );
                return Ok(None);
            }
        }

        // Enrich metadata with context (merged with any caller-supplied metadata).
        let enriched_metadata =
            MetadataEnricher::enrich(pool, &event_type, entity_type, entity_id, metadata).await;

        // Create the event; a missing actor maps to (None, None).
        let (actor_type, actor_id) = match actor {
            Some((t, id)) => (Some(t.to_string()), Some(id)),
            None => (None, None),
        };

        let event = Event::new(
            event_type,
            entity_type,
            entity_id,
            actor_type,
            actor_id,
            Some(enriched_metadata),
        );

        // Insert into database and read back the stored row.
        let created: Event = sqlx::query_as(
            r#"
            INSERT INTO events (id, event_type, entity_type, entity_id, actor_type, actor_id, metadata, timestamp, created_at)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
            RETURNING id, event_type, entity_type, entity_id, actor_type, actor_id, metadata, timestamp, created_at
            "#,
        )
        .bind(event.id)
        .bind(&event.event_type)
        .bind(&event.entity_type)
        .bind(event.entity_id)
        .bind(&event.actor_type)
        .bind(event.actor_id)
        .bind(&event.metadata)
        .bind(event.timestamp)
        .bind(event.created_at)
        .fetch_one(pool)
        .await?;

        tracing::info!(
            event_id = %created.id,
            event_type = ?created.event_type,
            entity_type = %created.entity_type,
            entity_id = %created.entity_id,
            "Event published"
        );

        Ok(Some(created))
    }

    /// Publish an event and queue notification processing job
    ///
    /// Same as `publish()` but also queues a background job to process
    /// the event and create notifications. A queueing failure is logged
    /// but does NOT fail the publish - the event is still created.
    pub async fn publish_and_queue(
        pool: &PgPool,
        job_queue: &JobQueue,
        event_type: EventType,
        entity_type: &str,
        entity_id: Uuid,
        actor: Option<(&str, Uuid)>,
        metadata: Option<JsonValue>,
    ) -> Result<Option<Event>> {
        let event = Self::publish(pool, event_type, entity_type, entity_id, actor, metadata).await?;

        // Queue notification processing if event was created
        if let Some(ref created) = event {
            if let Err(e) = job_queue.queue_process_event(created.id).await {
                tracing::error!(
                    event_id = %created.id,
                    error = %e,
                    "Failed to queue event processing job"
                );
                // Don't fail the publish - event is still created
            }
        }

        Ok(event)
    }

    /// Publish a system event (no human actor).
    /// Uses actor ("system", nil UUID) rather than omitting the actor.
    pub async fn publish_system(
        pool: &PgPool,
        event_type: EventType,
        entity_type: &str,
        entity_id: Uuid,
        metadata: Option<JsonValue>,
    ) -> Result<Option<Event>> {
        Self::publish(
            pool,
            event_type,
            entity_type,
            entity_id,
            Some(("system", Uuid::nil())),
            metadata,
        )
        .await
    }

    /// Check if an event type is mission-critical (always created)
    ///
    /// Mission-critical events are always recorded regardless of whether
    /// there are matching notification rules. These are events that are
    /// important for auditing and compliance.
    pub fn is_mission_critical(event_type: &EventType) -> bool {
        matches!(
            event_type,
            // Session lifecycle - critical for time tracking
            EventType::ServiceSessionStarted
                | EventType::ServiceSessionEnded
                | EventType::ServiceSessionReverted
                | EventType::ProjectSessionStarted
                | EventType::ProjectSessionEnded
                | EventType::ProjectSessionReverted
                // Invoice lifecycle - critical for accounting
                | EventType::InvoiceCreated
                | EventType::InvoiceSent
                | EventType::InvoicePaid
                | EventType::InvoiceOverdue
                // Status changes - critical for workflow
                | EventType::ServiceStatusChanged
                | EventType::ProjectStatusChanged
                | EventType::AccountStatusChanged
                | EventType::CustomerStatusChanged
                // Profile lifecycle - critical for access control
                | EventType::TeamProfileCreated
                | EventType::TeamProfileDeleted
                | EventType::TeamProfileStatusChanged
                | EventType::TeamProfileRoleChanged
                | EventType::CustomerProfileCreated
                | EventType::CustomerProfileDeleted
                | EventType::CustomerProfileStatusChanged
                | EventType::CustomerProfileAccessGranted
                | EventType::CustomerProfileAccessRevoked
                // Report lifecycle - critical for payroll
                | EventType::ReportSubmitted
                | EventType::ReportApproved
                // System events
                | EventType::SystemStartup
                | EventType::SystemShutdown
        )
    }

    /// Check if there are any active notification rules matching this event type.
    ///
    /// NOTE(review): matches on the Debug representation of EventType -
    /// this silently breaks if rule event_types are stored in a different
    /// casing/format than `{:?}` produces; confirm against how rules are
    /// written.
    async fn has_matching_rules(pool: &PgPool, event_type: &EventType) -> Result<bool> {
        // Convert event type to the string format stored in JSONB
        let event_type_str = format!("{:?}", event_type);

        // JSONB containment: does the rule's event_types array contain
        // this event type string?
        let exists: bool = sqlx::query_scalar(
            r#"
            SELECT EXISTS(
                SELECT 1 FROM notification_rules
                WHERE is_active = true
                AND event_types @> $1::jsonb
            )
            "#,
        )
        .bind(serde_json::json!([event_type_str]))
        .fetch_one(pool)
        .await?;

        Ok(exists)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Pins the mission-critical classification for representative event types.
    #[test]
    fn test_mission_critical_events() {
        // Session events should be mission critical
        assert!(EventPublisher::is_mission_critical(
            &EventType::ServiceSessionStarted
        ));
        assert!(EventPublisher::is_mission_critical(
            &EventType::ServiceSessionEnded
        ));
        assert!(EventPublisher::is_mission_critical(
            &EventType::ProjectSessionStarted
        ));

        // Invoice events should be mission critical
        assert!(EventPublisher::is_mission_critical(&EventType::InvoicePaid));
        assert!(EventPublisher::is_mission_critical(&EventType::InvoiceSent));

        // Profile events should be mission critical
        assert!(EventPublisher::is_mission_critical(
            &EventType::TeamProfileCreated
        ));
        assert!(EventPublisher::is_mission_critical(
            &EventType::CustomerProfileAccessGranted
        ));

        // Regular CRUD events should not be mission critical
        assert!(!EventPublisher::is_mission_critical(
            &EventType::ServiceCreated
        ));
        assert!(!EventPublisher::is_mission_critical(
            &EventType::ProjectUpdated
        ));
        assert!(!EventPublisher::is_mission_critical(
            &EventType::AccountDeleted
        ));
    }
}
//! Gmail API service for sending emails

use base64::{Engine as _, engine::general_purpose};
use reqwest::{Client, Response};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::sync::Arc;
use thiserror::Error;

use super::google_auth::{GoogleAuthService, GMAIL_SEND_SCOPE, GMAIL_FULL_SCOPE};

// Base URL for all Gmail REST calls.
const GMAIL_API_BASE: &str = "https://gmail.googleapis.com/gmail/v1";

/// Errors surfaced by the Gmail service.
///
/// NOTE(review): generic parameters were stripped during extraction; the
/// `Option`/`Vec` inner types in this section are reconstructed from the
/// Gmail API's documented JSON shapes - TODO confirm.
#[derive(Debug, Error)]
pub enum GmailError {
    #[error("Authentication failed: {0}")]
    Auth(#[from] super::google_auth::GoogleAuthError),
    #[error("Request failed: {0}")]
    Request(String),
    #[error("Failed to parse response: {0}")]
    Parse(String),
    #[error("Email not found")]
    NotFound,
    #[error("Gmail API error: {0}")]
    ApiError(String),
    #[error("Gmail user not configured")]
    NotConfigured,
}

/// Email header
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailHeader {
    pub name: String,
    pub value: String,
}

/// Email body
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailBody {
    #[serde(rename = "attachmentId")]
    pub attachment_id: Option<String>,
    pub size: u64,
    // Body bytes; presumably base64url-encoded per Gmail API - TODO confirm.
    pub data: Option<String>,
}

/// Email payload (MIME structure)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailPayload {
    #[serde(rename = "partId")]
    pub part_id: Option<String>,
    #[serde(rename = "mimeType")]
    pub mime_type: String,
    pub filename: Option<String>,
    pub headers: Vec<EmailHeader>,
    pub body: EmailBody,
    // Recursive MIME parts for multipart messages.
    pub parts: Option<Vec<EmailPayload>>,
}

/// Full email message
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Email {
    pub id: String,
    #[serde(rename = "threadId")]
    pub thread_id: String,
    #[serde(rename = "labelIds")]
    pub label_ids: Vec<String>,
    pub snippet: String,
    pub payload: EmailPayload,
    #[serde(rename = "sizeEstimate")]
    pub size_estimate: u64,
    #[serde(rename = "historyId")]
    pub history_id: String,
    #[serde(rename = "internalDate")]
    pub internal_date: String,
}

/// Minimal email reference (for list results)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailMessage {
    pub id: String,
    #[serde(rename = "threadId")]
    pub thread_id: String,
}

/// Email list response
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailListResponse {
    // default: Gmail omits `messages` entirely when there are no results.
    #[serde(default)]
    pub messages: Vec<EmailMessage>,
    #[serde(rename = "nextPageToken")]
    pub next_page_token: Option<String>,
    #[serde(rename = "resultSizeEstimate")]
    pub result_size_estimate: u32,
}

/// Email attachment
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailAttachment {
    pub filename: String,
    /// Base64 encoded content
    pub content: String,
    pub content_type: String,
}

/// Request to send an email
#[derive(Debug, Clone)]
pub struct SendEmailRequest {
    /// Recipient email addresses
    pub to: Vec<String>,
    /// CC recipients
    pub cc: Option<Vec<String>>,
    /// BCC recipients
    pub bcc: Option<Vec<String>>,
    /// Email subject
    pub subject: String,
    /// Email body
    pub body: String,
    /// Content type (defaults to "text/html")
    pub content_type: Option<String>,
    /// Display name for the sender
    pub from_name: Option<String>,
    /// Attachments
    pub attachments: Option<Vec<EmailAttachment>>,
}

/// Response from sending an email
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SendEmailResponse {
    pub id: String,
    #[serde(rename = "threadId")]
    pub thread_id: String,
    #[serde(rename = "labelIds")]
    pub label_ids: Option<Vec<String>>,
}

/// Query for listing emails
#[derive(Debug, Clone, Default)]
pub struct ListEmailsQuery {
    /// Gmail search query
    pub q: Option<String>,
    /// Maximum results to return
    pub max_results: Option<u32>,
    /// Page token for pagination
    pub page_token: Option<String>,
    /// Filter by label IDs
    pub label_ids: Option<Vec<String>>,
    /// Include spam and trash
    pub include_spam_trash: Option<bool>,
}

/// Gmail service for sending and managing emails
pub struct GmailService {
    // Shared HTTP client for Gmail REST calls.
    client: Client,
    // Token provider (scopes imported above).
    auth: Arc<GoogleAuthService>,
    /// Email address to impersonate (domain-wide delegation)
    user_email: String,
}
new(auth: Arc, user_email: String) -> Self { + Self { + client: Client::new(), + auth, + user_email, + } + } + + /// Send an email + pub async fn send_email(&self, request: SendEmailRequest) -> Result { + let url = format!("{}/users/me/messages/send", GMAIL_API_BASE); + let raw_email = build_raw_email(&self.user_email, request)?; + + let body = json!({ + "raw": raw_email + }); + + let response = self.authenticated_post(&url, &body, GMAIL_SEND_SCOPE).await?; + handle_send_response(response).await + } + + /// List emails + pub async fn list_emails(&self, query: ListEmailsQuery) -> Result { + let url = build_list_emails_url(query); + let response = self.authenticated_get(&url, GMAIL_FULL_SCOPE).await?; + handle_list_response(response).await + } + + /// Get a specific email + pub async fn get_email(&self, email_id: &str) -> Result { + let url = format!("{}/users/me/messages/{}", GMAIL_API_BASE, urlencoding::encode(email_id)); + let response = self.authenticated_get(&url, GMAIL_FULL_SCOPE).await?; + handle_email_response(response).await + } + + /// Delete an email + pub async fn delete_email(&self, email_id: &str) -> Result<(), GmailError> { + let url = format!("{}/users/me/messages/{}", GMAIL_API_BASE, urlencoding::encode(email_id)); + let response = self.authenticated_delete(&url).await?; + handle_delete_response(response).await + } + + /// Mark an email as read + pub async fn mark_as_read(&self, email_id: &str) -> Result { + let url = format!("{}/users/me/messages/{}/modify", GMAIL_API_BASE, urlencoding::encode(email_id)); + let body = json!({ + "removeLabelIds": ["UNREAD"] + }); + let response = self.authenticated_post(&url, &body, GMAIL_FULL_SCOPE).await?; + handle_email_response(response).await + } + + /// Mark an email as unread + pub async fn mark_as_unread(&self, email_id: &str) -> Result { + let url = format!("{}/users/me/messages/{}/modify", GMAIL_API_BASE, urlencoding::encode(email_id)); + let body = json!({ + "addLabelIds": ["UNREAD"] + }); + let response = 
self.authenticated_post(&url, &body, GMAIL_FULL_SCOPE).await?; + handle_email_response(response).await + } + + /// Get the user email address + pub fn user_email(&self) -> &str { + &self.user_email + } + + // Private helpers + + async fn authenticated_get(&self, url: &str, scope: &str) -> Result { + let token = self.auth.get_access_token(scope, Some(&self.user_email)).await?; + self.client + .get(url) + .header("Authorization", format!("Bearer {}", token)) + .send() + .await + .map_err(|e| GmailError::Request(e.to_string())) + } + + async fn authenticated_post(&self, url: &str, body: &serde_json::Value, scope: &str) -> Result { + let token = self.auth.get_access_token(scope, Some(&self.user_email)).await?; + self.client + .post(url) + .header("Authorization", format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(body) + .send() + .await + .map_err(|e| GmailError::Request(e.to_string())) + } + + async fn authenticated_delete(&self, url: &str) -> Result { + let token = self.auth.get_access_token(GMAIL_FULL_SCOPE, Some(&self.user_email)).await?; + self.client + .delete(url) + .header("Authorization", format!("Bearer {}", token)) + .send() + .await + .map_err(|e| GmailError::Request(e.to_string())) + } +} + +// Helper functions + +fn build_raw_email(from_email: &str, request: SendEmailRequest) -> Result { + let mut lines = Vec::new(); + + // To header + lines.push(format!("To: {}", request.to.join(", "))); + + // From header with optional display name + if let Some(ref from_name) = request.from_name { + lines.push(format!("From: \"{}\" <{}>", from_name, from_email)); + } else { + lines.push(format!("From: {}", from_email)); + } + + // CC + if let Some(cc) = request.cc { + if !cc.is_empty() { + lines.push(format!("Cc: {}", cc.join(", "))); + } + } + + // BCC + if let Some(bcc) = request.bcc { + if !bcc.is_empty() { + lines.push(format!("Bcc: {}", bcc.join(", "))); + } + } + + // Subject + lines.push(format!("Subject: {}", request.subject)); 
+ + // Content type + let content_type = request.content_type.as_deref().unwrap_or("text/html"); + lines.push(format!("Content-Type: {}; charset=utf-8", content_type)); + + // Empty line to separate headers from body + lines.push(String::new()); + + // Body + lines.push(request.body); + + let raw = lines.join("\r\n"); + + // Base64 encode using URL-safe alphabet + Ok(general_purpose::URL_SAFE_NO_PAD.encode(raw.as_bytes())) +} + +fn build_list_emails_url(query: ListEmailsQuery) -> String { + let mut url = format!("{}/users/me/messages", GMAIL_API_BASE); + let mut params = Vec::new(); + + if let Some(q) = query.q { + params.push(format!("q={}", urlencoding::encode(&q))); + } + if let Some(max_results) = query.max_results { + params.push(format!("maxResults={}", max_results)); + } + if let Some(page_token) = query.page_token { + params.push(format!("pageToken={}", urlencoding::encode(&page_token))); + } + if let Some(label_ids) = query.label_ids { + for label_id in label_ids { + params.push(format!("labelIds={}", urlencoding::encode(&label_id))); + } + } + if let Some(include_spam_trash) = query.include_spam_trash { + params.push(format!("includeSpamTrash={}", include_spam_trash)); + } + + if !params.is_empty() { + url.push('?'); + url.push_str(¶ms.join("&")); + } + + url +} + +async fn handle_send_response(response: Response) -> Result { + if response.status().is_success() { + response + .json() + .await + .map_err(|e| GmailError::Parse(e.to_string())) + } else { + Err(handle_error_response(response).await) + } +} + +async fn handle_email_response(response: Response) -> Result { + if response.status().is_success() { + response + .json() + .await + .map_err(|e| GmailError::Parse(e.to_string())) + } else { + Err(handle_error_response(response).await) + } +} + +async fn handle_list_response(response: Response) -> Result { + if response.status().is_success() { + response + .json() + .await + .map_err(|e| GmailError::Parse(e.to_string())) + } else { + 
Err(handle_error_response(response).await) + } +} + +async fn handle_delete_response(response: Response) -> Result<(), GmailError> { + match response.status().as_u16() { + 204 => Ok(()), + 404 => Err(GmailError::NotFound), + _ => Err(handle_error_response(response).await), + } +} + +async fn handle_error_response(response: Response) -> GmailError { + let status = response.status(); + let error_text = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + + match status.as_u16() { + 404 => GmailError::NotFound, + _ => GmailError::ApiError(format!("HTTP {}: {}", status, error_text)), + } +} diff --git a/src/services/google_auth.rs b/src/services/google_auth.rs new file mode 100644 index 0000000..508c566 --- /dev/null +++ b/src/services/google_auth.rs @@ -0,0 +1,260 @@ +//! Shared Google service account authentication for Calendar and Gmail APIs + +use base64::{Engine as _, engine::general_purpose}; +use jsonwebtoken::{Algorithm, EncodingKey, Header, encode}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::time::{SystemTime, UNIX_EPOCH}; +use thiserror::Error; +use tokio::sync::RwLock; + +const GOOGLE_TOKEN_URL: &str = "https://oauth2.googleapis.com/token"; + +/// Google Calendar API scope +pub const GOOGLE_CALENDAR_SCOPE: &str = "https://www.googleapis.com/auth/calendar"; + +/// Gmail send scope +pub const GMAIL_SEND_SCOPE: &str = "https://www.googleapis.com/auth/gmail.send"; + +/// Gmail full access scope (for read/delete operations) +pub const GMAIL_FULL_SCOPE: &str = "https://mail.google.com/"; + +#[derive(Debug, Error)] +pub enum GoogleAuthError { + #[error("Failed to read service account key file: {0}")] + KeyFileRead(String), + #[error("Invalid base64 service account key: {0}")] + InvalidBase64(String), + #[error("Invalid UTF-8 in service account key: {0}")] + InvalidUtf8(String), + #[error("Invalid service account key format: {0}")] + InvalidKeyFormat(String), + #[error("Invalid private key: 
{0}")] + InvalidPrivateKey(String), + #[error("Failed to create JWT: {0}")] + JwtCreation(String), + #[error("Token exchange failed: {0}")] + TokenExchange(String), + #[error("Google service account key not configured")] + NotConfigured, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ServiceAccountKey { + #[serde(rename = "type")] + pub key_type: String, + pub project_id: String, + pub private_key_id: String, + pub private_key: String, + pub client_email: String, + pub client_id: String, + pub auth_uri: String, + pub token_uri: String, + pub auth_provider_x509_cert_url: String, + pub client_x509_cert_url: String, +} + +#[derive(Debug, Serialize)] +struct JwtClaims { + iss: String, + scope: String, + aud: String, + exp: usize, + iat: usize, + #[serde(skip_serializing_if = "Option::is_none")] + sub: Option, +} + +#[derive(Debug, Deserialize)] +struct TokenResponse { + access_token: String, + #[allow(dead_code)] + token_type: String, + expires_in: u32, +} + +#[derive(Debug, Clone)] +struct CachedToken { + access_token: String, + expires_at: u64, + scope: String, + sub: Option, +} + +/// Shared Google authentication service with token caching +pub struct GoogleAuthService { + client: Client, + service_account: ServiceAccountKey, + /// Cache tokens per (scope, sub) combination + cached_tokens: Arc>>, +} + +impl GoogleAuthService { + /// Create a new Google auth service from a service account key string + /// The key can be: a file path, raw JSON, or base64-encoded JSON + pub fn new(service_account_key: &str) -> Result { + let service_account = parse_service_account_key(service_account_key)?; + + Ok(Self { + client: Client::new(), + service_account, + cached_tokens: Arc::new(RwLock::new(Vec::new())), + }) + } + + /// Get an access token for the specified scope and optional subject (for impersonation) + pub async fn get_access_token( + &self, + scope: &str, + sub: Option<&str>, + ) -> Result { + // Check cache first + { + let tokens = 
self.cached_tokens.read().await; + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs(); + + for cached in tokens.iter() { + if cached.scope == scope + && cached.sub.as_deref() == sub + && cached.expires_at > now + 60 + { + return Ok(cached.access_token.clone()); + } + } + } + + // Get a new token + let jwt = self.create_jwt(scope, sub)?; + let token_response = self.exchange_jwt_for_token(&jwt).await?; + + // Cache the new token + let expires_at = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs() + + token_response.expires_in as u64; + + let new_token = CachedToken { + access_token: token_response.access_token.clone(), + expires_at, + scope: scope.to_string(), + sub: sub.map(String::from), + }; + + // Update cache - remove old token for same scope/sub if exists + { + let mut tokens = self.cached_tokens.write().await; + tokens.retain(|t| !(t.scope == scope && t.sub.as_deref() == sub)); + tokens.push(new_token); + } + + Ok(token_response.access_token) + } + + /// Get the service account email (useful for "from" addresses) + pub fn service_account_email(&self) -> &str { + &self.service_account.client_email + } + + fn create_jwt(&self, scope: &str, sub: Option<&str>) -> Result { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs() as usize; + + let claims = JwtClaims { + iss: self.service_account.client_email.clone(), + scope: scope.to_string(), + aud: GOOGLE_TOKEN_URL.to_string(), + exp: now + 3600, // 1 hour + iat: now, + sub: sub.map(String::from), + }; + + let encoding_key = EncodingKey::from_rsa_pem(self.service_account.private_key.as_bytes()) + .map_err(|e| GoogleAuthError::InvalidPrivateKey(e.to_string()))?; + + encode(&Header::new(Algorithm::RS256), &claims, &encoding_key) + .map_err(|e| GoogleAuthError::JwtCreation(e.to_string())) + } + + async fn exchange_jwt_for_token(&self, jwt: &str) -> Result { + let response = self + .client + .post(GOOGLE_TOKEN_URL) + .form(&[ + 
("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer"), + ("assertion", jwt), + ]) + .send() + .await + .map_err(|e| GoogleAuthError::TokenExchange(e.to_string()))?; + + if response.status().is_success() { + response + .json() + .await + .map_err(|e| GoogleAuthError::TokenExchange(format!("Failed to parse response: {}", e))) + } else { + let error_text = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + Err(GoogleAuthError::TokenExchange(error_text)) + } + } +} + +/// Parse a service account key from various formats +fn parse_service_account_key(key: &str) -> Result { + let key_data = if key.starts_with('/') || key.starts_with("./") { + // File path + std::fs::read_to_string(key) + .map_err(|e| GoogleAuthError::KeyFileRead(e.to_string()))? + } else if key.starts_with('{') { + // Raw JSON + key.to_string() + } else { + // Base64 encoded + let decoded = general_purpose::STANDARD + .decode(key) + .map_err(|e| GoogleAuthError::InvalidBase64(e.to_string()))?; + + String::from_utf8(decoded) + .map_err(|e| GoogleAuthError::InvalidUtf8(e.to_string()))? 
+ }; + + serde_json::from_str(&key_data) + .map_err(|e| GoogleAuthError::InvalidKeyFormat(e.to_string())) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_json_key() { + let json = r#"{ + "type": "service_account", + "project_id": "test", + "private_key_id": "key123", + "private_key": "-----BEGIN RSA PRIVATE KEY-----\ntest\n-----END RSA PRIVATE KEY-----\n", + "client_email": "test@test.iam.gserviceaccount.com", + "client_id": "123", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test" + }"#; + + let result = parse_service_account_key(json); + assert!(result.is_ok()); + let key = result.unwrap(); + assert_eq!(key.project_id, "test"); + assert_eq!(key.client_email, "test@test.iam.gserviceaccount.com"); + } +} diff --git a/src/services/google_calendar.rs b/src/services/google_calendar.rs new file mode 100644 index 0000000..9e226b0 --- /dev/null +++ b/src/services/google_calendar.rs @@ -0,0 +1,454 @@ +//! 
Google Calendar API service + +use chrono::{DateTime, Utc}; +use reqwest::{Client, Response}; +use serde::{Deserialize, Serialize}; +use serde_json::{Value, json}; +use std::sync::Arc; +use thiserror::Error; + +use super::google_auth::{GoogleAuthService, GOOGLE_CALENDAR_SCOPE}; + +const CALENDAR_API_BASE: &str = "https://www.googleapis.com/calendar/v3"; + +#[derive(Debug, Error)] +pub enum CalendarError { + #[error("Authentication failed: {0}")] + Auth(#[from] super::google_auth::GoogleAuthError), + #[error("Request failed: {0}")] + Request(String), + #[error("Failed to parse response: {0}")] + Parse(String), + #[error("Event not found")] + NotFound, + #[error("Invalid event ID: {0}")] + InvalidEventId(String), + #[error("Google Calendar API error: {0}")] + ApiError(String), + #[error("Calendar not configured")] + NotConfigured, +} + +/// Reminder configuration for an event +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventReminder { + /// Method: "email" or "popup" + pub method: String, + /// Minutes before the event + pub minutes: i32, +} + +/// Reminders settings for an event +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventReminders { + #[serde(rename = "useDefault")] + pub use_default: bool, + pub overrides: Option>, +} + +/// Date/time for an event (supports both timed and all-day events) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventDateTime { + #[serde(rename = "dateTime", skip_serializing_if = "Option::is_none")] + pub date_time: Option>, + /// For all-day events (YYYY-MM-DD format) + #[serde(skip_serializing_if = "Option::is_none")] + pub date: Option, + #[serde(rename = "timeZone", skip_serializing_if = "Option::is_none")] + pub time_zone: Option, +} + +/// Event attendee +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Attendee { + pub email: String, + #[serde(rename = "displayName", skip_serializing_if = "Option::is_none")] + pub display_name: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub optional: Option, + #[serde(rename = "responseStatus", skip_serializing_if = "Option::is_none")] + pub response_status: Option, +} + +/// Calendar event +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CalendarEvent { + pub id: String, + pub summary: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub location: Option, + pub start: EventDateTime, + pub end: EventDateTime, + #[serde(skip_serializing_if = "Option::is_none")] + pub attendees: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub reminders: Option, + #[serde(rename = "colorId", skip_serializing_if = "Option::is_none")] + pub color_id: Option, + #[serde(rename = "htmlLink", skip_serializing_if = "Option::is_none")] + pub html_link: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub created: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub updated: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option, +} + +/// Request to create a calendar event +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreateEventRequest { + /// Custom event ID (optional, must be 5-1024 chars, a-v and 0-9 only) + pub id: Option, + pub summary: String, + pub description: Option, + pub location: Option, + pub start: EventDateTime, + pub end: EventDateTime, + pub attendees: Option>, + pub reminders: Option, + #[serde(rename = "colorId")] + pub color_id: Option, +} + +/// Request to update a calendar event +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UpdateEventRequest { + pub summary: Option, + pub description: Option, + pub location: Option, + pub start: Option, + pub end: Option, + pub attendees: Option>, + pub reminders: Option, + #[serde(rename = "colorId")] + pub color_id: Option, +} + +/// Query parameters for listing events +#[derive(Debug, Clone, Default)] +pub struct ListEventsQuery { + pub 
time_min: Option>, + pub time_max: Option>, + pub max_results: Option, + pub q: Option, + pub single_events: Option, + pub order_by: Option, +} + +/// Google Calendar service +pub struct GoogleCalendarService { + client: Client, + auth: Arc, + calendar_id: String, + /// Email to impersonate via Domain-Wide Delegation + impersonate_user: String, +} + +impl GoogleCalendarService { + /// Create a new calendar service + /// + /// `impersonate_user` is the email address to impersonate via Domain-Wide Delegation. + /// This is required for operations like inviting attendees. + pub fn new(auth: Arc, calendar_id: String, impersonate_user: String) -> Self { + Self { + client: Client::new(), + auth, + calendar_id, + impersonate_user, + } + } + + /// Create a new calendar event + pub async fn create_event(&self, request: CreateEventRequest) -> Result { + if let Some(ref id) = request.id { + validate_event_id(id)?; + } + + let url = format!("{}/calendars/{}/events", CALENDAR_API_BASE, urlencoding::encode(&self.calendar_id)); + let event_body = build_create_event_body(request); + + let response = self.authenticated_post(&url, &event_body).await?; + handle_event_response(response).await + } + + /// Get an event by ID + pub async fn get_event(&self, event_id: &str) -> Result { + let url = format!( + "{}/calendars/{}/events/{}", + CALENDAR_API_BASE, + urlencoding::encode(&self.calendar_id), + urlencoding::encode(event_id) + ); + + let response = self.authenticated_get(&url).await?; + handle_event_response_with_404(response).await + } + + /// List events + pub async fn list_events(&self, query: ListEventsQuery) -> Result, CalendarError> { + let url = build_list_events_url(&self.calendar_id, query); + let response = self.authenticated_get(&url).await?; + handle_list_events_response(response).await + } + + /// Update an event + pub async fn update_event( + &self, + event_id: &str, + request: UpdateEventRequest, + ) -> Result { + let existing = self.get_event(event_id).await?; + let 
url = format!( + "{}/calendars/{}/events/{}", + CALENDAR_API_BASE, + urlencoding::encode(&self.calendar_id), + urlencoding::encode(event_id) + ); + let event_body = build_update_event_body(request, existing); + + let response = self.authenticated_put(&url, &event_body).await?; + handle_event_response_with_404(response).await + } + + /// Delete an event + pub async fn delete_event(&self, event_id: &str) -> Result<(), CalendarError> { + let url = format!( + "{}/calendars/{}/events/{}", + CALENDAR_API_BASE, + urlencoding::encode(&self.calendar_id), + urlencoding::encode(event_id) + ); + + let response = self.authenticated_delete(&url).await?; + handle_delete_response(response).await + } + + /// Get the calendar ID + pub fn calendar_id(&self) -> &str { + &self.calendar_id + } + + // Private helpers + + async fn authenticated_get(&self, url: &str) -> Result { + let token = self.auth.get_access_token(GOOGLE_CALENDAR_SCOPE, Some(&self.impersonate_user)).await?; + self.client + .get(url) + .header("Authorization", format!("Bearer {}", token)) + .send() + .await + .map_err(|e| CalendarError::Request(e.to_string())) + } + + async fn authenticated_post(&self, url: &str, body: &Value) -> Result { + let token = self.auth.get_access_token(GOOGLE_CALENDAR_SCOPE, Some(&self.impersonate_user)).await?; + self.client + .post(url) + .header("Authorization", format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(body) + .send() + .await + .map_err(|e| CalendarError::Request(e.to_string())) + } + + async fn authenticated_put(&self, url: &str, body: &Value) -> Result { + let token = self.auth.get_access_token(GOOGLE_CALENDAR_SCOPE, Some(&self.impersonate_user)).await?; + self.client + .put(url) + .header("Authorization", format!("Bearer {}", token)) + .header("Content-Type", "application/json") + .json(body) + .send() + .await + .map_err(|e| CalendarError::Request(e.to_string())) + } + + async fn authenticated_delete(&self, url: &str) -> Result { + let token = 
self.auth.get_access_token(GOOGLE_CALENDAR_SCOPE, Some(&self.impersonate_user)).await?; + self.client + .delete(url) + .header("Authorization", format!("Bearer {}", token)) + .send() + .await + .map_err(|e| CalendarError::Request(e.to_string())) + } +} + +// Helper functions + +fn validate_event_id(id: &str) -> Result<(), CalendarError> { + if id.len() < 5 || id.len() > 1024 { + return Err(CalendarError::InvalidEventId( + "ID must be between 5 and 1024 characters".to_string(), + )); + } + + for ch in id.chars() { + if !ch.is_ascii_lowercase() && !ch.is_ascii_digit() { + return Err(CalendarError::InvalidEventId( + "ID can only contain lowercase letters a-v and digits 0-9".to_string(), + )); + } + if ch.is_ascii_lowercase() && ch > 'v' { + return Err(CalendarError::InvalidEventId( + "ID can only contain lowercase letters a-v and digits 0-9".to_string(), + )); + } + } + + Ok(()) +} + +fn build_create_event_body(request: CreateEventRequest) -> Value { + let mut body = json!({ + "summary": request.summary, + "start": request.start, + "end": request.end + }); + + if let Some(id) = request.id { + body["id"] = json!(id); + } + if let Some(description) = request.description { + body["description"] = json!(description); + } + if let Some(location) = request.location { + body["location"] = json!(location); + } + if let Some(attendees) = request.attendees { + body["attendees"] = json!(attendees); + } + if let Some(reminders) = request.reminders { + body["reminders"] = json!(reminders); + } + if let Some(color_id) = request.color_id { + body["colorId"] = json!(color_id); + } + + body +} + +fn build_update_event_body(request: UpdateEventRequest, existing: CalendarEvent) -> Value { + let mut body = json!({ + "summary": request.summary.unwrap_or(existing.summary), + "start": request.start.unwrap_or(existing.start), + "end": request.end.unwrap_or(existing.end) + }); + + if let Some(description) = request.description.or(existing.description) { + body["description"] = 
json!(description); + } + if let Some(location) = request.location.or(existing.location) { + body["location"] = json!(location); + } + if let Some(attendees) = request.attendees.or(existing.attendees) { + body["attendees"] = json!(attendees); + } + if let Some(reminders) = request.reminders.or(existing.reminders) { + body["reminders"] = json!(reminders); + } + if let Some(color_id) = request.color_id.or(existing.color_id) { + body["colorId"] = json!(color_id); + } + + body +} + +fn build_list_events_url(calendar_id: &str, query: ListEventsQuery) -> String { + let mut url = format!( + "{}/calendars/{}/events", + CALENDAR_API_BASE, + urlencoding::encode(calendar_id) + ); + let mut params = Vec::new(); + + if let Some(time_min) = query.time_min { + params.push(format!("timeMin={}", urlencoding::encode(&time_min.to_rfc3339()))); + } + if let Some(time_max) = query.time_max { + params.push(format!("timeMax={}", urlencoding::encode(&time_max.to_rfc3339()))); + } + if let Some(max_results) = query.max_results { + params.push(format!("maxResults={}", max_results)); + } + if let Some(q) = query.q { + params.push(format!("q={}", urlencoding::encode(&q))); + } + if let Some(single_events) = query.single_events { + params.push(format!("singleEvents={}", single_events)); + } + if let Some(order_by) = query.order_by { + params.push(format!("orderBy={}", urlencoding::encode(&order_by))); + } + + if !params.is_empty() { + url.push('?'); + url.push_str(¶ms.join("&")); + } + + url +} + +async fn handle_event_response(response: Response) -> Result { + if response.status().is_success() { + response + .json() + .await + .map_err(|e| CalendarError::Parse(e.to_string())) + } else { + Err(handle_error_response(response).await) + } +} + +async fn handle_event_response_with_404(response: Response) -> Result { + match response.status().as_u16() { + 200 => response + .json() + .await + .map_err(|e| CalendarError::Parse(e.to_string())), + 404 => Err(CalendarError::NotFound), + _ => 
Err(handle_error_response(response).await), + } +} + +async fn handle_list_events_response(response: Response) -> Result, CalendarError> { + if response.status().is_success() { + let json: Value = response + .json() + .await + .map_err(|e| CalendarError::Parse(e.to_string()))?; + + let events = json["items"] + .as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|item| serde_json::from_value(item.clone()).ok()) + .collect(); + + Ok(events) + } else { + Err(handle_error_response(response).await) + } +} + +async fn handle_delete_response(response: Response) -> Result<(), CalendarError> { + match response.status().as_u16() { + 204 => Ok(()), + 404 => Err(CalendarError::NotFound), + _ => Err(handle_error_response(response).await), + } +} + +async fn handle_error_response(response: Response) -> CalendarError { + let status = response.status(); + let error_text = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + + CalendarError::ApiError(format!("HTTP {}: {}", status, error_text)) +} diff --git a/src/services/image.rs b/src/services/image.rs new file mode 100644 index 0000000..12c4db1 --- /dev/null +++ b/src/services/image.rs @@ -0,0 +1,303 @@ +//! Image processing service for session media +//! +//! Handles image validation, HEIC conversion, and thumbnail generation. 
+ +use bytes::Bytes; +use image::codecs::jpeg::JpegEncoder; +use image::imageops::FilterType; +use image::{DynamicImage, GenericImageView, ImageReader}; +use std::io::Cursor; +use std::process::Command; +use tempfile::NamedTempFile; +use thiserror::Error; + +/// Image processing errors +#[derive(Error, Debug)] +pub enum ImageError { + #[error("Invalid image format: {0}")] + InvalidFormat(String), + + #[error("Image too large: {size} bytes (max {max} bytes)")] + TooLarge { size: usize, max: usize }, + + #[error("Failed to decode image: {0}")] + DecodeFailed(String), + + #[error("Failed to encode image: {0}")] + EncodeFailed(String), + + #[error("HEIC conversion failed: {0}")] + HeicConversionFailed(String), + + #[error("IO error: {0}")] + Io(#[from] std::io::Error), +} + +/// Result of processing an image +#[derive(Debug)] +pub struct ProcessedImage { + /// The processed image data (JPEG for HEIC, original for others) + pub data: Bytes, + /// Content type (always image/jpeg for processed images, or original) + pub content_type: String, + /// Image width in pixels + pub width: u32, + /// Image height in pixels + pub height: u32, +} + +/// Maximum image size (10MB) +pub const MAX_IMAGE_SIZE: usize = 10 * 1024 * 1024; + +/// Thumbnail size (max dimension) +pub const THUMBNAIL_SIZE: u32 = 320; + +/// JPEG quality for thumbnails and HEIC conversion +pub const JPEG_QUALITY: u8 = 85; + +/// Detect image format from magic bytes +fn detect_format(data: &[u8]) -> Option<&'static str> { + if data.len() < 12 { + return None; + } + + // JPEG: FF D8 FF + if data.starts_with(&[0xFF, 0xD8, 0xFF]) { + return Some("image/jpeg"); + } + + // PNG: 89 50 4E 47 0D 0A 1A 0A + if data.starts_with(&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]) { + return Some("image/png"); + } + + // GIF: GIF87a or GIF89a + if data.starts_with(b"GIF87a") || data.starts_with(b"GIF89a") { + return Some("image/gif"); + } + + // WebP: RIFF....WEBP + if data.len() >= 12 && &data[0..4] == b"RIFF" && 
&data[8..12] == b"WEBP" { + return Some("image/webp"); + } + + // HEIC/HEIF: ftyp followed by heic, heix, hevc, mif1, etc. + if data.len() >= 12 && &data[4..8] == b"ftyp" { + let brand = &data[8..12]; + if brand == b"heic" + || brand == b"heix" + || brand == b"hevc" + || brand == b"hevx" + || brand == b"mif1" + || brand == b"msf1" + { + return Some("image/heic"); + } + } + + // BMP: BM + if data.starts_with(b"BM") { + return Some("image/bmp"); + } + + // TIFF: II or MM + if data.starts_with(&[0x49, 0x49, 0x2A, 0x00]) || data.starts_with(&[0x4D, 0x4D, 0x00, 0x2A]) { + return Some("image/tiff"); + } + + None +} + +/// Check if format is HEIC/HEIF +fn is_heic(content_type: &str) -> bool { + matches!(content_type, "image/heic" | "image/heif") +} + +/// Convert HEIC to JPEG using heif-convert CLI tool +fn convert_heic_to_jpeg(data: &[u8]) -> Result, ImageError> { + // Write HEIC data to temp file + let mut input_file = NamedTempFile::with_suffix(".heic")?; + std::io::Write::write_all(&mut input_file, data)?; + + // Create output temp file + let output_file = NamedTempFile::with_suffix(".jpg")?; + let output_path = output_file.path().to_path_buf(); + + // Run heif-convert + let output = Command::new("heif-convert") + .arg("-q") + .arg(JPEG_QUALITY.to_string()) + .arg(input_file.path()) + .arg(&output_path) + .output(); + + match output { + Ok(result) if result.status.success() => { + // Read the converted JPEG + std::fs::read(&output_path).map_err(|e| ImageError::HeicConversionFailed(e.to_string())) + } + Ok(result) => { + let stderr = String::from_utf8_lossy(&result.stderr); + Err(ImageError::HeicConversionFailed(format!( + "heif-convert failed: {}", + stderr + ))) + } + Err(e) => { + // heif-convert not installed, try ImageMagick convert + let output = Command::new("convert") + .arg(input_file.path()) + .arg("-quality") + .arg(JPEG_QUALITY.to_string()) + .arg(format!("jpeg:{}", output_path.display())) + .output(); + + match output { + Ok(result) if 
result.status.success() => std::fs::read(&output_path) + .map_err(|e| ImageError::HeicConversionFailed(e.to_string())), + Ok(result) => { + let stderr = String::from_utf8_lossy(&result.stderr); + Err(ImageError::HeicConversionFailed(format!( + "ImageMagick convert failed: {}", + stderr + ))) + } + Err(_) => Err(ImageError::HeicConversionFailed(format!( + "Neither heif-convert nor ImageMagick available: {}", + e + ))), + } + } + } +} + +/// Process an uploaded image +/// +/// - Validates size and format +/// - Converts HEIC to JPEG +/// - Extracts dimensions +pub fn process_image(data: &[u8], _filename: &str) -> Result { + // Check size + if data.len() > MAX_IMAGE_SIZE { + return Err(ImageError::TooLarge { + size: data.len(), + max: MAX_IMAGE_SIZE, + }); + } + + // Detect format + let content_type = detect_format(data) + .ok_or_else(|| ImageError::InvalidFormat("Unknown image format".to_string()))?; + + // Handle HEIC conversion + let (processed_data, final_content_type) = if is_heic(content_type) { + let jpeg_data = convert_heic_to_jpeg(data)?; + (jpeg_data, "image/jpeg".to_string()) + } else { + (data.to_vec(), content_type.to_string()) + }; + + // Decode to get dimensions + let img = ImageReader::new(Cursor::new(&processed_data)) + .with_guessed_format() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))? + .decode() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))?; + + let (width, height) = img.dimensions(); + + Ok(ProcessedImage { + data: Bytes::from(processed_data), + content_type: final_content_type, + width, + height, + }) +} + +/// Generate a thumbnail from image data +/// +/// Returns JPEG thumbnail data fitting within max_size x max_size +pub fn generate_thumbnail(data: &[u8], max_size: u32) -> Result, ImageError> { + // Decode the image + let img = ImageReader::new(Cursor::new(data)) + .with_guessed_format() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))? 
+ .decode() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))?; + + // Calculate thumbnail dimensions (preserve aspect ratio) + let (orig_width, orig_height) = img.dimensions(); + let (thumb_width, thumb_height) = if orig_width > orig_height { + let ratio = max_size as f64 / orig_width as f64; + (max_size, (orig_height as f64 * ratio) as u32) + } else { + let ratio = max_size as f64 / orig_height as f64; + ((orig_width as f64 * ratio) as u32, max_size) + }; + + // Resize + let thumbnail = img.resize(thumb_width, thumb_height, FilterType::Lanczos3); + + // Encode as JPEG + encode_jpeg(&thumbnail) +} + +/// Encode an image as JPEG +fn encode_jpeg(img: &DynamicImage) -> Result, ImageError> { + let mut output = Vec::new(); + let mut encoder = JpegEncoder::new_with_quality(&mut output, JPEG_QUALITY); + encoder + .encode_image(img) + .map_err(|e| ImageError::EncodeFailed(e.to_string()))?; + Ok(output) +} + +/// Get image dimensions without full decode (faster for validation) +pub fn get_dimensions(data: &[u8]) -> Result<(u32, u32), ImageError> { + let reader = ImageReader::new(Cursor::new(data)) + .with_guessed_format() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))?; + + let dimensions = reader + .into_dimensions() + .map_err(|e| ImageError::DecodeFailed(e.to_string()))?; + + Ok(dimensions) +} + +/// Get the file extension for a content type +pub fn extension_for_content_type(content_type: &str) -> &'static str { + match content_type { + "image/jpeg" => "jpg", + "image/png" => "png", + "image/gif" => "gif", + "image/webp" => "webp", + "image/heic" | "image/heif" => "heic", + "image/bmp" => "bmp", + "image/tiff" => "tiff", + _ => "bin", + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_detect_jpeg() { + let data = [0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46, 0x49, 0x46, 0x00, 0x01]; + assert_eq!(detect_format(&data), Some("image/jpeg")); + } + + #[test] + fn test_detect_png() { + let data = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 
0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D]; + assert_eq!(detect_format(&data), Some("image/png")); + } + + #[test] + fn test_detect_heic() { + let data = [0x00, 0x00, 0x00, 0x18, b'f', b't', b'y', b'p', b'h', b'e', b'i', b'c']; + assert_eq!(detect_format(&data), Some("image/heic")); + } +} diff --git a/src/services/job_queue.rs b/src/services/job_queue.rs new file mode 100644 index 0000000..d4e0089 --- /dev/null +++ b/src/services/job_queue.rs @@ -0,0 +1,86 @@ +//! Job Queue Service +//! +//! Provides job queueing functionality using Apalis and Redis/Valkey. +//! Used to enqueue background jobs for notification processing and delivery. + +use apalis::prelude::*; +use apalis_redis::{RedisConfig, RedisStorage}; +use std::sync::Arc; +use tokio::sync::RwLock; +use uuid::Uuid; + +use crate::jobs::{DeliverNotificationJob, ProcessEventJob, DELIVER_NOTIFICATION_QUEUE, PROCESS_EVENT_QUEUE}; + +/// Job queue service for enqueueing background jobs +pub struct JobQueue { + event_storage: Arc>>, + delivery_storage: Arc>>, +} + +impl Clone for JobQueue { + fn clone(&self) -> Self { + Self { + event_storage: Arc::clone(&self.event_storage), + delivery_storage: Arc::clone(&self.delivery_storage), + } + } +} + +impl JobQueue { + /// Create a new job queue connected to Redis/Valkey + pub async fn new(redis_url: &str) -> anyhow::Result { + let conn = apalis_redis::connect(redis_url).await?; + + let event_storage = RedisStorage::new_with_config( + conn.clone(), + RedisConfig::default().set_namespace(PROCESS_EVENT_QUEUE), + ); + + let delivery_storage = RedisStorage::new_with_config( + conn, + RedisConfig::default().set_namespace(DELIVER_NOTIFICATION_QUEUE), + ); + + Ok(Self { + event_storage: Arc::new(RwLock::new(event_storage)), + delivery_storage: Arc::new(RwLock::new(delivery_storage)), + }) + } + + /// Queue a job to process an event and create notifications + pub async fn queue_process_event(&self, event_id: Uuid) -> anyhow::Result<()> { + let job = ProcessEventJob::new(event_id); + let 
mut storage = self.event_storage.write().await; + storage.push(job).await?; + tracing::debug!(event_id = %event_id, "Queued ProcessEventJob"); + Ok(()) + } + + /// Queue a job to deliver a notification + pub async fn queue_deliver_notification(&self, delivery_id: Uuid) -> anyhow::Result<()> { + let job = DeliverNotificationJob::new(delivery_id); + let mut storage = self.delivery_storage.write().await; + storage.push(job).await?; + tracing::debug!(delivery_id = %delivery_id, "Queued DeliverNotificationJob"); + Ok(()) + } + + /// Queue a retry for notification delivery with incremented attempt + pub async fn queue_delivery_retry(&self, delivery_id: Uuid, attempt: u32) -> anyhow::Result<()> { + let job = DeliverNotificationJob::with_attempt(delivery_id, attempt); + let mut storage = self.delivery_storage.write().await; + storage.push(job).await?; + tracing::debug!(delivery_id = %delivery_id, attempt = attempt, "Queued DeliverNotificationJob retry"); + Ok(()) + } + + /// Get the event storage for worker registration + pub fn event_storage(&self) -> Arc>> { + Arc::clone(&self.event_storage) + } + + /// Get the delivery storage for worker registration + pub fn delivery_storage(&self) -> Arc>> { + Arc::clone(&self.delivery_storage) + } +} diff --git a/src/services/metadata.rs b/src/services/metadata.rs new file mode 100644 index 0000000..2e5dbad --- /dev/null +++ b/src/services/metadata.rs @@ -0,0 +1,657 @@ +//! Metadata Enricher Service +//! +//! Enriches events with human-readable context (names, dates, addresses) +//! instead of just UUIDs, enabling meaningful notification messages. 
+ +use serde_json::{json, Value as JsonValue}; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::models::EventType; + +/// Domain categories for metadata enrichment +#[derive(Debug, Clone, Copy)] +pub enum MetadataDomain { + Service, + Project, + Account, + Customer, + Invoice, + Report, + Profile, + Session, +} + +/// Metadata enricher service +pub struct MetadataEnricher; + +impl MetadataEnricher { + /// Enrich metadata based on event type and entity + /// + /// Returns enriched metadata with human-readable context. + /// Existing metadata takes precedence over auto-enriched values. + pub async fn enrich( + pool: &PgPool, + event_type: &EventType, + entity_type: &str, + entity_id: Uuid, + existing_metadata: Option, + ) -> JsonValue { + let domain = Self::get_domain(event_type); + + let enriched = if let Some(domain) = domain { + match domain { + MetadataDomain::Service => Self::enrich_service(pool, entity_id).await, + MetadataDomain::Project => Self::enrich_project(pool, entity_id).await, + MetadataDomain::Account => Self::enrich_account(pool, entity_type, entity_id).await, + MetadataDomain::Customer => Self::enrich_customer(pool, entity_type, entity_id).await, + MetadataDomain::Invoice => Self::enrich_invoice(pool, entity_id).await, + MetadataDomain::Report => Self::enrich_report(pool, entity_id).await, + MetadataDomain::Profile => Self::enrich_profile(pool, entity_type, entity_id).await, + MetadataDomain::Session => Self::enrich_session(pool, entity_type, entity_id).await, + } + } else { + json!({}) + }; + + // Merge: existing metadata takes precedence + Self::merge_metadata(existing_metadata, enriched) + } + + /// Determine the domain based on event type + fn get_domain(event_type: &EventType) -> Option { + match event_type { + // Service events + EventType::ServiceCreated + | EventType::ServiceUpdated + | EventType::ServiceDeleted + | EventType::ServiceStatusChanged + | EventType::ServiceAssigned + | EventType::ServiceUnassigned + | 
EventType::ServiceRescheduled => Some(MetadataDomain::Service), + + // Service scope events + EventType::ServiceScopeCreated + | EventType::ServiceScopeUpdated + | EventType::ServiceScopeDeleted + | EventType::ServiceScopeActivated + | EventType::ServiceScopeDeactivated + | EventType::ServiceScopeAreaCreated + | EventType::ServiceScopeAreaUpdated + | EventType::ServiceScopeAreaDeleted + | EventType::ServiceScopeTaskCreated + | EventType::ServiceScopeTaskUpdated + | EventType::ServiceScopeTaskDeleted => Some(MetadataDomain::Service), + + // Service task events + EventType::ServiceTaskCompleted | EventType::ServiceTaskUncompleted => { + Some(MetadataDomain::Service) + } + + // Service session events + EventType::ServiceSessionStarted + | EventType::ServiceSessionEnded + | EventType::ServiceSessionReverted => Some(MetadataDomain::Session), + + // Project events + EventType::ProjectCreated + | EventType::ProjectUpdated + | EventType::ProjectDeleted + | EventType::ProjectStatusChanged + | EventType::ProjectAssigned + | EventType::ProjectUnassigned + | EventType::ProjectRescheduled + | EventType::ProjectAmountChanged => Some(MetadataDomain::Project), + + // Project scope events + EventType::ProjectScopeCreated + | EventType::ProjectScopeUpdated + | EventType::ProjectScopeDeleted + | EventType::ProjectScopeActivated + | EventType::ProjectScopeDeactivated + | EventType::ProjectScopeCategoryCreated + | EventType::ProjectScopeCategoryUpdated + | EventType::ProjectScopeCategoryDeleted + | EventType::ProjectScopeTaskCreated + | EventType::ProjectScopeTaskUpdated + | EventType::ProjectScopeTaskDeleted => Some(MetadataDomain::Project), + + // Project task events + EventType::ProjectTaskCompleted | EventType::ProjectTaskUncompleted => { + Some(MetadataDomain::Project) + } + + // Project session events + EventType::ProjectSessionStarted + | EventType::ProjectSessionEnded + | EventType::ProjectSessionReverted => Some(MetadataDomain::Session), + + // Account events + 
EventType::AccountCreated + | EventType::AccountUpdated + | EventType::AccountDeleted + | EventType::AccountStatusChanged + | EventType::AccountAddressCreated + | EventType::AccountAddressUpdated + | EventType::AccountAddressDeleted + | EventType::AccountAddressPrimaryChanged + | EventType::AccountContactCreated + | EventType::AccountContactUpdated + | EventType::AccountContactDeleted + | EventType::AccountContactPrimaryChanged => Some(MetadataDomain::Account), + + // Customer events + EventType::CustomerCreated + | EventType::CustomerUpdated + | EventType::CustomerDeleted + | EventType::CustomerStatusChanged + | EventType::CustomerAddressCreated + | EventType::CustomerAddressUpdated + | EventType::CustomerAddressDeleted + | EventType::CustomerContactCreated + | EventType::CustomerContactUpdated + | EventType::CustomerContactDeleted => Some(MetadataDomain::Customer), + + // Invoice events + EventType::InvoiceCreated + | EventType::InvoiceUpdated + | EventType::InvoiceDeleted + | EventType::InvoiceStatusChanged + | EventType::InvoiceSent + | EventType::InvoicePaid + | EventType::InvoiceOverdue => Some(MetadataDomain::Invoice), + + // Report events + EventType::ReportCreated + | EventType::ReportUpdated + | EventType::ReportDeleted + | EventType::ReportSubmitted + | EventType::ReportApproved => Some(MetadataDomain::Report), + + // Profile events + EventType::TeamProfileCreated + | EventType::TeamProfileUpdated + | EventType::TeamProfileDeleted + | EventType::TeamProfileRoleChanged + | EventType::TeamProfileStatusChanged + | EventType::CustomerProfileCreated + | EventType::CustomerProfileUpdated + | EventType::CustomerProfileDeleted + | EventType::CustomerProfileStatusChanged + | EventType::CustomerProfileAccessGranted + | EventType::CustomerProfileAccessRevoked => Some(MetadataDomain::Profile), + + // Session media events + EventType::SessionNoteCreated + | EventType::SessionNoteUpdated + | EventType::SessionNoteDeleted + | EventType::SessionImageUploaded + | 
EventType::SessionImageUpdated + | EventType::SessionImageDeleted + | EventType::SessionVideoUploaded + | EventType::SessionVideoUpdated + | EventType::SessionVideoDeleted + | EventType::SessionMediaInternalFlagged => Some(MetadataDomain::Session), + + // Events that don't need enrichment + _ => None, + } + } + + /// Enrich service-related events + async fn enrich_service(pool: &PgPool, service_id: Uuid) -> JsonValue { + #[derive(sqlx::FromRow)] + struct ServiceContext { + date: chrono::NaiveDate, + account_name: String, + customer_name: String, + street_address: String, + city: String, + state: String, + zip_code: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT s.date, a.name as account_name, c.name as customer_name, + aa.street_address, aa.city, aa.state, aa.zip_code + FROM services s + JOIN accounts a ON a.id = s.account_id + JOIN customers c ON c.id = a.customer_id + JOIN account_addresses aa ON aa.id = s.account_address_id + WHERE s.id = $1 + "#, + ) + .bind(service_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "service_id": service_id.to_string(), + "date": ctx.date.to_string(), + "account_name": ctx.account_name, + "customer_name": ctx.customer_name, + "address": format!("{}, {}, {} {}", ctx.street_address, ctx.city, ctx.state, ctx.zip_code) + }), + None => json!({ "service_id": service_id.to_string() }), + } + } + + /// Enrich project-related events + async fn enrich_project(pool: &PgPool, project_id: Uuid) -> JsonValue { + #[derive(sqlx::FromRow)] + struct ProjectContext { + name: String, + start_date: Option, + account_name: String, + customer_name: String, + street_address: String, + city: String, + state: String, + zip_code: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT p.name, p.start_date, a.name as account_name, c.name as customer_name, + aa.street_address, aa.city, aa.state, aa.zip_code + FROM projects p + JOIN accounts a ON a.id = p.account_id + JOIN 
customers c ON c.id = a.customer_id + JOIN account_addresses aa ON aa.id = p.account_address_id + WHERE p.id = $1 + "#, + ) + .bind(project_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => { + let mut meta = json!({ + "project_id": project_id.to_string(), + "project_name": ctx.name, + "account_name": ctx.account_name, + "customer_name": ctx.customer_name, + "address": format!("{}, {}, {} {}", ctx.street_address, ctx.city, ctx.state, ctx.zip_code) + }); + if let Some(date) = ctx.start_date { + meta["start_date"] = json!(date.to_string()); + } + meta + } + None => json!({ "project_id": project_id.to_string() }), + } + } + + /// Enrich account-related events + async fn enrich_account(pool: &PgPool, entity_type: &str, entity_id: Uuid) -> JsonValue { + // Handle different account-related entity types + let account_id = match entity_type { + "account" => entity_id, + "account_address" => { + let id: Option = sqlx::query_scalar( + "SELECT account_id FROM account_addresses WHERE id = $1", + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + match id { + Some(id) => id, + None => return json!({ "entity_id": entity_id.to_string() }), + } + } + "account_contact" => { + let id: Option = sqlx::query_scalar( + "SELECT account_id FROM account_contacts WHERE id = $1", + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + match id { + Some(id) => id, + None => return json!({ "entity_id": entity_id.to_string() }), + } + } + _ => return json!({ "entity_id": entity_id.to_string() }), + }; + + #[derive(sqlx::FromRow)] + struct AccountContext { + name: String, + customer_name: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT a.name, c.name as customer_name + FROM accounts a + JOIN customers c ON c.id = a.customer_id + WHERE a.id = $1 + "#, + ) + .bind(account_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + 
"account_id": account_id.to_string(), + "account_name": ctx.name, + "customer_name": ctx.customer_name + }), + None => json!({ "account_id": account_id.to_string() }), + } + } + + /// Enrich customer-related events + async fn enrich_customer(pool: &PgPool, entity_type: &str, entity_id: Uuid) -> JsonValue { + // Handle different customer-related entity types + let customer_id = match entity_type { + "customer" => entity_id, + "customer_address" => { + let id: Option = sqlx::query_scalar( + "SELECT customer_id FROM customer_addresses WHERE id = $1", + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + match id { + Some(id) => id, + None => return json!({ "entity_id": entity_id.to_string() }), + } + } + "customer_contact" => { + let id: Option = sqlx::query_scalar( + "SELECT customer_id FROM customer_contacts WHERE id = $1", + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + match id { + Some(id) => id, + None => return json!({ "entity_id": entity_id.to_string() }), + } + } + _ => return json!({ "entity_id": entity_id.to_string() }), + }; + + let name: Option = + sqlx::query_scalar("SELECT name FROM customers WHERE id = $1") + .bind(customer_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match name { + Some(name) => json!({ + "customer_id": customer_id.to_string(), + "customer_name": name + }), + None => json!({ "customer_id": customer_id.to_string() }), + } + } + + /// Enrich invoice-related events + async fn enrich_invoice(pool: &PgPool, invoice_id: Uuid) -> JsonValue { + #[derive(sqlx::FromRow)] + struct InvoiceContext { + invoice_number: Option, + total: rust_decimal::Decimal, + customer_name: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT i.invoice_number, i.total, c.name as customer_name + FROM invoices i + JOIN customers c ON c.id = i.customer_id + WHERE i.id = $1 + "#, + ) + .bind(invoice_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + 
Some(ctx) => json!({ + "invoice_id": invoice_id.to_string(), + "invoice_number": ctx.invoice_number, + "total": ctx.total.to_string(), + "customer_name": ctx.customer_name + }), + None => json!({ "invoice_id": invoice_id.to_string() }), + } + } + + /// Enrich report-related events + async fn enrich_report(pool: &PgPool, report_id: Uuid) -> JsonValue { + #[derive(sqlx::FromRow)] + struct ReportContext { + team_first_name: String, + team_last_name: String, + start_date: chrono::NaiveDate, + end_date: chrono::NaiveDate, + } + + let result: Option = sqlx::query_as( + r#" + SELECT tp.first_name as team_first_name, tp.last_name as team_last_name, + r.start_date, r.end_date + FROM reports r + JOIN team_profiles tp ON tp.id = r.team_profile_id + WHERE r.id = $1 + "#, + ) + .bind(report_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "report_id": report_id.to_string(), + "team_member": format!("{} {}", ctx.team_first_name, ctx.team_last_name), + "period": format!("{} - {}", ctx.start_date, ctx.end_date) + }), + None => json!({ "report_id": report_id.to_string() }), + } + } + + /// Enrich profile-related events + async fn enrich_profile(pool: &PgPool, entity_type: &str, entity_id: Uuid) -> JsonValue { + match entity_type { + "team_profile" => { + #[derive(sqlx::FromRow)] + struct TeamProfileContext { + first_name: String, + last_name: String, + email: Option, + } + + let result: Option = sqlx::query_as( + "SELECT first_name, last_name, email FROM team_profiles WHERE id = $1", + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "profile_id": entity_id.to_string(), + "profile_type": "team", + "name": format!("{} {}", ctx.first_name, ctx.last_name), + "email": ctx.email + }), + None => json!({ "profile_id": entity_id.to_string(), "profile_type": "team" }), + } + } + "customer_profile" => { + #[derive(sqlx::FromRow)] + struct CustomerProfileContext { + 
first_name: String, + last_name: String, + email: Option, + customer_name: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT cp.first_name, cp.last_name, cp.email, c.name as customer_name + FROM customer_profiles cp + JOIN customers c ON c.id = cp.customer_id + WHERE cp.id = $1 + "#, + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "profile_id": entity_id.to_string(), + "profile_type": "customer", + "name": format!("{} {}", ctx.first_name, ctx.last_name), + "email": ctx.email, + "customer_name": ctx.customer_name + }), + None => json!({ "profile_id": entity_id.to_string(), "profile_type": "customer" }), + } + } + _ => json!({ "profile_id": entity_id.to_string() }), + } + } + + /// Enrich session-related events + async fn enrich_session(pool: &PgPool, entity_type: &str, entity_id: Uuid) -> JsonValue { + match entity_type { + "service_session" => { + #[derive(sqlx::FromRow)] + struct SessionContext { + date: chrono::NaiveDate, + account_name: String, + customer_name: String, + street_address: String, + city: String, + state: String, + zip_code: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT ss.date, a.name as account_name, c.name as customer_name, + aa.street_address, aa.city, aa.state, aa.zip_code + FROM service_sessions ss + JOIN accounts a ON a.id = ss.account_id + JOIN customers c ON c.id = ss.customer_id + JOIN account_addresses aa ON aa.id = ss.account_address_id + WHERE ss.id = $1 + "#, + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "session_id": entity_id.to_string(), + "session_type": "service", + "date": ctx.date.to_string(), + "account_name": ctx.account_name, + "customer_name": ctx.customer_name, + "address": format!("{}, {}, {} {}", ctx.street_address, ctx.city, ctx.state, ctx.zip_code) + }), + None => json!({ "session_id": entity_id.to_string(), "session_type": 
"service" }), + } + } + "project_session" => { + #[derive(sqlx::FromRow)] + struct SessionContext { + date: chrono::NaiveDate, + project_name: String, + account_name: String, + customer_name: String, + street_address: String, + city: String, + state: String, + zip_code: String, + } + + let result: Option = sqlx::query_as( + r#" + SELECT ps.date, p.name as project_name, a.name as account_name, c.name as customer_name, + aa.street_address, aa.city, aa.state, aa.zip_code + FROM project_sessions ps + JOIN projects p ON p.id = ps.project_id + JOIN accounts a ON a.id = ps.account_id + JOIN customers c ON c.id = ps.customer_id + JOIN account_addresses aa ON aa.id = ps.account_address_id + WHERE ps.id = $1 + "#, + ) + .bind(entity_id) + .fetch_optional(pool) + .await + .ok() + .flatten(); + + match result { + Some(ctx) => json!({ + "session_id": entity_id.to_string(), + "session_type": "project", + "date": ctx.date.to_string(), + "project_name": ctx.project_name, + "account_name": ctx.account_name, + "customer_name": ctx.customer_name, + "address": format!("{}, {}, {} {}", ctx.street_address, ctx.city, ctx.state, ctx.zip_code) + }), + None => json!({ "session_id": entity_id.to_string(), "session_type": "project" }), + } + } + _ => json!({ "session_id": entity_id.to_string() }), + } + } + + /// Merge existing metadata with enriched metadata + /// Existing values take precedence over enriched values + fn merge_metadata(existing: Option, enriched: JsonValue) -> JsonValue { + match existing { + Some(JsonValue::Object(mut existing_map)) => { + if let JsonValue::Object(enriched_map) = enriched { + // Add enriched values that don't exist in existing + for (key, value) in enriched_map { + existing_map.entry(key).or_insert(value); + } + } + JsonValue::Object(existing_map) + } + Some(other) => other, // Non-object existing metadata is preserved as-is + None => enriched, + } + } +} diff --git a/src/services/mod.rs b/src/services/mod.rs new file mode 100644 index 0000000..7146db5 --- 
/dev/null
+++ b/src/services/mod.rs
@@ -0,0 +1,54 @@
//! External services integration
//!
//! This module contains integrations with external services like
//! Google Calendar, Gmail, Wave Accounting, and S3/Garage storage.
//! Also includes internal services for events, notifications, and metadata.

pub mod email_templates;
pub mod events;
pub mod gmail;
pub mod google_auth;
pub mod google_calendar;
pub mod image;
pub mod job_queue;
pub mod metadata;
pub mod notification_delivery;
pub mod notifications;
pub mod s3;
pub mod video;
pub mod wave;

// Re-export commonly used types
pub use email_templates::{EmailTemplate, EmailTemplateRegistry, TemplateError};
pub use gmail::{
    Email, EmailListResponse, EmailMessage, GmailError, GmailService, ListEmailsQuery,
    SendEmailRequest, SendEmailResponse,
};
pub use google_auth::{GoogleAuthError, GoogleAuthService, GMAIL_FULL_SCOPE, GMAIL_SEND_SCOPE, GOOGLE_CALENDAR_SCOPE};
pub use google_calendar::{
    Attendee, CalendarError, CalendarEvent, CreateEventRequest, EventDateTime, EventReminder,
    EventReminders, GoogleCalendarService, ListEventsQuery, UpdateEventRequest,
};
pub use image::{
    generate_thumbnail, process_image, ImageError, ProcessedImage, MAX_IMAGE_SIZE, THUMBNAIL_SIZE,
};
pub use s3::{S3Error, S3Service};
pub use video::{extract_metadata, generate_thumbnail as generate_video_thumbnail, verify_video, VideoError, VideoMetadata, MAX_VIDEO_SIZE};
pub use wave::{
    CreateAddressInput as WaveCreateAddressInput,
    CreateCustomerInput as WaveCreateCustomerInput,
    CreateInvoiceInput as WaveCreateInvoiceInput,
    CreateInvoiceItemInput as WaveCreateInvoiceItemInput,
    CreateProductInput as WaveCreateProductInput,
    UpdateCustomerInput as WaveUpdateCustomerInput,
    UpdateProductInput as WaveUpdateProductInput,
    WaveCustomer, WaveError, WaveInvoice, WaveInvoiceConnection, WaveInvoiceListItem,
    WavePageInfo, WaveProduct, WaveService,
};

// Event and notification services
pub use
events::EventPublisher; +pub use job_queue::JobQueue; +pub use metadata::MetadataEnricher; +pub use notification_delivery::{NotificationDeliveryService, MAX_DELIVERY_ATTEMPTS}; +pub use notifications::NotificationProcessor; diff --git a/src/services/notification_delivery.rs b/src/services/notification_delivery.rs new file mode 100644 index 0000000..23cabda --- /dev/null +++ b/src/services/notification_delivery.rs @@ -0,0 +1,363 @@ +//! Notification Delivery Service +//! +//! Handles the actual delivery of notifications through various channels +//! (IN_APP, EMAIL, SMS). Each channel has its own delivery logic. + +use anyhow::{anyhow, Result}; +use chrono::Utc; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::models::{DeliveryStatus, Notification, NotificationChannel, NotificationDelivery, NotificationStatus}; +use crate::services::gmail::{GmailService, SendEmailRequest}; + +/// Maximum number of delivery attempts before giving up +pub const MAX_DELIVERY_ATTEMPTS: i32 = 5; + +/// Notification delivery service +pub struct NotificationDeliveryService; + +impl NotificationDeliveryService { + /// Deliver a notification through the specified channel + /// + /// # Arguments + /// * `pool` - Database connection pool + /// * `gmail_service` - Optional Gmail service for email delivery + /// * `delivery_id` - The delivery record to process + /// + /// # Returns + /// * `Ok(())` - Delivery succeeded + /// * `Err(...)` - Delivery failed (will be retried if attempts < MAX) + pub async fn deliver( + pool: &PgPool, + gmail_service: Option<&GmailService>, + delivery_id: Uuid, + ) -> Result<()> { + // Fetch delivery record with notification + let delivery = Self::get_delivery(pool, delivery_id).await?; + let notification = Self::get_notification(pool, delivery.notification_id).await?; + + // Check if we've exceeded max attempts + if delivery.attempts >= MAX_DELIVERY_ATTEMPTS { + Self::mark_failed(pool, delivery_id, "Max delivery attempts exceeded").await?; + return Err(anyhow!("Max 
delivery attempts exceeded")); + } + + // Update attempt counter + Self::increment_attempts(pool, delivery_id).await?; + + // Deliver based on channel + let result = match delivery.channel { + NotificationChannel::InApp => Self::deliver_in_app(pool, &delivery).await, + NotificationChannel::Email => { + Self::deliver_email(pool, gmail_service, &delivery, ¬ification).await + } + NotificationChannel::Sms => Self::deliver_sms(pool, &delivery, ¬ification).await, + }; + + match result { + Ok(external_id) => { + Self::mark_sent(pool, delivery_id, external_id.as_deref()).await?; + // Update notification status if all deliveries are complete + Self::update_notification_status(pool, notification.base.id).await?; + Ok(()) + } + Err(e) => { + tracing::error!( + delivery_id = %delivery_id, + channel = ?delivery.channel, + error = %e, + "Delivery failed" + ); + Self::mark_sending_failed(pool, delivery_id, &e.to_string()).await?; + Err(e) + } + } + } + + /// Deliver in-app notification (just mark as sent - it's already in the database) + async fn deliver_in_app(_pool: &PgPool, delivery: &NotificationDelivery) -> Result> { + tracing::info!( + delivery_id = %delivery.base.id, + "In-app notification delivered" + ); + Ok(None) + } + + /// Deliver notification via email + async fn deliver_email( + pool: &PgPool, + gmail_service: Option<&GmailService>, + delivery: &NotificationDelivery, + notification: &Notification, + ) -> Result> { + let gmail = gmail_service + .ok_or_else(|| anyhow!("Gmail service not configured"))?; + + // Get recipient email address + let email = Self::get_recipient_email(pool, ¬ification.recipient_type, notification.recipient_id).await?; + + let Some(email) = email else { + return Err(anyhow!("Recipient has no email address")); + }; + + // Send email + let request = SendEmailRequest { + to: vec![email], + cc: None, + bcc: None, + subject: notification.subject.clone(), + body: notification.body.clone(), + content_type: Some("text/html".to_string()), + from_name: 
Some("Nexus Notifications".to_string()), + attachments: None, + }; + + let response = gmail.send_email(request).await?; + + tracing::info!( + delivery_id = %delivery.base.id, + message_id = %response.id, + "Email notification delivered" + ); + + Ok(Some(response.id)) + } + + /// Deliver notification via SMS (placeholder - not implemented yet) + async fn deliver_sms( + _pool: &PgPool, + delivery: &NotificationDelivery, + _notification: &Notification, + ) -> Result> { + tracing::warn!( + delivery_id = %delivery.base.id, + "SMS delivery not implemented" + ); + Err(anyhow!("SMS delivery not implemented")) + } + + /// Get recipient's email address + async fn get_recipient_email(pool: &PgPool, recipient_type: &str, recipient_id: Uuid) -> Result> { + let email: Option> = match recipient_type { + "team_profile" => { + sqlx::query_scalar("SELECT email FROM team_profiles WHERE id = $1") + .bind(recipient_id) + .fetch_optional(pool) + .await? + } + "customer_profile" => { + sqlx::query_scalar("SELECT email FROM customer_profiles WHERE id = $1") + .bind(recipient_id) + .fetch_optional(pool) + .await? 
+ } + _ => None, + }; + + Ok(email.flatten()) + } + + /// Fetch delivery record + async fn get_delivery(pool: &PgPool, delivery_id: Uuid) -> Result { + let delivery: NotificationDelivery = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + FROM notification_deliveries + WHERE id = $1 + "#, + ) + .bind(delivery_id) + .fetch_one(pool) + .await?; + + Ok(delivery) + } + + /// Fetch notification record + async fn get_notification(pool: &PgPool, notification_id: Uuid) -> Result { + let notification: Notification = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + FROM notifications + WHERE id = $1 + "#, + ) + .bind(notification_id) + .fetch_one(pool) + .await?; + + Ok(notification) + } + + /// Increment attempt counter + async fn increment_attempts(pool: &PgPool, delivery_id: Uuid) -> Result<()> { + let now = Utc::now(); + + sqlx::query( + r#" + UPDATE notification_deliveries + SET attempts = attempts + 1, last_attempt_at = $1, status = $2, updated_at = $1 + WHERE id = $3 + "#, + ) + .bind(now) + .bind(DeliveryStatus::Sending) + .bind(delivery_id) + .execute(pool) + .await?; + + Ok(()) + } + + /// Mark delivery as sent + async fn mark_sent(pool: &PgPool, delivery_id: Uuid, external_id: Option<&str>) -> Result<()> { + let now = Utc::now(); + + sqlx::query( + r#" + UPDATE notification_deliveries + SET status = $1, sent_at = $2, delivered_at = $2, external_id = $3, updated_at = $2 + WHERE id = $4 + "#, + ) + .bind(DeliveryStatus::Delivered) + .bind(now) + .bind(external_id) + .bind(delivery_id) + .execute(pool) + .await?; + + Ok(()) + } + + /// Mark delivery as failed (permanently) + async fn mark_failed(pool: &PgPool, delivery_id: Uuid, error: &str) -> Result<()> { + let now = Utc::now(); + + sqlx::query( + r#" + UPDATE 
notification_deliveries + SET status = $1, error_message = $2, updated_at = $3 + WHERE id = $4 + "#, + ) + .bind(DeliveryStatus::Failed) + .bind(error) + .bind(now) + .bind(delivery_id) + .execute(pool) + .await?; + + Ok(()) + } + + /// Mark delivery as failed (will retry) + async fn mark_sending_failed(pool: &PgPool, delivery_id: Uuid, error: &str) -> Result<()> { + let now = Utc::now(); + + // Check current attempts + let attempts: i32 = sqlx::query_scalar("SELECT attempts FROM notification_deliveries WHERE id = $1") + .bind(delivery_id) + .fetch_one(pool) + .await?; + + let status = if attempts >= MAX_DELIVERY_ATTEMPTS { + DeliveryStatus::Failed + } else { + DeliveryStatus::Pending // Will be retried + }; + + sqlx::query( + r#" + UPDATE notification_deliveries + SET status = $1, error_message = $2, updated_at = $3 + WHERE id = $4 + "#, + ) + .bind(status) + .bind(error) + .bind(now) + .bind(delivery_id) + .execute(pool) + .await?; + + Ok(()) + } + + /// Update notification status based on delivery statuses + async fn update_notification_status(pool: &PgPool, notification_id: Uuid) -> Result<()> { + // Count delivery statuses + #[derive(sqlx::FromRow)] + struct DeliveryCounts { + total: i64, + delivered: i64, + failed: i64, + } + + let counts: DeliveryCounts = sqlx::query_as( + r#" + SELECT + COUNT(*) as total, + COUNT(*) FILTER (WHERE status IN ('SENT', 'DELIVERED')) as delivered, + COUNT(*) FILTER (WHERE status = 'FAILED') as failed + FROM notification_deliveries + WHERE notification_id = $1 + "#, + ) + .bind(notification_id) + .fetch_one(pool) + .await?; + + let new_status = if counts.delivered > 0 { + NotificationStatus::Sent + } else if counts.failed == counts.total { + NotificationStatus::Failed + } else { + NotificationStatus::Pending + }; + + sqlx::query( + r#" + UPDATE notifications + SET status = $1, updated_at = NOW() + WHERE id = $2 AND status = 'PENDING' + "#, + ) + .bind(new_status) + .bind(notification_id) + .execute(pool) + .await?; + + Ok(()) + } 
+ + /// Get pending deliveries for a channel (for batch processing) + pub async fn get_pending_deliveries( + pool: &PgPool, + channel: NotificationChannel, + limit: i32, + ) -> Result> { + let deliveries: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + FROM notification_deliveries + WHERE channel = $1 + AND status IN ('PENDING', 'QUEUED') + AND attempts < $2 + ORDER BY created_at ASC + LIMIT $3 + "#, + ) + .bind(&channel) + .bind(MAX_DELIVERY_ATTEMPTS) + .bind(limit) + .fetch_all(pool) + .await?; + + Ok(deliveries) + } +} diff --git a/src/services/notifications.rs b/src/services/notifications.rs new file mode 100644 index 0000000..63bd6f0 --- /dev/null +++ b/src/services/notifications.rs @@ -0,0 +1,401 @@ +//! Notification Processor Service +//! +//! Processes events and creates notifications based on matching rules. +//! Handles recipient determination, template rendering, and notification creation. 
+ +use anyhow::Result; +use chrono::Utc; +use serde_json::Value as JsonValue; +use sqlx::PgPool; +use uuid::Uuid; + +use crate::models::{ + DeliveryStatus, Event, Notification, NotificationChannel, NotificationDelivery, + NotificationRule, NotificationStatus, +}; + +/// Notification processor service +pub struct NotificationProcessor; + +impl NotificationProcessor { + /// Process an event and create notifications based on matching rules + /// + /// # Arguments + /// * `pool` - Database connection pool + /// * `event` - The event to process + /// + /// # Returns + /// * `Ok(Vec)` - Created notifications + pub async fn process_event(pool: &PgPool, event: &Event) -> Result> { + // Find active rules matching this event type + let rules = Self::find_matching_rules(pool, &event.event_type).await?; + + if rules.is_empty() { + tracing::debug!( + event_id = %event.id, + event_type = ?event.event_type, + "No matching notification rules for event" + ); + return Ok(vec![]); + } + + let mut notifications = Vec::new(); + + for rule in rules { + // Check if rule conditions match the event metadata + if !Self::matches_conditions(&rule, event) { + continue; + } + + // Determine recipients based on rule targeting + let recipients = Self::get_recipients(pool, &rule).await?; + + // Render templates with event context + let subject = Self::render_template( + rule.subject_template.as_deref().unwrap_or("Notification"), + event, + ); + let body = Self::render_template( + rule.body_template.as_deref().unwrap_or("You have a new notification."), + event, + ); + + // Generate action URL + let action_url = Self::generate_action_url(&event.entity_type, event.entity_id); + + // Create notification for each recipient + for (recipient_type, recipient_id) in recipients { + let notification = Self::create_notification( + pool, + &rule, + event, + &recipient_type, + recipient_id, + &subject, + &body, + action_url.as_deref(), + ) + .await?; + + // Create delivery records for each channel + 
Self::create_delivery_records(pool, ¬ification, &rule).await?; + + notifications.push(notification); + } + } + + tracing::info!( + event_id = %event.id, + notification_count = notifications.len(), + "Processed event and created notifications" + ); + + Ok(notifications) + } + + /// Find active rules matching the given event type + async fn find_matching_rules(pool: &PgPool, event_type: &crate::models::EventType) -> Result> { + let event_type_str = format!("{:?}", event_type); + + let rules: Vec = sqlx::query_as( + r#" + SELECT id, created_at, updated_at, name, description, is_active, event_types, + channels, target_roles, conditions, subject_template, body_template + FROM notification_rules + WHERE is_active = true + AND event_types @> $1::jsonb + "#, + ) + .bind(serde_json::json!([event_type_str])) + .fetch_all(pool) + .await?; + + Ok(rules) + } + + /// Check if rule conditions match event metadata + fn matches_conditions(rule: &NotificationRule, event: &Event) -> bool { + let Some(conditions) = &rule.conditions else { + return true; // No conditions means always match + }; + + let Some(metadata) = &event.metadata else { + return false; // Has conditions but no metadata to check + }; + + // Simple JSON path matching + // Conditions format: { "key": "expected_value" } or { "key": { "$in": ["val1", "val2"] } } + if let (JsonValue::Object(cond_map), JsonValue::Object(meta_map)) = (conditions, metadata) { + for (key, expected) in cond_map { + let actual = meta_map.get(key); + + match (expected, actual) { + // Direct value comparison + (JsonValue::String(exp), Some(JsonValue::String(act))) if exp == act => continue, + (JsonValue::Number(exp), Some(JsonValue::Number(act))) if exp == act => continue, + (JsonValue::Bool(exp), Some(JsonValue::Bool(act))) if exp == act => continue, + + // $in operator + (JsonValue::Object(op), Some(actual_val)) => { + if let Some(JsonValue::Array(allowed)) = op.get("$in") { + if allowed.contains(actual_val) { + continue; + } + } + return 
false; + } + + // No match + _ => return false, + } + } + true + } else { + false + } + } + + /// Determine recipients based on rule targeting + async fn get_recipients(pool: &PgPool, rule: &NotificationRule) -> Result> { + let mut recipients = Vec::new(); + + // Get specific team profile targets + let team_targets: Vec = sqlx::query_scalar( + "SELECT team_profile_id FROM notification_rule_team_profiles WHERE rule_id = $1", + ) + .bind(rule.base.id) + .fetch_all(pool) + .await?; + + for id in team_targets { + recipients.push(("team_profile".to_string(), id)); + } + + // Get specific customer profile targets + let customer_targets: Vec = sqlx::query_scalar( + "SELECT customer_profile_id FROM notification_rule_customer_profiles WHERE rule_id = $1", + ) + .bind(rule.base.id) + .fetch_all(pool) + .await?; + + for id in customer_targets { + recipients.push(("customer_profile".to_string(), id)); + } + + // Get team profiles by role if target_roles is specified + if let Some(target_roles) = &rule.target_roles { + if let JsonValue::Array(roles) = target_roles { + let role_strings: Vec<&str> = roles + .iter() + .filter_map(|v| v.as_str()) + .collect(); + + if !role_strings.is_empty() { + // Query team profiles with matching roles + let role_targets: Vec = sqlx::query_scalar( + r#" + SELECT id FROM team_profiles + WHERE status = 'ACTIVE' + AND role::text = ANY($1) + "#, + ) + .bind(&role_strings) + .fetch_all(pool) + .await?; + + for id in role_targets { + // Avoid duplicates + if !recipients.iter().any(|(t, i)| t == "team_profile" && *i == id) { + recipients.push(("team_profile".to_string(), id)); + } + } + } + } + } + + Ok(recipients) + } + + /// Render a template with event context + /// + /// Supports simple variable substitution: {{variable_name}} + /// Variables are pulled from event metadata + fn render_template(template: &str, event: &Event) -> String { + let mut result = template.to_string(); + + // Built-in variables + result = result.replace("{{event_type}}", 
&format!("{:?}", event.event_type)); + result = result.replace("{{entity_type}}", &event.entity_type); + result = result.replace("{{entity_id}}", &event.entity_id.to_string()); + result = result.replace("{{timestamp}}", &event.timestamp.to_rfc3339()); + + // Metadata variables - simple loop-based replacement + if let Some(JsonValue::Object(meta)) = &event.metadata { + for (key, value) in meta { + let placeholder = format!("{{{{{}}}}}", key); + let replacement = match value { + JsonValue::String(s) => s.clone(), + JsonValue::Number(n) => n.to_string(), + JsonValue::Bool(b) => b.to_string(), + _ => continue, + }; + result = result.replace(&placeholder, &replacement); + } + } + + result + } + + /// Generate action URL based on entity type + fn generate_action_url(entity_type: &str, entity_id: Uuid) -> Option { + let path = match entity_type { + "service" => format!("/services/{}", entity_id), + "project" => format!("/projects/{}", entity_id), + "account" => format!("/accounts/{}", entity_id), + "customer" => format!("/customers/{}", entity_id), + "invoice" => format!("/invoices/{}", entity_id), + "report" => format!("/reports/{}", entity_id), + "service_session" => format!("/sessions/service/{}", entity_id), + "project_session" => format!("/sessions/project/{}", entity_id), + "team_profile" => format!("/team/{}", entity_id), + "customer_profile" => format!("/customer-profiles/{}", entity_id), + _ => return None, + }; + + Some(path) + } + + /// Create a notification record + async fn create_notification( + pool: &PgPool, + rule: &NotificationRule, + event: &Event, + recipient_type: &str, + recipient_id: Uuid, + subject: &str, + body: &str, + action_url: Option<&str>, + ) -> Result { + let now = Utc::now(); + + let notification: Notification = sqlx::query_as( + r#" + INSERT INTO notifications ( + id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + ) + VALUES (gen_random_uuid(), $1, $1, $2, 
$3, $4, $5, $6, $7, $8, $9, NULL, NULL) + RETURNING id, created_at, updated_at, recipient_type, recipient_id, rule_id, event_id, + status, subject, body, action_url, read_at, metadata + "#, + ) + .bind(now) + .bind(recipient_type) + .bind(recipient_id) + .bind(rule.base.id) + .bind(event.id) + .bind(NotificationStatus::Pending) + .bind(subject) + .bind(body) + .bind(action_url) + .fetch_one(pool) + .await?; + + Ok(notification) + } + + /// Create delivery records for each channel in the rule + async fn create_delivery_records( + pool: &PgPool, + notification: &Notification, + rule: &NotificationRule, + ) -> Result> { + let mut deliveries = Vec::new(); + let now = Utc::now(); + + // Parse channels from JSONB + let channels: Vec = if let JsonValue::Array(arr) = &rule.channels { + arr.iter() + .filter_map(|v| v.as_str()) + .filter_map(|s| match s.to_uppercase().as_str() { + "IN_APP" | "INAPP" => Some(NotificationChannel::InApp), + "EMAIL" => Some(NotificationChannel::Email), + "SMS" => Some(NotificationChannel::Sms), + _ => None, + }) + .collect() + } else { + vec![NotificationChannel::InApp] // Default to in-app + }; + + for channel in channels { + let delivery: NotificationDelivery = sqlx::query_as( + r#" + INSERT INTO notification_deliveries ( + id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + ) + VALUES (gen_random_uuid(), $1, $1, $2, $3, $4, 0, NULL, NULL, NULL, NULL, NULL, NULL) + RETURNING id, created_at, updated_at, notification_id, channel, status, attempts, + last_attempt_at, sent_at, delivered_at, error_message, external_id, metadata + "#, + ) + .bind(now) + .bind(notification.base.id) + .bind(&channel) + .bind(DeliveryStatus::Pending) + .fetch_one(pool) + .await?; + + deliveries.push(delivery); + } + + Ok(deliveries) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_render_template() { + let event = 
Event::new( + crate::models::EventType::ServiceCreated, + "service", + Uuid::new_v4(), + None, + None, + Some(json!({ + "account_name": "Acme Corp", + "customer_name": "John Doe", + "date": "2026-01-10" + })), + ); + + let template = "Service for {{customer_name}} at {{account_name}} on {{date}}"; + let result = NotificationProcessor::render_template(template, &event); + + assert_eq!(result, "Service for John Doe at Acme Corp on 2026-01-10"); + } + + #[test] + fn test_generate_action_url() { + let id = Uuid::new_v4(); + + assert_eq!( + NotificationProcessor::generate_action_url("service", id), + Some(format!("/services/{}", id)) + ); + assert_eq!( + NotificationProcessor::generate_action_url("project", id), + Some(format!("/projects/{}", id)) + ); + assert_eq!( + NotificationProcessor::generate_action_url("unknown", id), + None + ); + } +} diff --git a/src/services/s3.rs b/src/services/s3.rs new file mode 100644 index 0000000..321e755 --- /dev/null +++ b/src/services/s3.rs @@ -0,0 +1,148 @@ +//! 
S3/Garage storage service for media uploads + +use bytes::Bytes; +use s3::creds::Credentials; +use s3::error::S3Error as RustS3Error; +use s3::{Bucket, Region}; +use thiserror::Error; + +use crate::config::Config; + +/// S3 service errors +#[derive(Error, Debug)] +pub enum S3Error { + #[error("S3 upload failed: {0}")] + UploadFailed(String), + + #[error("S3 delete failed: {0}")] + DeleteFailed(String), + + #[error("S3 get failed: {0}")] + GetFailed(String), + + #[error("S3 not configured")] + NotConfigured, + + #[error("S3 credentials error: {0}")] + CredentialsError(String), + + #[error("S3 bucket error: {0}")] + BucketError(String), +} + +impl From for S3Error { + fn from(e: RustS3Error) -> Self { + S3Error::BucketError(e.to_string()) + } +} + +/// S3/Garage storage service +#[derive(Clone)] +pub struct S3Service { + bucket: Box, + bucket_name: String, + endpoint: String, +} + +impl S3Service { + /// Create a new S3 service from configuration + pub async fn new(config: &Config) -> Result { + if config.s3_endpoint.is_empty() || config.s3_access_key.is_empty() { + return Err(S3Error::NotConfigured); + } + + let credentials = Credentials::new( + Some(&config.s3_access_key), + Some(&config.s3_secret_key), + None, + None, + None, + ) + .map_err(|e| S3Error::CredentialsError(e.to_string()))?; + + let region = Region::Custom { + region: "garage".to_string(), + endpoint: config.s3_endpoint.clone(), + }; + + let bucket = Bucket::new(&config.s3_bucket, region, credentials) + .map_err(|e| S3Error::BucketError(e.to_string()))? 
+ .with_path_style(); + + Ok(Self { + bucket, + bucket_name: config.s3_bucket.clone(), + endpoint: config.s3_endpoint.clone(), + }) + } + + /// Upload a file to S3 + /// + /// Returns the key (path) where the file was stored + pub async fn upload_file( + &self, + key: &str, + data: Bytes, + content_type: &str, + ) -> Result { + self.bucket + .put_object_with_content_type(key, &data, content_type) + .await + .map_err(|e| S3Error::UploadFailed(e.to_string()))?; + + Ok(key.to_string()) + } + + /// Delete a file from S3 + pub async fn delete_file(&self, key: &str) -> Result<(), S3Error> { + self.bucket + .delete_object(key) + .await + .map_err(|e| S3Error::DeleteFailed(e.to_string()))?; + + Ok(()) + } + + /// Get a file from S3 + /// + /// Returns the file bytes and content type + pub async fn get_file(&self, key: &str) -> Result<(Bytes, String), S3Error> { + let response = self + .bucket + .get_object(key) + .await + .map_err(|e| S3Error::GetFailed(e.to_string()))?; + + let content_type = response + .headers() + .get("content-type") + .map(|s| s.to_string()) + .unwrap_or_else(|| "application/octet-stream".to_string()); + + Ok((Bytes::from(response.to_vec()), content_type)) + } + + /// Check if a file exists in S3 + pub async fn file_exists(&self, key: &str) -> bool { + self.bucket.head_object(key).await.is_ok() + } + + /// Get the bucket name + pub fn bucket(&self) -> &str { + &self.bucket_name + } + + /// Get the endpoint URL + pub fn endpoint(&self) -> &str { + &self.endpoint + } +} + +impl std::fmt::Debug for S3Service { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("S3Service") + .field("bucket", &self.bucket_name) + .field("endpoint", &self.endpoint) + .finish() + } +} diff --git a/src/services/video.rs b/src/services/video.rs new file mode 100644 index 0000000..b67beaa --- /dev/null +++ b/src/services/video.rs @@ -0,0 +1,335 @@ +//! Video processing service for session media +//! +//! 
Handles video validation, metadata extraction, and thumbnail generation +//! using ffmpeg/ffprobe. + +use std::path::Path; +use std::process::Command; +use thiserror::Error; + +/// Video processing errors +#[derive(Error, Debug)] +pub enum VideoError { + #[error("Invalid video format: {0}")] + InvalidFormat(String), + + #[error("Video too large: {size} bytes (max {max} bytes)")] + TooLarge { size: usize, max: usize }, + + #[error("Failed to extract metadata: {0}")] + MetadataFailed(String), + + #[error("Failed to generate thumbnail: {0}")] + ThumbnailFailed(String), + + #[error("ffprobe not available: {0}")] + FfprobeNotAvailable(String), + + #[error("ffmpeg not available: {0}")] + FfmpegNotAvailable(String), + + #[error("IO error: {0}")] + Io(#[from] std::io::Error), +} + +/// Video metadata +#[derive(Debug, Clone)] +pub struct VideoMetadata { + /// Video width in pixels + pub width: u32, + /// Video height in pixels + pub height: u32, + /// Duration in seconds + pub duration_seconds: i32, + /// Content type (MIME) + pub content_type: String, +} + +/// Maximum video size (250MB) +pub const MAX_VIDEO_SIZE: usize = 250 * 1024 * 1024; + +/// Supported video MIME types +const SUPPORTED_VIDEO_TYPES: &[&str] = &[ + "video/mp4", + "video/quicktime", // .mov + "video/webm", + "video/x-msvideo", // .avi + "video/x-matroska", // .mkv + "video/3gpp", + "video/3gpp2", +]; + +/// Detect video format from magic bytes +fn detect_format(data: &[u8]) -> Option<&'static str> { + if data.len() < 12 { + return None; + } + + // MP4/MOV: ftyp box + if data.len() >= 12 && &data[4..8] == b"ftyp" { + let brand = &data[8..12]; + // Common MP4 brands + if brand == b"isom" + || brand == b"iso2" + || brand == b"mp41" + || brand == b"mp42" + || brand == b"avc1" + || brand == b"M4V " + || brand == b"M4A " + { + return Some("video/mp4"); + } + // QuickTime brands + if brand == b"qt " { + return Some("video/quicktime"); + } + // 3GPP + if brand == b"3gp4" || brand == b"3gp5" || brand == b"3gp6" { 
+ return Some("video/3gpp"); + } + } + + // WebM: EBML header + if data.starts_with(&[0x1A, 0x45, 0xDF, 0xA3]) { + return Some("video/webm"); + } + + // AVI: RIFF....AVI + if data.len() >= 12 && &data[0..4] == b"RIFF" && &data[8..12] == b"AVI " { + return Some("video/x-msvideo"); + } + + // MKV: EBML header (same as WebM, distinguished by doctype) + // For simplicity, we detect WebM/MKV together as webm + + None +} + +/// Verify video data and return content type +pub fn verify_video(data: &[u8], filename: &str) -> Result { + // Check size + if data.len() > MAX_VIDEO_SIZE { + return Err(VideoError::TooLarge { + size: data.len(), + max: MAX_VIDEO_SIZE, + }); + } + + // Try to detect from magic bytes + if let Some(content_type) = detect_format(data) { + return Ok(content_type.to_string()); + } + + // Fall back to extension + let extension = Path::new(filename) + .extension() + .and_then(|e| e.to_str()) + .unwrap_or("") + .to_lowercase(); + + let content_type = match extension.as_str() { + "mp4" | "m4v" => "video/mp4", + "mov" => "video/quicktime", + "webm" => "video/webm", + "avi" => "video/x-msvideo", + "mkv" => "video/x-matroska", + "3gp" => "video/3gpp", + "3g2" => "video/3gpp2", + _ => { + return Err(VideoError::InvalidFormat(format!( + "Unknown video format: {}", + extension + ))) + } + }; + + if !SUPPORTED_VIDEO_TYPES.contains(&content_type) { + return Err(VideoError::InvalidFormat(format!( + "Unsupported video type: {}", + content_type + ))); + } + + Ok(content_type.to_string()) +} + +/// Extract video metadata using ffprobe +pub fn extract_metadata(video_path: &Path) -> Result { + // Run ffprobe to get video info as JSON + let output = Command::new("ffprobe") + .args([ + "-v", + "quiet", + "-print_format", + "json", + "-show_format", + "-show_streams", + "-select_streams", + "v:0", // First video stream only + ]) + .arg(video_path) + .output() + .map_err(|e| VideoError::FfprobeNotAvailable(e.to_string()))?; + + if !output.status.success() { + let stderr = 
String::from_utf8_lossy(&output.stderr); + return Err(VideoError::MetadataFailed(format!( + "ffprobe failed: {}", + stderr + ))); + } + + let json_str = String::from_utf8_lossy(&output.stdout); + + // Parse the JSON + let json: serde_json::Value = serde_json::from_str(&json_str) + .map_err(|e| VideoError::MetadataFailed(format!("Failed to parse ffprobe output: {}", e)))?; + + // Extract dimensions from first video stream + let streams = json["streams"] + .as_array() + .ok_or_else(|| VideoError::MetadataFailed("No streams found".to_string()))?; + + let video_stream = streams + .first() + .ok_or_else(|| VideoError::MetadataFailed("No video stream found".to_string()))?; + + let width = video_stream["width"] + .as_u64() + .ok_or_else(|| VideoError::MetadataFailed("Could not get width".to_string()))? + as u32; + + let height = video_stream["height"] + .as_u64() + .ok_or_else(|| VideoError::MetadataFailed("Could not get height".to_string()))? + as u32; + + // Get duration from format or stream + let duration_seconds = json["format"]["duration"] + .as_str() + .and_then(|s| s.parse::().ok()) + .or_else(|| { + video_stream["duration"] + .as_str() + .and_then(|s| s.parse::().ok()) + }) + .unwrap_or(0.0) as i32; + + // Determine content type from format + let format_name = json["format"]["format_name"] + .as_str() + .unwrap_or("mp4"); + + let content_type = match format_name { + name if name.contains("mp4") || name.contains("m4v") => "video/mp4", + name if name.contains("mov") || name.contains("quicktime") => "video/quicktime", + name if name.contains("webm") => "video/webm", + name if name.contains("avi") => "video/x-msvideo", + name if name.contains("matroska") || name.contains("mkv") => "video/x-matroska", + name if name.contains("3gp") => "video/3gpp", + _ => "video/mp4", // Default + }; + + Ok(VideoMetadata { + width, + height, + duration_seconds, + content_type: content_type.to_string(), + }) +} + +/// Generate a thumbnail from a video at a specific timestamp +pub fn 
generate_thumbnail( + video_path: &Path, + output_path: &Path, + timestamp_seconds: f64, +) -> Result<(), VideoError> { + // Run ffmpeg to extract a frame + let output = Command::new("ffmpeg") + .args([ + "-y", // Overwrite output + "-ss", // Seek to timestamp + &format!("{:.2}", timestamp_seconds), + "-i", // Input file + ]) + .arg(video_path) + .args([ + "-vframes", + "1", // Extract 1 frame + "-vf", + "scale=320:-1", // Scale to 320px width, maintain aspect ratio + "-f", + "mjpeg", // Output as JPEG + "-q:v", + "3", // Quality (2-31, lower is better) + ]) + .arg(output_path) + .output() + .map_err(|e| VideoError::FfmpegNotAvailable(e.to_string()))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(VideoError::ThumbnailFailed(format!( + "ffmpeg failed: {}", + stderr + ))); + } + + // Verify output file exists and has content + let metadata = std::fs::metadata(output_path) + .map_err(|e| VideoError::ThumbnailFailed(format!("Output file not created: {}", e)))?; + + if metadata.len() == 0 { + return Err(VideoError::ThumbnailFailed( + "Thumbnail file is empty".to_string(), + )); + } + + Ok(()) +} + +/// Get the file extension for a video content type +pub fn extension_for_content_type(content_type: &str) -> &'static str { + match content_type { + "video/mp4" => "mp4", + "video/quicktime" => "mov", + "video/webm" => "webm", + "video/x-msvideo" => "avi", + "video/x-matroska" => "mkv", + "video/3gpp" => "3gp", + "video/3gpp2" => "3g2", + _ => "mp4", + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_detect_mp4() { + // Minimal MP4 ftyp box + let data = [ + 0x00, 0x00, 0x00, 0x18, + b'f', b't', b'y', b'p', + b'i', b's', b'o', b'm', + ]; + assert_eq!(detect_format(&data), Some("video/mp4")); + } + + #[test] + fn test_detect_webm() { + let data = [0x1A, 0x45, 0xDF, 0xA3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; + assert_eq!(detect_format(&data), Some("video/webm")); + } + + #[test] + fn 
test_detect_avi() { + let data = [ + b'R', b'I', b'F', b'F', + 0x00, 0x00, 0x00, 0x00, + b'A', b'V', b'I', b' ', + ]; + assert_eq!(detect_format(&data), Some("video/x-msvideo")); + } +} diff --git a/src/services/wave.rs b/src/services/wave.rs new file mode 100644 index 0000000..80880d8 --- /dev/null +++ b/src/services/wave.rs @@ -0,0 +1,1507 @@ +//! Wave Accounting API integration +//! +//! Provides access to Wave's GraphQL API for invoice creation, +//! customer management, and product management. + +use reqwest::Client; +use serde::{Deserialize, Deserializer, Serialize}; + +const WAVE_API_URL: &str = "https://gql.waveapps.com/graphql/public"; + +/// Deserialize a value that can be either a number or a string representation of a number +fn deserialize_string_or_number<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + use serde::de::Error; + + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrNumber { + Number(f64), + String(String), + } + + match StringOrNumber::deserialize(deserializer)? 
{ + StringOrNumber::Number(n) => Ok(n), + StringOrNumber::String(s) => s.parse().map_err(|_| D::Error::custom(format!("invalid number: {}", s))), + } +} + +/// Deserialize null as empty vec (Wave API returns null for empty arrays) +fn deserialize_null_as_empty_vec<'de, D, T>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: Deserialize<'de>, +{ + Option::>::deserialize(deserializer).map(|opt| opt.unwrap_or_default()) +} + +/// Wave API service +pub struct WaveService { + client: Client, + access_token: String, + business_id: String, +} + +impl WaveService { + /// Create a new Wave service + pub fn new(access_token: String, business_id: String) -> Self { + Self { + client: Client::new(), + access_token, + business_id, + } + } + + /// Get the business ID + pub fn business_id(&self) -> &str { + &self.business_id + } + + /// Execute a GraphQL query against the Wave API + async fn execute Deserialize<'de>>( + &self, + query: &str, + variables: Option, + ) -> Result { + let body = serde_json::json!({ + "query": query, + "variables": variables.unwrap_or(serde_json::json!({})) + }); + + let response = self + .client + .post(WAVE_API_URL) + .header("Authorization", format!("Bearer {}", self.access_token)) + .header("Content-Type", "application/json") + .json(&body) + .send() + .await + .map_err(|e| WaveError::Request(e.to_string()))?; + + let status = response.status(); + let text = response + .text() + .await + .map_err(|e| WaveError::Request(e.to_string()))?; + + if !status.is_success() { + return Err(WaveError::Api(format!("HTTP {}: {}", status, text))); + } + + let result: GraphQLResponse = + serde_json::from_str(&text).map_err(|e| WaveError::Parse(e.to_string()))?; + + if let Some(errors) = result.errors { + if !errors.is_empty() { + return Err(WaveError::GraphQL( + errors.into_iter().map(|e| e.message).collect(), + )); + } + } + + result + .data + .ok_or_else(|| WaveError::Api("No data returned".to_string())) + } + + /// List all products for 
the business + pub async fn list_products(&self) -> Result, WaveError> { + let query = r#" + query ListProducts($businessId: ID!) { + business(id: $businessId) { + products(page: 1, pageSize: 100) { + edges { + node { + id + name + description + unitPrice + isSold + isArchived + defaultSalesTaxes { + id + name + rate + } + incomeAccount { + id + name + } + } + } + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id + }); + + let response: ListProductsResponse = self.execute(query, Some(variables)).await?; + + Ok(response + .business + .products + .edges + .into_iter() + .map(|e| e.node) + .collect()) + } + + /// List all customers for the business + pub async fn list_customers(&self) -> Result, WaveError> { + let query = r#" + query ListCustomers($businessId: ID!) { + business(id: $businessId) { + customers(page: 1, pageSize: 100) { + edges { + node { + id + name + email + address { + addressLine1 + addressLine2 + city + province { + code + name + } + postalCode + } + currency { + code + } + } + } + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id + }); + + let response: ListCustomersResponse = self.execute(query, Some(variables)).await?; + + Ok(response + .business + .customers + .edges + .into_iter() + .map(|e| e.node) + .collect()) + } + + /// Get a single invoice by ID + pub async fn get_invoice(&self, invoice_id: &str) -> Result { + let query = r#" + query GetInvoice($businessId: ID!, $invoiceId: ID!) 
{ + business(id: $businessId) { + invoice(id: $invoiceId) { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + items { + description + quantity + unitPrice + total { + value + currency { + code + symbol + } + } + } + subtotal { + value + currency { + code + symbol + } + } + total { + value + currency { + code + symbol + } + } + amountDue { + value + currency { + code + symbol + } + } + amountPaid { + value + currency { + code + symbol + } + } + pdfUrl + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id, + "invoiceId": invoice_id + }); + + let response: GetInvoiceResponse = self.execute(query, Some(variables)).await?; + + response + .business + .invoice + .ok_or_else(|| WaveError::Api("Invoice not found".to_string())) + } + + /// Create a new invoice in Wave + pub async fn create_invoice(&self, input: CreateInvoiceInput) -> Result { + let query = r#" + mutation CreateInvoice($input: InvoiceCreateInput!) { + invoiceCreate(input: $input) { + didSucceed + inputErrors { + code + message + path + } + invoice { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + items { + description + quantity + unitPrice + total { + value + currency { + code + symbol + } + } + } + subtotal { + value + currency { + code + symbol + } + } + total { + value + currency { + code + symbol + } + } + amountDue { + value + currency { + code + symbol + } + } + amountPaid { + value + currency { + code + symbol + } + } + pdfUrl + } + } + } + "#; + + let variables = serde_json::json!({ + "input": input + }); + + let response: CreateInvoiceResponse = self.execute(query, Some(variables)).await?; + + if !response.invoice_create.did_succeed { + let errors: Vec = response + .invoice_create + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .invoice_create + .invoice + .ok_or_else(|| 
WaveError::Api("No invoice returned".to_string())) + } + + /// List invoices for the business with pagination + pub async fn list_invoices( + &self, + page: i32, + page_size: i32, + ) -> Result { + let query = r#" + query ListInvoices($businessId: ID!, $page: Int!, $pageSize: Int!) { + business(id: $businessId) { + invoices(page: $page, pageSize: $pageSize, sort: [INVOICE_DATE_DESC]) { + pageInfo { + currentPage + totalPages + totalCount + } + edges { + node { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + subtotal { + value + currency { + code + symbol + } + } + total { + value + currency { + code + symbol + } + } + amountDue { + value + currency { + code + symbol + } + } + amountPaid { + value + currency { + code + symbol + } + } + } + } + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id, + "page": page, + "pageSize": page_size + }); + + let response: ListInvoicesResponse = self.execute(query, Some(variables)).await?; + + let invoices = response.business.invoices; + Ok(WaveInvoiceConnection { + page_info: invoices.page_info, + invoices: invoices.edges.into_iter().map(|e| e.node).collect(), + }) + } + + /// Approve a draft invoice (moves to SAVED status - point of no return for edits) + pub async fn approve_invoice(&self, invoice_id: &str) -> Result { + let query = r#" + mutation ApproveInvoice($input: InvoiceApproveInput!) 
{ + invoiceApprove(input: $input) { + didSucceed + inputErrors { + code + message + path + } + invoice { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + items { + description + quantity + unitPrice + total { + value + currency { + code + symbol + } + } + } + subtotal { + value + currency { + code + symbol + } + } + total { + value + currency { + code + symbol + } + } + amountDue { + value + currency { + code + symbol + } + } + amountPaid { + value + currency { + code + symbol + } + } + pdfUrl + } + } + } + "#; + + let variables = serde_json::json!({ + "input": { + "invoiceId": invoice_id + } + }); + + let response: ApproveInvoiceResponse = self.execute(query, Some(variables)).await?; + + if !response.invoice_approve.did_succeed { + let errors: Vec = response + .invoice_approve + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .invoice_approve + .invoice + .ok_or_else(|| WaveError::Api("No invoice returned".to_string())) + } + + /// Send an approved invoice to the customer + pub async fn send_invoice(&self, invoice_id: &str) -> Result { + let query = r#" + mutation SendInvoice($input: InvoiceSendInput!) 
{ + invoiceSend(input: $input) { + didSucceed + inputErrors { + code + message + path + } + invoice { + id + invoiceNumber + invoiceDate + dueDate + status + customer { + id + name + } + items { + description + quantity + unitPrice + total { + value + currency { + code + symbol + } + } + } + subtotal { + value + currency { + code + symbol + } + } + total { + value + currency { + code + symbol + } + } + amountDue { + value + currency { + code + symbol + } + } + amountPaid { + value + currency { + code + symbol + } + } + pdfUrl + } + } + } + "#; + + let variables = serde_json::json!({ + "input": { + "invoiceId": invoice_id + } + }); + + let response: SendInvoiceResponse = self.execute(query, Some(variables)).await?; + + if !response.invoice_send.did_succeed { + let errors: Vec = response + .invoice_send + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .invoice_send + .invoice + .ok_or_else(|| WaveError::Api("No invoice returned".to_string())) + } + + /// Delete a draft invoice + pub async fn delete_invoice(&self, invoice_id: &str) -> Result { + let query = r#" + mutation DeleteInvoice($input: InvoiceDeleteInput!) { + invoiceDelete(input: $input) { + didSucceed + inputErrors { + code + message + path + } + } + } + "#; + + let variables = serde_json::json!({ + "input": { + "invoiceId": invoice_id + } + }); + + let response: DeleteInvoiceResponse = self.execute(query, Some(variables)).await?; + + if !response.invoice_delete.did_succeed { + let errors: Vec = response + .invoice_delete + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + Ok(true) + } + + /// Get a single customer by ID + pub async fn get_customer(&self, customer_id: &str) -> Result { + let query = r#" + query GetCustomer($businessId: ID!, $customerId: ID!) 
{ + business(id: $businessId) { + customer(id: $customerId) { + id + name + email + address { + addressLine1 + addressLine2 + city + province { + code + name + } + postalCode + } + currency { + code + } + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id, + "customerId": customer_id + }); + + let response: GetCustomerResponse = self.execute(query, Some(variables)).await?; + + response + .business + .customer + .ok_or_else(|| WaveError::Api("Customer not found".to_string())) + } + + /// Get a single product by ID + pub async fn get_product(&self, product_id: &str) -> Result { + let query = r#" + query GetProduct($businessId: ID!, $productId: ID!) { + business(id: $businessId) { + product(id: $productId) { + id + name + description + unitPrice + isSold + isArchived + defaultSalesTaxes { + id + name + rate + } + incomeAccount { + id + name + } + } + } + } + "#; + + let variables = serde_json::json!({ + "businessId": self.business_id, + "productId": product_id + }); + + let response: GetProductResponse = self.execute(query, Some(variables)).await?; + + response + .business + .product + .ok_or_else(|| WaveError::Api("Product not found".to_string())) + } + + /// Create a new product in Wave + pub async fn create_product(&self, input: CreateProductInput) -> Result { + let query = r#" + mutation CreateProduct($input: ProductCreateInput!) 
{ + productCreate(input: $input) { + didSucceed + inputErrors { + code + message + path + } + product { + id + name + description + unitPrice + isSold + isArchived + defaultSalesTaxes { + id + name + rate + } + incomeAccount { + id + name + } + } + } + } + "#; + + let variables = serde_json::json!({ + "input": input + }); + + let response: CreateProductResponse = self.execute(query, Some(variables)).await?; + + if !response.product_create.did_succeed { + let errors: Vec = response + .product_create + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .product_create + .product + .ok_or_else(|| WaveError::Api("No product returned".to_string())) + } + + /// Update an existing product in Wave + pub async fn update_product(&self, input: UpdateProductInput) -> Result { + let query = r#" + mutation UpdateProduct($input: ProductPatchInput!) { + productPatch(input: $input) { + didSucceed + inputErrors { + code + message + path + } + product { + id + name + description + unitPrice + isSold + isArchived + defaultSalesTaxes { + id + name + rate + } + incomeAccount { + id + name + } + } + } + } + "#; + + let variables = serde_json::json!({ + "input": input + }); + + let response: UpdateProductResponse = self.execute(query, Some(variables)).await?; + + if !response.product_patch.did_succeed { + let errors: Vec = response + .product_patch + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .product_patch + .product + .ok_or_else(|| WaveError::Api("No product returned".to_string())) + } + + /// Archive a product in Wave + pub async fn archive_product(&self, product_id: &str) -> Result { + let query = r#" + mutation ArchiveProduct($input: ProductArchiveInput!) 
{ + productArchive(input: $input) { + didSucceed + inputErrors { + code + message + path + } + product { + id + name + description + unitPrice + isSold + isArchived + defaultSalesTaxes { + id + name + rate + } + incomeAccount { + id + name + } + } + } + } + "#; + + let variables = serde_json::json!({ + "input": { + "id": product_id + } + }); + + let response: ArchiveProductResponse = self.execute(query, Some(variables)).await?; + + if !response.product_archive.did_succeed { + let errors: Vec = response + .product_archive + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .product_archive + .product + .ok_or_else(|| WaveError::Api("No product returned".to_string())) + } + + /// Update an existing customer in Wave + pub async fn update_customer(&self, input: UpdateCustomerInput) -> Result { + let query = r#" + mutation UpdateCustomer($input: CustomerPatchInput!) { + customerPatch(input: $input) { + didSucceed + inputErrors { + code + message + path + } + customer { + id + name + email + address { + addressLine1 + addressLine2 + city + province { + code + name + } + postalCode + } + currency { + code + } + } + } + } + "#; + + // Serialize input and encode the ID to Relay Global ID format + let mut json_input = serde_json::to_value(&input).map_err(|e| WaveError::Parse(e.to_string()))?; + if let serde_json::Value::Object(ref mut map) = json_input { + map.insert("id".to_string(), serde_json::json!(&input.id)); + } + + let variables = serde_json::json!({ + "input": json_input + }); + + let response: UpdateCustomerResponse = self.execute(query, Some(variables)).await?; + + if !response.customer_patch.did_succeed { + let errors: Vec = response + .customer_patch + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .customer_patch + .customer + 
.ok_or_else(|| WaveError::Api("No customer returned".to_string())) + } + + /// Delete a customer in Wave + pub async fn delete_customer(&self, customer_id: &str) -> Result { + let query = r#" + mutation DeleteCustomer($input: CustomerDeleteInput!) { + customerDelete(input: $input) { + didSucceed + inputErrors { + code + message + path + } + } + } + "#; + + let variables = serde_json::json!({ + "input": { + "id": customer_id + } + }); + + let response: DeleteCustomerResponse = self.execute(query, Some(variables)).await?; + + if !response.customer_delete.did_succeed { + let errors: Vec = response + .customer_delete + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + Ok(true) + } + + /// Create a new customer in Wave + pub async fn create_customer(&self, input: CreateCustomerInput) -> Result { + let query = r#" + mutation CreateCustomer($input: CustomerCreateInput!) { + customerCreate(input: $input) { + didSucceed + inputErrors { + code + message + path + } + customer { + id + name + email + address { + addressLine1 + addressLine2 + city + province { + code + name + } + postalCode + } + currency { + code + } + } + } + } + "#; + + let variables = serde_json::json!({ + "input": input + }); + + let response: CreateCustomerResponse = self.execute(query, Some(variables)).await?; + + if !response.customer_create.did_succeed { + let errors: Vec = response + .customer_create + .input_errors + .into_iter() + .map(|e| format!("{}: {}", e.path.join("."), e.message)) + .collect(); + return Err(WaveError::Validation(errors)); + } + + response + .customer_create + .customer + .ok_or_else(|| WaveError::Api("No customer returned".to_string())) + } +} + +// ============================================================================ +// Error types +// ============================================================================ + +#[derive(Debug, thiserror::Error)] +pub enum 
WaveError { + #[error("Request error: {0}")] + Request(String), + + #[error("API error: {0}")] + Api(String), + + #[error("Parse error: {0}")] + Parse(String), + + #[error("GraphQL errors: {0:?}")] + GraphQL(Vec), + + #[error("Validation errors: {0:?}")] + Validation(Vec), +} + +// ============================================================================ +// GraphQL response types +// ============================================================================ + +#[derive(Debug, Deserialize)] +struct GraphQLResponse { + data: Option, + errors: Option>, +} + +#[derive(Debug, Deserialize)] +struct GraphQLError { + message: String, +} + +// ============================================================================ +// Wave API response types +// ============================================================================ + +#[derive(Debug, Deserialize)] +struct ListProductsResponse { + business: BusinessProducts, +} + +#[derive(Debug, Deserialize)] +struct BusinessProducts { + products: Connection, +} + +#[derive(Debug, Deserialize)] +struct ListCustomersResponse { + business: BusinessCustomers, +} + +#[derive(Debug, Deserialize)] +struct BusinessCustomers { + customers: Connection, +} + +#[derive(Debug, Deserialize)] +struct GetInvoiceResponse { + business: BusinessInvoice, +} + +#[derive(Debug, Deserialize)] +struct BusinessInvoice { + invoice: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct CreateInvoiceResponse { + invoice_create: InvoiceCreateResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct InvoiceCreateResult { + did_succeed: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + input_errors: Vec, + invoice: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct CreateCustomerResponse { + customer_create: CustomerCreateResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct 
CustomerCreateResult { + did_succeed: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + input_errors: Vec, + customer: Option, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +struct InputError { + code: String, + message: String, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + path: Vec, +} + +#[derive(Debug, Deserialize)] +struct Connection { + edges: Vec>, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct PagedConnection { + page_info: WavePageInfo, + edges: Vec>, +} + +#[derive(Debug, Deserialize)] +struct Edge { + node: T, +} + +#[derive(Debug, Deserialize)] +struct ListInvoicesResponse { + business: BusinessInvoices, +} + +#[derive(Debug, Deserialize)] +struct BusinessInvoices { + invoices: PagedConnection, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ApproveInvoiceResponse { + invoice_approve: InvoiceMutationResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct SendInvoiceResponse { + invoice_send: InvoiceMutationResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct DeleteInvoiceResponse { + invoice_delete: DeleteResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct InvoiceMutationResult { + did_succeed: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + input_errors: Vec, + invoice: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct DeleteResult { + did_succeed: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + input_errors: Vec, +} + +#[derive(Debug, Deserialize)] +struct GetCustomerResponse { + business: BusinessCustomer, +} + +#[derive(Debug, Deserialize)] +struct BusinessCustomer { + customer: Option, +} + +#[derive(Debug, Deserialize)] +struct GetProductResponse { + business: BusinessProduct, +} + +#[derive(Debug, Deserialize)] 
+struct BusinessProduct { + product: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct CreateProductResponse { + product_create: ProductMutationResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct UpdateProductResponse { + product_patch: ProductMutationResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ArchiveProductResponse { + product_archive: ProductMutationResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ProductMutationResult { + did_succeed: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + input_errors: Vec, + product: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct UpdateCustomerResponse { + customer_patch: CustomerCreateResult, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct DeleteCustomerResponse { + customer_delete: DeleteResult, +} + +// ============================================================================ +// Wave data types (public) +// ============================================================================ + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveProduct { + pub id: String, + pub name: String, + pub description: Option, + #[serde(deserialize_with = "deserialize_string_or_number")] + pub unit_price: f64, + pub is_sold: bool, + pub is_archived: bool, + #[serde(default, deserialize_with = "deserialize_null_as_empty_vec")] + pub default_sales_taxes: Vec, + pub income_account: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveTax { + pub id: String, + pub name: String, + #[serde(deserialize_with = "deserialize_string_or_number")] + pub rate: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveAccount { + pub id: String, + pub name: String, +} + +#[derive(Debug, Clone, Serialize, 
Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveCustomer { + pub id: String, + pub name: String, + pub email: Option, + pub address: Option, + pub currency: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveAddress { + pub address_line1: Option, + pub address_line2: Option, + pub city: Option, + pub province: Option, + pub postal_code: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveProvince { + pub code: String, + pub name: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveCurrency { + pub code: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveInvoice { + pub id: String, + pub invoice_number: String, + pub invoice_date: String, + pub due_date: Option, + pub status: String, + pub customer: WaveInvoiceCustomer, + pub items: Vec, + pub subtotal: WaveMoney, + pub total: WaveMoney, + pub amount_due: WaveMoney, + pub amount_paid: WaveMoney, + pub pdf_url: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveInvoiceCustomer { + pub id: String, + pub name: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveInvoiceItem { + pub description: String, + #[serde(deserialize_with = "deserialize_string_or_number")] + pub quantity: f64, + #[serde(deserialize_with = "deserialize_string_or_number")] + pub unit_price: f64, + pub total: WaveMoney, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveMoney { + pub value: String, + pub currency: WaveMoneyurrency, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WaveMoneyurrency { + pub code: String, + pub symbol: String, +} + +// ============================================================================ +// Input types (for mutations) +// ============================================================================ 
+ +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateInvoiceInput { + pub business_id: String, + pub customer_id: String, + pub items: Vec, + pub invoice_date: String, + pub due_date: Option, + pub memo: Option, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateInvoiceItemInput { + pub product_id: String, + pub description: Option, + pub quantity: f64, + pub unit_price: f64, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateCustomerInput { + pub business_id: String, + pub name: String, + pub email: Option, + pub address: Option, + pub currency: Option, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateAddressInput { + pub address_line1: Option, + pub address_line2: Option, + pub city: Option, + pub province_code: Option, + pub postal_code: Option, + pub country_code: Option, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateProductInput { + pub business_id: String, + pub name: String, + pub description: Option, + pub unit_price: f64, + pub is_sold: bool, + pub income_account_id: Option, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UpdateProductInput { + pub id: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub unit_price: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub income_account_id: Option, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UpdateCustomerInput { + pub id: String, + pub name: Option, + pub email: Option, + pub address: Option, + pub currency: Option, +} + +// ============================================================================ +// Pagination types (public) +// 
============================================================================ + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WavePageInfo { + pub current_page: i32, + pub total_pages: i32, + pub total_count: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveInvoiceConnection { + pub page_info: WavePageInfo, + pub invoices: Vec, +} + +/// Lighter invoice type for list views (no items array) +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WaveInvoiceListItem { + pub id: String, + pub invoice_number: String, + pub invoice_date: String, + pub due_date: Option, + pub status: String, + pub customer: WaveInvoiceCustomer, + pub subtotal: WaveMoney, + pub total: WaveMoney, + pub amount_due: WaveMoney, + pub amount_paid: WaveMoney, +} diff --git a/src/signals.rs b/src/signals.rs new file mode 100644 index 0000000..eec876a --- /dev/null +++ b/src/signals.rs @@ -0,0 +1,100 @@ +use crate::db::Database; +use std::path::Path; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::time::{Duration, SystemTime}; + +/// Counter for tracking credential refresh attempts +static REFRESH_COUNT: AtomicU64 = AtomicU64::new(0); + +const VAULT_SECRETS_PATH: &str = "/vault/secrets/.env"; +const FILE_POLL_INTERVAL: Duration = Duration::from_secs(10); + +/// Perform the credential refresh +async fn do_refresh(db: &Database, source: &str) { + let count = REFRESH_COUNT.fetch_add(1, Ordering::SeqCst) + 1; + tracing::warn!( + refresh_count = count, + source = source, + "Starting credential refresh" + ); + + let start = std::time::Instant::now(); + match db.refresh().await { + Ok(()) => { + let elapsed = start.elapsed(); + tracing::info!( + refresh_count = count, + elapsed_ms = elapsed.as_millis() as u64, + "Credential refresh completed successfully" + ); + } + Err(e) => { + let elapsed = start.elapsed(); + tracing::error!( + refresh_count 
= count, + elapsed_ms = elapsed.as_millis() as u64, + error = %e, + "Failed to refresh credentials - DATABASE MAY BE STALE" + ); + } + } +} + +#[cfg(unix)] +pub async fn handle_sighup(db: Database) { + use tokio::signal::unix::{signal, SignalKind}; + + let mut stream = signal(SignalKind::hangup()) + .expect("Failed to create SIGHUP handler"); + + let pid = std::process::id(); + tracing::info!( + pid = pid, + "SIGHUP handler registered for credential refresh" + ); + + while stream.recv().await.is_some() { + do_refresh(&db, "SIGHUP").await; + } + + tracing::error!("SIGHUP handler loop exited unexpectedly!"); +} + +#[cfg(not(unix))] +pub async fn handle_sighup(_db: Database) { + tracing::warn!("SIGHUP handling not supported on this platform"); +} + +/// Watch the secrets file for changes and trigger refresh +/// This is a fallback for when SIGHUP signaling doesn't work +pub async fn watch_secrets_file(db: Database) { + let path = Path::new(VAULT_SECRETS_PATH); + + // Get initial mtime + let mut last_mtime = get_mtime(path); + + tracing::info!( + path = VAULT_SECRETS_PATH, + poll_interval_secs = FILE_POLL_INTERVAL.as_secs(), + "File watcher started for credential refresh" + ); + + loop { + tokio::time::sleep(FILE_POLL_INTERVAL).await; + + let current_mtime = get_mtime(path); + + if current_mtime != last_mtime { + tracing::info!( + path = VAULT_SECRETS_PATH, + "Secrets file changed, triggering refresh" + ); + last_mtime = current_mtime; + do_refresh(&db, "file_watch").await; + } + } +} + +fn get_mtime(path: &Path) -> Option { + path.metadata().ok().and_then(|m| m.modified().ok()) +} diff --git a/static/images/logo-white.png b/static/images/logo-white.png new file mode 100644 index 0000000..dbeff20 Binary files /dev/null and b/static/images/logo-white.png differ diff --git a/vault/agent-config-kratos-migrate.hcl b/vault/agent-config-kratos-migrate.hcl new file mode 100644 index 0000000..efe756e --- /dev/null +++ b/vault/agent-config-kratos-migrate.hcl @@ -0,0 +1,35 @@ +# 
Vault Agent Configuration - Kratos Migrations +# Uses nexus-kratos-migrate role: database/creds/nexus-kratos-migrate (15m TTL) + +pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/kratos-env-migrate.ctmpl" + destination = "/vault/secrets/.env" + perms = 0644 +} diff --git a/vault/agent-config-kratos.hcl b/vault/agent-config-kratos.hcl new file mode 100644 index 0000000..9b1d288 --- /dev/null +++ b/vault/agent-config-kratos.hcl @@ -0,0 +1,37 @@ +# Vault Agent Configuration - Kratos Runtime +# Renders Kratos secrets (SMTP, cookies, etc.) from secret/data/nexus/kratos +# Note: DSN is static (points to PgBouncer), DB creds are handled by pgbouncer container + +pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + static_secret_render_interval = "5m" + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/kratos-env.ctmpl" + destination = "/vault/secrets/.env" + perms = 0644 +} diff --git a/vault/agent-config-migrate.hcl b/vault/agent-config-migrate.hcl new file mode 100644 index 0000000..8b53336 --- /dev/null +++ b/vault/agent-config-migrate.hcl @@ -0,0 +1,39 @@ +# Vault Agent Configuration - Migrations +# Uses nexus-migrate role: database/creds/nexus-migrate (15m TTL, DDL+DML) +# One-shot mode: render template once and exit + 
+pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/env-migrate.ctmpl" + destination = "/vault/secrets/.env" + perms = 0644 +} + +# Exit after rendering (one-shot mode for migrations) +exit_after_auth = false diff --git a/vault/agent-config-oathkeeper.hcl b/vault/agent-config-oathkeeper.hcl new file mode 100644 index 0000000..164f464 --- /dev/null +++ b/vault/agent-config-oathkeeper.hcl @@ -0,0 +1,36 @@ +# Vault Agent Configuration - Oathkeeper Runtime +# Uses nexus-oathkeeper role: secret/data/nexus/oathkeeper only + +pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + static_secret_render_interval = "5m" + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/oathkeeper-env.ctmpl" + destination = "/vault/secrets/.env" + perms = 0644 +} diff --git a/vault/agent-config-pgbouncer.hcl b/vault/agent-config-pgbouncer.hcl new file mode 100644 index 0000000..f82a647 --- /dev/null +++ b/vault/agent-config-pgbouncer.hcl @@ -0,0 +1,41 @@ +# Vault Agent Configuration - PgBouncer +# Renders pgbouncer.ini with dynamic database credentials from Vault +# Sends SIGHUP to PgBouncer on credential refresh + +pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + 
config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + static_secret_render_interval = "5m" + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/pgbouncer.ini.ctmpl" + destination = "/etc/pgbouncer/pgbouncer.ini" + perms = 0644 + + exec { + command = ["sh", "-c", "PID=$(cat /var/run/pgbouncer/pgbouncer.pid 2>/dev/null); echo \"Sending SIGHUP to PgBouncer PID: $PID\"; kill -HUP $PID && echo 'SIGHUP sent successfully' || echo 'Failed to send SIGHUP (PgBouncer may not be running yet)'"] + } +} diff --git a/vault/agent-config.hcl b/vault/agent-config.hcl new file mode 100644 index 0000000..d912fab --- /dev/null +++ b/vault/agent-config.hcl @@ -0,0 +1,40 @@ +# Vault Agent Configuration - App Runtime +# Uses nexus-app role: database/creds/nexus-app (1h TTL) + secret/data/nexus/* + +pid_file = "/tmp/vault-agent.pid" + +vault { + address = "http://vault.example.local:8200" +} + +auto_auth { + method "approle" { + config = { + role_id_file_path = "/vault/role-id" + secret_id_file_path = "/vault/secret-id" + remove_secret_id_file_after_reading = false + } + } + + sink "file" { + config = { + path = "/vault/token" + mode = 0644 + } + } +} + +template_config { + static_secret_render_interval = "5m" + exit_on_retry_failure = true +} + +template { + source = "/vault/templates/env.ctmpl" + destination = "/vault/secrets/.env" + perms = 0644 + + exec { + command = ["sh", "-c", "PID=$(cat /vault/secrets/nexus.pid 2>/dev/null); echo \"Sending SIGHUP to PID: $PID\"; kill -HUP $PID && echo 'SIGHUP sent successfully' || echo 'Failed to send SIGHUP'"] + } +} diff --git a/vault/templates/env-migrate.ctmpl b/vault/templates/env-migrate.ctmpl new file mode 100644 index 0000000..afffd21 --- /dev/null +++ b/vault/templates/env-migrate.ctmpl @@ -0,0 +1,4 @@ +# Database 
(migration credentials from Vault - 15m TTL, DDL+DML) +{{ with secret "database/creds/nexus-migrate" -}} +DATABASE_URL=postgresql://{{ .Data.username }}:{{ .Data.password }}@db.example.local:5432/nexus +{{- end }} diff --git a/vault/templates/env.ctmpl b/vault/templates/env.ctmpl new file mode 100644 index 0000000..1070c45 --- /dev/null +++ b/vault/templates/env.ctmpl @@ -0,0 +1,43 @@ +# Server +HOST=0.0.0.0 +PORT=5050 + +# Database (dynamic credentials from Vault) +{{ with secret "database/creds/nexus-app" -}} +DATABASE_URL=postgresql://{{ .Data.username }}:{{ .Data.password }}@db.example.local:5432/nexus +{{- end }} + +# Valkey/Redis +{{ with secret "secret/data/nexus/valkey" -}} +VALKEY_URL=redis://{{ .Data.data.username }}:{{ .Data.data.password }}@{{ .Data.data.host }}:{{ .Data.data.port }}/0 +VALKEY_KEY_PREFIX={{ .Data.data.key_prefix }} +{{- end }} + +# S3 Storage +{{ with secret "secret/data/nexus/s3" -}} +S3_ENDPOINT={{ .Data.data.endpoint }} +S3_BUCKET={{ .Data.data.bucket }} +S3_ACCESS_KEY={{ .Data.data.access_key }} +S3_SECRET_KEY={{ .Data.data.secret_key }} +{{- end }} + +# Ory Oathkeeper +{{ with secret "secret/data/nexus/oathkeeper" -}} +OATHKEEPER_SECRET={{ .Data.data.secret }} +{{- end }} + +# Google Services (Calendar & Gmail) +{{ with secret "secret/data/nexus/google" -}} +GOOGLE_SERVICE_ACCOUNT_KEY={{ .Data.data.service_account_key }} +GOOGLE_CALENDAR_ID={{ .Data.data.calendar_id }} +GOOGLE_GMAIL_USER={{ .Data.data.gmail_user }} +{{- end }} + +# Wave Accounting +{{ with secret "secret/data/nexus/wave" -}} +WAVE_ACCESS_TOKEN={{ .Data.data.access_token }} +WAVE_BUSINESS_ID={{ .Data.data.business_id }} +{{- end }} + +# Logging +RUST_LOG=nexus=info,tower_http=info diff --git a/vault/templates/kratos-env-migrate.ctmpl b/vault/templates/kratos-env-migrate.ctmpl new file mode 100644 index 0000000..0fe5edb --- /dev/null +++ b/vault/templates/kratos-env-migrate.ctmpl @@ -0,0 +1,6 @@ +# Kratos Migration Environment +# Rendered by Vault Agent from 
database/creds/nexus-kratos-migrate + +{{ with secret "database/creds/nexus-kratos-migrate" -}} +DSN=postgres://{{ .Data.username | urlquery }}:{{ .Data.password | urlquery }}@db.example.local:5432/nexus?sslmode=disable&search_path=kratos,public +{{- end }} diff --git a/vault/templates/kratos-env.ctmpl b/vault/templates/kratos-env.ctmpl new file mode 100644 index 0000000..650a57a --- /dev/null +++ b/vault/templates/kratos-env.ctmpl @@ -0,0 +1,16 @@ +# Kratos Runtime Environment +# Rendered by Vault Agent from secret/data/nexus/kratos +# DSN points to PgBouncer (localhost:6432) which proxies to PostgreSQL with dynamic Vault creds + +# Static DSN to PgBouncer - auth_type=trust so no password needed +# search_path is set in PgBouncer's connect_query +DSN="postgres://kratos@127.0.0.1:6432/nexus?sslmode=disable" + +{{ with secret "secret/data/nexus/kratos" -}} +SECRETS_COOKIE="{{ .Data.data.secrets_cookie }}" +SECRETS_CIPHER="{{ .Data.data.secrets_cipher }}" +SECRETS_DEFAULT="{{ .Data.data.secrets_default }}" +COURIER_SMTP_CONNECTION_URI="{{ .Data.data.smtp_connection_uri }}" +COURIER_SMTP_FROM_ADDRESS="{{ .Data.data.smtp_from_address }}" +COURIER_SMTP_FROM_NAME="{{ .Data.data.smtp_from_name }}" +{{- end }} diff --git a/vault/templates/oathkeeper-env.ctmpl b/vault/templates/oathkeeper-env.ctmpl new file mode 100644 index 0000000..870db29 --- /dev/null +++ b/vault/templates/oathkeeper-env.ctmpl @@ -0,0 +1,10 @@ +# Oathkeeper Environment +# Rendered by Vault Agent from secret/data/nexus/oathkeeper + +# Shared secret for backend authentication +{{ with secret "secret/data/nexus/oathkeeper" -}} +OATHKEEPER_SECRET={{ .Data.data.secret }} +{{- end }} + +# Backend URL (static config) +BACKEND_URL=http://backend.example.local:5500 diff --git a/vault/templates/pgbouncer.ini.ctmpl b/vault/templates/pgbouncer.ini.ctmpl new file mode 100644 index 0000000..f15daf7 --- /dev/null +++ b/vault/templates/pgbouncer.ini.ctmpl @@ -0,0 +1,46 @@ +# PgBouncer Configuration - Rendered by Vault 
Agent +# Proxies Kratos connections to PostgreSQL with dynamic Vault credentials + +{{ with secret "database/creds/nexus-kratos-app" -}} +[databases] +; Kratos connects here, PgBouncer uses Vault credentials for PostgreSQL +; connect_query sets search_path since PgBouncer doesn't support it as startup param +; Both 'nexus' and 'kratos' aliases route to the same backend with kratos schema +nexus = host=db.example.local port=5432 dbname=nexus user={{ .Data.username }} password={{ .Data.password }} connect_query='SET search_path TO kratos,public' +kratos = host=db.example.local port=5432 dbname=nexus user={{ .Data.username }} password={{ .Data.password }} connect_query='SET search_path TO kratos,public' + +[pgbouncer] +listen_addr = 127.0.0.1 +listen_port = 6432 +unix_socket_dir = /var/run/pgbouncer + +; Trust localhost - only local processes can connect +; PgBouncer handles real auth to PostgreSQL with Vault credentials +auth_type = trust +auth_file = /etc/pgbouncer/userlist.txt + +; Connection pooling +pool_mode = session +max_client_conn = 100 +default_pool_size = 20 +min_pool_size = 5 +reserve_pool_size = 5 + +; Timeouts +server_connect_timeout = 15 +server_idle_timeout = 600 +client_idle_timeout = 0 + +; Logging +logfile = /var/log/pgbouncer/pgbouncer.log +log_connections = 1 +log_disconnections = 1 +log_pooler_errors = 1 + +; Admin +admin_users = pgbouncer +stats_users = pgbouncer + +; Pid file for SIGHUP from Vault Agent +pidfile = /var/run/pgbouncer/pgbouncer.pid +{{- end }}