From fd5d81304e920926f9f07e3cf2a0718af5cdbb23 Mon Sep 17 00:00:00 2001 From: Damien Coles Date: Mon, 26 Jan 2026 11:09:40 -0500 Subject: [PATCH] public-ready-init --- .env.example | 49 + .gitignore | 29 + .mcp.json.example | 9 + Dockerfile | 56 + README.md | 269 ++ config/__init__.py | 6 + config/asgi.py | 23 + config/celery.py | 51 + config/db_backend/__init__.py | 6 + config/db_backend/base.py | 56 + config/settings.py | 385 +++ config/storage.py | 27 + config/urls.py | 40 + core/__init__.py | 0 core/admin.py | 645 ++++ core/apps.py | 25 + core/chat/__init__.py | 1 + core/chat/consumers.py | 261 ++ core/chat/service.py | 627 ++++ core/graphql/__init__.py | 9 + core/graphql/enums.py | 9 + core/graphql/filters/__init__.py | 18 + core/graphql/filters/account.py | 41 + core/graphql/filters/account_punchlist.py | 8 + core/graphql/filters/customer.py | 42 + core/graphql/filters/invoice.py | 10 + core/graphql/filters/labor.py | 9 + core/graphql/filters/messaging.py | 29 + core/graphql/filters/profile.py | 14 + core/graphql/filters/project.py | 12 + core/graphql/filters/project_punchlist.py | 9 + core/graphql/filters/project_scope.py | 37 + .../graphql/filters/project_scope_template.py | 65 + core/graphql/filters/report.py | 10 + core/graphql/filters/revenue.py | 9 + core/graphql/filters/schedule.py | 9 + core/graphql/filters/scope.py | 40 + core/graphql/filters/scope_template.py | 52 + core/graphql/filters/service.py | 12 + core/graphql/filters/session.py | 52 + core/graphql/filters/session_image.py | 48 + core/graphql/filters/session_note.py | 51 + core/graphql/filters/session_video.py | 75 + core/graphql/inputs/__init__.py | 18 + core/graphql/inputs/account.py | 76 + core/graphql/inputs/account_punchlist.py | 17 + core/graphql/inputs/customer.py | 78 + core/graphql/inputs/invoice.py | 29 + core/graphql/inputs/labor.py | 21 + core/graphql/inputs/messaging.py | 75 + core/graphql/inputs/profile.py | 53 + core/graphql/inputs/project.py | 45 + core/graphql/inputs/project_punchlist.py | 17 + core/graphql/inputs/project_scope.py | 66 + core/graphql/inputs/project_scope_template.py | 50 + core/graphql/inputs/report.py | 21 + core/graphql/inputs/revenue.py | 23 + core/graphql/inputs/schedule.py | 39 + core/graphql/inputs/scope.py | 84 + core/graphql/inputs/scope_template.py | 63 + core/graphql/inputs/service.py | 34 + core/graphql/inputs/session.py | 36 + core/graphql/inputs/session_image.py | 17 + core/graphql/inputs/session_note.py | 35 + core/graphql/inputs/session_video.py | 17 + core/graphql/mutations/__init__.py | 18 + core/graphql/mutations/account.py | 188 ++ core/graphql/mutations/account_punchlist.py | 114 + core/graphql/mutations/customer.py | 188 ++ core/graphql/mutations/event.py | 265 ++ core/graphql/mutations/invoice.py | 105 + core/graphql/mutations/labor.py | 58 + core/graphql/mutations/messaging.py | 513 +++ core/graphql/mutations/profile.py | 131 + core/graphql/mutations/project.py | 188 ++ core/graphql/mutations/project_punchlist.py | 114 + core/graphql/mutations/project_scope.py | 218 ++ .../mutations/project_scope_template.py | 141 + core/graphql/mutations/report.py | 70 + core/graphql/mutations/revenue.py | 58 + core/graphql/mutations/schedule.py | 108 + core/graphql/mutations/scope.py | 294 ++ core/graphql/mutations/scope_template.py | 246 ++ core/graphql/mutations/service.py | 327 ++ core/graphql/mutations/session.py | 467 +++ core/graphql/mutations/session_image.py | 221 ++ core/graphql/mutations/session_note.py | 109 + core/graphql/mutations/session_video.py | 330 ++ 
core/graphql/pubsub.py | 49 + core/graphql/queries/__init__.py | 18 + core/graphql/queries/account.py | 13 + core/graphql/queries/account_punchlist.py | 12 + core/graphql/queries/customer.py | 13 + core/graphql/queries/dashboard.py | 274 ++ core/graphql/queries/event.py | 206 ++ core/graphql/queries/invoice.py | 10 + core/graphql/queries/labor.py | 10 + core/graphql/queries/messaging.py | 148 + core/graphql/queries/profile.py | 27 + core/graphql/queries/project.py | 35 + core/graphql/queries/project_punchlist.py | 12 + core/graphql/queries/project_scope.py | 47 + .../graphql/queries/project_scope_template.py | 49 + core/graphql/queries/report.py | 10 + core/graphql/queries/revenue.py | 10 + core/graphql/queries/schedule.py | 10 + core/graphql/queries/scope.py | 17 + core/graphql/queries/scope_template.py | 24 + core/graphql/queries/service.py | 35 + core/graphql/queries/session.py | 45 + core/graphql/queries/session_image.py | 19 + core/graphql/queries/session_note.py | 20 + core/graphql/queries/session_video.py | 20 + core/graphql/schema.py | 157 + core/graphql/subscriptions/__init__.py | 18 + core/graphql/subscriptions/account.py | 52 + core/graphql/subscriptions/account_address.py | 52 + core/graphql/subscriptions/account_contact.py | 52 + .../subscriptions/account_punchlist.py | 52 + core/graphql/subscriptions/customer.py | 52 + .../graphql/subscriptions/customer_address.py | 52 + .../graphql/subscriptions/customer_contact.py | 52 + core/graphql/subscriptions/invoice.py | 52 + core/graphql/subscriptions/labor.py | 52 + core/graphql/subscriptions/messaging.py | 211 ++ core/graphql/subscriptions/profile.py | 95 + core/graphql/subscriptions/project.py | 52 + .../subscriptions/project_punchlist.py | 52 + core/graphql/subscriptions/project_scope.py | 141 + .../subscriptions/project_scope_template.py | 145 + core/graphql/subscriptions/report.py | 52 + core/graphql/subscriptions/revenue.py | 52 + core/graphql/subscriptions/schedule.py | 52 + core/graphql/subscriptions/scope.py | 179 ++ core/graphql/subscriptions/scope_template.py | 161 + core/graphql/subscriptions/service.py | 52 + core/graphql/types/__init__.py | 20 + core/graphql/types/account.py | 55 + core/graphql/types/account_punchlist.py | 10 + core/graphql/types/customer.py | 44 + core/graphql/types/dashboard.py | 48 + core/graphql/types/event.py | 111 + core/graphql/types/invoice.py | 19 + core/graphql/types/labor.py | 12 + core/graphql/types/messaging.py | 288 ++ core/graphql/types/profile.py | 32 + core/graphql/types/project.py | 24 + core/graphql/types/project_punchlist.py | 10 + core/graphql/types/project_scope.py | 44 + core/graphql/types/project_scope_template.py | 34 + core/graphql/types/report.py | 96 + core/graphql/types/revenue.py | 13 + core/graphql/types/schedule.py | 21 + core/graphql/types/scope.py | 46 + core/graphql/types/scope_template.py | 32 + core/graphql/types/service.py | 15 + core/graphql/types/session.py | 47 + core/graphql/types/session_image.py | 32 + core/graphql/types/session_note.py | 26 + core/graphql/types/session_video.py | 38 + core/graphql/utils.py | 304 ++ core/mcp/__init__.py | 2 + core/mcp/__main__.py | 5 + core/mcp/auth.py | 322 ++ core/mcp/base.py | 39 + core/mcp/server.py | 74 + core/mcp/tools/__init__.py | 29 + core/mcp/tools/admin.py | 279 ++ core/mcp/tools/auth.py | 57 + core/mcp/tools/customers.py | 263 ++ core/mcp/tools/dashboard.py | 168 + core/mcp/tools/notifications.py | 257 ++ core/mcp/tools/projects.py | 424 +++ core/mcp/tools/services.py | 494 +++ core/mcp/tools/sessions.py | 373 +++ 
core/mcp/tools/utility.py | 27 + core/middleware.py | 262 ++ core/migrations/__init__.py | 0 core/models/__init__.py | 25 + core/models/account.py | 151 + core/models/account_punchlist.py | 20 + core/models/base.py | 214 ++ core/models/chat.py | 94 + core/models/customer.py | 127 + core/models/enums.py | 270 ++ core/models/events.py | 269 ++ core/models/invoice.py | 32 + core/models/labor.py | 57 + core/models/messaging.py | 254 ++ core/models/profile.py | 69 + core/models/project.py | 122 + core/models/project_punchlist.py | 15 + core/models/project_scope.py | 97 + core/models/project_scope_template.py | 121 + core/models/report.py | 145 + core/models/revenue.py | 24 + core/models/schedule.py | 96 + core/models/scope.py | 127 + core/models/scope_template.py | 108 + core/models/service.py | 37 + core/models/session.py | 289 ++ core/models/session_image.py | 61 + core/models/session_video.py | 163 + core/permissions.py | 13 + core/services/__init__.py | 2 + core/services/email_renderer.py | 291 ++ core/services/email_service.py | 303 ++ core/services/events.py | 1632 ++++++++++ core/services/metadata/__init__.py | 19 + core/services/metadata/account.py | 135 + core/services/metadata/base.py | 67 + core/services/metadata/customer.py | 92 + core/services/metadata/project.py | 188 ++ core/services/metadata/service.py | 146 + core/services/monitoring/__init__.py | 14 + core/services/monitoring/base.py | 84 + core/services/monitoring/commands/__init__.py | 12 + .../commands/incomplete_work_reminder.py | 275 ++ .../commands/nightly_assignments.py | 276 ++ core/services/monitoring/registry.py | 78 + core/services/notifications.py | 249 ++ core/services/scope_builder.py | 79 + core/services/session_service.py | 346 ++ core/services/video.py | 154 + core/static/images/logo-white.png | Bin 0 -> 77097 bytes core/tasks/__init__.py | 4 + core/tasks/event_cleanup.py | 77 + core/tasks/monitoring.py | 72 + core/tasks/notifications.py | 267 ++ core/templates/email/base_notification.html | 167 + .../email/incomplete_work_reminder.html | 249 ++ core/templates/email/nightly_assignments.html | 248 ++ core/views.py | 482 +++ docker-compose.yml | 58 + entrypoint.sh | 37 + manage.py | 22 + nginx.conf | 108 + poetry.lock | 2827 +++++++++++++++++ poetry.toml | 2 + pyproject.toml | 40 + setup.sh | 51 + vault/db-admin-template.hcl | 4 + vault/db-app-template.hcl | 4 + vault/vault-agent-config.hcl | 25 + 244 files changed, 28322 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 .mcp.json.example create mode 100644 Dockerfile create mode 100644 README.md create mode 100644 config/__init__.py create mode 100644 config/asgi.py create mode 100644 config/celery.py create mode 100644 config/db_backend/__init__.py create mode 100644 config/db_backend/base.py create mode 100644 config/settings.py create mode 100644 config/storage.py create mode 100644 config/urls.py create mode 100644 core/__init__.py create mode 100644 core/admin.py create mode 100644 core/apps.py create mode 100644 core/chat/__init__.py create mode 100644 core/chat/consumers.py create mode 100644 core/chat/service.py create mode 100644 core/graphql/__init__.py create mode 100644 core/graphql/enums.py create mode 100644 core/graphql/filters/__init__.py create mode 100644 core/graphql/filters/account.py create mode 100644 core/graphql/filters/account_punchlist.py create mode 100644 core/graphql/filters/customer.py create mode 100644 core/graphql/filters/invoice.py create mode 100644 core/graphql/filters/labor.py 
create mode 100644 core/graphql/filters/messaging.py create mode 100644 core/graphql/filters/profile.py create mode 100644 core/graphql/filters/project.py create mode 100644 core/graphql/filters/project_punchlist.py create mode 100644 core/graphql/filters/project_scope.py create mode 100644 core/graphql/filters/project_scope_template.py create mode 100644 core/graphql/filters/report.py create mode 100644 core/graphql/filters/revenue.py create mode 100644 core/graphql/filters/schedule.py create mode 100644 core/graphql/filters/scope.py create mode 100644 core/graphql/filters/scope_template.py create mode 100644 core/graphql/filters/service.py create mode 100644 core/graphql/filters/session.py create mode 100644 core/graphql/filters/session_image.py create mode 100644 core/graphql/filters/session_note.py create mode 100644 core/graphql/filters/session_video.py create mode 100644 core/graphql/inputs/__init__.py create mode 100644 core/graphql/inputs/account.py create mode 100644 core/graphql/inputs/account_punchlist.py create mode 100644 core/graphql/inputs/customer.py create mode 100644 core/graphql/inputs/invoice.py create mode 100644 core/graphql/inputs/labor.py create mode 100644 core/graphql/inputs/messaging.py create mode 100644 core/graphql/inputs/profile.py create mode 100644 core/graphql/inputs/project.py create mode 100644 core/graphql/inputs/project_punchlist.py create mode 100644 core/graphql/inputs/project_scope.py create mode 100644 core/graphql/inputs/project_scope_template.py create mode 100644 core/graphql/inputs/report.py create mode 100644 core/graphql/inputs/revenue.py create mode 100644 core/graphql/inputs/schedule.py create mode 100644 core/graphql/inputs/scope.py create mode 100644 core/graphql/inputs/scope_template.py create mode 100644 core/graphql/inputs/service.py create mode 100644 core/graphql/inputs/session.py create mode 100644 core/graphql/inputs/session_image.py create mode 100644 core/graphql/inputs/session_note.py create mode 100644 core/graphql/inputs/session_video.py create mode 100644 core/graphql/mutations/__init__.py create mode 100644 core/graphql/mutations/account.py create mode 100644 core/graphql/mutations/account_punchlist.py create mode 100644 core/graphql/mutations/customer.py create mode 100644 core/graphql/mutations/event.py create mode 100644 core/graphql/mutations/invoice.py create mode 100644 core/graphql/mutations/labor.py create mode 100644 core/graphql/mutations/messaging.py create mode 100644 core/graphql/mutations/profile.py create mode 100644 core/graphql/mutations/project.py create mode 100644 core/graphql/mutations/project_punchlist.py create mode 100644 core/graphql/mutations/project_scope.py create mode 100644 core/graphql/mutations/project_scope_template.py create mode 100644 core/graphql/mutations/report.py create mode 100644 core/graphql/mutations/revenue.py create mode 100644 core/graphql/mutations/schedule.py create mode 100644 core/graphql/mutations/scope.py create mode 100644 core/graphql/mutations/scope_template.py create mode 100644 core/graphql/mutations/service.py create mode 100644 core/graphql/mutations/session.py create mode 100644 core/graphql/mutations/session_image.py create mode 100644 core/graphql/mutations/session_note.py create mode 100644 core/graphql/mutations/session_video.py create mode 100644 core/graphql/pubsub.py create mode 100644 core/graphql/queries/__init__.py create mode 100644 core/graphql/queries/account.py create mode 100644 core/graphql/queries/account_punchlist.py create mode 100644 
core/graphql/queries/customer.py create mode 100644 core/graphql/queries/dashboard.py create mode 100644 core/graphql/queries/event.py create mode 100644 core/graphql/queries/invoice.py create mode 100644 core/graphql/queries/labor.py create mode 100644 core/graphql/queries/messaging.py create mode 100644 core/graphql/queries/profile.py create mode 100644 core/graphql/queries/project.py create mode 100644 core/graphql/queries/project_punchlist.py create mode 100644 core/graphql/queries/project_scope.py create mode 100644 core/graphql/queries/project_scope_template.py create mode 100644 core/graphql/queries/report.py create mode 100644 core/graphql/queries/revenue.py create mode 100644 core/graphql/queries/schedule.py create mode 100644 core/graphql/queries/scope.py create mode 100644 core/graphql/queries/scope_template.py create mode 100644 core/graphql/queries/service.py create mode 100644 core/graphql/queries/session.py create mode 100644 core/graphql/queries/session_image.py create mode 100644 core/graphql/queries/session_note.py create mode 100644 core/graphql/queries/session_video.py create mode 100644 core/graphql/schema.py create mode 100644 core/graphql/subscriptions/__init__.py create mode 100644 core/graphql/subscriptions/account.py create mode 100644 core/graphql/subscriptions/account_address.py create mode 100644 core/graphql/subscriptions/account_contact.py create mode 100644 core/graphql/subscriptions/account_punchlist.py create mode 100644 core/graphql/subscriptions/customer.py create mode 100644 core/graphql/subscriptions/customer_address.py create mode 100644 core/graphql/subscriptions/customer_contact.py create mode 100644 core/graphql/subscriptions/invoice.py create mode 100644 core/graphql/subscriptions/labor.py create mode 100644 core/graphql/subscriptions/messaging.py create mode 100644 core/graphql/subscriptions/profile.py create mode 100644 core/graphql/subscriptions/project.py create mode 100644 core/graphql/subscriptions/project_punchlist.py create mode 100644 core/graphql/subscriptions/project_scope.py create mode 100644 core/graphql/subscriptions/project_scope_template.py create mode 100644 core/graphql/subscriptions/report.py create mode 100644 core/graphql/subscriptions/revenue.py create mode 100644 core/graphql/subscriptions/schedule.py create mode 100644 core/graphql/subscriptions/scope.py create mode 100644 core/graphql/subscriptions/scope_template.py create mode 100644 core/graphql/subscriptions/service.py create mode 100644 core/graphql/types/__init__.py create mode 100644 core/graphql/types/account.py create mode 100644 core/graphql/types/account_punchlist.py create mode 100644 core/graphql/types/customer.py create mode 100644 core/graphql/types/dashboard.py create mode 100644 core/graphql/types/event.py create mode 100644 core/graphql/types/invoice.py create mode 100644 core/graphql/types/labor.py create mode 100644 core/graphql/types/messaging.py create mode 100644 core/graphql/types/profile.py create mode 100644 core/graphql/types/project.py create mode 100644 core/graphql/types/project_punchlist.py create mode 100644 core/graphql/types/project_scope.py create mode 100644 core/graphql/types/project_scope_template.py create mode 100644 core/graphql/types/report.py create mode 100644 core/graphql/types/revenue.py create mode 100644 core/graphql/types/schedule.py create mode 100644 core/graphql/types/scope.py create mode 100644 core/graphql/types/scope_template.py create mode 100644 core/graphql/types/service.py create mode 100644 
core/graphql/types/session.py create mode 100644 core/graphql/types/session_image.py create mode 100644 core/graphql/types/session_note.py create mode 100644 core/graphql/types/session_video.py create mode 100644 core/graphql/utils.py create mode 100644 core/mcp/__init__.py create mode 100644 core/mcp/__main__.py create mode 100644 core/mcp/auth.py create mode 100644 core/mcp/base.py create mode 100644 core/mcp/server.py create mode 100644 core/mcp/tools/__init__.py create mode 100644 core/mcp/tools/admin.py create mode 100644 core/mcp/tools/auth.py create mode 100644 core/mcp/tools/customers.py create mode 100644 core/mcp/tools/dashboard.py create mode 100644 core/mcp/tools/notifications.py create mode 100644 core/mcp/tools/projects.py create mode 100644 core/mcp/tools/services.py create mode 100644 core/mcp/tools/sessions.py create mode 100644 core/mcp/tools/utility.py create mode 100644 core/middleware.py create mode 100644 core/migrations/__init__.py create mode 100644 core/models/__init__.py create mode 100644 core/models/account.py create mode 100644 core/models/account_punchlist.py create mode 100644 core/models/base.py create mode 100644 core/models/chat.py create mode 100644 core/models/customer.py create mode 100644 core/models/enums.py create mode 100644 core/models/events.py create mode 100644 core/models/invoice.py create mode 100644 core/models/labor.py create mode 100644 core/models/messaging.py create mode 100644 core/models/profile.py create mode 100644 core/models/project.py create mode 100644 core/models/project_punchlist.py create mode 100644 core/models/project_scope.py create mode 100644 core/models/project_scope_template.py create mode 100644 core/models/report.py create mode 100644 core/models/revenue.py create mode 100644 core/models/schedule.py create mode 100644 core/models/scope.py create mode 100644 core/models/scope_template.py create mode 100644 core/models/service.py create mode 100644 core/models/session.py create mode 100644 core/models/session_image.py create mode 100644 core/models/session_video.py create mode 100644 core/permissions.py create mode 100644 core/services/__init__.py create mode 100644 core/services/email_renderer.py create mode 100644 core/services/email_service.py create mode 100644 core/services/events.py create mode 100644 core/services/metadata/__init__.py create mode 100644 core/services/metadata/account.py create mode 100644 core/services/metadata/base.py create mode 100644 core/services/metadata/customer.py create mode 100644 core/services/metadata/project.py create mode 100644 core/services/metadata/service.py create mode 100644 core/services/monitoring/__init__.py create mode 100644 core/services/monitoring/base.py create mode 100644 core/services/monitoring/commands/__init__.py create mode 100644 core/services/monitoring/commands/incomplete_work_reminder.py create mode 100644 core/services/monitoring/commands/nightly_assignments.py create mode 100644 core/services/monitoring/registry.py create mode 100644 core/services/notifications.py create mode 100644 core/services/scope_builder.py create mode 100644 core/services/session_service.py create mode 100644 core/services/video.py create mode 100644 core/static/images/logo-white.png create mode 100644 core/tasks/__init__.py create mode 100644 core/tasks/event_cleanup.py create mode 100644 core/tasks/monitoring.py create mode 100644 core/tasks/notifications.py create mode 100644 core/templates/email/base_notification.html create mode 100644 
core/templates/email/incomplete_work_reminder.html
 create mode 100644 core/templates/email/nightly_assignments.html
 create mode 100644 core/views.py
 create mode 100644 docker-compose.yml
 create mode 100644 entrypoint.sh
 create mode 100755 manage.py
 create mode 100644 nginx.conf
 create mode 100644 poetry.lock
 create mode 100644 poetry.toml
 create mode 100644 pyproject.toml
 create mode 100644 setup.sh
 create mode 100644 vault/db-admin-template.hcl
 create mode 100644 vault/db-app-template.hcl
 create mode 100644 vault/vault-agent-config.hcl

diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..7531fd6
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,49 @@
+# Django Settings
+SECRET_KEY=your-secret-key-generate-a-strong-one
+DEBUG=False
+
+# Database
+DB_NAME=nexus
+DB_HOST=localhost
+DB_PORT=5432
+DB_USER=postgres
+DB_PASSWORD=your-database-password
+
+# Database Admin (for migrations)
+DB_ADMIN_USER=postgres
+DB_ADMIN_PASSWORD=your-admin-password
+
+# Redis/Valkey
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_USERNAME=
+REDIS_PASSWORD=your-redis-password
+
+# Redis Cluster Mode (optional)
+REDIS_CLUSTER_MODE=False
+
+# Redis Sentinel (optional - for high availability)
+# REDIS_SENTINEL_HOSTS=host1:26379,host2:26379,host3:26379
+# REDIS_SENTINEL_MASTER=valkey-ha
+# REDIS_SENTINEL_PASSWORD=
+
+# Ory Oathkeeper
+OATHKEEPER_SECRET=your-oathkeeper-secret
+
+# S3 Storage (Garage/MinIO compatible)
+AWS_ACCESS_KEY_ID=your-access-key
+AWS_SECRET_ACCESS_KEY=your-secret-key
+AWS_STORAGE_BUCKET_NAME=nexus-media
+AWS_S3_ENDPOINT_URL=http://localhost:3900
+
+# AI Chat (Anthropic Claude)
+ANTHROPIC_API_KEY=your-anthropic-api-key
+ANTHROPIC_MODEL=claude-sonnet-4-20250514
+
+# Emailer Microservice
+EMAILER_BASE_URL=https://email.example.com
+EMAILER_API_KEY=your-emailer-api-key
+EMAILER_DEFAULT_SENDER=noreply@example.com
+
+# Dispatch Profile (for labor calculations)
+DISPATCH_TEAM_PROFILE_ID=
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..facc604
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,29 @@
+# Python
+.venv/
+venv/
+__pycache__/
+*.py[cod]
+*.egg-info/
+
+# Django
+db.sqlite3
+/staticfiles/
+/media/
+
+# Environment
+.env
+.env.*
+!.env.example
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# MCP configuration (machine-specific paths)
+.mcp.json
+
+# Misc
+*.log
+.DS_Store
\ No newline at end of file
diff --git a/.mcp.json.example b/.mcp.json.example
new file mode 100644
index 0000000..36064a5
--- /dev/null
+++ b/.mcp.json.example
@@ -0,0 +1,9 @@
+{
+  "mcpServers": {
+    "nexus": {
+      "command": "/path/to/nexus-5/.venv/bin/python",
+      "args": ["-m", "core.mcp.server"],
+      "cwd": "/path/to/nexus-5"
+    }
+  }
+}
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..aa31111
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,56 @@
+# Use a slim, official Python image as the base
+FROM python:3.13-slim AS base
+# Set environment variables for Python and Poetry
+ENV PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    POETRY_VIRTUALENVS_CREATE=false \
+    POETRY_NO_INTERACTION=1
+# Install system dependencies
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+    build-essential \
+    curl \
+    libpq-dev \
+    lsb-release \
+    gnupg \
+    ffmpeg \
+    && curl -fsSL https://apt.releases.hashicorp.com/gpg | gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg \
+    && echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/hashicorp.list \
+    && apt-get update \
+    && apt-get install -y vault \
+    && rm -rf /var/lib/apt/lists/*
+# Install Poetry into a globally accessible location
+ENV POETRY_HOME=/opt/poetry
+RUN curl -sSL https://install.python-poetry.org | python3 -
+# Add Poetry to the system's PATH for all users
+ENV PATH="$POETRY_HOME/bin:$PATH"
+# Set the working directory for the application
+WORKDIR /app
+# Create a non-root user and group for security
+RUN addgroup --system app && adduser --system --group app
+# Copy only dependency files first to leverage Docker's layer cache
+COPY pyproject.toml poetry.lock* /app/
+# Copy the vault agent config and templates
+COPY /vault/vault-agent-config.hcl /etc/vault/agent-config.hcl
+COPY /vault/db-admin-template.hcl /etc/vault/admin-template.hcl
+COPY /vault/db-app-template.hcl /etc/vault/app-template.hcl
+COPY entrypoint.sh /app/entrypoint.sh
+# Install Python dependencies
+RUN poetry install --no-ansi --no-root
+# Copy the rest of the application source code
+COPY . /app
+# Set correct ownership and permissions for the application files WHILE STILL ROOT
+RUN chown -R app:app /app/
+RUN chmod +x /app/entrypoint.sh
+RUN chmod +x /app/setup.sh
+# Make sure the secrets dir is writable by the 'app' user
+RUN mkdir -p /vault/secrets && chown -R app:app /vault/secrets
+# --- Switch to the non-root user ---
+USER app
+# Run collectstatic to gather all static files
+RUN poetry run python manage.py collectstatic --no-input
+# Expose the application port
+EXPOSE 8000
+# Set the entrypoint script to run on container start
+ENTRYPOINT ["/app/entrypoint.sh"]
+# The CMD is passed from docker-compose.yml to the entrypoint
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8687007
--- /dev/null
+++ b/README.md
@@ -0,0 +1,269 @@
+# Nexus 5
+
+A modern, production-ready field service management API built with Django, Strawberry GraphQL, and Django Channels. Nexus 5 represents the culmination of lessons learned from previous iterations, combining the developer productivity of Django with enterprise-grade features.
+
+## Improvements Over Previous Versions
+
+### Evolution from Nexus 1-4
+
+| Feature | Nexus 1-2 | Nexus 3 | Nexus 4 (Rust) | Nexus 5 |
+|---------|-----------|---------|----------------|---------|
+| **API** | REST (DRF) | GraphQL (Graphene) | GraphQL (async-graphql) | GraphQL (Strawberry) |
+| **Real-time** | None | None | None | WebSocket subscriptions |
+| **Auth** | JWT (DRF) | JWT (graphql-jwt) | JWT (jsonwebtoken) | Ory Kratos + Oathkeeper |
+| **Background Tasks** | None | None | None | Celery + Redis |
+| **File Storage** | Local | Local | None | S3-compatible (Garage) |
+| **Caching** | None | None | None | Valkey/Redis with Sentinel HA |
+| **Database Credentials** | Static .env | Static .env | Static .env | HashiCorp Vault (dynamic) |
+| **Chat/AI** | None | None | None | Claude AI integration |
+| **Email** | Django SMTP | Django SMTP | None | Rust microservice |
+
+### Key Improvements in Nexus 5
+
+1. **Strawberry GraphQL**: Modern, type-safe GraphQL with native Python type hints
+2. **Real-time Subscriptions**: WebSocket-based subscriptions for live updates via Django Channels
+3. **Ory Authentication Stack**: Enterprise-grade auth with Kratos (identity) + Oathkeeper (API gateway)
+4. **High-Availability Caching**: Valkey/Redis with Sentinel support for automatic failover
+5. **Dynamic Database Credentials**: HashiCorp Vault integration for rotating DB credentials
+6. **S3-Compatible Storage**: Garage cluster for distributed file storage
+7. **AI Chat Integration**: Claude-powered assistant for the application
+8. **MCP Server**: Model Context Protocol server for AI tool integration
+9. **Celery Beat Scheduling**: Automated monitoring and notification tasks
+10. **Session Tracking**: Detailed work sessions with images, videos, and notes
+
+## Tech Stack
+
+### Backend
+- Python 3.11+
+- Django 5.x
+- Strawberry GraphQL
+- Django Channels (WebSocket)
+- Celery + Redis/Valkey
+- PostgreSQL
+- S3 Storage (Garage/MinIO compatible)
+- HashiCorp Vault (optional)
+
+### External Services
+- Ory Kratos (Identity Management)
+- Ory Oathkeeper (API Gateway)
+- Valkey/Redis (Caching & Pub/Sub)
+- Anthropic Claude (AI Chat)
+
+## Project Structure
+
+```
+nexus-5/
+├── config/
+│   ├── settings.py        # Django settings with env vars
+│   ├── celery.py          # Celery configuration
+│   ├── asgi.py            # ASGI with Channels
+│   ├── storage.py         # S3 storage backend
+│   └── db_backend/        # Custom DB backend for Vault
+├── core/
+│   ├── models/            # Domain models
+│   ├── graphql/
+│   │   ├── types/         # Strawberry types
+│   │   ├── inputs/        # Input types
+│   │   ├── filters/       # Filter types
+│   │   ├── queries/       # Query resolvers
+│   │   ├── mutations/     # Mutation resolvers
+│   │   └── subscriptions/ # WebSocket subscriptions
+│   ├── chat/              # AI chat with Channels
+│   ├── mcp/               # MCP server for AI tools
+│   ├── services/          # Business logic services
+│   ├── tasks/             # Celery tasks
+│   └── templates/         # Email templates
+├── vault/                 # Vault configuration templates
+├── Dockerfile
+├── docker-compose.yml
+└── pyproject.toml         # Poetry dependencies
+```
+
+## Quick Start
+
+### Prerequisites
+- Python 3.11+
+- PostgreSQL 15+
+- Redis/Valkey
+- Docker (recommended)
+
+### Development Setup
+
+```bash
+# Clone repository
+git clone <repository-url>
+cd nexus-5
+
+# Create virtual environment
+python -m venv .venv
+source .venv/bin/activate
+
+# Install dependencies with Poetry
+pip install poetry
+poetry install
+
+# Create .env file
+cp .env.example .env
+# Edit .env with your configuration
+
+# Run migrations
+python manage.py migrate
+
+# Create superuser
+python manage.py createsuperuser
+
+# Start development server
+python manage.py runserver
+```
+
+### With Docker
+
+```bash
+docker-compose up -d
+```
+
+## Configuration
+
+### Required Environment Variables
+
+```bash
+# Django
+SECRET_KEY=your-secret-key
+DEBUG=False
+
+# Database
+DB_NAME=nexus
+DB_HOST=localhost
+DB_PORT=5432
+DB_USER=postgres
+DB_PASSWORD=password
+
+# Redis/Valkey
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_PASSWORD=password
+
+# Ory (if using)
+OATHKEEPER_SECRET=your-oathkeeper-secret
+```
+
+### Optional Environment Variables
+
+```bash
+# High Availability
+REDIS_SENTINEL_HOSTS=host1:26379,host2:26379,host3:26379
+REDIS_SENTINEL_MASTER=valkey-ha
+REDIS_CLUSTER_MODE=False
+
+# S3 Storage
+AWS_ACCESS_KEY_ID=your-key
+AWS_SECRET_ACCESS_KEY=your-secret
+AWS_STORAGE_BUCKET_NAME=nexus-media
+AWS_S3_ENDPOINT_URL=http://localhost:3900
+
+# AI Chat
+ANTHROPIC_API_KEY=your-api-key
+ANTHROPIC_MODEL=claude-sonnet-4-20250514
+
+# Emailer Microservice
+EMAILER_BASE_URL=https://email.example.com
+EMAILER_API_KEY=your-api-key
+
+# Dispatch Profile (for labor calculations)
+DISPATCH_TEAM_PROFILE_ID=uuid-here
+```
+
+## GraphQL API
+
+The GraphQL endpoint is available at `/graphql/`; the GraphiQL playground is enabled, but only for administrators.
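+
+Outside the playground, any HTTP client can exercise the endpoint. A minimal
+sketch (the host, session cookie name, and field selection below are
+illustrative, not part of this repo):
+
+```python
+# Smoke-test the GraphQL endpoint over HTTP (sketch; assumes a local dev
+# server and a valid Ory session cookie; both values are placeholders).
+import requests
+
+QUERY = """
+query {
+  services {
+    id
+    status
+  }
+}
+"""
+
+resp = requests.post(
+    "http://localhost:8000/graphql/",           # assumed dev server address
+    json={"query": QUERY},
+    cookies={"ory_kratos_session": "<token>"},  # placeholder session cookie
+    timeout=10,
+)
+resp.raise_for_status()
+print(resp.json()["data"])
+```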
+
+### Example Query
+
+```graphql
+query GetServices($filter: ServiceFilter) {
+  services(filter: $filter) {
+    id
+    date
+    status
+    account {
+      name
+    }
+    teamMembers {
+      firstName
+      lastName
+    }
+  }
+}
+```
+
+### Example Subscription
+
+```graphql
+subscription OnServiceUpdated {
+  serviceUpdated {
+    id
+    status
+    date
+  }
+}
+```
+
+## Core Features
+
+### Work Session Tracking
+- Start/stop time tracking for services and projects
+- Photo and video documentation
+- Internal and customer-visible notes
+- Task completion tracking with scopes
+
+### Scope Management
+- Reusable scope templates
+- Area-based task organization
+- Frequency-based task scheduling (daily, weekly, monthly)
+- Completion tracking per service
+
+### Real-time Messaging
+- Internal team conversations
+- Customer communication threads
+- Unread counts and read receipts
+- WebSocket-based live updates
+
+### AI Chat Assistant
+- Claude-powered contextual help
+- MCP server for tool integration
+- Conversation history per user
+
+## Deployment
+
+### Production Checklist
+
+1. Set `DEBUG=False`
+2. Configure strong `SECRET_KEY`
+3. Set up PostgreSQL with proper credentials
+4. Configure Valkey/Redis (consider Sentinel for HA)
+5. Set up Ory Kratos and Oathkeeper
+6. Configure S3 storage
+7. Set up Celery workers and beat scheduler
+8. Configure nginx reverse proxy
+9. Enable HTTPS
+
+### Running Celery
+
+```bash
+# Worker
+celery -A config worker -l INFO
+
+# Beat scheduler
+celery -A config beat -l INFO
+```
+
+## Related Services
+
+- **nexus-5-auth**: Ory Kratos/Oathkeeper configuration and auth frontend
+- **nexus-5-emailer**: Rust-based email microservice
+- **nexus-5-scheduler**: Calendar integration service
+- **nexus-5-frontend-***: SvelteKit frontend applications
+
+## License
+
+MIT License - See LICENSE file for details.
diff --git a/config/__init__.py b/config/__init__.py
new file mode 100644
index 0000000..ef9eee5
--- /dev/null
+++ b/config/__init__.py
@@ -0,0 +1,6 @@
+# Django configuration module
+
+# Import Celery app to ensure it's loaded when Django starts
+from .celery import app as celery_app
+
+__all__ = ('celery_app',)
diff --git a/config/asgi.py b/config/asgi.py
new file mode 100644
index 0000000..7116ea0
--- /dev/null
+++ b/config/asgi.py
@@ -0,0 +1,23 @@
+import os
+import django
+from django.core.asgi import get_asgi_application
+from channels.routing import ProtocolTypeRouter, URLRouter
+from django.urls import path
+from strawberry.channels import GraphQLWSConsumer
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
+django.setup()
+
+from core.graphql.schema import schema
+from core.middleware import OryWebSocketAuthMiddleware
+from core.chat.consumers import ChatConsumer
+
+application = ProtocolTypeRouter({
+    'http': get_asgi_application(),
+    'websocket': OryWebSocketAuthMiddleware(
+        URLRouter([
+            path('graphql/', GraphQLWSConsumer.as_asgi(schema=schema)),
+            path('ws/chat/', ChatConsumer.as_asgi()),
+        ])
+    ),
+})
diff --git a/config/celery.py b/config/celery.py
new file mode 100644
index 0000000..efc0411
--- /dev/null
+++ b/config/celery.py
@@ -0,0 +1,51 @@
+"""
+Celery configuration for Nexus v5.
+Uses Redis as both broker and result backend (shared DB 0, separated from
+Channels by the 'nexus:celery:' key prefix).
+"""
+import os
+from celery import Celery
+from celery.backends.redis import SentinelBackend
+
+
+class FixedSentinelBackend(SentinelBackend):
+    """
+    Fixes Celery bug where SentinelBackend._params_from_url() doesn't copy
+    'username' from URL params, breaking Redis/Valkey ACL authentication.
+
+    Celery only copies 'db' and 'password' but forgets 'username'.
+    """
+
+    def _params_from_url(self, url, defaults):
+        connparams = super()._params_from_url(url, defaults)
+
+        # Fix: parent only copies 'db' and 'password', missing 'username'
+        if connparams.get('hosts') and 'username' in connparams['hosts'][0]:
+            connparams['username'] = connparams['hosts'][0]['username']
+
+        return connparams
+
+# Set the default Django settings module for the 'celery' program.
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
+
+# Create Celery app
+app = Celery('nexus')
+
+# Load configuration from Django settings, using a "CELERY_" prefix.
+app.config_from_object('django.conf:settings', namespace='CELERY')
+
+# Load task modules from all registered Django apps.
+app.autodiscover_tasks()
+
+# Import tasks after Django setup to ensure they're registered
+from django.conf import settings
+if settings.configured:
+    try:
+        from core.tasks import notifications, event_cleanup
+    except ImportError:
+        pass
+
+
+@app.task(bind=True, ignore_result=True)
+def debug_task(self):
+    """Debug task for testing Celery setup"""
+    print(f'Request: {self.request!r}')
diff --git a/config/db_backend/__init__.py b/config/db_backend/__init__.py
new file mode 100644
index 0000000..514e7ab
--- /dev/null
+++ b/config/db_backend/__init__.py
@@ -0,0 +1,6 @@
+"""
+Custom PostgreSQL database backend package for Vault credential rotation.
+
+This package provides a Django database backend that automatically reloads
+credentials from Vault agent's rendered secret files.
+"""
diff --git a/config/db_backend/base.py b/config/db_backend/base.py
new file mode 100644
index 0000000..fddae53
--- /dev/null
+++ b/config/db_backend/base.py
@@ -0,0 +1,56 @@
+"""
+Custom PostgreSQL database backend that dynamically reloads credentials from Vault.
+
+This wrapper ensures that Django picks up rotated database credentials from Vault
+without requiring a container restart. Credentials are re-read from the Vault agent's
+rendered secret files before each new connection is established.
+"""
+import os
+from django.db.backends.postgresql import base
+
+
+class DatabaseWrapper(base.DatabaseWrapper):
+    """PostgreSQL wrapper that reloads credentials from Vault secret files."""
+
+    def get_connection_params(self):
+        """
+        Reload credentials from Vault files before connecting.
+
+        This method is called each time Django establishes a new database connection.
+        It reads the latest credentials from /vault/secrets/app.env (maintained by
+        Vault agent) and updates the connection parameters.
+
+        Falls back to environment variables if the Vault secret file is unavailable
+        (e.g., in local development).
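+
+        The rendered file is expected to contain shell-style lines such as
+        (values illustrative):
+            export DB_USER="app-db-user"
+            export DB_PASSWORD="generated-password"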
+ """ + params = super().get_connection_params() + + # Determine which alias this is (default or admin) + alias = getattr(self, 'alias', 'default') + + if alias == 'admin': + secret_file = '/vault/secrets/admin.env' + user_var = 'DB_ADMIN_USER' + password_var = 'DB_ADMIN_PASSWORD' + else: + secret_file = '/vault/secrets/app.env' + user_var = 'DB_USER' + password_var = 'DB_PASSWORD' + + # Try to read fresh credentials from Vault agent's rendered file + try: + if os.path.exists(secret_file): + with open(secret_file, 'r') as f: + for line in f: + line = line.strip() + if line.startswith(f'export {user_var}='): + username = line.split('=', 1)[1].strip().strip('"').strip("'") + params['user'] = username + elif line.startswith(f'export {password_var}='): + password = line.split('=', 1)[1].strip().strip('"').strip("'") + params['password'] = password + except (FileNotFoundError, PermissionError, IOError): + # Fallback to environment variables (local development or error case) + pass + + return params diff --git a/config/settings.py b/config/settings.py new file mode 100644 index 0000000..38cd576 --- /dev/null +++ b/config/settings.py @@ -0,0 +1,385 @@ +import os +from pathlib import Path +import dotenv + +SITE_NAME = "Nexus v5" +DISPATCH_TEAM_PROFILE_ID = os.getenv('DISPATCH_TEAM_PROFILE_ID') + +# --- Security: Oathkeeper Verification --- +OATHKEEPER_SECRET = os.getenv('OATHKEEPER_SECRET') + +# --- AI Chat: Anthropic Claude API --- +ANTHROPIC_API_KEY = os.getenv('ANTHROPIC_API_KEY', '') +ANTHROPIC_MODEL = os.getenv('ANTHROPIC_MODEL', 'claude-sonnet-4-20250514') + +# --- Initial Setup --- +dotenv.load_dotenv() +BASE_DIR = Path(__file__).resolve().parent.parent +SECRET_KEY = os.getenv('SECRET_KEY') +DEBUG = os.getenv('DEBUG', 'False').lower() in ('true', '1', 't') +ALLOWED_HOSTS = ['*'] + +# --- Unified Redis/Valkey Configuration --- +REDIS_HOST = os.getenv('REDIS_HOST') +REDIS_PORT = os.getenv('REDIS_PORT') +REDIS_USERNAME = os.getenv('REDIS_USERNAME', '') +REDIS_PASSWORD = os.getenv('REDIS_PASSWORD') +REDIS_CLUSTER_MODE = os.getenv('REDIS_CLUSTER_MODE', 'False').lower() in ('true', '1', 't') +# ACL auth format: username:password@ (username required for Valkey ACL) +REDIS_AUTH = f"{REDIS_USERNAME}:{REDIS_PASSWORD}@" if REDIS_PASSWORD else "" + +# Sentinel configuration (for HA failover) +# Format: "host1:port1,host2:port2,host3:port3" +REDIS_SENTINEL_HOSTS = os.getenv('REDIS_SENTINEL_HOSTS', '') +REDIS_SENTINEL_MASTER = os.getenv('REDIS_SENTINEL_MASTER', 'valkey-ha') +REDIS_SENTINEL_PASSWORD = os.getenv('REDIS_SENTINEL_PASSWORD', '') # Sentinel auth +REDIS_SENTINEL_MODE = bool(REDIS_SENTINEL_HOSTS) + +# Parse sentinel hosts into list of tuples [(host, port), ...] 
+REDIS_SENTINELS = []
+if REDIS_SENTINEL_MODE:
+    REDIS_SENTINELS = [
+        (h.split(':')[0], int(h.split(':')[1]))
+        for h in REDIS_SENTINEL_HOSTS.split(',')
+    ]
+
+# --- Django Applications & Middleware ---
+INSTALLED_APPS = [
+    'django.contrib.admin',
+    'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.messages',
+    'corsheaders',
+    'daphne',
+    'django.contrib.staticfiles',
+    'django.contrib.postgres',
+    'core.apps.CoreConfig',
+    'channels',
+    'strawberry_django',
+    'rest_framework',
+    'storages',
+]
+
+MIDDLEWARE = [
+    'core.middleware.ConditionalCorsMiddleware',
+    'django.middleware.security.SecurityMiddleware',
+    'whitenoise.middleware.WhiteNoiseMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'core.middleware.OryHeaderAuthenticationMiddleware',
+    'django.middleware.common.CommonMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+CORS_ALLOWED_ORIGINS = [
+    "http://localhost:5173",
+    "https://app.example.com",
+]
+
+CORS_ALLOWED_ORIGIN_REGEXES = [
+    # Regex to allow any origin on the 192.168.100.x subnet
+    r"^https?://192\.168\.100\.\d{1,3}(:\d+)?$",
+]
+
+# CORS credentials support for cookie-based auth
+CORS_ALLOW_CREDENTIALS = True
+
+# Allow common headers for GraphQL
+CORS_ALLOW_HEADERS = [
+    'accept',
+    'accept-encoding',
+    'authorization',
+    'content-type',
+    'dnt',
+    'origin',
+    'user-agent',
+    'x-csrftoken',
+    'x-requested-with',
+]
+
+CSRF_TRUSTED_ORIGINS = [
+    "https://api.example.com",
+    "https://app.example.com",
+    "https://local.example.com:5173"
+]
+
+# --- Channels & ASGI ---
+ASGI_APPLICATION = 'config.asgi.application'
+if REDIS_SENTINEL_MODE:
+    # Sentinel mode: use master discovery for HA failover
+    _sentinel_host_config = {
+        "sentinels": REDIS_SENTINELS,
+        "master_name": REDIS_SENTINEL_MASTER,
+        "password": REDIS_PASSWORD,
+        "username": REDIS_USERNAME,
+        "db": 0,
+    }
+    if REDIS_SENTINEL_PASSWORD:
+        _sentinel_host_config["sentinel_kwargs"] = {"password": REDIS_SENTINEL_PASSWORD}
+    CHANNEL_LAYERS = {
+        'default': {
+            'BACKEND': 'channels_valkey.core.ValkeyChannelLayer',
+            'CONFIG': {
+                "hosts": [_sentinel_host_config],
+                "prefix": "nexus:channels",
+            },
+        },
+    }
+elif REDIS_CLUSTER_MODE:
+    # Use sharded pubsub for cluster mode
+    CHANNEL_LAYERS = {
+        'default': {
+            'BACKEND': 'channels_valkey.pubsub.ValkeyPubSubChannelLayer',
+            'CONFIG': {
+                "hosts": [f"valkey://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"],
+                "prefix": "nexus:channels",
+            },
+        },
+    }
+else:
+    CHANNEL_LAYERS = {
+        'default': {
+            'BACKEND': 'channels_valkey.core.ValkeyChannelLayer',
+            'CONFIG': {
+                "hosts": [f"valkey://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"],
+                "prefix": "nexus:channels",
+            },
+        },
+    }
+
+# --- Framework Settings ---
+STRAWBERRY_DJANGO = {
+    'FIELD_DESCRIPTION_FROM_HELP_TEXT': True,
+    'TYPE_DESCRIPTION_FROM_MODEL_DOCSTRING': True,
+    'MUTATIONS_DEFAULT_HANDLE_ERRORS': True,
+}
+
+# --- Security Settings ---
+USE_X_FORWARDED_HOST = True
+SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
+
+# --- Core Django Settings ---
+ROOT_URLCONF = 'config.urls'
+TEMPLATES = [
+    {
+        'BACKEND': 'django.template.backends.django.DjangoTemplates',
+        'APP_DIRS': True,
+        'DIRS': [BASE_DIR / 'templates'],
+        'OPTIONS': {
+            'context_processors': [
+                'django.template.context_processors.request',
+                'django.contrib.auth.context_processors.auth',
+                'django.contrib.messages.context_processors.messages',
+            ],
+        },
+    },
+]
+
+# --- Databases & Caches ---
+DATABASES = {
+    'default': {
+        'ENGINE': 'config.db_backend',  # Custom backend for Vault credential reloading
+        'NAME': os.getenv('DB_NAME'),
+        'HOST': os.getenv('DB_HOST'),
+        'PORT': os.getenv('DB_PORT'),
+        'USER': os.environ.get('DB_USER'),  # Fallback for local dev
+        'PASSWORD': os.environ.get('DB_PASSWORD'),  # Fallback for local dev
+        'CONN_MAX_AGE': 600,  # Keep connections for 10 minutes
+        'CONN_HEALTH_CHECKS': True,  # Verify connections before reuse
+    },
+    'admin': {
+        'ENGINE': 'config.db_backend',  # Custom backend for Vault credential reloading
+        'NAME': os.getenv('DB_NAME'),
+        'HOST': os.getenv('DB_HOST'),
+        'PORT': os.getenv('DB_PORT'),
+        'USER': os.environ.get('DB_ADMIN_USER'),  # Fallback for local dev
+        'PASSWORD': os.environ.get('DB_ADMIN_PASSWORD'),  # Fallback for local dev
+        'CONN_MAX_AGE': 600,  # Keep connections for 10 minutes
+        'CONN_HEALTH_CHECKS': True,  # Verify connections before reuse
+    }
+}
+if REDIS_SENTINEL_MODE:
+    # Sentinel mode: use django-valkey with SentinelClient for HA failover
+    _valkey_connection_kwargs = {"password": REDIS_PASSWORD}
+    if REDIS_USERNAME:
+        _valkey_connection_kwargs["username"] = REDIS_USERNAME
+    CACHES = {
+        "default": {
+            "BACKEND": "django_valkey.cache.ValkeyCache",
+            "LOCATION": f"valkey://{REDIS_SENTINEL_MASTER}/0",
+            "KEY_PREFIX": "nexus:cache",
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_valkey.client.SentinelClient",
+                "SENTINELS": REDIS_SENTINELS,
+                "CONNECTION_POOL_CLASS": "valkey.sentinel.SentinelConnectionPool",
+                "CONNECTION_POOL_CLASS_KWARGS": _valkey_connection_kwargs,
+                "SENTINEL_KWARGS": {"password": REDIS_SENTINEL_PASSWORD} if REDIS_SENTINEL_PASSWORD else {},
+            },
+        }
+    }
+elif REDIS_CLUSTER_MODE:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_valkey.cache.ValkeyCache",
+            "LOCATION": f"valkey://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0",
+            "KEY_PREFIX": "nexus:cache",
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_valkey.client.DefaultClient",
+                "VALKEY_CLIENT_CLASS": "valkey.cluster.ValkeyCluster",
+                "VALKEY_CLIENT_KWARGS": {
+                    "skip_full_coverage_check": True,
+                },
+            },
+        }
+    }
+else:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_valkey.cache.ValkeyCache",
+            "LOCATION": f"valkey://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0",
+            "KEY_PREFIX": "nexus:cache",
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_valkey.client.DefaultClient",
+            },
+        }
+    }
+
+# --- Celery Configuration ---
+# All Redis usage on /0 with key prefixes for namespace isolation
+if REDIS_SENTINEL_MODE:
+    # Sentinel mode: use master discovery for HA failover
+    # Format: sentinel://user:pass@host1:port/db;sentinel://user:pass@host2:port/db;...
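+    # e.g. (illustrative): sentinel://nexus:s3cr3t@10.0.0.11:26379/0;sentinel://nexus:s3cr3t@10.0.0.12:26379/0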
+    # Each sentinel URL must include full credentials (for master connection after discovery)
+    if REDIS_USERNAME and REDIS_PASSWORD:
+        sentinel_urls = ';'.join([
+            f"sentinel://{REDIS_USERNAME}:{REDIS_PASSWORD}@{h}:{p}/0"
+            for h, p in REDIS_SENTINELS
+        ])
+    elif REDIS_PASSWORD:
+        sentinel_urls = ';'.join([
+            f"sentinel://:{REDIS_PASSWORD}@{h}:{p}/0"
+            for h, p in REDIS_SENTINELS
+        ])
+    else:
+        sentinel_urls = ';'.join([
+            f"sentinel://{h}:{p}/0"
+            for h, p in REDIS_SENTINELS
+        ])
+    CELERY_BROKER_URL = sentinel_urls
+    # Use custom backend class that fixes Celery's missing 'username' param for ACL auth
+    CELERY_RESULT_BACKEND = f"config.celery.FixedSentinelBackend+{sentinel_urls}"
+    CELERY_BROKER_TRANSPORT_OPTIONS = {
+        'master_name': REDIS_SENTINEL_MASTER,
+        'global_keyprefix': 'nexus:celery:',
+    }
+    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {
+        'master_name': REDIS_SENTINEL_MASTER,
+        'global_keyprefix': 'nexus:celery:',
+    }
+    # Sentinel authentication (if Sentinel itself requires auth, separate from master)
+    if REDIS_SENTINEL_PASSWORD:
+        CELERY_BROKER_TRANSPORT_OPTIONS['sentinel_kwargs'] = {'password': REDIS_SENTINEL_PASSWORD}
+        CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS['sentinel_kwargs'] = {'password': REDIS_SENTINEL_PASSWORD}
+elif REDIS_CLUSTER_MODE:
+    # Celery 5.3+ supports cluster mode natively
+    CELERY_BROKER_URL = f"redis://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"
+    CELERY_RESULT_BACKEND = f"redis://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"
+    CELERY_BROKER_TRANSPORT_OPTIONS = {
+        'global_keyprefix': 'nexus:celery:',
+        'fanout_prefix': True,
+        'fanout_patterns': True,
+    }
+    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {'global_keyprefix': 'nexus:celery:'}
+    CELERY_BROKER_USE_SSL = False
+    CELERY_REDIS_BACKEND_USE_CLUSTER = True
+else:
+    CELERY_BROKER_URL = f"redis://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"
+    CELERY_RESULT_BACKEND = f"redis://{REDIS_AUTH}{REDIS_HOST}:{REDIS_PORT}/0"
+    CELERY_BROKER_TRANSPORT_OPTIONS = {'global_keyprefix': 'nexus:celery:'}
+    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = {'global_keyprefix': 'nexus:celery:'}
+CELERY_ACCEPT_CONTENT = ['json']
+CELERY_TASK_SERIALIZER = 'json'
+CELERY_RESULT_SERIALIZER = 'json'
+CELERY_TIMEZONE = 'America/New_York'
+CELERY_TASK_TRACK_STARTED = True
+CELERY_TASK_TIME_LIMIT = 30 * 60  # 30 minutes
+CELERY_TASK_SOFT_TIME_LIMIT = 25 * 60  # 25 minutes
+CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True
+
+# Celery Beat Schedule (periodic tasks)
+from celery.schedules import crontab
+
+CELERY_BEAT_SCHEDULE = {
+    'cleanup-old-events': {
+        'task': 'core.tasks.event_cleanup.cleanup_old_events',
+        'schedule': crontab(hour=2, minute=0),  # Run daily at 2 AM Eastern
+    },
+    'monitoring-incomplete-work-reminder': {
+        'task': 'core.tasks.monitoring.run_monitoring_command',
+        'schedule': crontab(hour=8, minute=0),  # 8 AM Eastern
+        'args': ['incomplete_work_reminder'],
+    },
+    'monitoring-nightly-assignments': {
+        'task': 'core.tasks.monitoring.run_monitoring_command',
+        'schedule': crontab(hour=18, minute=0),  # 6 PM Eastern
+        'args': ['nightly_assignments'],
+    },
+}
+
+# --- Emailer Microservice Configuration ---
+# Emailer is a Rust-based REST API for sending emails via Gmail API
+EMAILER_BASE_URL = os.getenv('EMAILER_BASE_URL', 'https://email.example.com')
+EMAILER_API_KEY = os.getenv('EMAILER_API_KEY', '')
+EMAILER_DEFAULT_SENDER = os.getenv('EMAILER_DEFAULT_SENDER', 'noreply@example.com')
+
+# --- Security & Static Files ---
+AUTH_PASSWORD_VALIDATORS = [
+    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
+    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
+]
+STATIC_URL = '/static/'
+STATIC_ROOT = BASE_DIR / 'staticfiles'
+
+# --- Media Files & File Upload ---
+MEDIA_URL = '/api/media/'
+
+# S3 Storage Configuration (Garage S3-compatible cluster)
+# boto3/django-storages use AWS_* naming convention but connect to Garage
+AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
+AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
+AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME', 'nexus-media')
+AWS_S3_ENDPOINT_URL = os.getenv('AWS_S3_ENDPOINT_URL', 'http://10.10.10.39:3900')
+AWS_S3_REGION_NAME = 'garage'  # Garage ignores this but boto3 requires it
+AWS_DEFAULT_ACL = None  # Use bucket default
+AWS_QUERYSTRING_AUTH = False  # Nginx handles auth, not pre-signed URLs
+AWS_S3_FILE_OVERWRITE = False  # Preserve unique filenames
+
+# Legacy MEDIA_ROOT for local dev fallback (not used in production with S3)
+MEDIA_ROOT = BASE_DIR / 'media'
+
+# Django 4.2+ STORAGES configuration (replaces deprecated DEFAULT_FILE_STORAGE)
+# Uses custom GarageS3Storage that returns nginx-proxied URLs instead of direct S3 URLs
+STORAGES = {
+    "default": {
+        "BACKEND": "config.storage.GarageS3Storage",
+    },
+    "staticfiles": {
+        "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
+    },
+}
+
+# Increased limits for video uploads (250 MB max)
+DATA_UPLOAD_MAX_MEMORY_SIZE = 250 * 1024 * 1024  # 250 MB
+FILE_UPLOAD_MAX_MEMORY_SIZE = 250 * 1024 * 1024  # 250 MB
+
+# --- Internationalization ---
+LANGUAGE_CODE = 'en-us'
+TIME_ZONE = 'UTC'
+USE_I18N = True
+USE_TZ = True
+DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
\ No newline at end of file
diff --git a/config/storage.py b/config/storage.py
new file mode 100644
index 0000000..ccac5af
--- /dev/null
+++ b/config/storage.py
@@ -0,0 +1,27 @@
+"""
+Custom S3 storage backend for Garage that returns nginx-proxied URLs.
+
+Instead of returning direct S3 URLs like:
+    http://10.10.10.39:3900/nexus-media/uploads/...
+
+Returns relative URLs that go through nginx:
+    /api/media/uploads/...
+
+Nginx then handles auth and proxies to Garage's website mode.
+"""
+from django.conf import settings
+from storages.backends.s3boto3 import S3Boto3Storage
+
+
+class GarageS3Storage(S3Boto3Storage):
+    """
+    S3Boto3Storage subclass that returns URLs through nginx proxy.
+    """
+
+    def url(self, name, parameters=None, expire=None, http_method=None):
+        """
+        Return a URL that goes through our nginx proxy instead of direct S3.
+        """
+        # Return relative URL that nginx will proxy to S3
+        # MEDIA_URL is '/api/media/' so this becomes '/api/media/uploads/...'
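+        # e.g. name="uploads/sessions/photo.jpg" (illustrative) -> "/api/media/uploads/sessions/photo.jpg"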
+ return f"{settings.MEDIA_URL}{name}" diff --git a/config/urls.py b/config/urls.py new file mode 100644 index 0000000..0ce2df0 --- /dev/null +++ b/config/urls.py @@ -0,0 +1,40 @@ +from django.contrib import admin +from django.urls import path, re_path +from django.views.decorators.csrf import csrf_exempt +from django.http import HttpResponseForbidden +from strawberry.django.views import AsyncGraphQLView +from core.graphql.schema import schema +from core.views import ( + upload_service_session_image, + upload_project_session_image, + upload_service_session_video, + upload_project_session_video, + serve_protected_media, + media_auth_check, +) + + +class AdminOnlyGraphQLView(AsyncGraphQLView): + """GraphQL view that restricts GraphiQL IDE to ADMIN role only.""" + + async def render_graphql_ide(self, request): + profile = getattr(request, 'profile', None) + if profile and hasattr(profile, 'role') and profile.role == 'ADMIN': + return await super().render_graphql_ide(request) + return HttpResponseForbidden("GraphiQL is only available to administrators") + + +urlpatterns = [ + path("admin/", admin.site.urls), + path( + "graphql/", + csrf_exempt(AdminOnlyGraphQLView.as_view(schema=schema, graphiql=True)) + ), + path("api/upload/photo/service/", csrf_exempt(upload_service_session_image), name="upload_service_session_image"), + path("api/upload/photo/project/", csrf_exempt(upload_project_session_image), name="upload_project_session_image"), + path("api/upload/video/service/", csrf_exempt(upload_service_session_video), name="upload_service_session_video"), + path("api/upload/video/project/", csrf_exempt(upload_project_session_video), name="upload_project_session_video"), + re_path(r"^api/media/(?P.*)$", serve_protected_media, name="serve_protected_media"), + # Auth check endpoint for nginx auth_request (S3 media proxy) + re_path(r"^api/media-auth/(?P.*)$", media_auth_check, name="media_auth_check"), +] diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/admin.py b/core/admin.py new file mode 100644 index 0000000..f363896 --- /dev/null +++ b/core/admin.py @@ -0,0 +1,645 @@ +from django.contrib import admin +from core.models import ( + Customer, + CustomerAddress, + CustomerContact, + Account, + AccountAddress, + AccountContact, + Service, + Project, + Report, + Revenue, + Labor, + Schedule, + Invoice, + AccountPunchlist, + ProjectPunchlist, + CustomerProfile, + TeamProfile, + Scope, + Area, + Task, + TaskCompletion, + ScopeTemplate, + AreaTemplate, + TaskTemplate, + ProjectScope, + ProjectScopeCategory, + ProjectScopeTask, + ProjectScopeTaskCompletion, + ProjectScopeTemplate, + ProjectAreaTemplate, + ProjectTaskTemplate, + ServiceSession, + ProjectSession, + ServiceSessionNote, + ProjectSessionNote, + # Events & Notifications + Event, + NotificationRule, + Notification, + NotificationDelivery, + # Messaging + Conversation, + ConversationParticipant, + Message, + MessageReadReceipt, + # Session Media + ServiceSessionImage, + ProjectSessionImage, + ServiceSessionVideo, + ProjectSessionVideo, + # Chat + ChatConversation, + ChatMessage, +) + + +@admin.register(Customer) +class CustomerAdmin(admin.ModelAdmin): + list_display = ("name", "status", "start_date", "end_date") + list_filter = ("status",) + search_fields = ("name",) + + +@admin.register(Account) +class AccountAdmin(admin.ModelAdmin): + list_display = ("name", "customer", "status", "start_date", "end_date") + list_filter = ("status", "customer") + search_fields = ("name", 
"customer__name") + + +@admin.register(CustomerAddress) +class CustomerAddressAdmin(admin.ModelAdmin): + list_display = ("customer", "address_type", "is_primary", "is_active") + list_filter = ("address_type", "is_primary", "is_active") + search_fields = ("customer__name", "street_address", "city") + + +@admin.register(CustomerContact) +class CustomerContactAdmin(admin.ModelAdmin): + list_display = ("full_name", "customer", "email", "phone", "is_primary", "is_active") + list_filter = ("is_primary", "is_active") + search_fields = ("first_name", "last_name", "customer__name", "email", "phone") + + +@admin.register(AccountAddress) +class AccountAddressAdmin(admin.ModelAdmin): + list_display = ("account", "street_address", "city", "is_primary", "is_active") + list_filter = ("is_primary", "is_active") + search_fields = ("account__name", "street_address", "city") + + +@admin.register(AccountContact) +class AccountContactAdmin(admin.ModelAdmin): + list_display = ("full_name", "account", "email", "phone", "is_primary", "is_active") + list_filter = ("is_primary", "is_active") + search_fields = ("first_name", "last_name", "account__name", "email", "phone") + + +@admin.register(Service) +class ServiceAdmin(admin.ModelAdmin): + list_display = ("account_address", "date", "status") + list_filter = ("status", "date") + search_fields = ("account_address__account__name",) + + +@admin.register(Project) +class ProjectAdmin(admin.ModelAdmin): + list_display = ("customer", "account_address", "date", "status", "labor", "amount") + list_filter = ("status", "date", "customer") + search_fields = ( + "customer__name", + "account_address__account__name", + "street_address", + "city", + "state", + "zip_code", + ) + + +@admin.register(Report) +class ReportAdmin(admin.ModelAdmin): + list_display = ("team_member", "date") + list_filter = ("date",) + search_fields = ("team_member__first_name", "team_member__last_name") + + +@admin.register(Revenue) +class RevenueAdmin(admin.ModelAdmin): + list_display = ("account", "amount", "start_date", "end_date") + list_filter = ("start_date",) + search_fields = ("account__name",) + + +@admin.register(Labor) +class LaborAdmin(admin.ModelAdmin): + list_display = ("account_address", "amount", "start_date", "end_date") + list_filter = ("start_date",) + search_fields = ("account_address__account__name",) + + +@admin.register(Schedule) +class ScheduleAdmin(admin.ModelAdmin): + list_display = ("account_address", "start_date", "end_date", "weekend_service") + list_filter = ("weekend_service",) + search_fields = ("account_address__account__name",) + + +@admin.register(Invoice) +class InvoiceAdmin(admin.ModelAdmin): + list_display = ("customer", "date", "status", "date_paid", "payment_type") + list_filter = ("status", "date") + search_fields = ("customer__name",) + + +@admin.register(AccountPunchlist) +class AccountPunchlistAdmin(admin.ModelAdmin): + list_display = ("account", "date") + list_filter = ("date",) + search_fields = ("account__name",) + + +@admin.register(ProjectPunchlist) +class ProjectPunchlistAdmin(admin.ModelAdmin): + list_display = ("project", "date") + list_filter = ("date",) + search_fields = ( + "project__account_address__account__name", + "project__street_address", + "project__city", + "project__state", + "project__zip_code", + "project__customer__name", + ) + + +@admin.register(CustomerProfile) +class CustomerProfileAdmin(admin.ModelAdmin): + list_display = ("user", "get_customers", "status") + list_filter = ("status",) + search_fields = ("user__username", "first_name", 
"last_name", "email") + filter_horizontal = ("customers",) + + def get_customers(self, obj): + """Display comma-separated list of customers""" + return ", ".join([c.name for c in obj.customers.all()]) + get_customers.short_description = "Customers" + + +@admin.register(TeamProfile) +class TeamProfileAdmin(admin.ModelAdmin): + list_display = ("user", "first_name", "last_name", "status") + list_filter = ("status",) + search_fields = ("user__username", "first_name", "last_name") + + +@admin.register(Scope) +class ScopeAdmin(admin.ModelAdmin): + list_display = ("name", "account", "account_address", "is_active") + list_filter = ("is_active", "account") + search_fields = ("name", "account__name", "account_address__street_address") + + +class TaskTemplateInline(admin.TabularInline): + model = TaskTemplate + extra = 1 + fields = ("description", "frequency", "order", "is_conditional", "estimated_minutes") + ordering = ("order",) + show_change_link = True + + +class AreaTemplateInline(admin.TabularInline): + model = AreaTemplate + extra = 1 + fields = ("name", "order") + ordering = ("order",) + show_change_link = True + + +@admin.register(ScopeTemplate) +class ScopeTemplateAdmin(admin.ModelAdmin): + list_display = ("name", "is_active") + list_filter = ("is_active",) + search_fields = ("name", "description") + inlines = (AreaTemplateInline,) + ordering = ("name",) + + +@admin.register(AreaTemplate) +class AreaTemplateAdmin(admin.ModelAdmin): + list_display = ("name", "scope_template", "order") + list_filter = ("scope_template",) + search_fields = ("name", "scope_template__name") + inlines = (TaskTemplateInline,) + ordering = ("scope_template", "order", "name") + + +@admin.register(TaskTemplate) +class TaskTemplateAdmin(admin.ModelAdmin): + list_display = ("short_description", "area_template", "frequency", "order", "is_conditional") + list_filter = ("frequency", "is_conditional", "area_template__scope_template") + search_fields = ("description", "area_template__name", "area_template__scope_template__name") + ordering = ("area_template", "order") + + def short_description(self, obj): + return (obj.description or "")[:60] + + short_description.short_description = "Description" + + +@admin.register(ServiceSession) +class ServiceSessionAdmin(admin.ModelAdmin): + list_display = ( + "service", + "account", + "account_address", + "scope", + "start", + "end", + "created_by", + "closed_by", + "is_active", + ) + list_filter = ("start", "end", "account", "scope") + search_fields = ( + "service__account_address__account__name", + "account_address__street_address", + "account_address__city", + "created_by__first_name", + "created_by__last_name", + ) + ordering = ("-start",) + readonly_fields = ("duration_seconds",) + filter_horizontal = ("completed_tasks",) + + +@admin.register(ProjectScope) +class ProjectScopeAdmin(admin.ModelAdmin): + list_display = ("name", "project", "account", "account_address", "is_active") + list_filter = ("is_active", "project", "account") + search_fields = ( + "name", + "project__customer__name", + "project__account_address__account__name", + "account__name", + "account_address__street_address", + ) + ordering = ("name",) + + +@admin.register(ProjectScopeCategory) +class ProjectScopeCategoryAdmin(admin.ModelAdmin): + list_display = ("name", "scope", "order") + list_filter = ("scope",) + search_fields = ("name", "scope__name") + ordering = ("scope", "order", "name") + + +@admin.register(ProjectScopeTask) +class ProjectScopeTaskAdmin(admin.ModelAdmin): + list_display = ("short_description", 
"category", "order", "estimated_minutes") + list_filter = ("category__scope",) + search_fields = ("description", "category__name", "category__scope__name") + ordering = ("category", "order") + + def short_description(self, obj): + return (obj.description or "")[:60] + + short_description.short_description = "Description" + + +class ProjectTaskTemplateInline(admin.TabularInline): + model = ProjectTaskTemplate + extra = 1 + fields = ("description", "order", "estimated_minutes") + ordering = ("order",) + show_change_link = True + + +class ProjectAreaTemplateInline(admin.TabularInline): + model = ProjectAreaTemplate + extra = 1 + fields = ("name", "order") + ordering = ("order",) + show_change_link = True + + +@admin.register(ProjectScopeTemplate) +class ProjectScopeTemplateAdmin(admin.ModelAdmin): + list_display = ("name", "is_active") + list_filter = ("is_active",) + search_fields = ("name", "description") + inlines = (ProjectAreaTemplateInline,) + ordering = ("name",) + + +@admin.register(ProjectAreaTemplate) +class ProjectAreaTemplateAdmin(admin.ModelAdmin): + list_display = ("name", "scope_template", "order") + list_filter = ("scope_template",) + search_fields = ("name", "scope_template__name") + inlines = (ProjectTaskTemplateInline,) + ordering = ("scope_template", "order", "name") + + +@admin.register(ProjectTaskTemplate) +class ProjectTaskTemplateAdmin(admin.ModelAdmin): + list_display = ("short_description", "area_template", "order", "estimated_minutes") + list_filter = ("area_template__scope_template",) + search_fields = ("description", "area_template__name", "area_template__scope_template__name") + ordering = ("area_template", "order") + + def short_description(self, obj): + return (obj.description or "")[:60] + + short_description.short_description = "Description" + + +@admin.register(ProjectSession) +class ProjectSessionAdmin(admin.ModelAdmin): + list_display = ( + "project", + "account", + "account_address", + "scope", + "start", + "end", + "created_by", + "closed_by", + "is_active", + ) + list_filter = ("start", "end", "account", "scope") + search_fields = ( + "project__account_address__account__name", + "account_address__street_address", + "account_address__city", + "created_by__first_name", + "created_by__last_name", + ) + ordering = ("-start",) + readonly_fields = ("duration_seconds",) + + +# Admin registrations for Area, Task, TaskCompletion, and ProjectScopeTaskCompletion +class TaskInline(admin.TabularInline): + model = Task + extra = 1 + fields = ("description", "frequency", "order", "is_conditional", "estimated_minutes") + ordering = ("order",) + show_change_link = True + + +@admin.register(Area) +class AreaAdmin(admin.ModelAdmin): + list_display = ("name", "scope", "order") + list_filter = ("scope",) + search_fields = ("name", "scope__name") + ordering = ("scope", "order", "name") + inlines = (TaskInline,) + + +@admin.register(Task) +class TaskAdmin(admin.ModelAdmin): + list_display = ("short_description", "area", "frequency", "order", "is_conditional") + list_filter = ("frequency", "is_conditional", "area__scope") + search_fields = ("description", "area__name", "area__scope__name") + ordering = ("area", "order") + + def short_description(self, obj): + return (obj.description or "")[:60] + + short_description.short_description = "Description" + + +@admin.register(TaskCompletion) +class TaskCompletionAdmin(admin.ModelAdmin): + list_display = ("task", "service", "account_address", "completed_by", "completed_at", "year", "month") + list_filter = ("completed_at", 
"completed_by", "task__area__scope") + search_fields = ( + "task__description", + "task__area__name", + "task__area__scope__name", + "service__account_address__account__name", + "service__account_address__street_address", + ) + ordering = ("-completed_at",) + + +@admin.register(ProjectScopeTaskCompletion) +class ProjectScopeTaskCompletionAdmin(admin.ModelAdmin): + list_display = ( + "task", + "project", + "account", + "account_address", + "completed_by", + "completed_at", + ) + list_filter = ("completed_at", "completed_by", "task__category__scope", "project", "account") + search_fields = ( + "task__description", + "task__category__name", + "task__category__scope__name", + "project__customer__name", + "project__account_address__account__name", + "account__name", + "account_address__street_address", + ) + ordering = ("-completed_at",) + + +@admin.register(ServiceSessionNote) +class ServiceSessionNoteAdmin(admin.ModelAdmin): + list_display = ("session", "short_content", "author", "internal", "created_at") + list_filter = ("internal", "created_at", "author") + search_fields = ( + "content", + "session__service__account_address__account__name", + "author__first_name", + "author__last_name", + ) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + def short_content(self, obj): + return (obj.content or "")[:60] + + short_content.short_description = "Content" + + +@admin.register(ProjectSessionNote) +class ProjectSessionNoteAdmin(admin.ModelAdmin): + list_display = ("session", "short_content", "author", "internal", "created_at") + list_filter = ("internal", "created_at", "author") + search_fields = ( + "content", + "session__project__customer__name", + "author__first_name", + "author__last_name", + ) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + def short_content(self, obj): + return (obj.content or "")[:60] + + short_content.short_description = "Content" + + +# ============================================================================= +# Events & Notifications +# ============================================================================= + + +@admin.register(Event) +class EventAdmin(admin.ModelAdmin): + list_display = ("event_type", "entity_type", "entity_id", "created_at") + list_filter = ("event_type", "entity_type", "created_at") + search_fields = ("entity_type", "entity_id") + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(NotificationRule) +class NotificationRuleAdmin(admin.ModelAdmin): + list_display = ("name", "is_active", "get_channels", "created_at") + list_filter = ("is_active",) + search_fields = ("name", "description") + filter_horizontal = ("target_team_profiles", "target_customer_profiles") + readonly_fields = ("created_at", "updated_at") + + def get_channels(self, obj): + return ", ".join(obj.channels) if obj.channels else "" + + get_channels.short_description = "Channels" + + +@admin.register(Notification) +class NotificationAdmin(admin.ModelAdmin): + list_display = ("subject", "event", "status", "read_at", "created_at") + list_filter = ("status", "created_at") + search_fields = ("subject", "body") + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(NotificationDelivery) +class NotificationDeliveryAdmin(admin.ModelAdmin): + list_display = ("notification", "channel", "status", "attempts", "sent_at") + list_filter = ("channel", "status") + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") 
+ + +# ============================================================================= +# Messaging +# ============================================================================= + + +@admin.register(Conversation) +class ConversationAdmin(admin.ModelAdmin): + list_display = ("subject", "conversation_type", "last_message_at", "is_archived") + list_filter = ("conversation_type", "is_archived") + search_fields = ("subject",) + ordering = ("-last_message_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(ConversationParticipant) +class ConversationParticipantAdmin(admin.ModelAdmin): + list_display = ("conversation", "unread_count", "is_muted", "is_archived", "joined_at") + list_filter = ("is_muted", "is_archived") + ordering = ("-joined_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(Message) +class MessageAdmin(admin.ModelAdmin): + list_display = ("conversation", "short_body", "is_system_message", "created_at") + list_filter = ("is_system_message", "created_at") + search_fields = ("body",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + def short_body(self, obj): + return (obj.body or "")[:60] + + short_body.short_description = "Body" + + +@admin.register(MessageReadReceipt) +class MessageReadReceiptAdmin(admin.ModelAdmin): + list_display = ("message", "read_at") + ordering = ("-read_at",) + readonly_fields = ("created_at", "updated_at") + + +# ============================================================================= +# Session Media +# ============================================================================= + + +@admin.register(ServiceSessionImage) +class ServiceSessionImageAdmin(admin.ModelAdmin): + list_display = ("service_session", "title", "created_at") + list_filter = ("created_at",) + search_fields = ("title",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(ProjectSessionImage) +class ProjectSessionImageAdmin(admin.ModelAdmin): + list_display = ("project_session", "title", "created_at") + list_filter = ("created_at",) + search_fields = ("title",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(ServiceSessionVideo) +class ServiceSessionVideoAdmin(admin.ModelAdmin): + list_display = ("service_session", "title", "duration_seconds", "created_at") + list_filter = ("created_at",) + search_fields = ("title",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(ProjectSessionVideo) +class ProjectSessionVideoAdmin(admin.ModelAdmin): + list_display = ("project_session", "title", "duration_seconds", "created_at") + list_filter = ("created_at",) + search_fields = ("title",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + +# ============================================================================= +# Chat +# ============================================================================= + + +@admin.register(ChatConversation) +class ChatConversationAdmin(admin.ModelAdmin): + list_display = ("team_profile", "title", "is_active", "created_at", "updated_at") + list_filter = ("is_active", "created_at") + search_fields = ("title", "team_profile__first_name", "team_profile__last_name") + ordering = ("-updated_at",) + readonly_fields = ("created_at", "updated_at") + + +@admin.register(ChatMessage) +class ChatMessageAdmin(admin.ModelAdmin): + list_display = ("conversation", "role", "short_content", "created_at") + list_filter 
= ("role", "created_at") + search_fields = ("content",) + ordering = ("-created_at",) + readonly_fields = ("created_at", "updated_at") + + def short_content(self, obj): + return (obj.content or "")[:60] + + short_content.short_description = "Content" diff --git a/core/apps.py b/core/apps.py new file mode 100644 index 0000000..77f4616 --- /dev/null +++ b/core/apps.py @@ -0,0 +1,25 @@ +import logging +from django.apps import AppConfig + +logger = logging.getLogger(__name__) + +# HEIF/HEIC image format support for iOS photo uploads +try: + import pillow_heif as _pillow_heif +except ImportError: + _pillow_heif = None # type: ignore + + +class CoreConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'core' + + def ready(self): + # Register HEIF/HEIC image format support for iOS photo uploads + if _pillow_heif is not None: + _pillow_heif.register_heif_opener() + logger.info("HEIF image format support registered successfully") + else: + logger.warning("pillow-heif not installed, HEIC/HEIF images from iOS devices will not be supported") + + logger.info("Core is ready.") \ No newline at end of file diff --git a/core/chat/__init__.py b/core/chat/__init__.py new file mode 100644 index 0000000..71aa12a --- /dev/null +++ b/core/chat/__init__.py @@ -0,0 +1 @@ +# Chat module for AI assistant integration diff --git a/core/chat/consumers.py b/core/chat/consumers.py new file mode 100644 index 0000000..50b9c0f --- /dev/null +++ b/core/chat/consumers.py @@ -0,0 +1,261 @@ +""" +WebSocket consumer for AI chat. +""" +import json +import logging +from typing import Optional + +from channels.generic.websocket import AsyncWebsocketConsumer +from channels.db import database_sync_to_async + +from core.models import TeamProfile +from core.models.chat import ChatConversation +from core.chat.service import ChatService + +logger = logging.getLogger(__name__) + + +class ChatConsumer(AsyncWebsocketConsumer): + """ + WebSocket consumer for AI chat with Claude. + + Handles: + - Connection authentication (via OryWebSocketAuthMiddleware) + - Message streaming + - Conversation history + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.profile: Optional[TeamProfile] = None + self.chat_service: Optional[ChatService] = None + + async def connect(self): + """Handle WebSocket connection.""" + # Get profile from scope (set by OryWebSocketAuthMiddleware) + self.profile = self.scope.get('profile') + + if not self.profile: + logger.warning("Chat connection rejected - no profile") + await self.close(code=4401) + return + + # Only allow team profiles + if not isinstance(self.profile, TeamProfile): + logger.warning("Chat connection rejected - not a team profile") + await self.close(code=4403) + return + + # Initialize chat service + self.chat_service = ChatService(self.profile) + + await self.accept() + + # Send welcome message + await self.send_json({ + "type": "connected", + "user": { + "id": str(self.profile.id), + "name": f"{self.profile.first_name} {self.profile.last_name}".strip(), + "email": self.profile.email, + } + }) + + # Send role-based intro message + await self.send_json({ + "type": "intro", + "content": self._get_intro_message() + }) + + def _get_intro_message(self) -> str: + """Get intro message based on user role.""" + first_name = self.profile.first_name or "there" + role = getattr(self.profile, 'role', None) + + if role == 'ADMIN': + return ( + f"Hey {first_name}! I'm your Nexus assistant. 
As an admin, I can help you with:\n\n" + "• **View & manage** all services, projects, and team assignments\n" + "• **Create & schedule** new services and projects\n" + "• **Access reports** and system statistics\n" + "• **Manage notifications** and team settings\n\n" + "What would you like to do today?" + ) + elif role == 'TEAM_LEADER': + return ( + f"Hey {first_name}! I'm your Nexus assistant. As a team leader, I can help you with:\n\n" + "• **View schedules** for you and your team\n" + "• **Check service & project details** across accounts\n" + "• **Track work sessions** and task completion\n" + "• **Access customer and account information**\n\n" + "What can I help you with?" + ) + else: # TEAM_MEMBER + return ( + f"Hey {first_name}! I'm your Nexus assistant. I can help you with:\n\n" + "• **View your schedule** and assigned work\n" + "• **Check service & project details** for your assignments\n" + "• **Manage work sessions** and mark tasks complete\n" + "• **Track your notifications**\n\n" + "What do you need help with?" + ) + + async def disconnect(self, close_code): + """Handle WebSocket disconnection.""" + logger.info(f"Chat disconnected: {close_code}") + + async def receive(self, text_data): + """Handle incoming WebSocket messages.""" + try: + data = json.loads(text_data) + except json.JSONDecodeError: + await self.send_json({"type": "error", "error": "Invalid JSON"}) + return + + message_type = data.get("type") + + if message_type == "chat": + await self.handle_chat(data) + elif message_type == "history": + await self.handle_history(data) + elif message_type == "conversations": + await self.handle_list_conversations() + elif message_type == "new_conversation": + await self.handle_new_conversation() + else: + await self.send_json({"type": "error", "error": f"Unknown message type: {message_type}"}) + + async def handle_chat(self, data): + """Handle a chat message.""" + content = data.get("content", "").strip() + conversation_id = data.get("conversation_id") + + if not content: + await self.send_json({"type": "error", "error": "Message content is required"}) + return + + try: + # Get or create conversation + conversation = await self.chat_service.get_or_create_conversation(conversation_id) + + # If new conversation, send conversation_created event + if not conversation_id: + await self.send_json({ + "type": "conversation_created", + "conversation": { + "id": str(conversation.id), + "title": conversation.title or "New Conversation", + "created_at": conversation.created_at.isoformat(), + } + }) + + # Stream response + async for event in self.chat_service.stream_response(conversation, content): + await self.send_json(event) + + except Exception as e: + logger.exception("Error handling chat message") + await self.send_json({"type": "error", "error": str(e)}) + + async def handle_history(self, data): + """Handle request for conversation history.""" + conversation_id = data.get("conversation_id") + + if not conversation_id: + await self.send_json({"type": "error", "error": "conversation_id is required"}) + return + + try: + @database_sync_to_async + def get_conversation_with_messages(): + try: + conv = ChatConversation.objects.prefetch_related('messages').get( + id=conversation_id, + team_profile=self.profile, + is_active=True + ) + return { + "id": str(conv.id), + "title": conv.title or "New Conversation", + "created_at": conv.created_at.isoformat(), + "messages": [ + { + "id": str(msg.id), + "role": msg.role, + "content": msg.content, + "tool_calls": msg.tool_calls, + "tool_results": 
msg.tool_results, + "created_at": msg.created_at.isoformat(), + } + for msg in conv.messages.all().order_by('created_at') + ] + } + except ChatConversation.DoesNotExist: + return None + + conversation = await get_conversation_with_messages() + + if conversation: + await self.send_json({ + "type": "history", + "conversation": conversation + }) + else: + await self.send_json({"type": "error", "error": "Conversation not found"}) + + except Exception as e: + logger.exception("Error fetching history") + await self.send_json({"type": "error", "error": str(e)}) + + async def handle_list_conversations(self): + """Handle request to list all conversations.""" + try: + @database_sync_to_async + def get_conversations(): + convs = ChatConversation.objects.filter( + team_profile=self.profile, + is_active=True + ).order_by('-updated_at')[:50] + + return [ + { + "id": str(conv.id), + "title": conv.title or "New Conversation", + "created_at": conv.created_at.isoformat(), + "updated_at": conv.updated_at.isoformat(), + } + for conv in convs + ] + + conversations = await get_conversations() + + await self.send_json({ + "type": "conversations", + "conversations": conversations + }) + + except Exception as e: + logger.exception("Error listing conversations") + await self.send_json({"type": "error", "error": str(e)}) + + async def handle_new_conversation(self): + """Handle request to create a new conversation.""" + try: + conversation = await self.chat_service.get_or_create_conversation() + + await self.send_json({ + "type": "conversation_created", + "conversation": { + "id": str(conversation.id), + "title": conversation.title or "New Conversation", + "created_at": conversation.created_at.isoformat(), + } + }) + + except Exception as e: + logger.exception("Error creating conversation") + await self.send_json({"type": "error", "error": str(e)}) + + async def send_json(self, data): + """Send JSON data to the WebSocket.""" + await self.send(text_data=json.dumps(data)) diff --git a/core/chat/service.py b/core/chat/service.py new file mode 100644 index 0000000..ac0d015 --- /dev/null +++ b/core/chat/service.py @@ -0,0 +1,627 @@ +""" +Chat service that integrates Claude with Nexus MCP tools. +""" +import json +import logging +from typing import AsyncGenerator, Optional, List, Dict, Any + +import anthropic +from django.conf import settings + +from core.models import TeamProfile +from core.models.chat import ChatConversation, ChatMessage + +logger = logging.getLogger(__name__) + +# Tools that require confirmation before execution +DESTRUCTIVE_ACTIONS = { + 'delete_service', + 'delete_project', + 'create_services_bulk', +} + +# System prompt for the assistant +SYSTEM_PROMPT = """You are a helpful assistant for Nexus, a field service management system used by your organization. + +You have access to tools to query and manage: +- Customers and their accounts +- Services (scheduled cleaning visits) +- Projects (one-time work) +- Team member schedules +- Session tracking and task completion +- Notifications + +Be concise and helpful. When asked about data, use the appropriate tools to fetch current information. +When performing destructive actions like deletion or bulk creation, clearly confirm what will be affected. + +Format responses in markdown when appropriate for better readability.""" + + +def get_mcp_tools() -> List[Dict[str, Any]]: + """ + Get the list of MCP tools as Anthropic tool definitions. 
+ """ + # Import here to avoid circular imports + from core.mcp.tools.auth import set_active_profile, get_my_profile + from core.mcp.tools.dashboard import get_my_schedule, get_system_stats + from core.mcp.tools.customers import list_customers, get_customer, list_accounts, get_account + from core.mcp.tools.services import list_services, get_service, create_service, update_service, delete_service, create_services_bulk + from core.mcp.tools.projects import list_projects, get_project, create_project, update_project, delete_project + from core.mcp.tools.sessions import get_active_session, open_session, close_session, revert_session, add_task_completion, remove_task_completion + from core.mcp.tools.notifications import get_my_notifications, get_unread_notification_count, mark_notification_read, mark_all_notifications_read + + # Map function to tool definition + tools = [ + # Dashboard + { + "name": "get_my_schedule", + "description": "Get your assigned services and projects for a date range.", + "input_schema": { + "type": "object", + "properties": { + "start_date": {"type": "string", "description": "Start date in YYYY-MM-DD format"}, + "end_date": {"type": "string", "description": "End date in YYYY-MM-DD format"}, + "status": {"type": "string", "description": "Optional status filter"} + } + } + }, + { + "name": "get_system_stats", + "description": "Get high-level system statistics. Requires ADMIN or TEAM_LEADER role.", + "input_schema": {"type": "object", "properties": {}} + }, + # Customers + { + "name": "list_customers", + "description": "List customers with optional filtering. Requires ADMIN or TEAM_LEADER role.", + "input_schema": { + "type": "object", + "properties": { + "limit": {"type": "integer", "description": "Maximum customers to return (default 25)"}, + "search": {"type": "string", "description": "Search term for customer name"}, + "is_active": {"type": "boolean", "description": "Filter by active status"} + } + } + }, + { + "name": "get_customer", + "description": "Get detailed customer information including accounts.", + "input_schema": { + "type": "object", + "properties": { + "customer_id": {"type": "string", "description": "UUID of the customer"} + }, + "required": ["customer_id"] + } + }, + { + "name": "list_accounts", + "description": "List accounts with optional filtering.", + "input_schema": { + "type": "object", + "properties": { + "limit": {"type": "integer", "description": "Maximum accounts to return"}, + "customer_id": {"type": "string", "description": "Filter by customer UUID"}, + "search": {"type": "string", "description": "Search term"}, + "is_active": {"type": "boolean", "description": "Filter by active status"} + } + } + }, + { + "name": "get_account", + "description": "Get detailed account information.", + "input_schema": { + "type": "object", + "properties": { + "account_id": {"type": "string", "description": "UUID of the account"} + }, + "required": ["account_id"] + } + }, + # Services + { + "name": "list_services", + "description": "List services with optional filters.", + "input_schema": { + "type": "object", + "properties": { + "limit": {"type": "integer", "description": "Maximum services to return"}, + "customer_id": {"type": "string", "description": "Filter by customer UUID"}, + "account_id": {"type": "string", "description": "Filter by account UUID"}, + "status": {"type": "string", "description": "Status filter (SCHEDULED, IN_PROGRESS, COMPLETED, CANCELLED)"}, + "date": {"type": "string", "description": "Exact date in YYYY-MM-DD format"}, + "start_date": 
{"type": "string", "description": "Range start date"}, + "end_date": {"type": "string", "description": "Range end date"} + } + } + }, + { + "name": "get_service", + "description": "Get detailed service information including scope and tasks.", + "input_schema": { + "type": "object", + "properties": { + "service_id": {"type": "string", "description": "UUID of the service"} + }, + "required": ["service_id"] + } + }, + { + "name": "create_service", + "description": "Create a new service. Requires ADMIN role.", + "input_schema": { + "type": "object", + "properties": { + "account_address_id": {"type": "string", "description": "UUID of the account address"}, + "date": {"type": "string", "description": "Service date in YYYY-MM-DD format"}, + "status": {"type": "string", "description": "Status (default SCHEDULED)"}, + "team_member_ids": {"type": "string", "description": "Comma-separated team member UUIDs"}, + "notes": {"type": "string", "description": "Optional notes"} + }, + "required": ["account_address_id", "date"] + } + }, + { + "name": "update_service", + "description": "Update an existing service. Requires ADMIN role.", + "input_schema": { + "type": "object", + "properties": { + "service_id": {"type": "string", "description": "UUID of the service"}, + "date": {"type": "string", "description": "New date"}, + "status": {"type": "string", "description": "New status"}, + "team_member_ids": {"type": "string", "description": "Comma-separated team member UUIDs"}, + "notes": {"type": "string", "description": "Updated notes"} + }, + "required": ["service_id"] + } + }, + { + "name": "delete_service", + "description": "Delete a service. Requires ADMIN role. WARNING: This is destructive.", + "input_schema": { + "type": "object", + "properties": { + "service_id": {"type": "string", "description": "UUID of the service to delete"} + }, + "required": ["service_id"] + } + }, + { + "name": "create_services_bulk", + "description": "Create multiple services at once. Requires ADMIN role. Max 500 services.", + "input_schema": { + "type": "object", + "properties": { + "services_json": {"type": "string", "description": "JSON array of service objects with account_address_id, date, status, notes"} + }, + "required": ["services_json"] + } + }, + # Projects + { + "name": "list_projects", + "description": "List projects with optional filters.", + "input_schema": { + "type": "object", + "properties": { + "limit": {"type": "integer", "description": "Maximum projects to return"}, + "customer_id": {"type": "string", "description": "Filter by customer UUID"}, + "status": {"type": "string", "description": "Status filter"}, + "date": {"type": "string", "description": "Exact date"}, + "start_date": {"type": "string", "description": "Range start"}, + "end_date": {"type": "string", "description": "Range end"} + } + } + }, + { + "name": "get_project", + "description": "Get detailed project information.", + "input_schema": { + "type": "object", + "properties": { + "project_id": {"type": "string", "description": "UUID of the project"} + }, + "required": ["project_id"] + } + }, + { + "name": "create_project", + "description": "Create a new project. 
Requires ADMIN role.", + "input_schema": { + "type": "object", + "properties": { + "customer_id": {"type": "string", "description": "UUID of the customer"}, + "name": {"type": "string", "description": "Project name"}, + "date": {"type": "string", "description": "Project date"}, + "labor": {"type": "number", "description": "Labor cost"}, + "amount": {"type": "number", "description": "Total amount"}, + "account_address_id": {"type": "string", "description": "UUID of account address"}, + "street_address": {"type": "string", "description": "Freeform street address"}, + "city": {"type": "string", "description": "City"}, + "state": {"type": "string", "description": "State"}, + "zip_code": {"type": "string", "description": "Zip code"}, + "team_member_ids": {"type": "string", "description": "Comma-separated UUIDs"}, + "notes": {"type": "string", "description": "Notes"} + }, + "required": ["customer_id", "name", "date", "labor"] + } + }, + { + "name": "update_project", + "description": "Update an existing project. Requires ADMIN role.", + "input_schema": { + "type": "object", + "properties": { + "project_id": {"type": "string", "description": "UUID of the project"}, + "name": {"type": "string"}, + "date": {"type": "string"}, + "status": {"type": "string"}, + "labor": {"type": "number"}, + "amount": {"type": "number"}, + "team_member_ids": {"type": "string"}, + "notes": {"type": "string"} + }, + "required": ["project_id"] + } + }, + { + "name": "delete_project", + "description": "Delete a project. Requires ADMIN role. WARNING: This is destructive.", + "input_schema": { + "type": "object", + "properties": { + "project_id": {"type": "string", "description": "UUID of the project to delete"} + }, + "required": ["project_id"] + } + }, + # Sessions + { + "name": "get_active_session", + "description": "Get the active session for a service or project.", + "input_schema": { + "type": "object", + "properties": { + "entity_type": {"type": "string", "description": "Either 'service' or 'project'"}, + "entity_id": {"type": "string", "description": "UUID of the service or project"} + }, + "required": ["entity_type", "entity_id"] + } + }, + { + "name": "open_session", + "description": "Start a work session for a service or project.", + "input_schema": { + "type": "object", + "properties": { + "entity_type": {"type": "string", "description": "Either 'service' or 'project'"}, + "entity_id": {"type": "string", "description": "UUID"} + }, + "required": ["entity_type", "entity_id"] + } + }, + { + "name": "close_session", + "description": "Complete a work session and mark tasks as done.", + "input_schema": { + "type": "object", + "properties": { + "entity_type": {"type": "string"}, + "entity_id": {"type": "string"}, + "completed_task_ids": {"type": "string", "description": "Comma-separated task UUIDs"} + }, + "required": ["entity_type", "entity_id"] + } + }, + # Notifications + { + "name": "get_my_notifications", + "description": "Get your notifications.", + "input_schema": { + "type": "object", + "properties": { + "unread_only": {"type": "boolean"}, + "limit": {"type": "integer"} + } + } + }, + { + "name": "get_unread_notification_count", + "description": "Get count of unread notifications.", + "input_schema": {"type": "object", "properties": {}} + }, + { + "name": "mark_all_notifications_read", + "description": "Mark all notifications as read.", + "input_schema": {"type": "object", "properties": {}} + } + ] + + return tools + + +async def execute_tool(tool_name: str, tool_input: Dict[str, Any], profile: TeamProfile) -> 
str: + """ + Execute an MCP tool and return the result as a string. + """ + # Import tool functions + from core.mcp.tools import dashboard, customers, services, projects, sessions, notifications + from core.mcp.auth import MCPContext + + # Set the active profile for the MCP context + MCPContext.set_profile(profile) + + # Map tool names to functions + tool_map = { + # Dashboard + "get_my_schedule": dashboard.get_my_schedule, + "get_system_stats": dashboard.get_system_stats, + # Customers + "list_customers": customers.list_customers, + "get_customer": customers.get_customer, + "list_accounts": customers.list_accounts, + "get_account": customers.get_account, + # Services + "list_services": services.list_services, + "get_service": services.get_service, + "create_service": services.create_service, + "update_service": services.update_service, + "delete_service": services.delete_service, + "create_services_bulk": services.create_services_bulk, + # Projects + "list_projects": projects.list_projects, + "get_project": projects.get_project, + "create_project": projects.create_project, + "update_project": projects.update_project, + "delete_project": projects.delete_project, + # Sessions + "get_active_session": sessions.get_active_session, + "open_session": sessions.open_session, + "close_session": sessions.close_session, + "revert_session": sessions.revert_session, + "add_task_completion": sessions.add_task_completion, + "remove_task_completion": sessions.remove_task_completion, + # Notifications + "get_my_notifications": notifications.get_my_notifications, + "get_unread_notification_count": notifications.get_unread_notification_count, + "mark_notification_read": notifications.mark_notification_read, + "mark_all_notifications_read": notifications.mark_all_notifications_read, + } + + func = tool_map.get(tool_name) + if not func: + return json.dumps({"error": f"Unknown tool: {tool_name}"}) + + try: + result = await func(**tool_input) + return result + except Exception as e: + logger.exception(f"Error executing tool {tool_name}") + return json.dumps({"error": str(e)}) + + +class ChatService: + """ + Service for handling chat conversations with Claude. 
+ """ + + def __init__(self, profile: TeamProfile): + self.profile = profile + self.client = anthropic.AsyncAnthropic(api_key=settings.ANTHROPIC_API_KEY) + self.tools = get_mcp_tools() + + async def get_or_create_conversation(self, conversation_id: Optional[str] = None) -> ChatConversation: + """Get existing conversation or create a new one.""" + from channels.db import database_sync_to_async + + if conversation_id: + @database_sync_to_async + def get_conv(): + return ChatConversation.objects.filter( + id=conversation_id, + team_profile=self.profile, + is_active=True + ).first() + + conv = await get_conv() + if conv: + return conv + + # Create new conversation + @database_sync_to_async + def create_conv(): + return ChatConversation.objects.create( + team_profile=self.profile, + title="" + ) + + return await create_conv() + + async def get_conversation_messages(self, conversation: ChatConversation) -> List[Dict[str, Any]]: + """Get message history for Claude API format.""" + from channels.db import database_sync_to_async + + @database_sync_to_async + def fetch_messages(): + messages = [] + for msg in conversation.messages.all().order_by('created_at'): + messages.append({ + "role": msg.role, + "content": msg.content + }) + return messages + + return await fetch_messages() + + async def save_message( + self, + conversation: ChatConversation, + role: str, + content: str, + tool_calls: Optional[List] = None, + tool_results: Optional[List] = None + ) -> ChatMessage: + """Save a message to the conversation.""" + from channels.db import database_sync_to_async + + @database_sync_to_async + def create_message(): + msg = ChatMessage.objects.create( + conversation=conversation, + role=role, + content=content, + tool_calls=tool_calls or [], + tool_results=tool_results or [] + ) + # Update conversation title if first user message + if role == 'user' and not conversation.title: + conversation.title = content[:50] + ('...' if len(content) > 50 else '') + conversation.save(update_fields=['title', 'updated_at']) + return msg + + return await create_message() + + async def stream_response( + self, + conversation: ChatConversation, + user_message: str + ) -> AsyncGenerator[Dict[str, Any], None]: + """ + Stream a response from Claude, handling tool calls. 
+
+        Yields events:
+        - {"type": "message_start", "conversation_id": str}
+        - {"type": "text", "content": str}
+        - {"type": "tool_call", "tool": str, "input": dict}
+        - {"type": "tool_result", "tool": str, "result": str}
+        - {"type": "message_end", "message_id": str}
+        - {"type": "error", "error": str}
+        """
+        # Save user message
+        await self.save_message(conversation, 'user', user_message)
+
+        # Get conversation history
+        messages = await self.get_conversation_messages(conversation)
+
+        yield {"type": "message_start", "conversation_id": str(conversation.id)}
+
+        try:
+            full_response = ""
+            all_tool_calls = []  # every tool call across iterations, persisted with the message
+            tool_results = []
+
+            # Keep processing until we get a final response (no more tool calls)
+            while True:
+                tool_calls = []  # tool calls captured during this iteration
+
+                # Create message with streaming
+                async with self.client.messages.stream(
+                    model=settings.ANTHROPIC_MODEL,
+                    max_tokens=4096,
+                    system=SYSTEM_PROMPT,
+                    messages=messages,
+                    tools=self.tools,
+                ) as stream:
+                    current_tool_use = None
+                    current_tool_input = ""
+
+                    async for event in stream:
+                        if event.type == "content_block_start":
+                            if event.content_block.type == "tool_use":
+                                current_tool_use = {
+                                    "id": event.content_block.id,
+                                    "name": event.content_block.name,
+                                }
+                                current_tool_input = ""
+
+                        elif event.type == "content_block_delta":
+                            if event.delta.type == "text_delta":
+                                full_response += event.delta.text
+                                yield {"type": "text", "content": event.delta.text}
+                            elif event.delta.type == "input_json_delta":
+                                current_tool_input += event.delta.partial_json
+
+                        elif event.type == "content_block_stop":
+                            if current_tool_use:
+                                try:
+                                    tool_input = json.loads(current_tool_input) if current_tool_input else {}
+                                except json.JSONDecodeError:
+                                    tool_input = {}
+
+                                current_tool_use["input"] = tool_input
+                                tool_calls.append(current_tool_use)
+
+                                yield {
+                                    "type": "tool_call",
+                                    "id": current_tool_use["id"],
+                                    "tool": current_tool_use["name"],
+                                    "input": tool_input,
+                                    "requires_confirmation": current_tool_use["name"] in DESTRUCTIVE_ACTIONS
+                                }
+
+                                current_tool_use = None
+                                current_tool_input = ""
+
+                    # Get the final message to check stop reason
+                    final_message = await stream.get_final_message()
+
+                # If there are tool calls, execute them and continue
+                if final_message.stop_reason == "tool_use":
+                    # Execute each tool call in order
+                    tool_use_results = []
+                    for tool_call in tool_calls:
+                        result = await execute_tool(
+                            tool_call["name"],
+                            tool_call["input"],
+                            self.profile
+                        )
+
+                        tool_results.append({
+                            "id": tool_call["id"],
+                            "tool": tool_call["name"],
+                            "result": result
+                        })
+
+                        yield {
+                            "type": "tool_result",
+                            "id": tool_call["id"],
+                            "tool": tool_call["name"],
+                            "result": result
+                        }
+
+                        tool_use_results.append({
+                            "type": "tool_result",
+                            "tool_use_id": tool_call["id"],
+                            "content": result
+                        })
+
+                    # Add assistant message with tool use and tool results to continue conversation
+                    messages.append({
+                        "role": "assistant",
+                        "content": final_message.content
+                    })
+                    messages.append({
+                        "role": "user",
+                        "content": tool_use_results
+                    })
+
+                    # Remember this iteration's tool calls before the next round,
+                    # so they survive to the final save below.
+                    all_tool_calls.extend(tool_calls)
+                else:
+                    # No more tool calls, we're done
+                    break
+
+            # Save assistant message
+            msg = await self.save_message(
+                conversation,
+                'assistant',
+                full_response,
+                tool_calls=all_tool_calls,
+                tool_results=tool_results
+            )
+
+            yield {"type": "message_end", "message_id": str(msg.id)}
+
+        except Exception as e:
+            logger.exception("Error streaming response")
+            yield {"type": "error", "error": str(e)}
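End to end, consumers.py plus service.py define a small JSON protocol: the client opens a socket, receives "connected" and "intro" events, sends {"type": "chat", "content": ...}, and then consumes the streamed message_start / text / tool_call / tool_result / message_end events from ChatService.stream_response. A minimal client sketch, assuming the third-party websockets package and an assumed ws://.../ws/chat/ route (the actual path comes from the ASGI routing in config/asgi.py, which is outside this excerpt, and authentication via OryWebSocketAuthMiddleware is omitted):

    import asyncio
    import json

    import websockets  # pip install websockets; assumed client library, not a dependency shown in this patch

    async def main():
        # URL is a placeholder; the real route and auth cookies come from the deployment.
        async with websockets.connect("ws://localhost:8000/ws/chat/") as ws:
            print(json.loads(await ws.recv()))  # {"type": "connected", "user": {...}}
            print(json.loads(await ws.recv()))  # {"type": "intro", "content": "..."}
            await ws.send(json.dumps({"type": "chat", "content": "What is on my schedule today?"}))
            while True:
                event = json.loads(await ws.recv())
                if event["type"] == "text":
                    print(event["content"], end="", flush=True)
                elif event["type"] in ("message_end", "error"):
                    break

    asyncio.run(main())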
diff --git a/core/graphql/__init__.py b/core/graphql/__init__.py
new file mode 100644
index 0000000..0a68963
--- /dev/null
+++ b/core/graphql/__init__.py
@@ -0,0 +1,9 @@
+from core.graphql.queries import *
+from core.graphql.mutations import *
+from core.graphql.types import *
+from core.graphql.schema import *
+from core.graphql.inputs import *
+from core.graphql.subscriptions import *
+from core.graphql.pubsub import *
+from core.graphql.utils import *
+from core.graphql.enums import *
\ No newline at end of file
diff --git a/core/graphql/enums.py b/core/graphql/enums.py
new file mode 100644
index 0000000..659b11d
--- /dev/null
+++ b/core/graphql/enums.py
@@ -0,0 +1,9 @@
+import strawberry
+from enum import Enum
+
+
+@strawberry.enum
+class DateOrdering(Enum):
+    """Ordering direction for date-based queries."""
+    ASC = "ASC"
+    DESC = "DESC"
diff --git a/core/graphql/filters/__init__.py b/core/graphql/filters/__init__.py
new file mode 100644
index 0000000..53b779e
--- /dev/null
+++ b/core/graphql/filters/__init__.py
@@ -0,0 +1,18 @@
+from core.graphql.filters.account import *
+from core.graphql.filters.project import *
+from core.graphql.filters.service import *
+from core.graphql.filters.labor import *
+from core.graphql.filters.revenue import *
+from core.graphql.filters.schedule import *
+from core.graphql.filters.invoice import *
+from core.graphql.filters.report import *
+from core.graphql.filters.account_punchlist import *
+from core.graphql.filters.project_punchlist import *
+from core.graphql.filters.customer import *
+from core.graphql.filters.profile import *
+from core.graphql.filters.scope import *
+from core.graphql.filters.scope_template import *
+from core.graphql.filters.project_scope import *
+from core.graphql.filters.project_scope_template import *
+from core.graphql.filters.session import *
+from core.graphql.filters.session_image import *
\ No newline at end of file
diff --git a/core/graphql/filters/account.py b/core/graphql/filters/account.py
new file mode 100644
index 0000000..ade3119
--- /dev/null
+++ b/core/graphql/filters/account.py
@@ -0,0 +1,42 @@
+import strawberry
+import strawberry_django as sd
+from django.db.models import Q, QuerySet
+from django.utils import timezone
+from core.models.account import Account, AccountContact, AccountAddress
+
+
+@sd.filter(Account)
+class AccountFilter:
+    id: strawberry.auto
+    name: strawberry.auto
+    customer_id: strawberry.auto
+
+    @sd.filter_field
+    def is_active(self, queryset, value: bool, prefix: str) -> tuple[QuerySet, Q]:
+        today = timezone.now().date()
+        active_query = Q(
+            status='ACTIVE',
+            start_date__lte=today
+        ) & (
+            Q(end_date__isnull=True) | Q(end_date__gte=today)
+        )
+        if value:
+            return queryset, active_query
+        return queryset, ~active_query
+
+    @sd.filter_field
+    def search(self, queryset, value: str, prefix: str) -> tuple[QuerySet, Q]:
+        return queryset, Q(**{f"{prefix}name__icontains": value})
+
+
+@sd.filter(AccountAddress)
+class AccountAddressFilter:
+    id: strawberry.auto
+
+
+@sd.filter(AccountContact)
+class AccountContactFilter:
+    id: strawberry.auto
+    account_id: strawberry.auto
+    is_active: strawberry.auto
+    is_primary: strawberry.auto
diff --git a/core/graphql/filters/account_punchlist.py b/core/graphql/filters/account_punchlist.py
new file mode 100644
index 0000000..ed40766
--- /dev/null
+++ b/core/graphql/filters/account_punchlist.py
@@ -0,0 +1,8 @@
+import strawberry
+import strawberry_django as sd
+from core.models.account_punchlist import AccountPunchlist
+
+@sd.filter(AccountPunchlist)
+class AccountPunchlistFilter:
+    id: strawberry.auto
+    account_id: strawberry.auto
\ No
newline at end of file
diff --git a/core/graphql/filters/customer.py b/core/graphql/filters/customer.py
new file mode 100644
index 0000000..605549a
--- /dev/null
+++ b/core/graphql/filters/customer.py
@@ -0,0 +1,43 @@
+import strawberry
+import strawberry_django as sd
+from typing import Optional
+from django.db.models import Q
+from django.utils import timezone
+from core.models.customer import Customer, CustomerAddress, CustomerContact
+
+
+@sd.filter(Customer)
+class CustomerFilter:
+    id: strawberry.auto
+    search: Optional[str] = strawberry.field(default=None)
+    is_active: Optional[bool] = strawberry.field(default=None)
+
+    @staticmethod
+    def filter_search(queryset, value: str):
+        return queryset.filter(name__icontains=value)
+
+    @staticmethod
+    def filter_is_active(queryset, value: bool):
+        today = timezone.now().date()
+        active_query = Q(
+            status='ACTIVE',
+            start_date__lte=today
+        ) & (
+            Q(end_date__isnull=True) | Q(end_date__gte=today)
+        )
+        if value:
+            return queryset.filter(active_query)
+        return queryset.exclude(active_query)
+
+
+@sd.filter(CustomerAddress)
+class CustomerAddressFilter:
+    id: strawberry.auto
+
+
+@sd.filter(CustomerContact)
+class CustomerContactFilter:
+    id: strawberry.auto
+    customer_id: strawberry.auto
+    is_active: strawberry.auto
+    is_primary: strawberry.auto
diff --git a/core/graphql/filters/invoice.py b/core/graphql/filters/invoice.py
new file mode 100644
index 0000000..77b48a5
--- /dev/null
+++ b/core/graphql/filters/invoice.py
@@ -0,0 +1,10 @@
+import strawberry
+import strawberry_django as sd
+from core.models.invoice import Invoice
+
+
+@sd.filter(Invoice)
+class InvoiceFilter:
+    id: strawberry.auto
+    customer_id: strawberry.auto
+    status: strawberry.auto
diff --git a/core/graphql/filters/labor.py b/core/graphql/filters/labor.py
new file mode 100644
index 0000000..84c25c7
--- /dev/null
+++ b/core/graphql/filters/labor.py
@@ -0,0 +1,9 @@
+import strawberry
+import strawberry_django as sd
+from core.models.labor import Labor
+
+
+@sd.filter(Labor)
+class LaborFilter:
+    id: strawberry.auto
+    account_address_id: strawberry.auto
diff --git a/core/graphql/filters/messaging.py b/core/graphql/filters/messaging.py
new file mode 100644
index 0000000..abc1270
--- /dev/null
+++ b/core/graphql/filters/messaging.py
@@ -0,0 +1,29 @@
+import strawberry
+import strawberry_django as sd
+from core.models.messaging import Conversation, Message, ConversationParticipant
+
+
+@sd.filter(Conversation, lookups=True)
+class ConversationFilter:
+    id: strawberry.auto
+    conversation_type: strawberry.auto
+    is_archived: strawberry.auto
+    last_message_at: strawberry.auto
+    created_at: strawberry.auto
+
+
+@sd.filter(Message, lookups=True)
+class MessageFilter:
+    id: strawberry.auto
+    conversation_id: strawberry.auto
+    is_system_message: strawberry.auto
+    created_at: strawberry.auto
+
+
+@sd.filter(ConversationParticipant, lookups=True)
+class ConversationParticipantFilter:
+    id: strawberry.auto
+    conversation_id: strawberry.auto
+    is_muted: strawberry.auto
+    is_archived: strawberry.auto
+    unread_count: strawberry.auto
diff --git a/core/graphql/filters/profile.py b/core/graphql/filters/profile.py
new file mode 100644
index 0000000..ee4e0f3
--- /dev/null
+++ b/core/graphql/filters/profile.py
@@ -0,0 +1,14 @@
+import strawberry
+import strawberry_django as sd
+from core.models.profile import CustomerProfile, TeamProfile
+
+
+@sd.filter(CustomerProfile)
+class CustomerProfileFilter:
+    id: strawberry.auto
+    customers: strawberry.auto
+
+
+@sd.filter(TeamProfile)
+class TeamProfileFilter:
+    id: strawberry.auto
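For orientation, these filter classes become GraphQL input types once the query layer (added later in this patch under core/graphql/queries/) attaches them to fields: annotated strawberry.auto fields map to exact-match inputs, while the filter_* staticmethods and @sd.filter_field methods back custom inputs like search and isActive. A sketch of how that surfaces to a client, assuming a customers query field that accepts CustomerFilter (the field and argument names are assumptions; the queries are outside this excerpt):

    from core.graphql.schema import schema  # the schema assembled later in this patch

    result = schema.execute_sync(
        """
        query {
          customers(filters: { search: "acme", isActive: true }) {
            id
            name
          }
        }
        """
    )
    print(result.errors or result.data)

Strawberry's default camelCasing is why is_active appears as isActive on the wire.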
diff --git a/core/graphql/filters/project.py b/core/graphql/filters/project.py new file mode 100644 index 0000000..ccc5d21 --- /dev/null +++ b/core/graphql/filters/project.py @@ -0,0 +1,12 @@ +import strawberry +import strawberry_django as sd +from core.models.project import Project + +@sd.filter(Project, lookups=True) +class ProjectFilter: + id: strawberry.auto + customer_id: strawberry.auto + account_address_id: strawberry.auto + status: strawberry.auto + team_members: strawberry.auto + date: strawberry.auto \ No newline at end of file diff --git a/core/graphql/filters/project_punchlist.py b/core/graphql/filters/project_punchlist.py new file mode 100644 index 0000000..617ef4f --- /dev/null +++ b/core/graphql/filters/project_punchlist.py @@ -0,0 +1,9 @@ +import strawberry +import strawberry_django as sd +from core.models.project_punchlist import ProjectPunchlist + + +@sd.filter(ProjectPunchlist) +class ProjectPunchlistFilter: + id: strawberry.auto + project_id: strawberry.auto diff --git a/core/graphql/filters/project_scope.py b/core/graphql/filters/project_scope.py new file mode 100644 index 0000000..2144ac0 --- /dev/null +++ b/core/graphql/filters/project_scope.py @@ -0,0 +1,37 @@ +import strawberry +import strawberry_django as sd +from core.models.project_scope import ProjectScope, ProjectScopeCategory, ProjectScopeTask, ProjectScopeTaskCompletion + + +@sd.filter(ProjectScope) +class ProjectScopeFilter: + id: strawberry.auto + project_id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + is_active: strawberry.auto + + +@sd.filter(ProjectScopeCategory) +class ProjectScopeCategoryFilter: + id: strawberry.auto + scope_id: strawberry.auto + order: strawberry.auto + + +@sd.filter(ProjectScopeTask) +class ProjectScopeTaskFilter: + id: strawberry.auto + category_id: strawberry.auto + order: strawberry.auto + + +@sd.filter(ProjectScopeTaskCompletion) +class ProjectScopeTaskCompletionFilter: + id: strawberry.auto + project_id: strawberry.auto + task_id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + completed_by_id: strawberry.auto + diff --git a/core/graphql/filters/project_scope_template.py b/core/graphql/filters/project_scope_template.py new file mode 100644 index 0000000..9f578ec --- /dev/null +++ b/core/graphql/filters/project_scope_template.py @@ -0,0 +1,65 @@ +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.project_scope_template import ( + ProjectScopeTemplate, + ProjectAreaTemplate, + ProjectTaskTemplate, +) + + +@sd.filter(ProjectScopeTemplate) +class ProjectScopeTemplateFilter: + id: strawberry.auto + is_active: strawberry.auto + + # Convenience search fields + name_search: Optional[str] = strawberry.field(default=None, description="Case-insensitive search on name") + description_search: Optional[str] = strawberry.field(default=None, + description="Case-insensitive search on description") + + @staticmethod + def filter_name_search(queryset, value: Optional[str]): + if not value: + return queryset + return queryset.filter(name__icontains=value) + + @staticmethod + def filter_description_search(queryset, value: Optional[str]): + if not value: + return queryset + return queryset.filter(description__icontains=value) + + +@sd.filter(ProjectAreaTemplate) +class ProjectAreaTemplateFilter: + id: strawberry.auto + scope_template_id: strawberry.auto + order: strawberry.auto + + # Convenience search + name_search: Optional[str] = strawberry.field(default=None, 
description="Case-insensitive search on name") + + @staticmethod + def filter_name_search(queryset, value: Optional[str]): + if not value: + return queryset + return queryset.filter(name__icontains=value) + + +@sd.filter(ProjectTaskTemplate) +class ProjectTaskTemplateFilter: + id: strawberry.auto + area_template_id: strawberry.auto + order: strawberry.auto + estimated_minutes: strawberry.auto + + # Convenience search + description_search: Optional[str] = strawberry.field(default=None, + description="Case-insensitive search on description") + + @staticmethod + def filter_description_search(queryset, value: Optional[str]): + if not value: + return queryset + return queryset.filter(description__icontains=value) diff --git a/core/graphql/filters/report.py b/core/graphql/filters/report.py new file mode 100644 index 0000000..42c84ed --- /dev/null +++ b/core/graphql/filters/report.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from core.models.report import Report + + +@sd.filter(Report) +class ReportFilter: + id: strawberry.auto + date: strawberry.auto + team_member_id: strawberry.auto diff --git a/core/graphql/filters/revenue.py b/core/graphql/filters/revenue.py new file mode 100644 index 0000000..3044f15 --- /dev/null +++ b/core/graphql/filters/revenue.py @@ -0,0 +1,9 @@ +import strawberry +import strawberry_django as sd +from core.models.revenue import Revenue + + +@sd.filter(Revenue) +class RevenueFilter: + id: strawberry.auto + account_id: strawberry.auto diff --git a/core/graphql/filters/schedule.py b/core/graphql/filters/schedule.py new file mode 100644 index 0000000..ea5103b --- /dev/null +++ b/core/graphql/filters/schedule.py @@ -0,0 +1,9 @@ +import strawberry +import strawberry_django as sd +from core.models.schedule import Schedule + + +@sd.filter(Schedule) +class ScheduleFilter: + id: strawberry.auto + account_address_id: strawberry.auto diff --git a/core/graphql/filters/scope.py b/core/graphql/filters/scope.py new file mode 100644 index 0000000..952f4e6 --- /dev/null +++ b/core/graphql/filters/scope.py @@ -0,0 +1,40 @@ +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.scope import Scope, Area, Task, TaskCompletion + + +@sd.filter(Scope) +class ScopeFilter: + id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + is_active: strawberry.auto + search: Optional[str] = strawberry.field(default=None) + + @staticmethod + def filter_search(queryset, value: str): + return queryset.filter(name__icontains=value) + + +@sd.filter(Area) +class AreaFilter: + id: strawberry.auto + scope_id: strawberry.auto + + +@sd.filter(Task) +class TaskFilter: + id: strawberry.auto + area_id: strawberry.auto + frequency: strawberry.auto + + +@sd.filter(TaskCompletion) +class TaskCompletionFilter: + id: strawberry.auto + service_id: strawberry.auto + task_id: strawberry.auto + completed_by_id: strawberry.auto + year: strawberry.auto + month: strawberry.auto diff --git a/core/graphql/filters/scope_template.py b/core/graphql/filters/scope_template.py new file mode 100644 index 0000000..9e72a51 --- /dev/null +++ b/core/graphql/filters/scope_template.py @@ -0,0 +1,52 @@ +from typing import Optional +import strawberry +import strawberry_django as sd +from django.db.models import Q +from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate + + +@sd.filter(ScopeTemplate) +class ScopeTemplateFilter: + id: strawberry.auto + is_active: Optional[bool] = strawberry.field(default=None) + search: 
Optional[str] = strawberry.field(default=None, description="Case-insensitive search on name or description") + + @staticmethod + def filter_is_active(queryset, value: bool): + return queryset.filter(is_active=value) + + @staticmethod + def filter_search(queryset, value: str): + return queryset.filter(Q(name__icontains=value) | Q(description__icontains=value)) + + +@sd.filter(AreaTemplate) +class AreaTemplateFilter: + id: strawberry.auto + scope_template_id: strawberry.auto + search: Optional[str] = strawberry.field(default=None, description="Case-insensitive search on name") + + @staticmethod + def filter_search(queryset, value: str): + return queryset.filter(name__icontains=value) + + +@sd.filter(TaskTemplate) +class TaskTemplateFilter: + id: strawberry.auto + area_template_id: strawberry.auto + frequency: Optional[str] = strawberry.field(default=None) + is_conditional: Optional[bool] = strawberry.field(default=None) + description_search: Optional[str] = strawberry.field(default=None, description="Case-insensitive search on description") + + @staticmethod + def filter_frequency(queryset, value: str): + return queryset.filter(frequency=value) + + @staticmethod + def filter_is_conditional(queryset, value: bool): + return queryset.filter(is_conditional=value) + + @staticmethod + def filter_description_search(queryset, value: str): + return queryset.filter(description__icontains=value) \ No newline at end of file diff --git a/core/graphql/filters/service.py b/core/graphql/filters/service.py new file mode 100644 index 0000000..a99dbaa --- /dev/null +++ b/core/graphql/filters/service.py @@ -0,0 +1,12 @@ +import strawberry +import strawberry_django as sd +from core.models.service import Service + +@sd.filter(Service, lookups=True) +class ServiceFilter: + id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + status: strawberry.auto + team_members: strawberry.auto + date: strawberry.auto \ No newline at end of file diff --git a/core/graphql/filters/session.py b/core/graphql/filters/session.py new file mode 100644 index 0000000..92d7109 --- /dev/null +++ b/core/graphql/filters/session.py @@ -0,0 +1,52 @@ +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.session import ServiceSession, ProjectSession + + +@sd.filter(ServiceSession, lookups=True) +class ServiceSessionFilter: + id: strawberry.auto + service_id: strawberry.auto + account_address_id: strawberry.auto + start: strawberry.auto + end: strawberry.auto + created_by_id: strawberry.auto + + team_member_id: Optional[str] = strawberry.field(default=strawberry.UNSET) + is_active: Optional[bool] = strawberry.field(default=None) + + @staticmethod + def filter_team_member_id(queryset, value: Optional[str]): + if value is None or value is strawberry.UNSET: + return queryset + # Filter through the service -> team_members relationship + return queryset.filter(service__team_members__id=value) + + @staticmethod + def filter_is_active(queryset, value: Optional[bool]): + if value is None: + return queryset + return queryset.filter(end__isnull=value) + + +@sd.filter(ProjectSession, lookups=True) +class ProjectSessionFilter: + id: strawberry.auto + project_id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + customer_id: strawberry.auto + scope_id: strawberry.auto + created_by_id: strawberry.auto + date: strawberry.auto + start: strawberry.auto + end: strawberry.auto + + team_member_id: Optional[str] = 
strawberry.field(default=strawberry.UNSET) + + @staticmethod + def filter_team_member_id(queryset, value: Optional[str]): + if value is None or value is strawberry.UNSET: + return queryset + return queryset.filter(project__team_members__id=value) \ No newline at end of file diff --git a/core/graphql/filters/session_image.py b/core/graphql/filters/session_image.py new file mode 100644 index 0000000..78ffa81 --- /dev/null +++ b/core/graphql/filters/session_image.py @@ -0,0 +1,48 @@ +from datetime import datetime +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.session_image import ServiceSessionImage, ProjectSessionImage + +@sd.filter(ServiceSessionImage) +class ServiceSessionImageFilter: + id: strawberry.auto + service_session_id: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + title_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + + @staticmethod + def filter_title_contains(qs, value: str): + return qs.filter(title__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) + + +@sd.filter(ProjectSessionImage) +class ProjectSessionImageFilter: + id: strawberry.auto + project_session_id: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + title_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + + @staticmethod + def filter_title_contains(qs, value: str): + return qs.filter(title__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) diff --git a/core/graphql/filters/session_note.py b/core/graphql/filters/session_note.py new file mode 100644 index 0000000..ebc844b --- /dev/null +++ b/core/graphql/filters/session_note.py @@ -0,0 +1,51 @@ +from datetime import datetime +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.session import ServiceSessionNote, ProjectSessionNote + + +@sd.filter(ServiceSessionNote) +class ServiceSessionNoteFilter: + id: strawberry.auto + session_id: strawberry.auto + author_id: strawberry.auto + internal: strawberry.auto + content_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + + @staticmethod + def filter_content_contains(qs, value: str): + return qs.filter(content__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) + + +@sd.filter(ProjectSessionNote) +class ProjectSessionNoteFilter: + id: strawberry.auto + session_id: strawberry.auto + author_id: strawberry.auto + internal: strawberry.auto + content_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + + @staticmethod + def 
filter_content_contains(qs, value: str): + return qs.filter(content__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) diff --git a/core/graphql/filters/session_video.py b/core/graphql/filters/session_video.py new file mode 100644 index 0000000..8ceb2a3 --- /dev/null +++ b/core/graphql/filters/session_video.py @@ -0,0 +1,75 @@ +from datetime import datetime +import strawberry +import strawberry_django as sd +from typing import Optional +from core.models.session_video import ServiceSessionVideo, ProjectSessionVideo + + +@sd.filter(ServiceSessionVideo) +class ServiceSessionVideoFilter: + id: strawberry.auto + service_session_id: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + internal: strawberry.auto + title_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + min_duration: Optional[int] = strawberry.field(default=None) + max_duration: Optional[int] = strawberry.field(default=None) + + @staticmethod + def filter_title_contains(qs, value: str): + return qs.filter(title__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) + + @staticmethod + def filter_min_duration(qs, value: int): + """Filter videos with duration >= value (in seconds)""" + return qs.filter(duration_seconds__gte=value) + + @staticmethod + def filter_max_duration(qs, value: int): + """Filter videos with duration <= value (in seconds)""" + return qs.filter(duration_seconds__lte=value) + + +@sd.filter(ProjectSessionVideo) +class ProjectSessionVideoFilter: + id: strawberry.auto + project_session_id: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + internal: strawberry.auto + title_contains: Optional[str] = strawberry.field(default=None) + created_after: Optional[datetime] = strawberry.field(default=None) + created_before: Optional[datetime] = strawberry.field(default=None) + min_duration: Optional[int] = strawberry.field(default=None) + max_duration: Optional[int] = strawberry.field(default=None) + + @staticmethod + def filter_title_contains(qs, value: str): + return qs.filter(title__icontains=value) + + @staticmethod + def filter_created_after(qs, value): + return qs.filter(created_at__gte=value) + + @staticmethod + def filter_created_before(qs, value): + return qs.filter(created_at__lte=value) + + @staticmethod + def filter_min_duration(qs, value: int): + """Filter videos with duration >= value (in seconds)""" + return qs.filter(duration_seconds__gte=value) + + @staticmethod + def filter_max_duration(qs, value: int): + """Filter videos with duration <= value (in seconds)""" + return qs.filter(duration_seconds__lte=value) diff --git a/core/graphql/inputs/__init__.py b/core/graphql/inputs/__init__.py new file mode 100644 index 0000000..4e7f6d8 --- /dev/null +++ b/core/graphql/inputs/__init__.py @@ -0,0 +1,18 @@ +from core.graphql.inputs.customer import * +from core.graphql.inputs.account import * +from core.graphql.inputs.project import * +from core.graphql.inputs.service import * +from core.graphql.inputs.labor import * +from core.graphql.inputs.revenue import * +from core.graphql.inputs.schedule import * +from 
core.graphql.inputs.invoice import * +from core.graphql.inputs.report import * +from core.graphql.inputs.account_punchlist import * +from core.graphql.inputs.project_punchlist import * +from core.graphql.inputs.profile import * +from core.graphql.inputs.scope import * +from core.graphql.inputs.scope_template import * +from core.graphql.inputs.project_scope import * +from core.graphql.inputs.project_scope_template import * +from core.graphql.inputs.session import * +from core.graphql.inputs.session_image import * \ No newline at end of file diff --git a/core/graphql/inputs/account.py b/core/graphql/inputs/account.py new file mode 100644 index 0000000..a58e985 --- /dev/null +++ b/core/graphql/inputs/account.py @@ -0,0 +1,76 @@ +import datetime +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +# Account inputs +@strawberry.input +class AccountInput: + customer_id: GlobalID + name: str + status: str + start_date: datetime.date + end_date: Optional[datetime.date] = None + + +@strawberry.input +class AccountUpdateInput: + id: GlobalID + customer_id: Optional[GlobalID] = None + name: Optional[str] = None + status: Optional[str] = None + start_date: Optional[datetime.date] = None + end_date: Optional[datetime.date] = None + + +# AccountAddress inputs +@strawberry.input +class AccountAddressInput: + account_id: GlobalID + name: str + street_address: str + city: str + state: str + zip_code: str + is_active: bool = True + is_primary: bool = False + notes: str = "" + + +@strawberry.input +class AccountAddressUpdateInput: + id: GlobalID + name: Optional[str] = None + street_address: Optional[str] = None + city: Optional[str] = None + state: Optional[str] = None + zip_code: Optional[str] = None + is_active: Optional[bool] = None + is_primary: Optional[bool] = None + notes: Optional[str] = None + + +# AccountContact inputs +@strawberry.input +class AccountContactInput: + account_id: GlobalID + first_name: str + last_name: str + phone: Optional[str] = None + email: Optional[str] = None + is_primary: bool = False + is_active: bool = True + notes: str = "" + + +@strawberry.input +class AccountContactUpdateInput: + id: GlobalID + first_name: Optional[str] = None + last_name: Optional[str] = None + phone: Optional[str] = None + email: Optional[str] = None + is_primary: Optional[bool] = None + is_active: Optional[bool] = None + notes: Optional[str] = None diff --git a/core/graphql/inputs/account_punchlist.py b/core/graphql/inputs/account_punchlist.py new file mode 100644 index 0000000..1cced2a --- /dev/null +++ b/core/graphql/inputs/account_punchlist.py @@ -0,0 +1,17 @@ +import datetime +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class AccountPunchlistInput: + account_id: GlobalID + date: datetime.date + + +@strawberry.input +class AccountPunchlistUpdateInput: + id: GlobalID + account_id: Optional[GlobalID] = None + date: Optional[datetime.date] = None diff --git a/core/graphql/inputs/customer.py b/core/graphql/inputs/customer.py new file mode 100644 index 0000000..9feccbd --- /dev/null +++ b/core/graphql/inputs/customer.py @@ -0,0 +1,78 @@ +import datetime +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +# Customer inputs +@strawberry.input +class CustomerInput: + name: str + status: str + start_date: datetime.date + end_date: Optional[datetime.date] = None + billing_terms: str + billing_email: str + wave_customer_id: Optional[str] = None + + 
+@strawberry.input +class CustomerUpdateInput: + id: GlobalID + name: Optional[str] = None + status: Optional[str] = None + start_date: Optional[datetime.date] = None + end_date: Optional[datetime.date] = None + billing_terms: Optional[str] = None + billing_email: Optional[str] = None + wave_customer_id: Optional[str] = None + + +# CustomerAddress inputs +@strawberry.input +class CustomerAddressInput: + customer_id: GlobalID + street_address: str + city: str + state: str + zip_code: str + address_type: str + is_active: bool = True + is_primary: bool = False + + +@strawberry.input +class CustomerAddressUpdateInput: + id: GlobalID + street_address: Optional[str] = None + city: Optional[str] = None + state: Optional[str] = None + zip_code: Optional[str] = None + address_type: Optional[str] = None + is_active: Optional[bool] = None + is_primary: Optional[bool] = None + + +# CustomerContact inputs +@strawberry.input +class CustomerContactInput: + customer_id: GlobalID + first_name: str + last_name: str + phone: str + email: str + is_primary: bool = False + is_active: bool = True + notes: str = "" + + +@strawberry.input +class CustomerContactUpdateInput: + id: GlobalID + first_name: Optional[str] = None + last_name: Optional[str] = None + phone: Optional[str] = None + email: Optional[str] = None + is_primary: Optional[bool] = None + is_active: Optional[bool] = None + notes: Optional[str] = None diff --git a/core/graphql/inputs/invoice.py b/core/graphql/inputs/invoice.py new file mode 100644 index 0000000..fe5d202 --- /dev/null +++ b/core/graphql/inputs/invoice.py @@ -0,0 +1,29 @@ +import datetime +from typing import List, Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class InvoiceInput: + date: datetime.date + customer_id: GlobalID + status: str + date_paid: Optional[datetime.date] = None + payment_type: Optional[str] = None + project_ids: Optional[List[GlobalID]] = None + revenue_ids: Optional[List[GlobalID]] = None + wave_invoice_id: Optional[str] = None + + +@strawberry.input +class InvoiceUpdateInput: + id: GlobalID + date: Optional[datetime.date] = None + customer_id: Optional[GlobalID] = None + status: Optional[str] = None + date_paid: Optional[datetime.date] = None + payment_type: Optional[str] = None + project_ids: Optional[List[GlobalID]] = None + revenue_ids: Optional[List[GlobalID]] = None + wave_invoice_id: Optional[str] = None diff --git a/core/graphql/inputs/labor.py b/core/graphql/inputs/labor.py new file mode 100644 index 0000000..ce41369 --- /dev/null +++ b/core/graphql/inputs/labor.py @@ -0,0 +1,21 @@ +import datetime +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class LaborInput: + account_address_id: GlobalID + amount: float + start_date: datetime.date + end_date: Optional[datetime.date] = None + + +@strawberry.input +class LaborUpdateInput: + id: GlobalID + account_address_id: Optional[GlobalID] = None + amount: Optional[float] = None + start_date: Optional[datetime.date] = None + end_date: Optional[datetime.date] = None diff --git a/core/graphql/inputs/messaging.py b/core/graphql/inputs/messaging.py new file mode 100644 index 0000000..7caa9c4 --- /dev/null +++ b/core/graphql/inputs/messaging.py @@ -0,0 +1,75 @@ +from typing import List, Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class ConversationInput: + """Input for creating a new conversation""" + subject: str + conversation_type: str # DIRECT, GROUP, SUPPORT + 
participant_ids: List[GlobalID] # List of TeamProfile or CustomerProfile IDs + entity_type: Optional[str] = None # e.g., "Project", "Service", "Account" + entity_id: Optional[GlobalID] = None # UUID of the entity + metadata: Optional[str] = None # JSON string + + +@strawberry.input +class ConversationUpdateInput: + """Input for updating a conversation""" + id: GlobalID + subject: Optional[str] = None + is_archived: Optional[bool] = None + metadata: Optional[str] = None + + +@strawberry.input +class MessageInput: + """Input for sending a new message""" + conversation_id: GlobalID + body: str + reply_to_id: Optional[GlobalID] = None # For threading + attachments: Optional[str] = None # JSON string with attachment metadata + metadata: Optional[str] = None # JSON string + + +@strawberry.input +class MessageUpdateInput: + """Input for updating a message (limited fields)""" + id: GlobalID + body: str + attachments: Optional[str] = None # JSON string with attachment metadata + + +@strawberry.input +class AddParticipantInput: + """Input for adding a participant to a conversation""" + conversation_id: GlobalID + participant_id: GlobalID # TeamProfile or CustomerProfile ID + + +@strawberry.input +class RemoveParticipantInput: + """Input for removing a participant from a conversation""" + conversation_id: GlobalID + participant_id: GlobalID + + +@strawberry.input +class MarkAsReadInput: + """Input for marking messages as read""" + conversation_id: GlobalID + + +@strawberry.input +class ArchiveConversationInput: + """Input for archiving/unarchiving a conversation""" + conversation_id: GlobalID + is_archived: bool + + +@strawberry.input +class MuteConversationInput: + """Input for muting/unmuting a conversation""" + conversation_id: GlobalID + is_muted: bool diff --git a/core/graphql/inputs/profile.py b/core/graphql/inputs/profile.py new file mode 100644 index 0000000..f131134 --- /dev/null +++ b/core/graphql/inputs/profile.py @@ -0,0 +1,53 @@ +from typing import Optional, List +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class CustomerProfileInput: + user_id: Optional[GlobalID] = None + first_name: str + last_name: str + email: Optional[str] = None + phone: Optional[str] = None + status: str = 'PENDING' + notes: Optional[str] = '' + customer_ids: Optional[List[GlobalID]] = None + + +@strawberry.input +class CustomerProfileUpdateInput: + id: GlobalID + user_id: Optional[GlobalID] = None + first_name: Optional[str] = None + last_name: Optional[str] = None + email: Optional[str] = None + phone: Optional[str] = None + status: Optional[str] = None + notes: Optional[str] = None + customer_ids: Optional[List[GlobalID]] = None + + +@strawberry.input +class TeamProfileInput: + user_id: Optional[GlobalID] = None + first_name: str + last_name: str + email: Optional[str] = None + phone: Optional[str] = None + status: str = 'PENDING' + notes: Optional[str] = None + role: str + + +@strawberry.input +class TeamProfileUpdateInput: + id: GlobalID + user_id: Optional[GlobalID] = None + first_name: Optional[str] = None + last_name: Optional[str] = None + email: Optional[str] = None + phone: Optional[str] = None + status: Optional[str] = None + notes: Optional[str] = None + role: Optional[str] = None diff --git a/core/graphql/inputs/project.py b/core/graphql/inputs/project.py new file mode 100644 index 0000000..80a597e --- /dev/null +++ b/core/graphql/inputs/project.py @@ -0,0 +1,45 @@ +import datetime +from typing import List, Optional +import strawberry +from strawberry.relay import 
GlobalID + + +@strawberry.input +class ProjectInput: + customer_id: GlobalID + account_address_id: Optional[GlobalID] = None + street_address: Optional[str] = None + city: Optional[str] = None + state: Optional[str] = None + zip_code: Optional[str] = None + name: str + date: datetime.date + status: str + notes: Optional[str] = None + labor: float + amount: float + team_member_ids: Optional[List[GlobalID]] = None + scope_id: Optional[GlobalID] = None + calendar_event_id: Optional[str] = None + wave_service_id: Optional[str] = None + + +@strawberry.input +class ProjectUpdateInput: + id: GlobalID + customer_id: Optional[GlobalID] = None + account_address_id: Optional[GlobalID] = None + street_address: Optional[str] = None + city: Optional[str] = None + state: Optional[str] = None + zip_code: Optional[str] = None + name: Optional[str] = None + date: Optional[datetime.date] = None + status: Optional[str] = None + notes: Optional[str] = None + labor: Optional[float] = None + amount: Optional[float] = None + team_member_ids: Optional[List[GlobalID]] = None + scope_id: Optional[GlobalID] = None + calendar_event_id: Optional[str] = None + wave_service_id: Optional[str] = None diff --git a/core/graphql/inputs/project_punchlist.py b/core/graphql/inputs/project_punchlist.py new file mode 100644 index 0000000..b651cd2 --- /dev/null +++ b/core/graphql/inputs/project_punchlist.py @@ -0,0 +1,17 @@ +import datetime +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class ProjectPunchlistInput: + project_id: GlobalID + date: datetime.date + + +@strawberry.input +class ProjectPunchlistUpdateInput: + id: GlobalID + project_id: Optional[GlobalID] = None + date: Optional[datetime.date] = None diff --git a/core/graphql/inputs/project_scope.py b/core/graphql/inputs/project_scope.py new file mode 100644 index 0000000..369c2c6 --- /dev/null +++ b/core/graphql/inputs/project_scope.py @@ -0,0 +1,66 @@ +from typing import Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class ProjectScopeInput: + name: str + project_id: GlobalID + account_id: Optional[GlobalID] = None + account_address_id: Optional[GlobalID] = None + description: Optional[str] = None + is_active: Optional[bool] = True + + +@strawberry.input +class ProjectScopeUpdateInput: + id: GlobalID + name: Optional[str] = None + account_id: Optional[GlobalID] = None + account_address_id: Optional[GlobalID] = None + description: Optional[str] = None + is_active: Optional[bool] = None + + +@strawberry.input +class ProjectScopeCategoryInput: + scope_id: GlobalID + name: str + order: int = 0 + + +@strawberry.input +class ProjectScopeCategoryUpdateInput: + id: GlobalID + name: Optional[str] = None + order: Optional[int] = None + + +@strawberry.input +class ProjectScopeTaskInput: + category_id: GlobalID + description: str + checklist_description: Optional[str] = "" + order: int = 0 + estimated_minutes: Optional[int] = None + + +@strawberry.input +class ProjectScopeTaskUpdateInput: + id: GlobalID + description: Optional[str] = None + checklist_description: Optional[str] = None + order: Optional[int] = None + estimated_minutes: Optional[int] = None + + +@strawberry.input +class CreateProjectScopeFromTemplateInput: + template_id: GlobalID + project_id: GlobalID + account_id: Optional[GlobalID] = None + account_address_id: Optional[GlobalID] = None + name: Optional[str] = None + description: Optional[str] = None + is_active: Optional[bool] = True diff --git 
a/core/graphql/inputs/project_scope_template.py b/core/graphql/inputs/project_scope_template.py
new file mode 100644
index 0000000..ed33cd0
--- /dev/null
+++ b/core/graphql/inputs/project_scope_template.py
@@ -0,0 +1,50 @@
+from typing import Optional
+import strawberry
+from strawberry.relay import GlobalID
+
+
+@strawberry.input
+class ProjectScopeTemplateInput:
+    name: str
+    description: Optional[str] = ""
+    is_active: Optional[bool] = True
+
+
+@strawberry.input
+class ProjectScopeTemplateUpdateInput:
+    id: GlobalID
+    name: Optional[str] = None
+    description: Optional[str] = None
+    is_active: Optional[bool] = None
+
+
+@strawberry.input
+class ProjectAreaTemplateInput:
+    scope_template_id: GlobalID
+    name: str
+    order: int = 0
+
+
+@strawberry.input
+class ProjectAreaTemplateUpdateInput:
+    id: GlobalID
+    name: Optional[str] = None
+    order: Optional[int] = None
+
+
+@strawberry.input
+class ProjectTaskTemplateInput:
+    area_template_id: GlobalID
+    description: str
+    checklist_description: Optional[str] = ""
+    order: int = 0
+    estimated_minutes: Optional[int] = None
+
+
+@strawberry.input
+class ProjectTaskTemplateUpdateInput:
+    id: GlobalID
+    description: Optional[str] = None
+    checklist_description: Optional[str] = None
+    order: Optional[int] = None
+    estimated_minutes: Optional[int] = None
diff --git a/core/graphql/inputs/report.py b/core/graphql/inputs/report.py
new file mode 100644
index 0000000..012024b
--- /dev/null
+++ b/core/graphql/inputs/report.py
@@ -0,0 +1,21 @@
+import datetime
+from typing import List, Optional
+import strawberry
+from strawberry.relay import GlobalID
+
+
+@strawberry.input
+class ReportInput:
+    date: datetime.date
+    team_member_id: GlobalID
+    service_ids: Optional[List[GlobalID]] = None
+    project_ids: Optional[List[GlobalID]] = None
+
+
+@strawberry.input
+class ReportUpdateInput:
+    id: GlobalID
+    date: Optional[datetime.date] = None
+    team_member_id: Optional[GlobalID] = None
+    service_ids: Optional[List[GlobalID]] = None
+    project_ids: Optional[List[GlobalID]] = None
diff --git a/core/graphql/inputs/revenue.py b/core/graphql/inputs/revenue.py
new file mode 100644
index 0000000..3a7b51d
--- /dev/null
+++ b/core/graphql/inputs/revenue.py
@@ -0,0 +1,23 @@
+import datetime
+from typing import Optional
+import strawberry
+from strawberry.relay import GlobalID
+
+
+@strawberry.input
+class RevenueInput:
+    account_id: GlobalID
+    amount: float
+    start_date: datetime.date
+    end_date: Optional[datetime.date] = None
+    wave_service_id: Optional[str] = None
+
+
+@strawberry.input
+class RevenueUpdateInput:
+    id: GlobalID
+    account_id: Optional[GlobalID] = None
+    amount: Optional[float] = None
+    start_date: Optional[datetime.date] = None
+    end_date: Optional[datetime.date] = None
+    wave_service_id: Optional[str] = None
diff --git a/core/graphql/inputs/schedule.py b/core/graphql/inputs/schedule.py
new file mode 100644
index 0000000..fd03863
--- /dev/null
+++ b/core/graphql/inputs/schedule.py
@@ -0,0 +1,39 @@
+import datetime
+from typing import Optional
+import strawberry
+from strawberry.relay import GlobalID
+
+
+@strawberry.input
+class ScheduleInput:
+    account_address_id: GlobalID
+    start_date: datetime.date
+    name: Optional[str] = None
+    monday_service: bool = False
+    tuesday_service: bool = False
+    wednesday_service: bool = False
+    thursday_service: bool = False
+    friday_service: bool = False
+    saturday_service: bool = False
+    sunday_service: bool = False
+    weekend_service: bool = False
+    schedule_exception: Optional[str] = None
+    end_date: Optional[datetime.date] = None
+
+
+@strawberry.input
+class ScheduleUpdateInput:
+    id: GlobalID
+    account_address_id: Optional[GlobalID] = None
+    name: Optional[str] = None
+    monday_service: Optional[bool] = None
+    tuesday_service: Optional[bool] = None
+    wednesday_service: Optional[bool] = None
+    thursday_service: Optional[bool] = None
+    friday_service: Optional[bool] = None
+    saturday_service: Optional[bool] = None
+    sunday_service: Optional[bool] = None
+    weekend_service: Optional[bool] = None
+    schedule_exception: Optional[str] = None
+    start_date: Optional[datetime.date] = None
+    end_date: Optional[datetime.date] = None
diff --git a/core/graphql/inputs/scope.py b/core/graphql/inputs/scope.py
new file mode 100644
index 0000000..008b91b
--- /dev/null
+++ b/core/graphql/inputs/scope.py
@@ -0,0 +1,84 @@
+import datetime
+from typing import Optional
+import strawberry
+from strawberry.relay import GlobalID
+
+
+# Scope inputs
+@strawberry.input
+class ScopeInput:
+    name: str
+    account_id: GlobalID
+    account_address_id: Optional[GlobalID] = None
+    description: str = ""
+    is_active: bool = True
+
+
+@strawberry.input
+class ScopeUpdateInput:
+    id: GlobalID
+    name: Optional[str] = None
+    account_id: Optional[GlobalID] = None
+    account_address_id: Optional[GlobalID] = None
+    description: Optional[str] = None
+    is_active: Optional[bool] = None
+
+
+# Area inputs
+@strawberry.input
+class AreaInput:
+    name: str
+    scope_id: GlobalID
+    order: int = 0
+
+
+@strawberry.input
+class AreaUpdateInput:
+    id: GlobalID
+    name: Optional[str] = None
+    scope_id: Optional[GlobalID] = None
+    order: Optional[int] = None
+
+
+# Task inputs
+@strawberry.input
+class TaskInput:
+    area_id: GlobalID
+    description: str
+    checklist_description: Optional[str] = None
+    frequency: str
+    order: int = 0
+    is_conditional: bool = False
+    estimated_minutes: Optional[int] = None
+
+
+@strawberry.input
+class TaskUpdateInput:
+    id: GlobalID
+    area_id: Optional[GlobalID] = None
+    description: Optional[str] = None
+    checklist_description: Optional[str] = None
+    frequency: Optional[str] = None
+    order: Optional[int] = None
+    is_conditional: Optional[bool] = None
+    estimated_minutes: Optional[int] = None
+
+
+# TaskCompletion inputs
+@strawberry.input
+class TaskCompletionInput:
+    service_id: GlobalID
+    task_id: GlobalID
+    completed_by_id: GlobalID
+    completed_at: datetime.datetime
+    notes: str = ""
+
+
+@strawberry.input
+class TaskCompletionUpdateInput:
+    id: GlobalID
+    service_id: Optional[GlobalID] = None
+    task_id: Optional[GlobalID] = None
+    completed_by_id: Optional[GlobalID] = None
+    completed_at: Optional[datetime.datetime] = None
+    notes: Optional[str] = None
\ No newline at end of file
diff --git a/core/graphql/inputs/scope_template.py b/core/graphql/inputs/scope_template.py
new file mode 100644
index 0000000..f53759f
--- /dev/null
+++ b/core/graphql/inputs/scope_template.py
@@ -0,0 +1,63 @@
+import strawberry
+from typing import Optional
+
+
+@strawberry.input
+class ScopeTemplateInput:
+    name: str
+    description: Optional[str] = None
+    is_active: Optional[bool] = True
+
+
+@strawberry.input
+class ScopeTemplateUpdateInput:
+    id: strawberry.ID
+    name: Optional[str] = None
+    description: Optional[str] = None
+    is_active: Optional[bool] = None
+
+
+@strawberry.input
+class AreaTemplateInput:
+    scope_template_id: strawberry.ID
+    name: str
+    order: Optional[int] = 0
+
+
+@strawberry.input
+class AreaTemplateUpdateInput:
+    id: strawberry.ID
+    name: Optional[str] = None
+    order: Optional[int] = None
+
+
+@strawberry.input
+class
TaskTemplateInput: + area_template_id: strawberry.ID + description: str + checklist_description: Optional[str] = None + frequency: str # Must match TaskFrequencyChoices values + order: Optional[int] = 0 + is_conditional: Optional[bool] = False + estimated_minutes: Optional[int] = None + + +@strawberry.input +class TaskTemplateUpdateInput: + id: strawberry.ID + description: Optional[str] = None + checklist_description: Optional[str] = None + frequency: Optional[str] = None + order: Optional[int] = None + is_conditional: Optional[bool] = None + estimated_minutes: Optional[int] = None + + +@strawberry.input +class CreateScopeFromTemplateInput: + template_id: strawberry.ID + account_id: strawberry.ID + account_address_id: Optional[strawberry.ID] = None + name: Optional[str] = None + description: Optional[str] = None + is_active: Optional[bool] = True \ No newline at end of file diff --git a/core/graphql/inputs/service.py b/core/graphql/inputs/service.py new file mode 100644 index 0000000..8699d39 --- /dev/null +++ b/core/graphql/inputs/service.py @@ -0,0 +1,34 @@ +import datetime +from typing import List, Optional +import strawberry +from strawberry.relay import GlobalID + + +@strawberry.input +class ServiceInput: + account_id: Optional[GlobalID] = None + account_address_id: GlobalID + date: datetime.date + status: str + notes: Optional[str] = None + team_member_ids: Optional[List[GlobalID]] = None + calendar_event_id: Optional[str] = None + + +@strawberry.input +class ServiceUpdateInput: + id: GlobalID + account_id: Optional[GlobalID] = None + account_address_id: Optional[GlobalID] = None + date: Optional[datetime.date] = None + status: Optional[str] = None + notes: Optional[str] = None + team_member_ids: Optional[List[GlobalID]] = None + calendar_event_id: Optional[str] = None + +@strawberry.input +class ServiceGenerationInput: + account_address_id: GlobalID + schedule_id: GlobalID + month: int + year: int \ No newline at end of file diff --git a/core/graphql/inputs/session.py b/core/graphql/inputs/session.py new file mode 100644 index 0000000..e2bf38d --- /dev/null +++ b/core/graphql/inputs/session.py @@ -0,0 +1,36 @@ +from typing import List, Optional +import strawberry +from strawberry import ID +from strawberry.relay import GlobalID + + +@strawberry.input +class OpenServiceSessionInput: + service_id: ID + + +@strawberry.input +class CloseServiceSessionInput: + service_id: ID + task_ids: List[ID] + + +@strawberry.input +class RevertServiceSessionInput: + service_id: ID + + +@strawberry.input +class ProjectSessionStartInput: + project_id: ID + + +@strawberry.input +class ProjectSessionCloseInput: + project_id: ID + completed_task_ids: Optional[List[ID]] = None + + +@strawberry.input +class ProjectSessionRevertInput: + project_id: ID diff --git a/core/graphql/inputs/session_image.py b/core/graphql/inputs/session_image.py new file mode 100644 index 0000000..620100a --- /dev/null +++ b/core/graphql/inputs/session_image.py @@ -0,0 +1,17 @@ +import strawberry +from typing import Optional +from strawberry.relay import GlobalID + +@strawberry.input +class ServiceSessionImageUpdateInput: + id: GlobalID + title: Optional[str] = None + notes: Optional[str] = None + internal: Optional[bool] = None + +@strawberry.input +class ProjectSessionImageUpdateInput: + id: GlobalID + title: Optional[str] = None + notes: Optional[str] = None + internal: Optional[bool] = None \ No newline at end of file diff --git a/core/graphql/inputs/session_note.py b/core/graphql/inputs/session_note.py new file mode 100644 
index 0000000..434e55a --- /dev/null +++ b/core/graphql/inputs/session_note.py @@ -0,0 +1,35 @@ +import strawberry +from typing import Optional +from strawberry.relay import GlobalID + + +@strawberry.input +class ServiceSessionNoteInput: + session_id: GlobalID + content: str + author_id: Optional[GlobalID] = None + internal: bool = True + + +@strawberry.input +class ServiceSessionNoteUpdateInput: + id: GlobalID + content: Optional[str] = None + author_id: Optional[GlobalID] = None + internal: Optional[bool] = None + + +@strawberry.input +class ProjectSessionNoteInput: + session_id: GlobalID + content: str + author_id: Optional[GlobalID] = None + internal: bool = True + + +@strawberry.input +class ProjectSessionNoteUpdateInput: + id: GlobalID + content: Optional[str] = None + author_id: Optional[GlobalID] = None + internal: Optional[bool] = None diff --git a/core/graphql/inputs/session_video.py b/core/graphql/inputs/session_video.py new file mode 100644 index 0000000..d08a040 --- /dev/null +++ b/core/graphql/inputs/session_video.py @@ -0,0 +1,17 @@ +import strawberry +from typing import Optional +from strawberry.relay import GlobalID + +@strawberry.input +class ServiceSessionVideoUpdateInput: + id: GlobalID + title: Optional[str] = None + notes: Optional[str] = None + internal: Optional[bool] = None + +@strawberry.input +class ProjectSessionVideoUpdateInput: + id: GlobalID + title: Optional[str] = None + notes: Optional[str] = None + internal: Optional[bool] = None diff --git a/core/graphql/mutations/__init__.py b/core/graphql/mutations/__init__.py new file mode 100644 index 0000000..0c15cd9 --- /dev/null +++ b/core/graphql/mutations/__init__.py @@ -0,0 +1,18 @@ +from core.graphql.mutations.customer import * +from core.graphql.mutations.account import * +from core.graphql.mutations.profile import * +from core.graphql.mutations.project import * +from core.graphql.mutations.service import * +from core.graphql.mutations.labor import * +from core.graphql.mutations.revenue import * +from core.graphql.mutations.schedule import * +from core.graphql.mutations.invoice import * +from core.graphql.mutations.report import * +from core.graphql.mutations.account_punchlist import * +from core.graphql.mutations.project_punchlist import * +from core.graphql.mutations.scope import * +from core.graphql.mutations.scope_template import * +from core.graphql.mutations.project_scope import * +from core.graphql.mutations.project_scope_template import * +from core.graphql.mutations.session import * +from core.graphql.mutations.session_image import * \ No newline at end of file diff --git a/core/graphql/mutations/account.py b/core/graphql/mutations/account.py new file mode 100644 index 0000000..7b3602a --- /dev/null +++ b/core/graphql/mutations/account.py @@ -0,0 +1,188 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.account import ( + AccountInput, AccountUpdateInput, + AccountAddressInput, AccountAddressUpdateInput, + AccountContactInput, AccountContactUpdateInput, +) +from core.graphql.types.account import ( + AccountType, + AccountAddressType, + AccountContactType, +) +from core.models.account import Account, AccountAddress, AccountContact +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_account_created, publish_account_updated, publish_account_deleted, + publish_account_status_changed, + 
publish_account_address_created, publish_account_address_updated, publish_account_address_deleted,
+    publish_account_contact_created, publish_account_contact_updated, publish_account_contact_deleted,
+)
+
+
+@strawberry.type
+class Mutation:
+    @strawberry.mutation(description="Create a new account")
+    async def create_account(self, input: AccountInput, info: Info) -> AccountType:
+        instance = await create_object(input, Account)
+        await pubsub.publish("account_created", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_account_created(
+            account_id=str(instance.id),
+            triggered_by=profile,
+            metadata={'customer_id': str(instance.customer_id), 'status': instance.status, 'name': instance.name}
+        )
+
+        return cast(AccountType, instance)
+
+    @strawberry.mutation(description="Update an existing account")
+    async def update_account(self, input: AccountUpdateInput, info: Info) -> AccountType:
+        # Get old status for comparison
+        old_account = await database_sync_to_async(Account.objects.get)(pk=input.id.node_id)
+        old_status = old_account.status
+
+        instance = await update_object(input, Account)
+        await pubsub.publish("account_updated", instance.id)
+
+        # Publish events for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_account_updated(
+            account_id=str(instance.id),
+            triggered_by=profile,
+            metadata={'name': instance.name}
+        )
+
+        # Only publish a status change when a new status was actually provided
+        if input.status is not None and input.status != old_status:
+            await publish_account_status_changed(
+                account_id=str(instance.id),
+                old_status=old_status,
+                new_status=instance.status,
+                triggered_by=profile
+            )
+
+        return cast(AccountType, instance)
+
+    @strawberry.mutation(description="Delete an existing account")
+    async def delete_account(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+        instance = await delete_object(id, Account)
+        if not instance:
+            raise ValueError(f"Account with ID {id} does not exist")
+        await pubsub.publish("account_deleted", id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_account_deleted(
+            account_id=str(id),
+            triggered_by=profile,
+            metadata={'name': instance.name}
+        )
+
+        return id
+
+    @strawberry.mutation(description="Create a new account address")
+    async def create_account_address(
+        self, input: AccountAddressInput, info: Info
+    ) -> AccountAddressType:
+        instance = await create_object(input, AccountAddress)
+        await pubsub.publish("account_address_created", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_account_address_created(
+            address_id=str(instance.id),
+            account_id=str(instance.account_id),
+            triggered_by=profile
+        )
+
+        return cast(AccountAddressType, instance)
+
+    @strawberry.mutation(description="Update an existing account address")
+    async def update_account_address(
+        self, input: AccountAddressUpdateInput, info: Info
+    ) -> AccountAddressType:
+        instance = await update_object(input, AccountAddress)
+        await pubsub.publish("account_address_updated", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_account_address_updated(
+            address_id=str(instance.id),
+            account_id=str(instance.account_id),
+            triggered_by=profile
+        )
+
+        return cast(AccountAddressType, instance)
+
+    @strawberry.mutation(description="Delete an existing account address")
+    async def
delete_account_address(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, AccountAddress) + if not instance: + raise ValueError(f"AccountAddress with ID {id} does not exist") + await pubsub.publish("account_address_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_account_address_deleted( + address_id=str(id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new account contact") + async def create_account_contact( + self, input: AccountContactInput, info: Info + ) -> AccountContactType: + instance = await create_object(input, AccountContact) + await pubsub.publish("account_contact_created", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_account_contact_created( + contact_id=str(instance.id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return cast(AccountContactType, instance) + + @strawberry.mutation(description="Update an existing account contact") + async def update_account_contact( + self, input: AccountContactUpdateInput, info: Info + ) -> AccountContactType: + instance = await update_object(input, AccountContact) + await pubsub.publish("account_contact_updated", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_account_contact_updated( + contact_id=str(instance.id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return cast(AccountContactType, instance) + + @strawberry.mutation(description="Delete an existing account contact") + async def delete_account_contact(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, AccountContact) + if not instance: + raise ValueError(f"AccountContact with ID {id} does not exist") + await pubsub.publish("account_contact_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_account_contact_deleted( + contact_id=str(id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/account_punchlist.py b/core/graphql/mutations/account_punchlist.py new file mode 100644 index 0000000..04cf290 --- /dev/null +++ b/core/graphql/mutations/account_punchlist.py @@ -0,0 +1,114 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.account_punchlist import ( + AccountPunchlistInput, + AccountPunchlistUpdateInput, +) +from core.graphql.types.account_punchlist import AccountPunchlistType +from core.models.account_punchlist import AccountPunchlist +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_account_punchlist_created, + publish_account_punchlist_updated, + publish_account_punchlist_deleted, + publish_punchlist_status_changed, + publish_punchlist_priority_changed, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new account punchlist") + async def create_account_punchlist( + self, input: AccountPunchlistInput, info: Info + ) -> AccountPunchlistType: + instance = await create_object(input, AccountPunchlist) + await pubsub.publish(f"account_punchlist_created", 
instance.id)
+
+        # Get profile from request context
+        profile = getattr(info.context.request, 'profile', None)
+
+        # Publish event
+        await publish_account_punchlist_created(
+            punchlist_id=str(instance.id),
+            account_id=str(instance.account_id),
+            triggered_by=profile
+        )
+
+        return cast(AccountPunchlistType, instance)
+
+    @strawberry.mutation(description="Update an existing account punchlist")
+    async def update_account_punchlist(
+        self, input: AccountPunchlistUpdateInput, info: Info
+    ) -> AccountPunchlistType:
+        # Get old instance for comparison (resolve the relay GlobalID to the model pk)
+        old_instance = await database_sync_to_async(
+            AccountPunchlist.objects.get
+        )(pk=input.id.node_id)
+
+        # Update the instance
+        instance = await update_object(input, AccountPunchlist)
+        await pubsub.publish(f"account_punchlist_updated", instance.id)
+
+        # Get profile from request context
+        profile = getattr(info.context.request, 'profile', None)
+
+        # Publish update event
+        await publish_account_punchlist_updated(
+            punchlist_id=str(instance.id),
+            account_id=str(instance.account_id),
+            triggered_by=profile
+        )
+
+        # Check for status changes (if the model has status field)
+        if hasattr(old_instance, 'status') and hasattr(instance, 'status'):
+            if old_instance.status != instance.status:
+                await publish_punchlist_status_changed(
+                    punchlist_id=str(instance.id),
+                    entity_type='AccountPunchlist',
+                    old_status=old_instance.status,
+                    new_status=instance.status,
+                    triggered_by=profile
+                )
+
+        # Check for priority changes (if the model has priority field)
+        if hasattr(old_instance, 'priority') and hasattr(instance, 'priority'):
+            if old_instance.priority != instance.priority:
+                await publish_punchlist_priority_changed(
+                    punchlist_id=str(instance.id),
+                    entity_type='AccountPunchlist',
+                    old_priority=old_instance.priority,
+                    new_priority=instance.priority,
+                    triggered_by=profile
+                )
+
+        return cast(AccountPunchlistType, instance)
+
+    @strawberry.mutation(description="Delete an existing account punchlist")
+    async def delete_account_punchlist(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+        # Get instance before deletion to access account_id
+        instance = await database_sync_to_async(
+            AccountPunchlist.objects.get
+        )(id=id)
+
+        # Delete the instance
+        deleted_instance = await delete_object(id, AccountPunchlist)
+        if not deleted_instance:
+            raise ValueError(f"AccountPunchlist with ID {id} does not exist")
+
+        await pubsub.publish(f"account_punchlist_deleted", id)
+
+        # Get profile from request context
+        profile = getattr(info.context.request, 'profile', None)
+
+        # Publish delete event
+        await publish_account_punchlist_deleted(
+            punchlist_id=str(id),
+            account_id=str(instance.account_id),
+            triggered_by=profile
+        )
+
+        return id
diff --git a/core/graphql/mutations/customer.py b/core/graphql/mutations/customer.py
new file mode 100644
index 0000000..e0b16ca
--- /dev/null
+++ b/core/graphql/mutations/customer.py
@@ -0,0 +1,188 @@
+from typing import cast
+import strawberry
+from strawberry.types import Info
+from channels.db import database_sync_to_async
+from core.graphql.pubsub import pubsub
+from core.graphql.inputs.customer import (
+    CustomerInput, CustomerUpdateInput,
+    CustomerAddressInput, CustomerAddressUpdateInput,
+    CustomerContactInput, CustomerContactUpdateInput,
+)
+from core.graphql.types.customer import (
+    CustomerType,
+    CustomerAddressType,
+    CustomerContactType,
+)
+from core.models.customer import Customer, CustomerAddress, CustomerContact
+from core.graphql.utils import create_object, update_object, delete_object
+from core.services.events import (
+    publish_customer_created, publish_customer_updated, publish_customer_deleted,
+    publish_customer_status_changed,
+    publish_customer_address_created, publish_customer_address_updated, publish_customer_address_deleted,
+    publish_customer_contact_created, publish_customer_contact_updated, publish_customer_contact_deleted,
+)
+
+
+@strawberry.type
+class Mutation:
+    @strawberry.mutation(description="Create a new customer")
+    async def create_customer(self, input: CustomerInput, info: Info) -> CustomerType:
+        instance = await create_object(input, Customer)
+        await pubsub.publish(f"customer_created", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_customer_created(
+            customer_id=str(instance.id),
+            triggered_by=profile,
+            metadata={'status': instance.status, 'name': instance.name}
+        )
+
+        return cast(CustomerType, instance)
+
+    @strawberry.mutation(description="Update an existing customer")
+    async def update_customer(self, input: CustomerUpdateInput, info: Info) -> CustomerType:
+        # Get old status for comparison
+        old_customer = await database_sync_to_async(Customer.objects.get)(pk=input.id.node_id)
+        old_status = old_customer.status
+
+        instance = await update_object(input, Customer)
+        await pubsub.publish(f"customer_updated", instance.id)
+
+        # Publish events for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_customer_updated(
+            customer_id=str(instance.id),
+            triggered_by=profile,
+            metadata={'name': instance.name}
+        )
+
+        # Only publish a status change when a new status was actually provided
+        if input.status is not None and input.status != old_status:
+            await publish_customer_status_changed(
+                customer_id=str(instance.id),
+                old_status=old_status,
+                new_status=instance.status,
+                triggered_by=profile
+            )
+
+        return cast(CustomerType, instance)
+
+    @strawberry.mutation(description="Delete an existing customer")
+    async def delete_customer(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+        instance = await delete_object(id, Customer)
+        if not instance:
+            raise ValueError(f"Customer with ID {id} does not exist")
+        await pubsub.publish(f"customer_deleted", id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_customer_deleted(
+            customer_id=str(id),
+            triggered_by=profile,
+            metadata={'name': instance.name}
+        )
+
+        return id
+
+    @strawberry.mutation(description="Create a new customer address")
+    async def create_customer_address(
+        self, input: CustomerAddressInput, info: Info
+    ) -> CustomerAddressType:
+        instance = await create_object(input, CustomerAddress)
+        await pubsub.publish(f"customer_address_created", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_customer_address_created(
+            address_id=str(instance.id),
+            customer_id=str(instance.customer_id),
+            triggered_by=profile
+        )
+
+        return cast(CustomerAddressType, instance)
+
+    @strawberry.mutation(description="Update an existing customer address")
+    async def update_customer_address(
+        self, input: CustomerAddressUpdateInput, info: Info
+    ) -> CustomerAddressType:
+        instance = await update_object(input, CustomerAddress)
+        await pubsub.publish(f"customer_address_updated", instance.id)
+
+        # Publish event for notifications
+        profile = getattr(info.context.request, 'profile', None)
+        await publish_customer_address_updated(
+            address_id=str(instance.id),
+            customer_id=str(instance.customer_id),
+ triggered_by=profile + ) + + return cast(CustomerAddressType, instance) + + @strawberry.mutation(description="Delete an existing customer address") + async def delete_customer_address(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, CustomerAddress) + if not instance: + raise ValueError(f"CustomerAddress with ID {id} does not exist") + await pubsub.publish(f"customer_address_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_customer_address_deleted( + address_id=str(id), + customer_id=str(instance.customer_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new customer contact") + async def create_customer_contact( + self, input: CustomerContactInput, info: Info + ) -> CustomerContactType: + instance = await create_object(input, CustomerContact) + await pubsub.publish(f"customer_contact_created", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_customer_contact_created( + contact_id=str(instance.id), + customer_id=str(instance.customer_id), + triggered_by=profile + ) + + return cast(CustomerContactType, instance) + + @strawberry.mutation(description="Update an existing customer contact") + async def update_customer_contact( + self, input: CustomerContactUpdateInput, info: Info + ) -> CustomerContactType: + instance = await update_object(input, CustomerContact) + await pubsub.publish(f"customer_contact_updated", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_customer_contact_updated( + contact_id=str(instance.id), + customer_id=str(instance.customer_id), + triggered_by=profile + ) + + return cast(CustomerContactType, instance) + + @strawberry.mutation(description="Delete an existing customer contact") + async def delete_customer_contact(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, CustomerContact) + if not instance: + raise ValueError(f"CustomerContact with ID {id} does not exist") + await pubsub.publish(f"customer_contact_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_customer_contact_deleted( + contact_id=str(id), + customer_id=str(instance.customer_id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/event.py b/core/graphql/mutations/event.py new file mode 100644 index 0000000..dd77b27 --- /dev/null +++ b/core/graphql/mutations/event.py @@ -0,0 +1,265 @@ +import strawberry +from typing import List, Optional +from strawberry.types import Info +from strawberry.relay import GlobalID +from channels.db import database_sync_to_async +from django.contrib.contenttypes.models import ContentType +from django.utils import timezone + +from core.graphql.types.event import NotificationRuleType, NotificationType +from core.models.events import NotificationRule, Notification +from core.models.enums import ( + EventTypeChoices, + NotificationChannelChoices, + RoleChoices +) + + +@strawberry.input +class NotificationRuleInput: + """Input for creating a notification rule""" + name: str + description: Optional[str] = "" + event_types: List[EventTypeChoices] + channels: List[NotificationChannelChoices] + target_roles: Optional[List[RoleChoices]] = None + target_team_profile_ids: Optional[List[strawberry.ID]] = None + 
target_customer_profile_ids: Optional[List[strawberry.ID]] = None + is_active: Optional[bool] = True + template_subject: Optional[str] = "" + template_body: Optional[str] = "" + conditions: Optional[strawberry.scalars.JSON] = None + + +@strawberry.input +class NotificationRuleUpdateInput: + """Input for updating a notification rule""" + id: GlobalID + name: Optional[str] = None + description: Optional[str] = None + event_types: Optional[List[EventTypeChoices]] = None + channels: Optional[List[NotificationChannelChoices]] = None + target_roles: Optional[List[RoleChoices]] = None + target_team_profile_ids: Optional[List[strawberry.ID]] = None + target_customer_profile_ids: Optional[List[strawberry.ID]] = None + is_active: Optional[bool] = None + template_subject: Optional[str] = None + template_body: Optional[str] = None + conditions: Optional[strawberry.scalars.JSON] = None + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a notification rule (Admin only)") + async def create_notification_rule( + self, + info: Info, + input: NotificationRuleInput + ) -> NotificationRuleType: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Only admins can create notification rules + from core.models.profile import TeamProfile + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + # Prepare data + data = { + 'name': input.name, + 'description': input.description or '', + 'event_types': input.event_types, + 'channels': input.channels, + 'target_roles': input.target_roles or [], + 'is_active': input.is_active if input.is_active is not None else True, + 'template_subject': input.template_subject or '', + 'template_body': input.template_body or '', + 'conditions': input.conditions or {}, + } + + # Create rule + rule = await database_sync_to_async(NotificationRule.objects.create)(**data) + + # Set M2M relationships + if input.target_team_profile_ids: + await database_sync_to_async( + lambda: rule.target_team_profiles.set(input.target_team_profile_ids) + )() + + if input.target_customer_profile_ids: + await database_sync_to_async( + lambda: rule.target_customer_profiles.set(input.target_customer_profile_ids) + )() + + return rule + + @strawberry.mutation(description="Update a notification rule (Admin only)") + async def update_notification_rule( + self, + info: Info, + input: NotificationRuleUpdateInput + ) -> NotificationRuleType: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Only admins can update notification rules + from core.models.profile import TeamProfile + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + # Get rule + rule = await database_sync_to_async(NotificationRule.objects.get)(pk=input.id.node_id) + + # Update fields + update_fields = [] + if input.name is not None: + rule.name = input.name + update_fields.append('name') + + if input.description is not None: + rule.description = input.description + update_fields.append('description') + + if input.event_types is not None: + rule.event_types = input.event_types + update_fields.append('event_types') + + if input.channels is not None: + rule.channels = input.channels + update_fields.append('channels') + + if input.target_roles is not None: + rule.target_roles = input.target_roles + 
update_fields.append('target_roles') + + if input.is_active is not None: + rule.is_active = input.is_active + update_fields.append('is_active') + + if input.template_subject is not None: + rule.template_subject = input.template_subject + update_fields.append('template_subject') + + if input.template_body is not None: + rule.template_body = input.template_body + update_fields.append('template_body') + + if input.conditions is not None: + rule.conditions = input.conditions + update_fields.append('conditions') + + if update_fields: + update_fields.append('updated_at') + await database_sync_to_async(rule.save)(update_fields=update_fields) + + # Update M2M relationships + if input.target_team_profile_ids is not None: + await database_sync_to_async( + lambda: rule.target_team_profiles.set(input.target_team_profile_ids) + )() + + if input.target_customer_profile_ids is not None: + await database_sync_to_async( + lambda: rule.target_customer_profiles.set(input.target_customer_profile_ids) + )() + + return rule + + @strawberry.mutation(description="Delete a notification rule (Admin only)") + async def delete_notification_rule( + self, + info: Info, + id: strawberry.ID + ) -> strawberry.ID: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Only admins can delete notification rules + from core.models.profile import TeamProfile + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + rule = await database_sync_to_async(NotificationRule.objects.get)(pk=id) + await database_sync_to_async(rule.delete)() + + return id + + @strawberry.mutation(description="Mark notification as read") + async def mark_notification_as_read( + self, + info: Info, + id: strawberry.ID + ) -> NotificationType: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Get notification + notification = await database_sync_to_async( + lambda: Notification.objects.select_related('event', 'rule', 'recipient_content_type').get(pk=id) + )() + + # Verify user has access to this notification + content_type = await database_sync_to_async(ContentType.objects.get_for_model)(profile) + + if (notification.recipient_content_type != content_type or + str(notification.recipient_object_id) != str(profile.id)): + raise PermissionError("Not authorized to mark this notification as read") + + # Mark as read + await database_sync_to_async(lambda: notification.mark_as_read())() + + return notification + + @strawberry.mutation(description="Mark all notifications as read for current user") + async def mark_all_notifications_as_read(self, info: Info) -> int: + profile = getattr(info.context.request, 'profile', None) + if not profile: + return 0 + + # Get content type for the profile + content_type = await database_sync_to_async(ContentType.objects.get_for_model)(profile) + + # Update all unread notifications + from core.models.enums import NotificationStatusChoices + + count = await database_sync_to_async( + lambda: Notification.objects.filter( + recipient_content_type=content_type, + recipient_object_id=profile.id, + read_at__isnull=True + ).update( + read_at=timezone.now(), + status=NotificationStatusChoices.READ + ) + )() + + return count + + @strawberry.mutation(description="Delete a notification") + async def delete_notification( + self, + info: Info, + id: strawberry.ID + ) -> strawberry.ID: + profile = 
getattr(info.context.request, 'profile', None)
+ if not profile:
+ raise PermissionError("Authentication required")
+
+ # Get notification and verify access
+ @database_sync_to_async
+ def get_and_verify():
+ notification = Notification.objects.get(pk=id)
+ content_type = ContentType.objects.get_for_model(type(profile))
+
+ if (notification.recipient_content_type != content_type or
+ str(notification.recipient_object_id) != str(profile.id)):
+ raise PermissionError("Not authorized to delete this notification")
+
+ notification.delete()
+ return id
+
+ return await get_and_verify()
diff --git a/core/graphql/mutations/invoice.py b/core/graphql/mutations/invoice.py
new file mode 100644
index 0000000..e50bcfc
--- /dev/null
+++ b/core/graphql/mutations/invoice.py
@@ -0,0 +1,105 @@
+from typing import cast
+import strawberry
+from strawberry.types import Info
+from core.graphql.pubsub import pubsub
+from core.graphql.inputs.invoice import InvoiceInput, InvoiceUpdateInput
+from core.graphql.types.invoice import InvoiceType
+from core.models.invoice import Invoice
+from core.models.enums import InvoiceChoices, EventTypeChoices
+from core.graphql.utils import create_object, update_object, delete_object
+from core.services.events import publish_invoice_generated, publish_invoice_paid, EventPublisher
+
+
+@strawberry.type
+class Mutation:
+ @strawberry.mutation(description="Create a new invoice")
+ async def create_invoice(self, input: InvoiceInput, info: Info) -> InvoiceType:
+ # Exclude m2m id fields from model constructor
+ payload = {k: v for k, v in input.__dict__.items() if k not in {"project_ids", "revenue_ids"}}
+ m2m_data = {
+ "projects": input.project_ids,
+ "revenues": input.revenue_ids,
+ }
+ instance = await create_object(payload, Invoice, m2m_data)
+ await pubsub.publish("invoice_created", instance.id)
+
+ # Publish event for notifications (invoice creation = invoice generated)
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_invoice_generated(
+ invoice_id=str(instance.id),
+ triggered_by=profile,
+ metadata={'customer_id': str(instance.customer_id), 'status': instance.status}
+ )
+
+ return cast(InvoiceType, instance)
+
+ @strawberry.mutation(description="Update an existing invoice")
+ async def update_invoice(self, input: InvoiceUpdateInput, info: Info) -> InvoiceType:
+ # Get old invoice to check for status changes
+ from channels.db import database_sync_to_async
+ old_invoice = await database_sync_to_async(Invoice.objects.get)(pk=input.id.node_id)
+ old_status = old_invoice.status
+
+ # Keep id and non-m2m fields; drop m2m *_ids from the update payload
+ payload = {k: v for k, v in input.__dict__.items() if k not in {"project_ids", "revenue_ids"}}
+ m2m_data = {
+ "projects": getattr(input, "project_ids", None),
+ "revenues": getattr(input, "revenue_ids", None),
+ }
+ instance = await update_object(payload, Invoice, m2m_data)
+ await pubsub.publish("invoice_updated", instance.id)
+
+ # Publish events for notifications
+ profile = getattr(info.context.request, 'profile', None)
+
+ # Check if status changed
+ if hasattr(input, 'status') and input.status and input.status != old_status:
+ # Map the new status to its event type; statuses without a matching
+ # event (e.g. a move back to draft) publish nothing instead of
+ # publishing with event_type=None
+ status_event_map = {
+ InvoiceChoices.SENT: EventTypeChoices.INVOICE_SENT,
+ InvoiceChoices.PAID: EventTypeChoices.INVOICE_PAID,
+ InvoiceChoices.OVERDUE: EventTypeChoices.INVOICE_OVERDUE,
+ InvoiceChoices.CANCELLED: EventTypeChoices.INVOICE_CANCELLED,
+ }
+ event_type = status_event_map.get(input.status)
+ if event_type is not None:
+ await EventPublisher.publish(
+ event_type=event_type,
+ entity_type='Invoice',
+ entity_id=str(instance.id),
+ triggered_by=profile,
+ metadata={'old_status': old_status, 'new_status': instance.status, 'customer_id': str(instance.customer_id)}
+ )
+
+ # Special handling for paid invoices
+ if instance.status == InvoiceChoices.PAID:
+ await publish_invoice_paid(
+ invoice_id=str(instance.id),
+ triggered_by=profile,
+ metadata={'customer_id': str(instance.customer_id), 'amount': str(instance.amount)}
+ )
+
+ return cast(InvoiceType, instance)
+
+ @strawberry.mutation(description="Delete an existing invoice")
+ async def delete_invoice(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+ # Get invoice before deletion to access customer_id for event
+ from channels.db import database_sync_to_async
+ from core.graphql.utils import _decode_global_id
+ pk = _decode_global_id(id)
+ invoice = await database_sync_to_async(Invoice.objects.get)(pk=pk)
+ customer_id = str(invoice.customer_id)
+
+ instance = await delete_object(id, Invoice)
+ if not instance:
+ raise ValueError(f"Invoice with ID {id} does not exist")
+ await pubsub.publish("invoice_deleted", id)
+
+ # Publish event for notifications (deletion treated as cancellation)
+ profile = getattr(info.context.request, 'profile', None)
+ await EventPublisher.publish(
+ event_type=EventTypeChoices.INVOICE_CANCELLED,
+ entity_type='Invoice',
+ entity_id=str(id),
+ triggered_by=profile,
+ metadata={'customer_id': customer_id, 'action': 'deleted'}
+ )
+
+ return id
diff --git a/core/graphql/mutations/labor.py b/core/graphql/mutations/labor.py
new file mode 100644
index 0000000..000728d
--- /dev/null
+++ b/core/graphql/mutations/labor.py
@@ -0,0 +1,58 @@
+from typing import cast
+import strawberry
+from strawberry.types import Info
+from core.graphql.pubsub import pubsub
+from core.graphql.inputs.labor import LaborInput, LaborUpdateInput
+from core.graphql.types.labor import LaborType
+from core.models.labor import Labor
+from core.graphql.utils import create_object, update_object, delete_object
+from core.services.events import (
+ publish_labor_rate_created, publish_labor_rate_updated, publish_labor_rate_deleted,
+)
+
+
+@strawberry.type
+class Mutation:
+ @strawberry.mutation(description="Create a new labor rate")
+ async def create_labor(self, input: LaborInput, info: Info) -> LaborType:
+ instance = await create_object(input, Labor)
+ await pubsub.publish("labor_created", instance.id)
+
+ # Publish event for notifications
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_labor_rate_created(
+ rate_id=str(instance.id),
+ triggered_by=profile
+ )
+
+ return cast(LaborType, instance)
+
+ @strawberry.mutation(description="Update an existing labor rate")
+ async def update_labor(self, input: LaborUpdateInput, info: Info) -> LaborType:
+ instance = await update_object(input, Labor)
+ await pubsub.publish("labor_updated", instance.id)
+
+ # Publish event for notifications
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_labor_rate_updated(
+ rate_id=str(instance.id),
+ triggered_by=profile
+ )
+
+ return cast(LaborType, instance)
+
+ @strawberry.mutation(description="Delete an existing labor rate")
+ async def delete_labor(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+ instance = await delete_object(id, Labor)
+ if not instance:
+ raise ValueError(f"Labor with ID {id} does not exist")
+ await pubsub.publish("labor_deleted", id)
+
+ # Publish event for 
notifications + profile = getattr(info.context.request, 'profile', None) + await publish_labor_rate_deleted( + rate_id=str(id), + triggered_by=profile + ) + + return id \ No newline at end of file diff --git a/core/graphql/mutations/messaging.py b/core/graphql/mutations/messaging.py new file mode 100644 index 0000000..d46e161 --- /dev/null +++ b/core/graphql/mutations/messaging.py @@ -0,0 +1,513 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from strawberry.relay import GlobalID +from channels.db import database_sync_to_async +from django.utils import timezone +from django.contrib.contenttypes.models import ContentType +import json + +from core.graphql.pubsub import pubsub +from core.graphql.inputs.messaging import ( + ConversationInput, + ConversationUpdateInput, + MessageInput, + MessageUpdateInput, + AddParticipantInput, + RemoveParticipantInput, + MarkAsReadInput, + ArchiveConversationInput, + MuteConversationInput, +) +from core.graphql.types.messaging import ConversationType, MessageType, ConversationParticipantType +from core.models.messaging import Conversation, Message, ConversationParticipant, MessageReadReceipt +from core.models.profile import TeamProfile, CustomerProfile +from core.models.enums import EventTypeChoices +from core.services.events import EventPublisher + + +def is_admin_profile(profile) -> bool: + """Check if the profile is the admin profile""" + from django.conf import settings + return str(profile.id) == settings.DISPATCH_TEAM_PROFILE_ID + + +@database_sync_to_async +def get_profile_from_id(participant_id: str): + """Helper to get TeamProfile or CustomerProfile from GlobalID""" + # Try TeamProfile first + try: + return TeamProfile.objects.get(pk=participant_id) + except TeamProfile.DoesNotExist: + pass + + # Try CustomerProfile + try: + return CustomerProfile.objects.get(pk=participant_id) + except CustomerProfile.DoesNotExist: + raise ValueError(f"Profile with ID {participant_id} not found") + + +@database_sync_to_async +def get_entity_from_type_and_id(entity_type: str, entity_id: str): + """Helper to get entity (Project, Service, etc.) from type and ID""" + from django.apps import apps + + try: + model = apps.get_model('core', entity_type) + return model.objects.get(pk=entity_id) + except Exception as e: + raise ValueError(f"Entity {entity_type} with ID {entity_id} not found: {e}") + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new conversation") + async def create_conversation(self, input: ConversationInput, info: Info) -> ConversationType: + """ + Create a new conversation with participants and optional entity link. 
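+
+ Example (illustrative only: the camelCase field names assume the default
+ Strawberry name conversion, and the ID/enum values are placeholders):
+
+ mutation {
+ createConversation(input: {
+ subject: "Kickoff questions"
+ conversationType: "DIRECT"
+ participantIds: ["<participant GlobalID>"]
+ }) { id subject }
+ }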
+ """ + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + # Parse metadata if provided + metadata = json.loads(input.metadata) if input.metadata else {} + + # Create conversation + @database_sync_to_async + def create(): + # Get creator content type inside sync context + creator_content_type = ContentType.objects.get_for_model(type(profile)) + + conversation = Conversation.objects.create( + subject=input.subject, + conversation_type=input.conversation_type, + created_by_content_type=creator_content_type, + created_by_object_id=profile.id, + metadata=metadata + ) + + # Link to entity if provided + if input.entity_type and input.entity_id: + from django.apps import apps + try: + model = apps.get_model('core', input.entity_type) + content_type = ContentType.objects.get_for_model(model) + conversation.entity_content_type = content_type + # Extract UUID from GlobalID + conversation.entity_object_id = input.entity_id.node_id + conversation.save() + except Exception: + pass + + # Add creator as a participant first + ConversationParticipant.objects.create( + conversation=conversation, + participant_content_type=creator_content_type, + participant_object_id=profile.id + ) + + # Add other participants + for participant_id in input.participant_ids: + # Extract UUID from GlobalID + uuid = participant_id.node_id + try: + participant = TeamProfile.objects.get(pk=uuid) + content_type = ContentType.objects.get_for_model(TeamProfile) + except TeamProfile.DoesNotExist: + try: + participant = CustomerProfile.objects.get(pk=uuid) + content_type = ContentType.objects.get_for_model(CustomerProfile) + except CustomerProfile.DoesNotExist: + continue + + # Skip if this participant is the creator (already added) + if content_type == creator_content_type and participant.id == profile.id: + continue + + ConversationParticipant.objects.create( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=participant.id + ) + + return conversation + + instance = await create() + await pubsub.publish("conversation_created", instance.id) + + # Publish event + await EventPublisher.publish( + event_type=EventTypeChoices.CONVERSATION_CREATED, + entity_type='Conversation', + entity_id=str(instance.id), + triggered_by=profile, + metadata={'subject': instance.subject, 'type': instance.conversation_type} + ) + + return cast(ConversationType, instance) + + @strawberry.mutation(description="Update a conversation") + async def update_conversation(self, input: ConversationUpdateInput, info: Info) -> ConversationType: + """Update conversation details""" + profile = getattr(info.context.request, 'profile', None) + + @database_sync_to_async + def update(): + conversation = Conversation.objects.get(pk=input.id.node_id) + + if input.subject is not None: + conversation.subject = input.subject + if input.is_archived is not None: + conversation.is_archived = input.is_archived + if input.metadata is not None: + conversation.metadata = json.loads(input.metadata) + + conversation.save() + return conversation + + instance = await update() + await pubsub.publish("conversation_updated", instance.id) + + if input.is_archived: + await EventPublisher.publish( + event_type=EventTypeChoices.CONVERSATION_ARCHIVED, + entity_type='Conversation', + entity_id=str(instance.id), + triggered_by=profile + ) + + return cast(ConversationType, instance) + + @strawberry.mutation(description="Send a message in a conversation") + async def 
send_message(self, input: MessageInput, info: Info) -> MessageType: + """ + Send a new message in a conversation. + Updates unread counts for other participants. + """ + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + # Parse attachments and metadata + attachments = json.loads(input.attachments) if input.attachments else [] + metadata = json.loads(input.metadata) if input.metadata else {} + + @database_sync_to_async + def create(): + # Get sender content type inside sync context + sender_content_type = ContentType.objects.get_for_model(type(profile)) + # Extract UUIDs from GlobalIDs + conversation_uuid = input.conversation_id.node_id + reply_to_uuid = input.reply_to_id.node_id if input.reply_to_id else None + + # Create message + message = Message.objects.create( + conversation_id=conversation_uuid, + sender_content_type=sender_content_type, + sender_object_id=profile.id, + body=input.body, + reply_to_id=reply_to_uuid, + attachments=attachments, + metadata=metadata + ) + + # Update conversation last_message_at + conversation = message.conversation + conversation.last_message_at = message.created_at + conversation.save(update_fields=['last_message_at', 'updated_at']) + + # Increment unread count for all participants except sender + participants = ConversationParticipant.objects.filter( + conversation=conversation + ).exclude( + participant_content_type=sender_content_type, + participant_object_id=profile.id + ) + + for participant in participants: + participant.unread_count += 1 + participant.save(update_fields=['unread_count', 'updated_at']) + + return message + + instance = await create() + await pubsub.publish("message_sent", { + "message_id": instance.id, + "conversation_id": str(input.conversation_id) + }) + + # Publish event + await EventPublisher.publish( + event_type=EventTypeChoices.MESSAGE_SENT, + entity_type='Message', + entity_id=str(instance.id), + triggered_by=profile, + metadata={ + 'conversation_id': str(input.conversation_id), + 'body_preview': instance.body[:100] + } + ) + + return cast(MessageType, instance) + + @strawberry.mutation(description="Mark conversation as read") + async def mark_conversation_as_read(self, input: MarkAsReadInput, info: Info) -> ConversationType: + """ + Mark all messages in a conversation as read for the current user. + Resets unread count to 0. 
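+
+ Example (illustrative only; the ID value is a placeholder):
+
+ mutation {
+ markConversationAsRead(input: { conversationId: "<conversation GlobalID>" }) {
+ id
+ }
+ }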
+ """ + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + content_type = ContentType.objects.get_for_model(type(profile)) + + @database_sync_to_async + def mark_read(): + conversation = Conversation.objects.get(pk=input.conversation_id.node_id) + + # Update participant record + participant = ConversationParticipant.objects.get( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=profile.id + ) + participant.last_read_at = timezone.now() + participant.unread_count = 0 + participant.save(update_fields=['last_read_at', 'unread_count', 'updated_at']) + + # Create read receipts for unread messages + messages = Message.objects.filter( + conversation=conversation, + created_at__gt=participant.last_read_at or timezone.now() + ).exclude( + sender_content_type=content_type, + sender_object_id=profile.id + ) + + for message in messages: + MessageReadReceipt.objects.get_or_create( + message=message, + reader_content_type=content_type, + reader_object_id=profile.id + ) + + return conversation + + instance = await mark_read() + await pubsub.publish("conversation_read", { + "conversation_id": instance.id, + "participant_id": str(profile.id) + }) + + return cast(ConversationType, instance) + + @strawberry.mutation(description="Archive or unarchive a conversation") + async def archive_conversation(self, input: ArchiveConversationInput, info: Info) -> ConversationType: + """Archive or unarchive a conversation for the current user""" + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + content_type = ContentType.objects.get_for_model(type(profile)) + + @database_sync_to_async + def archive(): + conversation = Conversation.objects.get(pk=input.conversation_id.node_id) + + participant = ConversationParticipant.objects.get( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=profile.id + ) + participant.is_archived = input.is_archived + participant.save(update_fields=['is_archived', 'updated_at']) + + return conversation + + instance = await archive() + return cast(ConversationType, instance) + + @strawberry.mutation(description="Mute or unmute a conversation") + async def mute_conversation(self, input: MuteConversationInput, info: Info) -> ConversationType: + """Mute or unmute notifications for a conversation""" + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + content_type = ContentType.objects.get_for_model(type(profile)) + + @database_sync_to_async + def mute(): + conversation = Conversation.objects.get(pk=input.conversation_id.node_id) + + participant = ConversationParticipant.objects.get( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=profile.id + ) + participant.is_muted = input.is_muted + participant.save(update_fields=['is_muted', 'updated_at']) + + return conversation + + instance = await mute() + return cast(ConversationType, instance) + + @strawberry.mutation(description="Add a participant to a conversation") + async def add_participant(self, input: AddParticipantInput, info: Info) -> ConversationParticipantType: + """Add a new participant to an existing conversation""" + profile = getattr(info.context.request, 'profile', None) + + @database_sync_to_async + def add(): + conversation = 
Conversation.objects.get(pk=input.conversation_id.node_id) + + # Get participant profile + participant_uuid = input.participant_id.node_id + try: + participant = TeamProfile.objects.get(pk=participant_uuid) + content_type = ContentType.objects.get_for_model(TeamProfile) + except TeamProfile.DoesNotExist: + participant = CustomerProfile.objects.get(pk=participant_uuid) + content_type = ContentType.objects.get_for_model(CustomerProfile) + + # Create participant record + conv_participant, created = ConversationParticipant.objects.get_or_create( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=participant.id + ) + + return conv_participant + + instance = await add() + await pubsub.publish("participant_added", { + "conversation_id": str(input.conversation_id), + "participant_id": str(input.participant_id) + }) + + # Publish event + await EventPublisher.publish( + event_type=EventTypeChoices.CONVERSATION_PARTICIPANT_ADDED, + entity_type='Conversation', + entity_id=str(input.conversation_id), + triggered_by=profile, + metadata={'participant_id': str(input.participant_id)} + ) + + return cast(ConversationParticipantType, instance) + + @strawberry.mutation(description="Remove a participant from a conversation") + async def remove_participant(self, input: RemoveParticipantInput, info: Info) -> strawberry.ID: + """Remove a participant from a conversation""" + profile = getattr(info.context.request, 'profile', None) + + @database_sync_to_async + def remove(): + conversation = Conversation.objects.get(pk=input.conversation_id.node_id) + + # Get participant profile + participant_uuid = input.participant_id.node_id + try: + participant = TeamProfile.objects.get(pk=participant_uuid) + content_type = ContentType.objects.get_for_model(TeamProfile) + except TeamProfile.DoesNotExist: + participant = CustomerProfile.objects.get(pk=participant_uuid) + content_type = ContentType.objects.get_for_model(CustomerProfile) + + # Delete participant record + ConversationParticipant.objects.filter( + conversation=conversation, + participant_content_type=content_type, + participant_object_id=participant.id + ).delete() + + return conversation.id + + conversation_id = await remove() + await pubsub.publish("participant_removed", { + "conversation_id": str(input.conversation_id), + "participant_id": str(input.participant_id) + }) + + # Publish event + await EventPublisher.publish( + event_type=EventTypeChoices.CONVERSATION_PARTICIPANT_REMOVED, + entity_type='Conversation', + entity_id=str(input.conversation_id), + triggered_by=profile, + metadata={'participant_id': str(input.participant_id)} + ) + + return input.conversation_id + + @strawberry.mutation(description="Delete a conversation") + async def delete_conversation(self, id: GlobalID, info: Info) -> strawberry.ID: + """Delete a conversation (only by creator or admin)""" + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise ValueError("User must be authenticated") + + content_type = ContentType.objects.get_for_model(type(profile)) + + @database_sync_to_async + def delete(): + conversation = Conversation.objects.get(pk=id.node_id) + + # Check if user is the creator OR the admin profile + is_creator = (conversation.created_by_content_type == content_type and + conversation.created_by_object_id == profile.id) + + if not (is_creator or is_admin_profile(profile)): + raise PermissionError("Only the conversation creator or admin can delete it") + + conversation.delete() + return id + + conversation_id = 
await delete()
+ await pubsub.publish("conversation_deleted", str(conversation_id))
+
+ return conversation_id
+
+ @strawberry.mutation(description="Delete a message")
+ async def delete_message(self, id: GlobalID, info: Info) -> strawberry.ID:
+ """Delete a message (only by sender or admin)"""
+ profile = getattr(info.context.request, 'profile', None)
+ if not profile:
+ raise ValueError("User must be authenticated")
+
+ content_type = ContentType.objects.get_for_model(type(profile))
+
+ @database_sync_to_async
+ def delete():
+ message = Message.objects.get(pk=id.node_id)
+
+ # Check if user is the sender OR the admin profile
+ is_sender = (message.sender_object_id == profile.id and
+ message.sender_content_type == content_type)
+
+ if not (is_sender or is_admin_profile(profile)):
+ raise PermissionError("You can only delete your own messages or be an admin")
+
+ conversation_id = message.conversation_id
+ message.delete()
+ return conversation_id
+
+ conversation_id = await delete()
+ await pubsub.publish("message_deleted", {
+ "message_id": str(id),
+ "conversation_id": str(conversation_id)
+ })
+
+ # Publish event
+ await EventPublisher.publish(
+ event_type=EventTypeChoices.MESSAGE_DELETED,
+ entity_type='Message',
+ entity_id=str(id),
+ triggered_by=profile
+ )
+
+ return id
diff --git a/core/graphql/mutations/profile.py b/core/graphql/mutations/profile.py
new file mode 100644
index 0000000..de5ed56
--- /dev/null
+++ b/core/graphql/mutations/profile.py
@@ -0,0 +1,131 @@
+from typing import cast
+import strawberry
+from strawberry.types import Info
+from channels.db import database_sync_to_async
+from core.graphql.pubsub import pubsub
+from core.graphql.inputs.profile import (
+ CustomerProfileInput,
+ CustomerProfileUpdateInput,
+ TeamProfileInput,
+ TeamProfileUpdateInput,
+)
+from core.graphql.types.profile import CustomerProfileType, TeamProfileType
+from core.models.profile import CustomerProfile, TeamProfile
+from core.graphql.utils import create_object, update_object, delete_object
+from core.services.events import (
+ publish_team_profile_created, publish_team_profile_updated, publish_team_profile_deleted,
+ publish_team_profile_role_changed,
+ publish_customer_profile_created, publish_customer_profile_updated, publish_customer_profile_deleted,
+ publish_customer_profile_access_granted, publish_customer_profile_access_revoked,
+)
+
+
+@strawberry.type
+class Mutation:
+ @strawberry.mutation(description="Create a new customer profile")
+ async def create_customer_profile(
+ self, input: CustomerProfileInput, info: Info
+ ) -> CustomerProfileType:
+ m2m_data = {"customers": input.customer_ids} if input.customer_ids else None
+ instance = await create_object(input, CustomerProfile, m2m_data)
+ await pubsub.publish("customer_profile_created", instance.id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_customer_profile_created(str(instance.id), triggered_by=profile)
+
+ return cast(CustomerProfileType, instance)
+
+ @strawberry.mutation(description="Update an existing customer profile")
+ async def update_customer_profile(
+ self, input: CustomerProfileUpdateInput, info: Info
+ ) -> CustomerProfileType:
+ # Get old profile to detect customer access changes
+ old_profile = await database_sync_to_async(CustomerProfile.objects.get)(pk=input.id.node_id)
+ old_customer_ids = set(str(cid) for cid in await database_sync_to_async(list)(
+ old_profile.customers.values_list('id', flat=True)
+ ))
+
+ # Use `is not None` so an explicit empty list still clears the m2m set
+ m2m_data = {"customers": input.customer_ids} if input.customer_ids is not None else None
+ instance = await update_object(input, CustomerProfile, m2m_data)
+ await pubsub.publish("customer_profile_updated", instance.id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_customer_profile_updated(str(instance.id), triggered_by=profile)
+
+ # Detect customer access changes
+ if input.customer_ids is not None:
+ new_customer_ids = set(str(cid) for cid in input.customer_ids)
+
+ # Newly granted access
+ for customer_id in new_customer_ids - old_customer_ids:
+ await publish_customer_profile_access_granted(
+ str(instance.id), customer_id, triggered_by=profile
+ )
+
+ # Revoked access
+ for customer_id in old_customer_ids - new_customer_ids:
+ await publish_customer_profile_access_revoked(
+ str(instance.id), customer_id, triggered_by=profile
+ )
+
+ return cast(CustomerProfileType, instance)
+
+ @strawberry.mutation(description="Delete an existing customer profile")
+ async def delete_customer_profile(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+ instance = await delete_object(id, CustomerProfile)
+ if not instance:
+ raise ValueError(f"CustomerProfile with ID {id} does not exist")
+ await pubsub.publish("customer_profile_deleted", id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_customer_profile_deleted(str(id), triggered_by=profile)
+
+ return id
+
+ @strawberry.mutation(description="Create a new team profile")
+ async def create_team_profile(self, input: TeamProfileInput, info: Info) -> TeamProfileType:
+ instance = await create_object(input, TeamProfile)
+ await pubsub.publish("team_profile_created", instance.id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_team_profile_created(str(instance.id), triggered_by=profile)
+
+ return cast(TeamProfileType, instance)
+
+ @strawberry.mutation(description="Update an existing team profile")
+ async def update_team_profile(self, input: TeamProfileUpdateInput, info: Info) -> TeamProfileType:
+ # Get old profile to detect role changes
+ old_profile = await database_sync_to_async(TeamProfile.objects.get)(pk=input.id.node_id)
+ old_role = old_profile.role
+
+ instance = await update_object(input, TeamProfile)
+ await pubsub.publish("team_profile_updated", instance.id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_team_profile_updated(str(instance.id), triggered_by=profile)
+
+ # Check for role change
+ if input.role is not None and input.role != old_role:
+ await publish_team_profile_role_changed(
+ str(instance.id), old_role, input.role, triggered_by=profile
+ )
+
+ return cast(TeamProfileType, instance)
+
+ @strawberry.mutation(description="Delete an existing team profile")
+ async def delete_team_profile(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+ instance = await delete_object(id, TeamProfile)
+ if not instance:
+ raise ValueError(f"TeamProfile with ID {id} does not exist")
+ await pubsub.publish("team_profile_deleted", id)
+
+ # Publish event
+ profile = getattr(info.context.request, 'profile', None)
+ await publish_team_profile_deleted(str(id), triggered_by=profile)
+
+ return id
diff --git a/core/graphql/mutations/project.py b/core/graphql/mutations/project.py
new file mode 100644
index 0000000..ff5cfac
--- /dev/null
+++ b/core/graphql/mutations/project.py
@@ -0,0 +1,188 @@
+from typing import cast
+import strawberry
+from strawberry.types import Info
+from 
asgiref.sync import sync_to_async +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.project import ProjectInput, ProjectUpdateInput +from core.graphql.types.project import ProjectType +from core.models.account import AccountAddress +from core.models.profile import TeamProfile +from core.models.project import Project +from core.models.enums import ServiceChoices +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_project_created, publish_project_status_changed, + publish_project_completed, publish_project_dispatched, + publish_project_deleted, +) + + +# Helper to get admin profile +async def _get_admin_profile(): + return await sync_to_async( + lambda: TeamProfile.objects.filter(role='ADMIN').first() + )() + + +# Helper to check if admin is in team member IDs (handles GlobalID objects) +def _admin_in_team_members(admin_id, team_member_ids): + if not team_member_ids or not admin_id: + return False + # team_member_ids may be GlobalID objects with .node_id attribute + member_uuids = [] + for mid in team_member_ids: + if hasattr(mid, 'node_id'): + member_uuids.append(str(mid.node_id)) + else: + member_uuids.append(str(mid)) + return str(admin_id) in member_uuids + + +# Helper to get old team member IDs from instance +async def _get_old_team_member_ids(instance): + return await sync_to_async( + lambda: set(str(m.id) for m in instance.team_members.all()) + )() + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new project") + async def create_project(self, input: ProjectInput, info: Info) -> ProjectType: + # Exclude m2m id fields from model constructor + payload = {k: v for k, v in input.__dict__.items() if k not in {"team_member_ids"}} + m2m_data = {"team_members": input.team_member_ids} + instance = await create_object(payload, Project, m2m_data) + await pubsub.publish("project_created", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_project_created( + project_id=str(instance.id), + triggered_by=profile, + metadata={ + 'status': instance.status, + 'customer_id': str(instance.customer_id), + 'name': instance.name, + 'date': str(instance.date) + } + ) + + # Check if project was dispatched (admin in team members) + admin = await _get_admin_profile() + if admin and _admin_in_team_members(admin.id, input.team_member_ids): + # Build metadata + account_address_id = None + account_name = None + if instance.account_address_id: + account_address_id = str(instance.account_address_id) + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_name = account_address.account.name if account_address.account else None + + await publish_project_dispatched( + project_id=str(instance.id), + triggered_by=profile, + metadata={ + 'project_id': str(instance.id), + 'project_name': instance.name, + 'customer_id': str(instance.customer_id), + 'account_address_id': account_address_id, + 'account_name': account_name, + 'date': str(instance.date), + 'status': instance.status + } + ) + + return cast(ProjectType, instance) + + @strawberry.mutation(description="Update an existing project") + async def update_project(self, input: ProjectUpdateInput, info: Info) -> ProjectType: + # Get old project to check for status changes + old_project = await 
database_sync_to_async(Project.objects.get)(pk=input.id.node_id) + old_status = old_project.status + + # Get old team member IDs before update (for dispatched detection) + old_team_member_ids = await _get_old_team_member_ids(old_project) + + # Keep id and non-m2m fields; drop m2m *_ids from the update payload + payload = {k: v for k, v in input.__dict__.items() if k not in {"team_member_ids"}} + m2m_data = {"team_members": getattr(input, "team_member_ids", None)} + instance = await update_object(payload, Project, m2m_data) + await pubsub.publish("project_updated", instance.id) + + # Publish events for notifications + profile = getattr(info.context.request, 'profile', None) + + # Check if status changed + if hasattr(input, 'status') and input.status and input.status != old_status: + await publish_project_status_changed( + project_id=str(instance.id), + old_status=old_status, + new_status=instance.status, + triggered_by=profile + ) + + # Check if project was completed + if instance.status == ServiceChoices.COMPLETED: + await publish_project_completed( + project_id=str(instance.id), + triggered_by=profile, + metadata={ + 'customer_id': str(instance.customer_id), + 'name': instance.name, + 'date': str(instance.date) + } + ) + + # Check if admin was newly added (dispatched) + if input.team_member_ids is not None: + admin = await _get_admin_profile() + if admin: + admin_was_in_old = str(admin.id) in old_team_member_ids + admin_in_new = _admin_in_team_members(admin.id, input.team_member_ids) + + if not admin_was_in_old and admin_in_new: + # Admin was just added - project was dispatched + account_address_id = None + account_name = None + if instance.account_address_id: + account_address_id = str(instance.account_address_id) + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_name = account_address.account.name if account_address.account else None + + await publish_project_dispatched( + project_id=str(instance.id), + triggered_by=profile, + metadata={ + 'project_id': str(instance.id), + 'project_name': instance.name, + 'customer_id': str(instance.customer_id), + 'account_address_id': account_address_id, + 'account_name': account_name, + 'date': str(instance.date), + 'status': instance.status + } + ) + + return cast(ProjectType, instance) + + @strawberry.mutation(description="Delete an existing project") + async def delete_project(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, Project) + if not instance: + raise ValueError(f"Project with ID {id} does not exist") + await pubsub.publish("project_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_project_deleted( + project_id=str(id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/project_punchlist.py b/core/graphql/mutations/project_punchlist.py new file mode 100644 index 0000000..248900e --- /dev/null +++ b/core/graphql/mutations/project_punchlist.py @@ -0,0 +1,114 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.project_punchlist import ( + ProjectPunchlistInput, + ProjectPunchlistUpdateInput, +) +from core.graphql.types.project_punchlist import ProjectPunchlistType +from core.models.project_punchlist import ProjectPunchlist +from core.graphql.utils import 
create_object, update_object, delete_object, _decode_global_id
+from core.services.events import (
+ publish_project_punchlist_created,
+ publish_project_punchlist_updated,
+ publish_project_punchlist_deleted,
+ publish_punchlist_status_changed,
+ publish_punchlist_priority_changed,
+)
+
+
+@strawberry.type
+class Mutation:
+ @strawberry.mutation(description="Create a new project punchlist")
+ async def create_project_punchlist(
+ self, input: ProjectPunchlistInput, info: Info
+ ) -> ProjectPunchlistType:
+ instance = await create_object(input, ProjectPunchlist)
+ await pubsub.publish("project_punchlist_created", instance.id)
+
+ # Get profile from request context
+ profile = getattr(info.context.request, 'profile', None)
+
+ # Publish event
+ await publish_project_punchlist_created(
+ punchlist_id=str(instance.id),
+ project_id=str(instance.project_id),
+ triggered_by=profile
+ )
+
+ return cast(ProjectPunchlistType, instance)
+
+ @strawberry.mutation(description="Update an existing project punchlist")
+ async def update_project_punchlist(
+ self, input: ProjectPunchlistUpdateInput, info: Info
+ ) -> ProjectPunchlistType:
+ # Get old instance for comparison (decode the relay GlobalID to a raw pk)
+ old_instance = await database_sync_to_async(
+ ProjectPunchlist.objects.get
+ )(pk=input.id.node_id)
+
+ # Update the instance
+ instance = await update_object(input, ProjectPunchlist)
+ await pubsub.publish("project_punchlist_updated", instance.id)
+
+ # Get profile from request context
+ profile = getattr(info.context.request, 'profile', None)
+
+ # Publish update event
+ await publish_project_punchlist_updated(
+ punchlist_id=str(instance.id),
+ project_id=str(instance.project_id),
+ triggered_by=profile
+ )
+
+ # Check for status changes (if the model has status field)
+ if hasattr(old_instance, 'status') and hasattr(instance, 'status'):
+ if old_instance.status != instance.status:
+ await publish_punchlist_status_changed(
+ punchlist_id=str(instance.id),
+ entity_type='ProjectPunchlist',
+ old_status=old_instance.status,
+ new_status=instance.status,
+ triggered_by=profile
+ )
+
+ # Check for priority changes (if the model has priority field)
+ if hasattr(old_instance, 'priority') and hasattr(instance, 'priority'):
+ if old_instance.priority != instance.priority:
+ await publish_punchlist_priority_changed(
+ punchlist_id=str(instance.id),
+ entity_type='ProjectPunchlist',
+ old_priority=old_instance.priority,
+ new_priority=instance.priority,
+ triggered_by=profile
+ )
+
+ return cast(ProjectPunchlistType, instance)
+
+ @strawberry.mutation(description="Delete an existing project punchlist")
+ async def delete_project_punchlist(self, id: strawberry.ID, info: Info) -> strawberry.ID:
+ # Get instance before deletion to access project_id (decode the global ID first)
+ instance = await database_sync_to_async(
+ ProjectPunchlist.objects.get
+ )(pk=_decode_global_id(id))
+
+ # Delete the instance
+ deleted_instance = await delete_object(id, ProjectPunchlist)
+ if not deleted_instance:
+ raise ValueError(f"ProjectPunchlist with ID {id} does not exist")
+
+ await pubsub.publish("project_punchlist_deleted", id)
+
+ # Get profile from request context
+ profile = getattr(info.context.request, 'profile', None)
+
+ # Publish delete event
+ await publish_project_punchlist_deleted(
+ punchlist_id=str(id),
+ project_id=str(instance.project_id),
+ triggered_by=profile
+ )
+
+ return id
diff --git a/core/graphql/mutations/project_scope.py b/core/graphql/mutations/project_scope.py
new file mode 100644
index 0000000..801b6fb
--- /dev/null
+++ b/core/graphql/mutations/project_scope.py
@@ -0,0 +1,218 @@
+from typing 
import cast +import strawberry +from strawberry.types import Info +from asgiref.sync import sync_to_async +from core.graphql.inputs.project_scope import ( + ProjectScopeInput, + ProjectScopeUpdateInput, + ProjectScopeCategoryInput, + ProjectScopeCategoryUpdateInput, + ProjectScopeTaskInput, + ProjectScopeTaskUpdateInput, + CreateProjectScopeFromTemplateInput, +) +from core.graphql.types.project_scope import ( + ProjectScopeType, + ProjectScopeCategoryType, + ProjectScopeTaskType, +) +from core.graphql.utils import create_object, update_object, delete_object +from core.models.account import Account, AccountAddress +from core.models.project import Project +from core.models.project_scope import ProjectScope, ProjectScopeCategory, ProjectScopeTask +from core.models.project_scope_template import ProjectScopeTemplate +from core.services.events import ( + publish_project_scope_created, publish_project_scope_updated, publish_project_scope_deleted, + publish_project_scope_category_created, publish_project_scope_category_updated, publish_project_scope_category_deleted, + publish_project_scope_task_created, publish_project_scope_task_updated, publish_project_scope_task_deleted, + publish_project_scope_template_instantiated, +) + + +@strawberry.type +class Mutation: + # ProjectScope CRUD + @strawberry.mutation(description="Create a new ProjectScope") + async def create_project_scope(self, input: ProjectScopeInput, info: Info) -> ProjectScopeType: + instance = await create_object(input, ProjectScope) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_created( + scope_id=str(instance.id), + project_id=str(instance.project_id), + triggered_by=profile + ) + + return cast(ProjectScopeType, instance) + + @strawberry.mutation(description="Update an existing ProjectScope") + async def update_project_scope(self, input: ProjectScopeUpdateInput, info: Info) -> ProjectScopeType: + instance = await update_object(input, ProjectScope) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_updated( + scope_id=str(instance.id), + project_id=str(instance.project_id), + triggered_by=profile + ) + + return cast(ProjectScopeType, instance) + + @strawberry.mutation(description="Delete a ProjectScope") + async def delete_project_scope(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectScope) + if not instance: + raise ValueError(f"ProjectScope with ID {id} does not exist") + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_deleted( + scope_id=str(id), + project_id=str(instance.project_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a ProjectScopeCategory") + async def create_project_scope_category(self, input: ProjectScopeCategoryInput, info: Info) -> ProjectScopeCategoryType: + instance = await create_object(input, ProjectScopeCategory) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_category_created( + category_id=str(instance.id), + scope_id=str(instance.scope_id), + triggered_by=profile + ) + + return cast(ProjectScopeCategoryType, instance) + + @strawberry.mutation(description="Update a ProjectScopeCategory") + async def update_project_scope_category(self, input: ProjectScopeCategoryUpdateInput, info: Info) -> ProjectScopeCategoryType: + instance = await update_object(input, ProjectScopeCategory) + + # 
Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_category_updated( + category_id=str(instance.id), + scope_id=str(instance.scope_id), + triggered_by=profile + ) + + return cast(ProjectScopeCategoryType, instance) + + @strawberry.mutation(description="Delete a ProjectScopeCategory") + async def delete_project_scope_category(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectScopeCategory) + if not instance: + raise ValueError(f"ProjectScopeCategory with ID {id} does not exist") + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_category_deleted( + category_id=str(id), + scope_id=str(instance.scope_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a ProjectScopeTask") + async def create_project_scope_task(self, input: ProjectScopeTaskInput, info: Info) -> ProjectScopeTaskType: + instance = await create_object(input, ProjectScopeTask) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_task_created( + task_id=str(instance.id), + category_id=str(instance.category_id), + triggered_by=profile + ) + + return cast(ProjectScopeTaskType, instance) + + @strawberry.mutation(description="Update a ProjectScopeTask") + async def update_project_scope_task(self, input: ProjectScopeTaskUpdateInput, info: Info) -> ProjectScopeTaskType: + instance = await update_object(input, ProjectScopeTask) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_task_updated( + task_id=str(instance.id), + category_id=str(instance.category_id), + triggered_by=profile + ) + + return cast(ProjectScopeTaskType, instance) + + @strawberry.mutation(description="Delete a ProjectScopeTask") + async def delete_project_scope_task(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectScopeTask) + if not instance: + raise ValueError(f"ProjectScopeTask with ID {id} does not exist") + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_task_deleted( + task_id=str(id), + category_id=str(instance.category_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Instantiate a ProjectScope (with Categories and Tasks) from a ProjectScopeTemplate") + async def create_project_scope_from_template(self, input: CreateProjectScopeFromTemplateInput, info: Info) -> ProjectScopeType: + def _do_create_sync() -> tuple[ProjectScope, str, str]: + # Load required objects synchronously (ORM-safe in this thread) + project = ( + Project.objects + .select_related("account_address__account") + .get(pk=input.project_id.node_id) + ) + tpl = ProjectScopeTemplate.objects.get(pk=input.template_id.node_id) + + # Defaults derived from project (if project has an account_address) + account = None + account_address = None + if project.account_address_id: + account_address = project.account_address + account = account_address.account + + if input.account_address_id: + account_address = AccountAddress.objects.get(pk=input.account_address_id.node_id) + account = account_address.account + + if input.account_id: + account = Account.objects.get(pk=input.account_id.node_id) + + # Instantiate the ProjectScope object from the template + instance = tpl.instantiate( + project=project, + account=account, + account_address=account_address, + 
name=input.name, + description=input.description, + is_active=input.is_active if input.is_active is not None else True, + ) + + # Persist the relation on the project + project.scope = instance + project.save(update_fields=["scope"]) + return instance, str(tpl.id), str(project.id) + + instance, template_id, project_id = await sync_to_async(_do_create_sync, thread_sensitive=True)() + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_project_scope_template_instantiated( + scope_id=str(instance.id), + template_id=template_id, + project_id=project_id, + triggered_by=profile + ) + + return cast(ProjectScopeType, instance) \ No newline at end of file diff --git a/core/graphql/mutations/project_scope_template.py b/core/graphql/mutations/project_scope_template.py new file mode 100644 index 0000000..61a49e9 --- /dev/null +++ b/core/graphql/mutations/project_scope_template.py @@ -0,0 +1,141 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from strawberry.scalars import JSON +from asgiref.sync import sync_to_async +from core.graphql.inputs.project_scope_template import ( + ProjectScopeTemplateInput, + ProjectScopeTemplateUpdateInput, + ProjectAreaTemplateInput, + ProjectAreaTemplateUpdateInput, + ProjectTaskTemplateInput, + ProjectTaskTemplateUpdateInput, +) +from core.graphql.pubsub import pubsub +from core.graphql.types.project_scope_template import ( + ProjectScopeTemplateType, + ProjectAreaTemplateType, + ProjectTaskTemplateType, +) +from core.graphql.utils import create_object, update_object, delete_object +from core.models.project_scope_template import ( + ProjectScopeTemplate, + ProjectAreaTemplate, + ProjectTaskTemplate, +) +from core.services.scope_builder import build_project_scope_template + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new Project Scope Template") + async def create_project_scope_template(self, input: ProjectScopeTemplateInput, info: Info) -> ProjectScopeTemplateType: + instance = await create_object(input, ProjectScopeTemplate) + await pubsub.publish("project_scope_template_created", instance.id) + # Note: No event publisher exists for project scope template CRUD operations yet + return cast(ProjectScopeTemplateType, instance) + + @strawberry.mutation(description="Update an existing Project Scope Template") + async def update_project_scope_template(self, input: ProjectScopeTemplateUpdateInput, info: Info) -> ProjectScopeTemplateType: + instance = await update_object(input, ProjectScopeTemplate) + await pubsub.publish("project_scope_template_updated", instance.id) + # Note: No event publisher exists for project scope template CRUD operations yet + return cast(ProjectScopeTemplateType, instance) + + @strawberry.mutation(description="Delete a Project Scope Template") + async def delete_project_scope_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectScopeTemplate) + if not instance: + raise ValueError(f"ProjectScopeTemplate with ID {id} does not exist") + await pubsub.publish("project_scope_template_deleted", id) + # Note: No event publisher exists for project scope template CRUD operations yet + return id + + @strawberry.mutation(description="Create a Project Area Template") + async def create_project_area_template(self, input: ProjectAreaTemplateInput, info: Info) -> ProjectAreaTemplateType: + instance = await create_object(input, ProjectAreaTemplate) + await pubsub.publish("project_area_template_created", 
instance.id) + # Note: No event publisher exists for project area template CRUD operations yet + return cast(ProjectAreaTemplateType, instance) + + @strawberry.mutation(description="Update a Project Area Template") + async def update_project_area_template(self, input: ProjectAreaTemplateUpdateInput, info: Info) -> ProjectAreaTemplateType: + instance = await update_object(input, ProjectAreaTemplate) + await pubsub.publish("project_area_template_updated", instance.id) + # Note: No event publisher exists for project area template CRUD operations yet + return cast(ProjectAreaTemplateType, instance) + + @strawberry.mutation(description="Delete a Project Area Template") + async def delete_project_area_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectAreaTemplate) + if not instance: + raise ValueError(f"ProjectAreaTemplate with ID {id} does not exist") + await pubsub.publish("project_area_template_deleted", id) + # Note: No event publisher exists for project area template CRUD operations yet + return id + + @strawberry.mutation(description="Create a Project Task Template") + async def create_project_task_template(self, input: ProjectTaskTemplateInput, info: Info) -> ProjectTaskTemplateType: + instance = await create_object(input, ProjectTaskTemplate) + await pubsub.publish("project_task_template_created", instance.id) + # Note: No event publisher exists for project task template CRUD operations yet + return cast(ProjectTaskTemplateType, instance) + + @strawberry.mutation(description="Update a Project Task Template") + async def update_project_task_template(self, input: ProjectTaskTemplateUpdateInput, info: Info) -> ProjectTaskTemplateType: + instance = await update_object(input, ProjectTaskTemplate) + await pubsub.publish("project_task_template_updated", instance.id) + # Note: No event publisher exists for project task template CRUD operations yet + return cast(ProjectTaskTemplateType, instance) + + @strawberry.mutation(description="Delete a Project Task Template") + async def delete_project_task_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectTaskTemplate) + if not instance: + raise ValueError(f"ProjectTaskTemplate with ID {id} does not exist") + await pubsub.publish("project_task_template_deleted", id) + # Note: No event publisher exists for project task template CRUD operations yet + return id + + @strawberry.mutation(description="Create a ProjectScopeTemplate (and nested Categories/Tasks) from a JSON payload") + async def create_project_scope_template_from_json( + self, + payload: JSON, + replace: bool = False, + info: Info | None = None, + ) -> ProjectScopeTemplateType: + """ + Accepts a JSON object matching the builder payload shape: + { + "name": str, "description": str, "is_active": bool, + "categories": [ + {"name": str, "order": int, "tasks": [ + {"description": str, "checklist_description": str, "order": int, "estimated_minutes": int} + ]} + ] + } + If replace=True and a template with the same name exists, it will be deleted first. 
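+
+ Example payload (illustrative values only):
+
+ {
+ "name": "Standard Build-Out",
+ "description": "Baseline scope for build-out projects",
+ "is_active": true,
+ "categories": [
+ {"name": "Demolition", "order": 1, "tasks": [
+ {"description": "Remove existing fixtures", "checklist_description": "Fixtures removed", "order": 1, "estimated_minutes": 60}
+ ]}
+ ]
+ }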
+ """ + + def _do_create_sync(): + if not isinstance(payload, dict): + raise ValueError("payload must be a JSON object") + + name = payload.get("name") + if not name or not isinstance(name, str): + raise ValueError("payload.name is required and must be a string") + + if replace: + ProjectScopeTemplate.objects.filter(name=name).delete() + elif ProjectScopeTemplate.objects.filter(name=name).exists(): + raise ValueError( + f"A ProjectScopeTemplate named '{name}' already exists (use replace=true to overwrite)" + ) + + tpl = build_project_scope_template(payload) + return tpl + + instance = await sync_to_async(_do_create_sync)() + await pubsub.publish("project_scope_template_created", instance.id) + # Note: No event publisher exists for project scope template CRUD operations yet + return cast(ProjectScopeTemplateType, instance) diff --git a/core/graphql/mutations/report.py b/core/graphql/mutations/report.py new file mode 100644 index 0000000..df18777 --- /dev/null +++ b/core/graphql/mutations/report.py @@ -0,0 +1,70 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.inputs.report import ReportInput, ReportUpdateInput +from core.graphql.types.report import ReportType +from core.models.report import Report +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import publish_report_submitted, publish_report_updated, publish_report_deleted + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new report") + async def create_report(self, input: ReportInput, info: Info) -> ReportType: + # Exclude m2m id fields from model constructor + payload = {k: v for k, v in input.__dict__.items() if k not in {"service_ids", "project_ids"}} + m2m_data = { + "services": input.service_ids, + "projects": input.project_ids, + } + instance = await create_object(payload, Report, m2m_data) + await pubsub.publish("report_created", instance.id) + + # Publish event for notifications (report creation = report submission) + profile = getattr(info.context.request, 'profile', None) + await publish_report_submitted( + report_id=str(instance.id), + triggered_by=profile, + metadata={'team_member_id': str(instance.team_member_id)} + ) + + return cast(ReportType, instance) + + @strawberry.mutation(description="Update an existing report") + async def update_report(self, input: ReportUpdateInput, info: Info) -> ReportType: + # Keep id and non-m2m fields; drop m2m *_ids from the update payload + payload = {k: v for k, v in input.__dict__.items() if k not in {"service_ids", "project_ids"}} + m2m_data = { + "services": getattr(input, "service_ids", None), + "projects": getattr(input, "project_ids", None), + } + instance = await update_object(payload, Report, m2m_data) + await pubsub.publish("report_updated", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_report_updated( + report_id=str(instance.id), + triggered_by=profile, + metadata={'team_member_id': str(instance.team_member_id)} + ) + + return cast(ReportType, instance) + + @strawberry.mutation(description="Delete an existing report") + async def delete_report(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, Report) + if not instance: + raise ValueError(f"Report with ID {id} does not exist") + await pubsub.publish("report_deleted", id) + + # Publish event for notifications + profile = 
getattr(info.context.request, 'profile', None) + await publish_report_deleted( + report_id=str(id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/revenue.py b/core/graphql/mutations/revenue.py new file mode 100644 index 0000000..81f7b13 --- /dev/null +++ b/core/graphql/mutations/revenue.py @@ -0,0 +1,58 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.inputs.revenue import RevenueInput, RevenueUpdateInput +from core.graphql.types.revenue import RevenueType +from core.models.revenue import Revenue +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_revenue_rate_created, publish_revenue_rate_updated, publish_revenue_rate_deleted, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new revenue rate") + async def create_revenue(self, input: RevenueInput, info: Info) -> RevenueType: + instance = await create_object(input, Revenue) + await pubsub.publish("revenue_created", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_revenue_rate_created( + rate_id=str(instance.id), + triggered_by=profile + ) + + return cast(RevenueType, instance) + + @strawberry.mutation(description="Update an existing revenue rate") + async def update_revenue(self, input: RevenueUpdateInput, info: Info) -> RevenueType: + instance = await update_object(input, Revenue) + await pubsub.publish("revenue_updated", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_revenue_rate_updated( + rate_id=str(instance.id), + triggered_by=profile + ) + + return cast(RevenueType, instance) + + @strawberry.mutation(description="Delete an existing revenue rate") + async def delete_revenue(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, Revenue) + if not instance: + raise ValueError(f"Revenue with ID {id} does not exist") + await pubsub.publish("revenue_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_revenue_rate_deleted( + rate_id=str(id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/schedule.py b/core/graphql/mutations/schedule.py new file mode 100644 index 0000000..8ee13fa --- /dev/null +++ b/core/graphql/mutations/schedule.py @@ -0,0 +1,108 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.schedule import ScheduleInput, ScheduleUpdateInput +from core.graphql.types.schedule import ScheduleType +from core.models.schedule import Schedule +from core.models.account import AccountAddress +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_schedule_created, publish_schedule_updated, publish_schedule_deleted, + publish_schedule_frequency_changed, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new service schedule") + async def create_schedule(self, input: ScheduleInput, info: Info) -> ScheduleType: + instance = await create_object(input, Schedule) + await pubsub.publish("schedule_created", instance.id) + + # Get profile from request context + profile = 
getattr(info.context.request, 'profile', None) + + # Publish event with account_id in metadata + account_address = await database_sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_id = str(account_address.account_id) if account_address.account_id else None + await publish_schedule_created( + schedule_id=str(instance.id), + triggered_by=profile, + metadata={'account_id': account_id} + ) + + return cast(ScheduleType, instance) + + @strawberry.mutation(description="Update an existing service schedule") + async def update_schedule(self, input: ScheduleUpdateInput, info: Info) -> ScheduleType: + # Get the old schedule to check for frequency changes + old_schedule = await database_sync_to_async(Schedule.objects.get)(pk=input.id.node_id) + + # Store old frequency state + old_frequency = { + 'monday': old_schedule.monday_service, + 'tuesday': old_schedule.tuesday_service, + 'wednesday': old_schedule.wednesday_service, + 'thursday': old_schedule.thursday_service, + 'friday': old_schedule.friday_service, + 'saturday': old_schedule.saturday_service, + 'sunday': old_schedule.sunday_service, + 'weekend': old_schedule.weekend_service, + } + + instance = await update_object(input, Schedule) + await pubsub.publish("schedule_updated", instance.id) + + # Get profile from request context + profile = getattr(info.context.request, 'profile', None) + + # Publish schedule updated event + await publish_schedule_updated( + schedule_id=str(instance.id), + triggered_by=profile + ) + + # Check if frequency changed + new_frequency = { + 'monday': instance.monday_service, + 'tuesday': instance.tuesday_service, + 'wednesday': instance.wednesday_service, + 'thursday': instance.thursday_service, + 'friday': instance.friday_service, + 'saturday': instance.saturday_service, + 'sunday': instance.sunday_service, + 'weekend': instance.weekend_service, + } + + if old_frequency != new_frequency: + # Publish frequency changed event + await publish_schedule_frequency_changed( + schedule_id=str(instance.id), + old_frequency=str(old_frequency), + new_frequency=str(new_frequency), + triggered_by=profile + ) + + return cast(ScheduleType, instance) + + @strawberry.mutation(description="Delete an existing service schedule") + async def delete_schedule(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, Schedule) + if not instance: + raise ValueError(f"Schedule with ID {id} does not exist") + await pubsub.publish("schedule_deleted", id) + + # Get profile from request context + profile = getattr(info.context.request, 'profile', None) + + # Publish schedule deleted event + await publish_schedule_deleted( + schedule_id=str(id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/scope.py b/core/graphql/mutations/scope.py new file mode 100644 index 0000000..c310ece --- /dev/null +++ b/core/graphql/mutations/scope.py @@ -0,0 +1,294 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from channels.db import database_sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.inputs.scope import ( + ScopeInput, ScopeUpdateInput, + AreaInput, AreaUpdateInput, + TaskInput, TaskUpdateInput, + TaskCompletionInput, TaskCompletionUpdateInput, +) +from core.graphql.types.scope import ( + ScopeType, + AreaType, + TaskType, + TaskCompletionType, +) +from core.models.scope import Scope, Area, Task, TaskCompletion +from core.models.session import ServiceSession +from 
core.graphql.utils import create_object, update_object, delete_object, _decode_global_id +from core.services.events import ( + publish_scope_created, publish_scope_updated, publish_scope_deleted, + publish_area_created, publish_area_updated, publish_area_deleted, + publish_task_created, publish_task_updated, publish_task_deleted, + publish_task_completion_recorded, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new scope") + async def create_scope(self, input: ScopeInput, info: Info) -> ScopeType: + instance = await create_object(input, Scope) + await pubsub.publish("scope_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_created( + scope_id=str(instance.id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return cast(ScopeType, instance) + + @strawberry.mutation(description="Update an existing scope") + async def update_scope(self, input: ScopeUpdateInput, info: Info) -> ScopeType: + instance = await update_object(input, Scope) + await pubsub.publish("scope_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_updated( + scope_id=str(instance.id), + account_id=str(instance.account_id), + triggered_by=profile + ) + + return cast(ScopeType, instance) + + @strawberry.mutation(description="Delete an existing scope") + async def delete_scope(self, id: strawberry.ID, info: Info) -> strawberry.ID: + def _delete_scope_sync(scope_id): + """ + Smart delete: soft-delete if sessions reference this scope, hard-delete otherwise. + Returns (account_id, action) where action is 'deleted' or 'deactivated'. + """ + pk = _decode_global_id(scope_id) + try: + scope = Scope.objects.get(pk=pk) + except Scope.DoesNotExist: + return None, None + + account_id = scope.account_id + + # Check if any service sessions reference this scope + has_sessions = ServiceSession.objects.filter(scope_id=pk).exists() + + if has_sessions: + # Soft delete - deactivate the scope to preserve historical data + scope.is_active = False + scope.save(update_fields=['is_active']) + else: + # Hard delete - no sessions reference this scope + scope.delete() + + return account_id, 'deactivated' if has_sessions else 'deleted' + + account_id, action = await database_sync_to_async(_delete_scope_sync)(id) + + if account_id is None: + raise ValueError(f"Scope with ID {id} does not exist") + + await pubsub.publish("scope_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_deleted( + scope_id=str(id), + account_id=str(account_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new area") + async def create_area(self, input: AreaInput, info: Info) -> AreaType: + instance = await create_object(input, Area) + await pubsub.publish("area_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_created( + area_id=str(instance.id), + scope_id=str(instance.scope_id), + triggered_by=profile + ) + + return cast(AreaType, instance) + + @strawberry.mutation(description="Update an existing area") + async def update_area(self, input: AreaUpdateInput, info: Info) -> AreaType: + instance = await update_object(input, Area) + await pubsub.publish("area_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_updated( + 
area_id=str(instance.id), + scope_id=str(instance.scope_id), + triggered_by=profile + ) + + return cast(AreaType, instance) + + @strawberry.mutation(description="Delete an existing area") + async def delete_area(self, id: strawberry.ID, info: Info) -> strawberry.ID: + def _delete_area_sync(area_id): + """ + Delete an area if no task completions reference its tasks. + Returns scope_id on success, raises ValueError if completions exist. + """ + pk = _decode_global_id(area_id) + try: + area = Area.objects.get(pk=pk) + except Area.DoesNotExist: + return None + + # Check if any task completions reference tasks in this area + has_completions = TaskCompletion.objects.filter(task__area_id=pk).exists() + + if has_completions: + raise ValueError( + "Cannot delete area: it contains tasks with recorded completions. " + "Deactivate the scope instead to preserve historical data." + ) + + scope_id = area.scope_id + area.delete() + return scope_id + + scope_id = await database_sync_to_async(_delete_area_sync)(id) + + if scope_id is None: + raise ValueError(f"Area with ID {id} does not exist") + + await pubsub.publish("area_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_deleted( + area_id=str(id), + scope_id=str(scope_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new task") + async def create_task(self, input: TaskInput, info: Info) -> TaskType: + instance = await create_object(input, Task) + await pubsub.publish("task_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_created( + task_id=str(instance.id), + area_id=str(instance.area_id), + triggered_by=profile + ) + + return cast(TaskType, instance) + + @strawberry.mutation(description="Update an existing task") + async def update_task(self, input: TaskUpdateInput, info: Info) -> TaskType: + instance = await update_object(input, Task) + await pubsub.publish("task_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_updated( + task_id=str(instance.id), + area_id=str(instance.area_id), + triggered_by=profile + ) + + return cast(TaskType, instance) + + @strawberry.mutation(description="Delete an existing task") + async def delete_task(self, id: strawberry.ID, info: Info) -> strawberry.ID: + def _delete_task_sync(task_id): + """ + Delete a task if no task completions reference it. + Returns area_id on success, raises ValueError if completions exist. + """ + pk = _decode_global_id(task_id) + try: + task = Task.objects.get(pk=pk) + except Task.DoesNotExist: + return None + + # Check if any task completions reference this task + has_completions = TaskCompletion.objects.filter(task_id=pk).exists() + + if has_completions: + raise ValueError( + "Cannot delete task: it has recorded completions. " + "Deactivate the scope instead to preserve historical data." 
+ ) + + area_id = task.area_id + task.delete() + return area_id + + area_id = await database_sync_to_async(_delete_task_sync)(id) + + if area_id is None: + raise ValueError(f"Task with ID {id} does not exist") + + await pubsub.publish("task_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_deleted( + task_id=str(id), + area_id=str(area_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new task completion") + async def create_task_completion(self, input: TaskCompletionInput, info: Info) -> TaskCompletionType: + instance = await create_object(input, TaskCompletion) + await pubsub.publish("task_completion_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_completion_recorded( + completion_id=str(instance.id), + task_id=str(instance.task_id), + service_id=str(instance.service_id), + triggered_by=profile + ) + + return cast(TaskCompletionType, instance) + + @strawberry.mutation(description="Update an existing task completion") + async def update_task_completion(self, input: TaskCompletionUpdateInput, info: Info) -> TaskCompletionType: + instance = await update_object(input, TaskCompletion) + await pubsub.publish("task_completion_updated", instance.id) + + # Publish event (reuse the same event for updates) + profile = getattr(info.context.request, 'profile', None) + await publish_task_completion_recorded( + completion_id=str(instance.id), + task_id=str(instance.task_id), + service_id=str(instance.service_id), + triggered_by=profile + ) + + return cast(TaskCompletionType, instance) + + @strawberry.mutation(description="Delete an existing task completion") + async def delete_task_completion(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, TaskCompletion) + if not instance: + raise ValueError(f"TaskCompletion with ID {id} does not exist") + await pubsub.publish("task_completion_deleted", id) + + # Note: No event publication for deletion as there's no corresponding delete event + # in the events.py file for task completions + + return id \ No newline at end of file diff --git a/core/graphql/mutations/scope_template.py b/core/graphql/mutations/scope_template.py new file mode 100644 index 0000000..0677a71 --- /dev/null +++ b/core/graphql/mutations/scope_template.py @@ -0,0 +1,246 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from asgiref.sync import sync_to_async +from core.graphql.pubsub import pubsub +from core.graphql.utils import create_object, update_object, delete_object, _decode_global_id +from core.graphql.types.scope_template import ( + ScopeTemplateType, + AreaTemplateType, + TaskTemplateType, +) +from core.graphql.types.scope import ScopeType +from core.graphql.inputs.scope_template import ( + ScopeTemplateInput, ScopeTemplateUpdateInput, + AreaTemplateInput, AreaTemplateUpdateInput, + TaskTemplateInput, TaskTemplateUpdateInput, + CreateScopeFromTemplateInput, +) +from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate +from core.models.account import Account, AccountAddress +from strawberry.scalars import JSON +from core.services import build_scope_template +from core.services.events import ( + publish_scope_template_created, publish_scope_template_updated, publish_scope_template_deleted, + publish_scope_template_instantiated, + publish_area_template_created, publish_area_template_updated, 
publish_area_template_deleted, + publish_task_template_created, publish_task_template_updated, publish_task_template_deleted, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new scope template") + async def create_scope_template(self, input: ScopeTemplateInput, info: Info) -> ScopeTemplateType: + instance = await create_object(input, ScopeTemplate) + await pubsub.publish("scope_template_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_template_created( + template_id=str(instance.id), + triggered_by=profile + ) + + return cast(ScopeTemplateType, instance) + + @strawberry.mutation(description="Update an existing scope template") + async def update_scope_template(self, input: ScopeTemplateUpdateInput, info: Info) -> ScopeTemplateType: + instance = await update_object(input, ScopeTemplate) + await pubsub.publish("scope_template_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_template_updated( + template_id=str(instance.id), + triggered_by=profile + ) + + return cast(ScopeTemplateType, instance) + + @strawberry.mutation(description="Delete an existing scope template") + async def delete_scope_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ScopeTemplate) + if not instance: + raise ValueError(f"ScopeTemplate with ID {id} does not exist") + await pubsub.publish("scope_template_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_template_deleted( + template_id=str(id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new area template") + async def create_area_template(self, input: AreaTemplateInput, info: Info) -> AreaTemplateType: + instance = await create_object(input, AreaTemplate) + await pubsub.publish("area_template_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_template_created( + template_id=str(instance.id), + scope_template_id=str(instance.scope_template_id), + triggered_by=profile + ) + + return cast(AreaTemplateType, instance) + + @strawberry.mutation(description="Update an existing area template") + async def update_area_template(self, input: AreaTemplateUpdateInput, info: Info) -> AreaTemplateType: + instance = await update_object(input, AreaTemplate) + await pubsub.publish("area_template_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_template_updated( + template_id=str(instance.id), + scope_template_id=str(instance.scope_template_id), + triggered_by=profile + ) + + return cast(AreaTemplateType, instance) + + @strawberry.mutation(description="Delete an existing area template") + async def delete_area_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, AreaTemplate) + if not instance: + raise ValueError(f"AreaTemplate with ID {id} does not exist") + await pubsub.publish("area_template_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_area_template_deleted( + template_id=str(id), + scope_template_id=str(instance.scope_template_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new task template") + async def create_task_template(self, input: 
TaskTemplateInput, info: Info) -> TaskTemplateType: + instance = await create_object(input, TaskTemplate) + await pubsub.publish("task_template_created", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_template_created( + template_id=str(instance.id), + area_template_id=str(instance.area_template_id), + triggered_by=profile + ) + + return cast(TaskTemplateType, instance) + + @strawberry.mutation(description="Update an existing task template") + async def update_task_template(self, input: TaskTemplateUpdateInput, info: Info) -> TaskTemplateType: + instance = await update_object(input, TaskTemplate) + await pubsub.publish("task_template_updated", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_template_updated( + template_id=str(instance.id), + area_template_id=str(instance.area_template_id), + triggered_by=profile + ) + + return cast(TaskTemplateType, instance) + + @strawberry.mutation(description="Delete an existing task template") + async def delete_task_template(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, TaskTemplate) + if not instance: + raise ValueError(f"TaskTemplate with ID {id} does not exist") + await pubsub.publish("task_template_deleted", id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_task_template_deleted( + template_id=str(id), + area_template_id=str(instance.area_template_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Instantiate a Scope (with Areas and Tasks) from a ScopeTemplate") + async def create_scope_from_template(self, input: CreateScopeFromTemplateInput, info: Info) -> ScopeType: + def _do_create_sync(): + template = ScopeTemplate.objects.get(pk=_decode_global_id(input.template_id)) + account = Account.objects.get(pk=_decode_global_id(input.account_id)) + account_address = None + if input.account_address_id: + account_address = AccountAddress.objects.get( + pk=_decode_global_id(input.account_address_id), account=account + ) + scope = template.instantiate( + account=account, + account_address=account_address, + name=input.name, + description=input.description, + is_active=input.is_active if input.is_active is not None else True, + ) + return scope, str(template.id), str(account.id) + + # Run ORM-heavy work in a thread + instance, template_id, account_id = await sync_to_async(_do_create_sync)() + await pubsub.publish("scope_created_from_template", instance.id) + + # Publish event + profile = getattr(info.context.request, 'profile', None) + await publish_scope_template_instantiated( + scope_id=str(instance.id), + template_id=template_id, + account_id=account_id, + triggered_by=profile + ) + + return cast(ScopeType, instance) + + @strawberry.mutation(description="Create a ScopeTemplate (and nested Areas/Tasks) from a JSON payload") + async def create_scope_template_from_json( + self, + payload: JSON, + replace: bool = False, + info: Info | None = None, + ) -> ScopeTemplateType: + """ + Accepts a JSON object matching the builder payload shape. + If replace=True and a template with the same name exists, it will be deleted first. 
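+
+        Assumed example payload (build_scope_template defines the exact keys
+        and is not shown in this patch; this sketch mirrors the
+        project-template builder's shape, with areas in place of categories):
+        {
+            "name": "Residential Deep Clean",
+            "description": "Baseline recurring scope",
+            "is_active": true,
+            "areas": [
+                {"name": "Bathroom", "order": 1, "tasks": [
+                    {"description": "Scrub tile and grout",
+                     "checklist_description": "Tile scrubbed",
+                     "order": 1, "estimated_minutes": 15}
+                ]}
+            ]
+        }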
+ """ + + def _do_create_sync(): + if not isinstance(payload, dict): + raise ValueError("payload must be a JSON object") + + name = payload.get("name") + if not name or not isinstance(name, str): + raise ValueError("payload.name is required and must be a string") + + if replace: + ScopeTemplate.objects.filter(name=name).delete() + elif ScopeTemplate.objects.filter(name=name).exists(): + raise ValueError( + f"A ScopeTemplate named '{name}' already exists (use replace=true to overwrite)" + ) + + tpl = build_scope_template(payload) + return tpl + + instance = await sync_to_async(_do_create_sync)() + await pubsub.publish("scope_template_created", instance.id) + + # Publish event + if info: + profile = getattr(info.context.request, 'profile', None) + await publish_scope_template_created( + template_id=str(instance.id), + triggered_by=profile + ) + + return cast(ScopeTemplateType, instance) diff --git a/core/graphql/mutations/service.py b/core/graphql/mutations/service.py new file mode 100644 index 0000000..4b736b2 --- /dev/null +++ b/core/graphql/mutations/service.py @@ -0,0 +1,327 @@ +import calendar +import datetime +from typing import List, cast +from uuid import UUID +import strawberry +from strawberry.types import Info +from asgiref.sync import sync_to_async +from channels.db import database_sync_to_async +from django.db import transaction +from core.graphql.inputs.service import ServiceInput, ServiceUpdateInput, ServiceGenerationInput +from core.graphql.pubsub import pubsub +from core.graphql.types.service import ServiceType +from core.graphql.utils import create_object, update_object, delete_object, _is_holiday +from core.models.account import AccountAddress +from core.models.profile import TeamProfile +from core.models.schedule import Schedule +from core.models.service import Service +from core.services.events import ( + publish_service_created, publish_service_deleted, + publish_service_status_changed, publish_service_completed, publish_service_cancelled, + publish_services_bulk_generated, publish_service_dispatched, +) + + +# Helper to get admin profile +async def _get_admin_profile(): + return await sync_to_async( + lambda: TeamProfile.objects.filter(role='ADMIN').first() + )() + + +# Helper to check if admin is in team member IDs (handles GlobalID objects) +def _admin_in_team_members(admin_id, team_member_ids): + if not team_member_ids or not admin_id: + return False + # team_member_ids may be GlobalID objects with .node_id attribute + member_uuids = [] + for mid in team_member_ids: + if hasattr(mid, 'node_id'): + member_uuids.append(str(mid.node_id)) + else: + member_uuids.append(str(mid)) + return str(admin_id) in member_uuids + + +# Helper to get old team member IDs from instance +async def _get_old_team_member_ids(instance): + return await sync_to_async( + lambda: set(str(m.id) for m in instance.team_members.all()) + )() + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new service visit") + async def create_service(self, input: ServiceInput, info: Info) -> ServiceType: + # Exclude m2m id fields from model constructor + payload = {k: v for k, v in input.__dict__.items() if k not in {"team_member_ids"}} + m2m_data = {"team_members": input.team_member_ids} + instance = await create_object(payload, Service, m2m_data) + await pubsub.publish("service_created", instance.id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + # Get account_id safely via account_address + account_id = None + if 
instance.account_address_id: + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_id = str(account_address.account_id) if account_address.account_id else None + + await publish_service_created( + service_id=str(instance.id), + triggered_by=profile, + metadata={ + 'account_id': account_id, + 'date': str(instance.date), + 'status': instance.status + } + ) + + # Check if service was dispatched (admin in team members) + admin = await _get_admin_profile() + if admin and _admin_in_team_members(admin.id, input.team_member_ids): + # Build metadata + account_name = None + account_address_id = None + if instance.account_address_id: + account_address_id = str(instance.account_address_id) + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_name = account_address.account.name if account_address.account else None + + await publish_service_dispatched( + service_id=str(instance.id), + triggered_by=profile, + metadata={ + 'service_id': str(instance.id), + 'account_address_id': account_address_id, + 'account_name': account_name, + 'date': str(instance.date), + 'status': instance.status + } + ) + + return cast(ServiceType, instance) + + @strawberry.mutation(description="Update an existing service visit") + async def update_service(self, input: ServiceUpdateInput, info: Info) -> ServiceType: + # Get old service data for comparison + old_service = await database_sync_to_async(Service.objects.get)(pk=input.id.node_id) + old_status = old_service.status + + # Get old team member IDs before update (for dispatched detection) + old_team_member_ids = await _get_old_team_member_ids(old_service) + + # Keep id and non-m2m fields; drop m2m *_ids from the update payload + payload = {k: v for k, v in input.__dict__.items() if k not in {"team_member_ids"}} + m2m_data = {"team_members": getattr(input, "team_member_ids", None)} + instance = await update_object(payload, Service, m2m_data) + await pubsub.publish("service_updated", instance.id) + + # Publish events for notifications + profile = getattr(info.context.request, 'profile', None) + + # Check for status change + if hasattr(input, 'status') and input.status and input.status != old_status: + # Get account name for notifications + account_name = None + if instance.account_address_id: + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_name = account_address.account.name if account_address.account else None + + if instance.status == 'COMPLETED': + await publish_service_completed( + service_id=str(instance.id), + triggered_by=profile, + metadata={ + 'date': str(instance.date), + 'account_name': account_name + } + ) + elif instance.status == 'CANCELLED': + await publish_service_cancelled( + service_id=str(instance.id), + triggered_by=profile, + metadata={ + 'date': str(instance.date), + 'account_name': account_name + } + ) + else: + await publish_service_status_changed( + service_id=str(instance.id), + old_status=old_status, + new_status=instance.status, + triggered_by=profile + ) + + # Check if admin was newly added (dispatched) + if input.team_member_ids is not None: + admin = await _get_admin_profile() + if admin: + admin_was_in_old = str(admin.id) in old_team_member_ids + admin_in_new = _admin_in_team_members(admin.id, input.team_member_ids) + + if not admin_was_in_old and 
admin_in_new: + # Admin was just added - service was dispatched + account_name = None + account_address_id = None + # Use explicit select_related to safely traverse FK chain + if instance.account_address_id: + account_address_id = str(instance.account_address_id) + account_address = await sync_to_async( + lambda: AccountAddress.objects.select_related('account').get(id=instance.account_address_id) + )() + account_name = account_address.account.name if account_address.account else None + + await publish_service_dispatched( + service_id=str(instance.id), + triggered_by=profile, + metadata={ + 'service_id': str(instance.id), + 'account_address_id': account_address_id, + 'account_name': account_name, + 'date': str(instance.date), + 'status': instance.status + } + ) + + return cast(ServiceType, instance) + + @strawberry.mutation(description="Delete an existing service visit") + async def delete_service(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, Service) + if instance: + await pubsub.publish("service_deleted", id) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_service_deleted( + service_id=str(id), + triggered_by=profile, + metadata={'date': str(instance.date)} + ) + + return id + raise ValueError(f"Service with ID {id} does not exist") + + @strawberry.mutation(description="Generate service visits for a given month (all-or-nothing)") + async def generate_services_by_month(self, input: ServiceGenerationInput, info: Info) -> List[ServiceType]: + if input.month < 1 or input.month > 12: + raise ValueError("month must be in range 1..12") + + year = input.year + month_num = input.month + + # Fetch the AccountAddress and Schedule by their IDs + address = await AccountAddress.objects.aget(id=input.account_address_id.node_id) + schedule = await Schedule.objects.aget(id=input.schedule_id.node_id) + + # Optional but recommended: ensure the schedule belongs to this address + if getattr(schedule, "account_address_id", None) != address.id: + raise ValueError("Schedule does not belong to the provided account address") + + cal = calendar.Calendar(firstweekday=calendar.MONDAY) + days_in_month = [d for d in cal.itermonthdates(year, month_num) if d.month == month_num] + + def is_within_schedule(dt: datetime.date) -> bool: + if dt < schedule.start_date: + return False + if schedule.end_date and dt > schedule.end_date: + return False + return True + + def day_flag(weekday: int) -> bool: + return [ + schedule.monday_service, + schedule.tuesday_service, + schedule.wednesday_service, + schedule.thursday_service, + schedule.friday_service, + schedule.saturday_service, + schedule.sunday_service, + ][weekday] + + targets: list[tuple[datetime.date, str | None]] = [] + for day in days_in_month: + if not is_within_schedule(day): + continue + if _is_holiday(day): + continue + + wd = day.weekday() # Mon=0...Sun=6 + schedule_today = False + note: str | None = None + + if 0 <= wd <= 3: + schedule_today = day_flag(wd) + elif wd == 4: + # Friday + if schedule.weekend_service: + schedule_today = True + note = "Weekend service window (Fri–Sun)" + else: + schedule_today = day_flag(wd) + else: + # Sat-Sun + if schedule.weekend_service: + schedule_today = False + else: + schedule_today = day_flag(wd) + + if schedule_today: + targets.append((day, note)) + + if not targets: + return cast(List[ServiceType], []) + + # Run the transactional DB work in a sync thread + def _create_services_sync( + account_address_id: UUID, + 
targets_local: list[tuple[datetime.date, str | None]] + ) -> List[Service]: + with transaction.atomic(): + if Service.objects.filter( + account_address_id=account_address_id, + date__in=[svc_day for (svc_day, _) in targets_local] + ).exists(): + raise ValueError( + "One or more services already exist for the selected month; nothing was created." + ) + + to_create = [ + Service( + account_address_id=account_address_id, + date=svc_day, + notes=(svc_note or None), + ) + for (svc_day, svc_note) in targets_local + ] + return Service.objects.bulk_create(to_create) + + created_instances: List[Service] = await sync_to_async( + _create_services_sync, + thread_sensitive=True, + )(address.id, targets) + + for obj in created_instances: + await pubsub.publish("service_created", obj.id) + + # Publish bulk generation event for notifications + if created_instances: + profile = getattr(info.context.request, 'profile', None) + month_name = datetime.date(year, month_num, 1).strftime('%B %Y') + await publish_services_bulk_generated( + account_id=str(address.account_id), + count=len(created_instances), + month=month_name, + triggered_by=profile + ) + + return cast(List[ServiceType], created_instances) diff --git a/core/graphql/mutations/session.py b/core/graphql/mutations/session.py new file mode 100644 index 0000000..cfda16b --- /dev/null +++ b/core/graphql/mutations/session.py @@ -0,0 +1,467 @@ +from typing import List, cast +from uuid import UUID +import strawberry +from channels.db import database_sync_to_async +from django.core.exceptions import ValidationError +from strawberry import Info +from core.graphql.inputs.session import OpenServiceSessionInput, CloseServiceSessionInput, RevertServiceSessionInput, ProjectSessionStartInput, \ + ProjectSessionCloseInput, ProjectSessionRevertInput +from core.graphql.pubsub import pubsub +from core.graphql.types.session import ServiceSessionType, ProjectSessionType +from core.models.profile import TeamProfile +from core.models.scope import Task +from core.models.session import ServiceSession, ProjectSession +from core.models.project_scope import ProjectScopeTask +from core.services.session_service import SessionService +from core.services.events import ( + publish_service_session_opened, publish_service_session_closed, publish_service_session_reverted, + publish_service_task_completed, publish_service_task_uncompleted, + publish_project_session_opened, publish_project_session_closed, publish_project_session_reverted, + publish_project_task_completed, publish_project_task_uncompleted, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Revert an active service session back to scheduled (deletes the active session)") + async def revert_service_session(self, input: RevertServiceSessionInput, info: Info) -> bool: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can revert service sessions.") + + service_pk = UUID(str(input.service_id)) + + svc = SessionService() + result = await database_sync_to_async(svc.revert_session)( + entity_type="service", + entity_id=service_pk, + actor=profile, + ) + + # Publish event + await publish_service_session_reverted( + session_id=str(result.session_id), + service_id=str(result.entity_id), + triggered_by=profile + ) + + return True + + @strawberry.mutation(description="Open a service session for a scheduled service") + async def open_service_session(self, input: OpenServiceSessionInput, info: Info) -> ServiceSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. Only team members can open service sessions.") + + service_pk = UUID(str(input.service_id)) + + svc = SessionService() + result = await database_sync_to_async(svc.open_session)( + entity_type="service", + entity_id=service_pk, + actor=profile, + ) + + async def load_session() -> ServiceSession: + return await database_sync_to_async( + lambda: ( + ServiceSession.objects + .select_related("service", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=result.session_id) + ) + )() + + session = await load_session() + + # Publish event + await publish_service_session_opened( + session_id=str(result.session_id), + service_id=str(result.entity_id), + triggered_by=profile + ) + + return cast(ServiceSessionType, cast(object, session)) + + @strawberry.mutation(description="Close the active service session and record completed tasks") + async def close_service_session(self, input: CloseServiceSessionInput, info: Info) -> ServiceSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can close service sessions.") + + service_pk = UUID(str(input.service_id)) + task_pks: List[UUID] = [UUID(str(x)) for x in input.task_ids] + + def load_tasks() -> List[Task]: + qs = Task.objects.filter(pk__in=task_pks) + return list(qs) + + tasks = await database_sync_to_async(load_tasks)() + if len(tasks) != len(task_pks): + raise ValidationError("One or more task IDs are invalid.") + + svc = SessionService() + result = await database_sync_to_async(svc.close_session)( + entity_type="service", + entity_id=service_pk, + actor=profile, + tasks=tasks, + ) + + async def load_session() -> ServiceSession: + return await database_sync_to_async( + lambda: ( + ServiceSession.objects + .select_related("service", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=result.session_id) + ) + )() + + session = await load_session() + + # Get account name and service date for notifications + account_name = session.account.name if session.account else None + service_date = str(session.service.date) if session.service else None + + # Publish event + await publish_service_session_closed( + session_id=str(result.session_id), + service_id=str(result.entity_id), + triggered_by=profile, + metadata={ + 'account_name': account_name, + 'date': service_date + } + ) + + return cast(ServiceSessionType, cast(object, session)) + + @strawberry.mutation(description="Add a task completion to an active service session") + async def add_task_completion(self, info: Info, service_id: strawberry.ID, task_id: strawberry.ID, + notes: str | None = None) -> ServiceSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. Only team members can add task completions.") + + svc = SessionService() + + task_pk = UUID(str(task_id)) + service_pk = UUID(str(service_id)) + + # Load task to get name for event + task = await database_sync_to_async(Task.objects.get)(pk=task_pk) + + session_id = await database_sync_to_async(svc.add_task_completion)( + service_id=service_pk, + task_id=task_pk, + actor=profile, + notes=notes, + ) + + async def load_session() -> ServiceSession: + return await database_sync_to_async( + lambda: ( + ServiceSession.objects + .select_related("service", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=session_id) + ) + )() + + session = await load_session() + + # Publish event + await publish_service_task_completed( + task_id=str(task_pk), + service_id=str(service_pk), + task_name=task.checklist_description, + triggered_by=profile + ) + + return cast(ServiceSessionType, cast(object, session)) + + @strawberry.mutation(description="Remove a task completion from an active service session") + async def remove_task_completion(self, info: Info, service_id: strawberry.ID, + task_id: strawberry.ID) -> ServiceSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can remove task completions.") + + svc = SessionService() + + task_pk = UUID(str(task_id)) + service_pk = UUID(str(service_id)) + + # Load task to get name for event + task = await database_sync_to_async(Task.objects.get)(pk=task_pk) + + session_id = await database_sync_to_async(svc.remove_task_completion)( + service_id=service_pk, + task_id=task_pk, + ) + + async def load_session() -> ServiceSession: + return await database_sync_to_async( + lambda: ( + ServiceSession.objects + .select_related("service", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=session_id) + ) + )() + + session = await load_session() + + # Publish event + await publish_service_task_uncompleted( + task_id=str(task_pk), + service_id=str(service_pk), + task_name=task.checklist_description, + triggered_by=profile + ) + + return cast(ServiceSessionType, cast(object, session)) + + @strawberry.mutation(description="Add a task completion to an active project session") + async def add_project_task_completion(self, info: Info, project_id: strawberry.ID, task_id: strawberry.ID, + notes: str | None = None) -> ProjectSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. Only team members can add project task completions.") + + svc = SessionService() + + # Load task to get name and validate it exists + task_pk = UUID(str(task_id)) + project_pk = UUID(str(project_id)) + + try: + task = await database_sync_to_async(ProjectScopeTask.objects.get)(pk=task_pk) + except ProjectScopeTask.DoesNotExist: + raise ValidationError("Invalid project task ID.") + + session_id = await database_sync_to_async(svc.add_project_task_completion)( + project_id=project_pk, + task_id=task_pk, + actor=profile, + notes=notes, + ) + + async def load_session() -> ProjectSession: + return await database_sync_to_async( + lambda: ( + ProjectSession.objects + .select_related("project", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=session_id) + ) + )() + + session = await load_session() + + # Publish event + await publish_project_task_completed( + task_id=str(task_pk), + project_id=str(project_pk), + task_name=task.checklist_description, + triggered_by=profile + ) + + return cast(ProjectSessionType, cast(object, session)) + + @strawberry.mutation(description="Remove a task completion from an active project session") + async def remove_project_task_completion(self, info: Info, project_id: strawberry.ID, + task_id: strawberry.ID) -> ProjectSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can remove project task completions.") + + svc = SessionService() + + task_pk = UUID(str(task_id)) + project_pk = UUID(str(project_id)) + + # Load task to get name for event + task = await database_sync_to_async(ProjectScopeTask.objects.get)(pk=task_pk) + + session_id = await database_sync_to_async(svc.remove_project_task_completion)( + project_id=project_pk, + task_id=task_pk, + ) + + async def load_session() -> ProjectSession: + return await database_sync_to_async( + lambda: ( + ProjectSession.objects + .select_related("project", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=session_id) + ) + )() + + session = await load_session() + + # Publish event + await publish_project_task_uncompleted( + task_id=str(task_pk), + project_id=str(project_pk), + task_name=task.checklist_description, + triggered_by=profile + ) + + return cast(ProjectSessionType, cast(object, session)) + + @strawberry.mutation(description="Start a new ProjectSession for a scheduled project") + async def open_project_session(self, input: ProjectSessionStartInput, info: Info) -> ProjectSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. Only team members can start project sessions.") + + project_pk = UUID(str(input.project_id)) + + svc = SessionService() + result = await database_sync_to_async(svc.open_session)( + entity_type="project", + entity_id=project_pk, + actor=profile, + ) + + async def load_session() -> ProjectSession: + return await database_sync_to_async( + lambda: ( + ProjectSession.objects + .select_related("project", "account", "account_address", "customer", "scope", "created_by", + "closed_by") + .prefetch_related("completed_tasks") + .get(pk=result.session_id) + ) + )() + + session = await load_session() + + # Notify listeners that the project was updated (status change, etc.) + await pubsub.publish("project_updated", result.entity_id) + await pubsub.publish("project_session_created", result.session_id) + + # Publish event + await publish_project_session_opened( + session_id=str(result.session_id), + project_id=str(result.entity_id), + triggered_by=profile + ) + + return cast(ProjectSessionType, cast(object, session)) + + @strawberry.mutation(description="Close the active ProjectSession") + async def close_project_session(self, input: ProjectSessionCloseInput, info: Info) -> ProjectSessionType: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can close project sessions.") + + project_pk = UUID(str(input.project_id)) + task_ids_raw = input.completed_task_ids or [] + task_pks: List[UUID] = [UUID(str(x)) for x in task_ids_raw] + + # Load ProjectScopeTask objects for the provided IDs + def load_tasks() -> List[ProjectScopeTask]: + qs = ProjectScopeTask.objects.filter(pk__in=task_pks) + return list(qs) + + tasks: List[ProjectScopeTask] = [] + if task_pks: + tasks = await database_sync_to_async(load_tasks)() + if len(tasks) != len(task_pks): + raise ValidationError("One or more project task IDs are invalid.") + + # Let the service manage select_for_update inside its @transaction.atomic + svc = SessionService() + result = await database_sync_to_async(svc.close_session)( + entity_type="project", + entity_id=project_pk, + actor=profile, + tasks=tasks if task_pks else None, + ) + + async def load_session() -> ProjectSession: + return await database_sync_to_async( + lambda: ( + ProjectSession.objects + .select_related( + "project", "account", "account_address", "customer", "scope", "created_by", "closed_by" + ) + .prefetch_related("completed_tasks") + .get(pk=result.session_id) + ) + )() + + session = await load_session() + + await pubsub.publish("project_updated", result.entity_id) + await pubsub.publish("project_session_closed", result.session_id) + + # Get account/customer name and project date for notifications + if session.account: + account_name = session.account.name + elif session.customer: + account_name = session.customer.name + else: + account_name = None + project_date = str(session.project.date) if session.project and session.project.date else None + + # Publish event + await publish_project_session_closed( + session_id=str(result.session_id), + project_id=str(result.entity_id), + triggered_by=profile, + metadata={ + 'account_name': account_name, + 'date': project_date + } + ) + + return cast(ProjectSessionType, cast(object, session)) + + @strawberry.mutation(description="Revert the active ProjectSession back to scheduled (deletes the active session)") + async def revert_project_session(self, input: ProjectSessionRevertInput, info: Info) -> bool: + # Use Oathkeeper authentication + profile = getattr(info.context.request, "profile", None) + if not profile or not isinstance(profile, TeamProfile): + raise ValidationError("Authentication required. 
Only team members can revert project sessions.") + + project_pk = UUID(str(input.project_id)) + + svc = SessionService() + result = await database_sync_to_async(svc.revert_session)( + entity_type="project", + entity_id=project_pk, + actor=profile, + ) + + # Publish project updated to reflect status change + await pubsub.publish("project_updated", result.entity_id) + + # Publish event + await publish_project_session_reverted( + session_id=str(result.session_id), + project_id=str(result.entity_id), + triggered_by=profile + ) + + return True + diff --git a/core/graphql/mutations/session_image.py b/core/graphql/mutations/session_image.py new file mode 100644 index 0000000..1c74a98 --- /dev/null +++ b/core/graphql/mutations/session_image.py @@ -0,0 +1,221 @@ +from typing import Optional, cast +import io +import strawberry +from strawberry import Info +from strawberry.file_uploads import Upload +from strawberry.relay import GlobalID +from channels.db import database_sync_to_async +from django.core.exceptions import ValidationError +from django.core.files.base import ContentFile + +from core.models.session import ServiceSession, ProjectSession +from core.models.session_image import ServiceSessionImage, ProjectSessionImage +from core.graphql.types.session_image import ( + ServiceSessionImageType, + ProjectSessionImageType, +) +from core.graphql.inputs.session_image import ( + ServiceSessionImageUpdateInput, + ProjectSessionImageUpdateInput, +) +from core.graphql.utils import update_object, delete_object, _decode_global_id +from core.services.events import ( + publish_session_image_uploaded, + publish_session_image_updated, + publish_session_image_deleted, + publish_session_media_internal_flagged, +) + + +def _verify_image_bytes(data: bytes) -> None: + """ + Verify the uploaded bytes are a valid image payload using Pillow. + Uses a safe import for UnidentifiedImageError for broader compatibility. 
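+
+    Note: Image.verify() attempts to detect a broken file without decoding
+    the full image data, so a payload that passes here can still fail when
+    the image is actually decoded. Illustrative usage (hypothetical bytes):
+        _verify_image_bytes(valid_png_bytes)   # returns None
+        _verify_image_bytes(b"not an image")   # raises ValidationError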
+ """ + from PIL import Image as PilImage + try: + from PIL import UnidentifiedImageError as _UIE # type: ignore + except (ImportError, AttributeError): + _UIE = None + + invalid_img_exc = (_UIE, OSError, ValueError) if _UIE else (OSError, ValueError) + + try: + PilImage.open(io.BytesIO(data)).verify() + except invalid_img_exc: + raise ValidationError("Uploaded file is not a valid image.") + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Upload an image to a ServiceSession") + async def upload_service_session_image( + self, + info: Info, + session_id: GlobalID, + file: Upload, + title: Optional[str] = None, + notes: Optional[str] = None, + internal: bool = True, + ) -> ServiceSessionImageType: + req_profile = getattr(info.context.request, "profile", None) + if not req_profile: + raise ValidationError("Authentication required.") + if not file or not getattr(file, "filename", None): + raise ValidationError("No file provided.") + + filename: str = file.filename + content_type: str = getattr(file, "content_type", "") or "" + data = await file.read() + if not data: + raise ValidationError("Empty file upload.") + + _verify_image_bytes(data) + + sess_pk = _decode_global_id(session_id) + + def _create_img_sync() -> ServiceSessionImage: + sess = ServiceSession.objects.get(pk=sess_pk) + img = ServiceSessionImage( + title=title or "", + notes=notes or "", + service_session=sess, + uploaded_by_team_profile=req_profile, + content_type=content_type, + internal=internal, + ) + img.image.save(filename, ContentFile(data), save=True) + return img + + instance: ServiceSessionImage = await database_sync_to_async(_create_img_sync)() + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_uploaded( + image_id=str(instance.id), + session_id=str(instance.service_session_id), + is_internal=internal, + triggered_by=profile + ) + + return cast(ServiceSessionImageType, instance) + + @strawberry.mutation(description="Upload an image to a ProjectSession") + async def upload_project_session_image( + self, + info: Info, + session_id: GlobalID, + file: Upload, + title: Optional[str] = None, + notes: Optional[str] = None, + internal: bool = True, + ) -> ProjectSessionImageType: + req_profile = getattr(info.context.request, "profile", None) + if not req_profile: + raise ValidationError("Authentication required.") + if not file or not getattr(file, "filename", None): + raise ValidationError("No file provided.") + + filename: str = file.filename + content_type: str = getattr(file, "content_type", "") or "" + data = await file.read() + if not data: + raise ValidationError("Empty file upload.") + + _verify_image_bytes(data) + + sess_pk = _decode_global_id(session_id) + + def _create_img_sync() -> ProjectSessionImage: + sess = ProjectSession.objects.get(pk=sess_pk) + img = ProjectSessionImage( + title=title or "", + notes=notes or "", + project_session=sess, + uploaded_by_team_profile=req_profile, + content_type=content_type, + internal=internal, + ) + img.image.save(filename, ContentFile(data), save=True) + return img + + instance: ProjectSessionImage = await database_sync_to_async(_create_img_sync)() + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_uploaded( + image_id=str(instance.id), + session_id=str(instance.project_session_id), + is_internal=internal, + triggered_by=profile + ) + + return cast(ProjectSessionImageType, instance) + + @strawberry.mutation(description="Update an existing 
ServiceSession image (e.g., title)") + async def update_service_session_image( + self, info: Info, input: ServiceSessionImageUpdateInput + ) -> ServiceSessionImageType: + payload = {"id": input.id, "title": input.title, "notes": input.notes, "internal": input.internal} + instance = await update_object(payload, ServiceSessionImage) + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_updated( + image_id=str(instance.id), + session_id=str(instance.service_session_id), + triggered_by=profile + ) + + return cast(ServiceSessionImageType, instance) + + @strawberry.mutation(description="Update an existing ProjectSession image (e.g., title)") + async def update_project_session_image( + self, info: Info, input: ProjectSessionImageUpdateInput + ) -> ProjectSessionImageType: + payload = {"id": input.id, "title": input.title, "notes": input.notes, "internal": input.internal} + instance = await update_object(payload, ProjectSessionImage) + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_updated( + image_id=str(instance.id), + session_id=str(instance.project_session_id), + triggered_by=profile + ) + + return cast(ProjectSessionImageType, instance) + + @strawberry.mutation(description="Delete a ServiceSession image") + async def delete_service_session_image(self, info: Info, id: strawberry.ID) -> strawberry.ID: + # Delete the instance (delete_object returns the instance before deletion) + instance = await delete_object(id, ServiceSessionImage) + if not instance: + raise ValueError(f"ServiceSessionImage with ID {id} does not exist") + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_deleted( + image_id=str(instance.id), + session_id=str(instance.service_session_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Delete a ProjectSession image") + async def delete_project_session_image(self, info: Info, id: strawberry.ID) -> strawberry.ID: + # Delete the instance (delete_object returns the instance before deletion) + instance = await delete_object(id, ProjectSessionImage) + if not instance: + raise ValueError(f"ProjectSessionImage with ID {id} does not exist") + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_image_deleted( + image_id=str(instance.id), + session_id=str(instance.project_session_id), + triggered_by=profile + ) + + return id \ No newline at end of file diff --git a/core/graphql/mutations/session_note.py b/core/graphql/mutations/session_note.py new file mode 100644 index 0000000..16c1c1c --- /dev/null +++ b/core/graphql/mutations/session_note.py @@ -0,0 +1,109 @@ +from typing import cast +import strawberry +from strawberry.types import Info +from core.graphql.inputs.session_note import ( + ServiceSessionNoteInput, + ServiceSessionNoteUpdateInput, + ProjectSessionNoteInput, + ProjectSessionNoteUpdateInput, +) +from core.graphql.types.session_note import ( + ServiceSessionNoteType, + ProjectSessionNoteType, +) +from core.models.session import ServiceSessionNote, ProjectSessionNote +from core.graphql.utils import create_object, update_object, delete_object +from core.services.events import ( + publish_session_note_created, publish_session_note_updated, publish_session_note_deleted, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Create a new service session note") + async def create_service_session_note(self, 
input: ServiceSessionNoteInput, info: Info) -> ServiceSessionNoteType: + instance = await create_object(input, ServiceSessionNote) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_created( + note_id=str(instance.id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return cast(ServiceSessionNoteType, instance) + + @strawberry.mutation(description="Update an existing service session note") + async def update_service_session_note(self, input: ServiceSessionNoteUpdateInput, info: Info) -> ServiceSessionNoteType: + instance = await update_object(input, ServiceSessionNote) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_updated( + note_id=str(instance.id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return cast(ServiceSessionNoteType, instance) + + @strawberry.mutation(description="Delete a service session note") + async def delete_service_session_note(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ServiceSessionNote) + if not instance: + raise ValueError(f"ServiceSessionNote with ID {id} does not exist") + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_deleted( + note_id=str(id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Create a new project session note") + async def create_project_session_note(self, input: ProjectSessionNoteInput, info: Info) -> ProjectSessionNoteType: + instance = await create_object(input, ProjectSessionNote) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_created( + note_id=str(instance.id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return cast(ProjectSessionNoteType, instance) + + @strawberry.mutation(description="Update an existing project session note") + async def update_project_session_note(self, input: ProjectSessionNoteUpdateInput, info: Info) -> ProjectSessionNoteType: + instance = await update_object(input, ProjectSessionNote) + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_updated( + note_id=str(instance.id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return cast(ProjectSessionNoteType, instance) + + @strawberry.mutation(description="Delete a project session note") + async def delete_project_session_note(self, id: strawberry.ID, info: Info) -> strawberry.ID: + instance = await delete_object(id, ProjectSessionNote) + if not instance: + raise ValueError(f"ProjectSessionNote with ID {id} does not exist") + + # Publish event for notifications + profile = getattr(info.context.request, 'profile', None) + await publish_session_note_deleted( + note_id=str(id), + session_id=str(instance.session_id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/mutations/session_video.py b/core/graphql/mutations/session_video.py new file mode 100644 index 0000000..cc5885f --- /dev/null +++ b/core/graphql/mutations/session_video.py @@ -0,0 +1,330 @@ +from typing import Optional, cast +import strawberry +from strawberry import Info +from strawberry.file_uploads import Upload +from strawberry.relay import GlobalID +from channels.db import database_sync_to_async +from 
django.core.exceptions import ValidationError +from django.core.files.base import ContentFile + +from core.models.session import ServiceSession, ProjectSession +from core.models.session_video import ServiceSessionVideo, ProjectSessionVideo +from core.graphql.types.session_video import ( + ServiceSessionVideoType, + ProjectSessionVideoType, +) +from core.services.video import ( + verify_video_bytes, + extract_video_metadata, + generate_video_thumbnail, +) +from core.graphql.utils import update_object, delete_object, _decode_global_id +from core.graphql.inputs.session_video import ( + ServiceSessionVideoUpdateInput, + ProjectSessionVideoUpdateInput, +) +from core.services.events import ( + publish_session_video_uploaded, + publish_session_video_updated, + publish_session_video_deleted, + publish_session_media_internal_flagged, +) + + +@strawberry.type +class Mutation: + @strawberry.mutation(description="Upload a video to a ServiceSession") + async def upload_service_session_video( + self, + info: Info, + session_id: GlobalID, + file: Upload, + title: Optional[str] = None, + notes: Optional[str] = None, + internal: bool = True, + ) -> ServiceSessionVideoType: + """ + Upload a video file to a ServiceSession. + + Accepts video formats: MP4, MOV, WebM, AVI, MKV + Maximum file size: 250 MB + """ + req_profile = getattr(info.context.request, "profile", None) + if not req_profile: + raise ValidationError("Authentication required.") + if not file or not getattr(file, "filename", None): + raise ValidationError("No file provided.") + + filename: str = file.filename + data = await file.read() + if not data: + raise ValidationError("Empty file upload.") + + # Validate video file and get content type + content_type = verify_video_bytes(data, filename) + + sess_pk = _decode_global_id(session_id) + + def _create_video_sync() -> ServiceSessionVideo: + from django.core.files import File + import tempfile + import os + + sess = ServiceSession.objects.get(pk=sess_pk) + + # Write video to temp file for ffmpeg processing (required for S3 storage) + video_ext = os.path.splitext(filename)[1] or '.mp4' + video_fd, video_tmp_path = tempfile.mkstemp(suffix=video_ext) + thumb_fd, thumb_tmp_path = tempfile.mkstemp(suffix='.jpg') + + try: + # Write video bytes to temp file + os.write(video_fd, data) + os.close(video_fd) + os.close(thumb_fd) + + # Extract metadata from temp file (before saving to S3) + metadata = extract_video_metadata(video_tmp_path) + + # Generate thumbnail from temp file + thumbnail_generated = generate_video_thumbnail(video_tmp_path, thumb_tmp_path, timestamp=1.0) + + video = ServiceSessionVideo( + title=title or "", + notes=notes or "", + service_session=sess, + uploaded_by_team_profile=req_profile, + content_type=content_type, + internal=internal, + ) + + # Set metadata before saving + if metadata: + video.width, video.height, video.duration_seconds = metadata + + # Save video to storage (S3 or local) + video.video.save(filename, ContentFile(data), save=True) + + # Save thumbnail if generated + if thumbnail_generated and os.path.exists(thumb_tmp_path): + with open(thumb_tmp_path, 'rb') as thumb_file: + video.thumbnail.save( + f'thumb_{video.id}.jpg', + File(thumb_file), + save=False + ) + + video.save() + return video + + finally: + # Clean up temp files + if os.path.exists(video_tmp_path): + os.unlink(video_tmp_path) + if os.path.exists(thumb_tmp_path): + os.unlink(thumb_tmp_path) + + instance: ServiceSessionVideo = await database_sync_to_async(_create_video_sync)() + + # Publish events + profile 
= getattr(info.context.request, 'profile', None) + await publish_session_video_uploaded( + video_id=str(instance.id), + session_id=str(instance.service_session_id), + is_internal=internal, + triggered_by=profile + ) + + # If marked as internal, also publish internal flag event + if internal: + await publish_session_media_internal_flagged( + media_id=str(instance.id), + media_type='SessionVideo', + session_id=str(instance.service_session_id), + triggered_by=profile + ) + + return cast(ServiceSessionVideoType, instance) + + @strawberry.mutation(description="Upload a video to a ProjectSession") + async def upload_project_session_video( + self, + info: Info, + session_id: GlobalID, + file: Upload, + title: Optional[str] = None, + notes: Optional[str] = None, + internal: bool = True, + ) -> ProjectSessionVideoType: + """ + Upload a video file to a ProjectSession. + + Accepts video formats: MP4, MOV, WebM, AVI, MKV + Maximum file size: 250 MB + """ + req_profile = getattr(info.context.request, "profile", None) + if not req_profile: + raise ValidationError("Authentication required.") + if not file or not getattr(file, "filename", None): + raise ValidationError("No file provided.") + + filename: str = file.filename + data = await file.read() + if not data: + raise ValidationError("Empty file upload.") + + # Validate video file and get content type + content_type = verify_video_bytes(data, filename) + + sess_pk = _decode_global_id(session_id) + + def _create_video_sync() -> ProjectSessionVideo: + from django.core.files import File + import tempfile + import os + + sess = ProjectSession.objects.get(pk=sess_pk) + + # Write video to temp file for ffmpeg processing (required for S3 storage) + video_ext = os.path.splitext(filename)[1] or '.mp4' + video_fd, video_tmp_path = tempfile.mkstemp(suffix=video_ext) + thumb_fd, thumb_tmp_path = tempfile.mkstemp(suffix='.jpg') + + try: + # Write video bytes to temp file + os.write(video_fd, data) + os.close(video_fd) + os.close(thumb_fd) + + # Extract metadata from temp file (before saving to S3) + metadata = extract_video_metadata(video_tmp_path) + + # Generate thumbnail from temp file + thumbnail_generated = generate_video_thumbnail(video_tmp_path, thumb_tmp_path, timestamp=1.0) + + video = ProjectSessionVideo( + title=title or "", + notes=notes or "", + project_session=sess, + uploaded_by_team_profile=req_profile, + content_type=content_type, + internal=internal, + ) + + # Set metadata before saving + if metadata: + video.width, video.height, video.duration_seconds = metadata + + # Save video to storage (S3 or local) + video.video.save(filename, ContentFile(data), save=True) + + # Save thumbnail if generated + if thumbnail_generated and os.path.exists(thumb_tmp_path): + with open(thumb_tmp_path, 'rb') as thumb_file: + video.thumbnail.save( + f'thumb_{video.id}.jpg', + File(thumb_file), + save=False + ) + + video.save() + return video + + finally: + # Clean up temp files + if os.path.exists(video_tmp_path): + os.unlink(video_tmp_path) + if os.path.exists(thumb_tmp_path): + os.unlink(thumb_tmp_path) + + instance: ProjectSessionVideo = await database_sync_to_async(_create_video_sync)() + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_video_uploaded( + video_id=str(instance.id), + session_id=str(instance.project_session_id), + is_internal=internal, + triggered_by=profile + ) + + # If marked as internal, also publish internal flag event + if internal: + await publish_session_media_internal_flagged( + 
media_id=str(instance.id), + media_type='SessionVideo', + session_id=str(instance.project_session_id), + triggered_by=profile + ) + + return cast(ProjectSessionVideoType, instance) + + @strawberry.mutation(description="Update an existing ServiceSession video (e.g., title)") + async def update_service_session_video( + self, info: Info, input: ServiceSessionVideoUpdateInput + ) -> ServiceSessionVideoType: + payload = {"id": input.id, "title": input.title, "notes": input.notes, "internal": input.internal} + instance = await update_object(payload, ServiceSessionVideo) + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_video_updated( + video_id=str(instance.id), + session_id=str(instance.service_session_id), + triggered_by=profile + ) + + return cast(ServiceSessionVideoType, instance) + + @strawberry.mutation(description="Update an existing ProjectSession video (e.g., title)") + async def update_project_session_video( + self, info: Info, input: ProjectSessionVideoUpdateInput + ) -> ProjectSessionVideoType: + payload = {"id": input.id, "title": input.title, "notes": input.notes, "internal": input.internal} + instance = await update_object(payload, ProjectSessionVideo) + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_video_updated( + video_id=str(instance.id), + session_id=str(instance.project_session_id), + triggered_by=profile + ) + + return cast(ProjectSessionVideoType, instance) + + @strawberry.mutation(description="Delete a ServiceSession video") + async def delete_service_session_video(self, info: Info, id: strawberry.ID) -> strawberry.ID: + """Delete a video from a ServiceSession.""" + # Delete the instance (delete_object returns the instance before deletion) + instance = await delete_object(id, ServiceSessionVideo) + if not instance: + raise ValueError(f"ServiceSessionVideo with ID {id} does not exist") + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_video_deleted( + video_id=str(instance.id), + session_id=str(instance.service_session_id), + triggered_by=profile + ) + + return id + + @strawberry.mutation(description="Delete a ProjectSession video") + async def delete_project_session_video(self, info: Info, id: strawberry.ID) -> strawberry.ID: + """Delete a video from a ProjectSession.""" + # Delete the instance (delete_object returns the instance before deletion) + instance = await delete_object(id, ProjectSessionVideo) + if not instance: + raise ValueError(f"ProjectSessionVideo with ID {id} does not exist") + + # Publish events + profile = getattr(info.context.request, 'profile', None) + await publish_session_video_deleted( + video_id=str(instance.id), + session_id=str(instance.project_session_id), + triggered_by=profile + ) + + return id diff --git a/core/graphql/pubsub.py b/core/graphql/pubsub.py new file mode 100644 index 0000000..b5e9999 --- /dev/null +++ b/core/graphql/pubsub.py @@ -0,0 +1,49 @@ +from contextlib import asynccontextmanager +from typing import AsyncGenerator, Any, AsyncIterator + +from channels.layers import get_channel_layer + + +class PubSub: + """ + A PubSub implementation that uses the Django Channels layer. + """ + def __init__(self): + self.channel_layer = get_channel_layer() + + async def publish(self, channel: str, message: Any): + """ + Publishes a message to the given channel. 
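+
+        A minimal usage sketch (the group name and payload are illustrative,
+        not fixed by this class):
+
+            await pubsub.publish("conversation:123", {"kind": "new_message"})
+
+        Subscribers attached to the same group via subscribe() receive the
+        payload unchanged.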
+ """ + await self.channel_layer.group_send( + channel, + { + "type": "channel.message", + "message": message, + }, + ) + + @asynccontextmanager + async def subscribe(self, channel: str) -> AsyncGenerator[AsyncIterator[Any], None]: + """ + Subscribes to a channel and yields an async iterator over messages. + Designed to be used with 'async with'. + """ + channel_name = await self.channel_layer.new_channel() + await self.channel_layer.group_add(channel, channel_name) + + async def _subscriber(): + while True: + message = await self.channel_layer.receive(channel_name) + if message.get("type") == "channel.message": + yield message["message"] + + try: + yield _subscriber() + finally: + # This cleanup code will run automatically when the 'async with' block is exited. + await self.channel_layer.group_discard(channel, channel_name) + + +# Create a single global instance for the application to use. +pubsub = PubSub() \ No newline at end of file diff --git a/core/graphql/queries/__init__.py b/core/graphql/queries/__init__.py new file mode 100644 index 0000000..2c0f65e --- /dev/null +++ b/core/graphql/queries/__init__.py @@ -0,0 +1,18 @@ +from core.graphql.queries.customer import * +from core.graphql.queries.account import * +from core.graphql.queries.profile import * +from core.graphql.queries.project import * +from core.graphql.queries.service import * +from core.graphql.queries.labor import * +from core.graphql.queries.revenue import * +from core.graphql.queries.schedule import * +from core.graphql.queries.invoice import * +from core.graphql.queries.report import * +from core.graphql.queries.account_punchlist import * +from core.graphql.queries.project_punchlist import * +from core.graphql.queries.scope import * +from core.graphql.queries.scope_template import * +from core.graphql.queries.project_scope import * +from core.graphql.queries.project_scope_template import * +from core.graphql.queries.session import * +from core.graphql.queries.session_image import * \ No newline at end of file diff --git a/core/graphql/queries/account.py b/core/graphql/queries/account.py new file mode 100644 index 0000000..e52cb02 --- /dev/null +++ b/core/graphql/queries/account.py @@ -0,0 +1,13 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.account import AccountType, AccountAddressType, AccountContactType +from core.graphql.filters.account import AccountFilter, AccountContactFilter + +@strawberry.type +class Query: + account: Optional[AccountType] = sd.node() + account_address: Optional[AccountAddressType] = sd.node() + account_contact: Optional[AccountContactType] = sd.node() + accounts: List[AccountType] = sd.field(filters=AccountFilter) + account_contacts: List[AccountContactType] = sd.field(filters=AccountContactFilter) \ No newline at end of file diff --git a/core/graphql/queries/account_punchlist.py b/core/graphql/queries/account_punchlist.py new file mode 100644 index 0000000..5bd4596 --- /dev/null +++ b/core/graphql/queries/account_punchlist.py @@ -0,0 +1,12 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.account_punchlist import AccountPunchlistType +from core.graphql.filters.account_punchlist import AccountPunchlistFilter + +@strawberry.type +class Query: + account_punchlist: Optional[AccountPunchlistType] = sd.node() + account_punchlists: List[AccountPunchlistType] = sd.field( + filters=AccountPunchlistFilter + ) \ No newline at end of file diff --git 
a/core/graphql/queries/customer.py b/core/graphql/queries/customer.py
new file mode 100644
index 0000000..e67873a
--- /dev/null
+++ b/core/graphql/queries/customer.py
@@ -0,0 +1,13 @@
+import strawberry
+import strawberry_django as sd
+from typing import List, Optional
+from core.graphql.types.customer import CustomerType, CustomerAddressType, CustomerContactType
+from core.graphql.filters.customer import CustomerFilter, CustomerContactFilter
+
+@strawberry.type
+class Query:
+    customer: Optional[CustomerType] = sd.node()
+    customer_address: Optional[CustomerAddressType] = sd.node()
+    customer_contact: Optional[CustomerContactType] = sd.node()
+    customers: List[CustomerType] = sd.field(filters=CustomerFilter)
+    customer_contacts: List[CustomerContactType] = sd.field(filters=CustomerContactFilter)
\ No newline at end of file
diff --git a/core/graphql/queries/dashboard.py b/core/graphql/queries/dashboard.py
new file mode 100644
index 0000000..bf960d6
--- /dev/null
+++ b/core/graphql/queries/dashboard.py
@@ -0,0 +1,274 @@
+from datetime import date, timedelta
+from typing import Optional
+import strawberry
+from strawberry import ID
+from django.db.models import Prefetch
+from asgiref.sync import sync_to_async
+from core.graphql.types.dashboard import (
+    AdminDashboardData,
+    TeamDashboardData,
+    CustomerDashboardData,
+)
+from core.models.service import Service
+from core.models.project import Project
+from core.models.invoice import Invoice
+from core.models.report import Report
+from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate
+from core.models.project_scope_template import (
+    ProjectScopeTemplate,
+    ProjectAreaTemplate,
+    ProjectTaskTemplate,
+)
+
+
+def parse_month_range(month: str) -> tuple[date, date]:
+    """Parse a month string like '2024-01' into inclusive start and end dates."""
+    year, month_num = map(int, month.split('-'))
+    start = date(year, month_num, 1)
+
+    # Calculate end of month
+    if month_num == 12:
+        end = date(year + 1, 1, 1)
+    else:
+        end = date(year, month_num + 1, 1)
+
+    # The first day of the next month is an exclusive bound; step back one
+    # day to get the inclusive last day of the requested month.
+    end = end - timedelta(days=1)
+
+    return start, end
+
+
+def _fetch_admin_dashboard_sync(
+    start: date,
+    end: date,
+    invoice_status: Optional[str],
+) -> AdminDashboardData:
+    """Synchronous database fetching for admin dashboard."""
+    # Services - optimized with prefetch for team_members
+    services = list(
+        Service.objects
+        .filter(date__gte=start, date__lte=end)
+        .select_related('account_address', 'account_address__account')
+        .prefetch_related('team_members')
+        .order_by('date', 'id')
+    )
+
+    # Projects - optimized with prefetch for team_members
+    projects = list(
+        Project.objects
+        .filter(date__gte=start, date__lte=end)
+        .select_related('account_address', 'account_address__account', 'customer')
+        .prefetch_related('team_members')
+        .order_by('date', 'id')
+    )
+
+    # Invoices - show all (pages need full list, not month-filtered)
+    invoices_qs = Invoice.objects.select_related('customer')
+    if invoice_status:
+        invoices_qs = invoices_qs.filter(status=invoice_status)
+    invoices = list(invoices_qs.order_by('-date', '-id'))
+
+    # Reports - show all (pages need full list, not month-filtered)
+    reports = list(
+        Report.objects
+        .select_related('team_member')
+        .order_by('-date', '-id')
+    )
+
+    # Service Scope Templates - with nested areas and tasks prefetched
+    task_prefetch = Prefetch(
+        'task_templates',
+        queryset=TaskTemplate.objects.order_by('order', 'id')
+    )
+    area_prefetch = Prefetch(
+        'area_templates',
+        queryset=AreaTemplate.objects.prefetch_related(task_prefetch).order_by('order', 'name')
+    )
+    service_scope_templates = list(
+        ScopeTemplate.objects
+        .prefetch_related(area_prefetch)
+        .order_by('name')
+    )
+
+    # Project Scope Templates - with nested categories and tasks prefetched
+    project_task_prefetch = Prefetch(
+        'task_templates',
+        queryset=ProjectTaskTemplate.objects.order_by('order', 'id')
+    )
+    category_prefetch = Prefetch(
+        'category_templates',
+        queryset=ProjectAreaTemplate.objects.prefetch_related(project_task_prefetch).order_by('order', 'name')
+    )
+    project_scope_templates = list(
+        ProjectScopeTemplate.objects
+        .prefetch_related(category_prefetch)
+        .order_by('name')
+    )
+
+    return AdminDashboardData(
+        services=services,
+        projects=projects,
+        invoices=invoices,
+        reports=reports,
+        service_scope_templates=service_scope_templates,
+        project_scope_templates=project_scope_templates,
+    )
+
+
+def _fetch_team_dashboard_sync(
+    team_profile_id: str,
+    start: date,
+    end: date,
+) -> TeamDashboardData:
+    """Synchronous database fetching for team dashboard."""
+    # Services assigned to this team member
+    services = list(
+        Service.objects
+        .filter(
+            team_members__id=team_profile_id,
+            date__gte=start,
+            date__lte=end
+        )
+        .select_related('account_address', 'account_address__account')
+        .prefetch_related('team_members')
+        .order_by('date', 'id')
+    )
+
+    # Projects assigned to this team member
+    projects = list(
+        Project.objects
+        .filter(
+            team_members__id=team_profile_id,
+            date__gte=start,
+            date__lte=end
+        )
+        .select_related('account_address', 'account_address__account', 'customer')
+        .prefetch_related('team_members')
+        .order_by('date', 'id')
+    )
+
+    # Reports for this team member
+    reports = list(
+        Report.objects
+        .filter(
+            team_member_id=team_profile_id,
+            date__gte=start,
+            date__lte=end
+        )
+        .select_related('team_member')
+        .order_by('-date', '-id')
+    )
+
+    return TeamDashboardData(
+        services=services,
+        projects=projects,
+        reports=reports,
+    )
+
+
+def _fetch_customer_dashboard_sync(
+    customer_id: str,
+) -> CustomerDashboardData:
+    """Synchronous database fetching for customer dashboard."""
+    # Services for customer's accounts
+    services = list(
+        Service.objects
+        .filter(account_address__account__customer_id=customer_id)
+        .select_related('account_address', 'account_address__account')
+        .prefetch_related('team_members')
+        .order_by('-date', '-id')[:100]  # Limit for performance
+    )
+
+    # Projects for customer
+    projects = list(
+        Project.objects
+        .filter(customer_id=customer_id)
+        .select_related('account_address', 'account_address__account', 'customer')
+        .prefetch_related('team_members')
+        .order_by('-date', '-id')[:100]  # Limit for performance
+    )
+
+    # Invoices for customer
+    invoices = list(
+        Invoice.objects
+        .filter(customer_id=customer_id)
+        .select_related('customer')
+        .order_by('-date', '-id')[:100]  # Limit for performance
+    )
+
+    return CustomerDashboardData(
+        services=services,
+        projects=projects,
+        invoices=invoices,
+    )
+
+
+@strawberry.type
+class Query:
+    @strawberry.field(
+        name="adminDashboard",
+        description="Consolidated dashboard data for admin/team leader users. "
+                    "Returns services and projects for the given month, plus all "
+                    "invoices, reports, and scope templates, in a single optimized query."
+    )
+    async def admin_dashboard(
+        self,
+        info,
+        month: str,
+        invoice_status: Optional[str] = None,
+    ) -> AdminDashboardData:
+        """Fetch all admin dashboard data in a single optimized query.
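+
+        For example, month='2024-02' resolves via parse_month_range to the
+        inclusive range date(2024, 2, 1) .. date(2024, 2, 29).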
+
+        Args:
+            month: Month string in format 'YYYY-MM' (e.g., '2024-01')
+            invoice_status: Optional invoice status filter (e.g., 'SENT', 'PAID')
+
+        Returns:
+            AdminDashboardData with all dashboard entities
+        """
+        start, end = parse_month_range(month)
+        return await sync_to_async(_fetch_admin_dashboard_sync)(start, end, invoice_status)
+
+    @strawberry.field(
+        name="teamDashboard",
+        description="Consolidated dashboard data for team member users. "
+                    "Returns services, projects, and reports assigned to the given team member."
+    )
+    async def team_dashboard(
+        self,
+        info,
+        team_profile_id: ID,
+        month: str,
+    ) -> TeamDashboardData:
+        """Fetch all team dashboard data in a single optimized query.
+
+        Args:
+            team_profile_id: The team member's profile ID
+            month: Month string in format 'YYYY-MM' (e.g., '2024-01')
+
+        Returns:
+            TeamDashboardData with services, projects, and reports for the team member
+        """
+        start, end = parse_month_range(month)
+        return await sync_to_async(_fetch_team_dashboard_sync)(team_profile_id, start, end)
+
+    @strawberry.field(
+        name="customerDashboard",
+        description="Consolidated dashboard data for customer users. "
+                    "Returns services, projects, and invoices for the customer."
+    )
+    async def customer_dashboard(
+        self,
+        info,
+        customer_id: ID,
+    ) -> CustomerDashboardData:
+        """Fetch all customer dashboard data in a single optimized query.
+
+        Args:
+            customer_id: The customer's profile ID
+
+        Returns:
+            CustomerDashboardData with services, projects, and invoices
+        """
+        return await sync_to_async(_fetch_customer_dashboard_sync)(customer_id)
diff --git a/core/graphql/queries/event.py b/core/graphql/queries/event.py
new file mode 100644
index 0000000..ce1a60a
--- /dev/null
+++ b/core/graphql/queries/event.py
@@ -0,0 +1,206 @@
+import strawberry
+from typing import List, Optional
+from strawberry.types import Info
+from channels.db import database_sync_to_async
+from django.contrib.contenttypes.models import ContentType
+
+from core.graphql.types.event import EventType, NotificationRuleType, NotificationType, NotificationDeliveryType
+from core.models.events import Event, NotificationRule, Notification, NotificationDelivery
+from core.models.enums import NotificationStatusChoices
+
+
+@strawberry.type
+class Query:
+    @strawberry.field(description="Get all events")
+    async def events(
+        self,
+        info: Info,
+        limit: Optional[int] = 50,
+        offset: Optional[int] = 0
+    ) -> List[EventType]:
+        profile = getattr(info.context.request, 'profile', None)
+        if not profile:
+            raise PermissionError("Authentication required")
+
+        events = await database_sync_to_async(
+            lambda: list(Event.objects.all().order_by('-created_at')[offset:offset + limit])
+        )()
+
+        return events
+
+    @strawberry.field(description="Get event by ID")
+    async def event(
+        self,
+        info: Info,
+        id: strawberry.ID
+    ) -> Optional[EventType]:
+        profile = getattr(info.context.request, 'profile', None)
+        if not profile:
+            raise PermissionError("Authentication required")
+
+        try:
+            event = await database_sync_to_async(Event.objects.get)(pk=id)
+            return event
+        except Event.DoesNotExist:
+            return None
+
+    @strawberry.field(description="Get all notification rules")
+    async def notification_rules(
+        self,
+        info: Info,
+        is_active: Optional[bool] = None
+    ) -> List[NotificationRuleType]:
+        profile = getattr(info.context.request, 'profile', None)
+        if not profile:
+            raise PermissionError("Authentication required")
+
+        # Only admins can view notification rules
+        from core.models.profile import TeamProfile
+        from core.models.enums import
RoleChoices + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + queryset = NotificationRule.objects.prefetch_related( + 'target_team_profiles', + 'target_customer_profiles' + ) + if is_active is not None: + queryset = queryset.filter(is_active=is_active) + + rules = await database_sync_to_async(lambda: list(queryset.order_by('name')))() + return rules + + @strawberry.field(description="Get notification rule by ID") + async def notification_rule( + self, + info: Info, + id: strawberry.ID + ) -> Optional[NotificationRuleType]: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Only admins can view notification rules + from core.models.profile import TeamProfile + from core.models.enums import RoleChoices + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + try: + rule = await database_sync_to_async( + lambda: NotificationRule.objects.prefetch_related( + 'target_team_profiles', + 'target_customer_profiles' + ).get(pk=id) + )() + return rule + except NotificationRule.DoesNotExist: + return None + + @strawberry.field(description="Get notifications for current user") + async def my_notifications( + self, + info: Info, + unread_only: Optional[bool] = False, + limit: Optional[int] = 50, + offset: Optional[int] = 0 + ) -> List[NotificationType]: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + @database_sync_to_async + def get_notifications(): + # Get content type for the profile + content_type = ContentType.objects.get_for_model(type(profile)) + + # Build query + queryset = Notification.objects.filter( + recipient_content_type=content_type, + recipient_object_id=profile.id + ) + + if unread_only: + queryset = queryset.filter(read_at__isnull=True) + + # Get notifications + return list( + queryset.select_related('event', 'rule') + .order_by('-created_at')[offset:offset + limit] + ) + + return await get_notifications() + + @strawberry.field(description="Get unread notification count for current user") + async def my_unread_notification_count(self, info: Info) -> int: + profile = getattr(info.context.request, 'profile', None) + if not profile: + return 0 + + # Get content type for the profile + content_type = await database_sync_to_async(ContentType.objects.get_for_model)(profile) + + # Count unread notifications + count = await database_sync_to_async( + Notification.objects.filter( + recipient_content_type=content_type, + recipient_object_id=profile.id, + read_at__isnull=True + ).count + )() + + return count + + @strawberry.field(description="Get notification by ID") + async def notification( + self, + info: Info, + id: strawberry.ID + ) -> Optional[NotificationType]: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + @database_sync_to_async + def get_and_verify(): + notification = Notification.objects.select_related('event', 'rule').get(pk=id) + + # Verify user has access to this notification + content_type = ContentType.objects.get_for_model(type(profile)) + + if (notification.recipient_content_type != content_type or + str(notification.recipient_object_id) != str(profile.id)): + raise PermissionError("Not authorized to view this notification") + + return notification + + try: + return await 
get_and_verify() + except Notification.DoesNotExist: + return None + + @strawberry.field(description="Get notification delivery status") + async def notification_deliveries( + self, + info: Info, + notification_id: strawberry.ID + ) -> List[NotificationDeliveryType]: + profile = getattr(info.context.request, 'profile', None) + if not profile: + raise PermissionError("Authentication required") + + # Only admins can view delivery status + from core.models.profile import TeamProfile + from core.models.enums import RoleChoices + if not isinstance(profile, TeamProfile) or profile.role != RoleChoices.ADMIN: + raise PermissionError("Admin access required") + + deliveries = await database_sync_to_async( + lambda: list( + NotificationDelivery.objects.filter(notification_id=notification_id) + .select_related('notification') + .order_by('-created_at') + ) + )() + + return deliveries diff --git a/core/graphql/queries/invoice.py b/core/graphql/queries/invoice.py new file mode 100644 index 0000000..3d68a27 --- /dev/null +++ b/core/graphql/queries/invoice.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.invoice import InvoiceType +from core.graphql.filters.invoice import InvoiceFilter + +@strawberry.type +class Query: + invoice: Optional[InvoiceType] = sd.node() + invoices: List[InvoiceType] = sd.field(filters=InvoiceFilter) \ No newline at end of file diff --git a/core/graphql/queries/labor.py b/core/graphql/queries/labor.py new file mode 100644 index 0000000..0b088b2 --- /dev/null +++ b/core/graphql/queries/labor.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.labor import LaborType +from core.graphql.filters.labor import LaborFilter + +@strawberry.type +class Query: + labor: Optional[LaborType] = sd.node() + labors: List[LaborType] = sd.field(filters=LaborFilter) \ No newline at end of file diff --git a/core/graphql/queries/messaging.py b/core/graphql/queries/messaging.py new file mode 100644 index 0000000..bcaf1a2 --- /dev/null +++ b/core/graphql/queries/messaging.py @@ -0,0 +1,148 @@ +from typing import List, Optional, Iterable +import strawberry +import strawberry_django as sd +from strawberry import ID +from strawberry_django.relay import DjangoCursorConnection +from django.contrib.contenttypes.models import ContentType + +from core.graphql.filters.messaging import ConversationFilter, MessageFilter +from core.graphql.types.messaging import ConversationType, MessageType +from core.models.messaging import Conversation, Message +from core.models.profile import TeamProfile, CustomerProfile + + +@strawberry.type +class Query: + """Messaging queries""" + + conversation: Optional[ConversationType] = sd.node() + conversations: List[ConversationType] = sd.field(filters=ConversationFilter) + + message: Optional[MessageType] = sd.node() + messages: List[MessageType] = sd.field(filters=MessageFilter) + + @sd.connection( + DjangoCursorConnection["ConversationType"], + name="getMyConversations", + description="Return conversations for the authenticated user (inbox)", + filters=ConversationFilter, + ) + def get_my_conversations( + self, + info, + include_archived: bool = False, + ) -> Iterable["Conversation"]: + """ + Get all conversations for the current authenticated user. + Returns conversations ordered by last message timestamp. 
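+
+        Sketch of the underlying lookup (values illustrative): for a
+        TeamProfile the generic participant filter below resolves to
+
+            ct = ContentType.objects.get_for_model(TeamProfile)
+            Conversation.objects.filter(
+                participants__participant_content_type=ct,
+                participants__participant_object_id=profile.id,
+            )
+
+        so team and customer inboxes are kept separate.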
+ """ + # Get profile directly from context (not Django User model) + profile = getattr(info.context.request, 'profile', None) + if not profile: + return Conversation.objects.none() + + # Determine the profile's content type + content_type = ContentType.objects.get_for_model(type(profile)) + + # Build query + queryset = Conversation.objects.filter( + participants__participant_content_type=content_type, + participants__participant_object_id=profile.id, + ) + + # Filter archived conversations unless explicitly requested + if not include_archived: + queryset = queryset.filter(participants__is_archived=False) + + return queryset.prefetch_related( + 'participants', + 'participants__participant_content_type', + ).distinct().order_by('-last_message_at', '-created_at') + + @sd.connection( + DjangoCursorConnection["ConversationType"], + name="getConversationsByEntity", + description="Return conversations linked to a specific entity (Project, Service, Account, etc.)", + filters=ConversationFilter, + ) + def get_conversations_by_entity( + self, + entity_type: str, + entity_id: ID, + ) -> Iterable["Conversation"]: + """ + Get all conversations linked to a specific entity. + entity_type: Model name (e.g., 'Project', 'Service', 'Account') + entity_id: UUID of the entity + """ + from django.apps import apps + + try: + # Get the content type for the entity + model = apps.get_model('core', entity_type) + content_type = ContentType.objects.get_for_model(model) + + return Conversation.objects.filter( + entity_content_type=content_type, + entity_object_id=entity_id + ).prefetch_related( + 'participants', + 'participants__participant_content_type', + ).order_by('-last_message_at') + except Exception: + return Conversation.objects.none() + + @strawberry.field(description="Get unread message count for the authenticated user") + async def unread_message_count(self, info) -> int: + """ + Get total unread message count across all conversations for the current user. + """ + from channels.db import database_sync_to_async + + # Get profile directly from context (not Django User model) + profile = getattr(info.context.request, 'profile', None) + if not profile: + return 0 + + @database_sync_to_async + def get_count(): + # Determine the profile's content type + content_type = ContentType.objects.get_for_model(type(profile)) + + # Sum unread counts from all participant records + from core.models.messaging import ConversationParticipant + from django.db.models import Sum + + total = ConversationParticipant.objects.filter( + participant_content_type=content_type, + participant_object_id=profile.id, + is_archived=False + ).aggregate(total=Sum('unread_count'))['total'] + + return total if total else 0 + + return await get_count() + + @sd.connection( + DjangoCursorConnection["MessageType"], + name="getMessagesByConversation", + description="Return messages for a specific conversation", + filters=MessageFilter, + ) + def get_messages_by_conversation( + self, + conversation_id: ID, + include_system: bool = True, + ) -> Iterable["Message"]: + """ + Get all messages for a specific conversation. 
+ """ + queryset = Message.objects.filter(conversation_id=conversation_id) + + if not include_system: + queryset = queryset.filter(is_system_message=False) + + return queryset.prefetch_related( + 'read_receipts', + 'sender_content_type', + ).order_by('created_at') diff --git a/core/graphql/queries/profile.py b/core/graphql/queries/profile.py new file mode 100644 index 0000000..7bb7ffb --- /dev/null +++ b/core/graphql/queries/profile.py @@ -0,0 +1,27 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional, Union +from core.graphql.types.profile import CustomerProfileType, TeamProfileType +from core.graphql.filters.profile import CustomerProfileFilter +from strawberry.types import Info + +@strawberry.type +class Query: + customer_profile: Optional[CustomerProfileType] = sd.node() + customer_profiles: List[CustomerProfileType] = sd.field( + filters=CustomerProfileFilter + ) + + team_profile: Optional[TeamProfileType] = sd.node() + team_profiles: List[TeamProfileType] = sd.field() + + @strawberry.field(description="Get the currently authenticated user's profile") + def me(self, info: Info) -> Optional[Union[CustomerProfileType, TeamProfileType]]: + """ + Returns the current user's Django profile (Team or Customer). + Profile is set by OryHeaderAuthenticationMiddleware from Oathkeeper headers. + """ + profile = getattr(info.context.request, 'profile', None) + if not profile: + return None + return profile \ No newline at end of file diff --git a/core/graphql/queries/project.py b/core/graphql/queries/project.py new file mode 100644 index 0000000..3522bec --- /dev/null +++ b/core/graphql/queries/project.py @@ -0,0 +1,35 @@ +from typing import List, Optional, Iterable +import strawberry +import strawberry_django as sd +from strawberry import ID +from strawberry_django.relay import DjangoCursorConnection +from core.graphql.filters.project import ProjectFilter +from core.graphql.types.project import ProjectType +from core.graphql.enums import DateOrdering +from core.models.project import Project + + +@strawberry.type +class Query: + project: Optional[ProjectType] = sd.node() + projects: List[ProjectType] = sd.field(filters=ProjectFilter) + + @sd.connection( + DjangoCursorConnection["ProjectType"], + name="getProjectsByTeamMember", + description="Return projects that include the given TeamProfile ID as a team member", + filters=ProjectFilter, + ) + def get_projects_by_team_member( + self, + team_profile_id: ID, + ordering: Optional[DateOrdering] = DateOrdering.DESC, + ) -> Iterable["Project"]: + order_prefix = "" if ordering == DateOrdering.ASC else "-" + return ( + Project.objects + .filter(team_members__id=team_profile_id) + .select_related('account_address', 'account_address__account', 'customer') + .prefetch_related('team_members') + .order_by(f"{order_prefix}date", f"{order_prefix}id") + ) diff --git a/core/graphql/queries/project_punchlist.py b/core/graphql/queries/project_punchlist.py new file mode 100644 index 0000000..ad4e089 --- /dev/null +++ b/core/graphql/queries/project_punchlist.py @@ -0,0 +1,12 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.project_punchlist import ProjectPunchlistType +from core.graphql.filters.project_punchlist import ProjectPunchlistFilter + +@strawberry.type +class Query: + project_punchlist: Optional[ProjectPunchlistType] = sd.node() + project_punchlists: List[ProjectPunchlistType] = sd.field( + filters=ProjectPunchlistFilter + ) \ No newline at end of file diff 
--git a/core/graphql/queries/project_scope.py b/core/graphql/queries/project_scope.py new file mode 100644 index 0000000..6dad4a3 --- /dev/null +++ b/core/graphql/queries/project_scope.py @@ -0,0 +1,47 @@ +from typing import List, Optional, Iterable +import strawberry +import strawberry_django as sd +from strawberry import ID +from strawberry_django.relay import DjangoCursorConnection +from core.graphql.filters.project_scope import ( + ProjectScopeFilter, + ProjectScopeCategoryFilter, + ProjectScopeTaskFilter, ProjectScopeTaskCompletionFilter, +) +from core.graphql.types.project_scope import ( + ProjectScopeType, + ProjectScopeCategoryType, + ProjectScopeTaskType, + ProjectScopeTaskCompletionType +) +from core.models.project_scope import ProjectScopeCategory, ProjectScopeTask + + +@strawberry.type +class Query: + project_scope: Optional[ProjectScopeType] = sd.node() + project_scopes: List[ProjectScopeType] = sd.field(filters=ProjectScopeFilter) + project_scope_category: Optional[ProjectScopeCategoryType] = sd.node() + project_scope_categories: List[ProjectScopeCategoryType] = sd.field(filters=ProjectScopeCategoryFilter) + project_scope_task: Optional[ProjectScopeTaskType] = sd.node() + project_scope_tasks: List[ProjectScopeTaskType] = sd.field(filters=ProjectScopeTaskFilter) + project_scope_task_completion: Optional[ProjectScopeTaskCompletionType] = sd.node() + project_scope_task_completions: List[ProjectScopeTaskCompletionType] = sd.field(filters=ProjectScopeTaskCompletionFilter) + + @sd.connection( + DjangoCursorConnection["ProjectScopeCategoryType"], + name="getProjectScopeCategories", + description="Return categories for a given ProjectScope", + filters=ProjectScopeCategoryFilter, + ) + def get_project_scope_categories(self, scope_id: ID) -> Iterable[ProjectScopeCategory]: + return ProjectScopeCategory.objects.filter(scope_id=scope_id).order_by("order", "name") + + @sd.connection( + DjangoCursorConnection["ProjectScopeTaskType"], + name="getProjectScopeTasks", + description="Return tasks for a given ProjectScopeCategory", + filters=ProjectScopeTaskFilter, + ) + def get_project_scope_tasks(self, category_id: ID) -> Iterable[ProjectScopeTask]: + return ProjectScopeTask.objects.filter(category_id=category_id).order_by("order", "id") diff --git a/core/graphql/queries/project_scope_template.py b/core/graphql/queries/project_scope_template.py new file mode 100644 index 0000000..e756f72 --- /dev/null +++ b/core/graphql/queries/project_scope_template.py @@ -0,0 +1,49 @@ +from typing import List, Optional, Iterable +import strawberry +import strawberry_django as sd +from strawberry import ID +from strawberry_django.relay import DjangoCursorConnection +from core.graphql.filters.project_scope_template import ( + ProjectScopeTemplateFilter, + ProjectAreaTemplateFilter, + ProjectTaskTemplateFilter, +) +from core.graphql.types.project_scope_template import ( + ProjectScopeTemplateType, + ProjectAreaTemplateType, + ProjectTaskTemplateType, +) +from core.models.project_scope_template import ( + ProjectAreaTemplate, + ProjectTaskTemplate, +) + + +@strawberry.type +class Query: + project_scope_template: Optional[ProjectScopeTemplateType] = sd.node() + project_scope_templates: List[ProjectScopeTemplateType] = sd.field(filters=ProjectScopeTemplateFilter) + + project_area_template: Optional[ProjectAreaTemplateType] = sd.node() + project_area_templates: List[ProjectAreaTemplateType] = sd.field(filters=ProjectAreaTemplateFilter) + + project_task_template: Optional[ProjectTaskTemplateType] = sd.node() + 
project_task_templates: List[ProjectTaskTemplateType] = sd.field(filters=ProjectTaskTemplateFilter) + + @sd.connection( + DjangoCursorConnection["ProjectAreaTemplateType"], + name="getProjectAreaTemplates", + description="Return area templates for a given ProjectScopeTemplate", + filters=ProjectAreaTemplateFilter, + ) + def get_project_area_templates(self, scope_template_id: ID) -> Iterable[ProjectAreaTemplate]: + return ProjectAreaTemplate.objects.filter(scope_template_id=scope_template_id).order_by("order", "name") + + @sd.connection( + DjangoCursorConnection["ProjectTaskTemplateType"], + name="getProjectTaskTemplates", + description="Return task templates for a given ProjectAreaTemplate", + filters=ProjectTaskTemplateFilter, + ) + def get_project_task_templates(self, area_template_id: ID) -> Iterable[ProjectTaskTemplate]: + return ProjectTaskTemplate.objects.filter(area_template_id=area_template_id).order_by("order", "id") diff --git a/core/graphql/queries/report.py b/core/graphql/queries/report.py new file mode 100644 index 0000000..d406400 --- /dev/null +++ b/core/graphql/queries/report.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.report import ReportType +from core.graphql.filters.report import ReportFilter + +@strawberry.type +class Query: + report: Optional[ReportType] = sd.node() + reports: List[ReportType] = sd.field(filters=ReportFilter) \ No newline at end of file diff --git a/core/graphql/queries/revenue.py b/core/graphql/queries/revenue.py new file mode 100644 index 0000000..34bc88e --- /dev/null +++ b/core/graphql/queries/revenue.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.revenue import RevenueType +from core.graphql.filters.revenue import RevenueFilter + +@strawberry.type +class Query: + revenue: Optional[RevenueType] = sd.node() + revenues: List[RevenueType] = sd.field(filters=RevenueFilter) \ No newline at end of file diff --git a/core/graphql/queries/schedule.py b/core/graphql/queries/schedule.py new file mode 100644 index 0000000..ec61760 --- /dev/null +++ b/core/graphql/queries/schedule.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.schedule import ScheduleType +from core.graphql.filters.schedule import ScheduleFilter + +@strawberry.type +class Query: + schedule: Optional[ScheduleType] = sd.node() + schedules: List[ScheduleType] = sd.field(filters=ScheduleFilter) \ No newline at end of file diff --git a/core/graphql/queries/scope.py b/core/graphql/queries/scope.py new file mode 100644 index 0000000..82bb6f8 --- /dev/null +++ b/core/graphql/queries/scope.py @@ -0,0 +1,17 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.scope import ScopeType, AreaType, TaskType, TaskCompletionType +from core.graphql.filters.scope import ScopeFilter, AreaFilter, TaskFilter, TaskCompletionFilter + + +@strawberry.type +class Query: + scope: Optional[ScopeType] = sd.node() + area: Optional[AreaType] = sd.node() + task: Optional[TaskType] = sd.node() + task_completion: Optional[TaskCompletionType] = sd.node() + scopes: List[ScopeType] = sd.field(filters=ScopeFilter) + areas: List[AreaType] = sd.field(filters=AreaFilter) + tasks: List[TaskType] = sd.field(filters=TaskFilter) + task_completions: List[TaskCompletionType] = sd.field(filters=TaskCompletionFilter) diff --git 
a/core/graphql/queries/scope_template.py b/core/graphql/queries/scope_template.py
new file mode 100644
index 0000000..1646bdb
--- /dev/null
+++ b/core/graphql/queries/scope_template.py
@@ -0,0 +1,24 @@
+import strawberry
+import strawberry_django as sd
+from typing import List, Optional
+from core.graphql.types.scope_template import (
+    ScopeTemplateType,
+    AreaTemplateType,
+    TaskTemplateType,
+)
+from core.graphql.filters.scope_template import (
+    ScopeTemplateFilter,
+    AreaTemplateFilter,
+    TaskTemplateFilter,
+)
+
+
+@strawberry.type
+class Query:
+    scope_template: Optional[ScopeTemplateType] = sd.node()
+    area_template: Optional[AreaTemplateType] = sd.node()
+    task_template: Optional[TaskTemplateType] = sd.node()
+
+    scope_templates: List[ScopeTemplateType] = sd.field(filters=ScopeTemplateFilter)
+    area_templates: List[AreaTemplateType] = sd.field(filters=AreaTemplateFilter)
+    task_templates: List[TaskTemplateType] = sd.field(filters=TaskTemplateFilter)
diff --git a/core/graphql/queries/service.py b/core/graphql/queries/service.py
new file mode 100644
index 0000000..dd5bea8
--- /dev/null
+++ b/core/graphql/queries/service.py
@@ -0,0 +1,35 @@
+from typing import List, Optional, Iterable
+import strawberry
+import strawberry_django as sd
+from strawberry import ID
+from strawberry_django.relay import DjangoCursorConnection
+from core.graphql.filters.service import ServiceFilter
+from core.graphql.types.service import ServiceType
+from core.graphql.enums import DateOrdering
+from core.models.service import Service
+
+
+@strawberry.type
+class Query:
+    service: Optional[ServiceType] = sd.node()
+    services: List[ServiceType] = sd.field(filters=ServiceFilter)
+
+    @sd.connection(
+        DjangoCursorConnection["ServiceType"],
+        name="getServicesByTeamMember",
+        description="Return services that include the given TeamProfile ID as a team member",
+        filters=ServiceFilter,
+    )
+    def get_services_by_team_member(
+        self,
+        team_profile_id: ID,
+        ordering: Optional[DateOrdering] = DateOrdering.DESC,
+    ) -> Iterable["Service"]:
+        order_prefix = "" if ordering == DateOrdering.ASC else "-"
+        return (
+            Service.objects
+            .filter(team_members__id=team_profile_id)
+            .select_related('account_address', 'account_address__account')
+            .prefetch_related('team_members')
+            .order_by(f"{order_prefix}date", f"{order_prefix}id")
+        )
diff --git a/core/graphql/queries/session.py b/core/graphql/queries/session.py
new file mode 100644
index 0000000..8b707a3
--- /dev/null
+++ b/core/graphql/queries/session.py
@@ -0,0 +1,45 @@
+# Session queries: active service/project session lookups.
+from typing import Optional, List, cast
+from uuid import UUID
+import strawberry
+import strawberry_django as sd
+from strawberry import Info
+from asgiref.sync import sync_to_async
+from core.graphql.types.session import ServiceSessionType, ProjectSessionType
+from core.graphql.filters.session import ServiceSessionFilter, ProjectSessionFilter
+from core.models.session import ServiceSession, ProjectSession
+
+
+@strawberry.type
+class Query:
+    service_session: Optional[ServiceSessionType] = sd.node()
+    service_sessions: List[ServiceSessionType] = sd.field(filters=ServiceSessionFilter)
+
+    @strawberry.field(description="Get the active service session for a given service")
+    async def active_service_session(self, service_id: UUID, info: Info) -> Optional[ServiceSessionType]:
+        def fetch():
+            qs = (
+                ServiceSession.objects
+                .select_related("service", "account", "account_address", "customer", "scope")
+                .prefetch_related("completed_tasks")
+            )
+            return qs.filter(service_id=service_id,
end__isnull=True).first() + + obj = await sync_to_async(fetch, thread_sensitive=True)() + return cast(Optional[ServiceSessionType], obj) + + project_session: Optional[ProjectSessionType] = sd.node() + project_sessions: List[ProjectSessionType] = sd.field(filters=ProjectSessionFilter) + + @strawberry.field(description="Get the active project session for a given project") + async def active_project_session(self, project_id: UUID, info: Info) -> Optional[ProjectSessionType]: + def fetch(): + qs = ( + ProjectSession.objects + .select_related("project", "account", "account_address", "customer", "scope") + .prefetch_related("completed_tasks") + ) + return qs.filter(project_id=project_id, end__isnull=True).first() + + obj = await sync_to_async(fetch, thread_sensitive=True)() + return cast(Optional[ProjectSessionType], obj) diff --git a/core/graphql/queries/session_image.py b/core/graphql/queries/session_image.py new file mode 100644 index 0000000..76ccfcb --- /dev/null +++ b/core/graphql/queries/session_image.py @@ -0,0 +1,19 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.session_image import ( + ServiceSessionImageType, + ProjectSessionImageType, +) +from core.graphql.filters.session_image import ( + ServiceSessionImageFilter, + ProjectSessionImageFilter, +) + +@strawberry.type +class Query: + service_session_image: Optional[ServiceSessionImageType] = sd.node() + project_session_image: Optional[ProjectSessionImageType] = sd.node() + + service_session_images: List[ServiceSessionImageType] = sd.field(filters=ServiceSessionImageFilter) + project_session_images: List[ProjectSessionImageType] = sd.field(filters=ProjectSessionImageFilter) \ No newline at end of file diff --git a/core/graphql/queries/session_note.py b/core/graphql/queries/session_note.py new file mode 100644 index 0000000..cfae83a --- /dev/null +++ b/core/graphql/queries/session_note.py @@ -0,0 +1,20 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.session_note import ( + ServiceSessionNoteType, + ProjectSessionNoteType, +) +from core.graphql.filters.session_note import ( + ServiceSessionNoteFilter, + ProjectSessionNoteFilter, +) + + +@strawberry.type +class Query: + service_session_note: Optional[ServiceSessionNoteType] = sd.node() + project_session_note: Optional[ProjectSessionNoteType] = sd.node() + + service_session_notes: List[ServiceSessionNoteType] = sd.field(filters=ServiceSessionNoteFilter) + project_session_notes: List[ProjectSessionNoteType] = sd.field(filters=ProjectSessionNoteFilter) diff --git a/core/graphql/queries/session_video.py b/core/graphql/queries/session_video.py new file mode 100644 index 0000000..b21431a --- /dev/null +++ b/core/graphql/queries/session_video.py @@ -0,0 +1,20 @@ +import strawberry +import strawberry_django as sd +from typing import List, Optional +from core.graphql.types.session_video import ( + ServiceSessionVideoType, + ProjectSessionVideoType, +) +from core.graphql.filters.session_video import ( + ServiceSessionVideoFilter, + ProjectSessionVideoFilter, +) + + +@strawberry.type +class Query: + service_session_video: Optional[ServiceSessionVideoType] = sd.node() + project_session_video: Optional[ProjectSessionVideoType] = sd.node() + + service_session_videos: List[ServiceSessionVideoType] = sd.field(filters=ServiceSessionVideoFilter) + project_session_videos: List[ProjectSessionVideoType] = sd.field(filters=ProjectSessionVideoFilter) diff --git 
a/core/graphql/schema.py b/core/graphql/schema.py new file mode 100644 index 0000000..2d8c260 --- /dev/null +++ b/core/graphql/schema.py @@ -0,0 +1,157 @@ +import strawberry +from core.graphql.queries.customer import Query as CustomerQuery +from core.graphql.queries.account import Query as AccountQuery +from core.graphql.queries.profile import Query as ProfileQuery +from core.graphql.queries.project import Query as ProjectQuery +from core.graphql.queries.service import Query as ServiceQuery +from core.graphql.queries.labor import Query as LaborQuery +from core.graphql.queries.revenue import Query as RevenueQuery +from core.graphql.queries.schedule import Query as ScheduleQuery +from core.graphql.queries.invoice import Query as InvoiceQuery +from core.graphql.queries.report import Query as ReportQuery +from core.graphql.queries.account_punchlist import Query as AccountPunchlistQuery +from core.graphql.queries.project_punchlist import Query as ProjectPunchlistQuery +from core.graphql.queries.scope import Query as ScopeQuery +from core.graphql.queries.project_scope import Query as ProjectScopeQuery +from core.graphql.queries.project_scope_template import Query as ProjectScopeTemplateQuery +from core.graphql.queries.scope_template import Query as ScopeTemplateQuery +from core.graphql.queries.session import Query as SessionQuery +from core.graphql.queries.session_image import Query as SessionImageQuery +from core.graphql.queries.session_video import Query as SessionVideoQuery +from core.graphql.queries.session_note import Query as SessionNoteQuery +from core.graphql.queries.event import Query as EventQuery +from core.graphql.queries.messaging import Query as MessagingQuery +from core.graphql.queries.dashboard import Query as DashboardQuery +from core.graphql.mutations.customer import Mutation as CustomerMutation +from core.graphql.mutations.account import Mutation as AccountMutation +from core.graphql.mutations.profile import Mutation as ProfileMutation +from core.graphql.mutations.project import Mutation as ProjectMutation +from core.graphql.mutations.service import Mutation as ServiceMutation +from core.graphql.mutations.labor import Mutation as LaborMutation +from core.graphql.mutations.revenue import Mutation as RevenueMutation +from core.graphql.mutations.schedule import Mutation as ScheduleMutation +from core.graphql.mutations.invoice import Mutation as InvoiceMutation +from core.graphql.mutations.report import Mutation as ReportMutation +from core.graphql.mutations.account_punchlist import Mutation as AccountPunchlistMutation +from core.graphql.mutations.project_punchlist import Mutation as ProjectPunchlistMutation +from core.graphql.mutations.scope import Mutation as ScopeMutation +from core.graphql.mutations.scope_template import Mutation as ScopeTemplateMutation +from core.graphql.mutations.project_scope import Mutation as ProjectScopeMutation +from core.graphql.mutations.project_scope_template import Mutation as ProjectScopeTemplateMutation +from core.graphql.mutations.session import Mutation as SessionMutation +from core.graphql.mutations.session_image import Mutation as SessionImageMutation +from core.graphql.mutations.session_video import Mutation as SessionVideoMutation +from core.graphql.mutations.session_note import Mutation as SessionNoteMutation +from core.graphql.mutations.event import Mutation as EventMutation +from core.graphql.mutations.messaging import Mutation as MessagingMutation +from core.graphql.subscriptions.service import Subscription as ServiceSubscription +from 
core.graphql.subscriptions.project import Subscription as ProjectSubscription +from core.graphql.subscriptions.account import Subscription as AccountSubscription +from core.graphql.subscriptions.account_address import Subscription as AccountAddressSubscription +from core.graphql.subscriptions.account_contact import Subscription as AccountContactSubscription +from core.graphql.subscriptions.invoice import Subscription as InvoiceSubscription +from core.graphql.subscriptions.labor import Subscription as LaborSubscription +from core.graphql.subscriptions.revenue import Subscription as RevenueSubscription +from core.graphql.subscriptions.schedule import Subscription as ScheduleSubscription +from core.graphql.subscriptions.customer import Subscription as CustomerSubscription +from core.graphql.subscriptions.customer_address import Subscription as CustomerAddressSubscription +from core.graphql.subscriptions.customer_contact import Subscription as CustomerContactSubscription +from core.graphql.subscriptions.report import Subscription as ReportSubscription +from core.graphql.subscriptions.profile import Subscription as ProfileSubscription +from core.graphql.subscriptions.account_punchlist import Subscription as AccountPunchlistSubscription +from core.graphql.subscriptions.project_punchlist import Subscription as ProjectPunchlistSubscription +from core.graphql.subscriptions.scope import Subscription as ScopeSubscription +from core.graphql.subscriptions.scope_template import Subscription as ScopeTemplateSubscription +from core.graphql.subscriptions.project_scope import Subscription as ProjectScopeSubscription +from core.graphql.subscriptions.project_scope_template import Subscription as ProjectScopeTemplateSubscription +from core.graphql.subscriptions.messaging import Subscription as MessagingSubscription + + +@strawberry.type +class Query( + CustomerQuery, + AccountQuery, + ProjectQuery, + ServiceQuery, + LaborQuery, + RevenueQuery, + ScheduleQuery, + InvoiceQuery, + ReportQuery, + AccountPunchlistQuery, + ProjectPunchlistQuery, + ProfileQuery, + ScopeQuery, + ScopeTemplateQuery, + ProjectScopeQuery, + ProjectScopeTemplateQuery, + SessionQuery, + SessionImageQuery, + SessionVideoQuery, + SessionNoteQuery, + EventQuery, + MessagingQuery, + DashboardQuery, +): + """Root GraphQL query type combining all query groups.""" + pass + + +@strawberry.type +class Mutation( + CustomerMutation, + AccountMutation, + ProjectMutation, + ServiceMutation, + LaborMutation, + RevenueMutation, + ScheduleMutation, + InvoiceMutation, + ReportMutation, + AccountPunchlistMutation, + ProjectPunchlistMutation, + ProfileMutation, + ScopeMutation, + ScopeTemplateMutation, + ProjectScopeMutation, + ProjectScopeTemplateMutation, + SessionMutation, + SessionImageMutation, + SessionVideoMutation, + SessionNoteMutation, + EventMutation, + MessagingMutation, +): + """Root GraphQL mutation type combining all mutation groups.""" + pass + + +@strawberry.type +class Subscription( + ServiceSubscription, + ProjectSubscription, + AccountSubscription, + AccountAddressSubscription, + AccountContactSubscription, + InvoiceSubscription, + LaborSubscription, + RevenueSubscription, + ScheduleSubscription, + CustomerSubscription, + CustomerAddressSubscription, + CustomerContactSubscription, + ReportSubscription, + ProfileSubscription, + AccountPunchlistSubscription, + ProjectPunchlistSubscription, + ScopeSubscription, + ScopeTemplateSubscription, + ProjectScopeSubscription, + ProjectScopeTemplateSubscription, + MessagingSubscription, +): + """Root 
GraphQL subscription type combining all subscription groups."""
+    pass
+
+
+schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription)
diff --git a/core/graphql/subscriptions/__init__.py b/core/graphql/subscriptions/__init__.py
new file mode 100644
index 0000000..5239d09
--- /dev/null
+++ b/core/graphql/subscriptions/__init__.py
@@ -0,0 +1,21 @@
+from core.graphql.subscriptions.service import *
+from core.graphql.subscriptions.project import *
+from core.graphql.subscriptions.account import *
+from core.graphql.subscriptions.account_address import *
+from core.graphql.subscriptions.account_contact import *
+from core.graphql.subscriptions.invoice import *
+from core.graphql.subscriptions.labor import *
+from core.graphql.subscriptions.revenue import *
+from core.graphql.subscriptions.schedule import *
+from core.graphql.subscriptions.customer import *
+from core.graphql.subscriptions.customer_address import *
+from core.graphql.subscriptions.customer_contact import *
+from core.graphql.subscriptions.report import *
+from core.graphql.subscriptions.account_punchlist import *
+from core.graphql.subscriptions.project_punchlist import *
+from core.graphql.subscriptions.profile import *
+from core.graphql.subscriptions.scope import *
+from core.graphql.subscriptions.scope_template import *
+from core.graphql.subscriptions.project_scope import *
+from core.graphql.subscriptions.project_scope_template import *
+from core.graphql.subscriptions.messaging import *
diff --git a/core/graphql/subscriptions/account.py b/core/graphql/subscriptions/account.py
new file mode 100644
index 0000000..3bbd9cb
--- /dev/null
+++ b/core/graphql/subscriptions/account.py
@@ -0,0 +1,52 @@
+from typing import AsyncGenerator
+import strawberry
+from channels.db import database_sync_to_async
+from strawberry.types import Info
+from core.graphql.pubsub import pubsub
+from core.graphql.types.account import AccountType
+from core.graphql.utils import _extract_id
+from core.models.account import Account
+
+
+@strawberry.type
+class Subscription:
+    @strawberry.subscription(description="Subscribe to account creation events")
+    async def account_created(self, info: Info) -> AsyncGenerator[AccountType, None]:
+        user = info.context.user
+        if not user or not user.is_authenticated:
+            raise PermissionError("Authentication required")
+
+        async with pubsub.subscribe("account_created") as subscriber:
+            async for payload in subscriber:
+                account_id = await _extract_id(payload)
+                try:
+                    instance = await database_sync_to_async(Account.objects.get)(pk=account_id)
+                except Account.DoesNotExist:
+                    continue
+                yield instance
+
+    @strawberry.subscription(description="Subscribe to account updates")
+    async def account_updated(self, info: Info) -> AsyncGenerator[AccountType, None]:
+        user = info.context.user
+        if not user or not user.is_authenticated:
+            raise PermissionError("Authentication required")
+
+        async with pubsub.subscribe("account_updated") as subscriber:
+            async for payload in subscriber:
+                account_id = await _extract_id(payload)
+                try:
+                    instance = await database_sync_to_async(Account.objects.get)(pk=account_id)
+                except Account.DoesNotExist:
+                    continue
+                yield instance
+
+    @strawberry.subscription(description="Subscribe to account deletion events")
+    async def account_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]:
+        user = info.context.user
+        if not user or not user.is_authenticated:
+            raise PermissionError("Authentication required")
+
+        async with pubsub.subscribe("account_deleted") as subscriber:
+            async for payload in subscriber:
+                account_id = await _extract_id(payload)
+                yield strawberry.ID(account_id)
diff --git
a/core/graphql/subscriptions/account_address.py b/core/graphql/subscriptions/account_address.py new file mode 100644 index 0000000..2533ccf --- /dev/null +++ b/core/graphql/subscriptions/account_address.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.account import AccountAddressType +from core.graphql.utils import _extract_id +from core.models.account import AccountAddress + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to account address creation events") + async def account_address_created(self, info: Info) -> AsyncGenerator[AccountAddressType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_address_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountAddress.objects.get)(pk=entity_id) + except AccountAddress.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to account address updates") + async def account_address_updated(self, info: Info) -> AsyncGenerator[AccountAddressType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_address_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountAddress.objects.get)(pk=entity_id) + except AccountAddress.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to account address deletion events") + async def account_address_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_address_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/account_contact.py b/core/graphql/subscriptions/account_contact.py new file mode 100644 index 0000000..1e71ec1 --- /dev/null +++ b/core/graphql/subscriptions/account_contact.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.account import AccountContactType +from core.graphql.utils import _extract_id +from core.models.account import AccountContact + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to account contact creation events") + async def account_contact_created(self, info: Info) -> AsyncGenerator[AccountContactType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_contact_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountContact.objects.get)(pk=entity_id) + except AccountContact.DoesNotExist: + continue + yield instance + + 
@strawberry.subscription(description="Subscribe to account contact updates") + async def account_contact_updated(self, info: Info) -> AsyncGenerator[AccountContactType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_contact_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountContact.objects.get)(pk=entity_id) + except AccountContact.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to account contact deletion events") + async def account_contact_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_contact_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/account_punchlist.py b/core/graphql/subscriptions/account_punchlist.py new file mode 100644 index 0000000..cf9c520 --- /dev/null +++ b/core/graphql/subscriptions/account_punchlist.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.account_punchlist import AccountPunchlistType +from core.graphql.utils import _extract_id +from core.models.account_punchlist import AccountPunchlist + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to account punchlist creation events") + async def account_punchlist_created(self, info: Info) -> AsyncGenerator[AccountPunchlistType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_punchlist_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountPunchlist.objects.get)(pk=entity_id) + except AccountPunchlist.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to account punchlist updates") + async def account_punchlist_updated(self, info: Info) -> AsyncGenerator[AccountPunchlistType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_punchlist_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AccountPunchlist.objects.get)(pk=entity_id) + except AccountPunchlist.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to account punchlist deletion events") + async def account_punchlist_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("account_punchlist_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/customer.py 
b/core/graphql/subscriptions/customer.py new file mode 100644 index 0000000..0f48436 --- /dev/null +++ b/core/graphql/subscriptions/customer.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.customer import CustomerType +from core.graphql.utils import _extract_id +from core.models.customer import Customer + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to customer creation events") + async def customer_created(self, info: Info) -> AsyncGenerator[CustomerType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Customer.objects.get)(pk=entity_id) + except Customer.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer updates") + async def customer_updated(self, info: Info) -> AsyncGenerator[CustomerType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Customer.objects.get)(pk=entity_id) + except Customer.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer deletion events") + async def customer_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/customer_address.py b/core/graphql/subscriptions/customer_address.py new file mode 100644 index 0000000..127322d --- /dev/null +++ b/core/graphql/subscriptions/customer_address.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.customer import CustomerAddressType +from core.graphql.utils import _extract_id +from core.models.customer import CustomerAddress + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to customer address creation events") + async def customer_address_created(self, info: Info) -> AsyncGenerator[CustomerAddressType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_address_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerAddress.objects.get)(pk=entity_id) + except CustomerAddress.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer address updates") + async def customer_address_updated(self, info: Info) -> AsyncGenerator[CustomerAddressType, None]: + 
user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_address_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerAddress.objects.get)(pk=entity_id) + except CustomerAddress.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer address deletion events") + async def customer_address_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_address_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/customer_contact.py b/core/graphql/subscriptions/customer_contact.py new file mode 100644 index 0000000..8a0be82 --- /dev/null +++ b/core/graphql/subscriptions/customer_contact.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.customer import CustomerContactType +from core.graphql.utils import _extract_id +from core.models.customer import CustomerContact + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to customer contact creation events") + async def customer_contact_created(self, info: Info) -> AsyncGenerator[CustomerContactType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_contact_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerContact.objects.get)(pk=entity_id) + except CustomerContact.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer contact updates") + async def customer_contact_updated(self, info: Info) -> AsyncGenerator[CustomerContactType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_contact_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerContact.objects.get)(pk=entity_id) + except CustomerContact.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer contact deletion events") + async def customer_contact_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_contact_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/invoice.py b/core/graphql/subscriptions/invoice.py new file mode 100644 index 0000000..4ee1ff6 --- /dev/null +++ b/core/graphql/subscriptions/invoice.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import 
database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.invoice import InvoiceType +from core.graphql.utils import _extract_id +from core.models.invoice import Invoice + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to invoice creation events") + async def invoice_created(self, info: Info) -> AsyncGenerator[InvoiceType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("invoice_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Invoice.objects.get)(pk=entity_id) + except Invoice.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to invoice updates") + async def invoice_updated(self, info: Info) -> AsyncGenerator[InvoiceType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("invoice_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Invoice.objects.get)(pk=entity_id) + except Invoice.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to invoice deletion events") + async def invoice_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("invoice_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/labor.py b/core/graphql/subscriptions/labor.py new file mode 100644 index 0000000..6e05e33 --- /dev/null +++ b/core/graphql/subscriptions/labor.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.labor import LaborType +from core.graphql.utils import _extract_id +from core.models.labor import Labor + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to labor creation events") + async def labor_created(self, info: Info) -> AsyncGenerator[LaborType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("labor_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Labor.objects.get)(pk=entity_id) + except Labor.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to labor updates") + async def labor_updated(self, info: Info) -> AsyncGenerator[LaborType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("labor_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Labor.objects.get)(pk=entity_id) + except Labor.DoesNotExist: + continue + yield instance + + 
@strawberry.subscription(description="Subscribe to labor deletion events") + async def labor_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("labor_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/messaging.py b/core/graphql/subscriptions/messaging.py new file mode 100644 index 0000000..c8db7cc --- /dev/null +++ b/core/graphql/subscriptions/messaging.py @@ -0,0 +1,211 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from strawberry.relay import GlobalID + +from core.graphql.pubsub import pubsub +from core.graphql.types.messaging import ConversationType, MessageType +from core.graphql.utils import _extract_id +from core.models.messaging import Conversation, Message + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to new conversations") + async def conversation_created(self, info: Info) -> AsyncGenerator[ConversationType, None]: + """Real-time notification when a new conversation is created""" + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("conversation_created") as subscriber: + async for payload in subscriber: + conversation_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Conversation.objects.get)(pk=conversation_id) + except Conversation.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to conversation updates") + async def conversation_updated(self, info: Info) -> AsyncGenerator[ConversationType, None]: + """Real-time notification when a conversation is updated""" + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("conversation_updated") as subscriber: + async for payload in subscriber: + conversation_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Conversation.objects.get)(pk=conversation_id) + except Conversation.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to new messages in a specific conversation") + async def message_sent( + self, + info: Info, + conversation_id: GlobalID + ) -> AsyncGenerator[MessageType, None]: + """ + Real-time notification when a new message is sent in a specific conversation. + Clients should subscribe to this for each conversation they have open. 
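+
+        A client document for this subscription might look like the sketch
+        below. The operation name is illustrative and the selection set is an
+        assumption; substitute whatever fields MessageType actually exposes:
+
+            subscription OnMessageSent($conversationId: GlobalID!) {
+                messageSent(conversationId: $conversationId) {
+                    id
+                }
+            }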
+ """ + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("message_sent") as subscriber: + async for payload in subscriber: + # payload is a dict with message_id and conversation_id + if isinstance(payload, dict): + msg_conversation_id = payload.get("conversation_id") + message_id = payload.get("message_id") + + # Only yield messages for the requested conversation + if str(msg_conversation_id) != str(conversation_id): + continue + + try: + instance = await database_sync_to_async(Message.objects.get)(pk=message_id) + except Message.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to new messages across all conversations") + async def message_received(self, info: Info) -> AsyncGenerator[MessageType, None]: + """ + Real-time notification for all new messages across all conversations. + Useful for showing notifications or updating unread counts. + """ + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + # Get user's profile to filter relevant messages + profile = None + if hasattr(user, 'team_profile'): + profile = user.team_profile + elif hasattr(user, 'customer_profile'): + profile = user.customer_profile + + if not profile: + raise PermissionError("User profile not found") + + async with pubsub.subscribe("message_sent") as subscriber: + async for payload in subscriber: + if isinstance(payload, dict): + message_id = payload.get("message_id") + + try: + instance = await database_sync_to_async(Message.objects.select_related('conversation').get)(pk=message_id) + + # Check if user is a participant in this conversation + @database_sync_to_async + def is_participant(): + from django.contrib.contenttypes.models import ContentType + content_type = ContentType.objects.get_for_model(type(profile)) + return instance.conversation.participants.filter( + participant_content_type=content_type, + participant_object_id=profile.id + ).exists() + + # Only yield if user is a participant (and not the sender) + if await is_participant(): + # Don't notify sender of their own messages + if instance.sender_object_id != profile.id: + yield instance + + except Message.DoesNotExist: + continue + + @strawberry.subscription(description="Subscribe to conversation read events") + async def conversation_read( + self, + info: Info, + conversation_id: GlobalID + ) -> AsyncGenerator[ConversationType, None]: + """ + Real-time notification when someone marks a conversation as read. + Useful for showing read receipts. 
+ """ + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("conversation_read") as subscriber: + async for payload in subscriber: + if isinstance(payload, dict): + conv_id = payload.get("conversation_id") + + # Only yield for the requested conversation + if str(conv_id) != str(conversation_id): + continue + + try: + instance = await database_sync_to_async(Conversation.objects.get)(pk=conv_id) + except Conversation.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to participant changes") + async def participant_added( + self, + info: Info, + conversation_id: GlobalID + ) -> AsyncGenerator[ConversationType, None]: + """Real-time notification when a participant is added to a conversation""" + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("participant_added") as subscriber: + async for payload in subscriber: + if isinstance(payload, dict): + conv_id = payload.get("conversation_id") + + if str(conv_id) != str(conversation_id): + continue + + try: + instance = await database_sync_to_async(Conversation.objects.get)(pk=conv_id) + except Conversation.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to conversation deletion events") + async def conversation_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + """Real-time notification when a conversation is deleted""" + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("conversation_deleted") as subscriber: + async for payload in subscriber: + conversation_id = await _extract_id(payload) + yield strawberry.ID(conversation_id) + + @strawberry.subscription(description="Subscribe to message deletion events") + async def message_deleted( + self, + info: Info, + conversation_id: GlobalID + ) -> AsyncGenerator[strawberry.ID, None]: + """Real-time notification when a message is deleted""" + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("message_deleted") as subscriber: + async for payload in subscriber: + if isinstance(payload, dict): + conv_id = payload.get("conversation_id") + message_id = payload.get("message_id") + + if str(conv_id) != str(conversation_id): + continue + + yield strawberry.ID(message_id) diff --git a/core/graphql/subscriptions/profile.py b/core/graphql/subscriptions/profile.py new file mode 100644 index 0000000..74714a7 --- /dev/null +++ b/core/graphql/subscriptions/profile.py @@ -0,0 +1,95 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.profile import CustomerProfileType, TeamProfileType +from core.graphql.utils import _extract_id +from core.models.profile import CustomerProfile, TeamProfile + + +@strawberry.type +class Subscription: + # CustomerProfile subscriptions + @strawberry.subscription(description="Subscribe to customer profile creation events") + async def customer_profile_created(self, info: Info) -> AsyncGenerator[CustomerProfileType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") 
+ + async with pubsub.subscribe("customer_profile_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerProfile.objects.get)(pk=entity_id) + except CustomerProfile.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer profile updates") + async def customer_profile_updated(self, info: Info) -> AsyncGenerator[CustomerProfileType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_profile_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(CustomerProfile.objects.get)(pk=entity_id) + except CustomerProfile.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to customer profile deletion events") + async def customer_profile_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("customer_profile_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # TeamProfile subscriptions + @strawberry.subscription(description="Subscribe to team profile creation events") + async def team_profile_created(self, info: Info) -> AsyncGenerator[TeamProfileType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("team_profile_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TeamProfile.objects.get)(pk=entity_id) + except TeamProfile.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to team profile updates") + async def team_profile_updated(self, info: Info) -> AsyncGenerator[TeamProfileType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("team_profile_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TeamProfile.objects.get)(pk=entity_id) + except TeamProfile.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to team profile deletion events") + async def team_profile_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("team_profile_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/project.py b/core/graphql/subscriptions/project.py new file mode 100644 index 0000000..5f48025 --- /dev/null +++ b/core/graphql/subscriptions/project.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.project import ProjectType 
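+# Pattern shared by the model subscriptions in this package: authenticate,
+# subscribe to a pubsub channel, then re-fetch each announced row by pk, so
+# payloads only need to carry an id (extracted via _extract_id below).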
+from core.graphql.utils import _extract_id +from core.models.project import Project + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to project creation events") + async def project_created(self, info: Info) -> AsyncGenerator[ProjectType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_created") as subscriber: + async for payload in subscriber: + project_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Project.objects.get)(pk=project_id) + except Project.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project updates") + async def project_updated(self, info: Info) -> AsyncGenerator[ProjectType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_updated") as subscriber: + async for payload in subscriber: + project_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Project.objects.get)(pk=project_id) + except Project.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project deletion events") + async def project_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_deleted") as subscriber: + async for payload in subscriber: + project_id = await _extract_id(payload) + yield strawberry.ID(project_id) diff --git a/core/graphql/subscriptions/project_punchlist.py b/core/graphql/subscriptions/project_punchlist.py new file mode 100644 index 0000000..9cc7f2b --- /dev/null +++ b/core/graphql/subscriptions/project_punchlist.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.project_punchlist import ProjectPunchlistType +from core.graphql.utils import _extract_id +from core.models.project_punchlist import ProjectPunchlist + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to project punchlist creation events") + async def project_punchlist_created(self, info: Info) -> AsyncGenerator[ProjectPunchlistType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_punchlist_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectPunchlist.objects.get)(pk=entity_id) + except ProjectPunchlist.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project punchlist updates") + async def project_punchlist_updated(self, info: Info) -> AsyncGenerator[ProjectPunchlistType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_punchlist_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectPunchlist.objects.get)(pk=entity_id) + 
except ProjectPunchlist.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project punchlist deletion events") + async def project_punchlist_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_punchlist_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/project_scope.py b/core/graphql/subscriptions/project_scope.py new file mode 100644 index 0000000..56f34cc --- /dev/null +++ b/core/graphql/subscriptions/project_scope.py @@ -0,0 +1,141 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.project_scope import ( + ProjectScopeType, + ProjectScopeCategoryType, + ProjectScopeTaskType, +) +from core.graphql.utils import _extract_id +from core.models.project_scope import ProjectScope, ProjectScopeCategory, ProjectScopeTask + + +@strawberry.type +class Subscription: + # ProjectScope events + @strawberry.subscription(description="Subscribe to project scope creation events") + async def project_scope_created(self, info: Info) -> AsyncGenerator[ProjectScopeType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScope.objects.get)(pk=entity_id) + except ProjectScope.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope updates") + async def project_scope_updated(self, info: Info) -> AsyncGenerator[ProjectScopeType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScope.objects.get)(pk=entity_id) + except ProjectScope.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope deletion events") + async def project_scope_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # ProjectScopeCategory (areas) + @strawberry.subscription(description="Subscribe to project scope category creation events") + async def project_scope_category_created(self, info: Info) -> AsyncGenerator[ProjectScopeCategoryType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_category_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await 
database_sync_to_async(ProjectScopeCategory.objects.get)(pk=entity_id) + except ProjectScopeCategory.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope category updates") + async def project_scope_category_updated(self, info: Info) -> AsyncGenerator[ProjectScopeCategoryType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_category_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScopeCategory.objects.get)(pk=entity_id) + except ProjectScopeCategory.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope category deletion events") + async def project_scope_category_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_category_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # ProjectScopeTask + @strawberry.subscription(description="Subscribe to project scope task creation events") + async def project_scope_task_created(self, info: Info) -> AsyncGenerator[ProjectScopeTaskType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_task_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScopeTask.objects.get)(pk=entity_id) + except ProjectScopeTask.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope task updates") + async def project_scope_task_updated(self, info: Info) -> AsyncGenerator[ProjectScopeTaskType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_task_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScopeTask.objects.get)(pk=entity_id) + except ProjectScopeTask.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope task deletion events") + async def project_scope_task_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_task_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/project_scope_template.py b/core/graphql/subscriptions/project_scope_template.py new file mode 100644 index 0000000..9dcc6d4 --- /dev/null +++ b/core/graphql/subscriptions/project_scope_template.py @@ -0,0 +1,145 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.project_scope_template 
import ( + ProjectScopeTemplateType, + ProjectAreaTemplateType, + ProjectTaskTemplateType, +) +from core.graphql.utils import _extract_id +from core.models.project_scope_template import ( + ProjectScopeTemplate, + ProjectAreaTemplate, + ProjectTaskTemplate, +) + + +@strawberry.type +class Subscription: + # Template + @strawberry.subscription(description="Subscribe to project scope template creation events") + async def project_scope_template_created(self, info: Info) -> AsyncGenerator[ProjectScopeTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScopeTemplate.objects.get)(pk=entity_id) + except ProjectScopeTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope template updates") + async def project_scope_template_updated(self, info: Info) -> AsyncGenerator[ProjectScopeTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectScopeTemplate.objects.get)(pk=entity_id) + except ProjectScopeTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project scope template deletion events") + async def project_scope_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_scope_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # Area template + @strawberry.subscription(description="Subscribe to project area template creation events") + async def project_area_template_created(self, info: Info) -> AsyncGenerator[ProjectAreaTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_area_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectAreaTemplate.objects.get)(pk=entity_id) + except ProjectAreaTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project area template updates") + async def project_area_template_updated(self, info: Info) -> AsyncGenerator[ProjectAreaTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_area_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectAreaTemplate.objects.get)(pk=entity_id) + except ProjectAreaTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project area template deletion events") + async def 
project_area_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_area_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # Task template + @strawberry.subscription(description="Subscribe to project task template creation events") + async def project_task_template_created(self, info: Info) -> AsyncGenerator[ProjectTaskTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_task_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectTaskTemplate.objects.get)(pk=entity_id) + except ProjectTaskTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project task template updates") + async def project_task_template_updated(self, info: Info) -> AsyncGenerator[ProjectTaskTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_task_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ProjectTaskTemplate.objects.get)(pk=entity_id) + except ProjectTaskTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to project task template deletion events") + async def project_task_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("project_task_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/report.py b/core/graphql/subscriptions/report.py new file mode 100644 index 0000000..a4789e2 --- /dev/null +++ b/core/graphql/subscriptions/report.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.report import ReportType +from core.graphql.utils import _extract_id +from core.models.report import Report + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to report creation events") + async def report_created(self, info: Info) -> AsyncGenerator[ReportType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("report_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Report.objects.get)(pk=entity_id) + except Report.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to report updates") + async def report_updated(self, info: Info) -> AsyncGenerator[ReportType, None]: + user = info.context.user + if not user or not 
user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("report_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Report.objects.get)(pk=entity_id) + except Report.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to report deletion events") + async def report_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("report_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/revenue.py b/core/graphql/subscriptions/revenue.py new file mode 100644 index 0000000..3580161 --- /dev/null +++ b/core/graphql/subscriptions/revenue.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.revenue import RevenueType +from core.graphql.utils import _extract_id +from core.models.revenue import Revenue + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to revenue creation events") + async def revenue_created(self, info: Info) -> AsyncGenerator[RevenueType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("revenue_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Revenue.objects.get)(pk=entity_id) + except Revenue.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to revenue updates") + async def revenue_updated(self, info: Info) -> AsyncGenerator[RevenueType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("revenue_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Revenue.objects.get)(pk=entity_id) + except Revenue.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to revenue deletion events") + async def revenue_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("revenue_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/schedule.py b/core/graphql/subscriptions/schedule.py new file mode 100644 index 0000000..ee888dd --- /dev/null +++ b/core/graphql/subscriptions/schedule.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.schedule import ScheduleType +from core.graphql.utils import _extract_id +from core.models.schedule import Schedule + + +@strawberry.type +class 
Subscription: + @strawberry.subscription(description="Subscribe to schedule creation events") + async def schedule_created(self, info: Info) -> AsyncGenerator[ScheduleType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("schedule_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Schedule.objects.get)(pk=entity_id) + except Schedule.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to schedule updates") + async def schedule_updated(self, info: Info) -> AsyncGenerator[ScheduleType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("schedule_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Schedule.objects.get)(pk=entity_id) + except Schedule.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to schedule deletion events") + async def schedule_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("schedule_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) diff --git a/core/graphql/subscriptions/scope.py b/core/graphql/subscriptions/scope.py new file mode 100644 index 0000000..ab153f5 --- /dev/null +++ b/core/graphql/subscriptions/scope.py @@ -0,0 +1,179 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.scope import ScopeType, AreaType, TaskType, TaskCompletionType +from core.graphql.utils import _extract_id +from core.models.scope import Scope, Area, Task, TaskCompletion + + +@strawberry.type +class Subscription: + # Scope subscriptions + @strawberry.subscription(description="Subscribe to scope creation events") + async def scope_created(self, info: Info) -> AsyncGenerator[ScopeType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Scope.objects.get)(pk=entity_id) + except Scope.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to scope updates") + async def scope_updated(self, info: Info) -> AsyncGenerator[ScopeType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Scope.objects.get)(pk=entity_id) + except Scope.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to scope deletion events") + async def scope_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = 
info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # Area subscriptions + @strawberry.subscription(description="Subscribe to area creation events") + async def area_created(self, info: Info) -> AsyncGenerator[AreaType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Area.objects.get)(pk=entity_id) + except Area.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to area updates") + async def area_updated(self, info: Info) -> AsyncGenerator[AreaType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Area.objects.get)(pk=entity_id) + except Area.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to area deletion events") + async def area_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # Task subscriptions + @strawberry.subscription(description="Subscribe to task creation events") + async def task_created(self, info: Info) -> AsyncGenerator[TaskType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Task.objects.get)(pk=entity_id) + except Task.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to task updates") + async def task_updated(self, info: Info) -> AsyncGenerator[TaskType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Task.objects.get)(pk=entity_id) + except Task.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to task deletion events") + async def task_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # TaskCompletion subscriptions + @strawberry.subscription(description="Subscribe to task completion creation 
events") + async def task_completion_created(self, info: Info) -> AsyncGenerator[TaskCompletionType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_completion_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TaskCompletion.objects.get)(pk=entity_id) + except TaskCompletion.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to task completion updates") + async def task_completion_updated(self, info: Info) -> AsyncGenerator[TaskCompletionType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_completion_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TaskCompletion.objects.get)(pk=entity_id) + except TaskCompletion.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to task completion deletion events") + async def task_completion_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_completion_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) \ No newline at end of file diff --git a/core/graphql/subscriptions/scope_template.py b/core/graphql/subscriptions/scope_template.py new file mode 100644 index 0000000..9ff8f6c --- /dev/null +++ b/core/graphql/subscriptions/scope_template.py @@ -0,0 +1,161 @@ +# Python +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info + +from core.graphql.pubsub import pubsub +from core.graphql.types.scope_template import ( + ScopeTemplateType, + AreaTemplateType, + TaskTemplateType, +) +from core.graphql.types.scope import ScopeType +from core.graphql.utils import _extract_id +from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate +from core.models.scope import Scope + + +@strawberry.type +class Subscription: + # ScopeTemplate subscriptions + @strawberry.subscription(description="Subscribe to scope template creation events") + async def scope_template_created(self, info: Info) -> AsyncGenerator[ScopeTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(ScopeTemplate.objects.get)(pk=entity_id) + except ScopeTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to scope template updates") + async def scope_template_updated(self, info: Info) -> AsyncGenerator[ScopeTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + 
instance = await database_sync_to_async(ScopeTemplate.objects.get)(pk=entity_id) + except ScopeTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to scope template deletion events") + async def scope_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # AreaTemplate subscriptions + @strawberry.subscription(description="Subscribe to area template creation events") + async def area_template_created(self, info: Info) -> AsyncGenerator[AreaTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AreaTemplate.objects.get)(pk=entity_id) + except AreaTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to area template updates") + async def area_template_updated(self, info: Info) -> AsyncGenerator[AreaTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(AreaTemplate.objects.get)(pk=entity_id) + except AreaTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to area template deletion events") + async def area_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("area_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # TaskTemplate subscriptions + @strawberry.subscription(description="Subscribe to task template creation events") + async def task_template_created(self, info: Info) -> AsyncGenerator[TaskTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_template_created") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TaskTemplate.objects.get)(pk=entity_id) + except TaskTemplate.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to task template updates") + async def task_template_updated(self, info: Info) -> AsyncGenerator[TaskTemplateType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_template_updated") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(TaskTemplate.objects.get)(pk=entity_id) + except TaskTemplate.DoesNotExist: + 
continue + yield instance + + @strawberry.subscription(description="Subscribe to task template deletion events") + async def task_template_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("task_template_deleted") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + yield strawberry.ID(entity_id) + + # Scope created from the template + @strawberry.subscription(description="Subscribe to scopes created from a template") + async def scope_created_from_template(self, info: Info) -> AsyncGenerator[ScopeType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("scope_created_from_template") as subscriber: + async for payload in subscriber: + entity_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Scope.objects.get)(pk=entity_id) + except Scope.DoesNotExist: + continue + yield instance \ No newline at end of file diff --git a/core/graphql/subscriptions/service.py b/core/graphql/subscriptions/service.py new file mode 100644 index 0000000..fdd39d6 --- /dev/null +++ b/core/graphql/subscriptions/service.py @@ -0,0 +1,52 @@ +from typing import AsyncGenerator +import strawberry +from channels.db import database_sync_to_async +from strawberry.types import Info +from core.graphql.pubsub import pubsub +from core.graphql.types.service import ServiceType +from core.graphql.utils import _extract_id +from core.models.service import Service + + +@strawberry.type +class Subscription: + @strawberry.subscription(description="Subscribe to service visit creation events") + async def service_created(self, info: Info) -> AsyncGenerator[ServiceType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("service_created") as subscriber: + async for payload in subscriber: + service_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Service.objects.get)(pk=service_id) + except Service.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to service visit updates") + async def service_updated(self, info: Info) -> AsyncGenerator[ServiceType, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("service_updated") as subscriber: + async for payload in subscriber: + service_id = await _extract_id(payload) + try: + instance = await database_sync_to_async(Service.objects.get)(pk=service_id) + except Service.DoesNotExist: + continue + yield instance + + @strawberry.subscription(description="Subscribe to service visit deletion events") + async def service_deleted(self, info: Info) -> AsyncGenerator[strawberry.ID, None]: + user = info.context.user + if not user or not user.is_authenticated: + raise PermissionError("Authentication required") + + async with pubsub.subscribe("service_deleted") as subscriber: + async for payload in subscriber: + service_id = await _extract_id(payload) + yield strawberry.ID(service_id) diff --git a/core/graphql/types/__init__.py b/core/graphql/types/__init__.py new file mode 100644 index 0000000..7b68f7f --- /dev/null +++ b/core/graphql/types/__init__.py @@ -0,0 +1,20 @@ +from 
core.graphql.types.customer import * +from core.graphql.types.account import * +from core.graphql.types.profile import * +from core.graphql.types.project import * +from core.graphql.types.service import * +from core.graphql.types.labor import * +from core.graphql.types.revenue import * +from core.graphql.types.schedule import * +from core.graphql.types.invoice import * +from core.graphql.types.report import * +from core.graphql.types.account_punchlist import * +from core.graphql.types.project_punchlist import * +from core.graphql.types.scope import * +from core.graphql.types.scope_template import * +from core.graphql.types.project_scope import * +from core.graphql.types.project_scope_template import * +from core.graphql.types.session import * +from core.graphql.types.session_image import * +from core.graphql.types.session_video import * +from core.graphql.types.session_note import * \ No newline at end of file diff --git a/core/graphql/types/account.py b/core/graphql/types/account.py new file mode 100644 index 0000000..58bbb78 --- /dev/null +++ b/core/graphql/types/account.py @@ -0,0 +1,55 @@ +from typing import List, Optional +import strawberry +import strawberry_django as sd +from strawberry import relay + +from core.graphql.types.scope import ScopeType +from core.graphql.types.revenue import RevenueType +from core.graphql.types.labor import LaborType +from core.graphql.types.schedule import ScheduleType +from core.graphql.types.service import ServiceType +from core.models.account import Account, AccountAddress, AccountContact + + +@sd.type(Account) +class AccountType(relay.Node): + name: strawberry.auto + status: strawberry.auto + start_date: strawberry.auto + end_date: strawberry.auto + customer_id: strawberry.auto + is_active: bool + primary_address: Optional["AccountAddressType"] + addresses: List["AccountAddressType"] = sd.field() + contacts: List["AccountContactType"] = sd.field() + revenues: List["RevenueType"] = sd.field() + + +@sd.type(AccountAddress) +class AccountAddressType(relay.Node): + street_address: strawberry.auto + name: strawberry.auto + city: strawberry.auto + state: strawberry.auto + zip_code: strawberry.auto + is_active: bool + is_primary: strawberry.auto + notes: strawberry.auto + account_id: strawberry.auto + schedules: List["ScheduleType"] = sd.field() + services: List["ServiceType"] = sd.field() + labors: List["LaborType"] = sd.field() + scopes: List["ScopeType"] = sd.field() + + +@sd.type(AccountContact) +class AccountContactType(relay.Node): + first_name: strawberry.auto + last_name: strawberry.auto + phone: strawberry.auto + email: strawberry.auto + is_primary: strawberry.auto + is_active: bool + notes: strawberry.auto + account_id: strawberry.auto + full_name: str diff --git a/core/graphql/types/account_punchlist.py b/core/graphql/types/account_punchlist.py new file mode 100644 index 0000000..0dda2cc --- /dev/null +++ b/core/graphql/types/account_punchlist.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.account_punchlist import AccountPunchlist + + +@sd.type(AccountPunchlist) +class AccountPunchlistType(relay.Node): + account_id: strawberry.auto + date: strawberry.auto diff --git a/core/graphql/types/customer.py b/core/graphql/types/customer.py new file mode 100644 index 0000000..eb9974c --- /dev/null +++ b/core/graphql/types/customer.py @@ -0,0 +1,44 @@ +from typing import List +import strawberry +from strawberry import relay +import strawberry_django as sd +from 
core.graphql.types.account import AccountType +from core.models.customer import Customer, CustomerAddress, CustomerContact + + +@sd.type(Customer) +class CustomerType(relay.Node): + name: strawberry.auto + status: strawberry.auto + start_date: strawberry.auto + end_date: strawberry.auto + billing_terms: strawberry.auto + billing_email: strawberry.auto + wave_customer_id: strawberry.auto + is_active: bool + addresses: List["CustomerAddressType"] = sd.field() + contacts: List["CustomerContactType"] = sd.field() + accounts: List["AccountType"] = sd.field() + + +@sd.type(CustomerAddress) +class CustomerAddressType(relay.Node): + street_address: strawberry.auto + city: strawberry.auto + state: strawberry.auto + zip_code: strawberry.auto + address_type: strawberry.auto + is_active: strawberry.auto + is_primary: strawberry.auto + + +@sd.type(CustomerContact) +class CustomerContactType(relay.Node): + first_name: strawberry.auto + last_name: strawberry.auto + phone: strawberry.auto + email: strawberry.auto + is_primary: strawberry.auto + is_active: strawberry.auto + notes: strawberry.auto + full_name: str diff --git a/core/graphql/types/dashboard.py b/core/graphql/types/dashboard.py new file mode 100644 index 0000000..07d67aa --- /dev/null +++ b/core/graphql/types/dashboard.py @@ -0,0 +1,48 @@ +import strawberry +from typing import List +from core.graphql.types.service import ServiceType +from core.graphql.types.project import ProjectType +from core.graphql.types.invoice import InvoiceType +from core.graphql.types.report import ReportType +from core.graphql.types.scope_template import ScopeTemplateType +from core.graphql.types.project_scope_template import ProjectScopeTemplateType + + +@strawberry.type +class AdminDashboardData: + """Consolidated dashboard data for admin/team leader users. + + Returns all data needed for the admin dashboard in a single query, + with proper database optimization via select_related and prefetch_related. + """ + + services: List[ServiceType] + projects: List[ProjectType] + invoices: List[InvoiceType] + reports: List[ReportType] + service_scope_templates: List[ScopeTemplateType] + project_scope_templates: List[ProjectScopeTemplateType] + + +@strawberry.type +class TeamDashboardData: + """Consolidated dashboard data for team member users. + + Returns services and projects assigned to the team member. + """ + + services: List[ServiceType] + projects: List[ProjectType] + reports: List[ReportType] + + +@strawberry.type +class CustomerDashboardData: + """Consolidated dashboard data for customer users. + + Returns services, projects, and invoices for the customer. 
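+
+    A client would typically fetch all of this in one round trip. The query
+    below is illustrative only -- the exact root field name depends on how
+    the Query type exposes this payload (assumed here to be
+    `customerDashboard`):
+
+        query {
+            customerDashboard {
+                services { id date status }
+                projects { id name status }
+                invoices { id date status }
+            }
+        }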
+ """ + + services: List[ServiceType] + projects: List[ProjectType] + invoices: List[InvoiceType] diff --git a/core/graphql/types/event.py b/core/graphql/types/event.py new file mode 100644 index 0000000..4eda08d --- /dev/null +++ b/core/graphql/types/event.py @@ -0,0 +1,111 @@ +import strawberry +import strawberry_django +from typing import Optional, List +from datetime import datetime + +from core.models.events import Event, NotificationRule, Notification, NotificationDelivery +from core.models.enums import ( + EventTypeChoices, + NotificationChannelChoices, + NotificationStatusChoices, + DeliveryStatusChoices, + RoleChoices +) + + +@strawberry_django.type(Event) +class EventType: + id: strawberry.ID + event_type: EventTypeChoices + entity_type: str + entity_id: strawberry.ID + metadata: strawberry.scalars.JSON + created_at: datetime + updated_at: datetime + + @strawberry_django.field + def triggered_by_id(self, root: Event) -> Optional[strawberry.ID]: + """ID of the profile that triggered this event""" + return strawberry.ID(str(root.triggered_by_object_id)) if root.triggered_by_object_id else None + + @strawberry_django.field + def triggered_by_type(self, root: Event) -> Optional[str]: + """Type of profile that triggered this event (TeamProfile or CustomerProfile)""" + if root.triggered_by_content_type: + return root.triggered_by_content_type.model + return None + + +@strawberry_django.type(NotificationRule) +class NotificationRuleType: + id: strawberry.ID + name: str + description: str + event_types: List[EventTypeChoices] + channels: List[NotificationChannelChoices] + target_roles: List[RoleChoices] + is_active: bool + template_subject: str + template_body: str + conditions: strawberry.scalars.JSON + created_at: datetime + updated_at: datetime + + @strawberry_django.field + def target_team_profile_ids(self, root: NotificationRule) -> List[strawberry.ID]: + """IDs of specific team members to notify""" + return [strawberry.ID(str(profile.id)) for profile in root.target_team_profiles.all()] + + @strawberry_django.field + def target_customer_profile_ids(self, root: NotificationRule) -> List[strawberry.ID]: + """IDs of specific customer profiles to notify""" + return [strawberry.ID(str(profile.id)) for profile in root.target_customer_profiles.all()] + + +@strawberry_django.type(Notification) +class NotificationType: + id: strawberry.ID + event: EventType + rule: Optional[NotificationRuleType] + status: NotificationStatusChoices + subject: str + body: str + action_url: str + read_at: Optional[datetime] + metadata: strawberry.scalars.JSON + created_at: datetime + updated_at: datetime + + @strawberry_django.field + def recipient_id(self, root: Notification) -> strawberry.ID: + """ID of the recipient profile""" + return strawberry.ID(str(root.recipient_object_id)) + + @strawberry.field + async def recipient_type(self, root: Notification) -> str: + """Type of recipient profile (TeamProfile or CustomerProfile)""" + from channels.db import database_sync_to_async + content_type = await database_sync_to_async(lambda: root.recipient_content_type)() + return content_type.model + + @strawberry_django.field + def is_read(self, root: Notification) -> bool: + """Whether the notification has been read""" + return root.read_at is not None + + +@strawberry_django.type(NotificationDelivery) +class NotificationDeliveryType: + id: strawberry.ID + notification: NotificationType + channel: NotificationChannelChoices + status: DeliveryStatusChoices + attempts: int + last_attempt_at: Optional[datetime] + sent_at: 
Optional[datetime] + delivered_at: Optional[datetime] + error_message: str + external_id: str + metadata: strawberry.scalars.JSON + created_at: datetime + updated_at: datetime diff --git a/core/graphql/types/invoice.py b/core/graphql/types/invoice.py new file mode 100644 index 0000000..fb67260 --- /dev/null +++ b/core/graphql/types/invoice.py @@ -0,0 +1,19 @@ +from typing import List +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.invoice import Invoice +from core.graphql.types.project import ProjectType +from core.graphql.types.revenue import RevenueType + + +@sd.type(Invoice) +class InvoiceType(relay.Node): + date: strawberry.auto + customer_id: strawberry.auto + status: strawberry.auto + date_paid: strawberry.auto + payment_type: strawberry.auto + wave_invoice_id: strawberry.auto + projects: List["ProjectType"] = sd.field() + revenues: List["RevenueType"] = sd.field() diff --git a/core/graphql/types/labor.py b/core/graphql/types/labor.py new file mode 100644 index 0000000..6986d2f --- /dev/null +++ b/core/graphql/types/labor.py @@ -0,0 +1,12 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.labor import Labor + + +@sd.type(Labor) +class LaborType(relay.Node): + account_address_id: strawberry.auto + amount: strawberry.auto + start_date: strawberry.auto + end_date: strawberry.auto diff --git a/core/graphql/types/messaging.py b/core/graphql/types/messaging.py new file mode 100644 index 0000000..6c2bcb8 --- /dev/null +++ b/core/graphql/types/messaging.py @@ -0,0 +1,288 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from typing import Optional, List +from datetime import datetime + +from core.models.messaging import ( + Conversation, + ConversationParticipant, + Message, + MessageReadReceipt +) +from core.graphql.types.profile import TeamProfileType, CustomerProfileType +from core.graphql.types.project import ProjectType +from core.graphql.types.service import ServiceType +from core.graphql.types.account import AccountType +from core.graphql.types.customer import CustomerType + + +@strawberry.type +class ParticipantType: + """ + Union type for conversation participants (TeamProfile or CustomerProfile) + """ + team_profile: Optional[TeamProfileType] = None + customer_profile: Optional[CustomerProfileType] = None + + +@strawberry.type +class EntityType: + """ + Union type for conversation entities (Project, Service, Account, Customer, etc.) 
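+
+    Exactly one of the optional entity fields is expected to be set, while
+    `entity_type` and `entity_id` are always populated. A minimal consumer
+    sketch (illustrative; `project_obj` and `pid` are placeholders):
+
+        e = EntityType(entity_type="project", entity_id=pid)
+        e.project = project_obj
+        target = e.project or e.service or e.account or e.customer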
+ """ + project: Optional[ProjectType] = None + service: Optional[ServiceType] = None + account: Optional[AccountType] = None + customer: Optional[CustomerType] = None + entity_type: str # The model name (e.g., "Project", "Service") + entity_id: strawberry.ID # The UUID + + +@sd.type(Message) +class MessageType(relay.Node): + conversation_id: strawberry.auto + body: strawberry.auto + reply_to_id: strawberry.auto + attachments: strawberry.auto + is_system_message: strawberry.auto + metadata: strawberry.auto + created_at: strawberry.auto + updated_at: strawberry.auto + + @strawberry.field + async def sender(self) -> ParticipantType: + """Return the sender as a union type""" + from core.models.profile import TeamProfile, CustomerProfile + from channels.db import database_sync_to_async + + # Get sender asynchronously + sender = await database_sync_to_async(lambda: self.sender)() + + if isinstance(sender, TeamProfile): + return ParticipantType(team_profile=sender, customer_profile=None) + elif isinstance(sender, CustomerProfile): + return ParticipantType(team_profile=None, customer_profile=sender) + return ParticipantType(team_profile=None, customer_profile=None) + + @strawberry.field + async def reply_to(self) -> Optional['MessageType']: + """Return the message this is replying to""" + from channels.db import database_sync_to_async + reply = await database_sync_to_async(lambda: self.reply_to)() + return reply if reply else None + + @strawberry.field + async def read_by(self, info) -> List['MessageReadReceiptType']: + """Return list of read receipts for this message""" + from channels.db import database_sync_to_async + return await database_sync_to_async(list)(self.read_receipts.all()) + + @strawberry.field + async def can_delete(self, info) -> bool: + """Return whether the current user can delete this message""" + from core.models.profile import TeamProfile, CustomerProfile + from django.contrib.contenttypes.models import ContentType + from django.conf import settings + from channels.db import database_sync_to_async + + profile = getattr(info.context.request, 'profile', None) + if not profile: + return False + + @database_sync_to_async + def check_delete_permission(): + content_type = ContentType.objects.get_for_model(type(profile)) + is_sender = (self.sender_object_id == profile.id and + self.sender_content_type == content_type) + is_admin = (str(profile.id) == settings.DISPATCH_TEAM_PROFILE_ID) + return is_sender or is_admin + + return await check_delete_permission() + + +@sd.type(ConversationParticipant) +class ConversationParticipantType(relay.Node): + conversation_id: strawberry.auto + last_read_at: strawberry.auto + unread_count: strawberry.auto + is_muted: strawberry.auto + is_archived: strawberry.auto + joined_at: strawberry.auto + created_at: strawberry.auto + updated_at: strawberry.auto + + @strawberry.field + async def participant(self) -> ParticipantType: + """Return the participant as a union type""" + from core.models.profile import TeamProfile, CustomerProfile + from channels.db import database_sync_to_async + + # Get the participant object asynchronously + participant = await database_sync_to_async(lambda: self.participant)() + + if isinstance(participant, TeamProfile): + return ParticipantType(team_profile=participant, customer_profile=None) + elif isinstance(participant, CustomerProfile): + return ParticipantType(team_profile=None, customer_profile=participant) + return ParticipantType(team_profile=None, customer_profile=None) + + +@sd.type(Conversation) +class 
ConversationType(relay.Node): + subject: strawberry.auto + conversation_type: strawberry.auto + last_message_at: strawberry.auto + is_archived: strawberry.auto + metadata: strawberry.auto + created_at: strawberry.auto + updated_at: strawberry.auto + + @strawberry.field + async def created_by(self) -> Optional[ParticipantType]: + """Return the conversation creator""" + from core.models.profile import TeamProfile, CustomerProfile + from channels.db import database_sync_to_async + + # Get creator asynchronously + creator = await database_sync_to_async(lambda: self.created_by)() + if not creator: + return None + + if isinstance(creator, TeamProfile): + return ParticipantType(team_profile=creator, customer_profile=None) + elif isinstance(creator, CustomerProfile): + return ParticipantType(team_profile=None, customer_profile=creator) + return None + + @strawberry.field + async def entity(self) -> Optional[EntityType]: + """Return the related entity (Project, Service, Account, etc.)""" + from core.models.project import Project + from core.models.service import Service + from core.models.account import Account + from core.models.customer import Customer + from channels.db import database_sync_to_async + + # Get all entity-related data asynchronously in a single call + @database_sync_to_async + def get_entity_data(): + entity = self.entity # GenericForeignKey access + if not entity: + return None, None, None + entity_type = self.entity_content_type.model # ForeignKey access + entity_id = str(self.entity_object_id) + return entity, entity_type, entity_id + + entity, entity_type, entity_id = await get_entity_data() + if not entity: + return None + + result = EntityType( + entity_type=entity_type, + entity_id=entity_id + ) + + if isinstance(entity, Project): + result.project = entity + elif isinstance(entity, Service): + result.service = entity + elif isinstance(entity, Account): + result.account = entity + elif isinstance(entity, Customer): + result.customer = entity + + return result + + @strawberry.field + async def participants(self, info) -> List[ConversationParticipantType]: + """Return all participants in the conversation""" + from channels.db import database_sync_to_async + return await database_sync_to_async(list)(self.participants.all()) + + @strawberry.field + async def messages( + self, + info, + limit: Optional[int] = 50, + offset: Optional[int] = 0 + ) -> List[MessageType]: + """Return messages in the conversation with pagination""" + from channels.db import database_sync_to_async + return await database_sync_to_async(list)(self.messages.all()[offset:offset + limit]) + + @strawberry.field + async def message_count(self, info) -> int: + """Return total message count""" + from channels.db import database_sync_to_async + return await database_sync_to_async(self.messages.count)() + + @strawberry.field + async def unread_count(self, info) -> int: + """Return unread count for the current user""" + from channels.db import database_sync_to_async + + # Get profile directly from context (not Django User model) + profile = getattr(info.context.request, 'profile', None) + if not profile: + return 0 + + @database_sync_to_async + def get_unread(): + # Get the profile's content type + from django.contrib.contenttypes.models import ContentType + content_type = ContentType.objects.get_for_model(type(profile)) + + # Get the participant record + participant = self.participants.filter( + participant_content_type=content_type, + participant_object_id=profile.id + ).first() + + return participant.unread_count 
if participant else 0 + + return await get_unread() + + @strawberry.field + async def can_delete(self, info) -> bool: + """Return whether the current user can delete this conversation""" + from django.contrib.contenttypes.models import ContentType + from django.conf import settings + from channels.db import database_sync_to_async + + profile = getattr(info.context.request, 'profile', None) + if not profile: + return False + + @database_sync_to_async + def check_delete_permission(): + content_type = ContentType.objects.get_for_model(type(profile)) + is_creator = (self.created_by_content_type == content_type and + self.created_by_object_id == profile.id) + is_admin = (str(profile.id) == settings.DISPATCH_TEAM_PROFILE_ID) + return is_creator or is_admin + + return await check_delete_permission() + + +@sd.type(MessageReadReceipt) +class MessageReadReceiptType(relay.Node): + message_id: strawberry.auto + read_at: strawberry.auto + created_at: strawberry.auto + updated_at: strawberry.auto + + @strawberry.field + async def reader(self) -> ParticipantType: + """Return the reader as a union type""" + from core.models.profile import TeamProfile, CustomerProfile + from channels.db import database_sync_to_async + + # Get reader asynchronously + reader = await database_sync_to_async(lambda: self.reader)() + + if isinstance(reader, TeamProfile): + return ParticipantType(team_profile=reader, customer_profile=None) + elif isinstance(reader, CustomerProfile): + return ParticipantType(team_profile=None, customer_profile=reader) + return ParticipantType(team_profile=None, customer_profile=None) diff --git a/core/graphql/types/profile.py b/core/graphql/types/profile.py new file mode 100644 index 0000000..3d64be3 --- /dev/null +++ b/core/graphql/types/profile.py @@ -0,0 +1,32 @@ +from typing import List +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.graphql.types.customer import CustomerType +from core.models.profile import CustomerProfile, TeamProfile + + +@sd.type(CustomerProfile) +class CustomerProfileType(relay.Node): + first_name: strawberry.auto + last_name: strawberry.auto + email: strawberry.auto + phone: strawberry.auto + status: strawberry.auto + notes: strawberry.auto + ory_kratos_id: strawberry.auto + customers: List["CustomerType"] = sd.field() + full_name: str + + +@sd.type(TeamProfile) +class TeamProfileType(relay.Node): + first_name: strawberry.auto + last_name: strawberry.auto + email: strawberry.auto + phone: strawberry.auto + status: strawberry.auto + notes: strawberry.auto + ory_kratos_id: strawberry.auto + full_name: str + role: strawberry.auto diff --git a/core/graphql/types/project.py b/core/graphql/types/project.py new file mode 100644 index 0000000..0719cf5 --- /dev/null +++ b/core/graphql/types/project.py @@ -0,0 +1,24 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.project import Project + + +@sd.type(Project) +class ProjectType(relay.Node): + customer_id: strawberry.auto + account_address_id: strawberry.auto + street_address: strawberry.auto + city: strawberry.auto + state: strawberry.auto + zip_code: strawberry.auto + name: strawberry.auto + date: strawberry.auto + status: strawberry.auto + notes: strawberry.auto + labor: strawberry.auto + amount: strawberry.auto + team_members: strawberry.auto + scope_id: strawberry.auto + calendar_event_id: strawberry.auto + wave_service_id: strawberry.auto diff --git a/core/graphql/types/project_punchlist.py 
b/core/graphql/types/project_punchlist.py new file mode 100644 index 0000000..dabc552 --- /dev/null +++ b/core/graphql/types/project_punchlist.py @@ -0,0 +1,10 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.project_punchlist import ProjectPunchlist + + +@sd.type(ProjectPunchlist) +class ProjectPunchlistType(relay.Node): + project_id: strawberry.auto + date: strawberry.auto diff --git a/core/graphql/types/project_scope.py b/core/graphql/types/project_scope.py new file mode 100644 index 0000000..c479926 --- /dev/null +++ b/core/graphql/types/project_scope.py @@ -0,0 +1,44 @@ +from typing import List + +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.project_scope import ProjectScope, ProjectScopeCategory, ProjectScopeTask, ProjectScopeTaskCompletion + + +@sd.type(ProjectScope) +class ProjectScopeType(relay.Node): + name: strawberry.auto + account_id: strawberry.auto + project_id: strawberry.auto + account_address_id: strawberry.auto + description: strawberry.auto + is_active: strawberry.auto + project_areas: List['ProjectScopeCategoryType'] = sd.field() + + +@sd.type(ProjectScopeCategory) +class ProjectScopeCategoryType(relay.Node): + name: strawberry.auto + scope_id: strawberry.auto + order: strawberry.auto + project_tasks: List['ProjectScopeTaskType'] = sd.field() + + +@sd.type(ProjectScopeTask) +class ProjectScopeTaskType(relay.Node): + category_id: strawberry.auto + description: strawberry.auto + checklist_description: strawberry.auto + order: strawberry.auto + estimated_minutes: strawberry.auto + + +@sd.type(ProjectScopeTaskCompletion) +class ProjectScopeTaskCompletionType(relay.Node): + task_id: strawberry.auto + project_id: strawberry.auto + account_address_id: strawberry.auto + completed_by_id: strawberry.auto + completed_at: strawberry.auto + notes: strawberry.auto \ No newline at end of file diff --git a/core/graphql/types/project_scope_template.py b/core/graphql/types/project_scope_template.py new file mode 100644 index 0000000..c633e4f --- /dev/null +++ b/core/graphql/types/project_scope_template.py @@ -0,0 +1,34 @@ +from typing import List +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.project_scope_template import ( + ProjectScopeTemplate, + ProjectAreaTemplate, + ProjectTaskTemplate, +) + + +@sd.type(ProjectScopeTemplate) +class ProjectScopeTemplateType(relay.Node): + name: strawberry.auto + description: strawberry.auto + is_active: strawberry.auto + category_templates: List['ProjectAreaTemplateType'] + + +@sd.type(ProjectAreaTemplate) +class ProjectAreaTemplateType(relay.Node): + scope_template_id: strawberry.auto + name: strawberry.auto + order: strawberry.auto + task_templates: List['ProjectTaskTemplateType'] + + +@sd.type(ProjectTaskTemplate) +class ProjectTaskTemplateType(relay.Node): + area_template_id: strawberry.auto + description: strawberry.auto + checklist_description: strawberry.auto + order: strawberry.auto + estimated_minutes: strawberry.auto \ No newline at end of file diff --git a/core/graphql/types/report.py b/core/graphql/types/report.py new file mode 100644 index 0000000..d45d3ec --- /dev/null +++ b/core/graphql/types/report.py @@ -0,0 +1,96 @@ +from typing import List +from decimal import Decimal + +import strawberry +import strawberry_django as sd +from strawberry import relay +from channels.db import database_sync_to_async + +from core.graphql.types.project import ProjectType +from 
core.graphql.types.service import ServiceType +from core.graphql.types.profile import TeamProfileType +from core.models.report import Report + + +@strawberry.type +class LaborBreakdownService: + service_id: relay.GlobalID + account_name: strawberry.Private[str] + address: strawberry.Private[str] + total_labor_rate: Decimal + team_member_count: int + is_team_member_assigned: bool + labor_share: Decimal + + +@strawberry.type +class LaborBreakdownProject: + project_id: relay.GlobalID + project_name: str + total_labor_amount: Decimal + team_member_count: int + is_team_member_assigned: bool + labor_share: Decimal + + +@strawberry.type +class LaborBreakdown: + team_member_id: relay.GlobalID + team_member_name: str + services: List[LaborBreakdownService] + projects: List[LaborBreakdownProject] + services_total: Decimal + projects_total: Decimal + grand_total: Decimal + + +@sd.type(Report) +class ReportType(relay.Node): + date: strawberry.auto + team_member_id: strawberry.auto + services: List["ServiceType"] = sd.field() + projects: List["ProjectType"] = sd.field() + + # Computed fields using model methods - now async + @strawberry.field + async def services_labor_total(self) -> Decimal: + """Total labor value for all services in this report""" + return await database_sync_to_async(self.get_services_labor_total)() + + @strawberry.field + async def projects_labor_total(self) -> Decimal: + """Total labor value for all projects in this report""" + return await database_sync_to_async(self.get_projects_labor_total)() + + @strawberry.field + async def total_labor_value(self) -> Decimal: + """Combined total labor value for services and projects""" + return await database_sync_to_async(self.get_total_labor_value)() + + @strawberry.field + async def labor_breakdown(self) -> LaborBreakdown: + """Detailed breakdown of labor calculations""" + breakdown_data = await database_sync_to_async(self.get_labor_breakdown)() + + # Convert UUID IDs to GlobalIDs + services = [] + for service_data in breakdown_data['services']: + service_data_copy = service_data.copy() + service_data_copy['service_id'] = relay.to_base64(ServiceType, service_data['service_id']) + services.append(LaborBreakdownService(**service_data_copy)) + + projects = [] + for project_data in breakdown_data['projects']: + project_data_copy = project_data.copy() + project_data_copy['project_id'] = relay.to_base64(ProjectType, project_data['project_id']) + projects.append(LaborBreakdownProject(**project_data_copy)) + + return LaborBreakdown( + team_member_id=relay.to_base64(TeamProfileType, breakdown_data['team_member_id']), + team_member_name=breakdown_data['team_member_name'], + services=services, + projects=projects, + services_total=breakdown_data['services_total'], + projects_total=breakdown_data['projects_total'], + grand_total=breakdown_data['grand_total'] + ) \ No newline at end of file diff --git a/core/graphql/types/revenue.py b/core/graphql/types/revenue.py new file mode 100644 index 0000000..e195811 --- /dev/null +++ b/core/graphql/types/revenue.py @@ -0,0 +1,13 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.revenue import Revenue + + +@sd.type(Revenue) +class RevenueType(relay.Node): + account_id: strawberry.auto + amount: strawberry.auto + start_date: strawberry.auto + end_date: strawberry.auto + wave_service_id: strawberry.auto \ No newline at end of file diff --git a/core/graphql/types/schedule.py b/core/graphql/types/schedule.py new file mode 100644 index 0000000..3cd9d61 --- /dev/null 
+++ b/core/graphql/types/schedule.py @@ -0,0 +1,21 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.schedule import Schedule + + +@sd.type(Schedule) +class ScheduleType(relay.Node): + name: strawberry.auto + account_address_id: strawberry.auto + monday_service: strawberry.auto + tuesday_service: strawberry.auto + wednesday_service: strawberry.auto + thursday_service: strawberry.auto + friday_service: strawberry.auto + saturday_service: strawberry.auto + sunday_service: strawberry.auto + weekend_service: strawberry.auto + schedule_exception: strawberry.auto + start_date: strawberry.auto + end_date: strawberry.auto diff --git a/core/graphql/types/scope.py b/core/graphql/types/scope.py new file mode 100644 index 0000000..5051fbe --- /dev/null +++ b/core/graphql/types/scope.py @@ -0,0 +1,46 @@ +from typing import List +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.scope import Scope, Area, Task, TaskCompletion + + +@sd.type(Scope) +class ScopeType(relay.Node): + name: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + description: strawberry.auto + is_active: strawberry.auto + areas: List["AreaType"] = sd.field() + + +@sd.type(Area) +class AreaType(relay.Node): + name: strawberry.auto + scope_id: strawberry.auto + order: strawberry.auto + tasks: List["TaskType"] = sd.field() + + +@sd.type(Task) +class TaskType(relay.Node): + area_id: strawberry.auto + description: strawberry.auto + checklist_description: strawberry.auto + frequency: strawberry.auto + order: strawberry.auto + is_conditional: strawberry.auto + estimated_minutes: strawberry.auto + + +@sd.type(TaskCompletion) +class TaskCompletionType(relay.Node): + task_id: strawberry.auto + service_id: strawberry.auto + account_address_id: strawberry.auto + completed_by_id: strawberry.auto + completed_at: strawberry.auto + notes: strawberry.auto + year: strawberry.auto + month: strawberry.auto \ No newline at end of file diff --git a/core/graphql/types/scope_template.py b/core/graphql/types/scope_template.py new file mode 100644 index 0000000..258ae23 --- /dev/null +++ b/core/graphql/types/scope_template.py @@ -0,0 +1,32 @@ +from typing import List +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate + + +@sd.type(ScopeTemplate) +class ScopeTemplateType(relay.Node): + name: strawberry.auto + description: strawberry.auto + is_active: strawberry.auto + area_templates: List["AreaTemplateType"] = sd.field() + + +@sd.type(AreaTemplate) +class AreaTemplateType(relay.Node): + scope_template_id: strawberry.auto + name: strawberry.auto + order: strawberry.auto + task_templates: List["TaskTemplateType"] = sd.field() + + +@sd.type(TaskTemplate) +class TaskTemplateType(relay.Node): + area_template_id: strawberry.auto + description: strawberry.auto + checklist_description: strawberry.auto + frequency: strawberry.auto + order: strawberry.auto + is_conditional: strawberry.auto + estimated_minutes: strawberry.auto \ No newline at end of file diff --git a/core/graphql/types/service.py b/core/graphql/types/service.py new file mode 100644 index 0000000..006b4e2 --- /dev/null +++ b/core/graphql/types/service.py @@ -0,0 +1,15 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.service import Service + + +@sd.type(Service) +class ServiceType(relay.Node): + account_id: 
strawberry.auto + account_address_id: strawberry.auto + date: strawberry.auto + status: strawberry.auto + notes: strawberry.auto + team_members: strawberry.auto + calendar_event_id: strawberry.auto diff --git a/core/graphql/types/session.py b/core/graphql/types/session.py new file mode 100644 index 0000000..ab66a9b --- /dev/null +++ b/core/graphql/types/session.py @@ -0,0 +1,47 @@ +from typing import List +import strawberry +from strawberry import relay +import strawberry_django as sd +from core.models.session import ServiceSession, ProjectSession +from core.graphql.types.scope import TaskCompletionType +from core.graphql.types.project_scope import ProjectScopeTaskCompletionType +from core.graphql.types.session_image import ServiceSessionImageType, ProjectSessionImageType +from core.graphql.types.session_video import ServiceSessionVideoType, ProjectSessionVideoType +from core.graphql.types.session_note import ServiceSessionNoteType, ProjectSessionNoteType + + +@sd.type(ServiceSession) +class ServiceSessionType(relay.Node): + service_id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + customer_id: strawberry.auto + scope_id: strawberry.auto + start: strawberry.auto + end: strawberry.auto + completed_tasks: List["TaskCompletionType"] = sd.field() + photos: List["ServiceSessionImageType"] = sd.field(field_name="service_session_images") + videos: List["ServiceSessionVideoType"] = sd.field(field_name="service_session_videos") + notes: List["ServiceSessionNoteType"] = sd.field() + duration_seconds: strawberry.auto + is_active: bool + + +@sd.type(ProjectSession) +class ProjectSessionType(relay.Node): + project_id: strawberry.auto + account_id: strawberry.auto + account_address_id: strawberry.auto + customer_id: strawberry.auto + scope_id: strawberry.auto + start: strawberry.auto + end: strawberry.auto + date: strawberry.auto + created_by_id: strawberry.auto + closed_by_id: strawberry.auto + completed_tasks: List["ProjectScopeTaskCompletionType"] = sd.field() + photos: List["ProjectSessionImageType"] = sd.field(field_name="project_session_images") + videos: List["ProjectSessionVideoType"] = sd.field(field_name="project_session_videos") + notes: List["ProjectSessionNoteType"] = sd.field() + is_active: strawberry.auto + duration_seconds: strawberry.auto diff --git a/core/graphql/types/session_image.py b/core/graphql/types/session_image.py new file mode 100644 index 0000000..d9fa9af --- /dev/null +++ b/core/graphql/types/session_image.py @@ -0,0 +1,32 @@ +import strawberry +import strawberry_django as sd +from strawberry import relay +from core.models.session_image import ServiceSessionImage, ProjectSessionImage + +@sd.type(ServiceSessionImage) +class ServiceSessionImageType(relay.Node): + title: strawberry.auto + image: strawberry.auto + thumbnail: strawberry.auto + content_type: strawberry.auto + width: strawberry.auto + height: strawberry.auto + created_at: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + service_session_id: strawberry.auto + notes: strawberry.auto + internal: bool + +@sd.type(ProjectSessionImage) +class ProjectSessionImageType(relay.Node): + title: strawberry.auto + image: strawberry.auto + thumbnail: strawberry.auto + content_type: strawberry.auto + width: strawberry.auto + height: strawberry.auto + created_at: strawberry.auto + uploaded_by_team_profile_id: strawberry.auto + project_session_id: strawberry.auto + notes: strawberry.auto + internal: bool \ No newline at end of file diff --git a/core/graphql/types/session_note.py 
b/core/graphql/types/session_note.py
new file mode 100644
index 0000000..be7451a
--- /dev/null
+++ b/core/graphql/types/session_note.py
@@ -0,0 +1,25 @@
+import strawberry
+from strawberry import relay
+import strawberry_django as sd
+
+from core.models.session import ServiceSessionNote, ProjectSessionNote
+
+
+@sd.type(ServiceSessionNote)
+class ServiceSessionNoteType(relay.Node):
+    created_at: strawberry.auto
+    updated_at: strawberry.auto
+    content: strawberry.auto
+    author_id: strawberry.auto
+    internal: bool
+    session_id: strawberry.auto
+
+
+@sd.type(ProjectSessionNote)
+class ProjectSessionNoteType(relay.Node):
+    created_at: strawberry.auto
+    updated_at: strawberry.auto
+    content: strawberry.auto
+    author_id: strawberry.auto
+    internal: bool
+    session_id: strawberry.auto
\ No newline at end of file
diff --git a/core/graphql/types/session_video.py b/core/graphql/types/session_video.py
new file mode 100644
index 0000000..0ccbf57
--- /dev/null
+++ b/core/graphql/types/session_video.py
@@ -0,0 +1,38 @@
+import strawberry
+import strawberry_django as sd
+from strawberry import relay
+from core.models.session_video import ServiceSessionVideo, ProjectSessionVideo
+
+
+@sd.type(ServiceSessionVideo)
+class ServiceSessionVideoType(relay.Node):
+    title: strawberry.auto
+    video: strawberry.auto
+    thumbnail: strawberry.auto
+    content_type: strawberry.auto
+    duration_seconds: strawberry.auto
+    file_size_bytes: strawberry.auto
+    width: strawberry.auto
+    height: strawberry.auto
+    created_at: strawberry.auto
+    uploaded_by_team_profile_id: strawberry.auto
+    service_session_id: strawberry.auto
+    notes: strawberry.auto
+    internal: strawberry.auto
+
+
+@sd.type(ProjectSessionVideo)
+class ProjectSessionVideoType(relay.Node):
+    title: strawberry.auto
+    video: strawberry.auto
+    thumbnail: strawberry.auto
+    content_type: strawberry.auto
+    duration_seconds: strawberry.auto
+    file_size_bytes: strawberry.auto
+    width: strawberry.auto
+    height: strawberry.auto
+    created_at: strawberry.auto
+    uploaded_by_team_profile_id: strawberry.auto
+    project_session_id: strawberry.auto
+    notes: strawberry.auto
+    internal: strawberry.auto
diff --git a/core/graphql/utils.py b/core/graphql/utils.py
new file mode 100644
index 0000000..f36f5b5
--- /dev/null
+++ b/core/graphql/utils.py
@@ -0,0 +1,304 @@
+import base64
+import binascii
+import calendar
+import datetime
+from functools import lru_cache
+from typing import Union, Dict, Any, Iterable, List, Optional, Type, TypeVar, Awaitable
+from channels.db import database_sync_to_async
+from django.db.models import Model
+from strawberry.relay import GlobalID
+
+ModelType = TypeVar('ModelType', bound=Model)
+
+
+def _decode_global_id(gid: Union[str, GlobalID, None]) -> Optional[str]:
+    """
+    Decode a Global ID to extract the node ID.
+    Handles both GlobalID objects and base64 encoded strings.
+    """
+    if gid is None:
+        return None
+    if isinstance(gid, GlobalID):
+        return gid.node_id
+    try:
+        decoded = base64.b64decode(gid).decode("utf-8")
+        if ":" in decoded:
+            return decoded.split(":", 1)[1]
+    except (binascii.Error, UnicodeDecodeError):
+        pass
+    return gid
+
+
+def _decode_global_ids(ids: Optional[Iterable[str]]) -> Optional[List[str]]:
+    """Decode a list of Global IDs."""
+    if ids is None:
+        return None
+    return [_decode_global_id(x) for x in ids]
+
+
+def _to_dict(input_data: Union[Dict[str, Any], object]) -> Dict[str, Any]:
+    """
+    Convert input data to a dictionary.
+    Handles both dict objects and objects with attributes.
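+
+    Example (illustrative):
+        >>> _to_dict({"name": "Acme"})
+        {'name': 'Acme'}
+
+    Attribute-style inputs go through vars(), so underscore-prefixed
+    attributes are dropped.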
+ """ + if isinstance(input_data, dict): + return dict(input_data) + try: + return {k: v for k, v in vars(input_data).items() if not k.startswith("_")} + except TypeError: + return {} + + +def _decode_scalar_ids_inplace(data: Dict[str, Any]) -> None: + """ + Decode Global IDs in-place for scalar ID fields. + Handles 'id' field and any field ending with '_id'. + """ + if "id" in data and data["id"] is not None: + data["id"] = _decode_global_id(data["id"]) + for k, v in list(data.items()): + if k.endswith("_id") and v is not None: + data[k] = _decode_global_id(v) + + +def _filter_write_fields(raw: Dict[str, Any], m2m_data: Optional[dict] = None) -> Dict[str, Any]: + """ + Remove fields that shouldn't be written directly to the model: + - many-to-many fields handled separately (m2m_data keys) + - *_ids convenience arrays that are processed elsewhere + - id (primary key) + """ + m2m_fields = list(m2m_data.keys()) if m2m_data else [] + exclude_keys = set(m2m_fields) | {k for k in raw.keys() if k.endswith("_ids")} | {"id"} + return {k: v for k, v in raw.items() if k not in exclude_keys} + + +def _observed(date: datetime.date) -> datetime.date: + """ + Calculate the observed date for holidays. + If holiday is Saturday -> observe Friday; if Sunday -> observe Monday + """ + wd = date.weekday() + if wd == 5: # Saturday + return date - datetime.timedelta(days=1) + if wd == 6: # Sunday + return date + datetime.timedelta(days=1) + return date + + +def _nth_weekday_of_month(year: int, month: int, weekday: int, n: int) -> datetime.date: + """ + Find the nth occurrence of a weekday in a month. + weekday: Mon=0...Sun=6, n: 1..5 (e.g., 4th Thursday) + """ + count = 0 + for day in range(1, 32): + try: + d = datetime.date(year, month, day) + except ValueError: + break + if d.weekday() == weekday: + count += 1 + if count == n: + return d + raise ValueError("Invalid nth weekday request") + + +def _last_weekday_of_month(year: int, month: int, weekday: int) -> datetime.date: + """Find the last occurrence of a weekday in a month (e.g., last Monday in May).""" + last_day = calendar.monthrange(year, month)[1] + for day in range(last_day, 0, -1): + d = datetime.date(year, month, day) + if d.weekday() == weekday: + return d + raise ValueError("Invalid last weekday request") + + +@lru_cache(maxsize=64) +def _holiday_set(year: int) -> set[datetime.date]: + """Generate a set of federal holidays for the given year.""" + holidays: set[datetime.date] = set() + + # New Year's Day (observed) + holidays.add(_observed(datetime.date(year, 1, 1))) + + # Memorial Day (last Monday in May) + holidays.add(_last_weekday_of_month(year, 5, calendar.MONDAY)) + + # Independence Day (observed) + holidays.add(_observed(datetime.date(year, 7, 4))) + + # Labor Day (first Monday in September) + holidays.add(_nth_weekday_of_month(year, 9, calendar.MONDAY, 1)) + + # Thanksgiving Day (4th Thursday in November) + holidays.add(_nth_weekday_of_month(year, 11, calendar.THURSDAY, 4)) + + # Christmas Day (observed) + holidays.add(_observed(datetime.date(year, 12, 25))) + + return holidays + + +def _is_holiday(date: datetime.date) -> bool: + """Check if a date is a federal holiday.""" + return date in _holiday_set(date.year) + + +def _extract_id(payload: Union[dict, str, int]) -> str: + """Extract ID from various payload formats.""" + return str(payload.get("id")) if isinstance(payload, dict) else str(payload) + + +# Internal synchronous implementations +def _create_object_sync(input_data, model_class: Type[ModelType], m2m_data: dict = None) -> 
ModelType: + """Synchronous implementation of create_object.""" + raw = _to_dict(input_data) + _decode_scalar_ids_inplace(raw) + + data = _filter_write_fields(raw, m2m_data) + instance = model_class.objects.create(**data) + + # Handle many-to-many relationships + if m2m_data: + for field, values in m2m_data.items(): + if values is not None: + getattr(instance, field).set(_decode_global_ids(values)) + + return instance + + +def _update_object_sync(input_data, model_class: Type[ModelType], m2m_data: dict = None) -> ModelType: + """Synchronous implementation of update_object.""" + raw = _to_dict(input_data) + _decode_scalar_ids_inplace(raw) + + try: + instance = model_class.objects.get(pk=raw.get("id")) + + data = _filter_write_fields(raw, m2m_data) + + # Update only provided fields + update_fields = [] + for field, value in data.items(): + if value is not None: + setattr(instance, field, value) + update_fields.append(field) + + if update_fields: + instance.save(update_fields=update_fields) + else: + instance.save() + + # Handle many-to-many relationships (only update if explicitly provided) + if m2m_data: + for field, values in m2m_data.items(): + if values is not None: + getattr(instance, field).set(_decode_global_ids(values)) + # None means "not provided" - leave unchanged + # To clear a relationship, pass an empty array [] + + return instance + + except model_class.DoesNotExist: + raise ValueError(f"{model_class.__name__} with ID {raw.get('id')} does not exist") + + +def _delete_object_sync(object_id, model_class: Type[ModelType]) -> Optional[ModelType]: + """Synchronous implementation of delete_object.""" + pk = _decode_global_id(object_id) + try: + instance = model_class.objects.get(pk=pk) + instance.delete() + return instance + except model_class.DoesNotExist: + return None + + +# Public async functions with explicit typing for IDE support +def create_object(input_data, model_class: Type[ModelType], m2m_data: dict = None) -> Awaitable[ModelType]: + """ + Create a new model instance asynchronously. + + Args: + input_data: Input data (dict or object with attributes) + model_class: Django model class + m2m_data: Optional dictionary of many-to-many field data + + Returns: + Awaitable that resolves to created model instance + """ + return database_sync_to_async(_create_object_sync)(input_data, model_class, m2m_data) + + +def update_object(input_data, model_class: Type[ModelType], m2m_data: dict = None) -> Awaitable[ModelType]: + """ + Update an existing model instance asynchronously. + + Args: + input_data: Input data (dict or object with attributes) - must include 'id' + model_class: Django model class + m2m_data: Optional dictionary of many-to-many field data + + Returns: + Awaitable that resolves to updated model instance + + Raises: + ValueError: If an object with the given ID doesn't exist + """ + return database_sync_to_async(_update_object_sync)(input_data, model_class, m2m_data) + + +def delete_object(object_id, model_class: Type[ModelType]) -> Awaitable[Optional[ModelType]]: + """ + Delete a model instance asynchronously. + + Args: + object_id: Global ID or primary key of the object to delete + model_class: Django model class + + Returns: + Awaitable that resolves to deleted model instance if found, None if not found + """ + return database_sync_to_async(_delete_object_sync)(object_id, model_class) + + +def _get_conversations_for_entity_sync(entity_instance: Model) -> List: + """ + Synchronous implementation to get conversations linked to an entity via GenericForeignKey. 
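+    Resolves the entity's ContentType, then filters Conversation rows on the
+    (entity_content_type, entity_object_id) pair.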
+
+    Args:
+        entity_instance: The model instance (e.g., Service, Project, Account, Customer)
+
+    Returns:
+        List of Conversation objects linked to this entity
+    """
+    from django.contrib.contenttypes.models import ContentType
+    from core.models.messaging import Conversation
+
+    content_type = ContentType.objects.get_for_model(type(entity_instance))
+    return list(Conversation.objects.filter(
+        entity_content_type=content_type,
+        entity_object_id=entity_instance.id
+    ))
+
+
+def get_conversations_for_entity(entity_instance: Model) -> Awaitable[List]:
+    """
+    Get all conversations linked to an entity asynchronously.
+
+    This helper handles the GenericForeignKey relationship pattern for conversations.
+    Use this in your GraphQL types to easily add a conversations field.
+
+    Args:
+        entity_instance: The model instance (e.g., Service, Project, Account, Customer)
+
+    Returns:
+        Awaitable that resolves to list of Conversation objects
+
+    Example usage in GraphQL type:
+        @strawberry.field
+        async def conversations(self) -> List[ConversationType]:
+            return await get_conversations_for_entity(self)
+    """
+    return database_sync_to_async(_get_conversations_for_entity_sync)(entity_instance)
diff --git a/core/mcp/__init__.py b/core/mcp/__init__.py
new file mode 100644
index 0000000..5121ba1
--- /dev/null
+++ b/core/mcp/__init__.py
@@ -0,0 +1,2 @@
+# MCP Server integration for Nexus
+# Import tools and resources to register them with the FastMCP server (see core/mcp/base.py)
diff --git a/core/mcp/__main__.py b/core/mcp/__main__.py
new file mode 100644
index 0000000..9aefc22
--- /dev/null
+++ b/core/mcp/__main__.py
@@ -0,0 +1,5 @@
+"""Entry point for running the MCP server as a module."""
+from core.mcp.server import mcp
+
+if __name__ == "__main__":
+    mcp.run()
diff --git a/core/mcp/auth.py b/core/mcp/auth.py
new file mode 100644
index 0000000..7b3b690
--- /dev/null
+++ b/core/mcp/auth.py
@@ -0,0 +1,322 @@
+"""
+MCP Authentication and Authorization Module
+
+Handles profile context management and role-based access control for MCP tools.
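+
+Typical flow (illustrative):
+    1. The client calls the set_active_profile tool with a TeamProfile UUID.
+    2. MCPContext caches that profile for the server process.
+    3. Later tool calls read it, or gate on @require_role / @require_minimum_role.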
+""" + +import json +from enum import Enum +from functools import wraps +from typing import Any, Callable, Optional +from channels.db import database_sync_to_async +from core.models.profile import TeamProfile + + +class MCPContext: + """Singleton context manager for MCP session state.""" + + _instance = None + _profile: Optional[TeamProfile] = None + _profile_id: Optional[str] = None + + def __new__(cls): + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + @classmethod + def set_profile(cls, profile: TeamProfile): + """Set the active profile for this session.""" + cls._profile = profile + cls._profile_id = str(profile.id) + + @classmethod + def get_profile(cls) -> Optional[TeamProfile]: + """Get the active profile.""" + return cls._profile + + @classmethod + def get_profile_id(cls) -> Optional[str]: + """Get the active profile ID.""" + return cls._profile_id + + @classmethod + def clear(cls): + """Clear the session context.""" + cls._profile = None + cls._profile_id = None + + @classmethod + def is_authenticated(cls) -> bool: + """Check if a profile is set.""" + return cls._profile is not None + + +class Role(str, Enum): + """Role enumeration matching RoleChoices.""" + ADMIN = "ADMIN" + TEAM_LEADER = "TEAM_LEADER" + TEAM_MEMBER = "TEAM_MEMBER" + + +# Role hierarchy for permission checks +ROLE_HIERARCHY = { + Role.ADMIN: 3, + Role.TEAM_LEADER: 2, + Role.TEAM_MEMBER: 1, +} + + +def get_role_level(role: str) -> int: + """Get numeric level for a role.""" + try: + return ROLE_HIERARCHY[Role(role)] + except (ValueError, KeyError): + return 0 + + +async def get_team_profile(profile_id: str) -> Optional[TeamProfile]: + """Fetch a TeamProfile by ID.""" + + @database_sync_to_async + def fetch(): + try: + return TeamProfile.objects.get(pk=profile_id) + except TeamProfile.DoesNotExist: + return None + + return await fetch() + + +async def set_active_profile(profile_id: str) -> TeamProfile: + """ + Set the active profile for the MCP session. + + Args: + profile_id: UUID of the team profile + + Returns: + The TeamProfile instance + + Raises: + PermissionError: If profile not found + """ + profile = await get_team_profile(profile_id) + if not profile: + raise PermissionError(f"Profile {profile_id} not found") + + MCPContext.set_profile(profile) + return profile + + +def require_auth(func: Callable) -> Callable: + """Decorator to require authentication.""" + + @wraps(func) + async def wrapper(*args, **kwargs): + if not MCPContext.is_authenticated(): + raise PermissionError( + "No active profile. Call set_active_profile first." + ) + return await func(*args, **kwargs) + + return wrapper + + +def require_role(*allowed_roles: Role) -> Callable: + """ + Decorator to require specific roles. + + Usage: + @require_role(Role.ADMIN, Role.TEAM_LEADER) + async def admin_or_leader_tool(): + ... + """ + + def decorator(func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args, **kwargs): + profile = MCPContext.get_profile() + if not profile: + raise PermissionError( + "No active profile. Call set_active_profile first." + ) + + role_values = [r.value for r in allowed_roles] + if profile.role not in role_values: + raise PermissionError( + f"Access denied. Required role: {role_values}, " + f"your role: {profile.role}" + ) + + return await func(*args, **kwargs) + + return wrapper + + return decorator + + +def require_minimum_role(min_role: Role) -> Callable: + """ + Decorator to require a minimum role level. 
+ + Usage: + @require_minimum_role(Role.TEAM_LEADER) + async def leader_or_above_tool(): + ... + """ + + def decorator(func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args, **kwargs): + profile = MCPContext.get_profile() + if not profile: + raise PermissionError( + "No active profile. Call set_active_profile first." + ) + + user_level = get_role_level(profile.role) + required_level = ROLE_HIERARCHY[min_role] + + if user_level < required_level: + raise PermissionError( + f"Access denied. Minimum role required: {min_role.value}, " + f"your role: {profile.role}" + ) + + return await func(*args, **kwargs) + + return wrapper + + return decorator + + +async def check_entity_access( + entity_type: str, + entity_id: str, + require_assignment: bool = True +) -> bool: + """ + Check if the current profile has access to an entity. + + For ADMIN and TEAM_LEADER: always allowed + For TEAM_MEMBER: only if assigned (when require_assignment=True) + + Args: + entity_type: 'service' or 'project' + entity_id: UUID of the entity + require_assignment: Whether to check team member assignment + + Returns: + True if access is allowed + + Raises: + PermissionError: If access denied + """ + profile = MCPContext.get_profile() + if not profile: + raise PermissionError("No active profile") + + # Admin and Team Leader have full access + if profile.role in [Role.ADMIN.value, Role.TEAM_LEADER.value]: + return True + + if not require_assignment: + return True + + # Team Member must be assigned + @database_sync_to_async + def check_assignment(): + if entity_type == 'service': + from core.models import Service + return Service.objects.filter( + pk=entity_id, + team_members__id=profile.id + ).exists() + elif entity_type == 'project': + from core.models import Project + return Project.objects.filter( + pk=entity_id, + team_members__id=profile.id + ).exists() + return False + + is_assigned = await check_assignment() + if not is_assigned: + raise PermissionError( + f"Access denied. You are not assigned to this {entity_type}." + ) + + return True + + +class MockRequest: + """Mock request object for GraphQL context.""" + + def __init__(self, profile: TeamProfile): + self.profile = profile + + +class MockContext: + """Mock context for GraphQL execution.""" + + def __init__(self, profile: TeamProfile): + self.request = MockRequest(profile) + + +async def execute_graphql( + query: str, + variables: Optional[dict] = None, + profile: Optional[TeamProfile] = None +) -> dict: + """ + Execute a GraphQL query with profile context. + + Args: + query: GraphQL query string + variables: Optional variables dict + profile: Optional profile override (uses active profile if not provided) + + Returns: + GraphQL result data + + Raises: + PermissionError: If no profile available + Exception: If GraphQL errors occur + """ + from core.graphql.schema import schema + + if profile is None: + profile = MCPContext.get_profile() + + if not profile: + raise PermissionError( + "No active profile. Call set_active_profile first." 
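+            # i.e. nothing was cached by set_active_profile for this process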
+ ) + + context = MockContext(profile) + + result = await schema.execute( + query, + variable_values=variables, + context_value=context + ) + + if result.errors: + error_messages = [str(e) for e in result.errors] + return { + "errors": error_messages, + "data": result.data + } + + return {"data": result.data} + + +def json_response(data: Any, indent: int = 2) -> str: + """Convert data to JSON string for MCP response.""" + return json.dumps(data, indent=indent, default=str) + + +def error_response(message: str) -> str: + """Create an error response.""" + return json.dumps({"error": message}, indent=2) diff --git a/core/mcp/base.py b/core/mcp/base.py new file mode 100644 index 0000000..090bcd0 --- /dev/null +++ b/core/mcp/base.py @@ -0,0 +1,39 @@ +""" +MCP Base Module - Shared instance and utilities. + +This module creates the FastMCP instance that all tool modules use. +Import `mcp` from here to register tools. +""" + +import json +import logging +import os +import sys +from typing import Any + +# Add project root to path for Django imports +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) + +# Setup Django before any model imports +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') +import django +django.setup() + +from mcp.server.fastmcp import FastMCP + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Create the shared MCP server instance +mcp = FastMCP(name="nexus") + + +def json_response(data: Any, indent: int = 2) -> str: + """Convert data to JSON string for MCP response.""" + return json.dumps(data, indent=indent, default=str) + + +def error_response(message: str) -> str: + """Create an error response.""" + return json.dumps({"error": message}, indent=2) diff --git a/core/mcp/server.py b/core/mcp/server.py new file mode 100644 index 0000000..db10115 --- /dev/null +++ b/core/mcp/server.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +""" +Nexus MCP Server - Role-Based GraphQL Integration + +Run with: python -m core.mcp.server + +This server exposes Nexus business operations as MCP tools with role-based +access control. All operations use GraphQL internally for consistency. 
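+
+Example client config (stdio transport; cf. .mcp.json.example, values illustrative):
+
+    {"mcpServers": {"nexus": {"command": "python", "args": ["-m", "core.mcp.server"]}}}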
+ +Roles: +- ADMIN: Full access to all operations +- TEAM_LEADER: View all, no modifications +- TEAM_MEMBER: View/modify own assigned work only +""" + +# Import base to initialize Django and create MCP instance +from core.mcp.base import mcp + +# Import all tool modules to register them +from core.mcp.tools import ( # noqa: F401 + auth, + dashboard, + customers, + services, + projects, + sessions, + notifications, + admin, + utility, +) + + +# ============================================================================= +# RESOURCES +# ============================================================================= + +@mcp.resource("nexus://schema") +def get_graphql_schema() -> str: + """Get the GraphQL schema in SDL format.""" + from core.graphql.schema import schema + return str(schema) + + +@mcp.resource("nexus://roles") +def get_roles_info() -> str: + """Get information about role permissions.""" + return """# Nexus Role Permissions + +## ADMIN +- Full access to all operations +- Can view all customers, accounts, services, projects +- Can create/update/delete services and projects +- Can open/close sessions for any work +- Can manage system-wide notification rules + +## TEAM_LEADER +- View-only access to all data +- Can see all customers, accounts, services, projects +- Cannot create, update, or delete anything +- Cannot open/close sessions +- Can manage their own notifications + +## TEAM_MEMBER +- Limited to their own assigned work +- Can view only services/projects assigned to them +- Can open/close sessions for their assigned work +- Can manage task completions during sessions +- Can create personal notification rules (scoped to self) +- Can manage their own notifications +""" + + +if __name__ == "__main__": + mcp.run() diff --git a/core/mcp/tools/__init__.py b/core/mcp/tools/__init__.py new file mode 100644 index 0000000..2c2afaa --- /dev/null +++ b/core/mcp/tools/__init__.py @@ -0,0 +1,29 @@ +""" +MCP Tools Package + +Import all tool modules to register them with the MCP server. +""" + +from core.mcp.tools import ( + auth, + dashboard, + customers, + services, + projects, + sessions, + notifications, + admin, + utility, +) + +__all__ = [ + "auth", + "dashboard", + "customers", + "services", + "projects", + "sessions", + "notifications", + "admin", + "utility", +] diff --git a/core/mcp/tools/admin.py b/core/mcp/tools/admin.py new file mode 100644 index 0000000..c657781 --- /dev/null +++ b/core/mcp/tools/admin.py @@ -0,0 +1,279 @@ +"""Admin tools for MCP.""" + +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role, execute_graphql +from core.mcp.base import mcp, json_response, error_response + + +@mcp.tool() +async def list_team_profiles( + role: Optional[str] = None, + include_admin: bool = False +) -> str: + """ + List all team members. Requires ADMIN or TEAM_LEADER role. + + Args: + role: Optional role filter (TEAM_LEADER, TEAM_MEMBER) + include_admin: Whether to include admin profiles (default false) + + Returns: + JSON array of team profile objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. 
TEAM_LEADER or ADMIN role required.") + + from core.models import TeamProfile + + @database_sync_to_async + def fetch(): + qs = TeamProfile.objects.all() + + if not include_admin: + qs = qs.exclude(role='ADMIN') + + if role: + qs = qs.filter(role=role) + + return [ + { + "id": str(p.id), + "first_name": p.first_name, + "last_name": p.last_name, + "email": p.email, + "phone": p.phone, + "role": p.role, + "status": p.status + } + for p in qs + ] + + profiles = await fetch() + return json_response(profiles) + + +@mcp.tool() +async def list_notification_rules(is_active: Optional[bool] = None) -> str: + """ + List all notification rules. Requires ADMIN role. + + Args: + is_active: Optional filter by active status + + Returns: + JSON array of notification rule objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + query = """ + query ListRules($isActive: Boolean) { + notificationRules(isActive: $isActive) { + id + name + description + eventTypes + channels + targetRoles + isActive + templateSubject + templateBody + } + } + """ + + result = await execute_graphql(query, {"isActive": is_active}) + + if "errors" in result: + return json_response(result) + + return json_response(result["data"]["notificationRules"] or []) + + +@mcp.tool() +async def create_notification_rule( + name: str, + event_types: str, + channels: str, + description: Optional[str] = None, + target_roles: Optional[str] = None, + target_team_profile_ids: Optional[str] = None, + template_subject: Optional[str] = None, + template_body: Optional[str] = None, + is_active: bool = True +) -> str: + """ + Create a system-wide notification rule. Requires ADMIN role. + + Args: + name: Rule name + event_types: Comma-separated event types + channels: Comma-separated channels (IN_APP, EMAIL, SMS) + description: Optional description + target_roles: Comma-separated roles to notify (ADMIN, TEAM_LEADER, TEAM_MEMBER) + target_team_profile_ids: Comma-separated profile UUIDs to notify + template_subject: Subject template with {variables} + template_body: Body template with {variables} + is_active: Whether rule is active (default true) + + Returns: + JSON object with created rule + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + mutation = """ + mutation CreateRule($input: NotificationRuleInput!) 
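+    # Variables shape (illustrative):
+    #   {"input": {"name": "...", "eventTypes": ["..."], "channels": ["IN_APP"]}}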
{ + createNotificationRule(input: $input) { + id + name + eventTypes + channels + targetRoles + isActive + } + } + """ + + input_data = { + "name": name, + "eventTypes": [e.strip() for e in event_types.split(",")], + "channels": [c.strip() for c in channels.split(",")], + "description": description, + "templateSubject": template_subject, + "templateBody": template_body, + "isActive": is_active + } + + if target_roles: + input_data["targetRoles"] = [r.strip() for r in target_roles.split(",")] + + if target_team_profile_ids: + input_data["targetTeamProfileIds"] = [ + pid.strip() for pid in target_team_profile_ids.split(",") + ] + + result = await execute_graphql(mutation, {"input": input_data}) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "rule": result["data"]["createNotificationRule"] + }) + + +@mcp.tool() +async def update_notification_rule( + rule_id: str, + name: Optional[str] = None, + event_types: Optional[str] = None, + channels: Optional[str] = None, + target_roles: Optional[str] = None, + is_active: Optional[bool] = None +) -> str: + """ + Update a notification rule. Requires ADMIN role. + + Args: + rule_id: UUID of the rule to update + name: New rule name + event_types: Comma-separated event types + channels: Comma-separated channels + target_roles: Comma-separated roles + is_active: Whether rule is active + + Returns: + JSON object with updated rule + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + mutation = """ + mutation UpdateRule($input: NotificationRuleUpdateInput!) { + updateNotificationRule(input: $input) { + id + name + eventTypes + channels + isActive + } + } + """ + + input_data = {"id": rule_id} + if name: + input_data["name"] = name + if event_types: + input_data["eventTypes"] = [e.strip() for e in event_types.split(",")] + if channels: + input_data["channels"] = [c.strip() for c in channels.split(",")] + if target_roles: + input_data["targetRoles"] = [r.strip() for r in target_roles.split(",")] + if is_active is not None: + input_data["isActive"] = is_active + + result = await execute_graphql(mutation, {"input": input_data}) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "rule": result["data"]["updateNotificationRule"] + }) + + +@mcp.tool() +async def delete_notification_rule(rule_id: str) -> str: + """ + Delete a notification rule. Requires ADMIN role. + + Args: + rule_id: UUID of the rule to delete + + Returns: + JSON object confirming deletion + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + mutation = """ + mutation DeleteRule($id: ID!) 
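+    # No selection set: deleteNotificationRule returns a scalar,
+    # surfaced as the "deleted" flag in this tool's response.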
{ + deleteNotificationRule(id: $id) + } + """ + + result = await execute_graphql(mutation, {"id": rule_id}) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "deleted": result["data"]["deleteNotificationRule"] + }) diff --git a/core/mcp/tools/auth.py b/core/mcp/tools/auth.py new file mode 100644 index 0000000..8a3297d --- /dev/null +++ b/core/mcp/tools/auth.py @@ -0,0 +1,57 @@ +"""Authentication tools for MCP.""" + +from core.mcp.auth import ( + MCPContext, + set_active_profile as _set_active_profile, +) +from core.mcp.base import mcp, json_response, error_response + + +@mcp.tool() +async def set_active_profile(profile_id: str) -> str: + """ + Set the active profile for this MCP session. Must be called before using other tools. + + Args: + profile_id: UUID of the team profile + + Returns: + JSON object with profile info and role + """ + try: + profile = await _set_active_profile(profile_id) + return json_response({ + "success": True, + "profile": { + "id": str(profile.id), + "name": f"{profile.first_name} {profile.last_name}".strip(), + "email": profile.email, + "role": profile.role + }, + "message": f"Active profile set to {profile.first_name} ({profile.role})" + }) + except PermissionError as e: + return error_response(str(e)) + + +@mcp.tool() +async def get_my_profile() -> str: + """ + Get the current active profile's information. + + Returns: + JSON object with profile details including role and contact info + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + return json_response({ + "id": str(profile.id), + "first_name": profile.first_name, + "last_name": profile.last_name, + "email": profile.email, + "phone": profile.phone, + "role": profile.role, + "status": profile.status + }) diff --git a/core/mcp/tools/customers.py b/core/mcp/tools/customers.py new file mode 100644 index 0000000..dde4ec9 --- /dev/null +++ b/core/mcp/tools/customers.py @@ -0,0 +1,263 @@ +"""Customer and Account tools for MCP.""" + +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role +from core.mcp.base import mcp, json_response, error_response + + +@mcp.tool() +async def list_customers( + limit: int = 25, + search: Optional[str] = None, + is_active: Optional[bool] = None +) -> str: + """ + List customers with optional filtering. Requires ADMIN or TEAM_LEADER role. + + Args: + limit: Maximum customers to return (default 25) + search: Optional search term for customer name + is_active: Optional filter for active status + + Returns: + JSON array of customer objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. 
TEAM_LEADER or ADMIN role required.") + + from core.models import Customer + + @database_sync_to_async + def fetch(): + qs = Customer.objects.prefetch_related('contacts') + + if search: + qs = qs.filter(name__icontains=search) + if is_active is not None: + qs = qs.filter(status='ACTIVE' if is_active else 'INACTIVE') + + results = [] + for c in qs[:limit]: + # Get primary contact if available + primary_contact = c.contacts.filter(is_primary=True, is_active=True).first() + results.append({ + "id": str(c.id), + "name": c.name, + "status": c.status, + "billing_email": c.billing_email, + "primary_contact": { + "name": f"{primary_contact.first_name} {primary_contact.last_name}".strip(), + "email": primary_contact.email, + "phone": primary_contact.phone + } if primary_contact else None + }) + return results + + customers = await fetch() + return json_response(customers) + + +@mcp.tool() +async def get_customer(customer_id: str) -> str: + """ + Get detailed customer information including accounts. Requires ADMIN or TEAM_LEADER role. + + Args: + customer_id: UUID of the customer + + Returns: + JSON object with customer details and accounts + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. TEAM_LEADER or ADMIN role required.") + + from core.models import Customer + + @database_sync_to_async + def fetch(): + try: + c = Customer.objects.prefetch_related( + 'accounts__addresses', 'contacts', 'addresses' + ).get(pk=customer_id) + + return { + "id": str(c.id), + "name": c.name, + "status": c.status, + "billing_email": c.billing_email, + "contacts": [ + { + "id": str(ct.id), + "first_name": ct.first_name, + "last_name": ct.last_name, + "email": ct.email, + "phone": ct.phone, + "is_primary": ct.is_primary + } + for ct in c.contacts.filter(is_active=True) + ], + "accounts": [ + { + "id": str(a.id), + "name": a.name, + "status": a.status, + "addresses": [ + { + "id": str(addr.id), + "name": addr.name or "Primary", + "street_address": addr.street_address, + "city": addr.city, + "state": addr.state, + "zip_code": addr.zip_code + } + for addr in a.addresses.all() + ] + } + for a in c.accounts.all() + ] + } + except Customer.DoesNotExist: + return None + + customer = await fetch() + if not customer: + return error_response(f"Customer {customer_id} not found") + + return json_response(customer) + + +@mcp.tool() +async def list_accounts( + limit: int = 25, + customer_id: Optional[str] = None, + search: Optional[str] = None, + is_active: Optional[bool] = None +) -> str: + """ + List accounts with optional filtering. Requires ADMIN or TEAM_LEADER role. + + Args: + limit: Maximum accounts to return (default 25) + customer_id: Optional customer UUID to filter by + search: Optional search term for account name + is_active: Optional filter for active status + + Returns: + JSON array of account objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. 
TEAM_LEADER or ADMIN role required.") + + from core.models import Account + + @database_sync_to_async + def fetch(): + qs = Account.objects.select_related('customer').prefetch_related('addresses') + + if customer_id: + qs = qs.filter(customer_id=customer_id) + if search: + qs = qs.filter(name__icontains=search) + if is_active is not None: + qs = qs.filter(status='ACTIVE' if is_active else 'INACTIVE') + + return [ + { + "id": str(a.id), + "name": a.name, + "status": a.status, + "customer": {"id": str(a.customer.id), "name": a.customer.name}, + "addresses": [ + { + "id": str(addr.id), + "name": addr.name or "Primary", + "city": addr.city, + "state": addr.state + } + for addr in a.addresses.all() + ] + } + for a in qs[:limit] + ] + + accounts = await fetch() + return json_response(accounts) + + +@mcp.tool() +async def get_account(account_id: str) -> str: + """ + Get detailed account information. Requires ADMIN or TEAM_LEADER role. + + Args: + account_id: UUID of the account + + Returns: + JSON object with account details, addresses, and contacts + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. TEAM_LEADER or ADMIN role required.") + + from core.models import Account + + @database_sync_to_async + def fetch(): + try: + a = Account.objects.select_related('customer').prefetch_related( + 'addresses', 'contacts' + ).get(pk=account_id) + + return { + "id": str(a.id), + "name": a.name, + "status": a.status, + "customer": {"id": str(a.customer.id), "name": a.customer.name}, + "addresses": [ + { + "id": str(addr.id), + "name": addr.name or "Primary", + "street_address": addr.street_address, + "city": addr.city, + "state": addr.state, + "zip_code": addr.zip_code + } + for addr in a.addresses.all() + ], + "contacts": [ + { + "id": str(ct.id), + "first_name": ct.first_name, + "last_name": ct.last_name, + "email": ct.email, + "phone": ct.phone + } + for ct in a.contacts.filter(is_active=True) + ] + } + except Account.DoesNotExist: + return None + + account = await fetch() + if not account: + return error_response(f"Account {account_id} not found") + + return json_response(account) diff --git a/core/mcp/tools/dashboard.py b/core/mcp/tools/dashboard.py new file mode 100644 index 0000000..3577d69 --- /dev/null +++ b/core/mcp/tools/dashboard.py @@ -0,0 +1,168 @@ +"""Dashboard tools for MCP.""" + +from datetime import date, timedelta +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role +from core.mcp.base import mcp, json_response, error_response, logger + + +@mcp.tool() +async def get_my_schedule( + start_date: Optional[str] = None, + end_date: Optional[str] = None, + status: Optional[str] = None +) -> str: + """ + Get your assigned services and projects for a date range. + + Args: + start_date: Start date in YYYY-MM-DD format (defaults to today) + end_date: End date in YYYY-MM-DD format (defaults to 7 days from start) + status: Optional status filter (SCHEDULED, IN_PROGRESS, COMPLETED) + + Returns: + JSON object with services and projects arrays + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. 
Call set_active_profile first.") + + from datetime import datetime + + # Default date range + if not start_date: + start_date = date.today().isoformat() + if not end_date: + start = datetime.strptime(start_date, "%Y-%m-%d").date() + end_date = (start + timedelta(days=7)).isoformat() + + @database_sync_to_async + def fetch_schedule(): + from core.models import Service, Project + + # Build base querysets + if profile.role == Role.TEAM_MEMBER.value: + services_qs = Service.objects.filter(team_members__id=profile.id) + projects_qs = Project.objects.filter(team_members__id=profile.id) + else: + services_qs = Service.objects.all() + projects_qs = Project.objects.all() + + # Apply date filters + services_qs = services_qs.filter(date__gte=start_date, date__lte=end_date) + projects_qs = projects_qs.filter(date__gte=start_date, date__lte=end_date) + + # Apply status filter + if status: + services_qs = services_qs.filter(status=status) + projects_qs = projects_qs.filter(status=status) + + # Fetch with related data + services_qs = services_qs.select_related( + 'account_address__account__customer' + ).prefetch_related('team_members').order_by('date') + + projects_qs = projects_qs.select_related( + 'customer', 'account_address__account' + ).prefetch_related('team_members').order_by('date') + + services = [] + for s in services_qs[:50]: + addr = s.account_address + services.append({ + "id": str(s.id), + "type": "service", + "date": str(s.date), + "status": s.status, + "customer": addr.account.customer.name, + "account": addr.account.name, + "location": addr.name or "Primary", + "address": f"{addr.street_address}, {addr.city}, {addr.state} {addr.zip_code}", + "team_members": [ + f"{t.first_name} {t.last_name}".strip() + for t in s.team_members.all() if t.role != 'ADMIN' + ] + }) + + projects = [] + for p in projects_qs[:50]: + if p.account_address: + addr = p.account_address + location = addr.name or "Primary" + address = f"{addr.street_address}, {addr.city}, {addr.state} {addr.zip_code}" + account = addr.account.name + else: + location = None + address = f"{p.street_address}, {p.city}, {p.state} {p.zip_code}" + account = None + + projects.append({ + "id": str(p.id), + "type": "project", + "name": p.name, + "date": str(p.date), + "status": p.status, + "customer": p.customer.name, + "account": account, + "location": location, + "address": address, + "labor": float(p.labor), + "amount": float(p.amount), + "team_members": [ + f"{t.first_name} {t.last_name}".strip() + for t in p.team_members.all() if t.role != 'ADMIN' + ] + }) + + return {"services": services, "projects": projects} + + try: + result = await fetch_schedule() + return json_response(result) + except Exception as e: + logger.error(f"Error fetching schedule: {e}") + return error_response(str(e)) + + +@mcp.tool() +async def get_system_stats() -> str: + """ + Get high-level system statistics. Requires ADMIN or TEAM_LEADER role. + + Returns: + JSON object with counts of customers, accounts, services, projects, etc. + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role == Role.TEAM_MEMBER.value: + return error_response("Access denied. 
TEAM_LEADER or ADMIN role required.") + + from core.models import Customer, Account, Service, Project, TeamProfile + + @database_sync_to_async + def fetch_stats(): + return { + "customers": Customer.objects.count(), + "accounts": Account.objects.count(), + "services": { + "total": Service.objects.count(), + "scheduled": Service.objects.filter(status='SCHEDULED').count(), + "in_progress": Service.objects.filter(status='IN_PROGRESS').count(), + "completed": Service.objects.filter(status='COMPLETED').count(), + }, + "projects": { + "total": Project.objects.count(), + "scheduled": Project.objects.filter(status='SCHEDULED').count(), + "in_progress": Project.objects.filter(status='IN_PROGRESS').count(), + "completed": Project.objects.filter(status='COMPLETED').count(), + }, + "team_members": TeamProfile.objects.exclude(role='ADMIN').count(), + } + + stats = await fetch_stats() + return json_response(stats) diff --git a/core/mcp/tools/notifications.py b/core/mcp/tools/notifications.py new file mode 100644 index 0000000..77e8b2c --- /dev/null +++ b/core/mcp/tools/notifications.py @@ -0,0 +1,257 @@ +"""Notification tools for MCP.""" + +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role, execute_graphql +from core.mcp.base import mcp, json_response, error_response, logger + + +@mcp.tool() +async def get_my_notifications( + unread_only: bool = False, + limit: int = 50, + offset: int = 0 +) -> str: + """ + Get your notifications. + + Args: + unread_only: If true, only return unread notifications + limit: Maximum notifications to return (default 50) + offset: Pagination offset + + Returns: + JSON array of notification objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + query = """ + query GetNotifications($unreadOnly: Boolean, $limit: Int, $offset: Int) { + myNotifications(unreadOnly: $unreadOnly, limit: $limit, offset: $offset) { + id + subject + body + status + actionUrl + readAt + createdAt + event { + eventType + entityType + entityId + } + } + } + """ + + result = await execute_graphql(query, { + "unreadOnly": unread_only, + "limit": limit, + "offset": offset + }) + + if "errors" in result: + return json_response(result) + + return json_response(result["data"]["myNotifications"] or []) + + +@mcp.tool() +async def get_unread_notification_count() -> str: + """ + Get the count of unread notifications. + + Returns: + JSON object with unread count + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + query = """ + query GetUnreadCount { + myUnreadNotificationCount + } + """ + + result = await execute_graphql(query) + + if "errors" in result: + return json_response(result) + + return json_response({"unread_count": result["data"]["myUnreadNotificationCount"]}) + + +@mcp.tool() +async def mark_notification_read(notification_id: str) -> str: + """ + Mark a notification as read. + + Args: + notification_id: UUID of the notification + + Returns: + JSON object confirming the notification was marked read + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + mutation = """ + mutation MarkRead($id: ID!) 
{ + markNotificationAsRead(id: $id) { + id + status + readAt + } + } + """ + + result = await execute_graphql(mutation, {"id": notification_id}) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "notification": result["data"]["markNotificationAsRead"] + }) + + +@mcp.tool() +async def mark_all_notifications_read() -> str: + """ + Mark all unread notifications as read. + + Returns: + JSON object with count of notifications marked read + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + mutation = """ + mutation MarkAllRead { + markAllNotificationsAsRead + } + """ + + result = await execute_graphql(mutation) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "count_marked": result["data"]["markAllNotificationsAsRead"] + }) + + +@mcp.tool() +async def list_my_notification_rules() -> str: + """ + List notification rules that apply to you. + - ADMIN: See all rules + - Others: See rules targeting their role or profile + + Returns: + JSON array of notification rule objects + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + from core.models import NotificationRule + + @database_sync_to_async + def fetch(): + if profile.role == Role.ADMIN.value: + qs = NotificationRule.objects.filter(is_active=True) + else: + from django.db.models import Q + qs = NotificationRule.objects.filter( + Q(is_active=True) & ( + Q(target_roles__contains=[profile.role]) | + Q(target_team_profiles__id=profile.id) + ) + ).distinct() + + return [ + { + "id": str(r.id), + "name": r.name, + "description": r.description, + "event_types": r.event_types, + "channels": r.channels, + "target_roles": r.target_roles, + "is_personal": r.target_team_profiles.filter(id=profile.id).exists() + } + for r in qs + ] + + rules = await fetch() + return json_response(rules) + + +@mcp.tool() +async def create_personal_notification_rule( + name: str, + event_types: str, + channels: str, + description: Optional[str] = None, + template_subject: Optional[str] = None, + template_body: Optional[str] = None +) -> str: + """ + Create a personal notification rule (scoped to yourself only). + + Args: + name: Rule name + event_types: Comma-separated event types (e.g., 'SERVICE_COMPLETED,PROJECT_COMPLETED') + channels: Comma-separated channels (IN_APP, EMAIL, SMS) + description: Optional description + template_subject: Optional subject template + template_body: Optional body template + + Returns: + JSON object with created rule + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. 
Call set_active_profile first.") + + from core.models import NotificationRule + + event_list = [e.strip() for e in event_types.split(",")] + channel_list = [c.strip() for c in channels.split(",")] + + @database_sync_to_async + def create(): + rule = NotificationRule.objects.create( + name=name, + description=description or "", + event_types=event_list, + channels=channel_list, + template_subject=template_subject or "", + template_body=template_body or "", + is_active=True + ) + rule.target_team_profiles.add(profile) + return { + "id": str(rule.id), + "name": rule.name, + "event_types": rule.event_types, + "channels": rule.channels, + "is_personal": True + } + + try: + rule = await create() + return json_response({"success": True, "rule": rule}) + except Exception as e: + logger.error(f"Error creating notification rule: {e}") + return error_response(str(e)) diff --git a/core/mcp/tools/projects.py b/core/mcp/tools/projects.py new file mode 100644 index 0000000..a48e9df --- /dev/null +++ b/core/mcp/tools/projects.py @@ -0,0 +1,424 @@ +"""Project tools for MCP.""" + +from datetime import datetime +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role, execute_graphql +from core.mcp.base import mcp, json_response, error_response + + +@mcp.tool() +async def list_projects( + limit: int = 25, + customer_id: Optional[str] = None, + status: Optional[str] = None, + date: Optional[str] = None, + start_date: Optional[str] = None, + end_date: Optional[str] = None +) -> str: + """ + List projects with optional filters. + - ADMIN/TEAM_LEADER: See all projects + - TEAM_MEMBER: See only assigned projects + + Args: + limit: Maximum projects to return (default 25) + customer_id: Optional customer UUID to filter by + status: Optional status filter (SCHEDULED, IN_PROGRESS, COMPLETED, CANCELLED) + date: Optional exact date in YYYY-MM-DD format + start_date: Optional range start date + end_date: Optional range end date + + Returns: + JSON array of project objects with full context + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. 
Call set_active_profile first.") + + from core.models import Project + + @database_sync_to_async + def fetch(): + qs = Project.objects.select_related( + 'customer', 'account_address__account' + ).prefetch_related('team_members') + + if profile.role == Role.TEAM_MEMBER.value: + qs = qs.filter(team_members__id=profile.id) + + if customer_id: + qs = qs.filter(customer_id=customer_id) + if status: + qs = qs.filter(status=status) + if date: + qs = qs.filter(date=datetime.strptime(date, "%Y-%m-%d").date()) + if start_date: + qs = qs.filter(date__gte=datetime.strptime(start_date, "%Y-%m-%d").date()) + if end_date: + qs = qs.filter(date__lte=datetime.strptime(end_date, "%Y-%m-%d").date()) + + qs = qs.order_by('-date')[:limit] + + results = [] + for p in qs: + if p.account_address: + addr = p.account_address + location = addr.name or "Primary" + address = f"{addr.street_address}, {addr.city}, {addr.state} {addr.zip_code}" + account = addr.account.name + else: + location = None + address = f"{p.street_address}, {p.city}, {p.state} {p.zip_code}" + account = None + + results.append({ + "id": str(p.id), + "name": p.name, + "date": str(p.date), + "status": p.status, + "customer": p.customer.name, + "account": account, + "location": location, + "address": address, + "labor": float(p.labor), + "amount": float(p.amount), + "team_members": [ + f"{t.first_name} {t.last_name}".strip() + for t in p.team_members.all() if t.role != 'ADMIN' + ], + "notes": p.notes or "" + }) + return results + + projects = await fetch() + return json_response(projects) + + +@mcp.tool() +async def get_project(project_id: str) -> str: + """ + Get detailed project information including scope and tasks. + - ADMIN/TEAM_LEADER: Any project + - TEAM_MEMBER: Only if assigned + + Args: + project_id: UUID of the project + + Returns: + JSON object with full project details + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + from core.models import Project + + @database_sync_to_async + def fetch(): + try: + p = Project.objects.select_related( + 'customer', 'account_address__account' + ).prefetch_related( + 'team_members', 'scope__categories__tasks' + ).get(pk=project_id) + + if profile.role == Role.TEAM_MEMBER.value: + if not p.team_members.filter(id=profile.id).exists(): + return {"error": "Access denied. 
You are not assigned to this project."} + + if p.account_address: + addr = p.account_address + location = { + "id": str(addr.id), + "name": addr.name or "Primary", + "street_address": addr.street_address, + "city": addr.city, + "state": addr.state, + "zip_code": addr.zip_code + } + account = {"id": str(addr.account.id), "name": addr.account.name} + else: + location = { + "street_address": p.street_address, + "city": p.city, + "state": p.state, + "zip_code": p.zip_code + } + account = None + + scope_data = None + if hasattr(p, 'scope') and p.scope: + scope = p.scope + scope_data = { + "id": str(scope.id), + "categories": [ + { + "id": str(cat.id), + "name": cat.name, + "tasks": [ + { + "id": str(task.id), + "description": task.checklist_description, + "is_completed": task.is_completed + } + for task in cat.tasks.all() + ] + } + for cat in scope.categories.all() + ] + } + + return { + "id": str(p.id), + "name": p.name, + "date": str(p.date), + "status": p.status, + "labor": float(p.labor), + "amount": float(p.amount), + "notes": p.notes, + "customer": { + "id": str(p.customer.id), + "name": p.customer.name + }, + "account": account, + "location": location, + "team_members": [ + { + "id": str(t.id), + "name": f"{t.first_name} {t.last_name}".strip(), + "email": t.email, + "phone": t.phone + } + for t in p.team_members.all() if t.role != 'ADMIN' + ], + "scope": scope_data + } + except Project.DoesNotExist: + return {"error": f"Project {project_id} not found"} + + result = await fetch() + if "error" in result: + return error_response(result["error"]) + + return json_response(result) + + +@mcp.tool() +async def create_project( + customer_id: str, + name: str, + date: str, + labor: float, + amount: float = 0, + account_address_id: Optional[str] = None, + street_address: Optional[str] = None, + city: Optional[str] = None, + state: Optional[str] = None, + zip_code: Optional[str] = None, + team_member_ids: Optional[str] = None, + notes: Optional[str] = None +) -> str: + """ + Create a new project. Requires ADMIN role. + + Args: + customer_id: UUID of the customer + name: Project name + date: Project date in YYYY-MM-DD format + labor: Labor cost + amount: Total amount (default 0) + account_address_id: UUID of account address (OR use freeform address below) + street_address: Freeform street address + city: Freeform city + state: Freeform state + zip_code: Freeform zip code + team_member_ids: Comma-separated UUIDs of team members + notes: Optional notes + + Returns: + JSON object with created project + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + mutation = """ + mutation CreateProject($input: ProjectInput!) 
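+    # $input mirrors this tool's arguments in camelCase (built as input_data below).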
{ + createProject(input: $input) { + id + name + date + status + labor + amount + } + } + """ + + input_data = { + "customerId": customer_id, + "name": name, + "date": date, + "labor": str(labor), + "amount": str(amount), + "notes": notes + } + + if account_address_id: + input_data["accountAddressId"] = account_address_id + else: + input_data["streetAddress"] = street_address + input_data["city"] = city + input_data["state"] = state + input_data["zipCode"] = zip_code + + if team_member_ids: + input_data["teamMemberIds"] = [ + tid.strip() for tid in team_member_ids.split(",") + ] + + result = await execute_graphql(mutation, {"input": input_data}) + + if "errors" in result: + return json_response(result) + + return json_response({"success": True, "project": result["data"]["createProject"]}) + + +@mcp.tool() +async def update_project( + project_id: str, + name: Optional[str] = None, + date: Optional[str] = None, + status: Optional[str] = None, + labor: Optional[float] = None, + amount: Optional[float] = None, + team_member_ids: Optional[str] = None, + notes: Optional[str] = None +) -> str: + """ + Update an existing project. Requires ADMIN role. + + Args: + project_id: UUID of the project to update + name: New project name + date: New date in YYYY-MM-DD format + status: New status + labor: New labor cost + amount: New total amount + team_member_ids: Comma-separated UUIDs of team members (replaces existing) + notes: Updated notes + + Returns: + JSON object with updated project + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. ADMIN role required.") + + mutation = """ + mutation UpdateProject($input: ProjectUpdateInput!) { + updateProject(input: $input) { + id + name + date + status + labor + amount + } + } + """ + + input_data = {"id": project_id} + if name: + input_data["name"] = name + if date: + input_data["date"] = date + if status: + input_data["status"] = status + if labor is not None: + input_data["labor"] = str(labor) + if amount is not None: + input_data["amount"] = str(amount) + if notes is not None: + input_data["notes"] = notes + if team_member_ids: + input_data["teamMemberIds"] = [ + tid.strip() for tid in team_member_ids.split(",") + ] + + result = await execute_graphql(mutation, {"input": input_data}) + + if "errors" in result: + return json_response(result) + + return json_response({"success": True, "project": result["data"]["updateProject"]}) + + +@mcp.tool() +async def delete_project(project_id: str) -> str: + """ + Delete a project. Requires ADMIN role. + + WARNING: This is a destructive action that cannot be undone. + + Args: + project_id: UUID of the project to delete + + Returns: + JSON object confirming deletion + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. Call set_active_profile first.") + + if profile.role != Role.ADMIN.value: + return error_response("Access denied. 
ADMIN role required.") + + # First get project details for confirmation message + from core.models import Project + + @database_sync_to_async + def get_project_info(): + try: + p = Project.objects.select_related('customer').get(pk=project_id) + return { + "name": p.name, + "date": str(p.date), + "customer": p.customer.name, + "status": p.status + } + except Project.DoesNotExist: + return None + + project_info = await get_project_info() + if not project_info: + return error_response(f"Project {project_id} not found") + + mutation = """ + mutation DeleteProject($id: ID!) { + deleteProject(id: $id) + } + """ + + result = await execute_graphql(mutation, {"id": project_id}) + + if "errors" in result: + return json_response(result) + + return json_response({ + "success": True, + "deleted_project": { + "id": project_id, + **project_info + } + }) diff --git a/core/mcp/tools/services.py b/core/mcp/tools/services.py new file mode 100644 index 0000000..9c44282 --- /dev/null +++ b/core/mcp/tools/services.py @@ -0,0 +1,494 @@ +"""Service tools for MCP.""" + +from datetime import datetime +from typing import Optional + +from channels.db import database_sync_to_async + +from core.mcp.auth import MCPContext, Role, execute_graphql +from core.mcp.base import mcp, json_response, error_response + + +@mcp.tool() +async def list_services( + limit: int = 25, + customer_id: Optional[str] = None, + account_id: Optional[str] = None, + status: Optional[str] = None, + date: Optional[str] = None, + start_date: Optional[str] = None, + end_date: Optional[str] = None +) -> str: + """ + List services with optional filters. + - ADMIN/TEAM_LEADER: See all services + - TEAM_MEMBER: See only assigned services + + Args: + limit: Maximum services to return (default 25) + customer_id: Optional customer UUID to filter by + account_id: Optional account UUID to filter by + status: Optional status filter (SCHEDULED, IN_PROGRESS, COMPLETED, CANCELLED) + date: Optional exact date in YYYY-MM-DD format + start_date: Optional range start date + end_date: Optional range end date + + Returns: + JSON array of service objects with full context + """ + profile = MCPContext.get_profile() + if not profile: + return error_response("No active profile. 
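+
+
+# Illustrative usage sketch (not part of the tool surface; all IDs below are
+# placeholders). A caller either passes an account address...
+#
+#     await create_project(
+#         customer_id="<customer-uuid>",
+#         name="Spring cleanup",
+#         date="2026-04-01",
+#         labor=450.0,
+#         account_address_id="<account-address-uuid>",
+#     )
+#
+# ...or a freeform address when no account address exists:
+#
+#     await create_project(
+#         customer_id="<customer-uuid>",
+#         name="One-off install",
+#         date="2026-04-02",
+#         labor=200.0,
+#         street_address="1 Main St", city="Springfield",
+#         state="IL", zip_code="62701",
+#     )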
diff --git a/core/mcp/tools/services.py b/core/mcp/tools/services.py
new file mode 100644
index 0000000..9c44282
--- /dev/null
+++ b/core/mcp/tools/services.py
@@ -0,0 +1,494 @@
+"""Service tools for MCP."""
+
+from datetime import datetime
+from typing import Optional
+
+from channels.db import database_sync_to_async
+
+from core.mcp.auth import MCPContext, Role, execute_graphql
+from core.mcp.base import mcp, json_response, error_response
+
+
+@mcp.tool()
+async def list_services(
+    limit: int = 25,
+    customer_id: Optional[str] = None,
+    account_id: Optional[str] = None,
+    status: Optional[str] = None,
+    date: Optional[str] = None,
+    start_date: Optional[str] = None,
+    end_date: Optional[str] = None
+) -> str:
+    """
+    List services with optional filters.
+    - ADMIN/TEAM_LEADER: See all services
+    - TEAM_MEMBER: See only assigned services
+
+    Args:
+        limit: Maximum services to return (default 25)
+        customer_id: Optional customer UUID to filter by
+        account_id: Optional account UUID to filter by
+        status: Optional status filter (SCHEDULED, IN_PROGRESS, COMPLETED, CANCELLED)
+        date: Optional exact date in YYYY-MM-DD format
+        start_date: Optional range start date
+        end_date: Optional range end date
+
+    Returns:
+        JSON array of service objects with full context
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    from core.models import Service
+
+    @database_sync_to_async
+    def fetch():
+        qs = Service.objects.select_related(
+            'account_address__account__customer'
+        ).prefetch_related('team_members')
+
+        if profile.role == Role.TEAM_MEMBER.value:
+            qs = qs.filter(team_members__id=profile.id)
+
+        if customer_id:
+            qs = qs.filter(account_address__account__customer_id=customer_id)
+        if account_id:
+            qs = qs.filter(account_address__account_id=account_id)
+        if status:
+            qs = qs.filter(status=status)
+        if date:
+            qs = qs.filter(date=datetime.strptime(date, "%Y-%m-%d").date())
+        if start_date:
+            qs = qs.filter(date__gte=datetime.strptime(start_date, "%Y-%m-%d").date())
+        if end_date:
+            qs = qs.filter(date__lte=datetime.strptime(end_date, "%Y-%m-%d").date())
+
+        qs = qs.order_by('-date')[:limit]
+
+        results = []
+        for s in qs:
+            addr = s.account_address
+            results.append({
+                "id": str(s.id),
+                "date": str(s.date),
+                "status": s.status,
+                "customer": addr.account.customer.name,
+                "account": addr.account.name,
+                "location": addr.name or "Primary",
+                "address": f"{addr.street_address}, {addr.city}, {addr.state} {addr.zip_code}",
+                "team_members": [
+                    f"{t.first_name} {t.last_name}".strip()
+                    for t in s.team_members.all() if t.role != 'ADMIN'
+                ],
+                "notes": s.notes or ""
+            })
+        return results
+
+    services = await fetch()
+    return json_response(services)
+
+
+@mcp.tool()
+async def get_service(service_id: str) -> str:
+    """
+    Get detailed service information including scope and tasks.
+    - ADMIN/TEAM_LEADER: Any service
+    - TEAM_MEMBER: Only if assigned
+
+    Args:
+        service_id: UUID of the service
+
+    Returns:
+        JSON object with full service details
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    from core.models import Service
+
+    @database_sync_to_async
+    def fetch():
+        try:
+            s = Service.objects.select_related(
+                'account_address__account__customer',
+                'account_address__scope'
+            ).prefetch_related(
+                'team_members',
+                'account_address__scope__areas__tasks'
+            ).get(pk=service_id)
+
+            if profile.role == Role.TEAM_MEMBER.value:
+                if not s.team_members.filter(id=profile.id).exists():
+                    return {"error": "Access denied. You are not assigned to this service."}
+
+            addr = s.account_address
+            scope_data = None
+
+            if hasattr(addr, 'scope') and addr.scope:
+                scope = addr.scope
+                scope_data = {
+                    "id": str(scope.id),
+                    "name": scope.name,
+                    "areas": [
+                        {
+                            "id": str(area.id),
+                            "name": area.name,
+                            "tasks": [
+                                {
+                                    "id": str(task.id),
+                                    "description": task.checklist_description,
+                                    "frequency": task.frequency
+                                }
+                                for task in area.tasks.all()
+                            ]
+                        }
+                        for area in scope.areas.all()
+                    ]
+                }
+
+            return {
+                "id": str(s.id),
+                "date": str(s.date),
+                "status": s.status,
+                "notes": s.notes,
+                "customer": {
+                    "id": str(addr.account.customer.id),
+                    "name": addr.account.customer.name
+                },
+                "account": {"id": str(addr.account.id), "name": addr.account.name},
+                "location": {
+                    "id": str(addr.id),
+                    "name": addr.name or "Primary",
+                    "street_address": addr.street_address,
+                    "city": addr.city,
+                    "state": addr.state,
+                    "zip_code": addr.zip_code
+                },
+                "team_members": [
+                    {
+                        "id": str(t.id),
+                        "name": f"{t.first_name} {t.last_name}".strip(),
+                        "email": t.email,
+                        "phone": t.phone
+                    }
+                    for t in s.team_members.all() if t.role != 'ADMIN'
+                ],
+                "scope": scope_data
+            }
+        except Service.DoesNotExist:
+            return {"error": f"Service {service_id} not found"}
+
+    result = await fetch()
+    if "error" in result:
+        return error_response(result["error"])
+
+    return json_response(result)
+
+
+@mcp.tool()
+async def create_service(
+    account_address_id: str,
+    date: str,
+    status: str = "SCHEDULED",
+    team_member_ids: Optional[str] = None,
+    notes: Optional[str] = None
+) -> str:
+    """
+    Create a new service. Requires ADMIN role.
+
+    Args:
+        account_address_id: UUID of the account address
+        date: Service date in YYYY-MM-DD format
+        status: Status (default SCHEDULED)
+        team_member_ids: Comma-separated UUIDs of team members to assign
+        notes: Optional notes
+
+    Returns:
+        JSON object with created service
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role != Role.ADMIN.value:
+        return error_response("Access denied. ADMIN role required.")
+
+    mutation = """
+    mutation CreateService($input: ServiceInput!) {
+        createService(input: $input) {
+            id
+            date
+            status
+            notes
+        }
+    }
+    """
+
+    variables = {
+        "input": {
+            "accountAddressId": account_address_id,
+            "date": date,
+            "status": status,
+            "notes": notes
+        }
+    }
+
+    if team_member_ids:
+        variables["input"]["teamMemberIds"] = [
+            tid.strip() for tid in team_member_ids.split(",")
+        ]
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "service": result["data"]["createService"]})
+
+
+@mcp.tool()
+async def update_service(
+    service_id: str,
+    date: Optional[str] = None,
+    status: Optional[str] = None,
+    team_member_ids: Optional[str] = None,
+    notes: Optional[str] = None
+) -> str:
+    """
+    Update an existing service. Requires ADMIN role.
+
+    Args:
+        service_id: UUID of the service to update
+        date: New date in YYYY-MM-DD format
+        status: New status
+        team_member_ids: Comma-separated UUIDs of team members (replaces existing)
+        notes: Updated notes
+
+    Returns:
+        JSON object with updated service
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role != Role.ADMIN.value:
+        return error_response("Access denied. ADMIN role required.")
+
+    mutation = """
+    mutation UpdateService($input: ServiceUpdateInput!) {
+        updateService(input: $input) {
+            id
+            date
+            status
+            notes
+        }
+    }
+    """
+
+    input_data = {"id": service_id}
+    if date:
+        input_data["date"] = date
+    if status:
+        input_data["status"] = status
+    if notes is not None:
+        input_data["notes"] = notes
+    if team_member_ids:
+        input_data["teamMemberIds"] = [
+            tid.strip() for tid in team_member_ids.split(",")
+        ]
+
+    result = await execute_graphql(mutation, {"input": input_data})
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "service": result["data"]["updateService"]})
+
+
+@mcp.tool()
+async def delete_service(service_id: str) -> str:
+    """
+    Delete a service. Requires ADMIN role.
+
+    WARNING: This is a destructive action that cannot be undone.
+
+    Args:
+        service_id: UUID of the service to delete
+
+    Returns:
+        JSON object confirming deletion
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role != Role.ADMIN.value:
+        return error_response("Access denied. ADMIN role required.")
+
+    # First get service details for confirmation message
+    from core.models import Service
+
+    @database_sync_to_async
+    def get_service_info():
+        try:
+            s = Service.objects.select_related(
+                'account_address__account'
+            ).get(pk=service_id)
+            return {
+                "date": str(s.date),
+                "account": s.account_address.account.name if s.account_address else None,
+                "status": s.status
+            }
+        except Service.DoesNotExist:
+            return None
+
+    service_info = await get_service_info()
+    if not service_info:
+        return error_response(f"Service {service_id} not found")
+
+    mutation = """
+    mutation DeleteService($id: ID!) {
+        deleteService(id: $id)
+    }
+    """
+
+    result = await execute_graphql(mutation, {"id": service_id})
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({
+        "success": True,
+        "deleted_service": {
+            "id": service_id,
+            **service_info
+        }
+    })
+
+
+@mcp.tool()
+async def create_services_bulk(
+    services_json: str
+) -> str:
+    """
+    Create multiple services in a single operation. Requires ADMIN role.
+
+    This is useful for creating annual calendars or scheduling multiple services at once.
+    Each service is validated before any are created (all-or-nothing).
+
+    Args:
+        services_json: JSON array of service objects, each containing:
+            - account_address_id: UUID of the account address (required)
+            - date: Service date in YYYY-MM-DD format (required)
+            - status: Status (optional, default SCHEDULED)
+            - team_member_ids: Array of team member UUIDs (optional)
+            - notes: Notes (optional)
+
+    Example:
+        [
+            {"account_address_id": "uuid1", "date": "2026-01-06"},
+            {"account_address_id": "uuid2", "date": "2026-01-10", "notes": "Special instructions"}
+        ]
+
+    Returns:
+        JSON object with created services count and IDs
+    """
+    import json
+    from django.db import transaction
+
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role != Role.ADMIN.value:
+        return error_response("Access denied. ADMIN role required.")
+
+    # Parse the JSON input
+    try:
+        services_data = json.loads(services_json)
+    except json.JSONDecodeError as e:
+        return error_response(f"Invalid JSON: {str(e)}")
+
+    if not isinstance(services_data, list):
+        return error_response("services_json must be a JSON array")
+
+    if len(services_data) == 0:
+        return error_response("services_json array is empty")
+
+    if len(services_data) > 500:
+        return error_response("Maximum 500 services per bulk operation")
+
+    # Validate all services have required fields
+    for i, svc in enumerate(services_data):
+        if not isinstance(svc, dict):
+            return error_response(f"Service at index {i} must be an object")
+        if "account_address_id" not in svc:
+            return error_response(f"Service at index {i} missing account_address_id")
+        if "date" not in svc:
+            return error_response(f"Service at index {i} missing date")
+
+    from core.models import Service, AccountAddress
+
+    @database_sync_to_async
+    def create_all():
+        # Validate all account addresses exist
+        address_ids = list(set(svc["account_address_id"] for svc in services_data))
+        existing_addresses = set(
+            str(a.id) for a in AccountAddress.objects.filter(id__in=address_ids)
+        )
+
+        for addr_id in address_ids:
+            if addr_id not in existing_addresses:
+                raise ValueError(f"Account address {addr_id} not found")
+
+        # Check for duplicate date/address combinations. Dates are parsed up
+        # front so the comparison uses date objects rather than raw strings
+        # (and malformed dates fail validation before anything is created).
+        date_address_combos = [
+            (datetime.strptime(svc["date"], "%Y-%m-%d").date(), svc["account_address_id"])
+            for svc in services_data
+        ]
+
+        # Check against existing services
+        existing = Service.objects.filter(
+            account_address_id__in=address_ids
+        ).values_list('date', 'account_address_id')
+
+        existing_combos = set((d, str(a)) for d, a in existing)
+        for date_val, addr_id in date_address_combos:
+            if (date_val, addr_id) in existing_combos:
+                raise ValueError(
+                    f"Service already exists for address {addr_id} on {date_val}"
+                )
+
+        # Create all services in a transaction
+        with transaction.atomic():
+            to_create = []
+            for svc in services_data:
+                service = Service(
+                    account_address_id=svc["account_address_id"],
+                    date=datetime.strptime(svc["date"], "%Y-%m-%d").date(),
+                    status=svc.get("status", "SCHEDULED"),
+                    notes=svc.get("notes")
+                )
+                to_create.append(service)
+
+            created = Service.objects.bulk_create(to_create)
+
+            # Handle team member assignments if provided
+            for i, service in enumerate(created):
+                team_ids = services_data[i].get("team_member_ids", [])
+                if team_ids:
+                    service.team_members.set(team_ids)
+
+        return [str(s.id) for s in created]
+
+    try:
+        created_ids = await create_all()
+    except ValueError as e:
+        return error_response(str(e))
+
+    return json_response({
+        "success": True,
+        "created_count": len(created_ids),
+        "service_ids": created_ids
+    })
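+
+
+# Illustrative payload sketch for create_services_bulk (UUIDs are placeholders;
+# assumes `import json` at the call site). A quarterly calendar for one
+# location is simply four entries in the array:
+#
+#     await create_services_bulk(services_json=json.dumps([
+#         {"account_address_id": "<address-uuid>", "date": "2026-01-06"},
+#         {"account_address_id": "<address-uuid>", "date": "2026-04-06"},
+#         {"account_address_id": "<address-uuid>", "date": "2026-07-06"},
+#         {"account_address_id": "<address-uuid>", "date": "2026-10-05", "notes": "Fall prep"},
+#     ]))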
diff --git a/core/mcp/tools/sessions.py b/core/mcp/tools/sessions.py
new file mode 100644
index 0000000..c7b407e
--- /dev/null
+++ b/core/mcp/tools/sessions.py
@@ -0,0 +1,373 @@
+"""Session tools for MCP."""
+
+from typing import Optional
+
+from core.mcp.auth import MCPContext, Role, check_entity_access, execute_graphql
+from core.mcp.base import mcp, json_response, error_response
+
+
+@mcp.tool()
+async def get_active_session(entity_type: str, entity_id: str) -> str:
+    """
+    Get the active session for a service or project.
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+
+    Returns:
+        JSON object with session details or null if no active session
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    if entity_type == "service":
+        query = """
+        query GetActiveSession($serviceId: UUID!) {
+            activeServiceSession(serviceId: $serviceId) {
+                id
+                start
+                end
+                createdBy { id firstName lastName }
+            }
+        }
+        """
+        variables = {"serviceId": entity_id}
+        result_key = "activeServiceSession"
+    elif entity_type == "project":
+        query = """
+        query GetActiveSession($projectId: UUID!) {
+            activeProjectSession(projectId: $projectId) {
+                id
+                start
+                end
+                createdBy { id firstName lastName }
+            }
+        }
+        """
+        variables = {"projectId": entity_id}
+        result_key = "activeProjectSession"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(query, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    session = result["data"].get(result_key)
+    return json_response({"active": session is not None, "session": session})
+
+
+@mcp.tool()
+async def open_session(entity_type: str, entity_id: str) -> str:
+    """
+    Start a work session for a service or project.
+    - ADMIN: Any service/project
+    - TEAM_MEMBER: Only if assigned
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+
+    Returns:
+        JSON object with opened session details
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role == Role.TEAM_LEADER.value:
+        return error_response("Access denied. TEAM_LEADER role is view-only.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    if entity_type == "service":
+        mutation = """
+        mutation OpenSession($input: OpenServiceSessionInput!) {
+            openServiceSession(input: $input) {
+                id
+                start
+                service { id status }
+            }
+        }
+        """
+        variables = {"input": {"serviceId": entity_id}}
+        result_key = "openServiceSession"
+    elif entity_type == "project":
+        mutation = """
+        mutation OpenSession($input: ProjectSessionStartInput!) {
+            openProjectSession(input: $input) {
+                id
+                start
+                project { id status }
+            }
+        }
+        """
+        variables = {"input": {"projectId": entity_id}}
+        result_key = "openProjectSession"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "session": result["data"][result_key]})
+
+
+@mcp.tool()
+async def close_session(
+    entity_type: str,
+    entity_id: str,
+    completed_task_ids: Optional[str] = None
+) -> str:
+    """
+    Complete a work session and mark tasks as done.
+    - ADMIN: Any session
+    - TEAM_MEMBER: Only their own sessions
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+        completed_task_ids: Comma-separated UUIDs of completed tasks
+
+    Returns:
+        JSON object with closed session details
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role == Role.TEAM_LEADER.value:
+        return error_response("Access denied. TEAM_LEADER role is view-only.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    task_ids = []
+    if completed_task_ids:
+        task_ids = [tid.strip() for tid in completed_task_ids.split(",")]
+
+    if entity_type == "service":
+        mutation = """
+        mutation CloseSession($input: CloseServiceSessionInput!) {
+            closeServiceSession(input: $input) {
+                id
+                start
+                end
+                service { id status }
+            }
+        }
+        """
+        variables = {"input": {"serviceId": entity_id, "taskIds": task_ids}}
+        result_key = "closeServiceSession"
+    elif entity_type == "project":
+        mutation = """
+        mutation CloseSession($input: ProjectSessionCloseInput!) {
+            closeProjectSession(input: $input) {
+                id
+                start
+                end
+                project { id status }
+            }
+        }
+        """
+        variables = {"input": {"projectId": entity_id, "taskIds": task_ids}}
+        result_key = "closeProjectSession"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "session": result["data"][result_key]})
+
+
+@mcp.tool()
+async def revert_session(entity_type: str, entity_id: str) -> str:
+    """
+    Cancel an active session and revert status to SCHEDULED.
+    - ADMIN: Any session
+    - TEAM_MEMBER: Only their own sessions
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+
+    Returns:
+        JSON object confirming reversion
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role == Role.TEAM_LEADER.value:
+        return error_response("Access denied. TEAM_LEADER role is view-only.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    if entity_type == "service":
+        mutation = """
+        mutation RevertSession($input: RevertServiceSessionInput!) {
+            revertServiceSession(input: $input)
+        }
+        """
+        variables = {"input": {"serviceId": entity_id}}
+        result_key = "revertServiceSession"
+    elif entity_type == "project":
+        mutation = """
+        mutation RevertSession($input: ProjectSessionRevertInput!) {
+            revertProjectSession(input: $input)
+        }
+        """
+        variables = {"input": {"projectId": entity_id}}
+        result_key = "revertProjectSession"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "reverted": result["data"][result_key]})
+
+
+@mcp.tool()
+async def add_task_completion(
+    entity_type: str,
+    entity_id: str,
+    task_id: str,
+    notes: Optional[str] = None
+) -> str:
+    """
+    Mark a task as completed during an active session.
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+        task_id: UUID of the task to mark complete
+        notes: Optional notes about task completion
+
+    Returns:
+        JSON object confirming task completion
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role == Role.TEAM_LEADER.value:
+        return error_response("Access denied. TEAM_LEADER role is view-only.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    if entity_type == "service":
+        mutation = """
+        mutation AddTaskCompletion($serviceId: ID!, $taskId: ID!, $notes: String) {
+            addTaskCompletion(serviceId: $serviceId, taskId: $taskId, notes: $notes) {
+                id
+            }
+        }
+        """
+        variables = {"serviceId": entity_id, "taskId": task_id, "notes": notes}
+        result_key = "addTaskCompletion"
+    elif entity_type == "project":
+        mutation = """
+        mutation AddTaskCompletion($projectId: ID!, $taskId: ID!, $notes: String) {
+            addProjectTaskCompletion(projectId: $projectId, taskId: $taskId, notes: $notes) {
+                id
+            }
+        }
+        """
+        variables = {"projectId": entity_id, "taskId": task_id, "notes": notes}
+        result_key = "addProjectTaskCompletion"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "completion": result["data"][result_key]})
+
+
+@mcp.tool()
+async def remove_task_completion(
+    entity_type: str,
+    entity_id: str,
+    task_id: str
+) -> str:
+    """
+    Unmark a task completion from an active session.
+
+    Args:
+        entity_type: Either 'service' or 'project'
+        entity_id: UUID of the service or project
+        task_id: UUID of the task to unmark
+
+    Returns:
+        JSON object confirming removal
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    if profile.role == Role.TEAM_LEADER.value:
+        return error_response("Access denied. TEAM_LEADER role is view-only.")
+
+    try:
+        await check_entity_access(entity_type, entity_id)
+    except PermissionError as e:
+        return error_response(str(e))
+
+    if entity_type == "service":
+        mutation = """
+        mutation RemoveTaskCompletion($serviceId: ID!, $taskId: ID!) {
+            removeTaskCompletion(serviceId: $serviceId, taskId: $taskId) {
+                id
+            }
+        }
+        """
+        variables = {"serviceId": entity_id, "taskId": task_id}
+        result_key = "removeTaskCompletion"
+    elif entity_type == "project":
+        mutation = """
+        mutation RemoveTaskCompletion($projectId: ID!, $taskId: ID!) {
+            removeProjectTaskCompletion(projectId: $projectId, taskId: $taskId) {
+                id
+            }
+        }
+        """
+        variables = {"projectId": entity_id, "taskId": task_id}
+        result_key = "removeProjectTaskCompletion"
+    else:
+        return error_response("entity_type must be 'service' or 'project'")
+
+    result = await execute_graphql(mutation, variables)
+
+    if "errors" in result:
+        return json_response(result)
+
+    return json_response({"success": True, "removed": result["data"][result_key]})
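+
+
+# Illustrative workflow sketch (UUIDs are placeholders): a team member
+# typically opens a session, records completions as work happens, then closes
+# the session with the final task list.
+#
+#     await open_session("service", "<service-uuid>")
+#     await add_task_completion("service", "<service-uuid>", "<task-uuid>")
+#     await close_session("service", "<service-uuid>",
+#                         completed_task_ids="<task-uuid>,<other-task-uuid>")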
diff --git a/core/mcp/tools/utility.py b/core/mcp/tools/utility.py
new file mode 100644
index 0000000..21d9e81
--- /dev/null
+++ b/core/mcp/tools/utility.py
@@ -0,0 +1,27 @@
+"""Utility tools for MCP."""
+
+import json
+from typing import Optional
+
+from core.mcp.auth import MCPContext, execute_graphql
+from core.mcp.base import mcp, json_response, error_response
+
+
+@mcp.tool()
+async def graphql_query(query: str, variables: Optional[str] = None) -> str:
+    """
+    Execute a raw GraphQL query. Uses the active profile for authentication context.
+
+    Args:
+        query: The GraphQL query string
+        variables: Optional JSON string of variables
+
+    Returns:
+        JSON string containing the query result
+    """
+    profile = MCPContext.get_profile()
+    if not profile:
+        return error_response("No active profile. Call set_active_profile first.")
+
+    try:
+        vars_dict = json.loads(variables) if variables else None
+    except json.JSONDecodeError as e:
+        return error_response(f"Invalid JSON in variables: {str(e)}")
+
+    result = await execute_graphql(query, vars_dict)
+    return json_response(result)
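+
+
+# Illustrative usage sketch (the query shape below is an example, not a
+# reference to the actual schema; the ID is a placeholder):
+#
+#     await graphql_query(
+#         query="query ($id: ID!) { project(id: $id) { id name status } }",
+#         variables='{"id": "<project-uuid>"}',
+#     )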
+ """ + + def __init__(self, get_response): + self.get_response = get_response + super().__init__(get_response) + + def process_request(self, request: HttpRequest) -> Optional[JsonResponse]: + # Skip authentication for specific paths if needed + if self._should_skip_auth(request): + request.user = AnonymousUser() + return None + + # 🔒 SECURITY: Verify request came from Oathkeeper + oathkeeper_secret = request.META.get('HTTP_X_OATHKEEPER_SECRET') + expected_secret = settings.OATHKEEPER_SECRET + + if not expected_secret: + logger.critical("OATHKEEPER_SECRET not configured in settings!") + return JsonResponse( + {"detail": "Server configuration error"}, + status=500 + ) + + if oathkeeper_secret != expected_secret: + logger.warning( + f"Invalid or missing Oathkeeper secret from {request.META.get('REMOTE_ADDR')} " + f"for path {request.path}" + ) + return JsonResponse( + {"detail": "Forbidden - requests must come through the API gateway"}, + status=403 + ) + + # Extract required headers (now safe to trust these) + user_id = request.META.get('HTTP_X_USER_ID') + profile_type = request.META.get('HTTP_X_USER_PROFILE_TYPE') + django_profile_id = request.META.get('HTTP_X_DJANGO_PROFILE_ID') + + # Reject if no user ID + if not user_id: + return JsonResponse( + {"detail": "Authentication required - no user ID provided"}, + status=401, + headers={"WWW-Authenticate": 'Bearer realm="api"'} + ) + + # Reject if no profile type + if not profile_type: + return JsonResponse( + {"detail": "Authentication required - no profile type provided"}, + status=401, + headers={"WWW-Authenticate": 'Bearer realm="api"'} + ) + + # Validate profile type + if profile_type not in ['team', 'customer']: + return JsonResponse( + {"detail": f"Invalid profile type: {profile_type}"}, + status=403 + ) + + # Django profile ID is REQUIRED + if not django_profile_id: + return JsonResponse( + {"detail": "Django profile is required to access this application"}, + status=403 + ) + + # Get Django profile + profile = self._get_profile_by_id(django_profile_id, profile_type) + if not profile: + return JsonResponse( + {"detail": f"Django profile {django_profile_id} not found or type mismatch"}, + status=403 + ) + + # Store Ory user information on request for additional context + request.ory_user_id = user_id + request.ory_user_email = request.META.get('HTTP_X_USER_EMAIL') + request.ory_user_first_name = request.META.get('HTTP_X_USER_FIRST_NAME') + request.ory_user_last_name = request.META.get('HTTP_X_USER_LAST_NAME') + request.ory_user_phone = request.META.get('HTTP_X_USER_PHONE') + request.ory_profile_type = profile_type + + # Set the authenticated Django user and profile + request.profile = profile + return None + + @staticmethod + def _should_skip_auth(request: HttpRequest) -> bool: + """Determine if authentication should be skipped for this request.""" + skip_paths = [ + '/admin/', + '/health/', + '/static/', + ] + + # Allow CORS preflight requests + if request.method == 'OPTIONS': + return True + + return any(request.path.startswith(path) for path in skip_paths) + + @staticmethod + def _get_profile_by_id(profile_id: str, expected_type: str) -> Optional[Union[TeamProfile, CustomerProfile]]: + """ + Get Django profile by ID and validate it matches the expected type. + Returns None if not found or type mismatch. 
+ """ + try: + if expected_type == 'team': + return TeamProfile.objects.select_related('user').get(id=profile_id) + elif expected_type == 'customer': + return CustomerProfile.objects.select_related('user').get(id=profile_id) + else: + logger.warning(f"Unknown profile type: {expected_type}") + return None + except (TeamProfile.DoesNotExist, CustomerProfile.DoesNotExist): + logger.warning(f"Profile {profile_id} not found for type {expected_type}") + return None + + +class OryWebSocketAuthMiddleware: + """ + WebSocket middleware that authenticates using Ory headers from the initial HTTP upgrade request. + Validates that users have the required Django profile to access the application. + + Security: Verifies requests came from Oathkeeper via shared secret. + """ + def __init__(self, app: Any) -> None: + self.app = app + + async def __call__(self, scope: Dict[str, Any], receive: Any, send: Any) -> Any: + if scope['type'] == 'websocket': + # Extract headers from the initial HTTP upgrade request + headers: Dict[bytes, bytes] = dict(scope.get('headers', [])) + + # 🔒 SECURITY: Verify request came from Oathkeeper + oathkeeper_secret = headers.get(b'x-oathkeeper-secret', b'').decode('utf-8') + expected_secret = settings.OATHKEEPER_SECRET + + if not expected_secret: + logger.critical("OATHKEEPER_SECRET not configured for WebSocket!") + await send({ + 'type': 'websocket.close', + 'code': 1011, # Internal error + }) + return None + + if oathkeeper_secret != expected_secret: + logger.warning(f"Invalid Oathkeeper secret for WebSocket connection") + await send({ + 'type': 'websocket.close', + 'code': 4403, # Forbidden + }) + return None + + # Ory headers should be passed through Oathkeeper (now safe to trust) + user_id = headers.get(b'x-user-id', b'').decode('utf-8') + profile_type = headers.get(b'x-user-profile-type', b'').decode('utf-8') + django_profile_id = headers.get(b'x-django-profile-id', b'').decode('utf-8') + + if user_id and profile_type and django_profile_id: + # Validate profile type + if profile_type not in ['team', 'customer']: + logger.warning(f"Invalid profile type for WebSocket: {profile_type}") + await send({ + 'type': 'websocket.close', + 'code': 4403, # Custom close code for forbidden + }) + return None + + # Fetch the profile from the database + profile: Optional[Union[TeamProfile, CustomerProfile]] = await self._get_profile(django_profile_id, profile_type) + if profile: + scope['profile'] = profile + + # Store Ory user information in scope for additional context + scope['ory_user_id'] = user_id + scope['ory_user_email'] = headers.get(b'x-user-email', b'').decode('utf-8') + scope['ory_user_first_name'] = headers.get(b'x-user-first-name', b'').decode('utf-8') + scope['ory_user_last_name'] = headers.get(b'x-user-last-name', b'').decode('utf-8') + scope['ory_user_phone'] = headers.get(b'x-user-phone', b'').decode('utf-8') + scope['ory_profile_type'] = profile_type + else: + # Reject connection if profile not found + logger.warning(f"Profile {django_profile_id} not found for WebSocket connection") + await send({ + 'type': 'websocket.close', + 'code': 4403, # Custom close code for forbidden + }) + return None + else: + # Reject connection if headers missing + logger.warning(f"Missing Ory headers for WebSocket connection") + await send({ + 'type': 'websocket.close', + 'code': 4401, # Custom close code for unauthorized + }) + return None + + return await self.app(scope, receive, send) + + @database_sync_to_async + def _get_profile(self, profile_id: str, profile_type: str) -> 
Optional[Union[TeamProfile, CustomerProfile]]: + """ + Get Django profile by ID and validate it matches the expected type. + Returns None if not found or type mismatch. + """ + try: + if profile_type == 'team': + return TeamProfile.objects.select_related('user').get(id=profile_id) + elif profile_type == 'customer': + return CustomerProfile.objects.select_related('user').get(id=profile_id) + except (TeamProfile.DoesNotExist, CustomerProfile.DoesNotExist): + logger.warning(f"Profile {profile_id} not found for type {profile_type}") + return None + return None diff --git a/core/migrations/__init__.py b/core/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/models/__init__.py b/core/models/__init__.py new file mode 100644 index 0000000..b642853 --- /dev/null +++ b/core/models/__init__.py @@ -0,0 +1,25 @@ +# Import all models to make them available when importing from the package +from core.models.account import * +from core.models.base import * +from core.models.enums import * +from core.models.customer import * +from core.models.invoice import * +from core.models.labor import * +from core.models.profile import * +from core.models.project import * +from core.models.report import * +from core.models.revenue import * +from core.models.schedule import * +from core.models.service import * +from core.models.scope import * +from core.models.project_scope import * +from core.models.project_scope_template import * +from core.models.scope_template import * +from core.models.session import * +from core.models.session_image import * +from core.models.session_video import * +from core.models.account_punchlist import * +from core.models.project_punchlist import * +from core.models.events import * +from core.models.messaging import * +from core.models.chat import * \ No newline at end of file diff --git a/core/models/account.py b/core/models/account.py new file mode 100644 index 0000000..8115f4e --- /dev/null +++ b/core/models/account.py @@ -0,0 +1,151 @@ +from django.db import models +from django.utils import timezone +from django.core.exceptions import ValidationError +from django_choices_field import TextChoicesField +from core.models.base import BaseModel, Address, Contact +from core.models.customer import Customer +from core.models.enums import StatusChoices + + +class Account(BaseModel): + """Account model belonging to a customer""" + customer = models.ForeignKey(Customer, on_delete=models.PROTECT, related_name='accounts') + name = models.CharField(max_length=200) + status = TextChoicesField(choices_enum=StatusChoices, default=StatusChoices.ACTIVE, + help_text="Current status of the account") + start_date = models.DateField(default=timezone.now) + end_date = models.DateField(blank=True, null=True) + + class Meta: + ordering = ['name'] + verbose_name = "Account" + verbose_name_plural = "Accounts" + indexes = [ + models.Index(fields=['customer', 'status']), + models.Index(fields=['status', 'start_date']) + ] + constraints = [ + models.UniqueConstraint( + fields=['customer', 'name'], + name='unique_account_name_per_customer' + ) + ] + + def __str__(self): + return f"{self.name} ({self.customer.name})" + + @property + def is_active(self): + """Check if the account is currently active based on dates and status""" + today = timezone.now().date() + return self.status == 'ACTIVE' and self.start_date <= today and ( + self.end_date is None or self.end_date >= today) + + @property + def primary_address(self): + """Get the primary address for this account""" + return 
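+
+
+# Illustrative request shape (header names come from the META/scope keys read
+# above; the values are placeholders). A request proxied by Oathkeeper is
+# expected to carry:
+#
+#     X-Oathkeeper-Secret:  <shared secret, compared to settings.OATHKEEPER_SECRET>
+#     X-User-Id:            <Ory identity id>
+#     X-User-Profile-Type:  team | customer
+#     X-Django-Profile-Id:  <TeamProfile/CustomerProfile UUID>
+#     X-User-Email / X-User-First-Name / X-User-Last-Name / X-User-Phone
+#                           (optional context headers)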
diff --git a/core/migrations/__init__.py b/core/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/core/models/__init__.py b/core/models/__init__.py
new file mode 100644
index 0000000..b642853
--- /dev/null
+++ b/core/models/__init__.py
@@ -0,0 +1,25 @@
+# Import all models to make them available when importing from the package
+from core.models.account import *
+from core.models.base import *
+from core.models.enums import *
+from core.models.customer import *
+from core.models.invoice import *
+from core.models.labor import *
+from core.models.profile import *
+from core.models.project import *
+from core.models.report import *
+from core.models.revenue import *
+from core.models.schedule import *
+from core.models.service import *
+from core.models.scope import *
+from core.models.project_scope import *
+from core.models.project_scope_template import *
+from core.models.scope_template import *
+from core.models.session import *
+from core.models.session_image import *
+from core.models.session_video import *
+from core.models.account_punchlist import *
+from core.models.project_punchlist import *
+from core.models.events import *
+from core.models.messaging import *
+from core.models.chat import *
\ No newline at end of file
diff --git a/core/models/account.py b/core/models/account.py
new file mode 100644
index 0000000..8115f4e
--- /dev/null
+++ b/core/models/account.py
@@ -0,0 +1,151 @@
+from django.db import models
+from django.utils import timezone
+from django.core.exceptions import ValidationError
+from django_choices_field import TextChoicesField
+from core.models.base import BaseModel, Address, Contact
+from core.models.customer import Customer
+from core.models.enums import StatusChoices
+
+
+class Account(BaseModel):
+    """Account model belonging to a customer"""
+    customer = models.ForeignKey(Customer, on_delete=models.PROTECT, related_name='accounts')
+    name = models.CharField(max_length=200)
+    status = TextChoicesField(choices_enum=StatusChoices, default=StatusChoices.ACTIVE,
+                              help_text="Current status of the account")
+    start_date = models.DateField(default=timezone.now)
+    end_date = models.DateField(blank=True, null=True)
+
+    class Meta:
+        ordering = ['name']
+        verbose_name = "Account"
+        verbose_name_plural = "Accounts"
+        indexes = [
+            models.Index(fields=['customer', 'status']),
+            models.Index(fields=['status', 'start_date'])
+        ]
+        constraints = [
+            models.UniqueConstraint(
+                fields=['customer', 'name'],
+                name='unique_account_name_per_customer'
+            )
+        ]
+
+    def __str__(self):
+        return f"{self.name} ({self.customer.name})"
+
+    @property
+    def is_active(self):
+        """Check if the account is currently active based on dates and status"""
+        today = timezone.now().date()
+        return self.status == 'ACTIVE' and self.start_date <= today and (
+            self.end_date is None or self.end_date >= today)
+
+    @property
+    def primary_address(self):
+        """Get the primary address for this account"""
+        return self.addresses.filter(is_primary=True, is_active=True).first()
+
+    def clean(self):
+        """Validate account data"""
+        if self.end_date and self.start_date and self.end_date < self.start_date:
+            raise ValidationError("End date cannot be earlier than start date")
+
+
+class AccountAddress(Address):
+    """Physical address information for an account"""
+    account = models.ForeignKey('Account', on_delete=models.PROTECT, related_name='addresses')
+    name = models.CharField(max_length=200, blank=True)
+    is_active = models.BooleanField(default=True)
+    is_primary = models.BooleanField(default=False)
+    notes = models.TextField(blank=True)
+
+    class Meta:
+        verbose_name = "Account Address"
+        verbose_name_plural = "Account Addresses"
+        indexes = [
+            models.Index(fields=['account', 'is_active']),
+        ]
+        constraints = [
+            models.UniqueConstraint(
+                fields=['account'],
+                condition=models.Q(is_primary=True, is_active=True),
+                name='unique_primary_address_per_account'
+            )
+        ]
+
+    def save(self, *args, **kwargs):
+        # Auto-set the first active address as primary
+        if self.is_active and not AccountAddress.objects.filter(
+            account=self.account,
+            is_active=True
+        ).exclude(pk=self.pk).exists():
+            self.is_primary = True
+        super().save(*args, **kwargs)
+
+    def clean(self):
+        """Validate address data"""
+        if self.is_primary and not self.is_active:
+            raise ValidationError("Primary address must be active")
+
+    def __str__(self):
+        primary_indicator = " (Primary)" if self.is_primary else ""
+        return f"{self.account.name} - {self.street_address}{primary_indicator}"
+
+
+class AccountContact(Contact):
+    """Contact information for an account"""
+    account = models.ForeignKey('Account', on_delete=models.PROTECT, related_name='contacts')
+    email = models.EmailField(blank=True)
+    is_active = models.BooleanField(default=True)
+    is_primary = models.BooleanField(default=False)
+    notes = models.TextField(blank=True)
+
+    class Meta:
+        verbose_name = "Account Contact"
+        verbose_name_plural = "Account Contacts"
+        indexes = [
+            models.Index(fields=['account', 'is_active']),
+        ]
+        constraints = [
+            # Only one primary contact per account
+            models.UniqueConstraint(
+                fields=['account'],
+                condition=models.Q(is_primary=True, is_active=True),
+                name='unique_primary_contact_per_account'
+            ),
+            # Prevent duplicate phone numbers for the same account
+            models.UniqueConstraint(
+                fields=['account', 'phone'],
+                condition=models.Q(is_active=True, phone__isnull=False) & ~models.Q(phone=''),
+                name='unique_phone_per_account'
+            ),
+            # Prevent duplicate emails for the same account (when email provided)
+            models.UniqueConstraint(
+                fields=['account', 'email'],
+                condition=models.Q(is_active=True, email__isnull=False) & ~models.Q(email=''),
+                name='unique_email_per_account'
+            )
+        ]
+
+    def save(self, *args, **kwargs):
+        # Auto-set first active contact as primary
+        if self.is_active and not AccountContact.objects.filter(
+            account=self.account,
+            is_active=True
+        ).exclude(pk=self.pk).exists():
+            self.is_primary = True
+        super().save(*args, **kwargs)
+
+    def clean(self):
+        """Validate contact data"""
+        # Ensure primary contacts are active
+        if self.is_primary and not self.is_active:
+            raise ValidationError("Primary contact must be active")
+
+        # Ensure we have at least phone or email
+        if not self.phone and not self.email:
+            raise ValidationError("Contact must have either phone number or email address")
+
+    def __str__(self):
+        primary_indicator = " (Primary)" if self.is_primary else ""
+        return f"{self.full_name} - {self.account.name}{primary_indicator}"
diff --git a/core/models/account_punchlist.py b/core/models/account_punchlist.py
new file mode 100644
index 0000000..a1de65a
--- /dev/null
+++ b/core/models/account_punchlist.py
@@ -0,0 +1,20 @@
+from django.db import models
+from core.models.base import BaseModel
+from core.models.account import Account
+
+
+class AccountPunchlist(BaseModel):
+    """Punchlist records for accounts"""
+    account = models.ForeignKey(Account, on_delete=models.PROTECT, related_name='punchlists')
+    date = models.DateField()
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [
+            models.Index(fields=['account', 'date']),
+        ]
+        verbose_name = "Punchlist"
+        verbose_name_plural = "Punchlists"
+
+    def __str__(self):
+        return f"Punchlist for {self.account.name} on {self.date}"
\ No newline at end of file
diff --git a/core/models/base.py b/core/models/base.py
new file mode 100644
index 0000000..adc5189
--- /dev/null
+++ b/core/models/base.py
@@ -0,0 +1,214 @@
+import uuid
+from django.db import models
+from django.utils import timezone
+from django.core.files.base import ContentFile
+from io import BytesIO
+import os
+
+# Pillow is treated as optional: without it, thumbnail generation is a no-op
+try:
+    from PIL import Image as PilImage
+except ImportError:
+    PilImage = None
+
+
+class BaseModel(models.Model):
+    """Abstract base model for all models in the application"""
+    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
+    created_at = models.DateTimeField(default=timezone.now, editable=False)
+    updated_at = models.DateTimeField(auto_now=True)
+
+    class Meta:
+        abstract = True
+        get_latest_by = 'created_at'
+
+
+class Contact(BaseModel):
+    """Represents a contact person with their details"""
+    first_name = models.CharField(max_length=100)
+    last_name = models.CharField(max_length=100)
+    phone = models.CharField(max_length=20, blank=True, null=True, verbose_name="Phone Number")
+
+    class Meta:
+        abstract = True
+
+    def __str__(self):
+        return f"{self.first_name} {self.last_name}"
+
+    @property
+    def full_name(self):
+        return f"{self.first_name} {self.last_name}"
+
+
+class Address(BaseModel):
+    """Represents a physical address"""
+    street_address = models.CharField(max_length=255)
+    city = models.CharField(max_length=100)
+    state = models.CharField(max_length=100)
+    zip_code = models.CharField(max_length=20)
+
+    class Meta:
+        abstract = True
+
+    def __str__(self):
+        return f"{self.street_address}, {self.city}, {self.state} {self.zip_code}"
+
+
+def _default_image_upload_to(instance: 'Image', filename: str) -> str:
+    """
+    Default upload path for original images.
+    Override by setting the IMAGE_UPLOAD_TO staticmethod on a subclass if needed.
+    """
+    base, ext = os.path.splitext(filename)
+    ext = ext.lower() or ".jpg"
+    model_dir = instance._meta.model_name
+    return f"uploads/{model_dir}/{instance.id}/{uuid.uuid4().hex}{ext}"
+
+
+def _default_thumb_upload_to(instance: 'Image', _filename: str) -> str:
+    """
+    Default upload path for thumbnails.
+    """
+    model_dir = instance._meta.model_name
+    return f"uploads/{model_dir}/{instance.id}/thumb/{uuid.uuid4().hex}.jpg"
+
+
+class Note(BaseModel):
+    """
+    Abstract base model for notes/comments.
+    Use this as a base for model-specific note types (e.g., ServiceNote, ProjectNote).
+    """
+    content = models.TextField()
+    author = models.ForeignKey(
+        'TeamProfile',
+        on_delete=models.SET_NULL,
+        null=True,
+        blank=True,
+        related_name="%(class)s_notes"
+    )
+    internal = models.BooleanField(
+        default=True,
+        help_text="Internal notes are only visible to team members, not customers"
+    )
+
+    class Meta:
+        abstract = True
+        ordering = ('-created_at',)
+
+    def __str__(self):
+        preview = self.content[:50] + "..." if len(self.content) > 50 else self.content
+        author_name = self.author.full_name if self.author else "Unknown"
+        return f"{author_name}: {preview}"
+
+
+class Image(BaseModel):
+    """
+    Abstract base for image-bearing models.
+
+    Features:
+    - Stores original image and auto-generated JPEG thumbnail
+    - Captures width/height and content_type
+    - Tracks the uploading team profile (optional)
+    - Storage-agnostic (respects DEFAULT_FILE_STORAGE)
+
+    Customize by overriding:
+    - THUMBNAIL_SIZE
+    - IMAGE_UPLOAD_TO / THUMB_UPLOAD_TO (callables like Django's upload_to)
+    """
+    title = models.CharField(max_length=255, blank=True)
+    image = models.ImageField(upload_to=_default_image_upload_to)
+    thumbnail = models.ImageField(upload_to=_default_thumb_upload_to, blank=True, null=True)
+    content_type = models.CharField(max_length=100, blank=True)
+    width = models.PositiveIntegerField(default=0)
+    height = models.PositiveIntegerField(default=0)
+    uploaded_by_team_profile = models.ForeignKey(
+        'TeamProfile', on_delete=models.SET_NULL, null=True, blank=True,
+        related_name="%(class)s_images"
+    )
+    notes = models.TextField(blank=True)
+    internal = models.BooleanField(default=True)
+
+    # Optional: subclasses can override these constants
+    THUMBNAIL_SIZE = (320, 320)
+    THUMBNAIL_JPEG_QUALITY = 85
+
+    # Optional: subclasses can provide their own upload_to callables
+    IMAGE_UPLOAD_TO = staticmethod(_default_image_upload_to)
+    THUMB_UPLOAD_TO = staticmethod(_default_thumb_upload_to)
+
+    class Meta:
+        abstract = True
+        ordering = ('-created_at',)
+
+    def __str__(self) -> str:
+        return self.title or str(self.id)
+
+    def _make_thumbnail(self) -> None:
+        """
+        Generate a JPEG thumbnail, update width/height from the original.
+        No-op if Pillow is unavailable.
+        """
+        if not PilImage or not self.image:
+            return
+
+        self.image.open()
+        with PilImage.open(self.image) as img:
+            img = img.convert('RGB')
+            self.width, self.height = img.size
+
+            thumb = img.copy()
+            thumb.thumbnail(self.THUMBNAIL_SIZE)
+            buf = BytesIO()
+            thumb.save(buf, format='JPEG', quality=self.THUMBNAIL_JPEG_QUALITY)
+            buf.seek(0)
+
+        # Name the thumb deterministically by the original basename to aid caching,
+        # but still safe to reuse upload_to which can rename.
+        original_basename = os.path.basename(self.image.name)
+        thumb_name = f"thumb_{original_basename}.jpg"
+
+        # Respect custom THUMB_UPLOAD_TO if the subclass overrides the field's upload_to.
+        # When saving directly to a FieldFile, the field's upload_to is applied only if
+        # the name has no path, so we provide just a name; the storage backend will
+        # place it using the field's configured upload_to.
+        self.thumbnail.save(
+            name=thumb_name,
+            content=ContentFile(buf.read()),
+            save=False,
+        )
+
+    def save(self, *args, **kwargs):
+        """
+        Save then ensure a thumbnail exists and dimensions are set.
+        On creation or when the thumbnail is missing, attempt to generate a thumbnail.
+        """
+        creating = self._state.adding
+        super().save(*args, **kwargs)
+
+        if creating or (self.image and not self.thumbnail):
+            try:
+                self._make_thumbnail()
+            except (OSError, ValueError):
+                # Thumbnail generation failed (invalid image or I/O error); keep the
+                # original image. PIL.UnidentifiedImageError subclasses OSError, so
+                # unreadable images land here as well.
+                return
+            else:
+                # Persist the derived fields on successful thumbnail generation
+                super().save(update_fields=['thumbnail', 'width', 'height'])
+
+    def delete(self, *args, **kwargs):
+        """
+        Delete the model and its associated files from storage.
+        """
+        # Store file names before delete (after delete, fields may be cleared)
+        image_name = self.image.name if self.image else None
+        thumbnail_name = self.thumbnail.name if self.thumbnail else None
+
+        # Delete the model instance
+        super().delete(*args, **kwargs)
+
+        # Delete files from storage
+        if image_name:
+            try:
+                self.image.storage.delete(image_name)
+            except Exception:
+                pass  # File may already be deleted or inaccessible
+
+        if thumbnail_name:
+            try:
+                self.thumbnail.storage.delete(thumbnail_name)
+            except Exception:
+                pass  # File may already be deleted or inaccessible
diff --git a/core/models/chat.py b/core/models/chat.py
new file mode 100644
index 0000000..66a6724
--- /dev/null
+++ b/core/models/chat.py
@@ -0,0 +1,94 @@
+"""
+Chat models for AI assistant conversations.
+"""
+from django.db import models
+from core.models.base import BaseModel
+
+
+class ChatConversation(BaseModel):
+    """
+    A chat conversation between a team member and the AI assistant.
+    Conversations persist across browser sessions and can be resumed.
+    """
+    team_profile = models.ForeignKey(
+        'TeamProfile',
+        on_delete=models.CASCADE,
+        related_name='chat_conversations',
+        help_text="The team member who owns this conversation"
+    )
+    title = models.CharField(
+        max_length=255,
+        blank=True,
+        help_text="Auto-generated title based on conversation content"
+    )
+    is_active = models.BooleanField(
+        default=True,
+        help_text="Whether this conversation is active (not archived)"
+    )
+
+    class Meta:
+        ordering = ('-updated_at',)
+        verbose_name = 'Chat Conversation'
+        verbose_name_plural = 'Chat Conversations'
+
+    def __str__(self):
+        return f"{self.team_profile}: {self.title or 'Untitled'}"
+
+    def generate_title(self) -> str:
+        """
+        Generate a title from the first user message.
+        Returns the first 50 characters of the first user message.
+        """
+        first_message = self.messages.filter(role='user').first()
+        if first_message:
+            content = first_message.content[:50]
+            if len(first_message.content) > 50:
+                content += '...'
+            return content
+        return 'New Conversation'
+
+
+class ChatMessage(BaseModel):
+    """
+    Individual message in a chat conversation.
+    Stores both user messages and assistant responses, including tool calls.
+    """
+    ROLE_CHOICES = [
+        ('user', 'User'),
+        ('assistant', 'Assistant'),
+    ]
+
+    conversation = models.ForeignKey(
+        ChatConversation,
+        on_delete=models.CASCADE,
+        related_name='messages',
+        help_text="The conversation this message belongs to"
+    )
+    role = models.CharField(
+        max_length=20,
+        choices=ROLE_CHOICES,
+        help_text="Whether this message is from the user or assistant"
+    )
+    content = models.TextField(
+        blank=True,
+        help_text="The text content of the message"
+    )
+    tool_calls = models.JSONField(
+        default=list,
+        blank=True,
+        help_text="List of MCP tool calls made during this message"
+    )
+    tool_results = models.JSONField(
+        default=list,
+        blank=True,
+        help_text="Results from MCP tool executions"
+    )
+
+    class Meta:
+        ordering = ('created_at',)
+        verbose_name = 'Chat Message'
+        verbose_name_plural = 'Chat Messages'
+
+    def __str__(self):
+        preview = self.content[:50] + '...' if len(self.content) > 50 else self.content
+        return f"[{self.role}] {preview}"
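+
+
+# Illustrative usage sketch (placeholder data, assuming a resolved `profile`):
+#
+#     convo = ChatConversation.objects.create(team_profile=profile)
+#     ChatMessage.objects.create(conversation=convo, role='user',
+#                                content='Which services are scheduled today?')
+#     convo.title = convo.generate_title()
+#     convo.save()   # title == "Which services are scheduled today?"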
diff --git a/core/models/customer.py b/core/models/customer.py
new file mode 100644
index 0000000..800ee1b
--- /dev/null
+++ b/core/models/customer.py
@@ -0,0 +1,127 @@
+from django.core.exceptions import ValidationError
+from django.db import models
+from django.utils import timezone
+from django_choices_field.fields import TextChoicesField
+from core.models.base import BaseModel, Address, Contact
+from core.models.enums import AddressChoices, StatusChoices
+
+
+class Customer(BaseModel):
+    """Customer model with contact information"""
+    name = models.CharField(max_length=200, unique=True)
+    status = TextChoicesField(choices_enum=StatusChoices, default=StatusChoices.ACTIVE,
+                              help_text="Current status of the customer")
+    start_date = models.DateField(default=timezone.now)
+    end_date = models.DateField(blank=True, null=True)
+    billing_terms = models.TextField()
+    billing_email = models.EmailField(blank=True)
+    wave_customer_id = models.CharField(max_length=255, blank=True, null=True,
+                                        help_text="Wave customer ID")
+
+    class Meta:
+        ordering = ['name']
+        verbose_name = "Customer"
+        verbose_name_plural = "Customers"
+        indexes = [
+            models.Index(fields=['status', 'start_date'])
+        ]
+
+    def __str__(self):
+        return self.name
+
+    @property
+    def is_active(self):
+        """Check if the customer is currently active based on dates and status"""
+        today = timezone.now().date()
+        return self.status == 'ACTIVE' and self.start_date <= today and (
+            self.end_date is None or self.end_date >= today)
+
+
+class CustomerAddress(Address):
+    """Address information for a customer"""
+    customer = models.ForeignKey('Customer', on_delete=models.CASCADE, related_name='addresses')
+    address_type = TextChoicesField(choices_enum=AddressChoices, default=AddressChoices.BILLING,
+                                    help_text="Type of address")
+    is_active = models.BooleanField(default=True)
+    is_primary = models.BooleanField(default=False)
+
+    class Meta:
+        verbose_name = "Customer Address"
+        verbose_name_plural = "Customer Addresses"
+        indexes = [models.Index(fields=['customer', 'address_type', 'is_active'])]
+        constraints = [models.UniqueConstraint(fields=['customer'],
+                                               condition=models.Q(is_primary=True, is_active=True),
+                                               name='unique_primary_address_per_customer')]
+
+    def save(self, *args, **kwargs):
+        # Auto-set the first active address as primary (mirrors AccountAddress)
+        if self.is_active and not CustomerAddress.objects.filter(
+            customer=self.customer,
+            is_active=True
+        ).exclude(pk=self.pk).exists():
+            self.is_primary = True
+        super().save(*args, **kwargs)
+
+    def get_address_type_display(self) -> str:
+        try:
+            # address_type may be the raw value; coerce to the enum, then read its label
+            return AddressChoices(self.address_type).label
+        except (ValueError, TypeError):
+            # Fallback to string value if something unexpected is stored
+            return str(self.address_type)
+
+    def __str__(self):
+        primary_indicator = " (Primary)" if self.is_primary else ""
+        return f"{self.customer.name} - {self.get_address_type_display()}{primary_indicator}"
+
+
+class CustomerContact(Contact):
+    """Contact information for a customer"""
+    customer = models.ForeignKey('Customer', on_delete=models.CASCADE, related_name='contacts')
+    email = models.EmailField(blank=True)
+    is_primary = models.BooleanField(default=False)
+    is_active = models.BooleanField(default=True)
+    notes = models.TextField(blank=True)
+
+    class Meta:
+        verbose_name = "Customer Contact"
+        verbose_name_plural = "Customer Contacts"
+        indexes = [
+            models.Index(fields=['customer', 'is_active']),
+        ]
+        constraints = [
+            # Only one primary contact per customer
+            models.UniqueConstraint(
+                fields=['customer'],
+                condition=models.Q(is_primary=True, is_active=True),
+                name='unique_primary_contact_per_customer'
+            ),
+            # Prevent duplicate phone numbers for the same customer (when phone provided)
+            models.UniqueConstraint(
+                fields=['customer', 'phone'],
+                condition=models.Q(is_active=True, phone__isnull=False) & ~models.Q(phone=''),
+                name='unique_phone_per_customer'
+            ),
+            # Prevent duplicate emails for the same customer (when email provided)
+            models.UniqueConstraint(
+                fields=['customer', 'email'],
+                condition=models.Q(is_active=True, email__isnull=False) & ~models.Q(email=''),
+                name='unique_email_per_customer'
+            )
+        ]
+
+    def save(self, *args, **kwargs):
+        # Auto-set first active contact as primary
+        if self.is_active and not CustomerContact.objects.filter(
+            customer=self.customer,
+            is_active=True
+        ).exclude(pk=self.pk).exists():
+            self.is_primary = True
+        super().save(*args, **kwargs)
+
+    def clean(self):
+        """Validate contact data"""
+        if self.is_primary and not self.is_active:
+            raise ValidationError("Primary contact must be active")
+
+        if not self.phone and not self.email:
+            raise ValidationError("Contact must have either phone number or email address")
+
+    def __str__(self):
+        primary_indicator = " (Primary)" if self.is_primary else ""
+        return f"{self.full_name} - {self.customer.name}{primary_indicator}"
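+
+
+# Illustrative behavior sketch (placeholder data): the first active contact
+# saved for a customer is promoted to primary automatically.
+#
+#     c = Customer.objects.create(name="Acme Corp", billing_terms="Net 30")
+#     contact = CustomerContact(customer=c, first_name="Jo", last_name="Ray",
+#                               email="jo@example.com")
+#     contact.full_clean()   # requires phone or email
+#     contact.save()
+#     assert contact.is_primary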
'CUSTOMER_STATUS_CHANGED', 'Customer Status Changed' + CUSTOMER_ADDRESS_CREATED = 'CUSTOMER_ADDRESS_CREATED', 'Customer Address Created' + CUSTOMER_ADDRESS_UPDATED = 'CUSTOMER_ADDRESS_UPDATED', 'Customer Address Updated' + CUSTOMER_ADDRESS_DELETED = 'CUSTOMER_ADDRESS_DELETED', 'Customer Address Deleted' + CUSTOMER_CONTACT_CREATED = 'CUSTOMER_CONTACT_CREATED', 'Customer Contact Created' + CUSTOMER_CONTACT_UPDATED = 'CUSTOMER_CONTACT_UPDATED', 'Customer Contact Updated' + CUSTOMER_CONTACT_DELETED = 'CUSTOMER_CONTACT_DELETED', 'Customer Contact Deleted' + + # Account events + ACCOUNT_CREATED = 'ACCOUNT_CREATED', 'Account Created' + ACCOUNT_UPDATED = 'ACCOUNT_UPDATED', 'Account Updated' + ACCOUNT_DELETED = 'ACCOUNT_DELETED', 'Account Deleted' + ACCOUNT_STATUS_CHANGED = 'ACCOUNT_STATUS_CHANGED', 'Account Status Changed' + ACCOUNT_ADDRESS_CREATED = 'ACCOUNT_ADDRESS_CREATED', 'Account Address Created' + ACCOUNT_ADDRESS_UPDATED = 'ACCOUNT_ADDRESS_UPDATED', 'Account Address Updated' + ACCOUNT_ADDRESS_DELETED = 'ACCOUNT_ADDRESS_DELETED', 'Account Address Deleted' + ACCOUNT_CONTACT_CREATED = 'ACCOUNT_CONTACT_CREATED', 'Account Contact Created' + ACCOUNT_CONTACT_UPDATED = 'ACCOUNT_CONTACT_UPDATED', 'Account Contact Updated' + ACCOUNT_CONTACT_DELETED = 'ACCOUNT_CONTACT_DELETED', 'Account Contact Deleted' + + # Service events + SERVICE_CREATED = 'SERVICE_CREATED', 'Service Created' + SERVICE_UPDATED = 'SERVICE_UPDATED', 'Service Updated' + SERVICE_DELETED = 'SERVICE_DELETED', 'Service Deleted' + SERVICE_STATUS_CHANGED = 'SERVICE_STATUS_CHANGED', 'Service Status Changed' + SERVICE_COMPLETED = 'SERVICE_COMPLETED', 'Service Completed' + SERVICE_CANCELLED = 'SERVICE_CANCELLED', 'Service Cancelled' + SERVICE_TEAM_ASSIGNED = 'SERVICE_TEAM_ASSIGNED', 'Team Assigned to Service' + SERVICE_TEAM_UNASSIGNED = 'SERVICE_TEAM_UNASSIGNED', 'Team Unassigned from Service' + SERVICE_DISPATCHED = 'SERVICE_DISPATCHED', 'Service Dispatched' + SERVICES_BULK_GENERATED = 'SERVICES_BULK_GENERATED', 'Services Bulk Generated' + + # Service session events + SERVICE_SESSION_OPENED = 'SERVICE_SESSION_OPENED', 'Service Session Opened' + SERVICE_SESSION_CLOSED = 'SERVICE_SESSION_CLOSED', 'Service Session Closed' + SERVICE_SESSION_REVERTED = 'SERVICE_SESSION_REVERTED', 'Service Session Reverted' + SERVICE_TASK_COMPLETED = 'SERVICE_TASK_COMPLETED', 'Service Task Completed' + SERVICE_TASK_UNCOMPLETED = 'SERVICE_TASK_UNCOMPLETED', 'Service Task Uncompleted' + + # Schedule events + SCHEDULE_CREATED = 'SCHEDULE_CREATED', 'Schedule Created' + SCHEDULE_UPDATED = 'SCHEDULE_UPDATED', 'Schedule Updated' + SCHEDULE_DELETED = 'SCHEDULE_DELETED', 'Schedule Deleted' + SCHEDULE_FREQUENCY_CHANGED = 'SCHEDULE_FREQUENCY_CHANGED', 'Schedule Frequency Changed' + + # Project events + PROJECT_CREATED = 'PROJECT_CREATED', 'Project Created' + PROJECT_UPDATED = 'PROJECT_UPDATED', 'Project Updated' + PROJECT_STATUS_CHANGED = 'PROJECT_STATUS_CHANGED', 'Project Status Changed' + PROJECT_COMPLETED = 'PROJECT_COMPLETED', 'Project Completed' + PROJECT_CANCELLED = 'PROJECT_CANCELLED', 'Project Cancelled' + PROJECT_DISPATCHED = 'PROJECT_DISPATCHED', 'Project Dispatched' + PROJECT_DELETED = 'PROJECT_DELETED', 'Project Deleted' + + # Project session events + PROJECT_SESSION_OPENED = 'PROJECT_SESSION_OPENED', 'Project Session Opened' + PROJECT_SESSION_CLOSED = 'PROJECT_SESSION_CLOSED', 'Project Session Closed' + PROJECT_SESSION_REVERTED = 'PROJECT_SESSION_REVERTED', 'Project Session Reverted' + PROJECT_TASK_COMPLETED = 'PROJECT_TASK_COMPLETED', 'Project Task Completed' + 
PROJECT_TASK_UNCOMPLETED = 'PROJECT_TASK_UNCOMPLETED', 'Project Task Uncompleted' + + # Project scope events + PROJECT_SCOPE_CREATED = 'PROJECT_SCOPE_CREATED', 'Project Scope Created' + PROJECT_SCOPE_UPDATED = 'PROJECT_SCOPE_UPDATED', 'Project Scope Updated' + PROJECT_SCOPE_DELETED = 'PROJECT_SCOPE_DELETED', 'Project Scope Deleted' + PROJECT_SCOPE_CATEGORY_CREATED = 'PROJECT_SCOPE_CATEGORY_CREATED', 'Project Scope Category Created' + PROJECT_SCOPE_CATEGORY_UPDATED = 'PROJECT_SCOPE_CATEGORY_UPDATED', 'Project Scope Category Updated' + PROJECT_SCOPE_CATEGORY_DELETED = 'PROJECT_SCOPE_CATEGORY_DELETED', 'Project Scope Category Deleted' + PROJECT_SCOPE_TASK_CREATED = 'PROJECT_SCOPE_TASK_CREATED', 'Project Scope Task Created' + PROJECT_SCOPE_TASK_UPDATED = 'PROJECT_SCOPE_TASK_UPDATED', 'Project Scope Task Updated' + PROJECT_SCOPE_TASK_DELETED = 'PROJECT_SCOPE_TASK_DELETED', 'Project Scope Task Deleted' + PROJECT_SCOPE_TEMPLATE_INSTANTIATED = 'PROJECT_SCOPE_TEMPLATE_INSTANTIATED', 'Project Scope Template Instantiated' + + # Scope events + SCOPE_CREATED = 'SCOPE_CREATED', 'Scope Created' + SCOPE_UPDATED = 'SCOPE_UPDATED', 'Scope Updated' + SCOPE_DELETED = 'SCOPE_DELETED', 'Scope Deleted' + AREA_CREATED = 'AREA_CREATED', 'Area Created' + AREA_UPDATED = 'AREA_UPDATED', 'Area Updated' + AREA_DELETED = 'AREA_DELETED', 'Area Deleted' + TASK_CREATED = 'TASK_CREATED', 'Task Created' + TASK_UPDATED = 'TASK_UPDATED', 'Task Updated' + TASK_DELETED = 'TASK_DELETED', 'Task Deleted' + TASK_COMPLETION_RECORDED = 'TASK_COMPLETION_RECORDED', 'Task Completion Recorded' + + # Scope template events + SCOPE_TEMPLATE_CREATED = 'SCOPE_TEMPLATE_CREATED', 'Scope Template Created' + SCOPE_TEMPLATE_UPDATED = 'SCOPE_TEMPLATE_UPDATED', 'Scope Template Updated' + SCOPE_TEMPLATE_DELETED = 'SCOPE_TEMPLATE_DELETED', 'Scope Template Deleted' + SCOPE_TEMPLATE_INSTANTIATED = 'SCOPE_TEMPLATE_INSTANTIATED', 'Scope Template Instantiated' + AREA_TEMPLATE_CREATED = 'AREA_TEMPLATE_CREATED', 'Area Template Created' + AREA_TEMPLATE_UPDATED = 'AREA_TEMPLATE_UPDATED', 'Area Template Updated' + AREA_TEMPLATE_DELETED = 'AREA_TEMPLATE_DELETED', 'Area Template Deleted' + TASK_TEMPLATE_CREATED = 'TASK_TEMPLATE_CREATED', 'Task Template Created' + TASK_TEMPLATE_UPDATED = 'TASK_TEMPLATE_UPDATED', 'Task Template Updated' + TASK_TEMPLATE_DELETED = 'TASK_TEMPLATE_DELETED', 'Task Template Deleted' + + # Team profile events + TEAM_PROFILE_CREATED = 'TEAM_PROFILE_CREATED', 'Team Profile Created' + TEAM_PROFILE_UPDATED = 'TEAM_PROFILE_UPDATED', 'Team Profile Updated' + TEAM_PROFILE_DELETED = 'TEAM_PROFILE_DELETED', 'Team Profile Deleted' + TEAM_PROFILE_ROLE_CHANGED = 'TEAM_PROFILE_ROLE_CHANGED', 'Team Profile Role Changed' + + # Customer profile events + CUSTOMER_PROFILE_CREATED = 'CUSTOMER_PROFILE_CREATED', 'Customer Profile Created' + CUSTOMER_PROFILE_UPDATED = 'CUSTOMER_PROFILE_UPDATED', 'Customer Profile Updated' + CUSTOMER_PROFILE_DELETED = 'CUSTOMER_PROFILE_DELETED', 'Customer Profile Deleted' + CUSTOMER_PROFILE_ACCESS_GRANTED = 'CUSTOMER_PROFILE_ACCESS_GRANTED', 'Customer Profile Access Granted' + CUSTOMER_PROFILE_ACCESS_REVOKED = 'CUSTOMER_PROFILE_ACCESS_REVOKED', 'Customer Profile Access Revoked' + + # Punchlist events + ACCOUNT_PUNCHLIST_CREATED = 'ACCOUNT_PUNCHLIST_CREATED', 'Account Punchlist Created' + ACCOUNT_PUNCHLIST_UPDATED = 'ACCOUNT_PUNCHLIST_UPDATED', 'Account Punchlist Updated' + ACCOUNT_PUNCHLIST_DELETED = 'ACCOUNT_PUNCHLIST_DELETED', 'Account Punchlist Deleted' + PROJECT_PUNCHLIST_CREATED = 'PROJECT_PUNCHLIST_CREATED', 'Project 
Punchlist Created' + PROJECT_PUNCHLIST_UPDATED = 'PROJECT_PUNCHLIST_UPDATED', 'Project Punchlist Updated' + PROJECT_PUNCHLIST_DELETED = 'PROJECT_PUNCHLIST_DELETED', 'Project Punchlist Deleted' + PUNCHLIST_STATUS_CHANGED = 'PUNCHLIST_STATUS_CHANGED', 'Punchlist Status Changed' + PUNCHLIST_PRIORITY_CHANGED = 'PUNCHLIST_PRIORITY_CHANGED', 'Punchlist Priority Changed' + + # Session media events + SESSION_IMAGE_UPLOADED = 'SESSION_IMAGE_UPLOADED', 'Session Image Uploaded' + SESSION_IMAGE_UPDATED = 'SESSION_IMAGE_UPDATED', 'Session Image Updated' + SESSION_IMAGE_DELETED = 'SESSION_IMAGE_DELETED', 'Session Image Deleted' + SESSION_VIDEO_UPLOADED = 'SESSION_VIDEO_UPLOADED', 'Session Video Uploaded' + SESSION_VIDEO_UPDATED = 'SESSION_VIDEO_UPDATED', 'Session Video Updated' + SESSION_VIDEO_DELETED = 'SESSION_VIDEO_DELETED', 'Session Video Deleted' + SESSION_MEDIA_INTERNAL_FLAGGED = 'SESSION_MEDIA_INTERNAL_FLAGGED', 'Session Media Flagged as Internal' + + # Session notes events + SESSION_NOTE_CREATED = 'SESSION_NOTE_CREATED', 'Session Note Created' + SESSION_NOTE_UPDATED = 'SESSION_NOTE_UPDATED', 'Session Note Updated' + SESSION_NOTE_DELETED = 'SESSION_NOTE_DELETED', 'Session Note Deleted' + + # Report events + REPORT_CREATED = 'REPORT_CREATED', 'Report Created' + REPORT_UPDATED = 'REPORT_UPDATED', 'Report Updated' + REPORT_DELETED = 'REPORT_DELETED', 'Report Deleted' + REPORT_SUBMITTED = 'REPORT_SUBMITTED', 'Report Submitted' + REPORT_APPROVED = 'REPORT_APPROVED', 'Report Approved' + + # Invoice events + INVOICE_GENERATED = 'INVOICE_GENERATED', 'Invoice Generated' + INVOICE_SENT = 'INVOICE_SENT', 'Invoice Sent' + INVOICE_PAID = 'INVOICE_PAID', 'Invoice Paid' + INVOICE_OVERDUE = 'INVOICE_OVERDUE', 'Invoice Overdue' + INVOICE_CANCELLED = 'INVOICE_CANCELLED', 'Invoice Cancelled' + + # Labor & Revenue events + LABOR_RATE_CREATED = 'LABOR_RATE_CREATED', 'Labor Rate Created' + LABOR_RATE_UPDATED = 'LABOR_RATE_UPDATED', 'Labor Rate Updated' + LABOR_RATE_DELETED = 'LABOR_RATE_DELETED', 'Labor Rate Deleted' + REVENUE_RATE_CREATED = 'REVENUE_RATE_CREATED', 'Revenue Rate Created' + REVENUE_RATE_UPDATED = 'REVENUE_RATE_UPDATED', 'Revenue Rate Updated' + REVENUE_RATE_DELETED = 'REVENUE_RATE_DELETED', 'Revenue Rate Deleted' + + # Messaging events + CONVERSATION_CREATED = 'CONVERSATION_CREATED', 'Conversation Created' + CONVERSATION_ARCHIVED = 'CONVERSATION_ARCHIVED', 'Conversation Archived' + CONVERSATION_PARTICIPANT_ADDED = 'CONVERSATION_PARTICIPANT_ADDED', 'Participant Added to Conversation' + CONVERSATION_PARTICIPANT_REMOVED = 'CONVERSATION_PARTICIPANT_REMOVED', 'Participant Removed from Conversation' + MESSAGE_SENT = 'MESSAGE_SENT', 'Message Sent' + MESSAGE_RECEIVED = 'MESSAGE_RECEIVED', 'Message Received' + MESSAGE_READ = 'MESSAGE_READ', 'Message Read' + MESSAGE_DELETED = 'MESSAGE_DELETED', 'Message Deleted' + + # Monitoring events (audit trail) + MONITORING_INCOMPLETE_WORK_REMINDER = 'MONITORING_INCOMPLETE_WORK_REMINDER', 'Incomplete Work Reminder Sent' + MONITORING_NIGHTLY_ASSIGNMENTS = 'MONITORING_NIGHTLY_ASSIGNMENTS', 'Nightly Assignments Sent' + MONITORING_COMMAND_EXECUTED = 'MONITORING_COMMAND_EXECUTED', 'Monitoring Command Executed' + + +class NotificationChannelChoices(models.TextChoices): + """Delivery channels for notifications""" + IN_APP = 'IN_APP', 'In-App' + EMAIL = 'EMAIL', 'Email' + SMS = 'SMS', 'SMS' + + +class NotificationStatusChoices(models.TextChoices): + """Status choices for notifications""" + PENDING = 'PENDING', 'Pending' + SENT = 'SENT', 'Sent' + READ = 'READ', 'Read' + FAILED = 
'FAILED', 'Failed' + + +class DeliveryStatusChoices(models.TextChoices): + """Status choices for notification delivery attempts""" + PENDING = 'PENDING', 'Pending' + QUEUED = 'QUEUED', 'Queued' + SENDING = 'SENDING', 'Sending' + SENT = 'SENT', 'Sent' + DELIVERED = 'DELIVERED', 'Delivered' + FAILED = 'FAILED', 'Failed' + BOUNCED = 'BOUNCED', 'Bounced' + + +class ConversationTypeChoices(models.TextChoices): + """Type of conversation""" + DIRECT = 'DIRECT', 'Direct Message' + GROUP = 'GROUP', 'Group Conversation' + SUPPORT = 'SUPPORT', 'Support Ticket' \ No newline at end of file diff --git a/core/models/events.py b/core/models/events.py new file mode 100644 index 0000000..20f4d8a --- /dev/null +++ b/core/models/events.py @@ -0,0 +1,269 @@ +from django.db import models +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.contrib.postgres.fields import ArrayField +from django_choices_field import TextChoicesField + +from core.models.base import BaseModel +from core.models.enums import ( + EventTypeChoices, + NotificationChannelChoices, + NotificationStatusChoices, + DeliveryStatusChoices, + RoleChoices +) + + +class Event(BaseModel): + """ + Event model to track system events that may trigger notifications. + Provides audit trail and basis for notification system. + """ + event_type = TextChoicesField( + choices_enum=EventTypeChoices, + help_text="Type of event that occurred" + ) + entity_type = models.CharField( + max_length=100, + help_text="Type of entity (e.g., 'Project', 'Report', 'Invoice')" + ) + entity_id = models.UUIDField( + help_text="UUID of the entity that triggered this event" + ) + metadata = models.JSONField( + default=dict, + blank=True, + help_text="Additional event metadata (e.g., old_status, new_status, changed_fields)" + ) + # Generic foreign key to support both TeamProfile and CustomerProfile + triggered_by_content_type = models.ForeignKey( + ContentType, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name='triggered_events' + ) + triggered_by_object_id = models.UUIDField( + null=True, + blank=True + ) + triggered_by = GenericForeignKey('triggered_by_content_type', 'triggered_by_object_id') + + class Meta: + db_table = 'events' + ordering = ['-created_at'] + indexes = [ + models.Index(fields=['event_type', 'created_at']), + models.Index(fields=['entity_type', 'entity_id']), + models.Index(fields=['created_at']), + ] + + def __str__(self): + return f"{self.event_type} - {self.entity_type}:{self.entity_id} at {self.created_at}" + + +class NotificationRule(BaseModel): + """ + Admin-defined rules for generating notifications based on events. 
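[Reviewer note: a minimal sketch of how these TextChoices enums are typically consumed. The queryset and `invoice` instance are hypothetical; `Invoice` is defined later in this patch, and `get_status_display()` is the standard accessor Django generates for fields with choices, which `TextChoicesField` should also provide since it subclasses CharField.]

from core.models.enums import InvoiceChoices
from core.models.invoice import Invoice

# Filter on the stored value...
overdue = Invoice.objects.filter(status=InvoiceChoices.OVERDUE)

# ...and render the human-readable label.
invoice = overdue.first()
if invoice is not None:
    print(invoice.get_status_display())  # -> 'Overdue'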
+ """ + name = models.CharField( + max_length=200, + help_text="Descriptive name for this notification rule" + ) + description = models.TextField( + blank=True, + help_text="Description of when and how this rule applies" + ) + event_types = ArrayField( + TextChoicesField(choices_enum=EventTypeChoices), + help_text="List of event types that trigger this rule" + ) + channels = ArrayField( + TextChoicesField(choices_enum=NotificationChannelChoices), + help_text="Delivery channels for notifications (IN_APP, EMAIL, SMS)" + ) + target_roles = ArrayField( + TextChoicesField(choices_enum=RoleChoices), + blank=True, + default=list, + help_text="Roles that should receive notifications (empty = all authenticated users)" + ) + target_team_profiles = models.ManyToManyField( + 'TeamProfile', + blank=True, + related_name='notification_rules', + help_text="Specific team profiles to notify" + ) + target_customer_profiles = models.ManyToManyField( + 'CustomerProfile', + blank=True, + related_name='notification_rules', + help_text="Specific customer profiles to notify" + ) + is_active = models.BooleanField( + default=True, + help_text="Whether this rule is currently active" + ) + template_subject = models.CharField( + max_length=500, + blank=True, + help_text="Template for notification subject (supports variables)" + ) + template_body = models.TextField( + blank=True, + help_text="Template for notification body (supports variables)" + ) + conditions = models.JSONField( + default=dict, + blank=True, + help_text="Additional conditions for when this rule applies (e.g., {'status': 'COMPLETED'})" + ) + + class Meta: + db_table = 'notification_rules' + ordering = ['name'] + + def __str__(self): + return f"{self.name} ({', '.join(self.event_types)})" + + +class Notification(BaseModel): + """ + Individual notification instance sent to a specific recipient. 
+ """ + event = models.ForeignKey( + Event, + on_delete=models.CASCADE, + related_name='notifications', + help_text="Event that triggered this notification" + ) + rule = models.ForeignKey( + NotificationRule, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name='notifications', + help_text="Rule that generated this notification" + ) + # Generic foreign key to support both TeamProfile and CustomerProfile + recipient_content_type = models.ForeignKey( + ContentType, + on_delete=models.CASCADE, + related_name='notifications' + ) + recipient_object_id = models.UUIDField() + recipient = GenericForeignKey('recipient_content_type', 'recipient_object_id') + status = TextChoicesField( + choices_enum=NotificationStatusChoices, + default=NotificationStatusChoices.PENDING, + help_text="Current status of the notification" + ) + subject = models.CharField( + max_length=500, + help_text="Notification subject line" + ) + body = models.TextField( + help_text="Notification body content" + ) + action_url = models.URLField( + blank=True, + max_length=500, + help_text="Optional URL for action button (e.g., link to project detail)" + ) + read_at = models.DateTimeField( + null=True, + blank=True, + help_text="Timestamp when notification was marked as read" + ) + metadata = models.JSONField( + default=dict, + blank=True, + help_text="Additional notification metadata" + ) + + class Meta: + db_table = 'notifications' + ordering = ['-created_at'] + indexes = [ + models.Index(fields=['recipient_content_type', 'recipient_object_id', 'status', 'created_at']), + models.Index(fields=['recipient_content_type', 'recipient_object_id', 'read_at']), + models.Index(fields=['event']), + ] + + def __str__(self): + return f"Notification for {self.recipient} - {self.subject}" + + def mark_as_read(self): + """Mark notification as read""" + if not self.read_at: + from django.utils import timezone + self.read_at = timezone.now() + self.status = NotificationStatusChoices.READ + self.save(update_fields=['read_at', 'status', 'updated_at']) + + +class NotificationDelivery(BaseModel): + """ + Track delivery attempts for a notification via specific channels. 
+ """ + notification = models.ForeignKey( + Notification, + on_delete=models.CASCADE, + related_name='deliveries', + help_text="Notification being delivered" + ) + channel = TextChoicesField( + choices_enum=NotificationChannelChoices, + help_text="Delivery channel (IN_APP, EMAIL, SMS)" + ) + status = TextChoicesField( + choices_enum=DeliveryStatusChoices, + default=DeliveryStatusChoices.PENDING, + help_text="Current delivery status" + ) + attempts = models.PositiveIntegerField( + default=0, + help_text="Number of delivery attempts" + ) + last_attempt_at = models.DateTimeField( + null=True, + blank=True, + help_text="Timestamp of last delivery attempt" + ) + sent_at = models.DateTimeField( + null=True, + blank=True, + help_text="Timestamp when successfully sent" + ) + delivered_at = models.DateTimeField( + null=True, + blank=True, + help_text="Timestamp when delivery was confirmed (if supported by channel)" + ) + error_message = models.TextField( + blank=True, + help_text="Error message from failed delivery attempts" + ) + external_id = models.CharField( + max_length=200, + blank=True, + help_text="External service ID (e.g., Twilio message SID, email message ID)" + ) + metadata = models.JSONField( + default=dict, + blank=True, + help_text="Additional delivery metadata" + ) + + class Meta: + db_table = 'notification_deliveries' + ordering = ['-created_at'] + indexes = [ + models.Index(fields=['notification', 'channel']), + models.Index(fields=['status', 'last_attempt_at']), + models.Index(fields=['channel', 'status']), + ] + unique_together = [['notification', 'channel']] + + def __str__(self): + return f"{self.channel} delivery for notification {self.notification_id} - {self.status}" diff --git a/core/models/invoice.py b/core/models/invoice.py new file mode 100644 index 0000000..24784b1 --- /dev/null +++ b/core/models/invoice.py @@ -0,0 +1,32 @@ +from django.db import models +from django_choices_field.fields import TextChoicesField +from core.models.base import BaseModel +from core.models.customer import Customer +from core.models.enums import InvoiceChoices, PaymentChoices +from core.models.project import Project +from core.models.revenue import Revenue + + +class Invoice(BaseModel): + """Invoice records""" + date = models.DateField() + customer = models.ForeignKey(Customer, on_delete=models.PROTECT, related_name='invoices') + projects = models.ManyToManyField(Project, related_name='invoices', blank=True) + revenues = models.ManyToManyField(Revenue, related_name='invoices', blank=True) + status = TextChoicesField(choices_enum=InvoiceChoices, default=InvoiceChoices.DRAFT, + help_text="Current status of the invoice") + date_paid = models.DateField(blank=True, null=True) + payment_type = TextChoicesField(choices_enum=PaymentChoices, blank=True, null=True) + wave_invoice_id = models.CharField(max_length=255, blank=True, null=True, + help_text="Wave invoice ID") + + class Meta: + ordering = ['-date'] + indexes = [ + models.Index(fields=['customer', 'date']), + ] + verbose_name = "Invoice" + verbose_name_plural = "Invoices" + + def __str__(self): + return f"Invoice for {self.customer.name} on {self.date}" diff --git a/core/models/labor.py b/core/models/labor.py new file mode 100644 index 0000000..1949812 --- /dev/null +++ b/core/models/labor.py @@ -0,0 +1,57 @@ +from django.core.exceptions import ValidationError +from django.db import models +from django.db.models import Q +import datetime +from core.models.base import BaseModel +from core.models.account import AccountAddress + +class 
diff --git a/core/models/invoice.py b/core/models/invoice.py
new file mode 100644
index 0000000..24784b1
--- /dev/null
+++ b/core/models/invoice.py
@@ -0,0 +1,32 @@
+from django.db import models
+from django_choices_field.fields import TextChoicesField
+from core.models.base import BaseModel
+from core.models.customer import Customer
+from core.models.enums import InvoiceChoices, PaymentChoices
+from core.models.project import Project
+from core.models.revenue import Revenue
+
+
+class Invoice(BaseModel):
+    """Invoice records"""
+    date = models.DateField()
+    customer = models.ForeignKey(Customer, on_delete=models.PROTECT, related_name='invoices')
+    projects = models.ManyToManyField(Project, related_name='invoices', blank=True)
+    revenues = models.ManyToManyField(Revenue, related_name='invoices', blank=True)
+    status = TextChoicesField(choices_enum=InvoiceChoices, default=InvoiceChoices.DRAFT,
+                              help_text="Current status of the invoice")
+    date_paid = models.DateField(blank=True, null=True)
+    payment_type = TextChoicesField(choices_enum=PaymentChoices, blank=True, null=True)
+    wave_invoice_id = models.CharField(max_length=255, blank=True, null=True,
+                                       help_text="Wave invoice ID")
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [
+            models.Index(fields=['customer', 'date']),
+        ]
+        verbose_name = "Invoice"
+        verbose_name_plural = "Invoices"
+
+    def __str__(self):
+        return f"Invoice for {self.customer.name} on {self.date}"
diff --git a/core/models/labor.py b/core/models/labor.py
new file mode 100644
index 0000000..1949812
--- /dev/null
+++ b/core/models/labor.py
@@ -0,0 +1,57 @@
+from django.core.exceptions import ValidationError
+from django.db import models
+from django.db.models import Q
+import datetime
+from core.models.base import BaseModel
+from core.models.account import AccountAddress
+
+
+class Labor(BaseModel):
+    """Labor records for accounts"""
+    account_address = models.ForeignKey(
+        AccountAddress,
+        on_delete=models.PROTECT,
+        related_name='labors',
+        verbose_name="Account Service Address",
+        null=True
+    )
+    amount = models.DecimalField(max_digits=10, decimal_places=2)
+    start_date = models.DateField()
+    end_date = models.DateField(blank=True, null=True)
+
+    class Meta:
+        ordering = ['-start_date']
+        indexes = [
+            models.Index(fields=['account_address', 'start_date']),
+        ]
+        verbose_name = "Labor"
+        verbose_name_plural = "Labors"
+
+    def __str__(self):
+        # account_address is nullable, so guard before dereferencing it
+        if not self.account_address:
+            return f"Labor - ${self.amount}"
+        return f"{self.account_address.account.name} - {self.account_address.name} - ${self.amount}"
+
+    def clean(self):
+        super().clean()
+
+        # Basic date validity
+        if self.end_date and self.start_date > self.end_date:
+            raise ValidationError({'end_date': "End date must be after start date"})
+
+        # Optional: amount validation
+        if self.amount is None or self.amount < 0:
+            raise ValidationError({'amount': "Amount must be a non-negative value"})
+
+        # Overlap prevention within the same account_address
+        start = self.start_date
+        end = self.end_date or datetime.date.max
+
+        qs = Labor.objects.filter(account_address=self.account_address)
+        if self.pk:
+            qs = qs.exclude(pk=self.pk)
+
+        overlaps = qs.filter(
+            Q(end_date__isnull=True, start_date__lte=end) |
+            Q(end_date__isnull=False, start_date__lte=end, end_date__gte=start)
+        )
+
+        if overlaps.exists():
+            raise ValidationError("Labor rate dates overlap with an existing labor rate for this address.")
\ No newline at end of file
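[Reviewer note: a quick sanity check of the open-ended overlap rule in Labor.clean() above. The `addr` instance and dates are hypothetical; full_clean() is the standard entry point that invokes clean().]

import datetime
from django.core.exceptions import ValidationError
from core.models.labor import Labor

# Assume addr already has a labor rate that is open-ended from 2026-01-01
# (end_date is NULL), so any later start date falls inside its range.
conflicting = Labor(
    account_address=addr,
    amount=100,
    start_date=datetime.date(2026, 3, 1),
)
try:
    conflicting.full_clean()
except ValidationError as exc:
    print(exc)  # overlap error raised by clean()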
diff --git a/core/models/messaging.py b/core/models/messaging.py
new file mode 100644
index 0000000..b819ec4
--- /dev/null
+++ b/core/models/messaging.py
@@ -0,0 +1,254 @@
+from django.db import models
+from django.contrib.contenttypes.fields import GenericForeignKey
+from django.contrib.contenttypes.models import ContentType
+from django_choices_field import TextChoicesField
+
+from core.models.base import BaseModel
+from core.models.enums import ConversationTypeChoices
+
+
+class Conversation(BaseModel):
+    """
+    Conversation thread that groups messages together.
+    Can be linked to specific entities (Project, Service, Account, etc.) for context.
+    """
+    subject = models.CharField(
+        max_length=500,
+        help_text="Conversation subject/title"
+    )
+    conversation_type = TextChoicesField(
+        choices_enum=ConversationTypeChoices,
+        help_text="Type of conversation (DIRECT, GROUP, SUPPORT)"
+    )
+    last_message_at = models.DateTimeField(
+        null=True,
+        blank=True,
+        help_text="Timestamp of most recent message"
+    )
+    is_archived = models.BooleanField(
+        default=False,
+        help_text="Whether this conversation is archived (system-wide)"
+    )
+
+    # Generic foreign key for linking to any entity (Project, Service, Account, etc.)
+    entity_content_type = models.ForeignKey(
+        ContentType,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name='conversations',
+        help_text="Content type of the related entity"
+    )
+    entity_object_id = models.UUIDField(
+        null=True,
+        blank=True,
+        help_text="UUID of the related entity"
+    )
+    entity = GenericForeignKey('entity_content_type', 'entity_object_id')
+
+    # Generic foreign key for conversation creator
+    created_by_content_type = models.ForeignKey(
+        ContentType,
+        on_delete=models.SET_NULL,
+        null=True,
+        blank=True,
+        related_name='created_conversations',
+        help_text="Content type of the creator (TeamProfile or CustomerProfile)"
+    )
+    created_by_object_id = models.UUIDField(
+        null=True,
+        blank=True,
+        help_text="UUID of the creator"
+    )
+    created_by = GenericForeignKey('created_by_content_type', 'created_by_object_id')
+
+    metadata = models.JSONField(
+        default=dict,
+        blank=True,
+        help_text="Additional conversation metadata"
+    )
+
+    class Meta:
+        db_table = 'conversations'
+        ordering = ['-last_message_at', '-created_at']
+        indexes = [
+            models.Index(fields=['-last_message_at']),
+            models.Index(fields=['entity_content_type', 'entity_object_id']),
+            models.Index(fields=['conversation_type', '-last_message_at']),
+            models.Index(fields=['created_by_content_type', 'created_by_object_id']),
+        ]
+
+    def __str__(self):
+        entity_info = f" ({self.entity_content_type.model}:{self.entity_object_id})" if self.entity else ""
+        return f"{self.subject}{entity_info}"
+
+
+class ConversationParticipant(BaseModel):
+    """
+    Links users (TeamProfile or CustomerProfile) to conversations.
+    Tracks per-user read status and preferences.
+    """
+    conversation = models.ForeignKey(
+        Conversation,
+        on_delete=models.CASCADE,
+        related_name='participants',
+        help_text="Conversation this participant belongs to"
+    )
+
+    # Generic foreign key to support both TeamProfile and CustomerProfile
+    participant_content_type = models.ForeignKey(
+        ContentType,
+        on_delete=models.CASCADE,
+        related_name='conversation_participants',
+        help_text="Content type of the participant (TeamProfile or CustomerProfile)"
+    )
+    participant_object_id = models.UUIDField(
+        help_text="UUID of the participant"
+    )
+    participant = GenericForeignKey('participant_content_type', 'participant_object_id')
+
+    last_read_at = models.DateTimeField(
+        null=True,
+        blank=True,
+        help_text="Timestamp when participant last read messages in this conversation"
+    )
+    unread_count = models.PositiveIntegerField(
+        default=0,
+        help_text="Number of unread messages for this participant"
+    )
+    is_muted = models.BooleanField(
+        default=False,
+        help_text="Whether participant has muted notifications for this conversation"
+    )
+    is_archived = models.BooleanField(
+        default=False,
+        help_text="Whether participant has archived this conversation (user-specific)"
+    )
+    joined_at = models.DateTimeField(
+        auto_now_add=True,
+        help_text="When participant joined the conversation"
+    )
+
+    class Meta:
+        db_table = 'conversation_participants'
+        ordering = ['conversation', 'joined_at']
+        indexes = [
+            models.Index(fields=['participant_content_type', 'participant_object_id', 'is_archived']),
+            models.Index(fields=['conversation', 'participant_content_type', 'participant_object_id']),
+            models.Index(fields=['unread_count']),
+        ]
+        unique_together = [['conversation', 'participant_content_type', 'participant_object_id']]
+
+    def __str__(self):
+        return f"{self.participant} in {self.conversation.subject}"
+
+
+class Message(BaseModel):
+    """
+    Individual message within a conversation.
+    """
+    conversation = models.ForeignKey(
+        Conversation,
+        on_delete=models.CASCADE,
+        related_name='messages',
+        help_text="Conversation this message belongs to"
+    )
+
+    # Generic foreign key for sender (TeamProfile or CustomerProfile)
+    sender_content_type = models.ForeignKey(
+        ContentType,
+        on_delete=models.CASCADE,
+        related_name='sent_messages',
+        help_text="Content type of the sender"
+    )
+    sender_object_id = models.UUIDField(
+        help_text="UUID of the sender"
+    )
+    sender = GenericForeignKey('sender_content_type', 'sender_object_id')
+
+    body = models.TextField(
+        help_text="Message content"
+    )
+
+    # For message threading/replies
+    reply_to = models.ForeignKey(
+        'self',
+        on_delete=models.SET_NULL,
+        null=True,
+        blank=True,
+        related_name='replies',
+        help_text="Message this is replying to (for threading)"
+    )
+
+    # Attachments stored as array of file references
+    attachments = models.JSONField(
+        default=list,
+        blank=True,
+        help_text="Array of attachment metadata (file paths, names, sizes, types)"
+    )
+
+    is_system_message = models.BooleanField(
+        default=False,
+        help_text="Whether this is an automated system message"
+    )
+
+    metadata = models.JSONField(
+        default=dict,
+        blank=True,
+        help_text="Additional message metadata (formatting, mentions, etc.)"
+    )
+
+    class Meta:
+        db_table = 'messages'
+        ordering = ['created_at']
+        indexes = [
+            models.Index(fields=['conversation', 'created_at']),
+            models.Index(fields=['sender_content_type', 'sender_object_id', 'created_at']),
+            models.Index(fields=['reply_to']),
+        ]
+
+    def __str__(self):
+        preview = self.body[:50] + "..." if len(self.body) > 50 else self.body
+        return f"Message from {self.sender} in {self.conversation.subject}: {preview}"
+
+
+class MessageReadReceipt(BaseModel):
+    """
+    Tracks when individual messages are read by specific participants.
+    Allows for fine-grained read tracking beyond conversation-level.
+    """
+    message = models.ForeignKey(
+        Message,
+        on_delete=models.CASCADE,
+        related_name='read_receipts',
+        help_text="Message that was read"
+    )
+
+    # Generic foreign key for reader (TeamProfile or CustomerProfile)
+    reader_content_type = models.ForeignKey(
+        ContentType,
+        on_delete=models.CASCADE,
+        related_name='message_reads',
+        help_text="Content type of the reader"
+    )
+    reader_object_id = models.UUIDField(
+        help_text="UUID of the reader"
+    )
+    reader = GenericForeignKey('reader_content_type', 'reader_object_id')
+
+    read_at = models.DateTimeField(
+        auto_now_add=True,
+        help_text="When the message was read"
+    )
+
+    class Meta:
+        db_table = 'message_read_receipts'
+        ordering = ['read_at']
+        indexes = [
+            models.Index(fields=['message', 'reader_content_type', 'reader_object_id']),
+            models.Index(fields=['reader_content_type', 'reader_object_id', 'read_at']),
+        ]
+        unique_together = [['message', 'reader_content_type', 'reader_object_id']]
+
+    def __str__(self):
+        return f"{self.reader} read message {self.message_id} at {self.read_at}"
diff --git a/core/models/profile.py b/core/models/profile.py
new file mode 100644
index 0000000..d228bad
--- /dev/null
+++ b/core/models/profile.py
@@ -0,0 +1,69 @@
+from django.contrib.auth.models import User
+from django.core.exceptions import ValidationError
+from django.db import models
+from django_choices_field.fields import TextChoicesField
+from core.models import Customer
+from core.models.base import Contact
+from core.models.enums import StatusChoices, RoleChoices
+
+
+class Profile(Contact):
+    """Base profile class with common functionality"""
+    user = models.OneToOneField(
+        User, on_delete=models.SET_NULL, related_name='%(class)s',
+        null=True, blank=True,
+        help_text="Optional Django user for admin access only"
+    )
+    status = TextChoicesField(choices_enum=StatusChoices, default=StatusChoices.ACTIVE,
+                              help_text="Current status of the profile")
+    notes = models.TextField(blank=True, default="", verbose_name="Notes")
+    ory_kratos_id = models.CharField(
+        max_length=255, unique=True, null=True, blank=True,
+        verbose_name="Ory Kratos Identity ID",
+        help_text="Unique identifier from Ory Kratos authentication system"
+    )
+    email = models.EmailField(null=True, blank=True)
+
+    class Meta:
+        abstract = True
+        ordering = ['last_name', 'first_name']
+
+    def save(self, *args, **kwargs):
+        # Validate new users against existing profiles (if user is set)
+        # Strict enforcement of one profile per user
+        if not self.pk and self.user:
+            profile_types = [CustomerProfile, TeamProfile]
+            current_type = type(self)
+
+            for profile_type in profile_types:
+                if profile_type != current_type:
+                    if profile_type.objects.filter(user=self.user).exists():
+                        raise ValidationError(
+                            f"User already has a {profile_type.__name__}. "
+                            f"Cannot create {current_type.__name__}."
+                        )
+        super().save(*args, **kwargs)
+
+
+class CustomerProfile(Profile):
+    """External/public-facing customer accounts"""
+    customers = models.ManyToManyField(
+        Customer,
+        related_name='profiles',
+        blank=True,
+        help_text="Customers this profile has access to"
+    )
+
+    class Meta:
+        verbose_name = "Customer Profile"
+        verbose_name_plural = "Customer Profiles"
+
+
+class TeamProfile(Profile):
+    """Internal team member accounts"""
+    role = TextChoicesField(choices_enum=RoleChoices, default=RoleChoices.TEAM_MEMBER,
+                            help_text="Role of the team member")
+
+    class Meta:
+        verbose_name = "Team Profile"
+        verbose_name_plural = "Team Profiles"
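[Reviewer note: a short illustration of the one-profile-per-user rule enforced in Profile.save() above. The user and the name fields (inherited from the Contact base) are illustrative.]

from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from core.models.profile import CustomerProfile, TeamProfile

user = User.objects.create_user('jdoe')
TeamProfile.objects.create(user=user, first_name='J', last_name='Doe')
try:
    # Second profile type for the same user is rejected at save time.
    CustomerProfile(user=user, first_name='J', last_name='Doe').save()
except ValidationError as exc:
    print(exc)  # "User already has a TeamProfile. Cannot create CustomerProfile."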
diff --git a/core/models/project.py b/core/models/project.py
new file mode 100644
index 0000000..f0d09c0
--- /dev/null
+++ b/core/models/project.py
@@ -0,0 +1,122 @@
+from django.db import models
+from django_choices_field import TextChoicesField
+from core.models.base import BaseModel
+from core.models.customer import Customer
+from core.models.account import AccountAddress
+from core.models.enums import ServiceChoices
+from core.models.profile import TeamProfile
+
+
+class Project(BaseModel):
+    """Project records for customers"""
+    customer = models.ForeignKey(Customer, on_delete=models.PROTECT, related_name='projects')
+
+    # Optional: tie to a specific account address; if not provided, a freeform address is required
+    account_address = models.ForeignKey(
+        AccountAddress,
+        on_delete=models.PROTECT,
+        related_name='projects',
+        blank=True,
+        null=True,
+        help_text="If set, the project uses this account address; otherwise, fill the address fields below",
+    )
+
+    # Optional: if account_address is set, this is the scope of the project
+    scope = models.ForeignKey(
+        'core.ProjectScope',
+        on_delete=models.SET_NULL,
+        related_name='projects',
+        blank=True,
+        null=True,
+    )
+
+    # Freeform address used only when account_address is not provided
+    street_address = models.CharField(max_length=255, blank=True, null=True)
+    city = models.CharField(max_length=100, blank=True, null=True)
+    state = models.CharField(max_length=100, blank=True, null=True)
+    zip_code = models.CharField(max_length=20, blank=True, null=True)
+
+    name = models.CharField(max_length=200, blank=True)
+    date = models.DateField()
+    status = TextChoicesField(
+        choices_enum=ServiceChoices,
+        default=ServiceChoices.SCHEDULED,
+        help_text="Current status of the project",
+    )
+    team_members = models.ManyToManyField(TeamProfile, related_name='projects')
+    notes = models.TextField(blank=True, null=True)
+    labor = models.DecimalField(max_digits=10, decimal_places=2)
+    amount = models.DecimalField(max_digits=10, decimal_places=2, default=0.00)
+    calendar_event_id = models.CharField(max_length=255, blank=True, null=True,
+                                         help_text="External calendar event ID")
+    wave_service_id = models.CharField(max_length=255, blank=True, null=True,
+                                       help_text="Wave service ID")
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [
+            models.Index(fields=['customer', 'date']),
+            models.Index(fields=['status', 'date']),
+            models.Index(fields=['account_address', 'date']),
+        ]
+        constraints = [
+            # Enforce mutual exclusivity/requirement between account_address and freeform address
+            models.CheckConstraint(
+                name='project_addr_xor_check',
+                condition=(
+                    # Case A: account_address is set, AND all freeform fields are NULL
+                    (
+                        models.Q(account_address__isnull=False)
+                        & models.Q(street_address__isnull=True)
+                        & models.Q(city__isnull=True)
+                        & models.Q(state__isnull=True)
+                        & models.Q(zip_code__isnull=True)
+                    )
+                    |  # Case B: account_address is NULL, AND all freeform fields are non-NULL
+                    (
+                        models.Q(account_address__isnull=True)
+                        & models.Q(street_address__isnull=False)
+                        & models.Q(city__isnull=False)
+                        & models.Q(state__isnull=False)
+                        & models.Q(zip_code__isnull=False)
+                    )
+                ),
+            ),
+        ]
+        verbose_name = "Project"
+        verbose_name_plural = "Projects"
+
+    def clean(self):
+        """Validate project data"""
+        from django.core.exceptions import ValidationError
+
+        # Normalize blanks to None so DB constraint and logic align
+        def _blank_to_none(v):
+            return None if isinstance(v, str) and not v.strip() else v
+
+        self.street_address = _blank_to_none(self.street_address)
+        self.city = _blank_to_none(self.city)
+        self.state = _blank_to_none(self.state)
+        self.zip_code = _blank_to_none(self.zip_code)
+
+        has_account_address = self.account_address is not None
+        has_freeform = all([self.street_address, self.city, self.state, self.zip_code])
+
+        # Enforce XOR between account_address and freeform address
+        if has_account_address and has_freeform:
+            raise ValidationError("Provide either an account address or a freeform address, not both.")
+        if not has_account_address and not has_freeform:
+            raise ValidationError("Provide a freeform address when no account address is selected.")
+
+        # If an account_address is provided, ensure it belongs to the same customer
+        if self.account_address and self.account_address.account.customer_id != self.customer_id:
+            raise ValidationError("Selected account address must belong to the specified customer.")
+
+    def __str__(self):
+        if self.account_address:
+            addr_info = f" ({self.account_address.account.name} - {self.account_address.street_address})"
+        else:
+            parts = [p for p in [self.street_address, self.city, self.state, self.zip_code] if p]
+            addr_info = f" ({', '.join(parts)})" if parts else ""
+        return f"Project for {self.customer.name}{addr_info} on {self.date}"
diff --git a/core/models/project_punchlist.py b/core/models/project_punchlist.py
new file mode 100644
index 0000000..d2ddb12
--- /dev/null
+++ b/core/models/project_punchlist.py
@@ -0,0 +1,15 @@
+from django.db import models
+from core.models.base import BaseModel
+from core.models.project import Project
+
+
+class ProjectPunchlist(BaseModel):
+    """Punchlist records for projects"""
+    project = models.ForeignKey(Project, on_delete=models.PROTECT, related_name='punchlists')
+    date = models.DateField()
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [models.Index(fields=['project', 'date'])]
+        verbose_name = "Punchlist"
+        verbose_name_plural = "Punchlists"
\ No newline at end of file
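[Reviewer note: a quick demonstration of the address XOR rule in Project.clean() above. The `customer` instance is hypothetical; full_clean() collects both field-level errors and the clean() errors shown here.]

import datetime
from django.core.exceptions import ValidationError
from core.models.project import Project

# Neither an account_address nor a complete freeform address: clean() rejects it.
project = Project(customer=customer, date=datetime.date.today(), labor=0)
try:
    project.full_clean()
except ValidationError as exc:
    # Includes "Provide a freeform address when no account address is selected."
    print(exc)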
diff --git a/core/models/project_scope.py b/core/models/project_scope.py
new file mode 100644
index 0000000..9cb80ee
--- /dev/null
+++ b/core/models/project_scope.py
@@ -0,0 +1,97 @@
+from django.db import models
+from core.models.profile import TeamProfile
+from core.models.account import Account, AccountAddress
+from core.models.project import Project
+from core.models.base import BaseModel
+
+
+class ProjectScope(BaseModel):
+    """Scope of work definition for a project"""
+    name = models.CharField(max_length=255)
+    account = models.ForeignKey(
+        Account,
+        on_delete=models.PROTECT,
+        related_name='project_scopes',
+        blank=True,
+        null=True,
+    )
+    project = models.ForeignKey(Project, on_delete=models.PROTECT, related_name='project_scopes')
+    account_address = models.ForeignKey(
+        AccountAddress,
+        on_delete=models.PROTECT,
+        related_name='project_scopes',
+        null=True,
+        blank=True,
+    )
+    description = models.TextField(blank=True)
+    is_active = models.BooleanField(default=True)
+
+    class Meta:
+        ordering = ['name']
+        verbose_name = "Project Scope"
+        verbose_name_plural = "Project Scopes"
+        indexes = [
+            models.Index(fields=['project', 'is_active']),
+            models.Index(fields=['account_address', 'is_active']),
+        ]
+        constraints = [
+            # Ensure only one active scope per project/address combination (when address present)
+            models.UniqueConstraint(
+                fields=['project', 'account_address'],
+                condition=models.Q(is_active=True, account_address__isnull=False),
+                name='unique_active_project_scope_per_address',
+            ),
+        ]
+
+    def __str__(self):
+        project_label = str(self.project_id) if self.project_id else "Unassigned Project"
+        return f"{self.name} - {project_label}"
+
+
+class ProjectScopeCategory(BaseModel):
+    """Category of work definition for a project"""
+    name = models.CharField(max_length=255)
+    scope = models.ForeignKey(ProjectScope, on_delete=models.CASCADE, related_name='project_areas')
+    order = models.PositiveIntegerField(default=0)
+
+    class Meta:
+        ordering = ['scope', 'order', 'name']
+        verbose_name = "Project Scope Category"
+        verbose_name_plural = "Project Scope Categories"
+        indexes = [
+            models.Index(fields=['scope', 'order']),
+        ]
+
+    def __str__(self):
+        return f"{self.scope.name} - {self.name}"
+
+
+class ProjectScopeTask(BaseModel):
+    """Specific task definition for a project"""
+    category = models.ForeignKey(ProjectScopeCategory, on_delete=models.CASCADE, related_name='project_tasks')
+    description = models.TextField()
+    checklist_description = models.TextField()
+    order = models.PositiveIntegerField(default=0)
+    estimated_minutes = models.PositiveIntegerField(null=True, blank=True)
+
+    class Meta:
+        ordering = ['category', 'order']
+        verbose_name = "Project Scope Task"
+        verbose_name_plural = "Project Scope Tasks"
+        indexes = [
+            models.Index(fields=['category', 'order']),
+        ]
+
+    def __str__(self):
+        return f"{self.category.name}: {self.description[:50]}..."
+
+
+class ProjectScopeTaskCompletion(BaseModel):
+    """Record of a task template being completed during a project"""
+    task = models.ForeignKey(ProjectScopeTask, on_delete=models.PROTECT, related_name='completions')
+    project = models.ForeignKey(Project, on_delete=models.PROTECT, related_name='task_completions')
+    account = models.ForeignKey(Account, on_delete=models.PROTECT, related_name='task_completions', null=True, blank=True)
+    account_address = models.ForeignKey(AccountAddress, on_delete=models.PROTECT, null=True, blank=True, related_name='project_task_completions')
+    completed_by = models.ForeignKey(TeamProfile, on_delete=models.PROTECT)
+    completed_at = models.DateTimeField()
+    notes = models.TextField(blank=True)
\ No newline at end of file
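[Reviewer note: the unique_active_project_scope_per_address constraint above is enforced at the database level, so a second active scope for the same project/address pair fails at save time. A sketch with hypothetical `project` and `addr` objects:]

from django.db import IntegrityError
from core.models.project_scope import ProjectScope

ProjectScope.objects.create(name='Initial build-out', project=project, account_address=addr)
try:
    ProjectScope.objects.create(name='Duplicate', project=project, account_address=addr)
except IntegrityError:
    print('only one active scope per project/address is allowed')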
+ """ + resolved_account = account or getattr(project, "account", None) + + scope = ProjectScope.objects.create( + name=name or self.name, + account=resolved_account, + project=project, + account_address=account_address, + description=description if description is not None else self.description, + is_active=is_active, + ) + + # Create Categories and Tasks preserving order + category_templates = self.category_templates.all().order_by('order', 'name', 'id') + for ct in category_templates: + category = ProjectScopeCategory.objects.create( + scope=scope, + name=ct.name, + order=ct.order, + ) + task_templates = ct.task_templates.all().order_by('order', 'id') + tasks_to_create = [ + ProjectScopeTask( + category=category, + description=tt.description, + checklist_description=tt.checklist_description, + order=tt.order, + estimated_minutes=tt.estimated_minutes, + ) + for tt in task_templates + ] + if tasks_to_create: + ProjectScopeTask.objects.bulk_create(tasks_to_create) + + return scope + + +class ProjectAreaTemplate(BaseModel): + """Reusable category definition belonging to a ProjectScopeTemplate""" + scope_template = models.ForeignKey( + ProjectScopeTemplate, + on_delete=models.CASCADE, + related_name='category_templates', + ) + name = models.CharField(max_length=255) + order = models.PositiveIntegerField(default=0) + + class Meta: + ordering = ['scope_template', 'order', 'name'] + verbose_name = "Project Area Template" + verbose_name_plural = "Project Area Templates" + indexes = [models.Index(fields=['scope_template', 'order'])] + + def __str__(self): + return f"{self.scope_template.name} - {self.name}" + + +class ProjectTaskTemplate(BaseModel): + """Reusable task definition belonging to a ProjectAreaTemplate""" + area_template = models.ForeignKey( + ProjectAreaTemplate, + on_delete=models.CASCADE, + related_name='task_templates', + ) + description = models.TextField() + checklist_description = models.TextField(blank=True) + order = models.PositiveIntegerField(default=0) + estimated_minutes = models.PositiveIntegerField(null=True, blank=True) + + class Meta: + ordering = ['area_template', 'order', 'id'] + verbose_name = "Project Task Template" + verbose_name_plural = "Project Task Templates" + indexes = [ + models.Index(fields=['area_template', 'order']), + ] + + def __str__(self): + return f"{self.area_template.name}: {self.description[:50]}..." 
diff --git a/core/models/report.py b/core/models/report.py
new file mode 100644
index 0000000..ea6b1f5
--- /dev/null
+++ b/core/models/report.py
@@ -0,0 +1,145 @@
+from django.conf import settings
+from django.db import models
+from django.db.models import Q
+from decimal import Decimal
+from core.models.base import BaseModel
+from core.models.profile import TeamProfile
+from core.models.service import Service
+from core.models.project import Project
+
+
+class Report(BaseModel):
+    """Report records"""
+    date = models.DateField()
+    team_member = models.ForeignKey(TeamProfile, on_delete=models.PROTECT, related_name='reports')
+    services = models.ManyToManyField(Service, related_name='reports', blank=True)
+    projects = models.ManyToManyField(Project, related_name='reports', blank=True)
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [
+            models.Index(fields=['team_member', 'date']),
+        ]
+        verbose_name = "Report"
+        verbose_name_plural = "Reports"
+
+    def __str__(self):
+        return f"Report for {self.team_member.full_name} on {self.date}"
+
+    def get_service_labor_share(self, service):
+        """Get this team member's share of labor for a service (excluding Dispatch)"""
+        if not service.account_address:
+            return Decimal('0.00')
+
+        # Get the labor rate for the service's account address
+        labor = service.account_address.labors.filter(
+            Q(start_date__lte=self.date) &
+            (Q(end_date__isnull=True) | Q(end_date__gte=self.date))
+        ).first()
+
+        if not labor:
+            return Decimal('0.00')
+
+        # Count team members assigned to this service, excluding Dispatch
+        team_members = service.team_members.exclude(id=settings.DISPATCH_TEAM_PROFILE_ID)
+        team_member_count = team_members.count()
+
+        if team_member_count == 0:
+            return Decimal('0.00')
+
+        # Only include this team member's share if they're assigned to the service (and not Dispatch)
+        if not team_members.filter(id=self.team_member.id).exists():
+            return Decimal('0.00')
+
+        # Divide labor rate by number of team members (excluding Dispatch)
+        return labor.amount / team_member_count
+
+    def get_project_labor_share(self, project):
+        """Get this team member's share of labor for a project (excluding Dispatch)"""
+        # Count team members assigned to this project, excluding Dispatch
+        team_members = project.team_members.exclude(id=settings.DISPATCH_TEAM_PROFILE_ID)
+        team_member_count = team_members.count()
+
+        if team_member_count == 0:
+            return Decimal('0.00')
+
+        # Only include this team member's share if they're assigned to the project (and not Dispatch)
+        if not team_members.filter(id=self.team_member.id).exists():
+            return Decimal('0.00')
+
+        # Divide project labor by number of team members (excluding Dispatch)
+        return project.labor / team_member_count
+
+    def get_services_labor_total(self):
+        """Calculate total labor share for all services in this report"""
+        total = Decimal('0.00')
+        for service in self.services.all():
+            total += self.get_service_labor_share(service)
+        return total
+
+    def get_projects_labor_total(self):
+        """Calculate total labor share for all projects in this report"""
+        total = Decimal('0.00')
+        for project in self.projects.all():
+            total += self.get_project_labor_share(project)
+        return total
+
+    def get_total_labor_value(self):
+        """Calculate total labor share for both services and projects"""
+        return self.get_services_labor_total() + self.get_projects_labor_total()
+
+    def get_labor_breakdown(self):
+        """Get a detailed breakdown of labor shares for this specific team member"""
+        services_data = []
+        for service in self.services.all():
+            # Count team members excluding Dispatch
+            team_members = service.team_members.exclude(id=settings.DISPATCH_TEAM_PROFILE_ID)
+            team_member_count = team_members.count()
+            is_assigned = team_members.filter(id=self.team_member.id).exists()
+            labor_rate = Decimal('0.00')
+
+            if service.account_address:
+                labor = service.account_address.labors.filter(
+                    Q(start_date__lte=self.date) &
+                    (Q(end_date__isnull=True) | Q(end_date__gte=self.date))
+                ).first()
+                labor_rate = labor.amount if labor else Decimal('0.00')
+
+            share = self.get_service_labor_share(service)
+
+            services_data.append({
+                'service_id': service.id,
+                'account_name': service.account_address.account.name if service.account_address else None,
+                'address': service.account_address.name if service.account_address else None,
+                'total_labor_rate': labor_rate,
+                'team_member_count': team_member_count,
+                'is_team_member_assigned': is_assigned,
+                'labor_share': share
+            })
+
+        projects_data = []
+        for project in self.projects.all():
+            # Count team members excluding Dispatch
+            team_members = project.team_members.exclude(id=settings.DISPATCH_TEAM_PROFILE_ID)
+            team_member_count = team_members.count()
+            is_assigned = team_members.filter(id=self.team_member.id).exists()
+            share = self.get_project_labor_share(project)
+
+            projects_data.append({
+                'project_id': project.id,
+                'project_name': project.name,
+                'total_labor_amount': project.labor,
+                'team_member_count': team_member_count,
+                'is_team_member_assigned': is_assigned,
+                'labor_share': share
+            })
+
+        return {
+            'team_member_id': self.team_member.id,
+            'team_member_name': self.team_member.full_name,
+            'services': services_data,
+            'projects': projects_data,
+            'services_total': self.get_services_labor_total(),
+            'projects_total': self.get_projects_labor_total(),
+            'grand_total': self.get_total_labor_value()
+        }
\ No newline at end of file
diff --git a/core/models/revenue.py b/core/models/revenue.py
new file mode 100644
index 0000000..d2f3838
--- /dev/null
+++ b/core/models/revenue.py
@@ -0,0 +1,24 @@
+from django.db import models
+from core.models.base import BaseModel
+from core.models.account import Account
+
+
+class Revenue(BaseModel):
+    """Revenue records for accounts"""
+    account = models.ForeignKey(Account, on_delete=models.PROTECT, related_name='revenues')
+    amount = models.DecimalField(max_digits=10, decimal_places=2)
+    start_date = models.DateField()
+    end_date = models.DateField(blank=True, null=True)
+    wave_service_id = models.CharField(max_length=255, blank=True, null=True,
+                                       help_text="Wave service ID")
+
+    class Meta:
+        ordering = ['-start_date']
+        indexes = [
+            models.Index(fields=['account', 'start_date']),
+        ]
+        verbose_name = "Revenue"
+        verbose_name_plural = "Revenues"
+
+    def __str__(self):
+        return f"{self.account.name} - ${self.amount}"
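[Reviewer note: to make the Report labor-share arithmetic above concrete: if a service's address has an active labor rate of $300 and three non-Dispatch team members are assigned, each assigned member's report credits $100 for that service ($300 / 3); unassigned members, and the Dispatch profile, receive $0. A tiny sketch under those hypothetical numbers:]

from decimal import Decimal

share = report.get_service_labor_share(service)
assert share == Decimal('300.00') / 3  # 100.00 for an assigned, non-Dispatch member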
+ """ + name = models.CharField(max_length=255, blank=True, null=True, verbose_name="Name") + account_address = models.ForeignKey(AccountAddress, on_delete=models.PROTECT, related_name='schedules', + verbose_name="Account Service Address", null=True) + monday_service = models.BooleanField(default=False, verbose_name="Monday Service") + tuesday_service = models.BooleanField(default=False, verbose_name="Tuesday Service") + wednesday_service = models.BooleanField(default=False, verbose_name="Wednesday Service") + thursday_service = models.BooleanField(default=False, verbose_name="Thursday Service") + friday_service = models.BooleanField(default=False, verbose_name="Friday Service") + saturday_service = models.BooleanField(default=False, verbose_name="Saturday Service") + sunday_service = models.BooleanField(default=False, verbose_name="Sunday Service") + weekend_service = models.BooleanField(default=False, verbose_name="Weekend Service", + help_text=( + "When enabled, represents a single service visit on Friday that can be performed " + "any time between Friday-Sunday and verified by Monday morning. " + "Individual Fri/Sat/Sun service flags must be disabled when this is enabled.")) + schedule_exception = models.TextField(blank=True, null=True, verbose_name="Schedule Exceptions", + help_text=( + "Notes about any exceptions or special requirements for this schedule")) + start_date = models.DateField(verbose_name="Start Date", help_text="Date when this schedule becomes active") + end_date = models.DateField(blank=True, null=True, verbose_name="End Date", + help_text="Optional date when this schedule expires") + + class Meta: + ordering = ['-start_date'] + verbose_name = "Schedule" + verbose_name_plural = "Schedules" + indexes = [ + models.Index(fields=['account_address', 'start_date']), + models.Index(fields=['weekend_service']), + ] + + def __str__(self): + return f"Schedule for {self.account_address.account.name} - {self.account_address.name if self.account_address.name else 'Primary Service Address'}" + + def clean(self): + """Validate schedule configuration""" + super().clean() + + if self.end_date and self.start_date > self.end_date: + raise ValidationError({ + 'end_date': "End date must be after start date" + }) + + start = self.start_date + end = self.end_date or datetime.date.max + + qs = Schedule.objects.filter(account_address=self.account_address) + if self.pk: + qs = qs.exclude(pk=self.pk) + + overlaps = qs.filter( + Q(end_date__isnull=True, start_date__lte=end) | + Q(end_date__isnull=False, start_date__lte=end, end_date__gte=start) + ) + + if overlaps.exists(): + raise ValidationError("Schedule dates overlap with an existing schedule for this address.") + + # Validate weekend service configuration + if self.weekend_service: + weekend_days = [ + self.friday_service, + self.saturday_service, + self.sunday_service + ] + if any(weekend_days): + raise ValidationError({ + 'weekend_service': "When weekend service is enabled, Friday, Saturday, " + "and Sunday service flags must be disabled" + }) + + has_regular_service = any([ + self.monday_service, + self.tuesday_service, + self.wednesday_service, + self.thursday_service, + self.friday_service, + self.saturday_service, + self.sunday_service + ]) + if not has_regular_service and not self.weekend_service: + raise ValidationError( + "At least one service day or weekend service must be selected" + ) diff --git a/core/models/scope.py b/core/models/scope.py new file mode 100644 index 0000000..1d6803e --- /dev/null +++ b/core/models/scope.py @@ 
-0,0 +1,127 @@ +from django.db import models +from django_choices_field import TextChoicesField +from core.models.account import Account, AccountAddress +from core.models.base import BaseModel +from core.models.enums import TaskFrequencyChoices +from core.models.profile import TeamProfile +from core.models.service import Service + + +class Scope(BaseModel): + """Scope of work definition for an account address""" + name = models.CharField(max_length=255) + account = models.ForeignKey(Account, on_delete=models.PROTECT, related_name='scopes') + account_address = models.ForeignKey(AccountAddress, on_delete=models.PROTECT, related_name='scopes', + null=True, blank=True) + description = models.TextField(blank=True) + is_active = models.BooleanField(default=True) + + class Meta: + ordering = ['name'] + verbose_name = "Scope" + verbose_name_plural = "Scopes" + indexes = [ + models.Index(fields=['account', 'is_active']), + models.Index(fields=['account_address', 'is_active']), + ] + constraints = [ + models.UniqueConstraint( + fields=['account_address'], + condition=models.Q(is_active=True, account_address__isnull=False), + name='unique_active_scope_per_address' + ) + ] + + def __str__(self): + return f"{self.name} (account_id={self.account_id})" + + +class Area(BaseModel): + """Area within a scope (e.g., Kitchen, Restrooms, Lobby)""" + name = models.CharField(max_length=100) + scope = models.ForeignKey(Scope, on_delete=models.CASCADE, related_name='areas') + order = models.PositiveIntegerField(default=0) + + class Meta: + ordering = ['scope', 'order', 'name'] + verbose_name = "Area" + verbose_name_plural = "Areas" + indexes = [ + models.Index(fields=['scope', 'order']), + ] + + def __str__(self): + return f"{self.name} (scope_id={self.scope_id})" + + +class Task(BaseModel): + """Individual task template within an area""" + area = models.ForeignKey(Area, on_delete=models.CASCADE, related_name='tasks') + description = models.TextField() + checklist_description = models.TextField(blank=True) + frequency = TextChoicesField( + choices_enum=TaskFrequencyChoices, + default=TaskFrequencyChoices.AS_NEEDED, + help_text="How often the task should be performed" + ) + order = models.PositiveIntegerField(default=0) + is_conditional = models.BooleanField(default=False, help_text="Task marked 'where applicable'") + estimated_minutes = models.PositiveIntegerField(null=True, blank=True) + + class Meta: + ordering = ['area', 'order'] + verbose_name = "Task" + verbose_name_plural = "Tasks" + indexes = [ + models.Index(fields=['area', 'order']), + models.Index(fields=['frequency']), + ] + + def __str__(self): + return f"{self.description[:50]}... 
(area_id={self.area_id})"
+
+
+class TaskCompletion(BaseModel):
+    """Record of a task template being completed during a service visit"""
+    task = models.ForeignKey(Task, on_delete=models.PROTECT, related_name='completions')
+    service = models.ForeignKey(Service, on_delete=models.PROTECT, related_name='task_completions')
+    account_address = models.ForeignKey(AccountAddress, on_delete=models.PROTECT, null=True, related_name='task_completions')
+    completed_by = models.ForeignKey(TeamProfile, on_delete=models.PROTECT)
+    completed_at = models.DateTimeField()
+    notes = models.TextField(blank=True)
+
+    # Autopopulated for efficient monthly/quarterly queries
+    year = models.PositiveIntegerField(editable=False)
+    month = models.PositiveIntegerField(editable=False)
+
+    class Meta:
+        ordering = ['-completed_at']
+        verbose_name = "Task Completion"
+        verbose_name_plural = "Task Completions"
+        indexes = [
+            models.Index(fields=['service']),
+            models.Index(fields=['task', 'year', 'month']),
+            models.Index(fields=['completed_by', 'completed_at']),
+            models.Index(fields=['account_address']),
+        ]
+        constraints = [
+            # Prevent the same task being completed multiple times in the same service
+            models.UniqueConstraint(
+                fields=['service', 'task'],
+                name='unique_task_per_service'
+            )
+        ]
+
+    def save(self, *args, **kwargs):
+        """Autopopulate year/month from service date"""
+        # Backfill account_address from service if missing
+        if self.account_address_id is None and self.service_id and hasattr(self.service, 'account_address_id'):
+            self.account_address_id = getattr(self.service, 'account_address_id', None)
+
+        if self.service_id and hasattr(self.service, 'date'):
+            self.year = self.service.date.year
+            self.month = self.service.date.month
+        super().save(*args, **kwargs)
+
+    def __str__(self):
+        return f"TaskCompletion (task_id={self.task_id}, service_id={self.service_id})"
\ No newline at end of file
diff --git a/core/models/scope_template.py b/core/models/scope_template.py
new file mode 100644
index 0000000..7cee63e
--- /dev/null
+++ b/core/models/scope_template.py
@@ -0,0 +1,108 @@
+from django.db import models, transaction
+from django_choices_field import TextChoicesField
+
+from core.models.base import BaseModel
+from core.models.account import Account, AccountAddress
+from core.models.enums import TaskFrequencyChoices
+from core.models.scope import Scope, Area, Task
+
+
+class ScopeTemplate(BaseModel):
+    """Reusable blueprint for creating a Scope with Areas and Tasks"""
+    name = models.CharField(max_length=255, unique=True)
+    description = models.TextField(blank=True)
+    is_active = models.BooleanField(default=True)
+
+    class Meta:
+        ordering = ['name']
+        verbose_name = "Scope Template"
+        verbose_name_plural = "Scope Templates"
+        indexes = [models.Index(fields=['is_active'])]
+
+    def __str__(self):
+        return self.name
+
+    @transaction.atomic
+    def instantiate(
+        self,
+        *,
+        account: Account,
+        account_address: AccountAddress | None = None,
+        name: str | None = None,
+        description: str | None = None,
+        is_active: bool = True,
+    ) -> Scope:
+        """Create a Scope (and nested Areas/Tasks) from this template."""
+        scope = Scope.objects.create(
+            name=name or self.name,
+            account=account,
+            account_address=account_address,
+            description=description if description is not None else self.description,
+            is_active=is_active,
+        )
+
+        # Create Areas and Tasks preserving order
+        area_templates = self.area_templates.all().order_by('order', 'name')
+        for at in area_templates:
+            area = 
Area.objects.create(scope=scope, name=at.name, order=at.order) + task_templates = at.task_templates.all().order_by('order', 'id') + tasks_to_create = [ + Task( + area=area, + description=tt.description, + checklist_description=tt.checklist_description, + frequency=tt.frequency, + order=tt.order, + is_conditional=tt.is_conditional, + estimated_minutes=tt.estimated_minutes, + ) + for tt in task_templates + ] + if tasks_to_create: + Task.objects.bulk_create(tasks_to_create) + + return scope + + +class AreaTemplate(BaseModel): + """Reusable area definition belonging to a ScopeTemplate""" + scope_template = models.ForeignKey(ScopeTemplate, on_delete=models.CASCADE, related_name='area_templates') + name = models.CharField(max_length=100) + order = models.PositiveIntegerField(default=0) + + class Meta: + ordering = ['scope_template', 'order', 'name'] + verbose_name = "Area Template" + verbose_name_plural = "Area Templates" + indexes = [models.Index(fields=['scope_template', 'order'])] + + def __str__(self): + return f"{self.scope_template.name} - {self.name}" + + +class TaskTemplate(BaseModel): + """Reusable task definition belonging to an AreaTemplate""" + area_template = models.ForeignKey(AreaTemplate, on_delete=models.CASCADE, related_name='task_templates') + description = models.TextField() + checklist_description = models.TextField(blank=True) + frequency = TextChoicesField( + choices_enum=TaskFrequencyChoices, + default=TaskFrequencyChoices.AS_NEEDED, + help_text="How often the task should be performed" + ) + order = models.PositiveIntegerField(default=0) + is_conditional = models.BooleanField(default=False, help_text="Task marked 'where applicable'") + estimated_minutes = models.PositiveIntegerField(null=True, blank=True) + + class Meta: + ordering = ['area_template', 'order'] + verbose_name = "Task Template" + verbose_name_plural = "Task Templates" + indexes = [ + models.Index(fields=['area_template', 'order']), + models.Index(fields=['frequency']), + ] + + def __str__(self): + return f"{self.area_template.name}: {self.description[:50]}..." 
\ No newline at end of file
diff --git a/core/models/service.py b/core/models/service.py
new file mode 100644
index 0000000..d0555e7
--- /dev/null
+++ b/core/models/service.py
@@ -0,0 +1,37 @@
+from django.db import models
+from django_choices_field import TextChoicesField
+from core.models.enums import ServiceChoices
+from core.models.base import BaseModel
+from core.models.account import AccountAddress
+from core.models.profile import TeamProfile
+
+
+class Service(BaseModel):
+    """Service records for accounts"""
+    account = models.ForeignKey('Account', on_delete=models.PROTECT, related_name='services', null=True)
+    account_address = models.ForeignKey(AccountAddress, on_delete=models.PROTECT, related_name='services',
+                                        verbose_name="Account Service Address", null=True)
+    date = models.DateField()
+    status = TextChoicesField(choices_enum=ServiceChoices, default=ServiceChoices.SCHEDULED,
+                              help_text="Current status of the service")
+    team_members = models.ManyToManyField(TeamProfile, related_name='services')
+    notes = models.TextField(blank=True, null=True)
+    calendar_event_id = models.CharField(max_length=255, blank=True, null=True,
+                                         help_text="External calendar event ID")
+
+    class Meta:
+        ordering = ['-date']
+        indexes = [
+            models.Index(fields=['account_address', 'date']),
+        ]
+        constraints = [
+            models.UniqueConstraint(
+                fields=['account_address', 'date'],
+                name='unique_service_per_address_date'
+            )
+        ]
+
+    def __str__(self):
+        addr = self.account_address
+        if addr is None:
+            return f"Service on {self.date}"
+        label = addr.name or "Primary Service Address"
+        return f"Service for {addr.account.name} - {label} on {self.date}"
diff --git a/core/models/session.py b/core/models/session.py
new file mode 100644
index 0000000..7da750c
--- /dev/null
+++ b/core/models/session.py
@@ -0,0 +1,289 @@
+from django.db import models
+from django.core.exceptions import ValidationError
+from django.db.models import Q, F, CheckConstraint, Index, UniqueConstraint
+from core.models.base import BaseModel, Note
+from core.models.profile import TeamProfile
+from core.models.service import Service
+from core.models.project import Project
+from core.models.scope import Scope, TaskCompletion
+from core.models.project_scope import ProjectScope, ProjectScopeTaskCompletion
+from core.models.customer import Customer
+from core.models.account import Account, AccountAddress
+
+
+class Session(BaseModel):
+    """Session records"""
+    created_by = models.ForeignKey(
+        TeamProfile,
+        on_delete=models.PROTECT,
+        related_name="%(class)s_sessions",
+        related_query_name="%(class)s_session",
+    )
+    closed_by = models.ForeignKey(
+        TeamProfile,
+        on_delete=models.PROTECT,
+        related_name="%(class)s_closed_sessions",
+        related_query_name="%(class)s_closed_session",
+        null=True,
+        blank=True,
+    )
+    date = models.DateField()
+
+    class Meta:
+        abstract = True
+        get_latest_by = 'date'
+        ordering = ['-date']
+
+
+class ServiceSession(Session):
+    """Service session records"""
+    service = models.ForeignKey(
+        Service,
+        on_delete=models.PROTECT,
+        related_name='sessions'
+    )
+    account = models.ForeignKey(
+        Account,
+        on_delete=models.PROTECT,
+        related_name='service_sessions'
+    )
+    account_address = models.ForeignKey(
+        AccountAddress,
+        on_delete=models.PROTECT,
+        related_name='service_sessions'
+    )
+    customer = models.ForeignKey(
+        Customer,
+        on_delete=models.PROTECT,
+        related_name='service_sessions'
+    )
+    scope = models.ForeignKey(
+        Scope,
+        on_delete=models.PROTECT,
+        related_name='service_sessions'
+    )
+    start = models.DateTimeField()
+    end = models.DateTimeField(null=True,
blank=True) + completed_tasks = models.ManyToManyField( + TaskCompletion, + related_name='service_sessions', + blank=True, + ) + + class Meta(Session.Meta): + constraints = [ + CheckConstraint( + name='service_session_end_gt_start_or_null', + condition=Q(end__isnull=True) | Q(end__gt=F('start')), + ), + UniqueConstraint( + fields=['service'], + condition=Q(end__isnull=True), + name='unique_active_service_session', + ), + ] + indexes = [ + Index(fields=['service', 'start']), + Index(fields=['account', 'start']), + Index(fields=['created_by', 'start']), + Index(fields=['date']), + ] + ordering = ['-start'] + + def clean(self): + if self.start: + self.date = self.start.date() + + errors = {} + + if self.end is not None and self.start is not None and self.end <= self.start: + errors['end'] = "End must be after start." + + if self.account_address_id and self.account_id: + if self.account_address.account_id != self.account_id: + errors['account_address'] = "Account address must belong to the selected account." + if self.account_id and self.customer_id: + if getattr(self.account, 'customer_id', None) and self.account.customer_id != self.customer_id: + errors['customer'] = "Customer must match the account's customer." + + if errors: + raise ValidationError(errors) + + def save(self, *args, **kwargs): + self.full_clean() + return super().save(*args, **kwargs) + + @property + def duration_seconds(self) -> int: + if self.start and self.end: + return int((self.end - self.start).total_seconds()) + return 0 + + @property + def is_active(self) -> bool: + """A session is active if it has not been closed.""" + return self.end is None + +class ProjectSession(Session): + """Project session records""" + project = models.ForeignKey( + Project, + on_delete=models.PROTECT, + related_name='sessions', + ) + account = models.ForeignKey( + Account, + on_delete=models.PROTECT, + related_name='project_sessions', + null=True, + blank=True, + ) + account_address = models.ForeignKey( + AccountAddress, + on_delete=models.PROTECT, + related_name='project_sessions', + null=True, + blank=True, + ) + customer = models.ForeignKey( + Customer, + on_delete=models.PROTECT, + related_name='project_sessions', + ) + scope = models.ForeignKey( + ProjectScope, + on_delete=models.PROTECT, + related_name='project_sessions', + ) + start = models.DateTimeField() + end = models.DateTimeField(null=True, blank=True) + completed_tasks = models.ManyToManyField( + ProjectScopeTaskCompletion, + related_name='project_sessions', + blank=True, + ) + + class Meta(Session.Meta): + constraints = [ + CheckConstraint( + name='project_session_end_gt_start_or_null', + condition=Q(end__isnull=True) | Q(end__gt=F('start')), + ), + UniqueConstraint( + fields=['project'], + condition=Q(end__isnull=True), + name='unique_active_project_session', + ), + ] + indexes = [ + Index(fields=['project', 'start']), + Index(fields=['account', 'start']), + Index(fields=['created_by', 'start']), + Index(fields=['date']), + ] + ordering = ['-start'] + + def clean(self): + if self.start: + self.date = self.start.date() + + errors = {} + + if self.end is not None and self.start is not None and self.end <= self.start: + errors['end'] = "End must be after start." + + # Account/address relationship + if self.account_address_id and self.account_id: + if self.account_address.account_id != self.account_id: + errors['account_address'] = "Account address must belong to the selected account." 
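+
+        # The getattr() guards used in the checks below tolerate missing links:
+        # a cross-entity rule only fires when both sides of the link are set.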
+
+        # Customer/account relationship
+        if self.account_id and self.customer_id:
+            if getattr(self.account, 'customer_id', None) and self.account.customer_id != self.customer_id:
+                errors['customer'] = "Customer must match the account's customer."
+
+        # Project/linkage validations (when available on Project); only touch
+        # self.project once a project has actually been assigned
+        if self.project_id:
+            # Ensure project.account_address aligns with session.account_address
+            if getattr(self.project, 'account_address_id', None) and self.project.account_address_id != self.account_address_id:
+                errors['project'] = "Project's account address must match the session's account address."
+            # If project has an account_address, ensure session.account matches that address's account
+            if getattr(self.project, 'account_address_id', None) and self.account_id:
+                proj_account_id = getattr(self.project.account_address, 'account_id', None)
+                if proj_account_id and proj_account_id != self.account_id:
+                    errors['account'] = "Project's account must match the session's account."
+
+            # Customer must match the project's customer
+            if getattr(self.project, 'customer_id', None) and self.project.customer_id != self.customer_id:
+                errors['project'] = "Project's customer must match the session's customer."
+
+            # Scope must belong to the same project
+            if self.scope_id and getattr(self.scope, 'project_id', None) != self.project_id:
+                errors['scope'] = "Selected scope must belong to the current project."
+
+        if errors:
+            raise ValidationError(errors)
+
+    def save(self, *args, **kwargs):
+        self.full_clean()
+        return super().save(*args, **kwargs)
+
+    @property
+    def duration_seconds(self) -> int:
+        if self.start and self.end:
+            return int((self.end - self.start).total_seconds())
+        return 0
+
+    @property
+    def is_active(self) -> bool:
+        """A session is active if it has not been closed."""
+        return self.end is None
+
+
+class SessionNote(Note):
+    """
+    Abstract base model for session notes.
+    Inherits content, author, internal, and timestamps from Note.
+    """
+    class Meta:
+        abstract = True
+        ordering = ('-created_at',)
+
+
+class ServiceSessionNote(SessionNote):
+    """Notes attached to service sessions"""
+    session = models.ForeignKey(
+        ServiceSession,
+        on_delete=models.CASCADE,
+        related_name='notes'
+    )
+
+    class Meta(SessionNote.Meta):
+        indexes = [
+            Index(fields=['session', '-created_at']),
+            Index(fields=['author', '-created_at']),
+        ]
+
+    def __str__(self):
+        preview = self.content[:50] + "..." if len(self.content) > 50 else self.content
+        return f"ServiceSession {self.session_id}: {preview}"
+
+
+class ProjectSessionNote(SessionNote):
+    """Notes attached to project sessions"""
+    session = models.ForeignKey(
+        ProjectSession,
+        on_delete=models.CASCADE,
+        related_name='notes'
+    )
+
+    class Meta(SessionNote.Meta):
+        indexes = [
+            Index(fields=['session', '-created_at']),
+            Index(fields=['author', '-created_at']),
+        ]
+
+    def __str__(self):
+        preview = self.content[:50] + "..." 
if len(self.content) > 50 else self.content + return f"ProjectSession {self.session_id}: {preview}" diff --git a/core/models/session_image.py b/core/models/session_image.py new file mode 100644 index 0000000..6885570 --- /dev/null +++ b/core/models/session_image.py @@ -0,0 +1,61 @@ +import os +import uuid +from django.db import models +from core.models.base import Image +from core.models.session import ServiceSession, ProjectSession + + +def _service_session_upload_to(instance: "ServiceSessionImage", filename: str) -> str: + base, ext = os.path.splitext(filename) + ext = ext.lower() or ".jpg" + sid = instance.service_session_id or "unassigned" + return f"uploads/service_session/{sid}/{uuid.uuid4().hex}{ext}" + + +def _service_session_thumb_upload_to(instance: "ServiceSessionImage", _filename: str) -> str: + sid = instance.service_session_id or "unassigned" + return f"uploads/service_session/{sid}/thumb/{uuid.uuid4().hex}.jpg" + + +def _project_session_upload_to(instance: "ProjectSessionImage", filename: str) -> str: + base, ext = os.path.splitext(filename) + ext = ext.lower() or ".jpg" + sid = instance.project_session_id or "unassigned" + return f"uploads/project_session/{sid}/{uuid.uuid4().hex}{ext}" + + +def _project_session_thumb_upload_to(instance: "ProjectSessionImage", _filename: str) -> str: + sid = instance.project_session_id or "unassigned" + return f"uploads/project_session/{sid}/thumb/{uuid.uuid4().hex}.jpg" + + +class ServiceSessionImage(Image): + service_session = models.ForeignKey(ServiceSession, on_delete=models.PROTECT, related_name='service_session_images') + image = models.ImageField(upload_to=_service_session_upload_to) + thumbnail = models.ImageField(upload_to=_service_session_thumb_upload_to, blank=True, null=True) + + def __str__(self) -> str: + return self.title or f"ServiceSessionImage {self.id}" + + class Meta: + ordering = ('-created_at',) + indexes = [ + models.Index(fields=['service_session', 'created_at']), + models.Index(fields=['created_at']), + ] + + +class ProjectSessionImage(Image): + project_session = models.ForeignKey(ProjectSession, on_delete=models.PROTECT, related_name='project_session_images') + image = models.ImageField(upload_to=_project_session_upload_to) + thumbnail = models.ImageField(upload_to=_project_session_thumb_upload_to, blank=True, null=True) + + def __str__(self) -> str: + return self.title or f"ProjectSessionImage {self.id}" + + class Meta: + ordering = ('-created_at',) + indexes = [ + models.Index(fields=['project_session', 'created_at']), + models.Index(fields=['created_at']), + ] diff --git a/core/models/session_video.py b/core/models/session_video.py new file mode 100644 index 0000000..3bc42ff --- /dev/null +++ b/core/models/session_video.py @@ -0,0 +1,163 @@ +import os +import uuid +from django.db import models +from core.models.base import BaseModel +from core.models.session import ServiceSession, ProjectSession + + +def _service_session_video_upload_to(instance: "ServiceSessionVideo", filename: str) -> str: + """Upload path for service session videos.""" + base, ext = os.path.splitext(filename) + ext = ext.lower() or ".mp4" + sid = instance.service_session_id or "unassigned" + return f"videos/service_session/{sid}/{uuid.uuid4().hex}{ext}" + + +def _service_session_video_thumb_upload_to(instance: "ServiceSessionVideo", _filename: str) -> str: + """Upload path for service session video thumbnails.""" + sid = instance.service_session_id or "unassigned" + return f"videos/service_session/{sid}/thumb/{uuid.uuid4().hex}.jpg" + + +def 
_project_session_video_upload_to(instance: "ProjectSessionVideo", filename: str) -> str: + """Upload path for project session videos.""" + base, ext = os.path.splitext(filename) + ext = ext.lower() or ".mp4" + sid = instance.project_session_id or "unassigned" + return f"videos/project_session/{sid}/{uuid.uuid4().hex}{ext}" + + +def _project_session_video_thumb_upload_to(instance: "ProjectSessionVideo", _filename: str) -> str: + """Upload path for project session video thumbnails.""" + sid = instance.project_session_id or "unassigned" + return f"videos/project_session/{sid}/thumb/{uuid.uuid4().hex}.jpg" + + +class Video(BaseModel): + """ + Abstract base for video-bearing models. + + Features: + - Stores original video file with metadata + - Optional thumbnail image (can be extracted from video or uploaded separately) + - Captures dimensions, duration, file size, and content_type + - Tracks the uploading team profile (optional) + - Storage-agnostic (respects DEFAULT_FILE_STORAGE) + """ + title = models.CharField(max_length=255, blank=True) + video = models.FileField(upload_to="videos/") # Override in subclasses + thumbnail = models.ImageField(upload_to="videos/thumbs/", blank=True, null=True) + content_type = models.CharField(max_length=100, blank=True) + + # Video-specific metadata + duration_seconds = models.PositiveIntegerField( + default=0, + help_text="Video duration in seconds" + ) + file_size_bytes = models.PositiveBigIntegerField( + default=0, + help_text="File size in bytes" + ) + width = models.PositiveIntegerField(default=0) + height = models.PositiveIntegerField(default=0) + + uploaded_by_team_profile = models.ForeignKey( + 'TeamProfile', + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="%(class)s_videos" + ) + notes = models.TextField(blank=True) + internal = models.BooleanField(default=True) + + class Meta: + abstract = True + ordering = ('-created_at',) + + def __str__(self) -> str: + return self.title or str(self.id) + + def save(self, *args, **kwargs): + """ + Save and capture file size on creation. + Video metadata (duration, dimensions) should be set before save + by the upload handler using video processing utilities. + """ + if self._state.adding and self.video and hasattr(self.video, 'size'): + self.file_size_bytes = self.video.size + super().save(*args, **kwargs) + + def delete(self, *args, **kwargs): + """ + Delete the model and its associated files from storage. 
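+
+        The database row is removed first; storage deletions run afterwards and
+        swallow errors, so a missing file never blocks the delete.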
+ """ + # Store file names before delete + video_name = self.video.name if self.video else None + thumbnail_name = self.thumbnail.name if self.thumbnail else None + + # Delete the model instance + super().delete(*args, **kwargs) + + # Delete files from storage + if video_name: + try: + self.video.storage.delete(video_name) + except Exception: + pass # File may already be deleted or inaccessible + + if thumbnail_name: + try: + self.thumbnail.storage.delete(thumbnail_name) + except Exception: + pass # File may already be deleted or inaccessible + + +class ServiceSessionVideo(Video): + """Video attached to a ServiceSession for documentation.""" + service_session = models.ForeignKey( + ServiceSession, + on_delete=models.PROTECT, + related_name='service_session_videos' + ) + video = models.FileField(upload_to=_service_session_video_upload_to) + thumbnail = models.ImageField( + upload_to=_service_session_video_thumb_upload_to, + blank=True, + null=True + ) + + def __str__(self) -> str: + return self.title or f"ServiceSessionVideo {self.id}" + + class Meta: + ordering = ('-created_at',) + indexes = [ + models.Index(fields=['service_session', 'created_at']), + models.Index(fields=['created_at']), + ] + + +class ProjectSessionVideo(Video): + """Video attached to a ProjectSession for documentation.""" + project_session = models.ForeignKey( + ProjectSession, + on_delete=models.PROTECT, + related_name='project_session_videos' + ) + video = models.FileField(upload_to=_project_session_video_upload_to) + thumbnail = models.ImageField( + upload_to=_project_session_video_thumb_upload_to, + blank=True, + null=True + ) + + def __str__(self) -> str: + return self.title or f"ProjectSessionVideo {self.id}" + + class Meta: + ordering = ('-created_at',) + indexes = [ + models.Index(fields=['project_session', 'created_at']), + models.Index(fields=['created_at']), + ] diff --git a/core/permissions.py b/core/permissions.py new file mode 100644 index 0000000..f23f045 --- /dev/null +++ b/core/permissions.py @@ -0,0 +1,13 @@ +""" +Custom permissions for Ory-based authentication. +""" +from rest_framework.permissions import BasePermission + + +class IsProfileAuthenticated(BasePermission): + """ + Permission that checks if the request has an authenticated profile from Ory. + Requires OryHeaderAuthenticationMiddleware to set request.profile. + """ + def has_permission(self, request, view): + return hasattr(request, 'profile') and request.profile is not None diff --git a/core/services/__init__.py b/core/services/__init__.py new file mode 100644 index 0000000..47a8ba4 --- /dev/null +++ b/core/services/__init__.py @@ -0,0 +1,2 @@ +from core.services.scope_builder import * +from core.services.session_service import * \ No newline at end of file diff --git a/core/services/email_renderer.py b/core/services/email_renderer.py new file mode 100644 index 0000000..b77a4d5 --- /dev/null +++ b/core/services/email_renderer.py @@ -0,0 +1,291 @@ +""" +HTML email rendering service for notifications. +Renders notification data into branded HTML emails using Django templates. 
+""" +from datetime import datetime +from typing import Dict, Any, List + +from django.template.loader import render_to_string + + +# Event type to display configuration mapping +# Colors match the frontend brand palette (layout.css) +EVENT_TYPE_CONFIG: Dict[str, Dict[str, str]] = { + # Customer events - Primary Blue + 'CUSTOMER_CREATED': {'label': 'Customer Created', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_UPDATED': {'label': 'Customer Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_DELETED': {'label': 'Customer Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'CUSTOMER_STATUS_CHANGED': {'label': 'Status Changed', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_ADDRESS_CREATED': {'label': 'Address Added', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_ADDRESS_UPDATED': {'label': 'Address Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_ADDRESS_DELETED': {'label': 'Address Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'CUSTOMER_CONTACT_CREATED': {'label': 'Contact Added', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_CONTACT_UPDATED': {'label': 'Contact Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CUSTOMER_CONTACT_DELETED': {'label': 'Contact Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Account events - Primary Blue + 'ACCOUNT_CREATED': {'label': 'Account Created', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_UPDATED': {'label': 'Account Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_DELETED': {'label': 'Account Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'ACCOUNT_STATUS_CHANGED': {'label': 'Status Changed', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_ADDRESS_CREATED': {'label': 'Address Added', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_ADDRESS_UPDATED': {'label': 'Address Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_ADDRESS_DELETED': {'label': 'Address Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'ACCOUNT_CONTACT_CREATED': {'label': 'Contact Added', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_CONTACT_UPDATED': {'label': 'Contact Updated', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'ACCOUNT_CONTACT_DELETED': {'label': 'Contact Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Service events - Secondary Green / Red for cancel + 'SERVICE_CREATED': {'label': 'Service Scheduled', 'color': '#458c5e', 'bg': '#458c5e20'}, + 'SERVICE_UPDATED': {'label': 'Service Updated', 'color': '#458c5e', 'bg': '#458c5e20'}, + 'SERVICE_DELETED': {'label': 'Service Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'SERVICE_STATUS_CHANGED': {'label': 'Status Changed', 'color': '#458c5e', 'bg': '#458c5e20'}, + 'SERVICE_COMPLETED': {'label': 'Service Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'SERVICE_CANCELLED': {'label': 'Service Cancelled', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'SERVICE_TEAM_ASSIGNED': {'label': 'Team Assigned', 'color': '#458c5e', 'bg': '#458c5e20'}, + 'SERVICE_TEAM_UNASSIGNED': {'label': 'Team Unassigned', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'SERVICE_DISPATCHED': {'label': 'Service Dispatched', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'SERVICES_BULK_GENERATED': {'label': 'Services Generated', 'color': '#458c5e', 'bg': '#458c5e20'}, + + # Service session events + 'SERVICE_SESSION_OPENED': {'label': 'Session Started', 'color': '#458c5e', 'bg': '#458c5e20'}, + 'SERVICE_SESSION_CLOSED': {'label': 'Session Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'SERVICE_SESSION_REVERTED': {'label': 'Session Reverted', 
'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'SERVICE_TASK_COMPLETED': {'label': 'Task Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'SERVICE_TASK_UNCOMPLETED': {'label': 'Task Uncompleted', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + + # Schedule events - Cyan + 'SCHEDULE_CREATED': {'label': 'Schedule Created', 'color': '#0891b2', 'bg': '#0891b220'}, + 'SCHEDULE_UPDATED': {'label': 'Schedule Updated', 'color': '#0891b2', 'bg': '#0891b220'}, + 'SCHEDULE_DELETED': {'label': 'Schedule Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'SCHEDULE_FREQUENCY_CHANGED': {'label': 'Frequency Changed', 'color': '#0891b2', 'bg': '#0891b220'}, + + # Project events - Orange + 'PROJECT_CREATED': {'label': 'Project Created', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_UPDATED': {'label': 'Project Updated', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_STATUS_CHANGED': {'label': 'Status Changed', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_COMPLETED': {'label': 'Project Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'PROJECT_CANCELLED': {'label': 'Project Cancelled', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PROJECT_DISPATCHED': {'label': 'Project Dispatched', 'color': '#3b78c4', 'bg': '#3b78c420'}, + + # Project session events + 'PROJECT_SESSION_OPENED': {'label': 'Session Started', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SESSION_CLOSED': {'label': 'Session Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'PROJECT_SESSION_REVERTED': {'label': 'Session Reverted', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'PROJECT_TASK_COMPLETED': {'label': 'Task Completed', 'color': '#22c546', 'bg': '#22c54620'}, + 'PROJECT_TASK_UNCOMPLETED': {'label': 'Task Uncompleted', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + + # Project scope events - Orange + 'PROJECT_SCOPE_CREATED': {'label': 'Scope Created', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_UPDATED': {'label': 'Scope Updated', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_DELETED': {'label': 'Scope Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PROJECT_SCOPE_CATEGORY_CREATED': {'label': 'Category Added', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_CATEGORY_UPDATED': {'label': 'Category Updated', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_CATEGORY_DELETED': {'label': 'Category Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PROJECT_SCOPE_TASK_CREATED': {'label': 'Task Added', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_TASK_UPDATED': {'label': 'Task Updated', 'color': '#e16a36', 'bg': '#e16a3620'}, + 'PROJECT_SCOPE_TASK_DELETED': {'label': 'Task Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PROJECT_SCOPE_TEMPLATE_INSTANTIATED': {'label': 'Template Applied', 'color': '#e16a36', 'bg': '#e16a3620'}, + + # Scope events - Accent3 (teal-ish) + 'SCOPE_CREATED': {'label': 'Scope Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'SCOPE_UPDATED': {'label': 'Scope Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'SCOPE_DELETED': {'label': 'Scope Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'AREA_CREATED': {'label': 'Area Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'AREA_UPDATED': {'label': 'Area Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'AREA_DELETED': {'label': 'Area Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'TASK_CREATED': {'label': 'Task Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'TASK_UPDATED': {'label': 'Task Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'TASK_DELETED': {'label': 'Task Deleted', 
'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'TASK_COMPLETION_RECORDED': {'label': 'Task Completed', 'color': '#22c546', 'bg': '#22c54620'}, + + # Scope template events + 'SCOPE_TEMPLATE_CREATED': {'label': 'Template Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'SCOPE_TEMPLATE_UPDATED': {'label': 'Template Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'SCOPE_TEMPLATE_DELETED': {'label': 'Template Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'SCOPE_TEMPLATE_INSTANTIATED': {'label': 'Template Applied', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'AREA_TEMPLATE_CREATED': {'label': 'Area Template Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'AREA_TEMPLATE_UPDATED': {'label': 'Area Template Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'AREA_TEMPLATE_DELETED': {'label': 'Area Template Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'TASK_TEMPLATE_CREATED': {'label': 'Task Template Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'TASK_TEMPLATE_UPDATED': {'label': 'Task Template Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'TASK_TEMPLATE_DELETED': {'label': 'Task Template Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Team profile events - Rose + 'TEAM_PROFILE_CREATED': {'label': 'Team Member Added', 'color': '#f43f5e', 'bg': '#f43f5e20'}, + 'TEAM_PROFILE_UPDATED': {'label': 'Profile Updated', 'color': '#f43f5e', 'bg': '#f43f5e20'}, + 'TEAM_PROFILE_DELETED': {'label': 'Team Member Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'TEAM_PROFILE_ROLE_CHANGED': {'label': 'Role Changed', 'color': '#f43f5e', 'bg': '#f43f5e20'}, + + # Customer profile events - Teal + 'CUSTOMER_PROFILE_CREATED': {'label': 'Access Created', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'CUSTOMER_PROFILE_UPDATED': {'label': 'Profile Updated', 'color': '#14b8a6', 'bg': '#14b8a620'}, + 'CUSTOMER_PROFILE_DELETED': {'label': 'Access Removed', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'CUSTOMER_PROFILE_ACCESS_GRANTED': {'label': 'Access Granted', 'color': '#22c546', 'bg': '#22c54620'}, + 'CUSTOMER_PROFILE_ACCESS_REVOKED': {'label': 'Access Revoked', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Punchlist events - Warning Yellow + 'ACCOUNT_PUNCHLIST_CREATED': {'label': 'Issue Reported', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'ACCOUNT_PUNCHLIST_UPDATED': {'label': 'Issue Updated', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'ACCOUNT_PUNCHLIST_DELETED': {'label': 'Issue Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PROJECT_PUNCHLIST_CREATED': {'label': 'Issue Reported', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'PROJECT_PUNCHLIST_UPDATED': {'label': 'Issue Updated', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'PROJECT_PUNCHLIST_DELETED': {'label': 'Issue Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'PUNCHLIST_STATUS_CHANGED': {'label': 'Issue Status Changed', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'PUNCHLIST_PRIORITY_CHANGED': {'label': 'Priority Changed', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + + # Session media events - Purple + 'SESSION_IMAGE_UPLOADED': {'label': 'Image Uploaded', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_IMAGE_UPDATED': {'label': 'Image Updated', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_IMAGE_DELETED': {'label': 'Image Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'SESSION_VIDEO_UPLOADED': {'label': 'Video Uploaded', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_VIDEO_UPDATED': {'label': 'Video Updated', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_VIDEO_DELETED': {'label': 'Video Deleted', 'color': 
'#e14a4a', 'bg': '#e14a4a20'}, + 'SESSION_MEDIA_INTERNAL_FLAGGED': {'label': 'Media Flagged Internal', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + + # Session notes events + 'SESSION_NOTE_CREATED': {'label': 'Note Added', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_NOTE_UPDATED': {'label': 'Note Updated', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'SESSION_NOTE_DELETED': {'label': 'Note Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Report events - Purple + 'REPORT_CREATED': {'label': 'Report Created', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'REPORT_SUBMITTED': {'label': 'Report Submitted', 'color': '#8b6bc2', 'bg': '#8b6bc220'}, + 'REPORT_APPROVED': {'label': 'Report Approved', 'color': '#22c546', 'bg': '#22c54620'}, + + # Invoice events - Indigo + 'INVOICE_GENERATED': {'label': 'Invoice Generated', 'color': '#6366f1', 'bg': '#6366f120'}, + 'INVOICE_SENT': {'label': 'Invoice Sent', 'color': '#6366f1', 'bg': '#6366f120'}, + 'INVOICE_PAID': {'label': 'Invoice Paid', 'color': '#22c546', 'bg': '#22c54620'}, + 'INVOICE_OVERDUE': {'label': 'Invoice Overdue', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'INVOICE_CANCELLED': {'label': 'Invoice Cancelled', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Labor & Revenue events - Indigo + 'LABOR_RATE_CREATED': {'label': 'Labor Rate Created', 'color': '#6366f1', 'bg': '#6366f120'}, + 'LABOR_RATE_UPDATED': {'label': 'Labor Rate Updated', 'color': '#6366f1', 'bg': '#6366f120'}, + 'LABOR_RATE_DELETED': {'label': 'Labor Rate Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + 'REVENUE_RATE_CREATED': {'label': 'Revenue Rate Created', 'color': '#6366f1', 'bg': '#6366f120'}, + 'REVENUE_RATE_UPDATED': {'label': 'Revenue Rate Updated', 'color': '#6366f1', 'bg': '#6366f120'}, + 'REVENUE_RATE_DELETED': {'label': 'Revenue Rate Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, + + # Messaging events - Primary Blue + 'CONVERSATION_CREATED': {'label': 'Conversation Started', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CONVERSATION_ARCHIVED': {'label': 'Conversation Archived', 'color': '#64748b', 'bg': '#64748b20'}, + 'CONVERSATION_PARTICIPANT_ADDED': {'label': 'Participant Added', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'CONVERSATION_PARTICIPANT_REMOVED': {'label': 'Participant Removed', 'color': '#d8a01d', 'bg': '#d8a01d20'}, + 'MESSAGE_SENT': {'label': 'Message Sent', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'MESSAGE_RECEIVED': {'label': 'Message Received', 'color': '#3b78c4', 'bg': '#3b78c420'}, + 'MESSAGE_READ': {'label': 'Message Read', 'color': '#64748b', 'bg': '#64748b20'}, + 'MESSAGE_DELETED': {'label': 'Message Deleted', 'color': '#e14a4a', 'bg': '#e14a4a20'}, +} + +# Default configuration for unknown event types +DEFAULT_EVENT_CONFIG = {'label': 'Notification', 'color': '#3b78c4', 'bg': '#3b78c420'} + +# Metadata keys to display in emails (with human-readable labels) +METADATA_DISPLAY_KEYS: Dict[str, str] = { + 'account_name': 'Account', + 'customer_name': 'Customer', + 'project_name': 'Project', + 'service_date': 'Service Date', + 'date': 'Date', + 'scheduled_date': 'Scheduled Date', + 'status': 'Status', + 'old_status': 'Previous Status', + 'new_status': 'New Status', + 'invoice_number': 'Invoice #', + 'amount': 'Amount', + 'team_member_name': 'Team Member', + 'month': 'Month', + 'count': 'Count', + 'old_role': 'Previous Role', + 'new_role': 'New Role', + 'old_frequency': 'Previous Frequency', + 'new_frequency': 'New Frequency', + 'priority': 'Priority', + 'frequency': 'Frequency', +} + + +class NotificationEmailRenderer: + """ + Renders 
notifications as HTML emails using Django templates. + """ + + @staticmethod + def render_html( + notification, + recipient_name: str, + recipient_email: str + ) -> str: + """ + Render notification as branded HTML email. + + Args: + notification: Notification model instance + recipient_name: Display name of the recipient + recipient_email: Email address of the recipient + + Returns: + Rendered HTML string ready for sending + """ + event = notification.event + event_type = event.event_type if event else None + + # Get event type display configuration + event_config = EVENT_TYPE_CONFIG.get(event_type, DEFAULT_EVENT_CONFIG) + + # Build metadata items for display + metadata = event.metadata if event and event.metadata else {} + metadata_items = NotificationEmailRenderer._build_metadata_items(metadata) + + context = { + 'subject': notification.subject, + 'body': notification.body, + 'action_url': notification.action_url or '', + 'recipient_name': recipient_name, + 'recipient_email': recipient_email, + 'event_type_label': event_config['label'], + 'event_type_color': event_config['color'], + 'event_type_bg_color': event_config['bg'], + 'metadata_items': metadata_items, + 'current_year': datetime.now().year, + } + + return render_to_string('email/base_notification.html', context) + + @staticmethod + def _build_metadata_items(metadata: Dict[str, Any]) -> List[Dict[str, str]]: + """ + Build list of metadata items for display in the email. + + Args: + metadata: Event metadata dictionary + + Returns: + List of dicts with 'label' and 'value' keys + """ + items = [] + + for key, label in METADATA_DISPLAY_KEYS.items(): + if key not in metadata or metadata[key] is None: + continue + + value = metadata[key] + + # Format specific value types + if isinstance(value, bool): + value = 'Yes' if value else 'No' + elif key == 'amount' and isinstance(value, (int, float)): + value = f'${value:,.2f}' + elif key in ('status', 'old_status', 'new_status', 'old_role', 'new_role'): + # Format status/role values (e.g., PENDING -> Pending) + value = str(value).replace('_', ' ').title() + else: + value = str(value) + + # Skip empty strings + if value: + items.append({'label': label, 'value': value}) + + return items diff --git a/core/services/email_service.py b/core/services/email_service.py new file mode 100644 index 0000000..d81a3a2 --- /dev/null +++ b/core/services/email_service.py @@ -0,0 +1,303 @@ +""" +Emailer Microservice Client + +This module provides integration with the Emailer microservice, +a Rust-based REST API for sending emails via Gmail API. + +Production URL: https://email.example.com +""" +import requests +from typing import List, Dict, Optional +from django.conf import settings +import logging + +logger = logging.getLogger(__name__) + + +class EmailerServiceError(Exception): + """Base exception for emailer service errors""" + pass + + +class EmailerClient: + """ + Client for the Emailer microservice. 
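+
+    Wraps the service's REST endpoints (/api/v1/emails, /api/v1/templates)
+    and authenticates every request with an X-API-Key header.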
+
+    Features:
+    - Template-based emails with variable substitution
+    - Plain text and HTML email support
+    - Attachment support
+    - User impersonation for domain-wide delegation
+    - Health checking
+
+    Example:
+        emailer = EmailerClient()
+        emailer.send_template_email(
+            to=['user@example.com'],
+            template_id='notification',
+            variables={
+                'subject': 'Project Completed',
+                'team_member': 'John Doe',
+                'message': 'The project has been marked as completed.',
+            },
+            impersonate_user='noreply@example.com'
+        )
+    """
+
+    def __init__(self, base_url: Optional[str] = None, api_key: Optional[str] = None):
+        """
+        Initialize the emailer client.
+
+        Args:
+            base_url: Base URL of the emailer service. Defaults to settings.EMAILER_BASE_URL
+            api_key: API key for authentication. Defaults to settings.EMAILER_API_KEY
+        """
+        self.base_url = base_url or getattr(
+            settings, 'EMAILER_BASE_URL', 'https://email.example.com'
+        )
+        self.api_key = api_key or getattr(settings, 'EMAILER_API_KEY', '')
+        self.timeout = 30  # seconds
+
+        if not self.api_key:
+            logger.warning("EMAILER_API_KEY not configured. Email sending will fail.")
+
+    def _get_headers(self, impersonate_user: Optional[str] = None) -> Dict[str, str]:
+        """
+        Build request headers with authentication and optional impersonation.
+
+        Args:
+            impersonate_user: Email address to send from (requires domain-wide delegation)
+
+        Returns:
+            Dict of HTTP headers
+        """
+        headers = {
+            'Content-Type': 'application/json',
+            'X-API-Key': self.api_key,
+        }
+        if impersonate_user:
+            headers['X-Impersonate-User'] = impersonate_user
+        return headers
+
+    def _handle_response(self, response: requests.Response) -> Dict:
+        """
+        Handle API response and raise appropriate exceptions.
+
+        Args:
+            response: requests Response object
+
+        Returns:
+            Parsed JSON response
+
+        Raises:
+            EmailerServiceError: If the request failed
+        """
+        try:
+            response.raise_for_status()
+            return response.json() if response.content else {}
+        except requests.exceptions.HTTPError as e:
+            error_detail = "Unknown error"
+            try:
+                error_data = response.json()
+                error_detail = error_data.get('message', error_data.get('error', str(e)))
+            except Exception:
+                error_detail = response.text or str(e)
+
+            logger.error(
+                f"Emailer API error: {response.status_code} - {error_detail}",
+                extra={
+                    'status_code': response.status_code,
+                    'url': response.url,
+                    'error': error_detail
+                }
+            )
+            raise EmailerServiceError(f"Email service error: {error_detail}")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Emailer request failed: {str(e)}")
+            raise EmailerServiceError(f"Failed to connect to email service: {str(e)}")
+
+    def send_email(
+        self,
+        to: List[str],
+        subject: str,
+        body: str,
+        cc: Optional[List[str]] = None,
+        bcc: Optional[List[str]] = None,
+        impersonate_user: Optional[str] = None,
+    ) -> Dict:
+        """
+        Send a plain email.
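+
+        Example (illustrative values):
+
+            EmailerClient().send_email(
+                to=['ops@example.com'],
+                subject='Service completed',
+                body='The scheduled visit was completed.',
+            )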
+ + Args: + to: List of recipient email addresses + subject: Email subject + body: Email body (plain text) + cc: Optional CC recipients + bcc: Optional BCC recipients + impersonate_user: Email address to send from (requires domain-wide delegation) + + Returns: + dict: Response with 'id', 'threadId', and 'labelIds' + + Raises: + EmailerServiceError: If the request fails + """ + data = { + 'to': to, + 'subject': subject, + 'body': body, + } + if cc: + data['cc'] = cc + if bcc: + data['bcc'] = bcc + + try: + response = requests.post( + f"{self.base_url}/api/v1/emails", + headers=self._get_headers(impersonate_user), + json=data, + timeout=self.timeout + ) + return self._handle_response(response) + except Exception as e: + logger.exception("Failed to send email") + raise + + def send_template_email( + self, + to: List[str], + template_id: str, + variables: Dict[str, str], + cc: Optional[List[str]] = None, + bcc: Optional[List[str]] = None, + impersonate_user: Optional[str] = None, + ) -> Dict: + """ + Send an email using a pre-defined template. + + Available templates: + - 'notification': General notifications + Variables: subject, team_member, message + - 'service_scheduled': Service scheduling notifications + Variables: team_member, customer_name, service_date, service_address + - 'project_update': Project status updates + Variables: team_member, project_name, project_status, message + + Args: + to: List of recipient email addresses + template_id: Template identifier + variables: Template variables (depends on template) + cc: Optional CC recipients + bcc: Optional BCC recipients + impersonate_user: Email address to send from + + Returns: + dict: Response with 'id', 'threadId', and 'labelIds' + + Raises: + EmailerServiceError: If the request fails + """ + data = { + 'to': to, + 'template_id': template_id, + 'variables': variables, + } + if cc: + data['cc'] = cc + if bcc: + data['bcc'] = bcc + + try: + response = requests.post( + f"{self.base_url}/api/v1/templates/send", + headers=self._get_headers(impersonate_user), + json=data, + timeout=self.timeout + ) + result = self._handle_response(response) + logger.info( + f"Template email sent successfully", + extra={ + 'template_id': template_id, + 'recipients': to, + 'email_id': result.get('id') + } + ) + return result + except Exception as e: + logger.exception(f"Failed to send template email: {template_id}") + raise + + def list_templates(self) -> List[str]: + """ + Get list of available email templates. + + Returns: + list: List of template IDs + + Raises: + EmailerServiceError: If the request fails + """ + try: + response = requests.get( + f"{self.base_url}/api/v1/templates", + headers=self._get_headers(), + timeout=self.timeout + ) + return self._handle_response(response) + except Exception as e: + logger.exception("Failed to list templates") + raise + + def get_template(self, template_id: str) -> Dict: + """ + Get details of a specific email template. + + Args: + template_id: Template identifier + + Returns: + dict: Template details including variables + + Raises: + EmailerServiceError: If the request fails + """ + try: + response = requests.get( + f"{self.base_url}/api/v1/templates/{template_id}", + headers=self._get_headers(), + timeout=self.timeout + ) + return self._handle_response(response) + except Exception as e: + logger.exception(f"Failed to get template: {template_id}") + raise + + def health_check(self) -> bool: + """ + Check if the emailer service is healthy. 
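+
+        Uses a short 5-second timeout so callers (for example, a readiness
+        probe) can poll without blocking on a slow service.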
+ + Returns: + bool: True if service is healthy, False otherwise + """ + try: + response = requests.get( + f"{self.base_url}/health", + timeout=5 + ) + return response.status_code == 200 + except Exception as e: + logger.warning(f"Emailer health check failed: {e}") + return False + + +# Convenience function for quick access +def get_emailer_client() -> EmailerClient: + """ + Get a configured emailer client instance. + + Returns: + EmailerClient: Configured client instance + """ + return EmailerClient() diff --git a/core/services/events.py b/core/services/events.py new file mode 100644 index 0000000..e3623a7 --- /dev/null +++ b/core/services/events.py @@ -0,0 +1,1632 @@ +""" +Event publishing service for the notification system. +Creates Event records that can trigger notifications. +""" +from typing import Any, Optional, Dict, Set +from django.contrib.contenttypes.models import ContentType +from channels.db import database_sync_to_async + +from core.models.events import Event +from core.models.enums import EventTypeChoices +from core.services.metadata import MetadataEnricher + + +# Mission-critical events that are ALWAYS created (regardless of notification rules) +# These events are essential for audit trails, compliance, and business operations +MISSION_CRITICAL_EVENTS: Set[EventTypeChoices] = { + # Service delivery (customer-facing) + EventTypeChoices.SERVICE_COMPLETED, + EventTypeChoices.SERVICE_CANCELLED, + EventTypeChoices.SERVICE_SESSION_CLOSED, + + # Project milestones + EventTypeChoices.PROJECT_COMPLETED, + EventTypeChoices.PROJECT_CANCELLED, + EventTypeChoices.PROJECT_SESSION_CLOSED, + + # Issues requiring attention + EventTypeChoices.ACCOUNT_PUNCHLIST_CREATED, + EventTypeChoices.PROJECT_PUNCHLIST_CREATED, + EventTypeChoices.PUNCHLIST_STATUS_CHANGED, + + # Financial events (compliance/audit) + EventTypeChoices.INVOICE_GENERATED, + EventTypeChoices.INVOICE_SENT, + EventTypeChoices.INVOICE_PAID, + EventTypeChoices.INVOICE_OVERDUE, + EventTypeChoices.INVOICE_CANCELLED, + EventTypeChoices.LABOR_RATE_CREATED, + EventTypeChoices.LABOR_RATE_UPDATED, + EventTypeChoices.LABOR_RATE_DELETED, + EventTypeChoices.REVENUE_RATE_CREATED, + EventTypeChoices.REVENUE_RATE_UPDATED, + EventTypeChoices.REVENUE_RATE_DELETED, + + # Reporting & documentation (legal/compliance) + EventTypeChoices.REPORT_SUBMITTED, + EventTypeChoices.REPORT_APPROVED, + EventTypeChoices.SESSION_IMAGE_UPLOADED, + EventTypeChoices.SESSION_VIDEO_UPLOADED, + + # Account & access management (security/audit) + EventTypeChoices.CUSTOMER_CREATED, + EventTypeChoices.CUSTOMER_DELETED, + EventTypeChoices.CUSTOMER_STATUS_CHANGED, + EventTypeChoices.ACCOUNT_CREATED, + EventTypeChoices.ACCOUNT_DELETED, + EventTypeChoices.ACCOUNT_STATUS_CHANGED, + EventTypeChoices.TEAM_PROFILE_CREATED, + EventTypeChoices.TEAM_PROFILE_DELETED, + EventTypeChoices.TEAM_PROFILE_ROLE_CHANGED, + EventTypeChoices.CUSTOMER_PROFILE_CREATED, + EventTypeChoices.CUSTOMER_PROFILE_DELETED, + EventTypeChoices.CUSTOMER_PROFILE_ACCESS_GRANTED, + EventTypeChoices.CUSTOMER_PROFILE_ACCESS_REVOKED, + + # Schedule changes (operational impact) + EventTypeChoices.SCHEDULE_CREATED, + EventTypeChoices.SCHEDULE_DELETED, + EventTypeChoices.SCHEDULE_FREQUENCY_CHANGED, + EventTypeChoices.SERVICES_BULK_GENERATED, +} + + +class EventPublisher: + """ + Service for publishing events to the event management system. + Events are stored in the database and can trigger notifications via Celery tasks. 
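+
+    Mission-critical event types (see MISSION_CRITICAL_EVENTS) are always
+    persisted; other event types are persisted only when an active
+    NotificationRule subscribes to them.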
+ """ + + @staticmethod + async def publish( + event_type: EventTypeChoices, + entity_type: str, + entity_id: str, + triggered_by: Optional[Any] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> Optional[Event]: + """ + Publish an event to the event management system. + + For mission-critical events, the event is always created. + For non-critical events, the event is only created if there are active notification rules. + + Args: + event_type: Type of event (from EventTypeChoices enum) + entity_type: Type of entity (e.g., 'Project', 'Report', 'Invoice') + entity_id: UUID of the entity that triggered this event + triggered_by: Profile instance (TeamProfile or CustomerProfile) that triggered the event + metadata: Additional event metadata (e.g., old_status, new_status, changed_fields) + + Returns: + Event: The created Event instance, or None if event was skipped + + Example: + await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_CREATED, + entity_type='Project', + entity_id=project.id, + triggered_by=team_profile, + metadata={'status': 'ACTIVE'}) + """ + # Check if event is mission-critical + is_critical = event_type in MISSION_CRITICAL_EVENTS + + # For non-critical events, check if there are any active notification rules + if not is_critical: + from core.models.events import NotificationRule + + has_rules = await database_sync_to_async( + lambda: NotificationRule.objects.filter( + is_active=True, + event_types__contains=[event_type] + ).exists() + )() + + if not has_rules: + # Skip event creation - no rules configured for this event type + return None + + # Enrich metadata with domain-appropriate fields + enriched_metadata = await MetadataEnricher.enrich( + event_type=event_type, + entity_type=entity_type, + entity_id=entity_id, + existing_metadata=metadata + ) + + event_data = { + 'event_type': event_type, + 'entity_type': entity_type, + 'entity_id': entity_id, + 'metadata': enriched_metadata, + } + + # Set a generic foreign key for triggered_by if provided + if triggered_by: + content_type = await database_sync_to_async(ContentType.objects.get_for_model)(triggered_by) + event_data['triggered_by_content_type'] = content_type + event_data['triggered_by_object_id'] = triggered_by.id + + # Create the event + event = await database_sync_to_async(Event.objects.create)(**event_data) + + # Queue notification processing task + # Note: .delay() is non-blocking and doesn't need sync_to_async wrapping + from core.tasks.notifications import process_event_notifications + process_event_notifications.delay(str(event.id)) + + return event + + @staticmethod + def publish_sync( + event_type: EventTypeChoices, + entity_type: str, + entity_id: str, + triggered_by: Optional[Any] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> Optional[Event]: + """ + Synchronous version of publish() for use in non-async contexts. + + For mission-critical events, the event is always created. + For non-critical events, the event is only created if there are active notification rules. 
+ + Args: + event_type: Type of event (from EventTypeChoices enum) + entity_type: Type of entity (e.g., 'Project', 'Report', 'Invoice') + entity_id: UUID of the entity that triggered this event + triggered_by: Profile instance (TeamProfile or CustomerProfile) that triggered the event + metadata: Additional event metadata + + Returns: + Event: The created Event instance, or None if event was skipped + """ + # Check if event is mission-critical + is_critical = event_type in MISSION_CRITICAL_EVENTS + + # For non-critical events, check if there are any active notification rules + if not is_critical: + from core.models.events import NotificationRule + + has_rules = NotificationRule.objects.filter( + is_active=True, + event_types__contains=[event_type] + ).exists() + + if not has_rules: + # Skip event creation - no rules configured for this event type + return None + + event_data = { + 'event_type': event_type, + 'entity_type': entity_type, + 'entity_id': entity_id, + 'metadata': metadata or {}, + } + + # Set a generic foreign key for triggered_by if provided + if triggered_by: + content_type = ContentType.objects.get_for_model(triggered_by) + event_data['triggered_by_content_type'] = content_type + event_data['triggered_by_object_id'] = triggered_by.id + + # Create the event + event = Event.objects.create(**event_data) + + # Queue notification processing task + from core.tasks.notifications import process_event_notifications + process_event_notifications.delay(str(event.id)) + + return event + + +# ======================================== +# Convenience functions for common event types +# ======================================== + +# Customer events +async def publish_customer_created(customer_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_CREATED, + entity_type='Customer', + entity_id=customer_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_updated(customer_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_UPDATED, + entity_type='Customer', + entity_id=customer_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_deleted(customer_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_DELETED, + entity_type='Customer', + entity_id=customer_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_status_changed(customer_id: str, old_status: str, new_status: str, triggered_by=None): + """Publish CUSTOMER_STATUS_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_STATUS_CHANGED, + entity_type='Customer', + entity_id=customer_id, + triggered_by=triggered_by, + metadata={'old_status': old_status, 'new_status': new_status} + ) + + +# Account events +async def publish_account_created(account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_CREATED, + entity_type='Account', + entity_id=account_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_account_updated(account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_UPDATED event""" + return await EventPublisher.publish( + 
event_type=EventTypeChoices.ACCOUNT_UPDATED, + entity_type='Account', + entity_id=account_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_account_deleted(account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_DELETED, + entity_type='Account', + entity_id=account_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_account_status_changed(account_id: str, old_status: str, new_status: str, triggered_by=None): + """Publish ACCOUNT_STATUS_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_STATUS_CHANGED, + entity_type='Account', + entity_id=account_id, + triggered_by=triggered_by, + metadata={'old_status': old_status, 'new_status': new_status} + ) + + +# Service events +async def publish_service_created(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_CREATED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_service_updated(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_UPDATED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_service_deleted(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_DELETED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_service_status_changed(service_id: str, old_status: str, new_status: str, triggered_by=None): + """Publish SERVICE_STATUS_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_STATUS_CHANGED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata={'old_status': old_status, 'new_status': new_status} + ) + + +async def publish_service_completed(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_COMPLETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_COMPLETED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_service_cancelled(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_CANCELLED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_CANCELLED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_service_team_assigned(service_id: str, team_member_id: str, team_member_name: str, triggered_by=None): + """Publish SERVICE_TEAM_ASSIGNED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_TEAM_ASSIGNED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata={'team_member_id': team_member_id, 'team_member_name': team_member_name} + ) + + +async def publish_service_team_unassigned(service_id: str, team_member_id: str, team_member_name: str, triggered_by=None): + """Publish SERVICE_TEAM_UNASSIGNED event""" + return await EventPublisher.publish( + 
event_type=EventTypeChoices.SERVICE_TEAM_UNASSIGNED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata={'team_member_id': team_member_id, 'team_member_name': team_member_name} + ) + + +async def publish_service_dispatched(service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_DISPATCHED event (admin assigned to service)""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_DISPATCHED, + entity_type='Service', + entity_id=service_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_services_bulk_generated(account_id: str, count: int, month: str, triggered_by=None): + """Publish SERVICES_BULK_GENERATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICES_BULK_GENERATED, + entity_type='Account', + entity_id=account_id, + triggered_by=triggered_by, + metadata={'count': count, 'month': month} + ) + + +# Schedule events +async def publish_schedule_created(schedule_id: str, triggered_by=None, metadata=None): + """Publish SCHEDULE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCHEDULE_CREATED, + entity_type='Schedule', + entity_id=schedule_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_schedule_updated(schedule_id: str, triggered_by=None, metadata=None): + """Publish SCHEDULE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCHEDULE_UPDATED, + entity_type='Schedule', + entity_id=schedule_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_schedule_deleted(schedule_id: str, triggered_by=None, metadata=None): + """Publish SCHEDULE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCHEDULE_DELETED, + entity_type='Schedule', + entity_id=schedule_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_schedule_frequency_changed(schedule_id: str, old_frequency: str, new_frequency: str, triggered_by=None): + """Publish SCHEDULE_FREQUENCY_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCHEDULE_FREQUENCY_CHANGED, + entity_type='Schedule', + entity_id=schedule_id, + triggered_by=triggered_by, + metadata={'old_frequency': old_frequency, 'new_frequency': new_frequency} + ) + + +# Project events +async def publish_project_created(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_CREATED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_project_updated(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_UPDATED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_project_status_changed(project_id: str, old_status: str, new_status: str, triggered_by=None): + """Publish PROJECT_STATUS_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_STATUS_CHANGED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata={'old_status': old_status, 'new_status': new_status} + ) + + +async def publish_project_completed(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_COMPLETED event""" 
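+ # Caller-supplied metadata is preserved as-is; MetadataEnricher.enrich() only
+ # fills in domain keys (project_name, date, account_name, ...) that the
+ # caller did not set.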
+ return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_COMPLETED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_project_cancelled(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_CANCELLED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_CANCELLED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_project_dispatched(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_DISPATCHED event (admin assigned to project)""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_DISPATCHED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_project_deleted(project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_DELETED, + entity_type='Project', + entity_id=project_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +# Team profile events +async def publish_team_profile_created(profile_id: str, triggered_by=None, metadata=None): + """Publish TEAM_PROFILE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TEAM_PROFILE_CREATED, + entity_type='TeamProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_team_profile_updated(profile_id: str, triggered_by=None, metadata=None): + """Publish TEAM_PROFILE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TEAM_PROFILE_UPDATED, + entity_type='TeamProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_team_profile_deleted(profile_id: str, triggered_by=None, metadata=None): + """Publish TEAM_PROFILE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TEAM_PROFILE_DELETED, + entity_type='TeamProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_team_profile_role_changed(profile_id: str, old_role: str, new_role: str, triggered_by=None): + """Publish TEAM_PROFILE_ROLE_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TEAM_PROFILE_ROLE_CHANGED, + entity_type='TeamProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata={'old_role': old_role, 'new_role': new_role} + ) + + +# Customer profile events +async def publish_customer_profile_created(profile_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_PROFILE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_PROFILE_CREATED, + entity_type='CustomerProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_profile_updated(profile_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_PROFILE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_PROFILE_UPDATED, + entity_type='CustomerProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_profile_deleted(profile_id: str, triggered_by=None, metadata=None): + """Publish CUSTOMER_PROFILE_DELETED event""" + return await EventPublisher.publish( + 
event_type=EventTypeChoices.CUSTOMER_PROFILE_DELETED, + entity_type='CustomerProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_customer_profile_access_granted(profile_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_PROFILE_ACCESS_GRANTED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_PROFILE_ACCESS_GRANTED, + entity_type='CustomerProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_profile_access_revoked(profile_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_PROFILE_ACCESS_REVOKED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_PROFILE_ACCESS_REVOKED, + entity_type='CustomerProfile', + entity_id=profile_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +# Report events +async def publish_report_created(report_id: str, triggered_by=None, metadata=None): + """Publish REPORT_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REPORT_CREATED, + entity_type='Report', + entity_id=report_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_report_updated(report_id: str, triggered_by=None, metadata=None): + """Publish REPORT_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REPORT_UPDATED, + entity_type='Report', + entity_id=report_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_report_deleted(report_id: str, triggered_by=None, metadata=None): + """Publish REPORT_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REPORT_DELETED, + entity_type='Report', + entity_id=report_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_report_submitted(report_id: str, triggered_by=None, metadata=None): + """Publish REPORT_SUBMITTED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REPORT_SUBMITTED, + entity_type='Report', + entity_id=report_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_report_approved(report_id: str, triggered_by=None, metadata=None): + """Publish REPORT_APPROVED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REPORT_APPROVED, + entity_type='Report', + entity_id=report_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +# Invoice events +async def publish_invoice_generated(invoice_id: str, triggered_by=None, metadata=None): + """Publish INVOICE_GENERATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.INVOICE_GENERATED, + entity_type='Invoice', + entity_id=invoice_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_invoice_sent(invoice_id: str, triggered_by=None, metadata=None): + """Publish INVOICE_SENT event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.INVOICE_SENT, + entity_type='Invoice', + entity_id=invoice_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_invoice_paid(invoice_id: str, triggered_by=None, metadata=None): + """Publish INVOICE_PAID event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.INVOICE_PAID, + entity_type='Invoice', + entity_id=invoice_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_invoice_overdue(invoice_id: str, 
triggered_by=None, metadata=None): + """Publish INVOICE_OVERDUE event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.INVOICE_OVERDUE, + entity_type='Invoice', + entity_id=invoice_id, + triggered_by=triggered_by, + metadata=metadata + ) + + +async def publish_invoice_cancelled(invoice_id: str, triggered_by=None, metadata=None): + """Publish INVOICE_CANCELLED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.INVOICE_CANCELLED, + entity_type='Invoice', + entity_id=invoice_id, + triggered_by=triggered_by, + metadata=metadata + ) + +# Service Session events +async def publish_service_session_opened(session_id: str, service_id: str, triggered_by=None): + """Publish SERVICE_SESSION_OPENED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_SESSION_OPENED, + entity_type='ServiceSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata={'service_id': service_id} + ) + + +async def publish_service_session_closed(session_id: str, service_id: str, triggered_by=None, metadata=None): + """Publish SERVICE_SESSION_CLOSED event""" + event_metadata = {'service_id': service_id} + if metadata: + event_metadata.update(metadata) + + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_SESSION_CLOSED, + entity_type='ServiceSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata=event_metadata + ) + + +async def publish_service_session_reverted(session_id: str, service_id: str, triggered_by=None): + """Publish SERVICE_SESSION_REVERTED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_SESSION_REVERTED, + entity_type='ServiceSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata={'service_id': service_id} + ) + + +async def publish_service_task_completed(task_id: str, service_id: str, task_name: str, triggered_by=None): + """Publish SERVICE_TASK_COMPLETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_TASK_COMPLETED, + entity_type='TaskCompletion', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'service_id': service_id, 'task_name': task_name} + ) + + +async def publish_service_task_uncompleted(task_id: str, service_id: str, task_name: str, triggered_by=None): + """Publish SERVICE_TASK_UNCOMPLETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SERVICE_TASK_UNCOMPLETED, + entity_type='TaskCompletion', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'service_id': service_id, 'task_name': task_name} + ) + + +# Project Session events +async def publish_project_session_opened(session_id: str, project_id: str, triggered_by=None): + """Publish PROJECT_SESSION_OPENED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SESSION_OPENED, + entity_type='ProjectSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata={'project_id': project_id} + ) + + +async def publish_project_session_closed(session_id: str, project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_SESSION_CLOSED event""" + event_metadata = {'project_id': project_id} + if metadata: + event_metadata.update(metadata) + + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SESSION_CLOSED, + entity_type='ProjectSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata=event_metadata + ) + + +async def publish_project_session_reverted(session_id: str, project_id: str, 
triggered_by=None): + """Publish PROJECT_SESSION_REVERTED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SESSION_REVERTED, + entity_type='ProjectSession', + entity_id=session_id, + triggered_by=triggered_by, + metadata={'project_id': project_id} + ) + + +async def publish_project_task_completed(task_id: str, project_id: str, task_name: str, triggered_by=None): + """Publish PROJECT_TASK_COMPLETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_TASK_COMPLETED, + entity_type='ProjectTaskCompletion', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'project_id': project_id, 'task_name': task_name} + ) + + +async def publish_project_task_uncompleted(task_id: str, project_id: str, task_name: str, triggered_by=None): + """Publish PROJECT_TASK_UNCOMPLETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_TASK_UNCOMPLETED, + entity_type='ProjectTaskCompletion', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'project_id': project_id, 'task_name': task_name} + ) + + +# Punchlist events +async def publish_account_punchlist_created(punchlist_id: str, account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_PUNCHLIST_CREATED event""" + meta = metadata or {} + meta['account_id'] = account_id + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_PUNCHLIST_CREATED, + entity_type='AccountPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_account_punchlist_updated(punchlist_id: str, account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_PUNCHLIST_UPDATED event""" + meta = metadata or {} + meta['account_id'] = account_id + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_PUNCHLIST_UPDATED, + entity_type='AccountPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_account_punchlist_deleted(punchlist_id: str, account_id: str, triggered_by=None, metadata=None): + """Publish ACCOUNT_PUNCHLIST_DELETED event""" + meta = metadata or {} + meta['account_id'] = account_id + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_PUNCHLIST_DELETED, + entity_type='AccountPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_project_punchlist_created(punchlist_id: str, project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_PUNCHLIST_CREATED event""" + meta = metadata or {} + meta['project_id'] = project_id + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_PUNCHLIST_CREATED, + entity_type='ProjectPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_project_punchlist_updated(punchlist_id: str, project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_PUNCHLIST_UPDATED event""" + meta = metadata or {} + meta['project_id'] = project_id + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_PUNCHLIST_UPDATED, + entity_type='ProjectPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_project_punchlist_deleted(punchlist_id: str, project_id: str, triggered_by=None, metadata=None): + """Publish PROJECT_PUNCHLIST_DELETED event""" + meta = metadata or {} + meta['project_id'] = project_id + return await EventPublisher.publish( + 
event_type=EventTypeChoices.PROJECT_PUNCHLIST_DELETED, + entity_type='ProjectPunchlist', + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_punchlist_status_changed(punchlist_id: str, entity_type: str, old_status: str, new_status: str, triggered_by=None): + """Publish PUNCHLIST_STATUS_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PUNCHLIST_STATUS_CHANGED, + entity_type=entity_type, + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata={'old_status': old_status, 'new_status': new_status} + ) + + +async def publish_punchlist_priority_changed(punchlist_id: str, entity_type: str, old_priority: str, new_priority: str, triggered_by=None): + """Publish PUNCHLIST_PRIORITY_CHANGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PUNCHLIST_PRIORITY_CHANGED, + entity_type=entity_type, + entity_id=punchlist_id, + triggered_by=triggered_by, + metadata={'old_priority': old_priority, 'new_priority': new_priority} + ) + + +# Session Media events +async def publish_session_image_uploaded(image_id: str, session_id: str, is_internal: bool, triggered_by=None): + """Publish SESSION_IMAGE_UPLOADED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_IMAGE_UPLOADED, + entity_type='SessionImage', + entity_id=image_id, + triggered_by=triggered_by, + metadata={'session_id': session_id, 'is_internal': is_internal} + ) + + +async def publish_session_image_updated(image_id: str, session_id: str, triggered_by=None, metadata=None): + """Publish SESSION_IMAGE_UPDATED event""" + meta = metadata or {} + meta['session_id'] = session_id + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_IMAGE_UPDATED, + entity_type='SessionImage', + entity_id=image_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_session_image_deleted(image_id: str, session_id: str, triggered_by=None): + """Publish SESSION_IMAGE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_IMAGE_DELETED, + entity_type='SessionImage', + entity_id=image_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +async def publish_session_video_uploaded(video_id: str, session_id: str, is_internal: bool, triggered_by=None): + """Publish SESSION_VIDEO_UPLOADED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_VIDEO_UPLOADED, + entity_type='SessionVideo', + entity_id=video_id, + triggered_by=triggered_by, + metadata={'session_id': session_id, 'is_internal': is_internal} + ) + + +async def publish_session_video_updated(video_id: str, session_id: str, triggered_by=None, metadata=None): + """Publish SESSION_VIDEO_UPDATED event""" + meta = metadata or {} + meta['session_id'] = session_id + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_VIDEO_UPDATED, + entity_type='SessionVideo', + entity_id=video_id, + triggered_by=triggered_by, + metadata=meta + ) + + +async def publish_session_video_deleted(video_id: str, session_id: str, triggered_by=None): + """Publish SESSION_VIDEO_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_VIDEO_DELETED, + entity_type='SessionVideo', + entity_id=video_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +async def publish_session_media_internal_flagged(media_id: str, media_type: str, session_id: str, triggered_by=None): + """Publish 
SESSION_MEDIA_INTERNAL_FLAGGED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_MEDIA_INTERNAL_FLAGGED, + entity_type=media_type, + entity_id=media_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +# Session Notes events +async def publish_session_note_created(note_id: str, session_id: str, triggered_by=None): + """Publish SESSION_NOTE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_NOTE_CREATED, + entity_type='SessionNote', + entity_id=note_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +async def publish_session_note_updated(note_id: str, session_id: str, triggered_by=None): + """Publish SESSION_NOTE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_NOTE_UPDATED, + entity_type='SessionNote', + entity_id=note_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +async def publish_session_note_deleted(note_id: str, session_id: str, triggered_by=None): + """Publish SESSION_NOTE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SESSION_NOTE_DELETED, + entity_type='SessionNote', + entity_id=note_id, + triggered_by=triggered_by, + metadata={'session_id': session_id} + ) + + +# Customer Address & Contact events +async def publish_customer_address_created(address_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_ADDRESS_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_ADDRESS_CREATED, + entity_type='CustomerAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_address_updated(address_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_ADDRESS_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_ADDRESS_UPDATED, + entity_type='CustomerAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_address_deleted(address_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_ADDRESS_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_ADDRESS_DELETED, + entity_type='CustomerAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_contact_created(contact_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_CONTACT_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_CONTACT_CREATED, + entity_type='CustomerContact', + entity_id=contact_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_contact_updated(contact_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_CONTACT_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_CONTACT_UPDATED, + entity_type='CustomerContact', + entity_id=contact_id, + triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +async def publish_customer_contact_deleted(contact_id: str, customer_id: str, triggered_by=None): + """Publish CUSTOMER_CONTACT_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.CUSTOMER_CONTACT_DELETED, + entity_type='CustomerContact', + entity_id=contact_id, + 
triggered_by=triggered_by, + metadata={'customer_id': customer_id} + ) + + +# Account Address & Contact events +async def publish_account_address_created(address_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_ADDRESS_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_ADDRESS_CREATED, + entity_type='AccountAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_account_address_updated(address_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_ADDRESS_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_ADDRESS_UPDATED, + entity_type='AccountAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_account_address_deleted(address_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_ADDRESS_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_ADDRESS_DELETED, + entity_type='AccountAddress', + entity_id=address_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_account_contact_created(contact_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_CONTACT_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_CONTACT_CREATED, + entity_type='AccountContact', + entity_id=contact_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_account_contact_updated(contact_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_CONTACT_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_CONTACT_UPDATED, + entity_type='AccountContact', + entity_id=contact_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_account_contact_deleted(contact_id: str, account_id: str, triggered_by=None): + """Publish ACCOUNT_CONTACT_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.ACCOUNT_CONTACT_DELETED, + entity_type='AccountContact', + entity_id=contact_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +# Scope events +async def publish_scope_created(scope_id: str, account_id: str, triggered_by=None): + """Publish SCOPE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_CREATED, + entity_type='Scope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_scope_updated(scope_id: str, account_id: str, triggered_by=None): + """Publish SCOPE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_UPDATED, + entity_type='Scope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_scope_deleted(scope_id: str, account_id: str, triggered_by=None): + """Publish SCOPE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_DELETED, + entity_type='Scope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'account_id': account_id} + ) + + +async def publish_area_created(area_id: str, scope_id: str, triggered_by=None): + """Publish AREA_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_CREATED, + entity_type='Area', + entity_id=area_id, + 
triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_area_updated(area_id: str, scope_id: str, triggered_by=None): + """Publish AREA_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_UPDATED, + entity_type='Area', + entity_id=area_id, + triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_area_deleted(area_id: str, scope_id: str, triggered_by=None): + """Publish AREA_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_DELETED, + entity_type='Area', + entity_id=area_id, + triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_task_created(task_id: str, area_id: str, triggered_by=None): + """Publish TASK_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_CREATED, + entity_type='Task', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'area_id': area_id} + ) + + +async def publish_task_updated(task_id: str, area_id: str, triggered_by=None): + """Publish TASK_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_UPDATED, + entity_type='Task', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'area_id': area_id} + ) + + +async def publish_task_deleted(task_id: str, area_id: str, triggered_by=None): + """Publish TASK_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_DELETED, + entity_type='Task', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'area_id': area_id} + ) + + +async def publish_task_completion_recorded(completion_id: str, task_id: str, service_id: str, triggered_by=None): + """Publish TASK_COMPLETION_RECORDED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_COMPLETION_RECORDED, + entity_type='TaskCompletion', + entity_id=completion_id, + triggered_by=triggered_by, + metadata={'task_id': task_id, 'service_id': service_id} + ) + + +# Scope Template events +async def publish_scope_template_created(template_id: str, triggered_by=None): + """Publish SCOPE_TEMPLATE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_TEMPLATE_CREATED, + entity_type='ScopeTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_scope_template_updated(template_id: str, triggered_by=None): + """Publish SCOPE_TEMPLATE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_TEMPLATE_UPDATED, + entity_type='ScopeTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_scope_template_deleted(template_id: str, triggered_by=None): + """Publish SCOPE_TEMPLATE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_TEMPLATE_DELETED, + entity_type='ScopeTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_scope_template_instantiated(scope_id: str, template_id: str, account_id: str, triggered_by=None): + """Publish SCOPE_TEMPLATE_INSTANTIATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.SCOPE_TEMPLATE_INSTANTIATED, + entity_type='Scope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'template_id': template_id, 'account_id': account_id} + ) + + +async def publish_area_template_created(template_id: str, scope_template_id: str, triggered_by=None): + 
"""Publish AREA_TEMPLATE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_TEMPLATE_CREATED, + entity_type='AreaTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'scope_template_id': scope_template_id} + ) + + +async def publish_area_template_updated(template_id: str, scope_template_id: str, triggered_by=None): + """Publish AREA_TEMPLATE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_TEMPLATE_UPDATED, + entity_type='AreaTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'scope_template_id': scope_template_id} + ) + + +async def publish_area_template_deleted(template_id: str, scope_template_id: str, triggered_by=None): + """Publish AREA_TEMPLATE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.AREA_TEMPLATE_DELETED, + entity_type='AreaTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'scope_template_id': scope_template_id} + ) + + +async def publish_task_template_created(template_id: str, area_template_id: str, triggered_by=None): + """Publish TASK_TEMPLATE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_TEMPLATE_CREATED, + entity_type='TaskTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'area_template_id': area_template_id} + ) + + +async def publish_task_template_updated(template_id: str, area_template_id: str, triggered_by=None): + """Publish TASK_TEMPLATE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_TEMPLATE_UPDATED, + entity_type='TaskTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'area_template_id': area_template_id} + ) + + +async def publish_task_template_deleted(template_id: str, area_template_id: str, triggered_by=None): + """Publish TASK_TEMPLATE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.TASK_TEMPLATE_DELETED, + entity_type='TaskTemplate', + entity_id=template_id, + triggered_by=triggered_by, + metadata={'area_template_id': area_template_id} + ) + + +# Project Scope events +async def publish_project_scope_created(scope_id: str, project_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_CREATED, + entity_type='ProjectScope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'project_id': project_id} + ) + + +async def publish_project_scope_updated(scope_id: str, project_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_UPDATED, + entity_type='ProjectScope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'project_id': project_id} + ) + + +async def publish_project_scope_deleted(scope_id: str, project_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_DELETED, + entity_type='ProjectScope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'project_id': project_id} + ) + + +async def publish_project_scope_category_created(category_id: str, scope_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_CATEGORY_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_CATEGORY_CREATED, + 
entity_type='ProjectScopeCategory', + entity_id=category_id, + triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_project_scope_category_updated(category_id: str, scope_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_CATEGORY_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_CATEGORY_UPDATED, + entity_type='ProjectScopeCategory', + entity_id=category_id, + triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_project_scope_category_deleted(category_id: str, scope_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_CATEGORY_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_CATEGORY_DELETED, + entity_type='ProjectScopeCategory', + entity_id=category_id, + triggered_by=triggered_by, + metadata={'scope_id': scope_id} + ) + + +async def publish_project_scope_task_created(task_id: str, category_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_TASK_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_TASK_CREATED, + entity_type='ProjectScopeTask', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'category_id': category_id} + ) + + +async def publish_project_scope_task_updated(task_id: str, category_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_TASK_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_TASK_UPDATED, + entity_type='ProjectScopeTask', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'category_id': category_id} + ) + + +async def publish_project_scope_task_deleted(task_id: str, category_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_TASK_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_TASK_DELETED, + entity_type='ProjectScopeTask', + entity_id=task_id, + triggered_by=triggered_by, + metadata={'category_id': category_id} + ) + + +async def publish_project_scope_template_instantiated(scope_id: str, template_id: str, project_id: str, triggered_by=None): + """Publish PROJECT_SCOPE_TEMPLATE_INSTANTIATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.PROJECT_SCOPE_TEMPLATE_INSTANTIATED, + entity_type='ProjectScope', + entity_id=scope_id, + triggered_by=triggered_by, + metadata={'template_id': template_id, 'project_id': project_id} + ) + + +# Labor & Revenue events +async def publish_labor_rate_created(rate_id: str, triggered_by=None): + """Publish LABOR_RATE_CREATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.LABOR_RATE_CREATED, + entity_type='LaborRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_labor_rate_updated(rate_id: str, triggered_by=None): + """Publish LABOR_RATE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.LABOR_RATE_UPDATED, + entity_type='LaborRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_labor_rate_deleted(rate_id: str, triggered_by=None): + """Publish LABOR_RATE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.LABOR_RATE_DELETED, + entity_type='LaborRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_revenue_rate_created(rate_id: str, triggered_by=None): + """Publish REVENUE_RATE_CREATED event""" + return await 
EventPublisher.publish( + event_type=EventTypeChoices.REVENUE_RATE_CREATED, + entity_type='RevenueRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_revenue_rate_updated(rate_id: str, triggered_by=None): + """Publish REVENUE_RATE_UPDATED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REVENUE_RATE_UPDATED, + entity_type='RevenueRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) + + +async def publish_revenue_rate_deleted(rate_id: str, triggered_by=None): + """Publish REVENUE_RATE_DELETED event""" + return await EventPublisher.publish( + event_type=EventTypeChoices.REVENUE_RATE_DELETED, + entity_type='RevenueRate', + entity_id=rate_id, + triggered_by=triggered_by, + metadata={} + ) diff --git a/core/services/metadata/__init__.py b/core/services/metadata/__init__.py new file mode 100644 index 0000000..2f65560 --- /dev/null +++ b/core/services/metadata/__init__.py @@ -0,0 +1,19 @@ +""" +Metadata enrichment module for events. + +Provides automatic metadata population based on entity type and event domain. +""" +from core.services.metadata.base import MetadataEnricher +from core.services.metadata.service import SERVICE_EVENTS +from core.services.metadata.project import PROJECT_EVENTS +from core.services.metadata.account import ACCOUNT_EVENTS +from core.services.metadata.customer import CUSTOMER_EVENTS + + +__all__ = [ + 'MetadataEnricher', + 'SERVICE_EVENTS', + 'PROJECT_EVENTS', + 'ACCOUNT_EVENTS', + 'CUSTOMER_EVENTS', +] diff --git a/core/services/metadata/account.py b/core/services/metadata/account.py new file mode 100644 index 0000000..6e9ddfa --- /dev/null +++ b/core/services/metadata/account.py @@ -0,0 +1,135 @@ +""" +Account domain metadata enrichment. +Handles Account, AccountAddress, AccountContact, and AccountPunchlist entities. 
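+
+All loaders return {} when the target row no longer exists, so a deleted
+entity never breaks event publishing. Illustrative shape of an enriched
+account payload (values are placeholders):
+
+ {'account_name': 'Acme Corp', 'customer_name': 'Globex'}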
+""" +from typing import Dict, Any, Set + +from asgiref.sync import sync_to_async + +from core.models.enums import EventTypeChoices + + +ACCOUNT_EVENTS: Set[str] = { + EventTypeChoices.ACCOUNT_CREATED, + EventTypeChoices.ACCOUNT_UPDATED, + EventTypeChoices.ACCOUNT_DELETED, + EventTypeChoices.ACCOUNT_STATUS_CHANGED, + EventTypeChoices.ACCOUNT_ADDRESS_CREATED, + EventTypeChoices.ACCOUNT_ADDRESS_UPDATED, + EventTypeChoices.ACCOUNT_ADDRESS_DELETED, + EventTypeChoices.ACCOUNT_CONTACT_CREATED, + EventTypeChoices.ACCOUNT_CONTACT_UPDATED, + EventTypeChoices.ACCOUNT_CONTACT_DELETED, + EventTypeChoices.ACCOUNT_PUNCHLIST_CREATED, + EventTypeChoices.ACCOUNT_PUNCHLIST_UPDATED, + EventTypeChoices.ACCOUNT_PUNCHLIST_DELETED, +} + + +async def enrich_account_domain(entity_type: str, entity_id: str) -> Dict[str, Any]: + """Enrich metadata for account-domain events.""" + if entity_type == 'Account': + return await _load_account_metadata(entity_id) + elif entity_type == 'AccountAddress': + return await _load_account_address_metadata(entity_id) + elif entity_type == 'AccountContact': + return await _load_account_contact_metadata(entity_id) + elif entity_type == 'AccountPunchlist': + return await _load_account_punchlist_metadata(entity_id) + return {} + + +async def _load_account_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from an Account entity.""" + from core.models.account import Account + + def _load(): + try: + account = Account.objects.select_related('customer').get(pk=entity_id) + metadata = {'account_name': account.name or ''} + if account.customer: + metadata['customer_name'] = account.customer.name or '' + return metadata + except Account.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_account_address_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from an AccountAddress entity.""" + from core.models.account import AccountAddress + + def _load(): + try: + address = AccountAddress.objects.select_related( + 'account', + 'account__customer', + ).get(pk=entity_id) + + metadata = {'account_id': str(address.account_id)} + if address.account: + metadata['account_name'] = address.account.name or '' + if address.account.customer: + metadata['customer_name'] = address.account.customer.name or '' + + # Address + address_parts = [] + if address.street_address: + address_parts.append(address.street_address) + if address.city: + address_parts.append(address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + return metadata + except AccountAddress.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_account_contact_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from an AccountContact entity.""" + from core.models.account import AccountContact + + def _load(): + try: + contact = AccountContact.objects.select_related( + 'account', + 'account__customer', + ).get(pk=entity_id) + + metadata = {'account_id': str(contact.account_id)} + if contact.account: + metadata['account_name'] = contact.account.name or '' + if contact.account.customer: + metadata['customer_name'] = contact.account.customer.name or '' + return metadata + except AccountContact.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_account_punchlist_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from an AccountPunchlist entity.""" + from core.models.account_punchlist import AccountPunchlist + + def _load(): + try: + punchlist = 
AccountPunchlist.objects.select_related( + 'account', + 'account__customer', + ).get(pk=entity_id) + + metadata = {'account_id': str(punchlist.account_id)} + if punchlist.account: + metadata['account_name'] = punchlist.account.name or '' + if punchlist.account.customer: + metadata['customer_name'] = punchlist.account.customer.name or '' + return metadata + except AccountPunchlist.DoesNotExist: + return {} + + return await sync_to_async(_load)() diff --git a/core/services/metadata/base.py b/core/services/metadata/base.py new file mode 100644 index 0000000..e8d4dc8 --- /dev/null +++ b/core/services/metadata/base.py @@ -0,0 +1,67 @@ +""" +Metadata enrichment service for events. +Automatically populates domain-appropriate metadata based on entity type. + +All methods are async-safe, using sync_to_async with proper select_related +to avoid lazy loading issues in an async context. +""" +from typing import Dict, Any, Optional +import logging + +from core.services.metadata.service import SERVICE_EVENTS, enrich_service_domain +from core.services.metadata.project import PROJECT_EVENTS, enrich_project_domain +from core.services.metadata.account import ACCOUNT_EVENTS, enrich_account_domain +from core.services.metadata.customer import CUSTOMER_EVENTS, enrich_customer_domain + + +logger = logging.getLogger(__name__) + + +class MetadataEnricher: + """ + Enriches event metadata with domain-appropriate fields. + + All methods handle async context properly by loading entities + with select_related in a single sync_to_async call. + """ + + @staticmethod + async def enrich( + event_type: str, + entity_type: str, + entity_id: str, + existing_metadata: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """ + Enrich metadata based on the event type and entity. + + Args: + event_type: The EventTypeChoices value + entity_type: Type of entity (Service, Project, ServiceSession, etc.) + entity_id: UUID of the entity + existing_metadata: Any metadata already provided (takes precedence) + + Returns: + Enriched metadata dict (existing values are preserved) + """ + enriched = {} + + try: + # Determine domain and enrich accordingly + if event_type in SERVICE_EVENTS: + enriched = await enrich_service_domain(entity_type, entity_id) + elif event_type in PROJECT_EVENTS: + enriched = await enrich_project_domain(entity_type, entity_id) + elif event_type in ACCOUNT_EVENTS: + enriched = await enrich_account_domain(entity_type, entity_id) + elif event_type in CUSTOMER_EVENTS: + enriched = await enrich_customer_domain(entity_type, entity_id) + except Exception as e: + # Log but don't fail - enrichment is optional + logger.warning(f"Metadata enrichment failed for {entity_type}/{entity_id}: {e}") + + # Merge with existing metadata (existing takes precedence) + if existing_metadata: + enriched.update(existing_metadata) + + return enriched diff --git a/core/services/metadata/customer.py b/core/services/metadata/customer.py new file mode 100644 index 0000000..8acd107 --- /dev/null +++ b/core/services/metadata/customer.py @@ -0,0 +1,92 @@ +""" +Customer domain metadata enrichment. +Handles Customer, CustomerAddress, and CustomerContact entities. 
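+
+Illustrative enriched payload for a CustomerAddress event (values are
+placeholders):
+
+ {'customer_id': '<uuid>', 'customer_name': 'Globex', 'address': '12 Main St, Springfield'}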
+""" +from typing import Dict, Any, Set + +from asgiref.sync import sync_to_async + +from core.models.enums import EventTypeChoices + + +CUSTOMER_EVENTS: Set[str] = { + EventTypeChoices.CUSTOMER_CREATED, + EventTypeChoices.CUSTOMER_UPDATED, + EventTypeChoices.CUSTOMER_DELETED, + EventTypeChoices.CUSTOMER_STATUS_CHANGED, + EventTypeChoices.CUSTOMER_ADDRESS_CREATED, + EventTypeChoices.CUSTOMER_ADDRESS_UPDATED, + EventTypeChoices.CUSTOMER_ADDRESS_DELETED, + EventTypeChoices.CUSTOMER_CONTACT_CREATED, + EventTypeChoices.CUSTOMER_CONTACT_UPDATED, + EventTypeChoices.CUSTOMER_CONTACT_DELETED, +} + + +async def enrich_customer_domain(entity_type: str, entity_id: str) -> Dict[str, Any]: + """Enrich metadata for customer-domain events.""" + if entity_type == 'Customer': + return await _load_customer_metadata(entity_id) + elif entity_type == 'CustomerAddress': + return await _load_customer_address_metadata(entity_id) + elif entity_type == 'CustomerContact': + return await _load_customer_contact_metadata(entity_id) + return {} + + +async def _load_customer_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a Customer entity.""" + from core.models.customer import Customer + + def _load(): + try: + customer = Customer.objects.get(pk=entity_id) + return {'customer_name': customer.name or ''} + except Customer.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_customer_address_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a CustomerAddress entity.""" + from core.models.customer import CustomerAddress + + def _load(): + try: + address = CustomerAddress.objects.select_related('customer').get(pk=entity_id) + metadata = {'customer_id': str(address.customer_id)} + if address.customer: + metadata['customer_name'] = address.customer.name or '' + + # Address + address_parts = [] + if address.street_address: + address_parts.append(address.street_address) + if address.city: + address_parts.append(address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + return metadata + except CustomerAddress.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_customer_contact_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a CustomerContact entity.""" + from core.models.customer import CustomerContact + + def _load(): + try: + contact = CustomerContact.objects.select_related('customer').get(pk=entity_id) + metadata = {'customer_id': str(contact.customer_id)} + if contact.customer: + metadata['customer_name'] = contact.customer.name or '' + return metadata + except CustomerContact.DoesNotExist: + return {} + + return await sync_to_async(_load)() diff --git a/core/services/metadata/project.py b/core/services/metadata/project.py new file mode 100644 index 0000000..7c2c449 --- /dev/null +++ b/core/services/metadata/project.py @@ -0,0 +1,188 @@ +""" +Project domain metadata enrichment. +Handles Project, ProjectSession, and ProjectPunchlist entities. 
+""" +from typing import Dict, Any, Set + +from asgiref.sync import sync_to_async + +from core.models.enums import EventTypeChoices + + +PROJECT_EVENTS: Set[str] = { + EventTypeChoices.PROJECT_CREATED, + EventTypeChoices.PROJECT_UPDATED, + EventTypeChoices.PROJECT_STATUS_CHANGED, + EventTypeChoices.PROJECT_COMPLETED, + EventTypeChoices.PROJECT_CANCELLED, + EventTypeChoices.PROJECT_DISPATCHED, + EventTypeChoices.PROJECT_SESSION_OPENED, + EventTypeChoices.PROJECT_SESSION_CLOSED, + EventTypeChoices.PROJECT_SESSION_REVERTED, + EventTypeChoices.PROJECT_TASK_COMPLETED, + EventTypeChoices.PROJECT_TASK_UNCOMPLETED, + EventTypeChoices.PROJECT_PUNCHLIST_CREATED, + EventTypeChoices.PROJECT_PUNCHLIST_UPDATED, + EventTypeChoices.PROJECT_PUNCHLIST_DELETED, +} + + +async def enrich_project_domain(entity_type: str, entity_id: str) -> Dict[str, Any]: + """Enrich metadata for project-domain events.""" + if entity_type == 'Project': + return await _load_project_metadata(entity_id) + elif entity_type == 'ProjectSession': + return await _load_project_session_metadata(entity_id) + elif entity_type == 'ProjectPunchlist': + return await _load_project_punchlist_metadata(entity_id) + return {} + + +async def _load_project_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a Project entity.""" + from core.models.project import Project + + def _load(): + try: + project = Project.objects.select_related( + 'customer', + 'account_address', + 'account_address__account', + 'account_address__account__customer', + ).get(pk=entity_id) + + return _extract_project_metadata(project) + except Project.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_project_session_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a ProjectSession entity.""" + from core.models.session import ProjectSession + + def _load(): + try: + session = ProjectSession.objects.select_related( + 'project', + 'customer', + 'account_address', + 'account_address__account', + 'account_address__account__customer', + ).get(pk=entity_id) + + return _extract_project_session_metadata(session) + except ProjectSession.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_project_punchlist_metadata(entity_id: str) -> Dict[str, Any]: + """Load metadata from a ProjectPunchlist entity.""" + from core.models.project_punchlist import ProjectPunchlist + + def _load(): + try: + punchlist = ProjectPunchlist.objects.select_related( + 'project', + 'project__customer', + 'project__account_address', + 'project__account_address__account', + ).get(pk=entity_id) + + metadata = {'project_id': str(punchlist.project_id)} + if punchlist.project: + metadata.update(_extract_project_metadata(punchlist.project)) + return metadata + except ProjectPunchlist.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +def _extract_project_metadata(project) -> Dict[str, Any]: + """Extract metadata dict from a pre-loaded Project object.""" + metadata = {} + + # Project name + if project.name: + metadata['project_name'] = project.name + + # Date + if project.date: + metadata['date'] = str(project.date) + + # Projects can be affiliated with account (via account_address) OR direct customer + if project.account_address_id and project.account_address: + # Account-affiliated project + if project.account_address.account: + metadata['account_name'] = project.account_address.account.name or '' + if project.account_address.account.customer: + metadata['customer_name'] = 
project.account_address.account.customer.name or '' + + # Address from account_address + address_parts = [] + if project.account_address.street_address: + address_parts.append(project.account_address.street_address) + if project.account_address.city: + address_parts.append(project.account_address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + elif project.customer: + # Direct customer project (no account affiliation) + metadata['customer_name'] = project.customer.name or '' + # Use customer name as account_name fallback for template compatibility + metadata['account_name'] = project.customer.name or '' + + # Address from project fields + address_parts = [] + if project.street_address: + address_parts.append(project.street_address) + if project.city: + address_parts.append(project.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + return metadata + + +def _extract_project_session_metadata(session) -> Dict[str, Any]: + """Extract metadata dict from a preloaded ProjectSession object.""" + metadata = {} + + # Project ID for reference + if session.project_id: + metadata['project_id'] = str(session.project_id) + + # Project name and date + if session.project: + if session.project.name: + metadata['project_name'] = session.project.name + if session.project.date: + metadata['date'] = str(session.project.date) + + # Account/customer via account_address path + if session.account_address_id and session.account_address: + if session.account_address.account: + metadata['account_name'] = session.account_address.account.name or '' + if session.account_address.account.customer: + metadata['customer_name'] = session.account_address.account.customer.name or '' + + # Address + address_parts = [] + if session.account_address.street_address: + address_parts.append(session.account_address.street_address) + if session.account_address.city: + address_parts.append(session.account_address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + elif session.customer: + # Direct customer project + metadata['customer_name'] = session.customer.name or '' + metadata['account_name'] = session.customer.name or '' + + return metadata diff --git a/core/services/metadata/service.py b/core/services/metadata/service.py new file mode 100644 index 0000000..df9cd16 --- /dev/null +++ b/core/services/metadata/service.py @@ -0,0 +1,146 @@ +""" +Service domain metadata enrichment. +Handles Service and ServiceSession entities. 
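+
+A minimal usage sketch (keys come from the extractors below; values are
+illustrative):
+
+    metadata = await enrich_service_domain('Service', str(service_pk))
+    # e.g. {'date': '2026-01-30', 'address': '123 Main St, Springfield',
+    #       'account_name': 'Acme Facilities', 'customer_name': 'Acme Corp'}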
+""" +from typing import Dict, Any, Set + +from asgiref.sync import sync_to_async + +from core.models.enums import EventTypeChoices + + +SERVICE_EVENTS: Set[str] = { + EventTypeChoices.SERVICE_CREATED, + EventTypeChoices.SERVICE_UPDATED, + EventTypeChoices.SERVICE_DELETED, + EventTypeChoices.SERVICE_STATUS_CHANGED, + EventTypeChoices.SERVICE_COMPLETED, + EventTypeChoices.SERVICE_CANCELLED, + EventTypeChoices.SERVICE_TEAM_ASSIGNED, + EventTypeChoices.SERVICE_TEAM_UNASSIGNED, + EventTypeChoices.SERVICE_DISPATCHED, + EventTypeChoices.SERVICE_SESSION_OPENED, + EventTypeChoices.SERVICE_SESSION_CLOSED, + EventTypeChoices.SERVICE_SESSION_REVERTED, + EventTypeChoices.SERVICE_TASK_COMPLETED, + EventTypeChoices.SERVICE_TASK_UNCOMPLETED, +} + + +async def enrich_service_domain(entity_type: str, entity_id: str) -> Dict[str, Any]: + """Enrich metadata for service-domain events.""" + if entity_type == 'Service': + return await _load_service_metadata(entity_id) + elif entity_type == 'ServiceSession': + return await _load_service_session_metadata(entity_id) + return {} + + +async def _load_service_metadata(entity_id: str) -> Dict[str, Any]: + """ + Load metadata from a Service entity. + + NOTE: Services use account_address -> account path. + The direct service.account FK is deprecated. + """ + from core.models.service import Service + + def _load(): + try: + service = Service.objects.select_related( + 'account_address', + 'account_address__account', + 'account_address__account__customer', + ).get(pk=entity_id) + + return _extract_service_metadata(service) + except Service.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +async def _load_service_session_metadata(entity_id: str) -> Dict[str, Any]: + """ + Load metadata from a ServiceSession entity. + + NOTE: Uses account_address -> account path, not deprecated direct FKs. 
+ """ + from core.models.session import ServiceSession + + def _load(): + try: + session = ServiceSession.objects.select_related( + 'service', + 'account_address', + 'account_address__account', + 'account_address__account__customer', + ).get(pk=entity_id) + + return _extract_service_session_metadata(session) + except ServiceSession.DoesNotExist: + return {} + + return await sync_to_async(_load)() + + +def _extract_service_metadata(service) -> Dict[str, Any]: + """Extract metadata dict from a pre-loaded Service object.""" + metadata = {} + + # Date + if service.date: + metadata['date'] = str(service.date) + + # Traverse account_address -> account (NOT the deprecated 'service.account') + if service.account_address_id and service.account_address: + # Address + address_parts = [] + if service.account_address.street_address: + address_parts.append(service.account_address.street_address) + if service.account_address.city: + address_parts.append(service.account_address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + # Account name (via account_address) + if service.account_address.account: + metadata['account_name'] = service.account_address.account.name or '' + # Customer name (via account) + if service.account_address.account.customer: + metadata['customer_name'] = service.account_address.account.customer.name or '' + + return metadata + + +def _extract_service_session_metadata(session) -> Dict[str, Any]: + """Extract metadata dict from a preloaded ServiceSession object.""" + metadata = {} + + # Service ID for reference + if session.service_id: + metadata['service_id'] = str(session.service_id) + + # Date from the service + if session.service and session.service.date: + metadata['date'] = str(session.service.date) + + # Traverse account_address -> account (NOT deprecated session.account) + if session.account_address_id and session.account_address: + # Address + address_parts = [] + if session.account_address.street_address: + address_parts.append(session.account_address.street_address) + if session.account_address.city: + address_parts.append(session.account_address.city) + if address_parts: + metadata['address'] = ', '.join(address_parts) + + # Account name (via account_address) + if session.account_address.account: + metadata['account_name'] = session.account_address.account.name or '' + # Customer name (via account) + if session.account_address.account.customer: + metadata['customer_name'] = session.account_address.account.customer.name or '' + + return metadata diff --git a/core/services/monitoring/__init__.py b/core/services/monitoring/__init__.py new file mode 100644 index 0000000..8e7072f --- /dev/null +++ b/core/services/monitoring/__init__.py @@ -0,0 +1,14 @@ +""" +Monitoring service module for scheduled operational commands. +""" +from core.services.monitoring.base import BaseMonitoringCommand, MonitoringResult +from core.services.monitoring.registry import MonitoringCommandRegistry + +# Import commands to trigger registration +from core.services.monitoring.commands import * # noqa + +__all__ = [ + 'BaseMonitoringCommand', + 'MonitoringResult', + 'MonitoringCommandRegistry', +] diff --git a/core/services/monitoring/base.py b/core/services/monitoring/base.py new file mode 100644 index 0000000..a706a6c --- /dev/null +++ b/core/services/monitoring/base.py @@ -0,0 +1,84 @@ +""" +Base classes for monitoring commands. +All monitoring commands are inherited from BaseMonitoringCommand. 
+""" +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import date, datetime, timedelta +from typing import Any, Dict, List, Optional +from zoneinfo import ZoneInfo +import uuid + +from core.models.enums import EventTypeChoices + + +@dataclass +class MonitoringResult: + """Result from a monitoring command execution.""" + command_name: str + execution_date: date + summary: Dict[str, Any] + event_type: EventTypeChoices + entity_id: str = field(default_factory=lambda: str(uuid.uuid4())) + emails_sent: int = 0 + details: List[Dict[str, Any]] = field(default_factory=list) + metadata: Optional[Dict[str, Any]] = None + + +class BaseMonitoringCommand(ABC): + """ + Abstract base class for monitoring commands. + + Subclasses must implement: + - name: Unique command identifier + - event_type: EventTypeChoices for audit logging + - execute(): Main command logic + """ + + # Default timezone for all monitoring commands + DEFAULT_TIMEZONE = ZoneInfo('America/New_York') + + @property + @abstractmethod + def name(self) -> str: + """Unique identifier for this command.""" + pass + + @property + @abstractmethod + def event_type(self) -> EventTypeChoices: + """Event type to publish for audit logging.""" + pass + + @property + def description(self) -> str: + """Human-readable description of what this command does.""" + return f"Monitoring command: {self.name}" + + @abstractmethod + def execute(self, **kwargs) -> MonitoringResult: + """ + Execute the monitoring command. + + Args: + **kwargs: Command-specific arguments + + Returns: + MonitoringResult with execution summary + """ + pass + + def get_previous_day(self, timezone=None) -> date: + """ + Get the previous day's date in the specified timezone. + + Args: + timezone: pytz timezone (defaults to America/New_York) + + Returns: + date object for previous day + """ + tz = timezone or self.DEFAULT_TIMEZONE + now = datetime.now(tz) + yesterday = now - timedelta(days=1) + return yesterday.date() diff --git a/core/services/monitoring/commands/__init__.py b/core/services/monitoring/commands/__init__.py new file mode 100644 index 0000000..43cf18b --- /dev/null +++ b/core/services/monitoring/commands/__init__.py @@ -0,0 +1,12 @@ +""" +Monitoring commands module. +Import all commands here to ensure they are registered with the registry. +""" +from core.services.monitoring.commands.incomplete_work_reminder import ( + IncompleteWorkReminderCommand, +) +from core.services.monitoring.commands.nightly_assignments import ( + NightlyAssignmentsCommand, +) + +__all__ = ['IncompleteWorkReminderCommand', 'NightlyAssignmentsCommand'] diff --git a/core/services/monitoring/commands/incomplete_work_reminder.py b/core/services/monitoring/commands/incomplete_work_reminder.py new file mode 100644 index 0000000..9d1e7be --- /dev/null +++ b/core/services/monitoring/commands/incomplete_work_reminder.py @@ -0,0 +1,275 @@ +""" +Incomplete work reminder monitoring command. +Sends email reminders to team members about work from the previous day +that is still in SCHEDULED or IN_PROGRESS status. 
+""" +from collections import defaultdict +from datetime import date, datetime, timedelta +from typing import Dict, Any, List +import logging + +from django.conf import settings +from django.template.loader import render_to_string + +from core.models.service import Service +from core.models.project import Project +from core.models.enums import ServiceChoices, RoleChoices, EventTypeChoices +from core.services.email_service import get_emailer_client, EmailerServiceError +from core.services.monitoring.base import BaseMonitoringCommand, MonitoringResult +from core.services.monitoring.registry import MonitoringCommandRegistry + +logger = logging.getLogger(__name__) + +# IMPORTANT: Weekend services are identified by this note text, which is set +# in core/graphql/mutations/service.py during service generation. +# If that note text changes, this constant must be updated. +WEEKEND_SERVICE_NOTE = "Weekend service window" + + +@MonitoringCommandRegistry.register +class IncompleteWorkReminderCommand(BaseMonitoringCommand): + """ + Send reminders to non-admin team members about incomplete work from yesterday. + + Queries services and projects from the previous day that are still + SCHEDULED or IN_PROGRESS, groups them by assigned team member, and + sends each team member an email listing their incomplete items. + """ + + @property + def name(self) -> str: + return "incomplete_work_reminder" + + @property + def event_type(self) -> EventTypeChoices: + return EventTypeChoices.MONITORING_INCOMPLETE_WORK_REMINDER + + @property + def description(self) -> str: + return "Send email reminders for incomplete work from previous day" + + def execute(self, target_date: date = None, **kwargs) -> MonitoringResult: + """ + Execute the incomplete work reminder command. + + Args: + target_date: Date to query (defaults to previous day) + + Returns: + MonitoringResult with execution summary + """ + query_date = target_date or self.get_previous_day() + + # Query incomplete services from the target date + incomplete_services = Service.objects.filter( + date=query_date, + status__in=[ServiceChoices.SCHEDULED, ServiceChoices.IN_PROGRESS] + ).select_related( + 'account_address', + 'account_address__account' + ).prefetch_related('team_members') + + # Weekend service handling: + # - Saturday morning (yesterday=Friday): SKIP Friday weekend services (give them the weekend) + # - Monday morning (yesterday=Sunday): ALSO include Friday weekend services still incomplete + weekday = query_date.weekday() # Day we're querying (yesterday) + weekend_services = [] + + if weekday == 4: # Yesterday was Friday - exclude weekend services + incomplete_services = incomplete_services.exclude( + notes__icontains=WEEKEND_SERVICE_NOTE + ) + elif weekday == 6: # Yesterday was Sunday - also check Friday's weekend services + friday_date = query_date - timedelta(days=2) # Sunday - 2 = Friday + weekend_services = list(Service.objects.filter( + date=friday_date, + status__in=[ServiceChoices.SCHEDULED, ServiceChoices.IN_PROGRESS], + notes__icontains=WEEKEND_SERVICE_NOTE + ).select_related( + 'account_address', + 'account_address__account' + ).prefetch_related('team_members')) + + # Combine regular and weekend services + all_incomplete_services = list(incomplete_services) + weekend_services + + # Query incomplete projects from the target date + incomplete_projects = Project.objects.filter( + date=query_date, + status__in=[ServiceChoices.SCHEDULED, ServiceChoices.IN_PROGRESS] + ).select_related( + 'customer', + 'account_address', + 'account_address__account' 
+ ).prefetch_related('team_members') + + # Build map of team_member -> their incomplete items + team_member_items: Dict[Any, Dict[str, List]] = defaultdict( + lambda: {'services': [], 'projects': []} + ) + + for service in all_incomplete_services: + for member in service.team_members.exclude(role=RoleChoices.ADMIN): + team_member_items[member]['services'].append(service) + + for project in incomplete_projects: + for member in project.team_members.exclude(role=RoleChoices.ADMIN): + team_member_items[member]['projects'].append(project) + + # Send emails to each team member + emails_sent = 0 + email_errors = [] + emailer = get_emailer_client() + + for team_member, items in team_member_items.items(): + if not team_member.email: + logger.warning( + f"Team member {team_member.id} has no email address, skipping" + ) + continue + + # Skip if no items (shouldn't happen, but safety check) + if not items['services'] and not items['projects']: + continue + + try: + email_html = self._render_email_html(team_member, items, query_date) + emailer.send_email( + to=[team_member.email], + subject=f"Incomplete Work Reminder - {query_date.strftime('%B %d, %Y')}", + body=email_html, + impersonate_user=settings.EMAILER_DEFAULT_SENDER + ) + emails_sent += 1 + logger.info( + f"Sent incomplete work reminder to {team_member.email}", + extra={ + 'team_member_id': str(team_member.id), + 'services_count': len(items['services']), + 'projects_count': len(items['projects']), + } + ) + except EmailerServiceError as e: + error_msg = f"Failed to send email to {team_member.email}: {e}" + logger.error(error_msg) + email_errors.append(error_msg) + + # Build summary + summary = { + 'date': str(query_date), + 'incomplete_services_count': incomplete_services.count(), + 'weekend_services_count': len(weekend_services), + 'incomplete_projects_count': incomplete_projects.count(), + 'team_members_notified': len(team_member_items), + 'emails_sent': emails_sent, + 'email_errors': len(email_errors), + } + + logger.info( + f"Incomplete work reminder completed", + extra=summary + ) + + return MonitoringResult( + command_name=self.name, + execution_date=query_date, + summary=summary, + event_type=self.event_type, + emails_sent=emails_sent, + details=email_errors if email_errors else [], + metadata={ + 'error_details': email_errors, + } + ) + + def _render_email_html( + self, + team_member, + items: Dict[str, List], + query_date: date + ) -> str: + """Render the HTML email for a team member using Django template.""" + # Get team member's name + recipient_name = team_member.first_name or 'there' + if team_member.first_name and team_member.last_name: + recipient_name = f"{team_member.first_name} {team_member.last_name}" + + # Build service items for template + services_data = [] + for service in items.get('services', []): + # Build address string + address_parts = [] + if service.account_address: + if service.account_address.street_address: + address_parts.append(service.account_address.street_address) + if service.account_address.city: + address_parts.append(service.account_address.city) + address_str = ', '.join(address_parts) if address_parts else 'No address' + + # Build account name (via account_address -> account, not deprecated service.account) + account_name = 'Unknown Account' + if service.account_address and service.account_address.account: + account_name = service.account_address.account.name or 'Unknown Account' + + # Track if this is a weekend service (from Friday, shown on Monday) + is_weekend_service = service.date < query_date + 
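+            # The flag above marks Friday-dated weekend services carried into a
+            # Monday run (see the weekend handling in execute()); the email
+            # template uses it to annotate those rows.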
+ services_data.append({ + 'account_name': account_name, + 'address': address_str, + 'status': service.status, + 'status_display': service.status.replace('_', ' ').title(), + 'is_weekend_service': is_weekend_service, + }) + + # Build project items for template + projects_data = [] + for project in items.get('projects', []): + project_name = project.name or 'Unnamed Project' + + # If project is affiliated with an account (via account_address), show account name + # Otherwise, show customer name + if project.account_address_id and project.account_address and project.account_address.account: + affiliation_name = project.account_address.account.name or 'Unknown Account' + elif project.customer: + affiliation_name = project.customer.name or 'Unknown Customer' + else: + affiliation_name = 'Unknown' + + # Build address string + address_str = '' + if project.account_address: + address_parts = [] + if project.account_address.street_address: + address_parts.append(project.account_address.street_address) + if project.account_address.city: + address_parts.append(project.account_address.city) + address_str = ', '.join(address_parts) + elif project.street_address: + address_parts = [project.street_address] + if project.city: + address_parts.append(project.city) + address_str = ', '.join(address_parts) + + projects_data.append({ + 'name': project_name, + 'affiliation_name': affiliation_name, + 'address': address_str, + 'status': project.status, + 'status_display': project.status.replace('_', ' ').title(), + }) + + # Check if any services are weekend services (for conditional messaging) + has_weekend_services = any(s.get('is_weekend_service') for s in services_data) + + context = { + 'recipient_name': recipient_name, + 'recipient_email': team_member.email, + 'date_formatted': query_date.strftime('%B %d, %Y'), + 'services': services_data, + 'projects': projects_data, + 'has_weekend_services': has_weekend_services, + 'current_year': datetime.now().year, + } + + return render_to_string('email/incomplete_work_reminder.html', context) diff --git a/core/services/monitoring/commands/nightly_assignments.py b/core/services/monitoring/commands/nightly_assignments.py new file mode 100644 index 0000000..c2cb008 --- /dev/null +++ b/core/services/monitoring/commands/nightly_assignments.py @@ -0,0 +1,276 @@ +""" +Nightly assignments monitoring command. +Sends email notifications to team members about their scheduled work for the night. +""" +from collections import defaultdict +from datetime import date, datetime, timedelta +from typing import Dict, Any, List +import logging + +from django.conf import settings +from django.template.loader import render_to_string + +from core.models.service import Service +from core.models.project import Project +from core.models.enums import ServiceChoices, RoleChoices, EventTypeChoices +from core.services.email_service import get_emailer_client, EmailerServiceError +from core.services.monitoring.base import BaseMonitoringCommand, MonitoringResult +from core.services.monitoring.registry import MonitoringCommandRegistry + +logger = logging.getLogger(__name__) + +# IMPORTANT: Weekend services are identified by this note text, which is set +# in core/graphql/mutations/service.py during service generation. +# If that note text changes, this constant must be updated. +WEEKEND_SERVICE_NOTE = "Weekend service window" + + +@MonitoringCommandRegistry.register +class NightlyAssignmentsCommand(BaseMonitoringCommand): + """ + Send nightly assignment notifications to non-admin team members. 
+ + Queries services and projects scheduled for today and sends each team + member an email listing their assignments for the night. + """ + + @property + def name(self) -> str: + return "nightly_assignments" + + @property + def event_type(self) -> EventTypeChoices: + return EventTypeChoices.MONITORING_NIGHTLY_ASSIGNMENTS + + @property + def description(self) -> str: + return "Send email notifications for tonight's scheduled work" + + def execute(self, target_date: date = None, **kwargs) -> MonitoringResult: + """ + Execute the nightly assignments command. + + Args: + target_date: Date to query (defaults to current day) + + Returns: + MonitoringResult with execution summary + """ + query_date = target_date or self.get_current_day() + + # Query scheduled services for the target date + scheduled_services = Service.objects.filter( + date=query_date, + status=ServiceChoices.SCHEDULED + ).select_related( + 'account_address', + 'account_address__account' + ).prefetch_related('team_members') + + # On Saturday/Sunday, also include Friday weekend services still SCHEDULED + weekend_services = [] + weekday = query_date.weekday() + if weekday in (5, 6): # Saturday=5, Sunday=6 + friday_date = query_date - timedelta(days=(weekday - 4)) + weekend_services = list(Service.objects.filter( + date=friday_date, + status=ServiceChoices.SCHEDULED, + notes__icontains=WEEKEND_SERVICE_NOTE + ).select_related( + 'account_address', + 'account_address__account' + ).prefetch_related('team_members')) + + # Combine regular and weekend services + all_services = list(scheduled_services) + weekend_services + + # Query scheduled projects for the target date + scheduled_projects = Project.objects.filter( + date=query_date, + status=ServiceChoices.SCHEDULED + ).select_related( + 'customer', + 'account_address', + 'account_address__account' + ).prefetch_related('team_members') + + # Build map of team_member -> their scheduled items + team_member_items: Dict[Any, Dict[str, List]] = defaultdict( + lambda: {'services': [], 'projects': []} + ) + + for service in all_services: + for member in service.team_members.exclude(role=RoleChoices.ADMIN): + team_member_items[member]['services'].append(service) + + for project in scheduled_projects: + for member in project.team_members.exclude(role=RoleChoices.ADMIN): + team_member_items[member]['projects'].append(project) + + # Send emails to each team member + emails_sent = 0 + email_errors = [] + emailer = get_emailer_client() + + for team_member, items in team_member_items.items(): + if not team_member.email: + logger.warning( + f"Team member {team_member.id} has no email address, skipping" + ) + continue + + # Skip if no items (shouldn't happen, but safety check) + if not items['services'] and not items['projects']: + continue + + try: + email_html = self._render_email_html(team_member, items, query_date) + emailer.send_email( + to=[team_member.email], + subject=f"Tonight's Assignments - {query_date.strftime('%B %d, %Y')}", + body=email_html, + impersonate_user=settings.EMAILER_DEFAULT_SENDER + ) + emails_sent += 1 + logger.info( + f"Sent nightly assignments to {team_member.email}", + extra={ + 'team_member_id': str(team_member.id), + 'services_count': len(items['services']), + 'projects_count': len(items['projects']), + } + ) + except EmailerServiceError as e: + error_msg = f"Failed to send email to {team_member.email}: {e}" + logger.error(error_msg) + email_errors.append(error_msg) + + # Build summary + summary = { + 'date': str(query_date), + 'scheduled_services_count': 
scheduled_services.count(), + 'weekend_services_count': len(weekend_services), + 'scheduled_projects_count': scheduled_projects.count(), + 'team_members_notified': len(team_member_items), + 'emails_sent': emails_sent, + 'email_errors': len(email_errors), + } + + logger.info( + f"Nightly assignments completed", + extra=summary + ) + + return MonitoringResult( + command_name=self.name, + execution_date=query_date, + summary=summary, + event_type=self.event_type, + emails_sent=emails_sent, + details=email_errors if email_errors else [], + metadata={ + 'error_details': email_errors, + } + ) + + def get_current_day(self, timezone=None) -> date: + """ + Get the current day's date in the specified timezone. + + Args: + timezone: pytz timezone (defaults to America/New_York) + + Returns: + date object for current day + """ + tz = timezone or self.DEFAULT_TIMEZONE + now = datetime.now(tz) + return now.date() + + def _render_email_html( + self, + team_member, + items: Dict[str, List], + query_date: date + ) -> str: + """Render the HTML email for a team member using Django template.""" + # Get team member's name + recipient_name = team_member.first_name or 'there' + if team_member.first_name and team_member.last_name: + recipient_name = f"{team_member.first_name} {team_member.last_name}" + + # Build service items for template + services_data = [] + for service in items.get('services', []): + # Build address string + address_parts = [] + if service.account_address: + if service.account_address.street_address: + address_parts.append(service.account_address.street_address) + if service.account_address.city: + address_parts.append(service.account_address.city) + address_str = ', '.join(address_parts) if address_parts else 'No address' + + # Build account name (via account_address -> account, not deprecated service.account) + account_name = 'Unknown Account' + if service.account_address and service.account_address.account: + account_name = service.account_address.account.name or 'Unknown Account' + + # Track if this is a weekend service (from Friday, shown on Sat/Sun) + is_weekend_service = service.date < query_date + + services_data.append({ + 'account_name': account_name, + 'address': address_str, + 'is_weekend_service': is_weekend_service, + }) + + # Build project items for template + projects_data = [] + for project in items.get('projects', []): + project_name = project.name or 'Unnamed Project' + + # If project is affiliated with an account (via account_address), show account name + # Otherwise, show customer name + if project.account_address_id and project.account_address and project.account_address.account: + affiliation_name = project.account_address.account.name or 'Unknown Account' + elif project.customer: + affiliation_name = project.customer.name or 'Unknown Customer' + else: + affiliation_name = 'Unknown' + + # Build address string + address_str = '' + if project.account_address: + address_parts = [] + if project.account_address.street_address: + address_parts.append(project.account_address.street_address) + if project.account_address.city: + address_parts.append(project.account_address.city) + address_str = ', '.join(address_parts) + elif project.street_address: + address_parts = [project.street_address] + if project.city: + address_parts.append(project.city) + address_str = ', '.join(address_parts) + + projects_data.append({ + 'name': project_name, + 'affiliation_name': affiliation_name, + 'address': address_str, + }) + + # Check if any services are weekend services (for conditional 
messaging) + has_weekend_services = any(s.get('is_weekend_service') for s in services_data) + + context = { + 'recipient_name': recipient_name, + 'recipient_email': team_member.email, + 'date_formatted': query_date.strftime('%B %d, %Y'), + 'services': services_data, + 'projects': projects_data, + 'has_weekend_services': has_weekend_services, + 'current_year': datetime.now().year, + } + + return render_to_string('email/nightly_assignments.html', context) diff --git a/core/services/monitoring/registry.py b/core/services/monitoring/registry.py new file mode 100644 index 0000000..ad91486 --- /dev/null +++ b/core/services/monitoring/registry.py @@ -0,0 +1,78 @@ +""" +Monitoring command registry for extensible command discovery and execution. +""" +import logging +from typing import Dict, Optional, Type + +from core.services.monitoring.base import BaseMonitoringCommand, MonitoringResult + +logger = logging.getLogger(__name__) + + +class MonitoringCommandRegistry: + """ + Singleton registry for monitoring commands. + + Usage: + # Register a command + @MonitoringCommandRegistry.register + class MyCommand(BaseMonitoringCommand): + ... + + # Execute a command + result = MonitoringCommandRegistry.execute('my_command') + """ + _commands: Dict[str, Type[BaseMonitoringCommand]] = {} + + @classmethod + def register(cls, command_class: Type[BaseMonitoringCommand]) -> Type[BaseMonitoringCommand]: + """ + Register a monitoring command class. + Can be used as a decorator. + + Args: + command_class: BaseMonitoringCommand subclass + + Returns: + The command class (unchanged) + """ + instance = command_class() + cls._commands[instance.name] = command_class + logger.info(f"Registered monitoring command: {instance.name}") + return command_class + + @classmethod + def get_command(cls, name: str) -> Optional[Type[BaseMonitoringCommand]]: + """Get a command class by name.""" + return cls._commands.get(name) + + @classmethod + def execute(cls, name: str, **kwargs) -> MonitoringResult: + """ + Execute a registered command by name. + + Args: + name: Command name + **kwargs: Arguments passed to command execute() + + Returns: + MonitoringResult from command execution + + Raises: + ValueError: If command not found + """ + command_class = cls.get_command(name) + if not command_class: + raise ValueError(f"Monitoring command not found: {name}") + + command = command_class() + logger.info(f"Executing monitoring command: {name}") + return command.execute(**kwargs) + + @classmethod + def list_commands(cls) -> Dict[str, str]: + """List all registered commands with descriptions.""" + return { + name: cls._commands[name]().description + for name in cls._commands + } diff --git a/core/services/notifications.py b/core/services/notifications.py new file mode 100644 index 0000000..d868700 --- /dev/null +++ b/core/services/notifications.py @@ -0,0 +1,249 @@ +""" +Notification processing service. +Matches events against rules and generates notifications for recipients. +""" +from typing import List, Dict, Any + +from django.contrib.contenttypes.models import ContentType + +from core.models.enums import NotificationStatusChoices, DeliveryStatusChoices +from core.models.events import Event, NotificationRule, Notification, NotificationDelivery +from core.models.profile import TeamProfile + + +class NotificationProcessor: + """ + Processes events and generates notifications based on rules. 
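+
+    A minimal usage sketch (variable names are illustrative; event creation is
+    assumed to happen upstream in the event pipeline):
+
+        event = Event.objects.get(pk=event_id)
+        notifications = NotificationProcessor.process_event(event)
+        # one Notification per (matched rule, recipient), each with queued
+        # NotificationDelivery rows per configured channel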
+ """ + + @staticmethod + def process_event(event: Event) -> List[Notification]: + """ + Process an event and generate notifications based on active rules. + + Args: + event: The Event instance to process + + Returns: + List of created Notification instances + """ + # Find active rules matching this event type + matching_rules = NotificationRule.objects.filter( + is_active=True, + event_types__contains=[event.event_type] + ) + + notifications = [] + + for rule in matching_rules: + # Check conditions if any + if rule.conditions and not NotificationProcessor._check_conditions(event, rule.conditions): + continue + + # Get recipients for this rule + recipients = NotificationProcessor._get_recipients(rule, event) + + # Generate notification subject and body + subject = NotificationProcessor._render_template(rule.template_subject, event) + body = NotificationProcessor._render_template(rule.template_body, event) + + # Create action URL if applicable + action_url = NotificationProcessor._generate_action_url(event) + + # Create notifications for each recipient + for recipient in recipients: + notification = NotificationProcessor._create_notification( + event=event, + rule=rule, + recipient=recipient, + subject=subject, + body=body, + action_url=action_url + ) + notifications.append(notification) + + # Queue delivery tasks for each channel + for channel in rule.channels: + NotificationProcessor._create_delivery(notification, channel) + + return notifications + + @staticmethod + def _check_conditions(event: Event, conditions: Dict[str, Any]) -> bool: + """ + Check if event metadata matches rule conditions. + + Args: + event: Event to check + conditions: Conditions from NotificationRule + + Returns: + True if conditions are met, False otherwise + """ + for key, value in conditions.items(): + if event.metadata.get(key) != value: + return False + return True + + @staticmethod + def _get_recipients(rule: NotificationRule, event: Event) -> List[Any]: + """ + Get list of recipients based on rule configuration. + + Args: + rule: NotificationRule to process + event: Event being processed + + Returns: + List of profile instances (TeamProfile or CustomerProfile) + """ + recipients = [] + + # If specific profiles are targeted, use them + team_profiles = list(rule.target_team_profiles.all()) + customer_profiles = list(rule.target_customer_profiles.all()) + + if team_profiles or customer_profiles: + recipients.extend(team_profiles) + recipients.extend(customer_profiles) + # Otherwise, use role-based targeting + elif rule.target_roles: + recipients.extend( + TeamProfile.objects.filter(role__in=rule.target_roles) + ) + # If no specific targeting, notify all team admins by default + else: + from core.models.enums import RoleChoices + recipients.extend( + TeamProfile.objects.filter(role=RoleChoices.ADMIN) + ) + + return recipients + + @staticmethod + def _render_template(template: str, event: Event) -> str: + """ + Render a template string with event data. 
+
+        Args:
+            template: Template string (supports simple variable substitution)
+            event: Event instance
+
+        Returns:
+            Rendered string
+        """
+        if not template:
+            # Generate default message
+            return NotificationProcessor._generate_default_message(event)
+
+        # Simple template variable substitution
+        # Supports: {event_type}, {entity_type}, {entity_id}, and metadata fields
+        context = {
+            'event_type': event.event_type,  # Use raw value instead of get_event_type_display()
+            'entity_type': event.entity_type,
+            'entity_id': str(event.entity_id),
+            **event.metadata
+        }
+
+        try:
+            return template.format(**context)
+        except KeyError:
+            # If the template has unknown variables, retry with visible
+            # '[key not available]' placeholders for the missing keys
+            import re
+            missing_keys = re.findall(r'\{(\w+)\}', template)
+            for key in missing_keys:
+                if key not in context:
+                    context[key] = f'[{key} not available]'
+
+            try:
+                return template.format(**context)
+            except (KeyError, IndexError, ValueError):
+                # format() can still fail on malformed templates; return the
+                # template as-is rather than raising
+                return template
+
+    @staticmethod
+    def _generate_default_message(event: Event) -> str:
+        """Generate a default notification message for an event."""
+        return f"{event.event_type}: {event.entity_type} {event.entity_id}"
+
+    @staticmethod
+    def _generate_action_url(event: Event) -> str:
+        """
+        Generate action URL for the event entity.
+
+        Args:
+            event: Event instance
+
+        Returns:
+            URL string (can be empty)
+        """
+        # This would ideally be configured based on your frontend routes
+        entity_type_map = {
+            'Project': f'/projects/{event.entity_id}',
+            'Report': f'/reports/{event.entity_id}',
+            'Invoice': f'/invoices/{event.entity_id}',
+        }
+        return entity_type_map.get(event.entity_type, '')
+
+    @staticmethod
+    def _create_notification(
+        event: Event,
+        rule: NotificationRule,
+        recipient: Any,
+        subject: str,
+        body: str,
+        action_url: str
+    ) -> Notification:
+        """
+        Create a Notification instance.
+
+        Args:
+            event: Event that triggered the notification
+            rule: Rule that matched
+            recipient: Profile receiving the notification
+            subject: Notification subject
+            body: Notification body
+            action_url: Action URL
+
+        Returns:
+            Created Notification instance
+        """
+        content_type = ContentType.objects.get_for_model(recipient)
+
+        notification = Notification.objects.create(
+            event=event,
+            rule=rule,
+            recipient_content_type=content_type,
+            recipient_object_id=recipient.id,
+            status=NotificationStatusChoices.PENDING,
+            subject=subject,
+            body=body,
+            action_url=action_url
+        )
+
+        return notification
+
+    @staticmethod
+    def _create_delivery(notification: Notification, channel: str) -> NotificationDelivery:
+        """
+        Create a NotificationDelivery instance and queue delivery task.
+
+        Args:
+            notification: Notification to deliver
+            channel: Delivery channel
+
+        Returns:
+            Created NotificationDelivery instance
+        """
+        delivery = NotificationDelivery.objects.create(
+            notification=notification,
+            channel=channel,
+            status=DeliveryStatusChoices.PENDING
+        )
+
+        # Queue the appropriate delivery task
+        from core.tasks.notifications import deliver_notification
+        deliver_notification.delay(str(delivery.id))
+
+        return delivery
diff --git a/core/services/scope_builder.py b/core/services/scope_builder.py
new file mode 100644
index 0000000..e9d70f6
--- /dev/null
+++ b/core/services/scope_builder.py
@@ -0,0 +1,79 @@
+from typing import Dict, Any, List
+from django.db import transaction
+from core.models.project_scope_template import ProjectScopeTemplate, ProjectAreaTemplate, ProjectTaskTemplate
+from core.models.scope_template import ScopeTemplate, AreaTemplate, TaskTemplate
+from core.models.enums import TaskFrequencyChoices as Freq
+
+TemplatePayload = Dict[str, Any]
+
+
+@transaction.atomic
+def build_scope_template(payload: TemplatePayload) -> ScopeTemplate:
+    tpl = ScopeTemplate.objects.create(
+        name=payload["name"],
+        description=payload.get("description") or "",
+        # nested .get(...) rather than 'or' so an explicit False is preserved
+        is_active=payload.get("is_active", payload.get("isActive", True)),
+    )
+
+    # Support both camelCase (areaTemplates) and snake_case (areas) for flexibility
+    areas = payload.get("areaTemplates") or payload.get("areas", [])
+    for area_data in areas:
+        area = AreaTemplate.objects.create(
+            scope_template=tpl,
+            name=area_data["name"],
+            order=area_data.get("order", 0),
+        )
+
+        # Support both camelCase (taskTemplates) and snake_case (tasks) for flexibility
+        tasks_data = area_data.get("taskTemplates") or area_data.get("tasks", [])
+        tasks: List[TaskTemplate] = []
+        for t in tasks_data:
+            # Normalize frequency to lowercase to match enum values
+            frequency = t.get("frequency", Freq.AS_NEEDED)
+            if isinstance(frequency, str):
+                frequency = frequency.lower()
+
+            tasks.append(TaskTemplate(
+                area_template=area,
+                description=t["description"],
+                checklist_description=t.get("checklist_description") or t.get("checklistDescription") or "",
+                frequency=frequency,
+                order=t.get("order", 0),
+                # nested .get(...) so explicit False/0 values are not overridden
+                is_conditional=t.get("is_conditional", t.get("isConditional", False)),
+                estimated_minutes=t.get("estimated_minutes", t.get("estimatedMinutes")),
+            ))
+        if tasks:
+            TaskTemplate.objects.bulk_create(tasks)
+
+    return tpl
+
+
+@transaction.atomic
+def build_project_scope_template(payload: Dict[str, Any]) -> ProjectScopeTemplate:
+    tpl = ProjectScopeTemplate.objects.create(
+        name=payload["name"],
+        description=payload.get("description") or "",
+        is_active=payload.get("is_active", True),
+    )
+
+    for cat_data in payload.get("categories", []):
+        category = ProjectAreaTemplate.objects.create(
+            scope_template=tpl,
+            name=cat_data["name"],
+            order=cat_data.get("order", 0),
+        )
+
+        tasks: List[ProjectTaskTemplate] = []
+        for t in cat_data.get("tasks", []):
+            tasks.append(ProjectTaskTemplate(
+                area_template=category,
+                description=t["description"],
+                checklist_description=t.get("checklist_description") or "",
+                order=t.get("order", 0),
+                estimated_minutes=t.get("estimated_minutes"),
+            ))
+
+        if tasks:
+            ProjectTaskTemplate.objects.bulk_create(tasks)
+
+    return tpl
diff --git a/core/services/session_service.py b/core/services/session_service.py
new file mode 100644
index 0000000..0f18664
--- /dev/null
+++ b/core/services/session_service.py
@@ -0,0 +1,346 @@
+from dataclasses import dataclass
+from uuid import UUID
+from
django.core.exceptions import ValidationError +from django.db import transaction +from django.utils import timezone + +from core.models.enums import ServiceChoices +from core.models.scope import Scope, TaskCompletion, Task +from core.models.project_scope import ProjectScope, ProjectScopeTask, ProjectScopeTaskCompletion +from core.models.session import ServiceSession, ProjectSession +from core.models.service import Service +from core.models.project import Project + + +@dataclass(frozen=True) +class OpenSessionResult: + session_id: UUID + entity_id: UUID + entity_type: str + started_at_iso: str + + +@dataclass(frozen=True) +class CloseSessionResult: + session_id: UUID + entity_id: UUID + entity_type: str + ended_at_iso: str + + +@dataclass(frozen=True) +class RevertSessionResult: + session_id: UUID + entity_id: UUID + entity_type: str + reverted_at_iso: str + + +class SessionService: + @transaction.atomic + def open_session(self, *, entity_type: str, entity_id: UUID, actor) -> OpenSessionResult: + """ + Open a session for a service or project. + Changes status from 'scheduled' to 'in progress' and creates the session with tasks. + """ + if entity_type == "service": + entity = Service.objects.select_for_update().get(id=entity_id) + try: + scope = Scope.objects.select_for_update().get(is_active=True, account_address=entity.account_address) + except Scope.DoesNotExist: + raise ValidationError("No active scope found for this account address.") + except Scope.MultipleObjectsReturned: + raise ValidationError("Multiple active scopes found for this account address.") + # Get account - use service.account if set, otherwise get from account_address + account = entity.account if entity.account else entity.account_address.account + # Validate scope's account matches the service's account + if scope.account_id != account.id: + raise ValidationError("Resolved scope does not match the service's account.") + if entity.status != ServiceChoices.SCHEDULED: + raise ValidationError(f"Service must be scheduled to open session. Current status: {entity.status}") + if ServiceSession.objects.filter(service=entity, end__isnull=True).exists(): + raise ValidationError("An active session already exists for this service.") + session = ServiceSession.objects.create( + service=entity, + account=account, + account_address=entity.account_address, + customer=account.customer, + scope=scope, + start=timezone.now(), + created_by=actor, + date=timezone.now().date() + ) + entity.status = ServiceChoices.IN_PROGRESS + entity.save(update_fields=['status']) + + elif entity_type == "project": + entity = Project.objects.select_for_update().get(id=entity_id) + scope_id = entity.scope_id + try: + scope = ProjectScope.objects.select_for_update().get( + id=scope_id, is_active=True, project=entity + ) + except ProjectScope.DoesNotExist: + raise ValidationError("No active project scope found for this project.") + except ProjectScope.MultipleObjectsReturned: + raise ValidationError("Multiple active project scopes found for this project.") + if entity.status != ServiceChoices.SCHEDULED: + raise ValidationError(f"Project must be scheduled to open session. 
Current status: {entity.status}") + if ProjectSession.objects.filter(project=entity, end__isnull=True).exists(): + raise ValidationError("An active session already exists for this project.") + session = ProjectSession.objects.create( + project=entity, + account=getattr(entity.account_address, 'account', None), + account_address=entity.account_address, + customer=entity.customer, + scope=scope, + start=timezone.now(), + created_by=actor, + date=timezone.now().date(), + ) + entity.status = ServiceChoices.IN_PROGRESS + entity.save(update_fields=['status']) + + else: + raise ValidationError(f"Invalid entity_type: {entity_type}") + + return OpenSessionResult( + session_id=session.id, + entity_id=entity.id, + entity_type=entity_type, + started_at_iso=session.start.isoformat() + ) + + @transaction.atomic + def close_session(self, *, entity_type: str, entity_id: UUID, actor, tasks) -> CloseSessionResult: + """ + Close the active session for a service or project. + Creates task completions and changes status to 'completed'. + """ + # Check entity type and validate the active session + if entity_type == "service": + entity = Service.objects.select_for_update().get(id=entity_id) + session = ServiceSession.objects.select_for_update().get( + service=entity, end__isnull=True + ) + if entity.status != ServiceChoices.IN_PROGRESS: + raise ValidationError(f"Service must be in progress to close session. Current status: {entity.status}") + if session.end is not None: + raise ValidationError("Service session is already closed.") + elif entity_type == "project": + entity = Project.objects.select_for_update().get(id=entity_id) + session = ProjectSession.objects.select_for_update().get( + project=entity, end__isnull=True + ) + if entity.status != ServiceChoices.IN_PROGRESS: + raise ValidationError(f"Project must be in progress to close session. 
Current status: {entity.status}")
+            if session.end is not None:
+                raise ValidationError("Project session is already closed.")
+        else:
+            raise ValidationError(f"Invalid entity_type: {entity_type}")
+
+        # Handle task completions
+        now = timezone.now()
+        if tasks is not None:
+            existing_task_ids = set(session.completed_tasks.values_list('task_id', flat=True))
+            # Annotated as a union: 'list[Task] or list[ProjectScopeTask]' would
+            # evaluate the 'or' at runtime instead of declaring a union type.
+            unique_tasks: list[Task | ProjectScopeTask] = []
+            seen_ids = set()
+            for task in tasks:
+                if task.id in seen_ids:
+                    continue
+                seen_ids.add(task.id)
+                if task.id in existing_task_ids:
+                    continue
+                unique_tasks.append(task)
+            for task in unique_tasks:
+                if entity_type == "service":
+                    if getattr(task, "area", None) and task.area.scope_id != session.scope_id:
+                        raise ValidationError("Task does not belong to the service session's scope.")
+                    task_completion = TaskCompletion.objects.create(
+                        task=task,
+                        service=session.service,
+                        account_address=session.account_address,
+                        completed_by=actor,
+                        completed_at=now,
+                    )
+                    session.completed_tasks.add(task_completion)
+                elif entity_type == "project":
+                    if getattr(task, "category", None) and task.category.scope_id != session.scope_id:
+                        raise ValidationError("Task does not belong to the project session's scope.")
+                    task_completion = ProjectScopeTaskCompletion.objects.create(
+                        task=task,
+                        project=session.project,
+                        account_address=session.account_address,
+                        completed_by=actor,
+                        completed_at=now,
+                    )
+                    session.completed_tasks.add(task_completion)
+
+        # Close the session
+        session.end = now
+        session.closed_by = actor
+        session.save(update_fields=['end', 'closed_by'])
+        entity.status = ServiceChoices.COMPLETED
+        entity.save(update_fields=['status'])
+        return CloseSessionResult(
+            session_id=session.id,
+            entity_id=entity.id,
+            entity_type=entity_type,
+            ended_at_iso=now.isoformat()
+        )
+
+    @transaction.atomic
+    def revert_session(self, *, entity_type: str, entity_id: UUID, actor) -> RevertSessionResult:
+        """
+        Revert an active session to a scheduled state for a service or project.
+        - Requires the entity to be IN_PROGRESS with an active (open) session.
+        - Deletes the active session and any task completion records associated with that session.
+        - Sets the entity status back to SCHEDULED.
+        """
+        now = timezone.now()
+        if entity_type == "service":
+            entity = Service.objects.select_for_update().get(id=entity_id)
+            if entity.status != ServiceChoices.IN_PROGRESS:
+                raise ValidationError(
+                    f"Service must be in progress to revert session. Current status: {entity.status}"
+                )
+            session = ServiceSession.objects.select_for_update().get(service=entity, end__isnull=True)
+            # Delete task completions associated to this session (and unlink)
+            completions = list(session.completed_tasks.all())
+            for tc in completions:
+                session.completed_tasks.remove(tc)
+                tc.delete()
+            # Delete the session itself
+            sid = session.id
+            session.delete()
+            # Reset status
+            entity.status = ServiceChoices.SCHEDULED
+            entity.save(update_fields=['status'])
+            return RevertSessionResult(
+                session_id=sid,
+                entity_id=entity.id,
+                entity_type=entity_type,
+                reverted_at_iso=now.isoformat(),
+            )
+        elif entity_type == "project":
+            entity = Project.objects.select_for_update().get(id=entity_id)
+            if entity.status != ServiceChoices.IN_PROGRESS:
+                raise ValidationError(
+                    f"Project must be in progress to revert session.
Current status: {entity.status}" + ) + session = ProjectSession.objects.select_for_update().get(project=entity, end__isnull=True) + # Delete task completions associated to this session (and unlink) + completions = list(session.completed_tasks.all()) + for ptc in completions: + session.completed_tasks.remove(ptc) + ptc.delete() + sid = session.id + session.delete() + entity.status = ServiceChoices.SCHEDULED + entity.save(update_fields=['status']) + return RevertSessionResult( + session_id=sid, + entity_id=entity.id, + entity_type=entity_type, + reverted_at_iso=now.isoformat(), + ) + else: + raise ValidationError(f"Invalid entity_type: {entity_type}") + + @transaction.atomic + def add_task_completion(self, *, service_id: UUID, task_id: UUID, actor, notes: str | None = None) -> UUID: + """ + Add a single task completion to the active session for a service. + """ + service = Service.objects.select_for_update().get(id=service_id) + session = ServiceSession.objects.select_for_update().get(service=service, end__isnull=True) + + task = Task.objects.get(id=task_id) + if getattr(task, "area", None) and task.area.scope_id != session.scope_id: + raise ValidationError("Task does not belong to the session's scope.") + + # Create or reuse existing completion (guarded by unique_task_per_service) + tc, _created = TaskCompletion.objects.get_or_create( + service=service, + task=task, + defaults={ + "account_address": session.account_address, + "completed_by": actor, + "completed_at": timezone.now(), + "notes": notes or "", + }, + ) + # Ensure M2M link exists + session.completed_tasks.add(tc) + + return session.id + + @transaction.atomic + def remove_task_completion(self, *, service_id: UUID, task_id: UUID) -> UUID: + """ + Remove a single task completion from the active session for a service. + """ + service = Service.objects.select_for_update().get(id=service_id) + session = ServiceSession.objects.select_for_update().get(service=service, end__isnull=True) + + tc = TaskCompletion.objects.filter(service=service, task_id=task_id).first() + if not tc: + # Idempotent: nothing to remove + return session.id + + # Remove association and delete the completion record + session.completed_tasks.remove(tc) + tc.delete() + + return session.id + + @transaction.atomic + def add_project_task_completion(self, *, project_id: UUID, task_id: UUID, actor, notes: str | None = None) -> UUID: + """ + Add a single project-scope task completion to the active ProjectSession for a project. + Validates that the task belongs to the session's scope. + Returns the ProjectSession ID. 
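+
+        A minimal usage sketch (identifiers are illustrative):
+            session_id = SessionService().add_project_task_completion(
+                project_id=project.id,
+                task_id=task.id,
+                actor=request_user,
+                notes="done during walkthrough",
+            )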
+ """ + # Load active project session + project = Project.objects.select_for_update().get(id=project_id) + session = ProjectSession.objects.select_for_update().get(project=project, end__isnull=True) + + # Load the task and validate it belongs to the same ProjectScope as the session + pst = ProjectScopeTask.objects.get(id=task_id) + if getattr(pst, "category", None) and pst.category.scope_id != session.scope_id: + raise ValidationError("Task does not belong to the session's project scope.") + + # Create or reuse existing completion for this (project, task) + now = timezone.now() + ptc, _created = ProjectScopeTaskCompletion.objects.get_or_create( + project=project, + task=pst, + defaults={ + "account": session.account, + "account_address": session.account_address, + "completed_by": actor, + "completed_at": now, + "notes": notes or "", + }, + ) + # Ensure M2M link exists + session.completed_tasks.add(ptc) + + return session.id + + @transaction.atomic + def remove_project_task_completion(self, *, project_id: UUID, task_id: UUID) -> UUID: + """ + Remove a single project-scope task completion from the active ProjectSession for a project. + Idempotent: if not present, returns the current session ID without error. + """ + project = Project.objects.select_for_update().get(id=project_id) + session = ProjectSession.objects.select_for_update().get(project=project, end__isnull=True) + + ptc = ProjectScopeTaskCompletion.objects.filter(project=project, task_id=task_id).first() + if not ptc: + return session.id + + session.completed_tasks.remove(ptc) + ptc.delete() + return session.id diff --git a/core/services/video.py b/core/services/video.py new file mode 100644 index 0000000..8d5e69a --- /dev/null +++ b/core/services/video.py @@ -0,0 +1,154 @@ +""" +Video processing and validation utilities. + +This module provides video file validation, metadata extraction, +and optional thumbnail generation for uploaded videos. +""" +import io +import os +import tempfile +import ffmpeg +import mimetypes +from typing import Optional, Tuple +from django.core.exceptions import ValidationError + + +# Allowed video MIME types +ALLOWED_VIDEO_TYPES = { + 'video/mp4', + 'video/quicktime', # .mov + 'video/x-msvideo', # .avi + 'video/webm', + 'video/x-matroska', # .mkv +} + +# Maximum video file size (250 MB) +MAX_VIDEO_SIZE = 250 * 1024 * 1024 + + +def verify_video_bytes(data: bytes, filename: str = "") -> str: + """ + Verify the uploaded bytes are a valid video file. + + Uses MIME type detection to validate the file format. + For more thorough validation, install python-magic or ffmpeg-python. + + Args: + data: The uploaded file bytes + filename: Original filename for extension-based fallback + + Returns: + str: The detected content type (MIME type) + + Raises: + ValidationError: If the file is not a valid video or exceeds size limits + """ + if not data: + raise ValidationError("Uploaded file is empty.") + + # Check file size + if len(data) > MAX_VIDEO_SIZE: + size_mb = len(data) / (1024 * 1024) + max_mb = MAX_VIDEO_SIZE / (1024 * 1024) + raise ValidationError( + f"Video file too large ({size_mb:.1f} MB). Maximum size is {max_mb:.0f} MB." + ) + + # Try to detect MIME type from file extension + content_type = None + if filename: + content_type, _ = mimetypes.guess_type(filename) + + # Basic validation: check if it looks like a video MIME type + if not content_type or not content_type.startswith('video/'): + raise ValidationError( + "Uploaded file does not appear to be a video. 
" + "Supported formats: MP4, MOV, WebM, AVI, MKV" + ) + + if content_type not in ALLOWED_VIDEO_TYPES: + raise ValidationError( + f"Video format '{content_type}' is not allowed. " + f"Supported formats: {', '.join(ALLOWED_VIDEO_TYPES)}" + ) + + return content_type + + +def extract_video_metadata(video_path: str) -> Optional[Tuple[int, int, int]]: + """ + Extract video metadata (width, height, duration) from a video file. + + Uses ffmpeg to probe the video file and extract dimensions and duration. + + Args: + video_path: Path to the video file on disk + + Returns: + Optional[Tuple[int, int, int]]: (width, height, duration_seconds) or None if extraction fails + """ + try: + probe = ffmpeg.probe(video_path) + + # Find the first video stream + video_stream = next( + (stream for stream in probe['streams'] if stream['codec_type'] == 'video'), + None + ) + + if not video_stream: + return None + + width = int(video_stream.get('width', 0)) + height = int(video_stream.get('height', 0)) + + # Duration can be in the stream or format section + duration = video_stream.get('duration') or probe.get('format', {}).get('duration') + duration_seconds = int(float(duration)) if duration else 0 + + return (width, height, duration_seconds) + + except (ffmpeg.Error, KeyError, ValueError, StopIteration) as e: + # If extraction fails, return None and let fields default to 0 + return None + + +def generate_video_thumbnail(video_path: str, output_path: str, timestamp: float = 1.0) -> bool: + """ + Generate a thumbnail image from a video file. + + Uses ffmpeg to extract a frame from the video at the specified timestamp + and save it as a JPEG thumbnail scaled to 320px width. + + Args: + video_path: Path to the video file + output_path: Path where thumbnail should be saved (should end in .jpg) + timestamp: Time in seconds to extract frame from (default 1.0) + + Returns: + bool: True if successful, False otherwise + """ + try: + # Ensure output directory exists + output_dir = os.path.dirname(output_path) + if output_dir and not os.path.exists(output_dir): + os.makedirs(output_dir, exist_ok=True) + + # Extract frame at timestamp, scale to 320px width (preserve aspect ratio), save as JPEG + ( + ffmpeg + .input(video_path, ss=timestamp) + .filter('scale', 320, -1) # -1 maintains aspect ratio + .output(output_path, vframes=1, format='image2', vcodec='mjpeg') + .overwrite_output() + .run(capture_stdout=True, capture_stderr=True, quiet=True) + ) + + return os.path.exists(output_path) + + except ffmpeg.Error as e: + # FFmpeg error (corrupt video, invalid timestamp, etc.) + return False + except Exception as e: + # Other errors (permissions, disk space, etc.) 
+ return False diff --git a/core/static/images/logo-white.png b/core/static/images/logo-white.png new file mode 100644 index 0000000000000000000000000000000000000000..dbeff205dacfd042e508ffda69cc5e015622444d GIT binary patch literal 77097 zcmbSSWm8i2m{g{&uemfCK<2 z$w_PZQRQp%i%5_IDzV(N@7*0h1v@B~2Zo1Z0QP!?%P1H@sqR zDB;1)1&-9{aomXozA$`hM!jJfU=iP_uR^g>c3Y~oQy0^%a31SP=fZ=z&Ig?j-UTj* zah&U?fcyNFwe;_HwVm_#HRs5zT4m}*GLcxs|Nj?L(!%`*z{_|SM>GC}UUdMVIb?6k zI}Gp^zz~dvL;~Q2fu~5|BvSRSe!7T%+49GOq*%(epYDn`7_8NY!fx9L2#`ZcJjkL< z(T7PA0^cFN`;dO=^DW{rewkIPjV>-9m_QCNGX=CoH$yiApr47#N~zrC>mm}(?*{2J z*hA4G@b4nk0FB@A1;T+)>?@i0c5ceJ-YE+b^l~p?&%~3{Wfmr`NZ)OeulpTQ> ziceetLx^}7V@OO7GHd{z-~Vt#4Izek)R%M9eYNPZmZ~4f|JHG^P~SIS7N_ |ibw z3;_xqOi73h74#zlPz6|Qe+LpKtZyZMj)<%W{dMLD@%oPP13h#es8_sp+5^O<1IQ}d zuGpZ}UH5Ux5e-CtvGKu^=}QWMhECzp5**0kS6#q9{Gf`*l%6m7i27wKY0v@iX%oMf z38?^c#+WJ}tptsT@0axvRP{eQauj%jn zc+8URB}NL;2s$!JKS@XkECj3$qO`dueJE|W(jcpSSzrJ_t@Z|Tq-xU&H$;UH`S_a@ zFC|{aqB!E$@rscEQbV{R)*({gDGCYgat#eX)TwjGj-p`q@MMx@gYhZANc~(R0$^CE zsT3!MSAm`t02zezHpFx^&h9=?f<05vI0S8&48M2ceG0^fUj02az0gBGY8xC%NFU=M z8bvlwZu2?{2g#Z;M##zM%i7gfC!dJFemD)DxYdW~R!98o(#LSu*GK@ONyjLQhptV2eL)l?tf2!pwBk7 z4q0pdI8>sCkdh7C@+aWeWu`!J1@S-??b*lGr1h{z1a7Jv-}!2uXJrlcc49B`u1USW zl>9>+8Bgxl)n_t0f-~yCUNsLHONy!cfNV2jH>{>%6oW-v&mQ`tRnv z03PPC>BclUPk`e*2?2y+wIfKMyTrN>_EoORa4}(t4D+2buou!i5v?c#ifSTR=CNza z#^yx^ymi^G#fcSV+3@v8w{y=H;#G8wHUhWiMDpd zkevc|u8FaIXV!dHeCUriXr`<*?stbVgVo8n#Yql$bvHLh7A*Jln4S|2D33Rt zOy`Hv;fd$&-Z>*Vop|S5jT6@PBXf?hl+-{RySA3YF9SbGP7W18K#rP(!6qg(4+8>) z!|F)FfYgL(c#!5Kde}&xVi++A55?7*NisM&%!pQWUOgU#*BX5SnF|sk->0V1@ftmX zjGS{wZG5i-m?d3zsGq@$Nd1V~c41a;BtM}3Elpkq9pCbyA*QGaRch)2JlqG)@r?(C zN)v@EhDC%Wp(5doaMRoS%!eV6j<9T)BeNlj&6$_>nY9m>Rh-KUoZ|<=@)Z8$*Z+i0 zHCnc|o~XZ%L|9-CkYHqA=Nav?QT{Jnu@H8XAz!7~L-|9`jT5;R&bB`bqHLb{;l`f6 z))s7^(lq0Jg{QAwSI2?9MMeNa=EKy*tAZ@{--XKal*O{MTf^#~u&Bu~vOwz~dT8s` zrW*M5XL4@q#uF~e&oEJg3_qA&>+6wZOXFP2H1Rf$mh_9u^fY)(sD%|!eOsg5@;zN{ z6n~`4{^xuhKWgpa@k$FTm$OWc<33^V1(!Dl=_iXZ$b6QL?yb@DTY6zhf)UR*cAFkz z@Bb_}sG3arVYySA7rl0Y`H}zWlEc0+X%@Q_^|!-%c;s&DuLSW*Ro9ngVon+8!HF){aPQ1sm+QCBSW-N^LV9L*yDpa zm4)fXH7L==nnl7YjoXQy)6A(!xzjD%|q8e zPbL6e<1Wqd*pfZYm~{NijA@P));r}RuaS0|nqoc)&Vu<|lY>;e#^ol8>9?NA8G*l3 z9I;)FMv^1x6;{t>F-fzvxNOwpSis}TumshBokx|2DG%9m$60}Z{%+wJb$eC$Zo!=e z?yr+UlFoz5IdgBXn)ioJAlAy-qtv zdi99nlx)L$T$BaCOsf6X>NE;9j)K+nt$Z`HuheAIrLv>$QuMc>{L}7}ob>K}?m?~} zpDlww?&qH^ENUi<>%*HN_R=yIW8c=_-%9|)wz@C@BQg@4e}s&#c~J=vnUs}iOhnIH z&odWvvOSfmy-B0G>GO4QLJ0#fJzKgf$8cI=_H!rqf`xRcEL7=mHKE zY=YE=|Jdd)@<;2^e}{1;>(Aut-7Q|0xc_u+e(IFDM4FaY8pWr=;dn4b8@I^sbv?IY zzC6!)UxT_mt%~*nEu76p3tMNJBn z5p*kVLq_9=Ul$T10S!NlB)R!!=lAY@$0=$Og#3t0R85FaRj)>RF}E*vubn^HFH{CJ zLMzjw6VaYB)v3xC7^Cn5@w~(meJ1ja-n`-)7lUxkkhH(aj~Q?3wzDVB&b`5IZ=Wu8 z(jfV*QSsj|DT&L0d@u^VonXWBb=tY!5famcW7r%K$-=_i$XvSicwcN49kUGp%F?e1 zU^QJ9)&{J0D*@wY^u%EFNx1!A*?Wn~FUhL)$rJ0j$D9{V-5+n~>4(yoxL^m75Np&C zw}f3T!}%v2lAF)ZCiW=caQ$i|0g`385HH~-W*|N)Qk+0YQ;aDOn&;WX6lxX-9r!IgvlQ2Io5**x+SDWuKcptt&U`A!h)ulw|l}g08 zeTDqn*T$x;eYo8+kz=@HTL5}uUDgN?DmggfF0^K=oX|ecB*@RV^ZL$i#o&w#%#B7z z5gl!K`5F_C1&Gc!G!^5`kaAya`ignKh(0(g3N@BJF5T~yEz~(iIp5JdY`=+$2U+&e z=2MynlF~%u?eA<*hM7tfMv$8ycA@QN}Ao{^rb_5t39Ikt8 zYLil3*{+rdC(@?GRQ;r!;@qRf6Z-S#t2bLCH>Kg z0TQI~!UbyPI&VB?7OIzQ#|7*%3u?^2otaxzgqCN^*rQ+2Z?Y6JUhrB-snUqsD8AdC z{Ns*{RLY(*s{a@8t9Em0E8TzhWs_S8&Ec9-NQiKEwGf(F(G7A+HW8TDk#3#blWY5< z%*A+A(hN{Tq=InjB_h?o_o^ywL^gq}p+$UR{2&~)SH*hDG9MLfw&8?`pq)6<8H;zU z2HlMhMVIwEpT~$XOBFsv=W@R#;^e6vWla0MD*VbXywB@9MNSqH3LpzLv5|~`;rEIP zB0zK`p5e`z#F$w9>>rBrA|-~+2Z?F*!iV7BRQ9vOQoT}rc@3!2jH!49I5)GvKkr}Q 
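+
+
+# Illustrative end-to-end use (a sketch only; the real call sites are
+# presumably the session-video mutations elsewhere in this patch, and the
+# temp-file handling shown here is an assumption, not part of this module):
+#
+#     import tempfile
+#
+#     content_type = verify_video_bytes(data, filename=upload.name)
+#     with tempfile.NamedTemporaryFile(suffix='.mp4') as tmp:
+#         tmp.write(data)
+#         tmp.flush()
+#         meta = extract_video_metadata(tmp.name)  # (width, height, seconds) or None
+#         generate_video_thumbnail(tmp.name, tmp.name + '.jpg')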
zQ_EDg(;&{fMvDBYAMLm%_{J`Q$LGqCt!F#361n@I?NZ=Ku)H2POh?CxE;gH~XIVx$ z^ooy&fiN@T5PS&sx2|=tOdR$3Q09^^v=VQsM~e(iy6cjX5x;YISeSIYnLFI`I##jX zbSw-FErSB^xMlx(`s8Dnf=&5Cpi-=uy=ot%UA7~m(=L**CzCy~_RW)DwQF&yeCy|< zwC>PEq@P-2pY;0>XRxr?sHYGrWBg$TVsAJ%i$Zhg%e!0>s%JiUj{XM~xXrWPP-VbzU9ErN|tN)#f&ozELQy~oI*+bz+h9OZQY1`jX zn|2vndJq1OZJZDc2|jH4p->GPOgd}}QcGfsx!$mAhllE~uG$R9_^2S);Z04~Avq2X zx+^HC`CQdMYQ?B;_1P%(o(jBk%omXu=Oz~a=t0rk`0A(H+|Q4rMB^X<5(lz)KPn5d zc_AK|E-x&=m*KsNZC#YQsKbw3t{^J={q#N5keTiEy8Xfsk1 zhIXw%=F=LTe`%2EH}qM1CG5}r@0Cq>_KVv6UK=zSEJ0a!J{0wl=#uw?0X0cFm=1Dx zy-k3B&r9xX?(g!?%69B+9Q&t_X%$LPX}QM>g`DIy9kPEsemrO5IpaD)vJCgj`kjwx z^(#G%BUPt&j?N?JB4Apg8p!&SHqDLKnV*OwPDK% z9+_xLVPlw0N4+7nW_sKzkMNE)lxHrXW1oQoa6C*2!IlO|)4oY=Nr*7zi{Mx1azzlH z2KwI~ARcaD>z@}zMI#Y^I8F*|H@~4A!9RuC(kz-N2^nc>J=;WL5xRIad<@k*VCmS) zQhmHJACeWjI}v90ytiwRw2{7}MqprLSC-^_ZJ+~|$iNSUvX;KA!m#6x?oZ`dzQo04 zCOrtY3~bY|R?Et7Se5=(1#EH`z1kb~*%2b#qC!Qwf1(b9gz%Hpc~z^alxS4)Ln2&2 zo%9$MFllxeq3ppcdg1kC>`amy#x}kZQ$;;`T}AK9k2>L@C0?IS;R)E$P==oT_gM?3 zxX9c`(OZZ@r0`Dn8KDkm&YKB=ZG%_mjxA2yrW&pZv>q96r5~edyzMt+#oNGR5T7S7*zW?E7eRSjVaS2vfs~p|NXGBzLd{(42(v|FibhJ}# zI2Q;%%2Z(#05JE6v7p^lLVH(UrNH{31I{N#lQO_%ZwwI>b@CcOwxG&!Wp++ye<#bC z32X4HOuJ@^3w6f07#>s}Y37WOfOEylV)M2hUIbAZW)AcU*x{IZc9L zm+tD<^*>$;V2_F;j%}&o3fc)xMG?C?am`&ms6diq3%daMbCtlBFjATC1=HU1w;)2n z!=&eov0wNO;x#y07*O8pc3V^{$R?&D!grF$HBuuUauW(dw6H`jzX5vwi0Rtny+Zc* zZglk!xX2YnqX*p4qRVu&s(KHypPHUb%`Lzi_>(qp{@$D+lHG7JE$nq?wms1ZvcsxH zm@o6=v9FW3V=7~6Rj5k{ixQq|i0ww=?d0rA3$?=`3NL@hMY}A;$D7$;r*AR0GhwxZe7|)q@m4u@#^lXuGrHLlB&$)2NW-0w;pv&FT}9@wi2Lqv%MN zQ#Aqs8rz+?lsYizei0}Ow&y;;g>8cs?3wAc9C8HLWz9XgKfyi*HZlfa0Ky`kH+`XQWxXNqWI*2}^_HA>3RUKhM2>p5MS|KUYrc(WJyfM%l=?K<0Y_D5*z*l9mMHgL5CvS2go`Yg-zO1V-lM6ak?dZw~F zeLn;^3AI}9+~nL`)jDsbKVW9of@VL&9O+cM%$W+Ggcjk0W7JiO94`}?wEyay2ObF} z)Ou<$$=P$k^E3Dt^>IY9&Pp}qCSbd1B#ey}ll19>0oL_5@6A+h*SGI`EHX@u)j#{x z2N3RMygROuqutD&H`5!l5wJhNMF)e>fuiDaok$Ku)}M$KV@t-iyI8(+*5)R=vo!1| zKdA3zFub`MTMlR)n5I*U3|JFe*3a_sAs|8TGgsUo#vOVB_#eH`ycQo$<$<22l==o=Xh5f4?f*zohBYrVX~3Oehx zCN&t!_}nSIHuNn6GP{ZJMk%S+Gq$H>96^r%6aD+@5A;}-sEe(oeDbCNWhcGHge&T6 zATI^!G9j&fQSUg{yb!s6certFVlNC}%8tj>R_!)3M9|6(6D?nlli$jmMogL{YDKYj zqgi*LRxzme(OdMF{Y6)}-Qpd1uiWIk*zjhfKA6Ty*ah&$Id`y#>`|*S0hjW3MbU}A z>~*Y8D=T~z6W>STg)=NnTs=p*qT4&^Juaq05=7WqKczu64Cp&{^enLVBl(yXJ zP0}eHST}RLE;#Iw5{y5d@;L6)lu~`GNqIHF(rHA$^t!(0<}Ds#2k4*E6Usu|eK7od z!||}MW8(3&^)Fz}_|kxX;T}YZgslT_b%^})=DJd+^^5B_QWTuXCW-auSjxCM1dU=|~Ui_dVH z1d*X3;Olgvue$bu;oAN~T4$oW^C4-+gcX_3gi z4J59$gfvUvTb}wToTlE~{Mf!ZzTIrs###8WbrFYpGiEnjv8ld>N&MQs@meGf8c`IL zqKcXv3c(Fb(ZxSn!_Cuw82FekUaD4eYY;fwNiEiYB7!Lj+zrzkp&3nP_#J+HC-Rl& zb^7I~bHS2~dqw`4O>7uaB5GRF#a0^YeI7M>CpCIF(ii5CdG2jp>%TQ&MT#%szno%G zc9lQxOPpZ-1@B^BvDTo%$vKIL_$}tUzf;RS?@>+qlUzyNKW&R9P}+4rTXOzvImvr! 
z%emQZiBzqPjg*QAY#Q{XdK+0uNW22?vrG(x`%(*FI{TYp2H#v09=D!n zrRttWMh_545$r1Iu(AE_$J`nQr#0KQkj3=Ka*GLSxiFtSY~vmis%Hc$d%cc~m_rwc z>q(BXh=tR@;7dv%?TXjQyYMuVBwXe{^v*V{4 z$!GdDo9QsNb=H0TA8w9c9g&{kf3zpL0A679x{ll4fTQtA+B0Y_QUvx2$0Q8QP8Jf~e+82oi?TOh^j=@#q(vj+o9Oi55SeWuT zWp~gWR6|5b1)2CN-yZ~_qLkpMRN>!ZL?gfyY-12}X^}%C24cvkqTUWxw6RNcrmxMV z4vM&lc?LsfGO&A%0hoJ)B#eXJz=v{xI_D;I>06i6Ke1Oo<}ZJ626Nv`&i)(eBYG|z zkwPFhr1w_1P?w!$V|`{&!=-glKKJkHir(jn6 z#MmPtm*cz2AO3Z3LoAAC9eV;V^ejC(x2%>dJosTbFnMzH&5oawjey$0qfk`*z&Hc~ zG@N8_HfK4YJtWPTo?`MfMlgOJSj~C2J4`cj%}rbW;q5Sc4U=|Cfw{F%dP=Q^*2%JGi+M&bqztFTK*t%sSgc00b^r<1>A)6{h3#~4;N2?e-$>XDFW&0>t-OW{<%y^$nY zDhuZJxNQ!Xcl8fYhxqJr(Q;K%n@DgrKJK%G;B3P`ve<9&4&j)uc(3~CcHbwgxD3Vn3pnKt{mja& zYtbev$0DV9sWx78^y#8h+5s_b3_7kyke&uVpNmI+EYQ=8T9kjf$rOvk*En zmTHA+sCJBiC6m4}?wMiof<%cDOEX3BO$v(wGKFL)qoy?Co)s`mTN29_ycK-S7V85F zQ`emXWoY+%^|WDb;B}wc>ZeZz2?w9o0(J$R#XI&l zPHfvdwd+-gUo*Tgsx9B%X^0#;R_tUiRdL4mYzI4d!MoZ&ompYv?8$N)*0PHUQqux-buKdIvc4{4 z=!U?y^l&Ea_|n7TA1*_e#7~O_jcU6O8CtgGKlZ<)D%(Nfu{bH$L9gs{4}ZaKJ`=7r zGm+ZJj&xn;F*qIjWAAe9#a$kWmZjq|1HiPpBXBI>2r;ocE$Dc;4fl^i7Q})4waP|m zyFbGlwm;FzQO?sz-|dmb_hg&}lJl6b<&ncshxIcRWrw)GsL`y=c^)u_y)vIAK!RC8 z9AzZcOc7MU-vmGS6!<0)s=+ezl{5GlRTfc9{hxTaqk>}Y3vs+WN2O3+vD)n zOJmDd?gcZ7G3g%hih$VbZ|O{e0!}4^pUX=PN(CQ`ZNz%sC#{3NEcOAB-jbLd_(RSm zviLBxZgMr7esf2}uN5xVQ;W7_d|f4}OZ^DJ>F<5Phqxl(!j|QzcuLG-M&dXNn_kFU zwVIJS5jWP;m55ii{R_+Z>_~lB^p5w+Y!vP1XrXD+HQj|3Zz5Sg)g7~Xt?{yEk?#E_>iwaTaLDfG( ztJ6LC%BNrYETfNVs$7)*W8+A3@U4*+$?oH8T=6G~3Q9ZOX>GNyuHHo%>?hpMS96$hpX(TFM~)G`4}_;cXYsYB z?#ltDcV&~3j~121*~Xjv`1Po|QVeznQH*TuC?k+OUbB~y{^)`VapKG&gK}2H{WjQw z-3PCzr7q^j9P!IkXYHCC{yyHki3pr|`kcVVubmpFBP|`CcZr;$P3M4wd8aHzfH=Yn zpJ24g#+kJ@7Ta-sxUOwi?Y<=TcB0Jt{KnehD~s;dsI+IPc59iCBe;~VV`nC@_UXR# zWu5iOBl0hEfYo8d)Dh}9%NfN7I>#{ zr0T0ugQL`vC`1V}cm0vf+xXs-R8u59F?V}t@w9a4!)RN&yEo76Fpr)Mzo9WeUF>94 zg&tqF&j!a7=P0bMMd7mOQ2&9#*rzk-P!&V4Ybz2X@yr=eJ(i96zNnA6B3CCeHtcJc z^gGN|e-V+JON@NoyQ7i4KgJc-#?gp4r9p@3 z%MyjR^qr0Er~DCX)B-CB;)_-MThe`454qzYB_Oz24T-Mq()J*^N+| z?y7mxG4{&oI^7%}jK>xdp{tPD#?pCRA==W1wsmEPQItE#5^wGu_>|Q)Dek9vZQ1Zu z$+u3OtHiK}{l`x{Cgb5Lysx`au5dcv+DmkYl>PLmWA^mVIU$C5+IlA>8H7)g*y6yt zuEwy5LG)-xug8hjc+r}x3r*?O?foq4W*m)TN&%w$9)*_hLn}hY>5KHR7=AG*v>31D zGz(#W){0;2dx`zoxx4GKW#9fTWQfGSllAk5tx<)S1d~fTAs44M4#audtV=yLdALAH ztQ=YhpD$ehZT6e+Tj4O#s5E&98n&Nt{6e$h&1VlCBm?X$hqv{SW_?F(3?B3IMa7W! 
z@dX}_+XNJ~SDArO*nJ=~;efwEwb#t;Ri#rm*}2ct+vkBI+sCsG+{LnKvMQJBLM#gH zMUvBATCr1<6NHcIOQ{CDQ$6%>7mpi!D1AK~ldcWP&d(MGQs@Ce7#?rQA=qlqVf4{P z>tkDw*-|7cF}rnv|0=E)KT)vX&bX};8K6*ooC?Piz^ZF&N1wrlu$wmvM;cT1IVCr+ zt<)$9@bPz9&hV6{a=T_9r>bkqMu?wWH%zEG z2sEDx_1UkM_lnh;(vCYUMca)7<5$h&#puhemyO6Bc5Yz5OfT-)3PT{8>qDh1OQ>`$ ziciJx4R{9Gk}b{M9`P#)vBSIciXAL_{c_OW&BG?l4O_ego^NzIOP)q^aQ{8Nb+UOJ zl}R6~a7iz2N3c1>sNen+M3%xC!*|)R>!mJ~ zl_w1KyD#A-*AO3xUX26tTU<*51%hqF!Eaw}y3{{E#TW_^K*moRwZ3I6xQw5W#x|Mz zmfPfC`1lKRf#IYvJ4xb%|6;izDOPw-FWx*_g_!lJQ`Y^QK}kk$PlDO6P|Uq1<=qh9 zmSuauEVFvP(@sXixqiNbQ|sp%;B&q@x#9H``fuZfZfTGAVl4nzCoB6h4hH$JzV?iK z_|qbJ*@Ni*o4yk(M2hJlPnGY`msX|Lh87KpG2@COoG%>aeh8bfNH46rC)tUnmG$(6;2eC- zD_{Uypus}hYatG(kzn9F*SQj>IKDQ zMyF40&xWjD?tXi^mVDU$d?Vh_{^|#RPbE^x6ehaOeD?Pho6xhMpuewsd37dI1siOqnakGR`+sdPef_pP@|j;F#6^=F1;fx+w{Rwu_p>R7dRJK9OOb ziVMF~v4HPyGaM}3h^@sYOHvu^aM#iB>R)e+h>ILYTqFVbw z+aZ*KS33V5E8jicf2NjP*N4W3kY_rM(?Pkh(as_h3YF7Xd^dr@Y7bE}X5ltG34@R7^I77t zeOke|ZMmZ8zE1Z+u+==2>usX=!yu}_5mfBKpxUmd8#b-hPwFoMAGF+=xc4ZJ7bvlv zCV%e&am(L)7SK;F^OaM@L(0J~MC(5Y#Pq)H1$h3g}92wcHmTG~P{TYmRaTAtYDjvTU8s-_Nj&!uVw(iGm3vmu_%A#6$n;vjp`W5{8(|QRmx&JB|3E#?k-nEhsY$0@ zV^7X)Y$j$(MSJa)vbV`}k=w_7#*s8>Xf%P%8oD-S4!q&{W|uX4&;>DP-4AymEzZ4HL?_Ie`qx`iMqpuPQnlN z?%|>PG)LRS|Dm_Q{;$u5=Z_=&YJkT$Rsgh)fOC6wdFkI+K}HTf*?HDLs^m{h|HTt0 z7#=Dx?4_w9v79Gx-zi~6C94#(I0!~X@>_fTBAAl|O(kM{3bfN>={U)6M91kyrw!E| zf{m3e7c96iI=S1qz+I5Tc-aILHnopYXOJFH&}$FiSzo<=CXH7n0}95dM}MmSQKkQC=ZSmbl{P=W7BPJF$4>7;n9wVGu4 zuN##XT73VZ1vObhIsx?YI0!f-%QUlh96ql836CQDCFpYP!Z;8;BaL+gT;tK6pklIk z1r%kVoJL%4VWO2g)?n)(oaOJZxJh}^uc?r)^bGc^Z6%g8YWnf8RCA9p0tcHODA(S* zaM=VCHw4wG$@7xQvScjDO?mhWOE|)8Q*gEGj13CJGSNq5;Rav3USG^iHmlqFWHYGL zR1Yi0Cee!(I$Q>#YupG8+8?O$SQQJ`nr&3n3puxSYug1kax1wjA|C@8I#iE!e-^VySv@>SApbBe;{yM zB8*wC%skgfBxjC}2WiZH47dYKjv;EW#Z2tjoj`dTm5z!)Eygi3Z@>t{{IFUIj&ehMbkJx=am%{>cyA0q@lC3kCZ!W z$ntfQ4O4(b+uq}QpvU|&P$cJq0I^3Yg8l3Asejr9dMbBHEww&|jIFyBFmE#@?_Plf zMN<*#@lcgH0Ps_6anuIjtY7EVJ}2i`U0`@>VV&!c&-+<#Js>|xlh*O$DO*t|S)n-c zFR$@eIn_M=edxY>!)bWN{v*;X#CY|+{o3BVmGc~7SCjw}ZFOGCCzuo#cng%6!Of?a zIc@`1bW3H=^103C@X(|ILJjxPU*x?55PDb=7@m7MoLXe4wm4^Oytu;w|3Gokx2q-O z950W`K^u`j5T3Y^sIzu8N_AS!W4r+NtBA4|aQ*dU;-?$9HsM!kNkLIR>^H^D{; zvy3tYz#z4Dh?D#|M|RB$p+AQ2RP?wUw2L?|#xHkR;n3wREd7<;PSzdIc$h6-aWPTX zh3>;5#`)iNKO0Z@JC*ZD(x36fKW1$4Xt1hS9nnoDgxc;B*&?L;e2J`IrRLF@#%<^`YubBdVK}C zkPb&)Y6|=HGWQ3e;6?;77_947r9V#+?h~RGj5Fz#e>Z78|E{}g%|yJtKIoNw_=l!e z-})x9#!ht%7z5W75(IpL&#nh14qc+(vc`s@g-LIwy_X}DnS&}aQo%!?3w17{rjN^r2O#s8Gfko`?d> zrsuiS`P#Sz{t$PR@KUL}ty{3t)D5STTN-dFl+_OaFH?k9J92@S>}Z<_kD>b29##aM=N! 
zIzH$(vf&(zGJO0A);L|-v;Maho+uIh;x*~jUaKj2&ZBa*DSI@pO*DZpd63LWJL6p+ z&Mwc-T0XU;T1e$^OEr}g% zs*B*Bo7%Vlod^QK5eRjh>i9A?QYJ_UhAICjtFX&`UUO^9qQ~{IJ>y*!-hG!MRSdmv5VbZ)fqTj$kg#C;0xn^c&G%v&aukWI$@1UyLV|T;OJxqdOpRW%XRW)&aOT9^EcA9`pbD| zF+UGyoXj;w5CwLmm(5hg2dV^4H0^P$#)D$2+JCckP2WwL1#|n8vi;0*pM4{8GGkZKIm)O%^u z-VD>!TWbVB_MTA;{JGBzrP8bsQa0uhl>Ch`&1=q<9W^plWP>q#H~|Ntr3p`$3;D*n zTMBO{*nOJrK~u*}=*Mo{`l7OS!PEN@sgvkyulk&D*RDOUz8lS^VVJP?w0Nxy`M#uo%a-qZLTch%&Nc2e!Pe1^WXs{>>6Ia~twso&42dfj{$i1Sh7veM&P_nK+fp~ZR zO@l(?q$ zSubyn-KJ_2=x@F$JAKH-~G3#R`+S^-0ynwv|#Yz)mh4grmx|CV#or1(MP0v-R5^syQ9C_f&=aL z=(Pn&FBi*)rp{_ji(85!q+%nlb-6OxXW3=zJ$_hZ_Bs@C0DRyXrcK4Ym;f2P9z@vP zm4r2SM4_H1i5Rrp6R&q|s}+boN-{_Pd+yI;EQ?$C z5a#8;4e8s@*A=_z^{3JEu~BU~xsu@gbnWs>zBu9&8&dfe0VpYHsuKVUcYjw9Qi}7% zm@NJ&3>2d3;a}|R$OLmVGdXf z&$%eog^^<+b}Bws{uqRgy;3QwAa@ljrcY*|WD(~ zhI!0p%P@6^=>-oYl7QcheEQ6qf0>#pY{qZg{kU6q_CMY5`-ya!U!iq8T{KC-1*{w0 z4?4Mg)~S_jyE=K7XBb5CE>AeB=jKCMxOSOrEe$SU1TUp`R3;fW2`O1|(F@GBtC9rc zibgF#juSwmfzF(aS^S*tazr>PDgRMJHCP^+N>$1XJPheX)WgcBg|{VtdDU#Ii&e!p1mRG)*b8`x+%X&$YOeY>KSUjL zFB$Z^h{+(wZL~P66ZU(x=QZc)JF#g_`;r5nY1Pc2{PmtVw`30i*aep$yty^*rC6>` zWR5qe&YwkA5e<<9F?)~=^&2@`StcVI6lz#uG2SYV8CiPAH@ugPqg*l3d92RT) zPDZ}-&Qg%d>E9GZx8?PUNzjE2RY(l~@XYI=lMwVxQm+3S7$Tmx> z(RVen2dj7(a%S4gt#KNlaY zM3B=)5st0~6RPdBVlWDNsVC%W>RRRg+DbY}%hQqQ>s3?!G(#BnV-Vn?p=3?@5)x=; zuGP3I{1=O?2~X=rx9pS#hmA5z7io4qU=&mNLBICd#WT|!Fo}}FeOyI>pAB-7W*BNH zdvW(qB$@bh!cu6J>(QxLTw>|J4IIh_+ehU!CtOWd!3W0h_Gsu1^(#K>?em~NEiD_@ z8J|xJ%wZdu>LcxECudSM8jY%?*>v!!=r4f0viOgmyYI(xE{{ZM0`4pA>b$=-txBBY zl%Z4%L`NdoA*=M2|6#Ht8XSb!CLLj0`l_kfl`hhbx$=l+-)l!P?PHeUqa-(vaO;qW z-u~{HSd0yOk7SBIJpmHEU}k%jwEn<(f_?NGmHX|lOm^$kbyMIuc zBNg5Rb4FH_a0v(97ok2w#8BwB1ZQ!VvZtH#M}en`l^W62II@SwOWa*2@Jcr(iMvo0#DHrZ?STYK@^@!5^-})W#|nv43KuMqQw0s+hUtZgDe4G3=BbO+jAe zb?A-0TxQHbi@5)a?MQn9U5$l|iExNS|gj%<{KiZW2D zz8Y}GX*qptBPJF^#b-CCKQUI{#HSS{(^&O!%O6Mz{(B#f>gMLKebRtQIL7l|X4qQP zkDHY2^gEIDmMo_>T3TKBOuZ=xNGaoglvB+n&?EDgkF2MshjB!h%7p{g_AIMjl-G&$ z5RNoF1cs<+UA48Bo{&Zoonud5J>a{QuU@VZz3Gbt5TZ&s$!6F6IDdR4+P%-;y4N$l z;pBU#KBM3G_-VR}!sCiu*QVbBpRT8v?e_7=oQcM7e}{?2q55XhSNy04czRS%&dqMG zK24{(0-kb$H?>`WHBvSrCXR}Q&Lf$4i~q13fA4VhoJ0QBc2YEyo0E11c!8E-7rKd; zb#F)#JO91KP5q62>V9QiVsGWTX#F-e8& z(vlTQc{1UN^7pv;{M?J%{LO9o^i51zxaOp+A;^QpzAjUi;#cw~4tW*ZhlFb8HU6Cv zU{-G5uLH~?7f0|w?pji47G;BH8;^ndjc~vkwoxqXstvS97@h{ydZu^2u6qHeUFK9eI7&d-O+R^ zO(AjeQ@JRKNW(gQYyHzmPu|;+Oq*iX4hQHKbsQj*s}=qIJ3J)>U_h$N?;1BhOSJ96 zoA0RkkmKavd$i#@MDcuzy?-&Zq9pQx`O6r^LAGi{d+fkXxk=Y?9d6L=6O%-@OY@To z_NZSnCjJ~(X|ZNG?rUHi69?Vq5Ej#S^rqT3Kj5W!O5 zYEiTlv_`d18}#qE`*NeZN2*9ebS$Ryv|X?CVU43ftbj(Oc1JPA*q~hNgy;-hVy=-t zS!W?!Yxs)ovg`N8ekUg_>)sYK=CK;sWZG7iWk}Ffj2vBK)lNy-l?l4>f|>_~sJPTX z7iW@xPf)sl$1rK@A1D9Lp5X_Oo@?7G+0hgxxpEb4bO{MNxo;r#{W1O=X9Zq93#0StzlB`6LuUxq6c#3+s zS&-nar=_edJj_PSZpcZBf-S&Z6?qKd@M1*PB&!|6vljFRtE0TMKabD>5-tWfD{x$`wcUA=Vfe9EJ(l%$w> zuot74CVAHM>l)!qB97jFJID?lgx91lnRPbA5XM?xY#W%gopNgr9`hhZnsA1*7}fp{ z;6NY0ox_>;ef~c6wG+#&m597%&4&ab&StGc&9uuEaBlL$B3 zKop}{tAqR+h!ogp*HR)^s{@>0m@^F_3Rq109GlH^?Hl(u^k03$x8et0_wu7R*DG&p zD{PuJV%>xF13b0-;IBUP$o=p9`soK-9bWc&Sb?Dd;3dj^xzYqHbf&7FW}BEt=2-x+ zt)fCkfEXd@B=0A!SNh*pH1t&3>C&OBDs!13(?Yyh$G3HkG3{3E)C?KKjMer!KK+Hy zv4$d%F`?{5d!Ry@k~ z8*Z;KOpw!+gLOs7aS&o{2IqXQ~G|)@jH&I3o0_U%r>Mf?`0|Rc15X8i{>U(``zzHGy?k@p`sbIK3)-`S9l==+}Lr7E#*==zH zpXbDaa}Pz4@?KJWN8WJd_PK>YytK7rc1+m*>W7nrBNzZiYqSpRK^%nJu2&ha%LLEH zjISIIj{Ud)_0fe5)LVmQ9g&T}5km+CS(cJ0g&@j=hKzMLlKV>JZqgaiMItEB1VhTk z!k8GoCm~IYxYI`O+!ErYMNAtFh7S@xh&cUm~x?BH}zmJiR2^4Qsaz)=2TT&-?e1*)R0KP zd27XbMY5&(sgZoBC?!^k=a?DORC6!ecAof=t8=oPEA5~pKa;OH%hyDStgcLuGn~b6 z=PJR|8;5_Nqtswe8|;mkr3JU>G5*O 
zhZKoWK>B-^Bos%5xwlAb2%@CU%suDE5E_0RwjW~3YfM}(MP6x#bf|Wnej2{aGrJbecldtM@Wf%)*4LJ8_>N?2&1vNxP;#FS*S38fI_S! zSk@pCO72LJbZNUq&&y^Hw_%20fjEeuJP9cYaTq}A5HGlXZ~K4$k+-dW7uS0z1i`Y)U%n4hV~`VlO}t>@{1{p+ z!rd<1>o%G*Mj6d;+=68i#%L~V_|?uHpyK_wU7gr=HI_-p?L0-YFFne?!RYyraOO4d zvGPdYv)mT82?@IfuQCfy-f-v$Zdteq2lnjU_Gl~P^*my!Z?H)MUw*cG?6*Jg@r9*W zv<6<&GenR?UGL#-`QypAIro)B)a#-F!piA$7_6=Wv72o{h@_6| zd`@Oq&t-Go<=7#4<8rY1hLAv|1<>?mE1i`%qRoV?@?07HXx z{>8{!ohnh%&mecd<&AafMdb;M>7qqd4~v|u>HefSFD@TL{v6MPtd#HZ#XLsKkh6S~ zQzQ{Z&ytE0jqH5M88OR4-)dY)KV@5Hxwe9R6d-a!yhgZ*>kWsG;H|I!9yH^rZO^ST zUe6QG4PtbZz$flMcjnJNcduHtek<~7UJL?*;gN(A67dzJW}Q-LUG9kiSO{wXYq50t z6nd*Gph!cqfE6Af#Y9T~PC@L*7?v3j76t-w*o*fzz4q-#7W6;+$#-wuen1@cci8mr zTEIql21fSLm8XC7!}osR7am=FP%Xu0TcPTCkqAIaz+(?=Y?iN_6kaa(PrD#+*(=)) zleIK5uV7^Rx&K#Won2ydQXq%`>5N#xLEm9rrc4a7O+uN$V1S0-z)XE^+oP$B*K>?h zn{C+o3|3ft-)HYX`JUhZ=vP`@HSKAsU`WBj)xcN=v^L4iY4S|6tW`L2tL$)Gp=`Fb zwJ{l6ArIsxy-6J1YIkEsAy!VD#K!V6l(k7JLprZgdWpVak{wBAu3H{w_PunZ0Jl?E zV-RVEP<|{~w_kVL;nlzYuJ8M=x9k`HZCv}3YZYte49>;J|BnZcf9{`t{p45Gsr3`B zSZdFB8k#JK6pYp|A_kGubyOJUgwpFTgn%(_)@-fGa#N`>bB?uu5=r|K&8`?V$%Ps1 z3eSa*S=$j{3}t?D+%JIyGec{W9G?WpvEr;XAX2$|Cdr8b04$2(i{$<$F=FNs6CoLEuSeLE0Q(tm4QczwL$Og@8_oq6sN&pxB!8e5G4<;Oj1 z9h2N5Hq&lQ*@(ju1tNkKIm2d?`zLu{tJRS*2b8w$=K+P_M5-?C9D7ZN8a);?wqCfU)UL59q8J1gl2$1baD5o zPanJciBB$^?LX1d^}g5jx=CL&k#)`yB*5(CvjnFDPjQ<>1f^2ifCMK*g|vw%vjH=? zP$3hHN!YUFK}i9yfi-|Noa*obz(|O)tjfg6{ZgFKQ&JPia9xwxB zJF3F-11GlNsL82^?>^s|0!wAQo{qAI0(ZG1W8cEzgr) zJcyu^_99N*JJFCN0!cU>$O&_&al{&u(s?RIBaU4+V`~X)JQ!nO48v&W5ozs=Run~r z?Tl)17Ohu)GAD&ZtyW8#mkvn-L`mN^k$Qt;=%s~_;dz7)vwOF=?aUY!NsX8FBs4`? z`*1AI^E~nj9m7qrGhW7p{)quY?zIzC#_Ku4xq(4X3Vh_tkNwX3zw-4~RG&v^2|iIW zKonV8z>xHCIp6jq2-c)CSPKb4O-P)2>KXJ`SK(RfijvtP2t!QG%zz+sC$+PFTq!BT ziH7QA-vX>PrhOTEw%`8Yx4drkr{4Z8M{n^jH&g%Iqodal%Lp504S)WXKRov3;~!TW zYH?rBE~7T(A?zDSS%=ApF9ZQc1t$Rro68AtaVE?Jz&hi@OfcE~%jggKh@%K$7$S-o zYu1lxF0Vii0tinchJl4ZvpMY=PC`s9_RQD&IBi0gr!ee1QcpQmcmoTLBFr~D%*n9* zLvQ#it3USk*B!lKhh12&W_0Z`I<$)W&V2FA$M5+7R)bS&BV22Vnnm5O!G;z}cmSJR z9>jt`@alCKZ4l|$DKeyN&?L6!c^(E~2pt<|lwyfNXb^@WjL`^#IBRv|jFralE)@m# zAVatbNIqW%Q7~AAnx{nj(B(#xcJUU7BOr!OmP-DT6CLvE9z0K?-tgi35=sdO>2g_8 z3Jiiiqy&`b!KL1EMaz^t7dazFoYo?`oE@<&Qu9j>+=!M&o|Q`?%X?A!B^A4T7wjif zmXj;r<-?R!>6@JA>T1;7@0mS+Hvb(%5^qCqFOE}OiV%$#Kg|g*x|;$3ASA&;yAA-P zAfVur%JQV!p)y{V7-#zyZ2~_3=<;v<+2`(Sb=0gEB|na|C>Sn>D{E$Ft%W3p073V9 z=q|0Ix3UUE5j~(&DIKeRY3xf0-onV2nB6j zQL*xxAVDzNB91kpD1wd+20F7&e88?dD@zpE(Nnb90t@*E&ph+Oz_EI4PpM!pjx-?Oz~ zNN14QAR>kkH4J);a4-Ncz+7tvwOS3nFTrf!`Q*F+2`~&yK3_(f@5HmP&2xXv`{>a?35xk1yy>Rus4%^CK`tl^1YSu zdY;i!z}-)F&iv0$-QC(So)=3WI+4`5?5xl(Ktze`i|fO~)}phzitfq^^q>zVT!A%% zK~g0i*ZIMnxO9K?I-QTuQHz?)Xz_2$#*htE( zG$D{Bb^lBOKl{0djL-b%oYr*F&_Q<1XeF4)g$)H`c4`hDRJ0RMvoc;68B1M_4Hmfn zc<0Rj`{B>3o@}-PsWRhKlDW+Oml8BCccc%Mj$@G4=(g9fxx9?1--9FzD{N9VZy{({ zpqM3T)3)e4xH*{(f*^8(k!Ca$;wjIz-~HA%t-kXuuROZfyEYBiCJzvzj}0vTjV~Yn z*nj%WLmzJS>K$(bOJJE4ZZm^OI`07jh8b)ubga=22M7iMv|+&3VK@!th=Q>yV;t!K zxyjVF_ixJPn^4=g)i!Kd{T;a+SGgE$xK{4}sTxO`cT2@!Er*{X#D-n2+!Dcn!Gcipebi*F zSA`9q4@^ls^jPoApM3U#z31YF7uOPnfu!cd+TxTaPu93J?H0;U)ddsIG8 zM%d|~v$Tx3-%09xoa%xghGGVhW1V5l?5{ktOj~N$-0=|+eBX!FA?hB-YICXm?(hBP z)t`RbE04CW;o{Owy9kK>yFUK#AN}H&9{YGJP@A3!HLQ-o!ZACA^#K+zu|;e&Vy&U$ z7*P}u(3wGxd;cs8MIJqYpYq)@X?NE-! 
zpKa5kpdpygFDK3tGe9B+Ndu1rl$7XpLwKr=FaS=UY-76RV`^GqdfJ5(l~0gTLS!C4 z15BA>#t5>aY83vCeby7Xcac-mAD{Co?T3=dA&%?<{PjP&>x=4%^~mdq8f5lhm1luh4)UVC zk_c97&>%u*aS6KDgU0|XKXbyfNW$Bct^<%s^F9JhsS5e|-ru7+ue{-!+=71GcE-HV=bE!PnJW<> zL6FMFR8t}h<7AR4(AfwOg%Z79gsCYXji%>vVs!~AUEViuj580mguF}=Fy5x8$8+AQ z+ZVF#^8icEYZX=}6C;79Y_{uE`>zs{BLFEX<8_``8^jO{OnVaTP8XP-!BbuQ$S?lU zUB7g4U|W4*A-p91Z3Q?9$50kROLJW37b{FBIHgeY4p3GsDA#4=G!BKx+H?!))D{#N zx@u*~cXWRMY*I}-1u$plo1dxWE9{4{atq?*sK`PhAqxnJMoq%R0jR%@zxI}I z)<6C3Hy@q9hUxH|dd;Xi zOg(_2?7o)dvmP?Cq zVKH0!eac#nWYY*q`u*19*XZGtwkMqKr6pjdHicUc-29c6oJC8tw%jgI8Lta~g(ZQ$ zjbWx|aAJUW{`Oyf{y!hzj9MF3dX@p7IY}6c%-Ey@!=>%WdU3$k!ZIL~a!fU?(OX|f zXK@jOwRMOn0!jDa33fNCdnjEYvLgx%lgtzML&%JnX6r~E)}X(+8SibP{oQZ+7X5dA z_)UlBuYhZIsnF)*xclTMj(zT_4=%O?+!V_)!^Q+_42~Z^ zhxz$wwB~2gXi7-W(dtP!4~#8i*qJq#639HS;!*5QTXE~0;gt@-fU?3$Qxz#+Qg}Ro zA%TIY<8+K4{LPQu`-fkCW({6r8q**cZotw|WqF7R$?7e?y%3yl&dbfgh#8Uvgl0s& z9@ZDnVX(di#36*WIYn>*FoA?KTq0?Yo6gg+Fk)7rpD@jcl5mGi#*v}e?rgK(e#0wo zUj2LTeACfG9zdq3gLkc<$7k`$6Yo3r!LPh`p(i$5+KW6`h7My`86b{&}IUj81qn56JDj91QiVKjs=qS#Q(iE_Cvscao~JkX~( z8G@1wBdpULC6jrtmVq!FV0G2y4qNlHn3}3V`2@diA*2S7-lN-xz0MmWtz41>-q>e* zmpQPJ>%H~XwnySQPKcOT;-njTLZacF6W1!^b)K*Ugwn$~tG@pa@A}f8e*Wtx5O_1F zMKOpJK%NT`BysU{5STOrOP|@1nc4FlCes$MA;K_1)a#+Qz6L}Qh&3!6Aw%W~8M~bo zk}Y_l4X=j&<_4yk_4u|!d-dP?!SDHxw_T%Jg@Im0PpsmD_y3P$UpVp6g^pTj$-3_a zfkqHnM1~ObxXTzq2)>8T~#!UusT`ZkC z2^$2EoSctvg`e)>ml+LS5M@Sp@>#nCKc|YEoCp-_0P_tQw?xqXnIHY}jT`6X?@!#s zRgAV?#)e$_p}QXWlmGJhCqJe(XtUKf1262h5n4h3LJXCJTMa<&m$fPDZJ5LW;09>U z!n#8gRlc2PT&R=EeJ>JX!&cL87I{_##^%TJT82RR9xvrYMtiXo-(LW^F5k*NTU{{N zQX=vTu(6H~4+2|@IEdj151tT!1$%QnG9|I(k+O?Z#^$6GK;cIc zr6+pQrBD4lF8#=+d_hK@%c52xmG&?)dX&eXEc%+y&E-L3-(~uW%&66B7z{#KW+Qlo_;t^4&kM z`l?%MNB3T1%))lK0^`S6wTnOgwI@IQ3!i)P6RkmW!`s~0#AdGxk_s9Uv2aD(AX9YL z1%bHK%Duuwl&=$!0LXv@Ln;Mr5?M0NGa-i>jlNe$r_)0a4bbm3uy=10ji!QBi7E#~ z5^cG*?&F=ng{ldL8J<#@tV_s#v&I6|Io&c>eV0o)?1HWiye>~zQiIHf8?%5KKKjwJoEY1 zQoQO7f(|;H>!>yAh%7^cVXe;TQw)ul6a+A3N)qWzS!9YC#hG{B>lyZQ8do=U`NOHw zml=-ZG16BGsbU7;a%lgbJD^O<8l^7Bq2wv@I)JG;)=R>w?s-O)HkXsxEOXMR4OY0jSA2~8fT zjMuL5Ou+c+Q{7{K`pJ9MxxVnotHJkt^db$Pd{|0bGbEBZTPPMh|0v$5Ss2B-RvBb1 zdK+!@yB*jlgs+BmL2MaF+GRK;hv95=cSm3WYeBH^5X9f`!t3-;yz})(_g#asPhGo; z6VcOu_mdBQKbgB=#H9XtpOo|M> zs{tb|Ol(f}i5ww`tW69>$@3y0iURn44TdGU-5?pHEkHgRO@E|lH@Bzc@6~fH&liG$ zn)EO~J=+$7%uXA3R>o@=ShkFNS8(jVf9URoCs+HekbQVUL2t$oZZmHX$Uo=*0Calc|@7L#)J9c7v_1po^sz*iDBG-!ANt7p!k zy|Mxm1t7uDtP|zwp_C6nO?xnwz*IwLU;vT@bqioffluEXYqNZ#5Mo$z zMS%M)Xrd!vo!bRwo78*_b1p+w`XU`H@40c((9-5U6ZsoD>+Lkmk2w9mZ0Rf~l){wQ zhvgcy0&Sm?Zr@qXeUjbD1Pzn%iWy)hTh{ONu>asJ_B5vTbZu&7*5A9MW4H!~iSxrV zodC~l;$466;d>UISlVof=By`8{z0NdE+@G$uD}VEfQ8i-z7puJuVMM@8Q5R|Vha-P z1}%F30v8ez5e#@FV0)XGZ*cq*?|Qqw;fs zd+ccp!Wh!`03d5!k{XNr!27&2Qg;fnV+ZOXMin+L6~>V@($BNM$+1eIz=C1Qo-B_> z8`u*1{TL}_-uHZXzJ-uUyBJ}SufIz}?PUU1xrNG53X3qg&aPA!S61A(3>7$xF;GH) zV-3XwCH?p8blu8$?EvRA<8+93{L#nn{V(@E_E?KW)0336ixB0R;jpeI|9*GHh|eKX zmJr~klW~k73bB0dEW%y~o)D$b#}>21F_?7zB*@LijK+Mu7QgemUfX`tiw_<4$8F>) z2de-NEPv*gK62lmw4Paf+T$9Wrehp{wE?N2_slx=fZW7M2m#pPa3+9EMd4aQR)&%@ zGUFmpyOEnp%MXmvIRldWeni2-ZNJ|i02IMfb+q=>;nxU4C6s(($V8@##-tSro?tHj zD1`Ig(%QmGRmJD=}l8fsft$%&*^d@7G(#pYj+2z0M}QS_7PfC1^s9CVil?Ti`OYVa`c( z`8k3`FzBJbzK+e+RVXDLvoO0Kw>DhNt~_Aueo96iZohF;QTr9Q-Jt)*cfI=X6o9$Q z)yQ8Wba)95ZG8FI`#<+T)iWDUdm#moP#~6&Lcr#QZ*4Aol@=kVAtZ4*5P~lMJkHL# z86;j=5=I5!6lMUJ9N}fsmQo|v?}uO;pwT=4FCCL}VZ3pM>uN_KiVOg-P|AZboLrBc z-^Z2lnt+ukzyK1<8sB&KlWXt!z~}F8t;EW!DS{;fRg+-WfKUL7T)4rlZ?d?4GRUM~ zq<_lKclx8A=ipt|I!rNnYc9uSn4yG#2Z9MBbl29HFz5BmTy-7`rE#%-?YHoHAd1H@CMG{;h*~8m;ZR-q0^7I 
z?398f1vh=SjvOdN*7wReS&j*p-axxCT0=Hr?l%*|8MGjRO}|f7z_XlDngc7%>j&`SY$_^zf6^v==lu9wpQZ4uiTO8BtaAn0tOv$h5k4B#sPDg3b_t%3+bO4k~KD5o02 z6Gy1@I!LMDdp?K+!rrES)3<(8|2tn$d*vlN#mg97bnus7`_q4Q-^sgMUET9MG+`5t zg)CczT=6)7i5_l+?NRP-u(dHGKoHotiI>&8V{lpq`n@RexGY+Gnjlz^NF>v)@tQnq zHHdu7&CTJ4>u=f>W4U?)#z7D~p|H}oKl0~a_}adQPq#g&CKP!|U6ep9+-tl3HqSrY zL&H>jA!+3xB*9ROZf_H7D@!oJ05vJV#wom)jcwU*rJqd#aWPaNleoRnI{t<`Z^I9K z=QsWGB{=CT0*{?|^w&T4=vT0)mpoDyWEqA83@I2Iwlyg}rWZz%*)_-xUjT+|!Evvh ztiLrZ6DSE?b_H7j3ydFf?GRD;>lUfgoKSnRM5MNH|5CM44vm z?b+PElb7HuFYOwg55F5?H)6=S7ujup!|l$0D;Vp#bJy#295`@j7spu{udx^yg(o`p z-S7S6SAYMLUws6zHwB@*qy{8q7+ranE+@NwGSYH z*yM6I3@Dw^Yrq)RyA%noqhZ2wJy#Z|-{nKS)AiH%I3pp_=IVrmVfeAZ#?l&sl{HWl z!BZX#7{mmtxe%Umd9G1va6VR;*ckGLcA7AL(UQ0l^90*&GprqyJBDCKg#;R$7l( z=B|xM<>mfw1%d3y8pH3&soHb?9xBT-jnsAFA|tOJ!sXqMVQM-`6D5>(m$S9GTvw9Y zns6HrTiz>J18l5^m~YKvs@X)OyAUL^ys#jbQ`<81E(F+evj22)NlATWp8hXDghat2 zclM>pW;VY`?<+f=B|RiuCArU1j^0wz6=FhNUAm3@{g~zEG+;x4x@@>K%nnf*uN;=c z5bLpruRgi@TYvev2U=?bfkrb)pqDUGOnjLmHDFnu4o=Xux3YrX${KVqfD$=M_Ly8< zo}iJjD0kv`u@f;Bu<%5Lm)>%ne%rUc>OWkIhHJnZc(Qx!XTS8s=N6XYvn?x(2Z&@+ zkce*aa4_60^|(CRZ`b^>xbP0@un1c$lYW2s|E{~cuEXUG8a0PAb(h{F&tQcR#lCM7hV-xBt1Y$5E&Vgb>B@20N5)aeqcSFPA<0^A9Bx2`K3Su|TUiA5Z%; z%R*NTA!O_2BvL2EB?v3@9O7OA~9bvATvR3?YStO8P*JT`%#v zMx~~90s*iBG2mGpf5$6tZ@=lahYw%=bMuRj7!jUcf9%ZHPv6%%y>ikSuB*!X+=93P z?25Q?DR9B8zJ%x`1R#XOmgkybG(X<$1Zc1I5e5uva%BbQdwe6;)Rz8ETITs@IZEmL zr|H^EyMgKdhjzg>tUNCO%l#OOR=n;1{fh^G>z>D-ZFy5OU@PDW0X^XdF2{t+H(sWx zR{AYUq2=M$M{9Ts%jG^Py-Vw_M%PdKzmY zOQe{KiS4|M5nOav{|YlEYixp_Ye= z`pjc;T*bP!og%AqDjXh-bW#_jz+h}{1Q2o^hmK6c2b_F8fO0#Kk=|<7g_ScWH)BIG z*g7pnSHG8B|E!E94Snvg?kl|y)8S=;YD;>&SwFGT_k||B$t&{#83^J4bvuO{58R}E z)Ud}tIPsXus5~zKfl~PLQyc&7uKONqg`(~m5(z8s`Jb6w2=dQ;`hQ_95Nj--UCbV2 zYb-2}YPS))BEo#GYqwA}54E}nVS@P1TMp~nudf}w77f=Lui=UIW54wD@)K&)tigx~ ziL&wYeP}z!m2|8nFx#N+@#GsCsj!<^=1cG{{Sg%z%*EQ~ssYnb-;edjlCOkJbC{s29E=KfE;>)EBJTK%{O z?O7P9FKI2)`9KgAUgbKb(U-`*wm+HYBUp7&4c7=a}8kV}q4* zix9?w&9MHefK)A;B&5jSP`kffqBsQ1Ab#D;ZqYA(!QOw1XdBc+$>hnD};U<#s(Af+Th3+{V8iR}X zJ5Fp-ayfu3OdNt3x}oZu(_(&Ct6!t@V25=E-hcOr|N0kSeX3==mM5Wxbq+}smjo5! z@FRV?()U@;srY_`0-LJ)lyX!fIk1WQD=wdcY4meTfXQOg7R12PsWXU!5W=LC>{Kf+ z*QY31M@a=D39@OyCA-eYg#(&wj06KxYs@O!{{C-%<)2?Shd-IP@KuiE%TK&zLodSc z0G=cWv9ofEQQ5IA2DULYcHU)8$999e%>CY+2-n(#*;Zu9CqlN&U>GD*|E#~5Gu$G% z24d%>CuL(;I9`_%N|XM~48fuhG2jscKWWDkuCs9UUL*ww5<^|nBsW-EPiFfo|1JQa zFjbodObtO0p-#%315(MTVa*q(4>rNo1R_@_#XKBOIL*Ni_al;Y6UL?Z%1FMKX`jIC z+NI;-_`0^Nv;+D3!#er6@-A|}hcPjf08AVh#AT!vG%OOJEG08b4x9y+;x?Bn-5 ztlFmLK{c{A1rArHipC@e2BJZL&GrU#6eDe!K4wYxuMio1{(M1f(U2Ced)XcO<+s1! 
zC-->Qrq{I;pT>#hr<*IAXB0?7T`B#y&I3})LP)6~Y*n7eae0Z0fShq+wk5-Laoi9g z1U%2n&h3Qm(ZtDWB@^h+iW!&4FDQIDPai3 zX<=FWvY12L`S3JOz2V~Kwu&HPD5ZuurAaxUEy*1M!63rgS{o)#8NH=*9@FshN<>}< zby(LZFxQyH-suI*G+RHt>up}l1H2Njsu`;x-ujncdE%GuduC~0FDB3PysOS|r4S4# z3&efrb&*ZNI1}Ra#BOGp8m^Q%U4ub0ff@8v*PmIXatL_R<>R z=L&g*2mi@pTDc;DIo{((>3(=xR=G=zfH8WIFHCYvp$ z*iK(#w{ znBOx8QUVYcKGRD*9h2{;M+9NZ<6CJWwVfeO8I4GPdrESP?(5<8O!AJTvv{Hd&&?o2 z*zLfC5d_;@)41_S>CeFV%~rM6==J*WeIM5!+OJ=9{r>mfICJe9t}bt2b#vvl5kken zfOE#5a?IA}8s~MdPxKgV>HDF0tAGm=<*D~+NKsyZSD#10J0ZJcuj`^e?=;NwY=!iA zGb6`71_O=G<{%LZPG48&bCcDzF~9?hz0(U5`ypkXZdWU6d)KW2%dpzOnxTOq28bjW!m(L*SR=L-Qih=cgULc9EI@(_ zPi>bs+O?e^o1@DQ^$)YJX(y9mk>#|4fkAgOz|4FzQ6Sm*&|JKBMV8>mcctqrO2=Im zgdVGqGIqb|26Nsh0FjbT03QM}q06SQC=Qh*D0>Xkkid4x z9{R*+`o0gVLo`UYUwYdO8?*HbJ!HG0anX9v9 z_%b)>=Sdgov0DZN3kVh-#CYT9e1z`z6pg}BhbpB$P z_KHJc!r;!ur8y|0_aB%$)Sju&{WnkeyWad&Z@j#^f)T*ikH<^@>(gIrb!@}4q6RQ9 z2u;?3boY3EZz{pV2b7cg9xJSSs7b?UgQ(j>-04HcF(d%a%RgoHkV)MJTpdnIYt1wp z%lpm!-zMA1ygeac*}&rnw;h_-uYO_cUtOz3$tmV(4}R~9|Brp7yP%_B1EGPVeo{0X z{C2=58)e?*GzSY1W8Ya4pyc$}K)SKsK8J2^9cRy; zz_ZII)T-#V1|slkQ#GvjT>mQ~2Epkg$|zB#Nb*G4WXhI3htkzM&)ZTIJY4}61U6+O zmAi)d9I+K6p0X3WCNNus{TMnDPz}$855;i1Yc1Rj zt%C@&brrw#ro-4fg+IN(GrB5p$E|n%*~Ky zUgW$#S6s`;5eY22@t=X3SBJ0)Hni|SXpoO^5X3Jy{DSs%`)<-x-YjP8^BXhXY=5D- z7kip}A3m_>;KTD%dw!wj%@mCnT<Ns17|L*3#-sb8;NC9aX#-s!FZhuV+5(Hxj zal{A)8q>Z6N#lyYuk^WJHbHZLtTrx3PXtf+m}~5TkMj%9T-C@Gt%5+>bY+ zxwsSbJWFXe00kvDQ~m}pW_D3hE9atTJUXLJmKQm;S!$@VTXgutSQ@tdA$D~}oOCV~ zAixlG%q~Ll4M*R*f9CL`^WL8Srfd!SXzz}%!)`Gn_ZQyx zc=fyQy!E!9{GC7hoqzr52fv{D{kTO_+Owp`8m=-f6;VYXVt1Rzbj!OE0zwc( z08c^ei12Zlf^0SkD|q_=*Z`6Q1SC9BZ%^0fzbd$PUc0$!jn`tpcxv7L&HweuFRSI4 zpj86~1S=9xAu(XGG}A4|!bfIem)UsC?7}-Nteaf3Mt`%5s5d}eDUd0uD`tnEu>^swV-WC7D(#AM8X)eQ5Sn=7cf<$|9&yg^Ge2k+J`?o^}qU`x&KphQ;pep zQ?Gh}Q}G_@_m<=wM~zhx<=jc?P36U16%&L|u+~DyF<>nW0YW5Rwl?rUhzB8NYYoVl z(Xh?<#{IXouiy6~z0f$k@xtp~a`(YKNB+iv`F#+OsG)`#n!PC3Ww)531*`{Wf8!^< z@27v`+g|zR-~Y&$KlJ9i?)zw~$D5w4DOl)&9F)`aOY<`9CVCg0eqN+Kxf~Z~&rvql zv|JDgrgRdWEKcllcSc(fNtZj8G+Kvs$q7D*$CT{GW-?F7*Cm6Rolkmn3H{&ZScjQc zYn|kr6h07Zyy*7Z^owtM*;}W@`FZSDrSV#g4Nmp&j{o(Udw%}Ovn#ETnl8tXgt!1C zvxDdJD9;aE^1RaUJ;Skg;xK>>B5-UV%_RN8QAi;m)h=%b7GRM)zAPYN;0Y7I`1Tud zXx`i1wytu#=Eyg`@)b9{`q&qr_^6UivCjw#ZM9P}*};7{oP`$`p5z@1cK1Xa_mkQo zfGlCU-T)vReLZG)TE%;td)fyU_UhZOyA8J-K8nMwn>LQLZvJ<^o%y4E^12H<@r#W4 z`XK;7k2ikwr8m6<3-j}5{@_EuuSlRp4dwNo=zWp9UnLQws$p(s52n=IdGF<_G+tdJ@R>)>{OTv}f2tL-@&xH@ zCS*&jt@Dj_Uv5alK7(!_;UEAlc2QhO1!&;jO*CCb9vbmX`h0U&~Gy)zBX8f>y z#|w{aT-e!MeK<@<@TJ91|Ha9rN8f!)KOSq_^E{88!o;N*pCrSwK`a6gEPSs9-_%gE zUd(-J&(-$m>keFxI}W`JFS+%Ve|c!nkq71)dw)&BM-4ShU&3(U#lRHJVIx|>U;E~F zAMUR7kA3<0=W(ugre&q|GU`4n5KNNeE~0GoCj=TUICPg9u5_PcbCd*tot#z}#)xAB z@-(Cpqcq9O3?q}LWO$HCKMgI0Y)=M*lF3@lZ^R38`v9E(B=@Q^Udv$!Ej&E6uK)I3 z_dKN9q440R45VSze=(JKX_1l{E)`6o%p;P0#e)HK5IV^#JEkjBUZrbxN_Eh<<5!|`|W&h;6U;F*v`zN3MT@?pG%bOQo%zBp{yKUiq z8j9a0lL!_d79dLyP?#bG4=QfbRJ%^o`j$ht?eH&8VGn$o zzldl0++fak2KIRH#L@ZB%^&-NkNqAZ3R_IpvrJGVT$1G?pxhc{=j-frc?*U>6k3>= zl6o%FWo1f$8MSu&Ho0{3Q)$6r|E{6TRAGH^QM6-Zpeo)-<@;(xFoA|& zFo0ONK1wwM1f4FT%^qx!FtEfp9XQVYJIt{Zw=ZBZtjht49v3iVB^kp|?^9iGy%np-i=znrFo35#SOTOPHUc%) z-GfRhgkcEjc>oN;FoLfXJeE)bQ0$@UO~JDs6npS>Exz;M9qq$2NA#^X-L`RL-;KYs z&^+*;8kog29eN(G#Kpx6>Mz72>-Qh^eE-=0_{8sGEnL~hU^vDa-xI((74P;>fm=ou3;)yhH^e(`Xe835i3~NA&*s!H>;SYm4*Q8dsI^ z3IN>o)iY;4bN>_j0xO}UO4LY-p_$ZExmu*d7(f(72>Sy>K?q^lNkNT^g>Ln5u>f|_ zhQeBgOyzf^z*Jqe@3`suiXy)S_S1fJc=N~ac>9a+xu@?s_Qi)ksUJK3Fsx$L^0^h* zz#~NnO#~u~UeJS*PSz_P#F%c(Ko24S3Nu~{K2#j{b-U?J>4lknxbfhPSeV_1>-JxV z15<}^+rsVtyrvt!H(i@W{VFn8W!y3UavTdDJo@CB$IpD~flt5)4a?@d^V&j`Cu;)6 
z7$w@3W;^a}J-z$KXqnlsmA;ayEb`$`LCRYi8y;J)z>+SiO3)%ivW4ziimky(><%uLpCO{YO<}cTI zxqe>+W&*>&hs127p>MnShW}bo>mn zTCAQu_jD^1eb18$aTLKb3Uz9t9yj6{Z-4v1`~iJn{yMa3`_WW08#f%fzJJpVxBT&e z`u<;3@ZrOUga-)){{oWYR}!wP-GJ}>#y8=i#~zGVq9rfjXhgd&#mIA=c?_jon7BAI zv58ugSzC#uqdA7kCzKVEUp`3BBdhBw0H8o$zt<>7z%%+hA7uVzr2nnRp4UKC?)0M71Jca=vd@;S<}Tki53kO{6=mkA?8 zy#Y)xfFcHgpWB<>GNiJYswvbJ4W7FlU)i|cdqMi&cLuzPZ@g_0r`JyWVxPL-d(Q)3 z)=S-Uh*f~a)kR37aCpxl+;N_ zhQlwu;iXHTe&nu}71k5Gh$Q5ZhQqYhh4+`T2iXt;&!y(&bA{9s^HnY0GqZpxf98U= zcU2g#GhN0fj%^(K(@%d*MO61RYXsHyy>DGR90OKjDS`ZFZDEGQzqn zr5ehi%aQ(ssRuUV$jXEPosK(+(!U1wF6?bzw{XJ;D88_<@rpNID;_Xy8&Tz~_N`)v6Fy#T?@bVX^vn zm0>zxEYN;D-hbqU_aFb-nUx0?V=IJLuop?lWw*n^a~pYa0tzI?g&EHeyd9I|5<`-J zU}~DSvyItvv;M&g+U6DQo38Z@R;|Rx?mqUPo;c_EXp`3y?1C;#moJuIA z|GxzHVX6zrrPqaGT>EsuQBR32Y!o8u_h5nuGHI=SX^}{4k$aI6ur`KL0(ZRVR@9#x zDaQ&GCgNb@5bn77&W-)8gKbaxP*Oojhky|iRpZm8LdMsD8@rs1NJx?s3`HL^y*NSD$ z^JFd>aMbwxxt?#nE2lEPQ^axT z1_F*ztIMm|(XDWWV3y`_`}HsWbW_Y~Pt>zc327(itqfQB`MjhU`<#}I+itfvBz$Cf zx=Wn8*zR~z!z);zA)A9{<=M-}MEY4&g;0*f;DKP+JrptnHDOA{l@C)Z^HS#xA_>AwWP2+YVC?48;N9bLFVy<$DDbA84q z?|$TGA3MFO1}vfBD$!k0u?wluMT!?+2y9Z=n7(%FiKQ6oqa_cTF^C2M%pe3CGc2Yi zyW{Wz5YWazN(r)zL9c@_3}J0j#$Ga9BKMkveDld2Aq!~~LmQRR zs&F}BnwmIz^ym`+VgMsyz;x2aY%g&!u-)Q??0K@3R7^<*r#)%}6(*RMMY&G?`G?K& zTY}uHocDroMohA+I-H-`7f;n^mK9xi(C>;fUh7?h#WwIiKk|izvmI{5EWInX@68Ti z*uo?p7qiQW2{2#{!a)FIsey7t+ko7({q~B z+7{Q&luxo#`mc+rw&dDxruB0df-5TLb!sibS|sq{FCAO@$|EOQUD5P(at6cDG!@F`xtxhar@<=y(4=_WnCglH)uN z$Dg;Vx_iRz&g{Hv#!r7Lk9gXw4M8XOknJbg~5K$3Kf?_P_2=AkhwM2;u zc&0Y-<6~o+4p)z#gdr34b8B8_j;c&&SpDbI!fe$Ao$~L*Nm>L-2(k%a5tr$LiR<6> z9HVjmY4nwCVM-Fh`rmaY$6_zoT(ov7s?Plv$^*;noy~_6-RHSpFa{7zfX^5m91bqO z@QVLibW0c!XI{17teI`5>x|r>gXfQg$G?96Q{_pY9V!+&0tn6}7_tLQgQ269A_?m_ zZq#ZJnmdSt7Y!-es9Tl;;#R>wtu=xm=xbhmY#1c{)5MxqE1I_>`F0tcg%*)vp3{Gq zi}q;Wwam?0t!sIn2Pqxo-8|0QyrVi+8hc)um8tc2*3A4$0;i{e+aG?u{Os|%!+95T zwNpJ;G#kS#u<}qOAwcl!4I7EL%zGrFrTe?Oz!H`k_`Z)uqp|P-ee|(LXvyVd90?8tk)aKoV9K9d0{0#_D5wkwj$>~D}>jvsStAjo;*-L z{;h|eaO%QAU_y{=k7ha7kWM%juq(-+(FBDdbhEMa8ivuTP>7b+)h6JVxoTtvOkfZ; z!?UA9_3?^8T%Z*INK-2fCKXI|W`Zk|e7wR~?Zx+6Ebd*`LC|bMHbpR+ABTgLqu@C! zd|rVoaj1q5eeSM@%hk|Fs5L-{3btjclSZ>=!q*p9F^zwY^O;&k=C<$I#f|A9BFMzm zQKP)Cls71iY{X$U4vURpk}PTDQOfFoT5HGh$T$$j*(`taO5#>Y3~*<;T@n*d6=Fe0$!kc6>| zHNg-`#F4;4i(!+k*T-GP_)=v;ir|nDpj4`W1VJyNm}Q3mBGW#HWfr9E{;YUfQ!e)xsGPSa>Rsc(d^ z(N#uF4HZYu#*|k<5FiW!uwH+yv&H;jOPKf3$3{W3-hf|kBz?4FvTM_p&U`9NYkG@3 zqPw+&T1nGqJ0uZYC~Pfl3oH4}9~)f3>IWNMcpa=8ynKA-x9@-SNi{V+iy)e?+rJa?jT_>TJ@DnsIB;q9vTaVr+}DjatrX>f~O` zKF^Nat<>`*0NU7G8J`87diDhzf6XQT6u{rjdy{?iu?83#EMsVB2sBC7M^3_y12P~I zNe(5O*67KS*mgwX#iqu%PE|Aj?YN@$;|&rYaCTf7LOcNNdF;vH`vLO#0suft0h39E zi5IKOm2I85m?rc4BF~c?-Fsmm08|Dm*fzEuJGY$wo6sxk5#O+y*Rf_3wU9pi4|m+9 z4xOwywEze;K?s#3#I8nZlZ_0)K%-VesLwn*eWeT$`CI`kB!ZCe(t$(j;L($>s`fC? 
z^wGy!qSK;N+ z!nTpdY^?@A&=AVO)LgTC`1tV;^BMZ$`siaNFf%ucdZP{|1EC}|Y>layZx4b1A#CKW zBtQ~aSZ=|%z7|UcONJ4q6`{C zz!fgGjBkT0a{#Zbp>IQ)4cQQmRqOc9L(hC*axQQJQGnJA1whck;jD{6)_m9#>ntRb zXg+2T2qp-Z5!JLhAVKgv4~7|m4lozgoVjM$XW8guL!chapb<8~f*>3L4ycvTlIG(i zla#CPhIB2x`AVL|wFH+vWsrgE5u~FnS+72mldUxj3<*e`evSe=$1lNnaSH?s7_+h# znGGqt93^q@6E7dW>xq}08TJ4{$awA#3yC6=O?Vmqz4E1HTtdBlc z4oCdM_`=<{O-#+4Qh{#3KtsAVFVv6$!TNqQ z&k&W75*QH&$jjXRmI}5ke9)Ie2>iQ9O5~`N)b~ppQP5iJ7p9V^c>< zKAL6eIDm@F3PsV%qW4%>{5V-Yr(yfpP5a(LK=@z;8^8zG8{Ota9nA<*NO(>T1$PKL zM=wH-2mhLMx8S-n!-f-HFHJMaPlk+PwwN~LA7@Jw_ZNJ4;tEW@(z@ec99hcuB1N;22slw z2FaFaUdrerKvF=c=>LlxJB;4GuHJN1t0Anq)DSxpkg?4C89$nUt?V9OQrCfzd?nbn zdB<-%B8M!+}xgx85W&NZ8hG~^U968j zmWdgj!b~tdeyn;-X<;mvrz4{hC3c^5QQG+eDzbq~^JC$lh;D55z}dm3!96O|6Od@2 zQcC!~#-LNew(^eZpc*=fkYKbxQH`u)3PyQvvXWZ#`7mccNbQMb0r%mU-G* z826)iQ?InDY@Mx`eVsBkU+U;=QJtcEQ>|7KunKt0kDZFn&O>-*qdeh{yYLLnNzsK zoG8-chHU@|5y5DULczn}U~%4YdZu|^9Zg+8lWea(Eg;ySeKxe2u96H42+bfs!5xG@ z=6db<$(Pzph=K@FN3osmalJ&Fw#w(>xUbOBn zeaQhHiz!b@1j7bKn<&zdTWwpXTo59Hi5%WqNU#ugWg!Iu!RkAYJY6`zj52_svpip= zkFyIk%;DhigE&<`<*;X3g6j;hnwn93ZqM_e9EG!u4oU#fEFb_heIKC=aq;*i!G&9P z!KIaD)yQ6V-Ek^p%mVo0H||!`b?xK_hU`mB34+9wW=ygw0+1A!TP`|Gbkde0F;DK3PL#`CE-X$XiWLLk4{vcJAuPzo=)9I zA4|hzbrSmy?L|GTfuyw-oauPU`hHg`=t)Rn`9VowV4w)sb< zaLJB~s~2s%_@jdw7l~ujHOvOU_nx0Q{;m6-D4&{ZqULM(fr00)p446( z*8TC=dP7FT7y}Mlc@oQ6Nv3W4%gGaCwAO^o49x~YC|gEFNJtr%hf&Un29Mo-{}b?+ zewjY{ST<(N6i&@d{@ID?qe^;F>60Gl>1#GE-}K_J^aSA8{1=mpOVY^Yoz%HjuTI#? zK_qUS*e5?wD8LiB>ekYElOt-=pO)3ivB+n!?q);d2-HI0Yu~!BT&KLlo(Comp%w`1 zezaOj&$N+1iQZ}KOpW^)aVE@_OxyQteXb5}zob)9+j+Bfd_?SHiti&DBw3jtGIqia znbVP`7M&-vowbJ6x@~tuEv`aZAdTPVv;fQ+WULP>Ex;(M<0a$gn>?>pGf5DT3^o}F zPyxhA(=1@vK`01y3EcD49`%-MFPJ#NynC!~I_zT&z~p1!@fY8__uvcV+<*sdIO+R3 z_7{WO2C8)p=)}*>Fo|!K{1E2|(wfHEv(e{4DPNOLn^GwHT%j>r-n!i|0xc348%SGy)W~8*a&8@a=Kh z-m6tBf^}CdLU!=blP?~A0h`AmmHosf zTOtV}1u0!HIWRJZqtlJ@SMGVF)K6OOW36y_<`5?KK84BIlg{kiY%)IL*`(Vf9h(#r zlb{e?qYRT{GYiZ!8G~9INCD)F3M9e}u4&B=VFZTUAzXR>)fgMt{L`y_y>->RPSpcE zf3oqjU%LH%Rre)Gxd03VH35oi_GSySwMW$@s{NwFS=-So=OQ`V0=XLTe#s=hihP5B z9cW}JvH{Sl=e)(Wj*t>ufMf%pdAz9xUwn3N1COJ#Pk!_w~sAansy#~@M zC_6?QkRXKG*yq$z%pimTL4p|2AWSul^X$PH_1Qa~o;b|Bduw0&>SL8~s(unrzVJlp zz=;E8Rg`d~gJ#%hOHPi*OPcVyn^23X=whx?!v;XaKyL_a2!eH2Q-0!y=M1GCGoUlR zb7??A!}SChIULsN;XoFL%eOx?QF-V^GtsAw>SMLwhdz!^AA=U56LN@Jtv+umOS~=3 zfTN9R`^4J;JMJVT1QDc+1X&tOEjvO6UQQxka4|Soh)jlOnf4`YzGt%;K)|VP9lr3s z0a{rmjh>fYldAw|&K^f&_9SY}8cqg`!_)dF|KVHrsN-`DhrAq`#=sA)ZHZE3mm>Kr zG--D!EO!<=KeJeg+>e#YQ8Cma;mm@WZC;wu5DYjRLTl|DZ@A^p-u^&o|4Dr4j4s|s zA3bp3&;h)B>;M94LI`1n6RjK@1(0Iy$&4c+f)FAK_+Z4E~6VA=%8 z7aTMKACo>i-+gkA`t(;H`1Pl%{DG+r@J{aqMs#8{OhNW?U6eCUzjc zy~HP&S)gX~U^8q+uInJEX>9TagDZAih05UY3O06rteNm)a-hk@Y$)*H6VLzcW6wMf zBU}Ix@vg8=%2>m&uTW#f)aNrq+FUUIehaDQaq?rF1f58!u6v4TfGs}TKii(gYdC`E z&snilF!ukkDJE@-!!j~68WaX{5R$<#3x?HiM1-M6 zDOUT4pz48$$VHd;bkD8N#R5tKlTrI+=ba%!`90o1%p&DMA!bdh>4q` z%=Zqv?(!-QFBct!N>^|JmDakJbWHqz(MrZVQ&_vV024t*d?V?5XF^ncRskRo83+I> zm&fFE!@2!?FR4;#__rob1fSbIc6OOd`q)_Tbra7WpLpwm*?r}Z0(hPa5WpOES_`l; zg3FjF(vVXTibMdSsN~3EkywBPf!Vr7fDlqTAW0|`a*#?Q3``^pS9+no2;%Q0AZ@84 z5M0N_kStd(8NGaRgvNibM5Qx2|5C7KDvgGVCIR2L|B1iZb7&IE%|Qvvmflv^%EZxY z^ZR)^yffU=RL&$Kak0}naqE-b)@{G#AbWp{uPM|UXF_0{hKTQ-2r}hm&YEDYug>}} zr7<8j5S|OKJc=WAlt25e2P$8AWMbm6WAzW8)Qst~ap~E|y5q#maqK<*B96`+cE}?z z8H605+oE@61Q$hf)k3pHd!PgnLF)jF03-uQM?=ckta$eH{X|$BMmXERXkj$mHFnwG zZ7E*5rtMf0&Fg5jj;0cL=~(cy-+b^%oT@P(G+0RpYFD-sO2RlFBhx?9^=t%?VxEul zBU-S;5ZO+V;Tp_bptC=Ao-_O1p2C{VvO<#uw+XQL>Tx}=tS_zOs&V}+Wf?4%5=l!b z{=(R~Y?vX@d7o`hrNhq^y^qu}S%wp~1-+t0hXEjM?iV2NMU*q7~@bh&=)0o{7W%<%t86>)n(U!KwO5 
zJhShq>!<27Wyeu~WP}tzLxU4`_LYHP0T@d17#D{w*QXG!2NVZ9DAhWjSs}~FYJrFl z1Yz=jMJ@(RDY$Ig6&P^VF3K>*nrL3WHh5;QIsBO~+~FKPaSBG|00;;YkqX~>X(oEv ziAJ=_3&f5IeT}2vI<6n}^^PFRW%{ZT4QV-zn+MDc5z%!E26T$gjevxsO~%qr$b?*g z5W;0U2skg%^mF*ell#g?=R&;W)mKbBt@)R(ko46RZt^P@)0#0QfX5DmkNlUvQq$O3oX^Dv%z;ZQ{TBw&mKNYSN)Kp)*8BDU+*gdx*W;#{C} z{KUS#*kNrqcAiggD~OUgD&YbHaH@6y5!}HN>P_uDb>LKa^3?s9nz`m<_fGQIYb*5E zD}9fzBvhGU06c#RANtCJPygq;Ctk+UrmJ!To1g|tP^(Wrx2l-tDcte$S3i8Tc0|dX z0+d8Z^@u7i&Jl@MgEp#pK3TAk1;sEgeZESrA8(Lwog50q92h=~=2c&ypDALOL2MyY zq#&gO55?+huefD$03-jinBTnS?OJu=Rc$g3O!1H0b^rGii5#Q=w2>gngEasV02H<| z(36C}C{a0PS&p~I^=YDIe8O$mtLPl3YWcE=ObIPy10krR{#I5qL)1R|H}TaalX$i# zX7WSg{gMdzm>V&3fzdKr_L06`0|iXoTgzzyB#bbL8~@|IeOo{OdQK=l%ATgY@~O z-|;IGRc6d+pe}*O=P+^meFsV}9Gy}x9X(dA8P91-2Lmo2dBbb3t8I2R_^i1APK8sr z=jnS($HL=flV=Er!J*A!Y!%vTIc*sX0TF3W*=iN7{R#6&2@??{5sWcFp>=$f}i+~lCY z<*WTurd%_w)1)FG4HSpC;==9NwcWdZz1w`Im^kqCCtp1F0vf10EUjotlj5!hND!7) zwIrI$hzJA40`djBB($!R*58eUbkw#+dsESws~Q3hi%D~bQLpzjfRYB zWJxJugn(Yhl0WfEjF};#FpM}MnNk*zSWqTm$s%&}3JD>iWb{;ijOB#0d1!3d2~9XQ zX>@S=#1ZxAq3P0&@zKMVowp4)@7_K&F~`5WyGUPL+dJPFm~Ps{?~s5w4IDg;j~t%C z&wS#(*}~DO+0rxnUsf}rF4x0`!&0Hf0-6A!Bs`-rB7)%h^Tx3D?)8<$OLKeh$!~o8 z)dvplRRJ~OI1Y>nGfXn*$omwa#680iV@;Z2o?!E_IQl((d~BH@5}nLBivbyc0t^n7 zF*xJ`2qG=6Rh>K`Y3L!y);gq~ShSY=3>Zhluld2Qk?qy1wq5h@m07|8V2wo30;HKt-Lqn%HT>&}1vy1ptl)!08$M*oy}yfB(7t zNAc3}8O*SU875U@Do-`ZF~WmVo}HK-ODm=T6pRke%cJ_b%SL`ypMeIy5wP#%^B;fi z(37RW)XPvNQ^c{Akg}3sHZVF`XfK9+AVr#B!|pX4*TBmWq@-y4@XA`3Rtdr=7&35# z0vlDm=Dh1B%NYI3VKuyNZCW+qb=okVIWY5!U%ul$W#piv$-3xO9JCUzXl%fm?QSe= z{!{8@CS|oU`Cl;-(>8=POJ(|0N1jByvWb0G3qp0F_FN(5@zxV3P*_Bc+`*?C+ zu_hMhu|>I3q1JF+7d3*tNd^{^>5$HgV`dnwp`=WTN-i`8t@|r_u%>mx%u!tEB9o`N zi*G=1l)nZf>>-w|Wb zfE<{h0N*g`6d;g*$wlQ{jcZdID1;*z;nWm1msIt~-v0W@ae>c|=FZ+{bet#g>AU{t zYxkbstBeX9DXCqZdo44DQJZ{@c+32}Qur2Gq``m{Et-s0;%E15X_>6UV0JN)!9`;`yT|vA5b#b@Iv~ zkwd}*OBXC%fN}r;35E~`S_rVT(O~h4->`rZ9?+aa&iKLQTegRnZbm z44yvnbZHKAWheugTCo~~WsQsZ5m&f?t$M+c(U8Rgi?=A)$^b%=4UZr4;J6F}KW;HL z0(WaSFd3_4L_=O0g<(|=u08+ND5;UZT;JBMlJJsB;K-ro&wS_J$I48eLz)s5z>w93 zf~dnKiLV<*b&kys8>Kxh*h(=1OxosGE#n-q_9fKve0yngX~dlXaM&9z1fQp@@T@ zF6KRq4ix_IqOnn2bLoXs56<#$1~nAM2k}>q017TJzJ?2#Dl-}Y>I@vM;>Y$KoBM_T zegE;oWZ;)({17uWA2ZDm2alalbB(a$|Ye4ug!ayq<0Hq)S zV<2HQRxKI;DY$A7wUcw8Q^%{Xy5O}_#kF!^l$cBc;h+lcfon%W2EZ}hMu8FidloSf#8F;>uKni0Z zgy<65zq%liP9ANA=2FLMIi6*sAbzrK)_wq1!kSft!dwu*kN~-Pgu->EeLbcq4^b1? 
zJ3WiX_Z|)IeeQr7$jd((a3mbv{OCwAhg}!$ocijEwc55y{`X|+`4$V}wlqm}F6EET zWH|r;8jbc1QrT^_x#myQnvG$%m_L1bW_J7MAKw3?CuaT9Q!gFFv=5w~3oz{)73Ol~ zKq-fnf`&p!MW7%-Brz9i5Cjw?R78XlVa0sIrjLk|K0JT)=~A_Ex=aO%=-=!UQ4$E2#D1{=7PX-6)SS{|oTA_) zAv6j(56Tf>4q!Wt0ExwLB(4!CV_J>MB*nEA$vDA|%C2yG`Ql%ew0^#>7%MKkPSqHD zPx?Q3-;>X)V>1EBag(u~&WUnN%Wnk0nibRgWD^eJ$t_!!C++wLK-|8xp}-jTR+UiTFJt}Bgu14 z?MFlqupzo#gJe<-Y4vMu?Rr45soQh_1BAZX>n7iH(5Z^mR0g4nm=zo%w{J70cADSoOn zIFS2e;+AK$P$>Ly-t{mrFo0an1<7`T*IKQHTrP)VaR8oI#Id6%QJeMAY&M}B2SkKs zvx#Q22`zx8U^GpL&_HQYWdq02LIC*!OhF*DY1_$tP`uDHfD!;a0} zz5~fHL|v1M<0!)HVlk~*adu3k{9AUR-CZ!n{`u5dli$)2w*bI8Kt8A7emZ*7U{i*3^BpGmB z7sk&)>pB===kbexB+=%WT`X2gizUOB#5cV$9gmBYh8x-36Jt*lx|=LkGVS5Eq;CN& zQvo5Z3pqq$jEup_2E!PRf*T0bWGE3pPENs!;3iBel1&H#(KLiO2_gxw3pPT3V`Z|o?Vh(Dm>>v8!p(W8`3;bv9+vib&c6y~!fd#Ex9csQ+A6K9 zrT?xBlBr)1SlFNyiT3BB4Z}AYfQAq1gbFEiTt~ui2+;DKcxQ1M(x%t~Ph&|o-3a2yA&>stH1 z77KUiRcco^Eo`~k*>aC#6Q^9*9+6ERT}W8#Xn%opBqbmnTPQV&EQ|NofDi)0aX|oT ze$evYNn78H1+`6xgvFSf7Y~lftPMj70vTh47Xx$EY1F4@!9j>2!NJ9w$EvTt=DeR* zYu`my6nm#%_~l1lc=Y;xNB5Q+rU_Q_&@%Nx5;?cfac8z*ZL#t&Tu;DpH1c^D3=M)^ z^qduIc`^u_KJr>suf6D|$xB8r|M?1yt#_*uteEgRr5O_k>OcS0Z{Mq?eB%Tr9HDydxS=r`_WShjsfP_e~ z+;cY;$4&v(@xI#4iVF#a4xdNnctk=ot&stdP>XZZQvMAhf@VO628fA_DQXAV@Di9* z^knw@nf4~%lX_0=zsGAo5!GU|&V6e6UcwNbkZ8^|aQM(c2ooSL0awXj)HT(A`i|F4 z?kMB7(mK(btpuj|H16Dg`-h)7{Ir_onj<^`mNx&WCaAZ!AvU*dfw z5Qn7$u=JwJflw;)A5n&3U;!8gYa0Xrx%?$yS}R;C;t4}a5&4Ujr$@)y5=GibB9!8^ zz%$Tz3_5*kZ8D(cc3M;4Z93vLCnV;o)0jC`1%(FeH(*2vErRN`*Ihk%%|)f%<+E6D z0l?AvAv|^9snYSr@v>F{NVP{rFdOJpNI6ACTbE9y6brZPf>YT=w2Ajw7(@i61WKhm z@&yNsz>ZOoP7#T(w=p4DBAj7dI({LBRq40ayFFQ~nD9Dq5+C}~H}AvLT*DDg5xx#! zKmbV?hzRao*%`Ee45`bJ!_^OZ$=aBb^e9W!9vpI4aK?x zu|d67$IR(ds7=oyCna>CAzg!Qn=0WOZhBR1hkJGlE&w=o`sklLz3&-JHcmL&wf0d$ z6ot!xHl}5|1w#sfa1kN%NkprlC@`wE8llL-KxhqVAqZw6ms6ziIgSI*6Brp8LZP5w zOamZG(y%eGupL>jLV<>Yt9QK$rNa8>b!D+4Px9~#<162M_}8C(>5%e@!%%JxI@FMA z^{5lG4e#mJ?lC837y28eOLc?b_V${{lqiLSl4{9?Dr5hd)~!LmR#m5Cq*?kGzd~Ny zwky-`UoPS;Qn)7e?x6-Uh>BlAEj9YYKTI0|x{@FeEKUS&x zRCZJjjk*sa2{HzdHg6}+V@a47qT}PbPrasrj(o2Ot(dfo4=nS>8M19+CW=aK$L_e; z6c;(zJ4+a6&SzTOgZ6yc3ueA&muekmsH}~;mA6Hd$x(~SkJ#W%f>$VlwYHy3-ddsv zCg+XY+X}BXMnf6P@}A_}Wdw;9Iyzr;oQd$)B$bn zO5T6GIaQZVGq=VK1BwJpqXEf)Uu$6YRYegHMP^l7Zzy# zhR5N#eII?``R|ra&KxcCpn;(fAOpulfe(fu;@nIV1+oy^vP%SnBDFXo0%uY*;@AlT zkSQF|SNn~1E4S1DLKDKKhLg`>ytKV~(-q|zV15V zjD>rs1it;i_x@nt!6RjmUBs*=4c;=Nj$`vHSU`u2oHu|R264&O zE5q^P_Ft)Zn^v}+D}*!Bye4NtoM-@d-TSaQQJr$~^gJFOvd{D(RBa7jbLpG@vXt8xY#QBgM#})7B<_D~ z&*7I29xE4$B^Yg^euR`TBw8kqMO-MZ+L`?o0yv|}=LtpJ zLJ!dZ6oL`7G(DkF9B@z=5FpV`Ew<4rT2M02MtunqLV0i~A6&WP)z!`3j(;zCfL0gbj3*{*8se4S_+lbjrA+=?>~rfz?Uiy_Z78(S^?=*_j2!|j z%N>_>X=Vd`mQdOyUDK=;9oK3ZV6B538b~3aNT7DI3U2sl)*A>KO~`%7zg23jNLBq=t#y1b4GL{1n!h~i? 
zrox359Sh|(n(XiM?a#Ivx;;jMj6zUv2E#*}syAHpy8rGq1~D)&ytsz11C}Mclq)e6 z01rR4|IhXvnJPEgwGSZ-AX6?zLuaqK@N5AQ0ZPDzVgX?oSOIQ$(f+7d`-&587mGOe ztb>4tTZz>h5hhOX1y($jA&XvIhTSeJh#;c8yD-$yVF-U}2BHxn^nGiR$_h_GP=bR^ zgHH8L*Iyp)+~oY#*>5on@c4_5{L0=_d&*7aJAn*gL>%2|BlRdMZYu@#%=L{6oGu!i z1jnb&5s_b$u-a87Y28)@Hy~oTE}=B+Azvf_Aq+PGY}`CoB3rG2iEMJb5!f=gqk7Y2 zubbRHc;SDlI2(rwsvDLiyylv~!4r+2{`S2Oj~zNuML^0rjl%$71xqHnT+Xp|3h+oO zJ7;H=C35mD`}pO-AX<$@0!;c2eA5I`>&meZ>*aflYkb)VzwTUUtDl6My>5KLv(q5l zQxi)x^UmK5Gd$&>*=(X-uLHFvWUU4_WJqoEnqtmbK>`LLNQdyaS-ooK*2#Ci>C)W} z>|B19@xQj==&8ea;EB6&WahBr=X~pjiL|bkVMD}>V9_cn>Ze__tzPeC`V)H7IVbbAH@fDt;X!oX14 z3CdzLymH5NpWf(#3s~mNP-`^s{Qkqg@#G8pph*G3fiVW5v7wjNmtxvJeXJfV69j>j zfbv|B6yPZQy=B*){~Sw=s9JBVSCr#wAbvlJ0pkY<=IW?dr!jN#l-*7OuInI|%Ylrg zKr&HiHaBO3tz|EK_nTky#q%9Pi3kHk7&=Q?HR?Q#XJ3Bi_7@NBQ9jpT$V6J#DuCw6 z)oc4LmDfk6?R#trR?5KjG|J^Xl*0%^D^NG`c!~=H3pgM+P{ylXdET{C-p%E`%WtNj8gO@xx8?N^ccJ;v&mdTk};#tzoVmy_h#WcKf2 zcgSWrCEYwT7DUIui+o8bp9PT0!^)mqudEeva~BazL|Q2Zte~p~OFxux3a5 zr|YR1G6*?@WLuLet7)}oLoF>Rq}9TdFNRwASJ92xJecTrnR$?Llpn=;WSNjkj`QQ9 zewIA;jMPqiO_>|Y`l34Wvlc+>IBvTynfNB-{J?me*m0nQM9y_#f)I__8tSz<1dS$~ zh7Z?RA8wP8bTBOpAS56phH?cA6`IvI-*DN~tFIXUzw- zWl@w6?Asu{=y`Ic-^ca2T2YAY?~JH@K}Osf=mc9^F5aXge<)LwGbY)Z-zk2-5hr2l z5<&=Q(*!Av(M@A82mozBaO^dxoj|ywURMX^pZ^w0yXYvy+(YZ6h@(*o7e*62;RTxq zw}x-M=KFqctJ<|d8#e})CA?nTf8@VC^t~r={L~bN21gTWm6j7)BY6o?Vy*0DM(JZk zA*6ukc>sWb>p-*R5J(H0tnjg6^9Djh3I}=f?5NO**P?4 zYY6LgP^h6eHuEwxfVjA#<@>dXum0>zaM1;W;d|b8i1N)CC&nqO`lSH@pd%}DnmKHvp zRXx_qMx0zXyYk6)ydEo~o<6hH)g+T=n~q+T0#440LaCcDez2&(8SDEpNjZ`^;T9FuT_YJs;E->kx+tck2T!2A{40hN&^x z`Y(0($Lyjls+tg8#|kkCYcb*|(KvN(!6**6s8mYG7d&X)NX~CR8ksccUbnxh=w8i$ zkuSI~S_QjDuMFRM?K^*`LOa)`eapd;p5(EbkC$Ga{P+WpK3~zsDVu-+-72@;%~_h1 z_DFqJ0=>c_I@!in_BtCG%$$U@3nAdImN>}FNtJ|lWM>xGg(}$`FUK_~ z4;L|1DnN(X{IiX#aRD~JE2xDin1Sl6FL>=YOBnm)dbepQSOCgEHNTSPH9Z&Nfk&VJ z{ulP2EQfHMVr~$cTPl|TqTB(rmOznTCCyQo&Tz@*VRa}jS%!<~Z?|sIZ3uI=^Y<{X zB)fIgf6AUO&X~`hwnqqbb~dE6CIj^HfhE_^rX6J!)8s#PX7|0&EQBfa?pF`~9pv?F9IV8Ti z>~|fpfhVz+(X~1KwFT39Z$xAZNv3{Fx<#w7R&>6IBQtYC%L3mrbs3KG%81}F!E;>r zjT-86HH35ZNNCxRch;@~B9>INCgv-zLmsLYM4k!nL6Aj{&Lv#^?NuXoyUlDZG#u#f5 z=Djgt2!Nv?$p%U=0qlqKMnJ{Cg<%NCSvW_f-5iEh1;Lr^J3X24?=h# zSuQ(g zuM-_SbUkx$dZ?m{M2lsA@omqN=q_fJn`MwuWG87PZsU zK&T-NpoD-D5{d+3*G}64f}%-V#Aam!hyCEv?IYFq-uk+!(kkA^TH-Vx$NkUUHSye` zrxkhZWS3{{f@?j39XhWWYhC5x0+ib_Bc2T~#=x)pa7;efI&^+`>kaSTJtnuWce9p- zMQL8ukn!DTPfgtO%!^9p%kTpRpuswNVQqz#GX9Lrswv){6(YS>R~?gQ9Mp&QzrDH+ zi?BT(C=)G|;-QG5Zgo;it6RGhGPE9y5R|P4)o~ut*6yT!Rge4caM^X=tply+*&2LO zx)3=B%!CjP7~eXnEtQkK9~o&X5&&z4CXRAdB9?(DQOiKL$qHMu9kB4Gygd<;6TaiIB_MWM7&6TOM{FCM-G2uLK}Un8D`E0ngJ-(a7kEZ_~@;O*d%6*6;{zUL-F|q zhzQz*fC*3;r0TzX@0&ipyMkXCItR3_S+3&Y(|1jL>&Y)wX6w^sUo;?d0w4pOfCqAX9jJ4q~@U-Y#!Koo<l>nI1pwN7^iU`su&ZVT5C(e!xEL8JsEf#7)_h^;ThXx$c=kVP0;ADMCiAy+r- zzZ;BU2+?jmv^i-u5F!?Y#_%Gz8RM|$<^JA@)`o3FV6JC4C*sqMbXaAU+2oBSo9PJf5hA04tqBZd zRD0R5f*}D%hcJo0r`m+JE!`0du$DQx zorpE2{aGxLc(Q29UMI2RMvHx-$P^fdUZ)a>4TH6Y2{kAP!GT5?_%>II2ujK*2qccO zY=5dI4#&(O?pl%!92o|epEnl1@2xle_g&uEuW?Ot9W~71U}MiO-#2l$IyQC0QJ#X& zO@z%5kg(-Q5RcmyG4$HkD6_d-t^fPjw7HCt(E$)K=4!KW-Hp4rw&ro2q<0A8f$Ov@Yb8BuG#MVDb4{92zAWi(E|_v(nBviP!62{T80qD zE`(c@lr5o!h}AtNn(lsL)5ElAkXE|Wd13;HXzikR!Pfj||8L2~n?VzjL}p0-Bs*h9`4qY7JcjF0Q<)degj3}sUAO%D;}g$+{Pz2whVTj?7&wjttruzj$!K1SwTAU77MM(0 z@qXFklFLP%CN(Ki>iFz|i7-tFdm}SWLOi@+i`%{>1s~Ii++Et+Li_zL#@w|Obkhh) z5R`NvgoF?Rfeau+4Py*AWauc-mYKoYAV2^~5~Kul#7vNV{q{O0MYEIDyxPrrOYt?$ z*WZ?Mjvl`NKth`*6JgAW@X|U2=>P~c5Sqx#VW2=TQoxv2l%@?XLD!Cxkjlb79}z7E zb3!5vLk#7;>bq{aVe)O)ZrXh=1ZV(203S!@_W#zm9{$I%nwmub--g9Yz(k4eHW{8< 
ziRWw3F6PL@8w)OD7iX@kU=YCd2$iuSTn{kZi~>B?pkZeSL4+XifiRD}9>VLdxb;ip z*#7a6e0jZ_cE(tg@Y0Psww7}L`&WPVM>aM6&-~(J&mSrWfp)yYAV>(Xp`H^;>=)zZ za{P6Kpl)0>{cJ+Vgb-0nNymYd3dUG-nD2)OgTU7Op(r{~8yGT>9G*Fd)A|&0Zp(?6O7D)s zOifboG=84FmK<^5OP3`=>ztiO0pLXA0$Qf^xDY%b;CT)P%ML1|1&}ZZwU3rxYkflG z(~O_#zb!AY0h0kshG`In-Emxf{+n>=_~jpQ_2Nwb8y$<9Gjg7Tf@tE3okPF-|NDs# zOzzq-Qq4I!n3+8dqg#FmlQz{Mc6P1}x=lu_k<*o!nc}3z)aPu6fz`rD2}sw0oAco1 zbC8Y$jaGQN1|u{EVHiM1Ca%TggvH_DtA%uKTDCpi60#HiKyKw(B~)hGfCFvWFzQX1 zdJ|!ziNN=PAcTkmT#T_ADw_rGzk8!YFo%f-n)e*!r3o(EGF1K1ci%L%b?{7{$i~CN zFWvjdBYW>d(`iEF1hj}I*=~7Kk=DLwUsp$ct{%OOTRDx)lu6K!h;%6g5W)|pF;dB6 z^Oh1cH!KS~$LS+$?u45kEZR6Ri3JQ3Bs}cedL6#+)$jT6s8fM#Zm@PQ6N}QkM)IxI z(UB(aZvNyCPW-{2e-e8S&y)uWc_+|-ANtU&L1fv?$%Ft5AY{bN*4+jWo9E)kna#=Q zU^$5DgtfA>qE1MW`6Xp@j#tBN7eG|6C;rCm*QX?3BxAdlF)3$c-pNv?CT8#KB$nB) zN2Y!2a`Us=CqB3y$5ZzCbv5T9Vc~8aZ<3s^#Lq+oMG{5FK@bFJHk-B-dL$UaP=kX2 zL;}(kkV1mMP^pN-xLqb*7ALNx$hVYeA{n1o=JpdaZtR1Kd|xujyW%KsDs?>o&{iW% z?ulFEscAo>h&$SU(<_$hXI_DT|VPoy2z+b*h-9qvb=3uhDM4w&&AH9V%h~ zMk6pjwv<)%r`~(RKJmN%^{;T`=!vq*4La;;iM+5wM1NmF^iZz zdO5T+ErQm}_LlTp`bjqkV)9*+5`Y3$t8`r#j^o%#U2FK-x28pH&1yQ-5tWp+v&SLz z#O>odOHoQ_pO3QpXVHt|RK$KIYTZY2P6CY4o!W6cVKHnZAOoxcZL}3wXbFD-f|#da zOJyTrW{`3`|zq&re0$#D`{u5Bs_TVrfcht>um2T?$WhGu{q*t{J{ZHr-ojoeKOm$r(| zXqi2Ll<>$*cJV%0yLZ+L5hv5f=Zb|D5NT48PfKuAIdF*)#Q(V-(|bi*}DenfI$;4)t@_j=Tl!-GtFul(l`oE^xp0L6A3W1 zH6NxdH6o>1FZb%by(6vcaobN6NEiT3csasQc@V?HLr_YB*+3}??MJ?@nDJ$uK%{IA z7l+o0!U6^nJBa{52CjCh=M7yk`Kt5Z^qC#*#p~AA)xxqfPR}nAo;ljseeB50;m>{T z{@~PH=oE%4XqraUDFqA(DA8#O?1pYr!A2OrNr?(-X(ZW|y&*tHbTA~iCCJdl7l+>3 z;Cc$W#VCy0m_)<0rZ@>9B$NUiC(7Xz$>M!u6CVS~ZvNh}q}xt4eSD zPpj9G^}`v_C5#mbKmJeN`m4u|p7?|NA3sjWxusVG- z?LMnc61kQDiS|nU+sVk$QBI5E+!6c#gaR*7wK{uTJORcb%{sGB+hiG=Nx3!Ywl_c| zzOgjoHf0;!>9cCjZ;FvvEf*PDpB1XH53F;0?{mbg5%&KQt|Q^3R0fms=d?6i22skd z7Mc18CbO8$yvOKWaa3Q-Rj4KdwEO>~Tq?LLdn?qXW$ z5SfKdFEAUh?_(%uf@?3@TK!l5lQwrDTCBw8;3z6u`^e`6DkFms2w?(YSyznhbE0QbeQZvo zwoNS|vp7pkr%x=?7eT4#BOsw*Ai$&m0hpi(M|c>?ZNk+TyaqcP z=;6Fk^@)G|;SYZF#>>yIa-$l^z`)S}>t-uxW0^5#tCdlot)H~iHpnOdApXcDMjW2S zZm|WCj=q#aCO)(D?*f!K=B@9Ct+_5T*-_*VN}Fm8+kN8wIMjeP9Usnq!puJLInbKY z1le~esB>`@Bd(xq*nZO0!{ZOIv_302M2`I6ypeqMpS|l%f4qCM^B3n*fCc~@uD$$g zcRuzF9G^MjI7JcpTMRlQ1J?%a(Wr!U8mT}CM**W_Ll_vySz#6Zw$vh!s{xVe8-fw# zris*dV1NdK?Zt~u+;Y|T|Mrk7uR}}M24_Z82Vgiyn4IE2{>Tr%W3y1Vf8@bu4wh${ zfkUJt6;{pb=-w!3RZ_NpmfGT^lV>Kcv*6ZO6rGxGyO|}XR?-ovb1UkQ6Ky!!V70A` zuaQXf)@sqstk=Gh&%TfNYgU_H2+Q}uMC~vJ!UVxZn+B^t`9trVyyl|vM@#3L;A-r{ z=b!r2KRZy}qgaHPtJWZ01(3}XjD0mgYy4&FqphG4))j(PluPDAItG=IL5z(LLpcG$ z4O;OjF~LJbs(s9jX`n(1+|aFFyZsH5V>s{63aYrija@~YvG4+bN{LXd@vr0x?>#^UNVmT|WG%E;qP}W3~ zb%q0RsmR{%di%^4g1yg_FEFNOB1#E@_KB=p>{;8>jE>8`_clM*nKXI@(5{IQj4==h zp^P7l=289754>^m`b#RiN6#_AHOo~za_HXwdDl~SR;uPy+1EaViql%+Ac`npVwD)< zJG9`e3nIQQg#h^&86LpcrXrLZqF%4V%Sj;NK-qXO1h9w*jLAT16%0|idecR3oV;xO zRlA32>v}hFRk0$%3joRm!s#jggP-}aci_)H^@V@??Z@_%{ZKmjyo!n*MiZ(S#n(%mWzlLz#6l&xVDz2Z|8{Ao zO;l|qK*m?>j&{1K7OPukxyPC4%GgyD-ES*zE8|;Aj_>Y6izoan$6rS)d(Qqb?NfZ> zI3X)@`D8zvgO&cCM3kj{U8eP%ZQfpX!s%jaN-tta%xjiS``(bcY|lkXMyRyt!xkqN zvy~T!4LlBl9c5Mhr{DLw$#>m+{_c@;OmNjr6$kVSKl9Zm{_pz^Oz$l>-8qMH5`YA= zhRG~a(ouhjZz$=vsXhV$v%s@znbt(?@Jvo)n4@}I!iJU;AQV6dFgPes8S{|O1pwP* zabgJ|Y@1WWg3-=BRQsq?iUn8L3vnN|Cl+X7&C{t#=ZJb(97KMn>`|4oZN7=r}hD zf^-RiX@G?QOb!C`uaegksfaWn9rV^dx%4vgr7O1rb|v2)2N|EQUE|sa zj^NcqQmix|--AgJv;wKcL`D!Mf^iX#Gd8)l39<|!b8@|GyE5%t$m|=J z^)L|dOd}X}P4!1^eeL9XZ`r+j{2UZq0PxuUM?d||$8X2c#vw<`F!O7M*(I}BTrnS4 zn<-MlgjNY7lMWy`_sep260FGpYVUYZ0U;YMuM`R%2!!bFG@ix5c<{!d&qnG~X zdN*sWuqwvf@d3i6+e*LjuYUO5fAy+e+p9y44n*iDg31t}Ng;#+cj?TF`Sd1MZAY53 
zZlUdo;EE#M*tDY4(x3U-1=tCY;s~}L&nSs@OI{lWGChy5UDYSnk#c=_ZHzUlJaTh4*nT-~sz@%aDqt*5_! z?UA_yszLRLDSmCx3i`0rNwLY$M#mFKr?f6I0Qo476i^u(gqL$ro15!>@^z`?+zE<& z#BpLh`+(ETSaHoO#(5Qq6Lb8_|K?wPaQaU^{<)vO_j~)w znw8@?5*iHV0Du5<=L)(FJyy7k_EH?N53I9~wHOs_%0~yeVk`G6*_1N%P&;ua^`IMW zbIYY_xhHH=u}{sU22XT<*32I_GIQb{d0#D`Uv!$p2e!aukzzq_i|*@zUMBhu!DN%L zI)6)j5z!5hv3XVPMMB$s95J}Zs-%+@KRQoWg4fh>8O=Aje&%manD(1&JwMao+BXm| z+Cb=fFp@|02j6(z=3gfG-09n92j z?EpGsD%ziDr)u4?_)K^uZBU5dNUPQYM3z9waTRLy(-^K4v1QXR@}7X!4S1fH8r|`P zlD@#g0<3d!lsRU>SD(4Rj$Z|Iq#Zg=0m2k;`PS=k&9<992(4hkbF*)C!>S1{z$P&t zo92)H>_7j2CVdX(bgr~v?uo0~??$NQWin%?O%FD>9q8?_Y`M3_d zt<)D2+1TPhjE0hl&1BJl6*2RKShYyW^}u|ZjcC2x~Mh`GEL{y=he1)NK6mNOeyO2Zqa{?ZKteN)Q#i|M~0N6A{I6ck3 z`H>&FRW`%fpL+0_1LbN%J5mZT6SPrLa$Re(rw)20V{1v-Evwent`qMf=bB`a%!X}u z?6^UbM^*{F%)4<|f9EsLYEBz$vR{H3OmY}ha#Vfm^;b{+%lBWm`+{@SR0seQ{Jo$0 z_*eh*EyrtzmGYDm>JWa-w?$0T?{qDMLrOzp4s2RVl&V3*kkA;eL#^oT(XCTKL^Ow;eOqSSd%w1JVdzs!qMOO zxu5*Nf4%YQ^Q$E{2uv^s7!6_#W(}~0fPv)D8ge7L3|I8`PoAccdV zX~1h}QXr0#o+H+HLog-^jfV}Jk&Z@rq=2#UK{T3GfI^6~V6YP#5F3~vgkTSzE(h1| zdQiUqcX(cO3XZGpVry2tZihh%mOPCe~b%jNB;tEV+_w3!Tm`^*UT$^Q@$qY!YBM zbozYa*68j(#OD-tWn_|~F>$5o7~PYalhbwGn&x&~2&wu}K)VmD3$oYAYEJ3xjLzrM zkvPrq`|Gve;?`5Wo`3djb|$L!NOef^j23%c8x%k!T9KnH;MW=$$g98;jq01O+co)b ze&jXh6wz)LRi42YAN$*h2PW>Sgtm1YlpU}fD!wIk#LlR)*6AH!=}v@j8q(WUI--&gi#>V(pE&vwjKS+M4y@k`e()5U0l7j40Ee{PLn8^jZDmPbe-1+uj^h^ooGKNSBOTNlz0 zj3FBiANd}&VHgcKo=Pjxq^U@h;q0#mRYbJrtPm_fo7GlwRl1G2VV2R-^_=j zpBj?ob!**vW6doUuWl zp}cL^%GLuLGud&quyo6nJ5RU;V>CU280oirbWJMtqdzH$;bdKk_bZ>iMYyf9El7V2sR+{F+e(SST<>oOW&IJdu>wi2Ivh8BsEEv`1pMYrkHH-K({IB?Z`0 z&zIeQsmHH0x8q_sBs(HEU%?Q9pmmfxW(*wD2y(hsefL|gpZpgeeD&_3b9jw1%Tt)e zDSY?ouN>mgHH;{I@|MB0nA(7s?+F-g4u6s+Vtl^;TVbg%3cihwx$mq#;cMih;M^ zcmZu0{-Hnk_-$YN@FNrZ%ae0W#}Ff|PoY>Sz`%vk8jukuUJ?b%QJzw4GjGqoX`y>U zPm2rDe(#w~$(d91sF&5sv&wAGrg@n5{ae>ZbU$5K)Z4zd@89cwdZHtb%(Ne$L2{kw z&r(;byvX`uJs0L3H1+X$yYJTV{1lhdqfD#gZy@tFAuyx3*zVEGX zocz$ccJHp7!=^&ORs{I!lb=0&*E3%zSLv7&kPkx=90E#M%IP4f(`8HCm^f;|EFPW|5QI1Kk;EK~R+O`RX33XJ8h^$LO8Vn9O7~eDm z$29;#fK^g)*kWU7U2&Vz6&LLD6mY^^u(f<~^_@5W(Bxn^_TP)t7hGv<$Pst6+Qi}G zllasZzA^Fjd-qn3RZZD-3JwWEXf(92r2s$%Dl!4Yt0$tkUQa|fCEA5qi_B&}?w9bL6t>2VV12zq{GH_>W4oY4PWMg<#D!uf7OCY}>`z-gB+T(~a`YlgWAw36A)@s~ z#O9KsA=4~iOb9T8j1JguR>yKCeD~|GoBHs3ZrZ)G_{w5dKv<)ZL3n@JeBxN3@Al_Fc=xmW7Bv6g&c!S5VfTnmSf7)SS`yAv9akE zG6z#v*qq;5{ehc)XtFQ3dSgQhuNaq}H;hBmynFLV@h`t}=OgcVY+_&eXtnM%LWYX6 z=miwoyJvQ!oLPQYXM!~K*xtuRz+!z~aVdIXL|epXKmbEQ!-IhUaUDUk8k9X*J%2nu z`QtxubL|~B?Ye%%ePsx)X+D9w4t!(c%TND(<%PK?%aF#AMx+E%>^q8lz&aWqU;r8* zx#S4d#lVhc39T6f0nZ`iT!l?r^C;vQM2s-xL=$RhL?&~V^&Q7?y)(%MLMtdaggi#^ zj_ZCTJU@Tc?!MsajSVfl0ASk?;lK(1UtV+7d-0V!pFRA=fBY_<-+x5;fi5$Y!(wUO zuT^Za!@jkocCD6=X|1u)WZ%t9vX=L~@~-|C?7cH!3_`NL5hWaZJr;IC32Q--8r3&m zdBx-p|G*n+Z@nmg{VFwQL*l8)$3Aw)6JISocHqG>7mPzX68NHJdRq+AwapgZuccXa z8?FOj`)7SD9X9NCQ%J|kKI8MEjw+b*`S#~=JX73w%o-qTHq7N+)Mx9+J4Fb8HrPHas^9;bt0w>X zdv4gh`4ysV#dvP!34HwCKb&~*;60VO=G>Tx1QwIAd66k|VFx2;l)jSffAdB5S+9n?-l1fN8O4-~lgs4;`c5JUe8e>D^0T^unR&8@Y2lr;Sl5?iLrXvk!X?b4r z?#}`m1nq0&92a?i7;m`v19;Q!AD{sq2L%$!+wl6uy5X#OGDG=f;`q6Jrx{N~iOfbeP)n#RtX#NyYan5rg0?|%fK}!PzqdqkQ z7aB@8f{V8fS3h{`jg#-Y<>K9?S6Bzx-*^uH@bKSFJhK0R$}}d+l5DhIEVM)>lwl#X ze8s(gXJ!znO`v2ov6u|mXpPxs9mTwd3(nt!O2tL9UWJlQlKLP>!Pu1L4GTpAId1?? 
zSirRxzX7k`{e6@V%P^*uj?+hXoHfELjZ$9Y!|%Ut>m?Ue{^ZN|J@SEXKlGStHgvgJ z4;?><^Ss>oPZA-^u71Wg@Q!+28`EUbiCzQcENOqw>Hs|)>|_9DO8_0!agq!SCApIByt(C4zuKK9>iA5E2At0s=x?^LZgw?>PbMj(@NofO)L+oKgq-`(DmTf@C_hfMx^9ED9 zxuYL_xH2$3R-YYHQD#39@#Jb36UqC5Wn$on?v!94SVOUZWBg#a;8cI`9dDfc!1rCb zyW{|sS3nqi8vAR{<9mnipZL;4pQ{|I?JKhvIzTicVxnD21wfo(f^Vmm)#G_b*Ku8m znVBgoxOR+VQzZu_LzqBYYNx0g^2T{<9YuOtrdHzyYPMLta{JBU+pc*3uZ>_kjAm$K z;N%=kO`Tr*=AK2^IPdrjaA>xHQ?mi?`1ZqxKli0?sfPB-$0z3;BMN8+1QIIoJ=p~|IrDA=4IsR1&emiN@|pIoIk;{uTOr7h&c>h6PT`M@&5c7gLPj>4BRyjOP(-E$6E5y7i5dx7>KqZ*CgG zXE!_N%&=HAj`JZrbnvc;+wT3F%AS)?l>;&BXc;D?x2=BFJ@sUzg;z|_C4_>61RDZt zgJMBq(|8fX6%Tm_P+BLEzn#J>zJ70kv)aPTq!&&Q;J`(}EkgwP;Hs^!IeF`)KfHCT zck!am*+)ChxlvjT8M95`*zr@iX@xF(TE#GcrB6>vZH+w)?0YBd@-qWGXM&sqM|m$n*^cJs0Ac+b;rnH z_}WW%;Qils%g3&`#QoUlE7a6`nor_^Bj1_$+QWZed1Bwg#%?$yr|fxk78b8(|8G$kvu#X00&XNXOy}Uh2t}uR$vxH>wKy2 z$@N!BcqO|>w=W|NQcw<_uHfRKSDkp*YkuSxN3i2lg+A@mvT#=VyyinWDganx#uev( z-}u{aed~|^)u%uIiTfXZ0>>t&0d}Fu1(UR+jcgV^$kq%Kab{6$H&6A|)y!{NEC=F3 z=!j5SByn&&*q1whZtw^-+1M}-#t#ZE$QOt5N6C2KmlI% zU;KsJzWq19`mGoKu{s2>n?obukIMQ>~~!ZAy12oiteKm=dkdav^U2BBs>I^!%RT9=X=lH_P~>SUia8D zFJj{PeX1Ey*~<+$LO2Km-6HZ$C9wi_k`*L6%!4sFlYAUc;_bpq^d#DC?Q&+qt7EM7 z5=NQHUr@VQrFW^9*7N9g-GzkdQiWH~`|BdSj4{xmhL8e62#{gudL4s#88qrs)f=w5 zB>bhH{NQg~HjaL=JoL>`#8>q1!%s=M(=}-aoh3kwb)dQ6r5tuV;%X z_7YyYM^tDp;T4+;Ib*h?h^3yHp@Sxdh6b>CQyF8U18|fEX&(Y6i55<`%UT^W+t*om zAr#2FBS0gMU7KEow_N$YpBZ;|{y7^L+GkYqgNr|3A8k12%!V-nX;8_Hb!@MO{K2=r z{!;8YaPr^%kH7lcpLk^AIOYPUT(3189hVJGg#o91R+=>cvJ?BFC%^6t9Z5h)cF=bS zn~gcib}$q4+_C*@*2N$)&m+ciQV*0=aFl~khcL##g9(amP`zY)Fnr_nH%z_bjW>Su z;&J?+pt0bo&S@9~n8Zmu^z3*4^seXbaE^ut;1&dg1nLN*y;Mu9SwI_SOLMXK>-P{C z=_F+(1o8s~jE&ik=P-h16E4~OEdWu{blgW6M506UT33*PTW|x^O!b0+E5lo_{@`zn zsS7_&@X$aL0h;UG!m}D1b7ov4yl}dPJx3b9b??IyANkI=@4=pz_A5iVGQ)KY6buB6 zlvapElV{AS#)6=vN&*V9ffp^!tGnjZGg>YajVMIB*=D0?URf+w^UBimh#t?QTi}Nw z`*>RBsq{+QvOYLOXx64NSnz`L#!J=rzV)?}uYc8rzd1gD&u;Nv(cL}4ao&r2_TF*$ zv)}r}mY0HOfucc9y!!J*no;`kVd?p z4XgozW*vhAIc(ip!Imw905_ri07}SYpID=71@y#=!S;JyJI$+8>l4NeWIbQKV(V)r zZ@uP+|8SGKR%=uTz*|E3~*P-ZfzFxr@z091wj!PB=@qNdFiA)q{kvX5R;m|@7JkNt4YUofy z3K1ZzRULmWyyUzw+0Lpi`3*FF;!$W?k>@IY2lT&ETp*$TiZ0i%6<$(m;eJsh0U7_7_Jnd1Shq? 
diff --git a/core/tasks/__init__.py b/core/tasks/__init__.py
new file mode 100644
index 0000000..e9d68fb
--- /dev/null
+++ b/core/tasks/__init__.py
@@ -0,0 +1,4 @@
+# Celery tasks module
+from .notifications import *  # noqa
+from .event_cleanup import *  # noqa
+from .monitoring import *  # noqa
diff --git a/core/tasks/event_cleanup.py b/core/tasks/event_cleanup.py
new file mode 100644
index 0000000..57b99ee
--- /dev/null
+++ b/core/tasks/event_cleanup.py
@@ -0,0 +1,77 @@
+"""
+Celery tasks for event management and cleanup.
+"""
+from datetime import timedelta
+from django.utils import timezone
+from celery import shared_task
+
+from core.models.events import Event
+from core.services.events import MISSION_CRITICAL_EVENTS
+
+
+@shared_task
+def cleanup_old_events():
+    """
+    Periodic task to clean up old non-critical events.
+
+    Retention policy:
+    - Mission-critical events: Kept forever (audit trail, compliance)
+    - Non-critical events: Deleted after 90 days
+
+    This task should be scheduled to run daily or weekly via Celery Beat.
+    """
+    # Calculate cutoff date (90 days ago)
+    cutoff_date = timezone.now() - timedelta(days=90)
+
+    # Delete non-critical events older than 90 days
+    deleted_count, _ = Event.objects.filter(
+        created_at__lt=cutoff_date
+    ).exclude(
+        event_type__in=MISSION_CRITICAL_EVENTS
+    ).delete()
+
+    return {
+        'deleted_count': deleted_count,
+        'cutoff_date': cutoff_date.isoformat(),
+        'retention_days': 90
+    }
+
+
+@shared_task
+def get_event_statistics():
+    """
+    Generate statistics about event storage and distribution.
+    Useful for monitoring and capacity planning.
+ """ + from django.db.models import Count + + total_events = Event.objects.count() + + # Count by event type + events_by_type = Event.objects.values('event_type').annotate( + count=Count('event_type') + ).order_by('-count')[:20] # Top 20 event types + + # Count mission-critical vs non-critical + critical_count = Event.objects.filter( + event_type__in=MISSION_CRITICAL_EVENTS + ).count() + non_critical_count = total_events - critical_count + + # Events in last 24 hours + last_24h = timezone.now() - timedelta(hours=24) + recent_count = Event.objects.filter(created_at__gte=last_24h).count() + + # Events in last 7 days + last_7d = timezone.now() - timedelta(days=7) + weekly_count = Event.objects.filter(created_at__gte=last_7d).count() + + return { + 'total_events': total_events, + 'critical_events': critical_count, + 'non_critical_events': non_critical_count, + 'events_last_24h': recent_count, + 'events_last_7d': weekly_count, + 'top_event_types': list(events_by_type), + 'mission_critical_types_count': len(MISSION_CRITICAL_EVENTS), + } diff --git a/core/tasks/monitoring.py b/core/tasks/monitoring.py new file mode 100644 index 0000000..94e0ed1 --- /dev/null +++ b/core/tasks/monitoring.py @@ -0,0 +1,72 @@ +""" +Celery tasks for monitoring command execution. +""" +import logging + +from celery import shared_task + +from core.services.events import EventPublisher +from core.services.monitoring.registry import MonitoringCommandRegistry + +logger = logging.getLogger(__name__) + + +@shared_task(bind=True, max_retries=3, default_retry_delay=300) +def run_monitoring_command(self, command_name: str, **kwargs): + """ + Execute a monitoring command and publish its results as an audit event. + + Email delivery happens inside the command's execute() method. + Event publishing creates an audit trail of command executions. + + Args: + command_name: Name of the registered monitoring command + **kwargs: Arguments passed to the command + + Returns: + Dict with execution summary + """ + try: + # Execute the command (email delivery happens here) + result = MonitoringCommandRegistry.execute(command_name, **kwargs) + + # Publish event for audit trail + EventPublisher.publish_sync( + event_type=result.event_type, + entity_type="MonitoringCommand", + entity_id=result.entity_id, + triggered_by=None, # System-generated + metadata={ + 'command_name': result.command_name, + 'execution_date': str(result.execution_date), + 'emails_sent': result.emails_sent, + 'summary': result.summary, + } + ) + + logger.info( + f"Monitoring command '{command_name}' executed successfully", + extra={ + 'command_name': command_name, + 'summary': result.summary, + } + ) + + return { + 'status': 'success', + 'command': command_name, + 'summary': result.summary, + } + + except ValueError as e: + # Command not found + logger.error(f"Monitoring command not found: {command_name}") + return { + 'status': 'error', + 'command': command_name, + 'error': str(e), + } + + except Exception as exc: + logger.exception(f"Monitoring command '{command_name}' failed") + raise self.retry(exc=exc) diff --git a/core/tasks/notifications.py b/core/tasks/notifications.py new file mode 100644 index 0000000..470fca0 --- /dev/null +++ b/core/tasks/notifications.py @@ -0,0 +1,267 @@ +""" +Celery tasks for notification processing and delivery. 
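+
+Typical flow (an illustrative sketch; both tasks are queued automatically by
+the event and notification services, and the ids shown are hypothetical):
+
+    process_event_notifications.delay(str(event.id))  # evaluate rules, fan out
+    deliver_notification.delay(str(delivery.id))      # per-channel delivery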
+""" +from celery import shared_task +from django.utils import timezone + +from core.models.events import Event, NotificationDelivery +from core.models.enums import NotificationChannelChoices, DeliveryStatusChoices +from core.services.notifications import NotificationProcessor + + +@shared_task(bind=True, max_retries=3, default_retry_delay=60) +def process_event_notifications(self, event_id: str): + """ + Process an event and generate notifications based on rules. + + Args: + event_id: UUID of the Event to process + + This task is triggered automatically when an event is published. + """ + try: + event = Event.objects.get(pk=event_id) + notifications = NotificationProcessor.process_event(event) + + return { + 'event_id': str(event_id), + 'notifications_created': len(notifications), + 'notification_ids': [str(n.id) for n in notifications] + } + + except Event.DoesNotExist: + # Event was deleted, nothing to do + return {'error': f'Event {event_id} not found'} + + except Exception as exc: + # Retry on failure + raise self.retry(exc=exc) + + +@shared_task(bind=True, max_retries=5, default_retry_delay=300) +def deliver_notification(self, delivery_id: str): + """ + Deliver a notification via its specified channel. + + Args: + delivery_id: UUID of the NotificationDelivery to process + + This task is queued automatically when a NotificationDelivery is created. + Handles delivery via IN_APP, EMAIL, or SMS channels. + """ + try: + delivery = NotificationDelivery.objects.select_related('notification').get(pk=delivery_id) + + # Update status to sending + delivery.status = DeliveryStatusChoices.SENDING + delivery.attempts += 1 + delivery.last_attempt_at = timezone.now() + delivery.save(update_fields=['status', 'attempts', 'last_attempt_at', 'updated_at']) + + # Route to appropriate delivery handler + if delivery.channel == NotificationChannelChoices.IN_APP: + result = _deliver_in_app(delivery) + elif delivery.channel == NotificationChannelChoices.EMAIL: + result = _deliver_email(delivery) + elif delivery.channel == NotificationChannelChoices.SMS: + result = _deliver_sms(delivery) + else: + raise ValueError(f"Unknown delivery channel: {delivery.channel}") + + # Update delivery status on success + delivery.status = DeliveryStatusChoices.SENT + delivery.sent_at = timezone.now() + delivery.metadata.update(result) + delivery.save(update_fields=['status', 'sent_at', 'metadata', 'updated_at']) + + # Update notification status if all deliveries are sent + _update_notification_status(delivery.notification) + + return { + 'delivery_id': str(delivery_id), + 'channel': delivery.channel, + 'status': 'sent', + 'result': result + } + + except NotificationDelivery.DoesNotExist: + return {'error': f'NotificationDelivery {delivery_id} not found'} + + except Exception as exc: + # Update delivery with error + try: + delivery.status = DeliveryStatusChoices.FAILED + delivery.error_message = str(exc) + delivery.save(update_fields=['status', 'error_message', 'updated_at']) + except: + pass + + # Retry with exponential backoff + if self.request.retries < self.max_retries: + raise self.retry(exc=exc, countdown=300 * (2 ** self.request.retries)) + else: + # Max retries reached, mark as failed + return { + 'delivery_id': str(delivery_id), + 'status': 'failed', + 'error': str(exc), + 'retries': self.request.retries + } + + +def _deliver_in_app(delivery: NotificationDelivery) -> dict: + """ + Deliver in-app notification. + For in-app, the notification is already in the database, so just mark as delivered. 
+
+    Args:
+        delivery: NotificationDelivery instance
+
+    Returns:
+        Result dictionary
+    """
+    # In-app notifications are already stored in DB
+    # Just need to mark as delivered
+    delivery.delivered_at = timezone.now()
+    delivery.status = DeliveryStatusChoices.DELIVERED
+    delivery.save(update_fields=['delivered_at', 'status', 'updated_at'])
+
+    return {
+        'channel': 'in_app',
+        'delivered_at': delivery.delivered_at.isoformat()
+    }
+
+
+def _deliver_email(delivery: NotificationDelivery) -> dict:
+    """
+    Deliver email notification via the Emailer microservice.
+
+    Renders the notification as a branded HTML email using Django templates.
+
+    Args:
+        delivery: NotificationDelivery instance
+
+    Returns:
+        Result dictionary with email details
+
+    Raises:
+        ValueError: If recipient has no email address
+        EmailerServiceError: If the emailer service fails
+    """
+    from django.conf import settings
+    from core.services.email_service import get_emailer_client
+    from core.services.email_renderer import NotificationEmailRenderer
+
+    notification = delivery.notification
+    recipient = notification.recipient
+
+    # Get recipient email
+    if hasattr(recipient, 'email') and recipient.email:
+        recipient_email = recipient.email
+    else:
+        raise ValueError(f"Recipient {recipient} has no email address")
+
+    # Get recipient name (only append last_name when first_name is present,
+    # so a missing first_name can't render as "None Smith")
+    recipient_name = 'there'  # Default fallback
+    if hasattr(recipient, 'first_name') and recipient.first_name:
+        recipient_name = recipient.first_name
+        if hasattr(recipient, 'last_name') and recipient.last_name:
+            recipient_name = f"{recipient.first_name} {recipient.last_name}"
+
+    # Render branded HTML email
+    html_body = NotificationEmailRenderer.render_html(
+        notification=notification,
+        recipient_name=recipient_name,
+        recipient_email=recipient_email
+    )
+
+    # Send email via emailer microservice (HTML body)
+    emailer = get_emailer_client()
+    result = emailer.send_email(
+        to=[recipient_email],
+        subject=notification.subject,
+        body=html_body,
+        impersonate_user=settings.EMAILER_DEFAULT_SENDER
+    )
+
+    return {
+        'channel': 'email',
+        'recipient': recipient_email,
+        'subject': notification.subject,
+        'email_id': result.get('id'),
+        'thread_id': result.get('threadId')
+    }
+
+
+def _deliver_sms(delivery: NotificationDelivery) -> dict:
+    """
+    Deliver SMS notification via Twilio.
+
+    Args:
+        delivery: NotificationDelivery instance
+
+    Returns:
+        Result dictionary with SMS details
+    """
+    from django.conf import settings
+
+    notification = delivery.notification
+    recipient = notification.recipient
+
+    # Get recipient phone number
+    if hasattr(recipient, 'phone') and recipient.phone:
+        recipient_phone = recipient.phone
+    else:
+        raise ValueError(f"Recipient {recipient} has no phone number")
+
+    # Check Twilio configuration
+    if not hasattr(settings, 'TWILIO_ACCOUNT_SID') or not hasattr(settings, 'TWILIO_AUTH_TOKEN'):
+        raise ValueError("Twilio credentials not configured in settings")
+
+    # Import Twilio client
+    try:
+        from twilio.rest import Client
+    except ImportError:
+        raise ImportError("Twilio package not installed. Run: pip install twilio")
+
+    # Initialize Twilio client
+    client = Client(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)
+
+    # Send SMS
+    message = client.messages.create(
+        body=f"{notification.subject}\n\n{notification.body}",
+        from_=settings.TWILIO_PHONE_NUMBER,
+        to=recipient_phone
+    )
+
+    # Store external message ID
+    delivery.external_id = message.sid
+    delivery.save(update_fields=['external_id', 'updated_at'])
+
+    return {
+        'channel': 'sms',
+        'recipient': recipient_phone,
+        'message_sid': message.sid,
+        'status': message.status
+    }
+
+
+def _update_notification_status(notification):
+    """
+    Update notification status based on delivery statuses.
+
+    Args:
+        notification: Notification instance
+    """
+    from core.models.enums import NotificationStatusChoices
+
+    deliveries = notification.deliveries.all()
+
+    # If all deliveries are sent or delivered, mark notification as sent
+    if all(d.status in [DeliveryStatusChoices.SENT, DeliveryStatusChoices.DELIVERED] for d in deliveries):
+        notification.status = NotificationStatusChoices.SENT
+        notification.save(update_fields=['status', 'updated_at'])
+    # If any delivery failed after max retries, mark notification as failed
+    elif any(d.status == DeliveryStatusChoices.FAILED for d in deliveries):
+        notification.status = NotificationStatusChoices.FAILED
+        notification.save(update_fields=['status', 'updated_at'])
diff --git a/core/templates/email/base_notification.html b/core/templates/email/base_notification.html
new file mode 100644
index 0000000..22ec436
--- /dev/null
+++ b/core/templates/email/base_notification.html
@@ -0,0 +1,167 @@
+
+
+
+
+
+
+  {{ subject }}
+
+
+
+
+
+
+
+
+ + + + + + + + + + + + + + + + + + +
+ + + + +
+ + + + + +
+ + + Nexus +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + {% if metadata_items %} + + + + {% endif %} + + + {% if action_url %} + + + + {% else %} + + + + {% endif %} + +
+ + + + +
+ {{ event_type_label }} +
+
+

{{ subject }}

+
+

Hi {{ recipient_name }},

+
+

{{ body }}

+
+ + {% for item in metadata_items %} + + + + + {% endfor %} +
+ {{ item.label }} + + {{ item.value }} +
+
+ + + + +
+ View Details +
+
+
+ + + + + + + + + + +
+

+ Visit Nexus +  •  + Contact Us +

+
+

+ © {{ current_year }} Corellon Digital. All rights reserved. +

+
+

+ This email was sent to {{ recipient_email }} because email notifications are enabled for {{ event_type_label }} events.
+ To change your notification preferences, please contact an administrator. +

+
+
+
+ + diff --git a/core/templates/email/incomplete_work_reminder.html b/core/templates/email/incomplete_work_reminder.html new file mode 100644 index 0000000..500e900 --- /dev/null +++ b/core/templates/email/incomplete_work_reminder.html @@ -0,0 +1,249 @@ + + + + + + + Incomplete Work Reminder - {{ date_formatted }} + + + + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + +
+ + + + + +
+ + + Nexus +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + {% if services %} + + + + {% endif %} + + + {% if projects %} + + + + {% endif %} + + + + + + + + + + + +
+ + + + +
+ Action Required +
+
+

+ Incomplete Work Reminder

+
+

+ Hi {{ recipient_name }},

+
+

+ {% if has_weekend_services %} + The following work items are still incomplete and may need your attention. This includes weekend services from Friday that were not completed. + {% else %} + The following work items from {{ date_formatted }} are still + incomplete and may need your attention. + {% endif %} +

+
+ + + + +
+ Services ({{ services|length }}) +
+ + {% for service in services %} + + + + {% endfor %} +
+ + + + + + + + +
+ {{ service.account_name }} + {% if service.is_weekend_service %} + WEEKEND + {% endif %} + + {{ service.status_display }} +
+ {{ service.address }} +
+
+
+ + + + +
+ Projects ({{ projects|length }}) +
+ + {% for project in projects %} + + + + {% endfor %} +
+ + + + + + + + +
+ {{ project.name }} + + {{ project.status_display }} +
+ {{ project.affiliation_name }}{% if project.address %} • {{ project.address }}{% endif %} +
+
+
+

+ Please visit your dashboard to update the status of these items or contact your team lead if assistance + is needed. +

+
+ + + + +
+ Open + Dashboard +
+
+
+ + + + + + + + + + +
+

+ Visit Nexus +  •  + Contact Us +

+
+

+ © {{ current_year }} Corellon Digital. All rights reserved. +

+
+

+ This is an automated daily reminder sent to {{ recipient_email }}.
+ You received this because you have incomplete work items assigned to you. +

+
+
+
+ + diff --git a/core/templates/email/nightly_assignments.html b/core/templates/email/nightly_assignments.html new file mode 100644 index 0000000..41b3333 --- /dev/null +++ b/core/templates/email/nightly_assignments.html @@ -0,0 +1,248 @@ + + + + + + + Tonight's Assignments - {{ date_formatted }} + + + + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + +
+ + + + + +
+ + + Nexus +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + {% if services %} + + + + {% endif %} + + + {% if projects %} + + + + {% endif %} + + + + + + + + + + + +
+ + + + +
+ Tonight's Schedule +
+
+

+ Your Assignments for Tonight

+
+

+ Hi {{ recipient_name }},

+
+

+ {% if has_weekend_services %} + Here's your scheduled work for {{ date_formatted }}, including weekend services that can be completed through Sunday. + {% else %} + Here's your scheduled work for {{ date_formatted }}. + {% endif %} +

+
+ + + + +
+ Services ({{ services|length }}) +
+ + {% for service in services %} + + + + {% endfor %} +
+ + + + + + + +
+ {{ service.account_name }} + {% if service.is_weekend_service %} + WEEKEND + {% endif %} +
+ {{ service.address }} +
+
+
+ + + + +
+ Projects ({{ projects|length }}) +
+ + {% for project in projects %} + + + + {% endfor %} +
+ + + + + + + +
+ {{ project.name }} +
+ {{ project.affiliation_name }}{% if project.address %} • {{ project.address }}{% endif %} +
+
+
+

+ {% if has_weekend_services %}
+ Regular services are due by 8:00am tomorrow morning. Weekend services are due by Monday morning.
+ {% else %}
+ All of your work must be completed by 8:00am tomorrow morning.
+ {% endif %}
+

+

+ Please visit your dashboard to get started. +

+
+ + + + +
+ View + Dashboard +
+
+
+ + + + + + + + + + +
+

+ Visit Nexus +  •  + Contact Us +

+
+

+ © {{ current_year }} Corellon Digital. All rights reserved. +

+
+

+ This is an automated notification sent to {{ recipient_email }}.
+ You received this because you have work scheduled for tonight. +

+
+
+
+ + diff --git a/core/views.py b/core/views.py new file mode 100644 index 0000000..0fc52d3 --- /dev/null +++ b/core/views.py @@ -0,0 +1,482 @@ +from __future__ import annotations +import base64 +import binascii +import io +import os +import mimetypes +from typing import Optional, Any +from PIL import Image as PilImage +from django.conf import settings +from django.core.files.base import ContentFile +from django.core.exceptions import ValidationError +from django.db import transaction +from rest_framework.request import Request +from rest_framework.decorators import api_view, authentication_classes, permission_classes, parser_classes +from rest_framework.parsers import MultiPartParser, FormParser +from rest_framework.response import Response +from rest_framework import status +from core.models.session import ServiceSession, ProjectSession +from core.models.session_image import ServiceSessionImage, ProjectSessionImage +from core.models.session_video import ServiceSessionVideo, ProjectSessionVideo +from core.permissions import IsProfileAuthenticated +from core.services.video import verify_video_bytes, extract_video_metadata, generate_video_thumbnail + + +def _verify_image_bytes_or_400(data: bytes) -> Optional[Response]: + try: + PilImage.open(io.BytesIO(data)).verify() + return None + except (PilImage.UnidentifiedImageError, OSError, ValueError): + return Response({"detail": "Uploaded file is not a valid image."}, status=status.HTTP_400_BAD_REQUEST) + + +def _normalize_image_to_jpeg(data: bytes, filename: str, content_type: str) -> tuple[bytes, str, str]: + """ + Convert uploaded image to JPEG if it's in HEIC format. + Returns (normalized_bytes, normalized_filename, normalized_content_type). + + HEIC files from iOS devices are converted to JPEG for compatibility and storage normalization. + Other image formats are passed through unchanged. + """ + # Check if file is HEIC by extension or content type + is_heic = ( + filename.lower().endswith(('.heic', '.heif')) or + content_type in ('image/heic', 'image/heif') + ) + + if is_heic: + # Convert HEIC to JPEG + try: + img = PilImage.open(io.BytesIO(data)) + + # Convert to RGB if needed (HEIC can have alpha channel) + if img.mode in ('RGBA', 'LA', 'P'): + # Create white background for transparent images + background = PilImage.new('RGB', img.size, (255, 255, 255)) + if img.mode == 'P': + img = img.convert('RGBA') + background.paste(img, mask=img.split()[-1] if img.mode in ('RGBA', 'LA') else None) + img = background + elif img.mode != 'RGB': + img = img.convert('RGB') + + # Save as JPEG + output = io.BytesIO() + img.save(output, format='JPEG', quality=90, optimize=True) + jpeg_data = output.getvalue() + + # Update filename and content type + new_filename = filename.rsplit('.', 1)[0] + '.jpg' + new_content_type = 'image/jpeg' + + return jpeg_data, new_filename, new_content_type + except Exception as e: + raise ValidationError(f"Failed to convert HEIC image to JPEG: {str(e)}") + + # Not HEIC, return unchanged + return data, filename, content_type + + +def decode_global_id(gid: Optional[str]) -> Optional[str]: + """ + Decode a Relay Global ID ("Type:uuid") or return the input if it's already a raw ID. 
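+
+    Illustrative examples (hypothetical IDs):
+
+        decode_global_id(base64.b64encode(b"ServiceSession:abc-123").decode())
+        # -> "abc-123"
+        decode_global_id("abc-123")  # not valid base64 -> returned unchanged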
+ """ + if gid is None: + return None + try: + decoded = base64.b64decode(gid).decode("utf-8") + if ":" in decoded: + return decoded.split(":", 1)[1] + except (binascii.Error, UnicodeDecodeError): + pass + return gid + + +def _save_image_for_session( + *, + request: Request, + sess: ServiceSession | ProjectSession, + image_model: type[ServiceSessionImage] | type[ProjectSessionImage], + session_field_name: str, + file_obj, + title: str, + notes: str = "", +) -> Response: + """ + Persist an uploaded image for the given session and return a JSON response. + Assumes file_obj has already been validated and read to bytes. + """ + data = file_obj.read() + if not data: + return Response({"detail": "Empty file upload."}, status=status.HTTP_400_BAD_REQUEST) + + bad = _verify_image_bytes_or_400(data) + if bad: + return bad + + content_type = getattr(file_obj, "content_type", "") or "" + filename = getattr(file_obj, "name", "upload.jpg") + + # Normalize HEIC images to JPEG for storage compatibility + try: + data, filename, content_type = _normalize_image_to_jpeg(data, filename, content_type) + except ValidationError as e: + return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST) + + with transaction.atomic(): + payload = { + "title": title, + "notes": notes, + session_field_name: sess, + "uploaded_by_team_profile": request.profile, + "content_type": content_type, + } + img = image_model(**payload) + img.image.save(filename, ContentFile(data), save=True) + + i: Any = img + session_key = f"{session_field_name}Id" + return Response( + { + "id": str(i.id), + "title": i.title, + "notes": i.notes, + session_key: str(getattr(sess, "id", "")), + "contentType": i.content_type, + "width": i.width, + "height": i.height, + "image": getattr(i.image, "url", None), + "thumbnail": getattr(i.thumbnail, "url", None), + "createdAt": i.created_at.isoformat(), + "uploadedByTeamProfileId": str(i.uploaded_by_team_profile.id) if i.uploaded_by_team_profile else None, + }, + status=status.HTTP_201_CREATED, + ) + + +@api_view(["POST"]) +@authentication_classes([]) +@permission_classes([IsProfileAuthenticated]) +@parser_classes([MultiPartParser, FormParser]) +def upload_service_session_image(request: Request) -> Response: + """ + POST multipart/form-data: + - file: image file + - sessionId: Relay Global ID or raw UUID of ServiceSession + - title: optional string + """ + file_obj = request.FILES.get("file") + session_id = request.data.get("sessionId") + title = request.data.get("title") or "" + notes = request.data.get("notes") or "" + + if not file_obj: + return Response({"detail": "No file provided."}, status=status.HTTP_400_BAD_REQUEST) + if not session_id: + return Response({"detail": "sessionId is required."}, status=status.HTTP_400_BAD_REQUEST) + + sess_pk = decode_global_id(session_id) + if not sess_pk: + return Response({"detail": "Invalid sessionId."}, status=status.HTTP_400_BAD_REQUEST) + + try: + sess = ServiceSession.objects.get(pk=sess_pk) + except ServiceSession.DoesNotExist: + return Response({"detail": "Session not found."}, status=status.HTTP_404_NOT_FOUND) + + return _save_image_for_session( + request=request, + sess=sess, + image_model=ServiceSessionImage, + session_field_name="service_session", + file_obj=file_obj, + title=title, + notes=notes, + ) + + +@api_view(["POST"]) +@authentication_classes([]) +@permission_classes([IsProfileAuthenticated]) +@parser_classes([MultiPartParser, FormParser]) +def upload_project_session_image(request: Request) -> Response: + """ + POST multipart/form-data: + 
- file: image file + - sessionId: Relay Global ID or raw UUID of ProjectSession + - title: optional string + """ + file_obj = request.FILES.get("file") + session_id = request.data.get("sessionId") + title = request.data.get("title") or "" + notes = request.data.get("notes") or "" + + if not file_obj: + return Response({"detail": "No file provided."}, status=status.HTTP_400_BAD_REQUEST) + if not session_id: + return Response({"detail": "sessionId is required."}, status=status.HTTP_400_BAD_REQUEST) + + sess_pk = decode_global_id(session_id) + if not sess_pk: + return Response({"detail": "Invalid sessionId."}, status=status.HTTP_400_BAD_REQUEST) + + try: + sess = ProjectSession.objects.get(pk=sess_pk) + except ProjectSession.DoesNotExist: + return Response({"detail": "Session not found."}, status=status.HTTP_404_NOT_FOUND) + + return _save_image_for_session( + request=request, + sess=sess, + image_model=ProjectSessionImage, + session_field_name="project_session", + file_obj=file_obj, + title=title, + notes=notes, + ) + +def _save_video_for_session( + *, + request: Request, + sess: ServiceSession | ProjectSession, + video_model: type[ServiceSessionVideo] | type[ProjectSessionVideo], + session_field_name: str, + file_obj, + title: str, + notes: str = "", +) -> Response: + """ + Persist an uploaded video for the given session and return a JSON response. + Validates video, extracts metadata, generates thumbnail, and saves to the database. + + Note: Video processing (ffmpeg) requires local file paths, so we write to temp files + before saving to S3 storage. This works for both local and S3 storage backends. + """ + import tempfile + from django.core.files import File + + data = file_obj.read() + if not data: + return Response({"detail": "Empty file upload."}, status=status.HTTP_400_BAD_REQUEST) + + filename = getattr(file_obj, "name", "upload.mp4") + + # Validate video file and get content type + try: + content_type = verify_video_bytes(data, filename) + except ValidationError as e: + return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST) + + # Write video to temp file for ffmpeg processing (required for S3 storage) + video_ext = os.path.splitext(filename)[1] or '.mp4' + video_fd, video_tmp_path = tempfile.mkstemp(suffix=video_ext) + thumb_fd, thumb_tmp_path = tempfile.mkstemp(suffix='.jpg') + + try: + # Write video bytes to temp file + os.write(video_fd, data) + os.close(video_fd) + os.close(thumb_fd) + + # Extract metadata from temp file (before saving to S3) + metadata = extract_video_metadata(video_tmp_path) + + # Generate thumbnail from temp file + thumbnail_generated = generate_video_thumbnail(video_tmp_path, thumb_tmp_path, timestamp=1.0) + + with transaction.atomic(): + payload = { + "title": title, + "notes": notes, + session_field_name: sess, + "uploaded_by_team_profile": request.profile, + "content_type": content_type, + } + vid = video_model(**payload) + + # Set metadata before saving + if metadata: + vid.width, vid.height, vid.duration_seconds = metadata + + # Save video to storage (S3 or local) + vid.video.save(filename, ContentFile(data), save=True) + + # Save thumbnail if generated + if thumbnail_generated and os.path.exists(thumb_tmp_path): + with open(thumb_tmp_path, 'rb') as thumb_file: + vid.thumbnail.save( + f'thumb_{vid.id}.jpg', + File(thumb_file), + save=False + ) + + vid.save() + + finally: + # Clean up temp files + if os.path.exists(video_tmp_path): + os.unlink(video_tmp_path) + if os.path.exists(thumb_tmp_path): + os.unlink(thumb_tmp_path) + + v: Any = 
vid + session_key = f"{session_field_name}Id" + return Response( + { + "id": str(v.id), + "title": v.title, + "notes": v.notes, + session_key: str(getattr(sess, "id", "")), + "contentType": v.content_type, + "width": v.width, + "height": v.height, + "durationSeconds": v.duration_seconds, + "fileSizeBytes": v.file_size_bytes, + "video": getattr(v.video, "url", None), + "thumbnail": getattr(v.thumbnail, "url", None), + "createdAt": v.created_at.isoformat(), + "uploadedByTeamProfileId": str(v.uploaded_by_team_profile.id) if v.uploaded_by_team_profile else None, + }, + status=status.HTTP_201_CREATED, + ) + + +@api_view(["POST"]) +@authentication_classes([]) +@permission_classes([IsProfileAuthenticated]) +@parser_classes([MultiPartParser, FormParser]) +def upload_service_session_video(request: Request) -> Response: + """ + POST multipart/form-data: + - file: video file + - sessionId: Relay Global ID or raw UUID of ServiceSession + - title: optional string + - notes: optional string + """ + file_obj = request.FILES.get("file") + session_id = request.data.get("sessionId") + title = request.data.get("title") or "" + notes = request.data.get("notes") or "" + + if not file_obj: + return Response({"detail": "No file provided."}, status=status.HTTP_400_BAD_REQUEST) + if not session_id: + return Response({"detail": "sessionId is required."}, status=status.HTTP_400_BAD_REQUEST) + + sess_pk = decode_global_id(session_id) + if not sess_pk: + return Response({"detail": "Invalid sessionId."}, status=status.HTTP_400_BAD_REQUEST) + + try: + sess = ServiceSession.objects.get(pk=sess_pk) + except ServiceSession.DoesNotExist: + return Response({"detail": "Session not found."}, status=status.HTTP_404_NOT_FOUND) + + return _save_video_for_session( + request=request, + sess=sess, + video_model=ServiceSessionVideo, + session_field_name="service_session", + file_obj=file_obj, + title=title, + notes=notes, + ) + + +@api_view(["POST"]) +@authentication_classes([]) +@permission_classes([IsProfileAuthenticated]) +@parser_classes([MultiPartParser, FormParser]) +def upload_project_session_video(request: Request) -> Response: + """ + POST multipart/form-data: + - file: video file + - sessionId: Relay Global ID or raw UUID of ProjectSession + - title: optional string + - notes: optional string + """ + file_obj = request.FILES.get("file") + session_id = request.data.get("sessionId") + title = request.data.get("title") or "" + notes = request.data.get("notes") or "" + + if not file_obj: + return Response({"detail": "No file provided."}, status=status.HTTP_400_BAD_REQUEST) + if not session_id: + return Response({"detail": "sessionId is required."}, status=status.HTTP_400_BAD_REQUEST) + + sess_pk = decode_global_id(session_id) + if not sess_pk: + return Response({"detail": "Invalid sessionId."}, status=status.HTTP_400_BAD_REQUEST) + + try: + sess = ProjectSession.objects.get(pk=sess_pk) + except ProjectSession.DoesNotExist: + return Response({"detail": "Session not found."}, status=status.HTTP_404_NOT_FOUND) + + return _save_video_for_session( + request=request, + sess=sess, + video_model=ProjectSessionVideo, + session_field_name="project_session", + file_obj=file_obj, + title=title, + notes=notes, + ) + + +@api_view(["GET"]) +@permission_classes([IsProfileAuthenticated]) +def serve_protected_media(request, path: str): + """ + DEPRECATED: Legacy auth-gated media serving for filesystem storage. + Kept for backwards compatibility during S3 migration. + + With S3 storage, nginx uses auth_request to media_auth_check() instead. 
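+
+    Illustrative nginx pairing (a sketch; the actual internal location ships
+    in nginx.conf within this patch):
+
+        location /media-internal/ { internal; alias /app/media/; }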
+ """ + from django.http import HttpResponse, Http404 + + # Normalize and prevent path traversal + media_root = os.path.abspath(str(settings.MEDIA_ROOT)) + requested_path = os.path.abspath(os.path.join(media_root, path)) + + if not requested_path.startswith(media_root) or not os.path.isfile(requested_path): + raise Http404("Media file not found") + + # Guess content-type (fallback to octet-stream) + content_type, _ = mimetypes.guess_type(requested_path) + content_type = content_type or "application/octet-stream" + + # Construct the internal path for nginx + internal_prefix = "/media-internal/" # must match nginx internal location + internal_path = internal_prefix + path + + # Use Django's HttpResponse instead of DRF's Response + # This respects the ConditionalCorsMiddleware and avoids duplicate CORS headers + resp = HttpResponse(status=200) + resp["Content-Type"] = content_type + resp["X-Accel-Redirect"] = internal_path + # Optionally set caching headers or Content-Disposition + return resp + + +@api_view(["GET", "HEAD"]) +@permission_classes([IsProfileAuthenticated]) +def media_auth_check(request, path: str = ""): + """ + Lightweight auth check endpoint for nginx auth_request. + + Nginx calls this before proxying to S3. If the user is authenticated + (via Oathkeeper session cookie), returns 204 to allow access. + The IsProfileAuthenticated permission class handles the actual auth check + and will return 401/403 if the user is not authenticated. + + Args: + path: The media path being requested (for logging/auditing) + + Returns: + 204 No Content if authenticated (nginx proceeds to S3) + 401/403 if not authenticated (handled by permission class) + """ + from django.http import HttpResponse + return HttpResponse(status=204) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..03112f2 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,58 @@ +services: + app-setup: + build: . + command: ["/app/setup.sh"] + env_file: .env + network_mode: host + + web: + build: . + command: ["poetry", "run", "daphne", "-b", "0.0.0.0", "-p", "8000", "config.asgi:application"] + restart: unless-stopped + env_file: .env + depends_on: + app-setup: + condition: service_completed_successfully + # Media storage is now S3 (Garage) - no local volume mount needed + networks: + - internal + + celery: + build: . + command: ["poetry", "run", "celery", "-A", "config", "worker", "-l", "info"] + restart: unless-stopped + env_file: .env + depends_on: + app-setup: + condition: service_completed_successfully + # Media storage is now S3 (Garage) - no local volume mount needed + networks: + - internal + + celery-beat: + build: . + command: ["poetry", "run", "celery", "-A", "config", "beat", "-l", "info"] + restart: unless-stopped + env_file: .env + depends_on: + app-setup: + condition: service_completed_successfully + networks: + - internal + + nginx: + image: nginx:1.25-alpine + restart: unless-stopped + depends_on: + - web + ports: + - "5500:5500" + volumes: + # Media serving is now via S3 proxy - no local volume mount needed + - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro + networks: + - internal + +networks: + internal: + driver: bridge \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..61d11ed --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,37 @@ +#!/bin/bash +set -e + +# 1. Bridge env vars to files for the agent +# (The agent is configured to read from these paths) +echo "Writing AppRole creds to files..." 
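+# (These paths are assumed to match the auto_auth sink configured in
+#  /etc/vault/agent-config.hcl; adjust both together if they change.)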
+echo -n "$VAULT_ROLE_ID" > /vault/secrets/role_id +echo -n "$VAULT_SECRET_ID" > /vault/secrets/secret_id + +# 2. Start the Vault Agent in the background +echo "Starting Vault Agent..." +vault agent -config=/etc/vault/agent-config.hcl -log-level=debug & +AGENT_PID=$! + +# 3. Wait for BOTH secret files to be rendered +echo "Waiting for admin credentials..." +while [ ! -f /vault/secrets/.admin-ready ]; do + sleep 1 +done +echo "Admin credentials ready." + +echo "Waiting for app credentials..." +while [ ! -f /vault/secrets/.app-ready ]; do + sleep 1 +done +echo "App credentials ready." + +# 4. Source the credentials into the environment +echo "Sourcing credentials..." +set -a # Automatically export all variables +. /vault/secrets/admin.env +. /vault/secrets/app.env +set +a + +# 5. Execute the main container command (e.g., setup.sh or daphne) +echo "Executing command: $@" +exec "$@" \ No newline at end of file diff --git a/manage.py b/manage.py new file mode 100755 index 0000000..8e7ac79 --- /dev/null +++ b/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 0000000..8c778f3 --- /dev/null +++ b/nginx.conf @@ -0,0 +1,108 @@ +map $http_origin $cors_origin { + default ""; + "https://local.example.com:5173" $http_origin; + "https://app.example.com" $http_origin; +} + +# Garage S3 web endpoint (port 3902 for public reads via website mode) +upstream garage_web { + server 10.10.10.39:3902; + server 10.10.10.40:3902 backup; + server 10.10.10.41:3902 backup; +} + +server { + listen 5500; + server_name _; + + # Use Docker's embedded DNS for dynamic container resolution + # Prevents stale IPs when containers restart and get new addresses + resolver 127.0.0.11 valid=10s; + set $upstream_web web:8000; + + # 🔒 SECURITY: Remote Oathkeeper proxies here with auth headers already added + # Flow: Internet → Oathkeeper → This Nginx → Django/S3 + + # Internal auth check endpoint for S3 media + # Forwards all Oathkeeper headers so Django middleware can authenticate + location = /internal-auth-check { + internal; + proxy_pass http://$upstream_web/api/media-auth/; + proxy_pass_request_body off; + proxy_set_header Content-Length ""; + proxy_set_header X-Original-URI $request_uri; + + # Forward standard headers + proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + + # 🔒 Forward all Oathkeeper auth headers for middleware validation + proxy_set_header X-Oathkeeper-Secret $http_x_oathkeeper_secret; + proxy_set_header X-User-Id $http_x_user_id; + proxy_set_header X-User-Email $http_x_user_email; + proxy_set_header X-User-First-Name $http_x_user_first_name; + proxy_set_header X-User-Last-Name $http_x_user_last_name; + proxy_set_header X-User-Phone $http_x_user_phone; + proxy_set_header X-User-Profile-Type $http_x_user_profile_type; + proxy_set_header X-Django-Profile-Id $http_x_django_profile_id; + 
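+
+        # (auth_request uses only the subrequest's status code: 2xx allows the
+        # request through, 401/403 deny it; the response body is discarded)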
} + + # S3-backed media serving with auth_request + # Flow: auth_request → Django validates → proxy to Garage S3 + location /api/media/ { + # Auth check before proxying to S3 + auth_request /internal-auth-check; + + # Strip /api/media/ prefix and proxy to Garage web endpoint + # Website mode serves bucket content at root path + rewrite ^/api/media/(.*)$ /$1 break; + proxy_pass http://garage_web/; + proxy_http_version 1.1; + + # Set Host header to bucket name for Garage website mode + proxy_set_header Host nexus-media.web.garage.nebula; + + # Video streaming support - forward range requests + proxy_set_header Range $http_range; + proxy_set_header If-Range $http_if_range; + add_header Accept-Ranges bytes; + + # Cache headers for static content (private = browser only, not CDN) + add_header Cache-Control "private, max-age=3600"; + } + + # Legacy internal location (kept for backwards compatibility during migration) + # Can be removed once S3 migration is verified + location /media-internal/ { + internal; + alias /app/media/; + add_header Accept-Ranges bytes; + sendfile on; + sendfile_max_chunk 1m; + tcp_nopush on; + tcp_nodelay on; + } + + # All other requests proxy to Django + # Oathkeeper has already validated session and added headers before reaching here + location / { + proxy_pass http://$upstream_web; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_http_version 1.1; + + # WebSocket support (for GraphQL subscriptions) + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + # Large file uploads (videos) - increased from 50m + client_max_body_size 250m; + proxy_buffering off; + proxy_request_buffering off; + proxy_read_timeout 600s; + proxy_send_timeout 600s; + } +} diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..b6c71e0 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2827 @@ +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. + +[[package]] +name = "amqp" +version = "5.3.1" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, +] + +[package.dependencies] +vine = ">=5.0.0,<6.0.0" + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anthropic" +version = "0.75.0" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b"}, + {file = "anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +docstring-parser = ">=0.15,<1" +httpx = ">=0.25.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + +[package.extras] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth[requests] (>=2,<3)"] + +[[package]] +name = "anyio" +version = "4.12.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb"}, + {file = "anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0"}, +] + +[package.dependencies] +idna = ">=2.8" + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + +[[package]] +name = "asgiref" +version = "3.11.0" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d"}, + {file = "asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4"}, +] + +[package.extras] +tests = ["mypy (>=1.14.0)", "pytest", "pytest-asyncio"] + +[[package]] +name = "attrs" +version = "25.4.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, +] + +[[package]] +name = "autobahn" +version = "25.12.2" +description = "WebSocket client & server library, WAMP real-time framework" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "autobahn-25.12.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:16df879672c60f1f3fe452138c80f0fd221b3cb2ee5a14390c80f33b994104c1"}, + {file = "autobahn-25.12.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6ffe28048ef96eb0f925f24c2569bd72332e120f4cb31cd6c40dd66718a5f85e"}, + {file = "autobahn-25.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:3ec6a3719a00fd57b044e4694f3d6e9335892f4ef21f045f090495da7385d240"}, + {file = "autobahn-25.12.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:220748f21e91bd4a538d2d3de640cc17ee30b79f1c04a6c3dcdef321d531ee1c"}, + {file = "autobahn-25.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:ba1867aafdbe585d3d4a5abd35238a78ab54ab3de5bd12a21bca20379c9f512b"}, + {file = "autobahn-25.12.2-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:bc17f6cab9438156d2701c293c76fd02a144f9be0a992c065dfee1935ce4845b"}, + {file = "autobahn-25.12.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5297a782fc7d0a26842438ef1342549ceee29496cda52672ac44635c79eeb94"}, + {file = "autobahn-25.12.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0c3f1d5dafda52f8dc962ab583b6f3473b7b7186cab082d05372ed43a8261a5"}, + {file = "autobahn-25.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:e9e2a962f2de0bc4c53b452916458417a15f5137c956245ac6d0a783a83fa1f7"}, + {file = "autobahn-25.12.2-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c840ee136bfaf6560467160129b0b25a0e33c9a51e2b251e98c5474f27583915"}, + {file = "autobahn-25.12.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9abda5cf817c0f8a19a55a67a031adf2fc70ed351719b5bd9e6fa0f5f4bc8f89"}, + {file = "autobahn-25.12.2-cp314-cp314-win_amd64.whl", hash = "sha256:18b12e8af7fc115487715afa10b3f5b5a4b5989bebbe05b71722cf9fce7b1bfb"}, + {file = "autobahn-25.12.2-pp311-pypy311_pp73-macosx_15_0_arm64.whl", hash = "sha256:0c226329ddec154c6f3b491ea3e4713035f0326c96ebfd6b305bf90f27a2fba1"}, + {file = "autobahn-25.12.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f079393a7626eb448c8accf21151f5f206d02f8e9cee4313d62a5ca30a3aaed"}, + {file = "autobahn-25.12.2-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b3a6c7d54a9f0434a435d88b86555510e5d0a84aa87042e292f29f707cab237"}, + {file = "autobahn-25.12.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:0ad4c10c897ad67d31be2ef8547ed2922875d90ddb95553787cc46c271f822de"}, + {file = "autobahn-25.12.2.tar.gz", hash = "sha256:754c06a54753aeb7e8d10c5cbf03249ad9e2a1a32bca8be02865c6f00628a98c"}, +] + +[package.dependencies] +cbor2 = ">=5.2.0" +cffi = ">=2.0.0" +cryptography = ">=3.4.6" +hyperlink = ">=21.0.0" +msgpack = {version = ">=1.0.2", markers = "platform_python_implementation == \"CPython\""} +py-ubjson = ">=0.16.1" +txaio = ">=25.12.2" +u-msgpack-python = {version = ">=2.1", markers = "platform_python_implementation != \"CPython\""} +ujson = ">=4.0.2" + +[package.extras] +all = ["argon2-cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.1)", "brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.0.0) ; platform_python_implementation != \"CPython\"", "ecdsa (>=0.19.1)", "passlib (>=1.7.4)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service-identity (>=18.1.0)", "twisted (>=22.10.0)", "zope-interface (>=5.2.0)"] +benchmark = ["humanize (>=4.0.0)", "jinja2 (>=3.0.0)", "vmprof (>=0.4.15) ; python_version >= \"3.11\"", "vmprof-flamegraph (>=0.0.1) ; python_version >= \"3.11\""] 
+build-tools = ["auditwheel (>=5.0.0) ; sys_platform == \"linux\"", "build (>=1.0.0)", "wheel (>=0.36.2)"] +compress = ["brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.0.0) ; platform_python_implementation != \"CPython\""] +dev = ["auditwheel (>=5.0.0) ; sys_platform == \"linux\"", "backports-tempfile (>=1.0)", "build (>=1.0.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "furo (>=2024.7.0)", "humanize (>=4.0.0)", "linkify-it-py (>=2.0.0)", "myst-parser (>=2.0)", "passlib", "pep8-naming (>=0.3.3)", "pyenchant (>=3.2)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname (>=0.1.0)", "ruff (>=0.12.1)", "scour (>=0.38)", "sphinx (>=8.2.3)", "sphinx-autoapi (>=2.1.0)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.5)", "sphinxcontrib-images (>=0.9)", "sphinxcontrib-spelling (>=8.0)", "sphinxext-opengraph (>=0.9)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=25.12.2)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] +docs = ["furo (>=2024.7.0)", "linkify-it-py (>=2.0.0)", "myst-parser (>=2.0)", "pyenchant (>=3.2)", "scour (>=0.38)", "sphinx (>=8.2.3)", "sphinx-autoapi (>=2.1.0)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.5)", "sphinxcontrib-images (>=0.9)", "sphinxcontrib-spelling (>=8.0)", "sphinxext-opengraph (>=0.9)"] +encryption = ["base58 (>=2.1.1)", "ecdsa (>=0.19.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service-identity (>=18.1.0)"] +scram = ["argon2-cffi (>=20.1.0)", "passlib (>=1.7.4)"] +twisted = ["attrs (>=20.3.0)", "twisted (>=22.10.0)", "zope-interface (>=5.2.0)"] + +[[package]] +name = "automat" +version = "25.4.16" +description = "Self-service finite-state machines for the programmer on the go." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "automat-25.4.16-py3-none-any.whl", hash = "sha256:04e9bce696a8d5671ee698005af6e5a9fa15354140a87f4870744604dcdd3ba1"}, + {file = "automat-25.4.16.tar.gz", hash = "sha256:0017591a5477066e90d26b0e696ddc143baafd87b588cfac8100bc6be9634de0"}, +] + +[package.extras] +visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] + +[[package]] +name = "billiard" +version = "4.2.4" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5"}, + {file = "billiard-4.2.4.tar.gz", hash = "sha256:55f542c371209e03cd5862299b74e52e4fbcba8250ba611ad94276b369b6a85f"}, +] + +[[package]] +name = "boto3" +version = "1.42.16" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "boto3-1.42.16-py3-none-any.whl", hash = "sha256:37a43d42aebd06a8f93ee801ea1b7b5181ac42a30869ef403c9dadc160a748e5"}, + {file = "boto3-1.42.16.tar.gz", hash = "sha256:811391611db88c8a061f6e6fabbd7ca784ad9de04490a879f091cbaa9de7de74"}, +] + +[package.dependencies] +botocore = ">=1.42.16,<1.43.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.16.0,<0.17.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.42.16" +description = "Low-level, data-driven core of boto 3." 
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "botocore-1.42.16-py3-none-any.whl", hash = "sha256:b1f584a0f8645c12e07bf6ec9c18e05221a789f2a9b2d3c6291deb42f8c1c542"},
+    {file = "botocore-1.42.16.tar.gz", hash = "sha256:29ee8555cd5d5023350405387cedcf3fe1c7f02fcb8060bf9e01602487482c25"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
+
+[package.extras]
+crt = ["awscrt (==0.29.2)"]
+
+[[package]]
+name = "cbor2"
+version = "5.7.1"
+description = "CBOR (de)serializer with extensive tag support"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "cbor2-5.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a0fc6cc50e0aa04e54792e7824e65bf66c691ae2948d7c012153df2bab1ee314"},
+    {file = "cbor2-5.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2fe69c1473d18d102f1e20982edab5bfa543fa1cda9888bdecc49f8b2f3d720"},
+    {file = "cbor2-5.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34cbbe4fcf82080412a641984a0be43dfe66eac50a8f45596da63fde36189450"},
+    {file = "cbor2-5.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fc3d3f00aed397a1e4634b8e1780f347aad191a2e1e7768a233baadd4f87561"},
+    {file = "cbor2-5.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:99e1666887a868e619096e9b5953734efd034f577e078f4efc5abd23dc1bcd32"},
+    {file = "cbor2-5.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59b78c90a5e682e7d004586fb662be6e451ec06f32fc3a738bbfb9576c72ecc9"},
+    {file = "cbor2-5.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:6300e0322e52f831892054f1ccf25e67fa8040664963d358db090f29d8976ae4"},
+    {file = "cbor2-5.7.1-cp310-cp310-win_arm64.whl", hash = "sha256:7badbde0d89eb7c8b9f7ef8e4f2395c02cfb24b514815656fef8e23276a7cd36"},
+    {file = "cbor2-5.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b1efbe6e82721be44b9faf47d0fd97b0150213eb6a4ba554f4947442bc4e13f"},
+    {file = "cbor2-5.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fb94bab27e00283bdd8f160e125e17dbabec4c9e6ffc8da91c36547ec1eb707f"},
+    {file = "cbor2-5.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29f22266b5e08e0e4152e87ba185e04d3a84a4fd545b99ae3ebe42c658c66a53"},
+    {file = "cbor2-5.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25d4c7554d6627da781c9bd1d0dd0709456eecb71f605829f98961bb98487dda"},
+    {file = "cbor2-5.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1e15c3a08008cf13ce1dfc64d17c960df5d66d935788d28ec7df54bf0ffb0ef"},
+    {file = "cbor2-5.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f6cdf7eb604ea0e7ef34e3f0b5447da0029ecd3ab7b2dc70e43fa5f7bcfca89"},
+    {file = "cbor2-5.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:dd25cbef8e8e6dbf69f0de95311aecaca7217230cda83ae99fdc37cd20d99250"},
+    {file = "cbor2-5.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:40cc9c67242a7abac5a4e062bc4d1d2376979878c0565a4b2f08fd9ed9212945"},
+    {file = "cbor2-5.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bd5ca44891c06f6b85d440836c967187dc1d30b15f86f315d55c675d3a841078"},
+    {file = "cbor2-5.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:537d73ef930ccc1a7b6a2e8d2cbf81407d270deb18e40cda5eb511bd70f71078"},
+    {file = "cbor2-5.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:edbf814dd7763b6eda27a5770199f6ccd55bd78be8f4367092460261bfbf19d0"},
+    {file = "cbor2-5.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fc81da8c0e09beb42923e455e477b36ff14a03b9ca18a8a2e9b462de9a953e8"},
+    {file = "cbor2-5.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e4a7d660d428911a3aadb7105e94438d7671ab977356fdf647a91aab751033bd"},
+    {file = "cbor2-5.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:228e0af9c0a9ddf6375b6ae010eaa1942a1901d403f134ac9ee6a76a322483f9"},
+    {file = "cbor2-5.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:2d08a6c0d9ed778448e185508d870f4160ba74f59bb17a966abd0d14d0ff4dd3"},
+    {file = "cbor2-5.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:752506cfe72da0f4014b468b30191470ee8919a64a0772bd3b36a4fccf5fcefc"},
+    {file = "cbor2-5.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:59d5da59fffe89692d5bd1530eef4d26e4eb7aa794aaa1f4e192614786409009"},
+    {file = "cbor2-5.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:533117918d518e01348f8cd0331271c207e7224b9a1ed492a0ff00847f28edc8"},
+    {file = "cbor2-5.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8d6d9436ff3c3323ea5863ecf7ae1139590991685b44b9eb6b7bb1734a594af6"},
+    {file = "cbor2-5.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:661b871ca754a619fcd98c13a38b4696b2b57dab8b24235c00b0ba322c040d24"},
+    {file = "cbor2-5.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8065aa90d715fd9bb28727b2d774ee16e695a0e1627ae76e54bf19f9d99d63f"},
+    {file = "cbor2-5.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cb1b7047d73590cfe8e373e2c804fa99be47e55b1b6186602d0f86f384cecec1"},
+    {file = "cbor2-5.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:31d511df7ebd6624fdb4cecdafb4ffb9a205f9ff8c8d98edd1bef0d27f944d74"},
+    {file = "cbor2-5.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:f5d37f7b0f84394d2995bd8722cb01c86a885c4821a864a34b7b4d9950c5e26e"},
+    {file = "cbor2-5.7.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e5826e4fa4c33661960073f99cf67c82783895524fb66f3ebdd635c19b5a7d68"},
+    {file = "cbor2-5.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f19a00d6ac9a77cb611073250b06bf4494b41ba78a1716704f7008e0927d9366"},
+    {file = "cbor2-5.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2113aea044cd172f199da3520bc4401af69eae96c5180ca7eb660941928cb89"},
+    {file = "cbor2-5.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f17eacea2d28fecf28ac413c1d7927cde0a11957487d2630655d6b5c9c46a0b"},
+    {file = "cbor2-5.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d65deea39cae533a629561e7da672402c46731122b6129ed7c8eaa1efe04efce"},
+    {file = "cbor2-5.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:57d8cc29ec1fd20500748e0e767ff88c13afcee839081ba4478c41fcda6ee18b"},
+    {file = "cbor2-5.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:94fb939d0946f80c49ba45105ca3a3e13e598fc9abd63efc6661b02d4b4d2c50"},
+    {file = "cbor2-5.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4fd7225ac820bbb9f03bd16bc1a7efb6c4d1c451f22c0a153ff4ec46495c59c5"},
+    {file = "cbor2-5.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0a94c265d92ecc25b11072f5f41685a881c8d95fa64d6691db79cea6eac8c94a"},
+    {file = "cbor2-5.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a56a92bd6070c98513eacdd3e0efbe07c373a5a1637acef94b18f141e71079e"},
+    {file = "cbor2-5.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4682973d385020786ff0c8c6d9694e2428f1bb4cd82a8a0f172eaa9cd674c814"},
+    {file = "cbor2-5.7.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f2e226066b801d1015c632a8309e3b322e5f1488a4472ffc8310bbf1386d84"},
+    {file = "cbor2-5.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f6f342a3a745f8aecc0a6253ea45952dbaf9ffdfeb641490298b3b92074365c7"},
+    {file = "cbor2-5.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:45e6a01c028b3588028995b4016009d6525b82981ab095ffaaef78798be35583"},
+    {file = "cbor2-5.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd044d65dc026f710104515359350014101eb5be86925314328ebe6221312a1c"},
+    {file = "cbor2-5.7.1-cp39-cp39-win_arm64.whl", hash = "sha256:d7e2d2a116108d7e4e9cda46385beed4102f8dca599a84e78bffdc5b07ebed89"},
+    {file = "cbor2-5.7.1-py3-none-any.whl", hash = "sha256:68834e4eff2f56629ce6422b0634bc3f74c5a4269de5363f5265fe452c706ba7"},
+    {file = "cbor2-5.7.1.tar.gz", hash = "sha256:7a405a1d7c8230ee9acf240aad48ae947ef584e8af05f169f3c1bde8f01f8b71"},
+]
+
+[[package]]
+name = "celery"
+version = "5.6.0"
+description = "Distributed Task Queue."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "celery-5.6.0-py3-none-any.whl", hash = "sha256:33cf01477b175017fc8f22c5ee8a65157591043ba8ca78a443fe703aa910f581"},
+    {file = "celery-5.6.0.tar.gz", hash = "sha256:641405206042d52ae460e4e9751a2e31b06cf80ab836fcf92e0b9311d7ea8113"},
+]
+
+[package.dependencies]
+billiard = ">=4.2.1,<5.0"
+click = ">=8.1.2,<9.0"
+click-didyoumean = ">=0.3.0"
+click-plugins = ">=1.1.1"
+click-repl = ">=0.2.0"
+exceptiongroup = ">=1.3.0"
+kombu = ">=5.6.0"
+python-dateutil = ">=2.8.2"
+tzlocal = "*"
+vine = ">=5.1.0,<6.0"
+
+[package.extras]
+arangodb = ["pyArango (>=2.0.2)"]
+auth = ["cryptography (==46.0.3)"]
+azureblockblob = ["azure-identity (>=1.19.0)", "azure-storage-blob (>=12.15.0)"]
+brotli = ["brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlipy (>=0.7.0) ; platform_python_implementation == \"PyPy\""]
+cassandra = ["cassandra-driver (>=3.25.0,<4)"]
+consul = ["python-consul2 (==0.1.5)"]
+cosmosdbsql = ["pydocumentdb (==2.3.5)"]
+couchbase = ["couchbase (>=3.0.0) ; platform_python_implementation != \"PyPy\" and (platform_system != \"Windows\" or python_version < \"3.10\")"]
+couchdb = ["pycouchdb (==1.16.0)"]
+django = ["Django (>=2.2.28)"]
+dynamodb = ["boto3 (>=1.26.143)"]
+elasticsearch = ["elastic-transport (<=9.1.0)", "elasticsearch (<=9.1.2)"]
+eventlet = ["eventlet (>=0.32.0) ; python_version < \"3.10\""]
+gcs = ["google-cloud-firestore (==2.21.0)", "google-cloud-storage (>=2.10.0)", "grpcio (==1.75.1)"]
+gevent = ["gevent (>=1.5.0)"]
+librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""]
+memcache = ["pylibmc (==1.6.3) ; platform_system != \"Windows\""]
+mongodb = ["kombu[mongodb]"]
+msgpack = ["kombu[msgpack]"]
+pydantic = ["pydantic (>=2.12.0a1) ; python_version >= \"3.14\"", "pydantic (>=2.4) ; python_version < \"3.14\""]
+pymemcache = ["python-memcached (>=1.61)"]
+pyro = ["pyro4 (==4.82) ; python_version < \"3.11\""]
+pytest = ["pytest-celery[all] (>=1.2.0,<1.3.0)"]
+redis = ["kombu[redis]"]
+s3 = ["boto3 (>=1.26.143)"]
+slmq = ["softlayer_messaging (>=1.0.3)"]
+solar = ["ephem (==4.2) ; platform_python_implementation != \"PyPy\""]
+sqlalchemy = ["kombu[sqlalchemy]"]
+sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.5.0)", "pycurl (>=7.43.0.5,<7.45.4) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\" and python_version < \"3.9\"", "pycurl (>=7.45.4) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "urllib3 (>=1.26.16)"]
+tblib = ["tblib (==3.2.2)"]
+yaml = ["kombu[yaml]"]
+zookeeper = ["kazoo (>=1.3.1)"]
+zstd = ["zstandard (==0.23.0)"]
+
+[[package]]
+name = "celery-types"
+version = "0.23.0"
+description = "Type stubs for Celery and its related packages"
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "celery_types-0.23.0-py3-none-any.whl", hash = "sha256:0cc495b8d7729891b7e070d0ec8d4906d2373209656a6e8b8276fe1ed306af9a"},
+    {file = "celery_types-0.23.0.tar.gz", hash = "sha256:402ed0555aea3cd5e1e6248f4632e4f18eec8edb2435173f9e6dc08449fa101e"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.9.0,<5.0.0"
+
+[[package]]
+name = "certifi"
+version = "2025.11.12"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
+    {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
+    {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
+    {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
+    {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
+    {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
+    {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
+    {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
+    {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
+    {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
+    {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
+    {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
+    {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
+    {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
+    {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
+    {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
+    {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
+    {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
+    {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
+    {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
+    {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
+    {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
+    {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
+    {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
+    {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
+    {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
+    {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
+    {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
+    {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
+    {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
+    {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
+    {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
+    {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
+    {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
+    {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
+    {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
+    {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
+    {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
+    {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
+    {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
+    {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
+    {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
+    {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
+]
+
+[package.dependencies]
+pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
+
+[[package]]
+name = "channels"
+version = "4.3.2"
+description = "Brings async, event-driven capabilities to Django."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "channels-4.3.2-py3-none-any.whl", hash = "sha256:fef47e9055a603900cf16cef85f050d522d9ac4b3daccf24835bd9580705c176"},
+    {file = "channels-4.3.2.tar.gz", hash = "sha256:f2bb6bfb73ad7fb4705041d07613c7b4e69528f01ef8cb9fb6c21d9295f15667"},
+]
+
+[package.dependencies]
+asgiref = ">=3.9.0,<4"
+Django = ">=4.2"
+
+[package.extras]
+daphne = ["daphne (>=4.0.0)"]
+tests = ["async-timeout", "coverage (>=4.5,<5.0)", "pytest", "pytest-asyncio", "pytest-django", "selenium"]
+types = ["types-channels"]
+
+[[package]]
+name = "channels-valkey"
+version = "0.3.0"
+description = "Valkey-backed ASGI channel layer implementation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "channels_valkey-0.3.0-py3-none-any.whl", hash = "sha256:84e2328b11ed67bd370235b1f6d721b3423630d5ecf43686b644b40e374e57df"},
+    {file = "channels_valkey-0.3.0.tar.gz", hash = "sha256:baea49d16f087db64e2a0901a73a422187ce0e243946978e570f227283af02a4"},
+]
+
+[package.dependencies]
+asgiref = ">=3.9.1"
+channels = ">=4.2.2"
+msgpack = ">=1.1.0"
+valkey = ">=6.0.0"
+
+[package.extras]
+cryptography = ["cryptography (>=43.0.1)"]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"},
+    {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"},
+    {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"},
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+    {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
+    {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "click-didyoumean"
+version = "0.3.1"
+description = "Enables git-like *did-you-mean* feature in click"
+optional = false
+python-versions = ">=3.6.2"
+groups = ["main"]
+files = [
+    {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"},
+    {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"},
+]
+
+[package.dependencies]
+click = ">=7"
+
+[[package]]
+name = "click-plugins"
+version = "1.1.1.2"
+description = "An extension module for click to enable registering CLI commands via setuptools entry-points."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6"},
+    {file = "click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261"},
+]
+
+[package.dependencies]
+click = ">=4.0"
+
+[package.extras]
+dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]
+
+[[package]]
+name = "click-repl"
+version = "0.3.0"
+description = "REPL plugin for Click"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+    {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"},
+    {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+prompt-toolkit = ">=3.0.36"
+
+[package.extras]
+testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main"]
+markers = "platform_system == \"Windows\""
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "constantly"
+version = "23.10.4"
+description = "Symbolic constants in Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"},
+    {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"},
+]
+
+[[package]]
+name = "cryptography"
+version = "46.0.3"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = "!=3.9.0,!=3.9.1,>=3.8"
+groups = ["main"]
+files = [
+    {file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"},
+    {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"},
+    {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"},
+    {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"},
+    {file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"},
+    {file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"},
+    {file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"},
+    {file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"},
+    {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"},
+    {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"},
+    {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"},
+    {file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"},
+    {file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"},
+    {file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"},
+    {file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"},
+    {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"},
+    {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"},
+    {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"},
+    {file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"},
+    {file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"},
+    {file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"},
+    {file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"},
+    {file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"},
+    {file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"},
+    {file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"},
+]
+
+[package.dependencies]
+cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox[uv] (>=2024.4.15)"]
+pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
+sdist = ["build (>=1.0.0)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "daphne"
+version = "4.2.1"
+description = "Django ASGI (HTTP/WebSocket) server"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "daphne-4.2.1-py3-none-any.whl", hash = "sha256:881e96b387b95b35ad85acd855f229d7f5b79073d6649089c8a33f661885e055"},
+    {file = "daphne-4.2.1.tar.gz", hash = "sha256:5f898e700a1fda7addf1541d7c328606415e96a7bd768405f0463c312fcb31b3"},
+]
+
+[package.dependencies]
+asgiref = ">=3.5.2,<4"
+autobahn = ">=22.4.2"
+twisted = {version = ">=22.4", extras = ["tls"]}
+
+[package.extras]
+tests = ["black", "django", "flake8", "flake8-bugbear", "hypothesis", "mypy", "pytest", "pytest-asyncio", "pytest-cov", "tox"]
+
+[[package]]
+name = "distro"
+version = "1.9.0"
+description = "Distro - an OS platform information API"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+    {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+    {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
+]
+
+[[package]]
+name = "django"
+version = "6.0"
+description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
+optional = false
+python-versions = ">=3.12"
+groups = ["main"]
+files = [
+    {file = "django-6.0-py3-none-any.whl", hash = "sha256:1cc2c7344303bbfb7ba5070487c17f7fc0b7174bbb0a38cebf03c675f5f19b6d"},
+    {file = "django-6.0.tar.gz", hash = "sha256:7b0c1f50c0759bbe6331c6a39c89ae022a84672674aeda908784617ef47d8e26"},
+]
+
+[package.dependencies]
+asgiref = ">=3.9.1"
+sqlparse = ">=0.5.0"
+tzdata = {version = "*", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+argon2 = ["argon2-cffi (>=23.1.0)"]
+bcrypt = ["bcrypt (>=4.1.1)"]
+
+[[package]]
+name = "django-choices-field"
+version = "3.1.1"
+description = "Django field that set/get django's new TextChoices/IntegerChoices enum."
+optional = false
+python-versions = "<4.0,>=3.8"
+groups = ["main"]
+files = [
+    {file = "django_choices_field-3.1.1-py3-none-any.whl", hash = "sha256:409d691b3b4bd3982e9a8af499fe28fce4ee19a83b2a93ea69c5d98797f4897d"},
+    {file = "django_choices_field-3.1.1.tar.gz", hash = "sha256:93d851766ebcd87095d19e56f78164c1015935bfb8fce4f3be27ef689c10efc5"},
+]
+
+[package.dependencies]
+django = ">=3.2"
+typing_extensions = ">=4.0.0"
+
+[[package]]
+name = "django-cors-headers"
+version = "4.9.0"
+description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "django_cors_headers-4.9.0-py3-none-any.whl", hash = "sha256:15c7f20727f90044dcee2216a9fd7303741a864865f0c3657e28b7056f61b449"},
+    {file = "django_cors_headers-4.9.0.tar.gz", hash = "sha256:fe5d7cb59fdc2c8c646ce84b727ac2bca8912a247e6e68e1fb507372178e59e8"},
+]
+
+[package.dependencies]
+asgiref = ">=3.6"
+django = ">=4.2"
+
+[[package]]
+name = "django-storages"
+version = "1.14.6"
+description = "Support for many storage backends in Django"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "django_storages-1.14.6-py3-none-any.whl", hash = "sha256:11b7b6200e1cb5ffcd9962bd3673a39c7d6a6109e8096f0e03d46fab3d3aabd9"},
+    {file = "django_storages-1.14.6.tar.gz", hash = "sha256:7a25ce8f4214f69ac9c7ce87e2603887f7ae99326c316bc8d2d75375e09341c9"},
+]
+
+[package.dependencies]
+boto3 = {version = ">=1.4.4", optional = true, markers = "extra == \"s3\""}
+Django = ">=3.2"
+
+[package.extras]
+azure = ["azure-core (>=1.13)", "azure-storage-blob (>=12)"]
+boto3 = ["boto3 (>=1.4.4)"]
+dropbox = ["dropbox (>=7.2.1)"]
+google = ["google-cloud-storage (>=1.36.1)"]
+libcloud = ["apache-libcloud"]
+s3 = ["boto3 (>=1.4.4)"]
+sftp = ["paramiko (>=1.15)"]
+
+[[package]]
+name = "django-valkey"
+version = "0.2.1"
+description = "a valkey backend for django"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+    {file = "django_valkey-0.2.1-py3-none-any.whl", hash = "sha256:24c3296eba78eae0543f21517dab297995e4b2f65dafd75cd2ded42b25f7d6e7"},
+    {file = "django_valkey-0.2.1.tar.gz", hash = "sha256:747e35609f7d0af906d3d1af8a0f8edfd9a872998e4750c299d1d5a17d0496a9"},
+]
+
+[package.dependencies]
+django = ">=4.2.20"
+valkey = ">=6.0.2"
+
+[package.extras]
+brotli = ["brotli (>=1.1.0,<2.0.0)"]
+libvalkey = ["libvalkey (>=4.0.1,<5.0.0)"]
+lz4 = ["lz4 (>=4.3.3,<5.0.0)"]
+msgpack = ["msgpack (>=1.1.0,<2.0.0)"]
+pyzstd = ["pyzstd (>=0.16.2,<0.17.0)"]
+
+[[package]]
+name = "djangorestframework"
+version = "3.16.1"
+description = "Web APIs for Django, made easy."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "djangorestframework-3.16.1-py3-none-any.whl", hash = "sha256:33a59f47fb9c85ede792cbf88bde71893bcda0667bc573f784649521f1102cec"},
+    {file = "djangorestframework-3.16.1.tar.gz", hash = "sha256:166809528b1aced0a17dc66c24492af18049f2c9420dbd0be29422029cfc3ff7"},
+]
+
+[package.dependencies]
+django = ">=4.2"
+
+[[package]]
+name = "docstring-parser"
+version = "0.17.0"
+description = "Parse Python docstrings in reST, Google and Numpydoc format"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708"},
+    {file = "docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912"},
+]
+
+[package.extras]
+dev = ["pre-commit (>=2.16.0) ; python_version >= \"3.9\"", "pydoctor (>=25.4.0)", "pytest"]
+docs = ["pydoctor (>=25.4.0)"]
+test = ["pytest"]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.3.1"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"},
+    {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "ffmpeg-python"
+version = "0.2.0"
+description = "Python bindings for FFmpeg - with complex filtering support"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "ffmpeg-python-0.2.0.tar.gz", hash = "sha256:65225db34627c578ef0e11c8b1eb528bb35e024752f6f10b78c011f6f64c4127"},
+    {file = "ffmpeg_python-0.2.0-py3-none-any.whl", hash = "sha256:ac441a0404e053f8b6a1113a77c0f452f1cfc62f6344a769475ffdc0f56c23c5"},
+]
+
+[package.dependencies]
+future = "*"
+
+[package.extras]
+dev = ["Sphinx (==2.1.0)", "future (==0.17.1)", "numpy (==1.16.4)", "pytest (==4.6.1)", "pytest-mock (==1.10.4)", "tox (==3.12.1)"]
+
+[[package]]
+name = "future"
+version = "1.0.0"
+description = "Clean single-source support for Python 3 and 2"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["main"]
+files = [
+    {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"},
+    {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"},
+]
+
+[[package]]
+name = "graphql-core"
+version = "3.2.7"
+description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL."
+optional = false
+python-versions = "<4,>=3.7"
+groups = ["main"]
+files = [
+    {file = "graphql_core-3.2.7-py3-none-any.whl", hash = "sha256:17fc8f3ca4a42913d8e24d9ac9f08deddf0a0b2483076575757f6c412ead2ec0"},
+    {file = "graphql_core-3.2.7.tar.gz", hash = "sha256:27b6904bdd3b43f2a0556dad5d579bdfdeab1f38e8e8788e555bdcb586a6f62c"},
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
+    {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+    {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.16"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+    {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+
+[package.extras]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "httpx-sse"
+version = "0.4.3"
+description = "Consume Server-Sent Event (SSE) messages with HTTPX."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc"},
+    {file = "httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d"},
+]
+
+[[package]]
+name = "hvac"
+version = "2.4.0"
+description = "HashiCorp Vault API client"
+optional = false
+python-versions = "<4.0,>=3.8"
+groups = ["main"]
+files = [
+    {file = "hvac-2.4.0-py3-none-any.whl", hash = "sha256:008db5efd8c2f77bd37d2368ea5f713edceae1c65f11fd608393179478649e0f"},
+    {file = "hvac-2.4.0.tar.gz", hash = "sha256:e0056ad9064e7923e874e6769015b032580b639e29246f5ab1044f7959c1c7e0"},
+]
+
+[package.dependencies]
+requests = ">=2.27.1,<3.0.0"
+
+[package.extras]
+parser = ["pyhcl (>=0.4.4,<0.5.0)"]
+
+[[package]]
+name = "hyperlink"
+version = "21.0.0"
+description = "A featureful, immutable, and correct URL for Python."
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+groups = ["main"]
+files = [
+    {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"},
+    {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"},
+]
+
+[package.dependencies]
+idna = ">=2.5"
+
+[[package]]
+name = "idna"
+version = "3.11"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
+    {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
+]
+
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
+[[package]]
+name = "incremental"
+version = "24.11.0"
+description = "A CalVer version manager that supports the future."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "incremental-24.11.0-py3-none-any.whl", hash = "sha256:a34450716b1c4341fe6676a0598e88a39e04189f4dce5dc96f656e040baa10b3"},
+    {file = "incremental-24.11.0.tar.gz", hash = "sha256:87d3480dbb083c1d736222511a8cf380012a8176c2456d01ef483242abbbcf8c"},
+]
+
+[package.dependencies]
+packaging = ">=17.0"
+
+[[package]]
+name = "jiter"
+version = "0.12.0"
+description = "Fast iterable JSON parser."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "jiter-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e7acbaba9703d5de82a2c98ae6a0f59ab9770ab5af5fa35e43a303aee962cf65"},
+    {file = "jiter-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:364f1a7294c91281260364222f535bc427f56d4de1d8ffd718162d21fbbd602e"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ee4d25805d4fb23f0a5167a962ef8e002dbfb29c0989378488e32cf2744b62"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:796f466b7942107eb889c08433b6e31b9a7ed31daceaecf8af1be26fb26c0ca8"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35506cb71f47dba416694e67af996bbdefb8e3608f1f78799c2e1f9058b01ceb"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:726c764a90c9218ec9e4f99a33d6bf5ec169163f2ca0fc21b654e88c2abc0abc"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa47810c5565274810b726b0dc86d18dce5fd17b190ebdc3890851d7b2a0e74"},
+    {file = "jiter-0.12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ec0259d3f26c62aed4d73b198c53e316ae11f0f69c8fbe6682c6dcfa0fcce2"},
+    {file = "jiter-0.12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:79307d74ea83465b0152fa23e5e297149506435535282f979f18b9033c0bb025"},
+    {file = "jiter-0.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cf6e6dd18927121fec86739f1a8906944703941d000f0639f3eb6281cc601dca"},
+    {file = "jiter-0.12.0-cp310-cp310-win32.whl", hash = "sha256:b6ae2aec8217327d872cbfb2c1694489057b9433afce447955763e6ab015b4c4"},
+    {file = "jiter-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7f49ce90a71e44f7e1aa9e7ec415b9686bbc6a5961e57eab511015e6759bc11"},
+    {file = "jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash =
"sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9"}, + {file = "jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6"}, + {file = "jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725"}, + {file = "jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6"}, + {file = "jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e"}, + {file = "jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c"}, + {file = "jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f"}, + {file = "jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5"}, + {file = "jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37"}, + {file = "jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403"}, + {file = "jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126"}, + {file = "jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9"}, + {file = "jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86"}, + {file = "jiter-0.12.0-cp312-cp312-win32.whl", hash = 
"sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44"}, + {file = "jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb"}, + {file = "jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789"}, + {file = "jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e"}, + {file = "jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed"}, + {file = "jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9"}, + {file = "jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626"}, + {file = "jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c"}, + {file = "jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de"}, + {file = "jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a"}, + {file = "jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60"}, + {file = "jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6"}, + {file = "jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4"}, + {file = "jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb"}, + {file = "jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7"}, + {file = "jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3"}, + {file = "jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525"}, + {file = "jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49"}, + {file = "jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1"}, + {file = 
"jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e"}, + {file = "jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e"}, + {file = "jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff"}, + {file = "jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a"}, + {file = "jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a"}, + {file = "jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67"}, + {file = "jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b"}, + {file = "jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42"}, + {file = "jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf"}, + {file = "jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451"}, + {file = "jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f"}, + {file = "jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783"}, + {file = "jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b"}, + {file = "jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6"}, + {file = "jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183"}, + {file = "jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873"}, + {file = "jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473"}, + {file = "jiter-0.12.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c9d28b218d5f9e5f69a0787a196322a5056540cb378cac8ff542b4fa7219966c"}, + {file = 
"jiter-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0ee12028daf8cfcf880dd492349a122a64f42c059b6c62a2b0c96a83a8da820"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b135ebe757a82d67ed2821526e72d0acf87dd61f6013e20d3c45b8048af927b"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15d7fafb81af8a9e3039fc305529a61cd933eecee33b4251878a1c89859552a3"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92d1f41211d8a8fe412faad962d424d334764c01dac6691c44691c2e4d3eedaf"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a64a48d7c917b8f32f25c176df8749ecf08cec17c466114727efe7441e17f6d"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122046f3b3710b85de99d9aa2f3f0492a8233a2f54a64902b096efc27ea747b5"}, + {file = "jiter-0.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:27ec39225e03c32c6b863ba879deb427882f243ae46f0d82d68b695fa5b48b40"}, + {file = "jiter-0.12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26b9e155ddc132225a39b1995b3b9f0fe0f79a6d5cbbeacf103271e7d309b404"}, + {file = "jiter-0.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ab05b7c58e29bb9e60b70c2e0094c98df79a1e42e397b9bb6eaa989b7a66dd0"}, + {file = "jiter-0.12.0-cp39-cp39-win32.whl", hash = "sha256:59f9f9df87ed499136db1c2b6c9efb902f964bed42a582ab7af413b6a293e7b0"}, + {file = "jiter-0.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3719596a1ebe7a48a498e8d5d0c4bf7553321d4c3eee1d620628d51351a3928"}, + {file = "jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:4739a4657179ebf08f85914ce50332495811004cc1747852e8b2041ed2aab9b8"}, + {file = "jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:41da8def934bf7bec16cb24bd33c0ca62126d2d45d81d17b864bd5ad721393c3"}, + {file = "jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c44ee814f499c082e69872d426b624987dbc5943ab06e9bbaa4f81989fdb79e"}, + {file = "jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2097de91cf03eaa27b3cbdb969addf83f0179c6afc41bbc4513705e013c65d"}, + {file = "jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb"}, + {file = "jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b"}, + {file = "jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f"}, + {file = "jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c"}, + {file = "jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b"}, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = 
"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"}, + {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"}, + {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "kombu" +version = "5.6.1" +description = "Messaging library for Python." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "kombu-5.6.1-py3-none-any.whl", hash = "sha256:b69e3f5527ec32fc5196028a36376501682973e9620d6175d1c3d4eaf7e95409"}, + {file = "kombu-5.6.1.tar.gz", hash = "sha256:90f1febb57ad4f53ca327a87598191b2520e0c793c75ea3b88d98e3b111282e4"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +packaging = "*" +tzdata = {version = ">=2025.2", markers = "python_version >= \"3.9\""} +vine = "5.1.0" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (>=2.2.0)"] +consul = ["python-consul2 (==0.1.5)"] +gcpubsub = ["google-cloud-monitoring (>=2.16.0)", "google-cloud-pubsub (>=2.18.4)", "grpcio (==1.75.1)", "protobuf (==6.32.1)"] +librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] +mongodb = ["pymongo (==4.15.3)"] +msgpack = ["msgpack (==1.1.2)"] +pyro = ["pyro4 (==4.82)"] +qpid = ["qpid-python (==1.36.0-1)", "qpid-tools (==1.36.0-1)"] +redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2,<6.5)"] +slmq = ["softlayer_messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + +[[package]] +name = "lia-web" +version = "0.2.3" +description = "A library for working with web frameworks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "lia_web-0.2.3-py3-none-any.whl", hash = "sha256:237c779c943cd4341527fc0adfcc3d8068f992ee051f4ef059b8474ee087f641"}, + {file = "lia_web-0.2.3.tar.gz", hash = "sha256:ccc9d24cdc200806ea96a20b22fb68f4759e6becdb901bd36024df7921e848d7"}, +] + 
+[package.dependencies] +typing-extensions = ">=4.14.0" + +[[package]] +name = "mcp" +version = "1.25.0" +description = "Model Context Protocol SDK" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a"}, + {file = "mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802"}, +] + +[package.dependencies] +anyio = ">=4.5" +httpx = ">=0.27.1" +httpx-sse = ">=0.4" +jsonschema = ">=4.20.0" +pydantic = ">=2.11.0,<3.0.0" +pydantic-settings = ">=2.5.2" +pyjwt = {version = ">=2.10.1", extras = ["crypto"]} +python-multipart = ">=0.0.9" +pywin32 = {version = ">=310", markers = "sys_platform == \"win32\""} +sse-starlette = ">=1.6.1" +starlette = ">=0.27" +typing-extensions = ">=4.9.0" +typing-inspection = ">=0.4.1" +uvicorn = {version = ">=0.31.1", markers = "sys_platform != \"emscripten\""} + +[package.extras] +cli = ["python-dotenv (>=1.0.0)", "typer (>=0.16.0)"] +rich = ["rich (>=13.9.4)"] +ws = ["websockets (>=15.0.1)"] + +[[package]] +name = "msgpack" +version = "1.1.2" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}, + {file = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}, + {file = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}, + {file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}, + {file = "msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}, + {file = "msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}, + {file = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}, + {file = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}, + {file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}, + {file = "msgpack-1.1.2-cp311-cp311-win32.whl", hash = 
"sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}, + {file = "msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}, + {file = "msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa"}, + {file = "msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb"}, + {file = "msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f"}, + {file = "msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9"}, + {file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620"}, + {file = "msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}, + {file = "msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}, + {file = "msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf"}, + {file = "msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999"}, + {file = "msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162"}, + {file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794"}, + {file = "msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c"}, + {file = "msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9"}, + {file = "msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"}, + {file = "msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"}, + {file = "msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"}, + {file = "msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"}, + {file = "msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e"}, + {file = "msgpack-1.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23"}, + {file = "msgpack-1.1.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7"}, + {file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8"}, + {file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833"}, + {file = "msgpack-1.1.2-cp39-cp39-win32.whl", hash = "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c"}, + {file = "msgpack-1.1.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030"}, + {file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pillow" +version = "12.0.0" +description = "Python Imaging Library (fork)" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pillow-12.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:3adfb466bbc544b926d50fe8f4a4e6abd8c6bffd28a26177594e6e9b2b76572b"}, + {file = "pillow-12.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ac11e8ea4f611c3c0147424eae514028b5e9077dd99ab91e1bd7bc33ff145e1"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d49e2314c373f4c2b39446fb1a45ed333c850e09d0c59ac79b72eb3b95397363"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c7b2a63fd6d5246349f3d3f37b14430d73ee7e8173154461785e43036ffa96ca"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d64317d2587c70324b79861babb9c09f71fbb780bad212018874b2c013d8600e"}, + {file = "pillow-12.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d77153e14b709fd8b8af6f66a3afbb9ed6e9fc5ccf0b6b7e1ced7b036a228782"}, + {file = "pillow-12.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32ed80ea8a90ee3e6fa08c21e2e091bba6eda8eccc83dbc34c95169507a91f10"}, + {file = "pillow-12.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c828a1ae702fc712978bda0320ba1b9893d99be0badf2647f693cc01cf0f04fa"}, + {file = "pillow-12.0.0-cp310-cp310-win32.whl", hash = "sha256:bd87e140e45399c818fac4247880b9ce719e4783d767e030a883a970be632275"}, + {file = "pillow-12.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:455247ac8a4cfb7b9bc45b7e432d10421aea9fc2e74d285ba4072688a74c2e9d"}, + {file = "pillow-12.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:6ace95230bfb7cd79ef66caa064bbe2f2a1e63d93471c3a2e1f1348d9f22d6b7"}, + {file = "pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc"}, + {file = "pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c"}, + {file = "pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227"}, + {file = "pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b"}, + {file = "pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e"}, + {file = "pillow-12.0.0-cp311-cp311-win32.whl", hash = "sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739"}, + {file = "pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e"}, + {file = "pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d"}, + {file = "pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371"}, + {file = "pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953"}, + {file = "pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8"}, + {file = "pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79"}, + {file = "pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba"}, + {file = "pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0"}, + {file = "pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a"}, + {file = "pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4"}, + {file = "pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399"}, + {file = "pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5"}, + {file = "pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e"}, + {file = "pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344"}, + {file = "pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27"}, + {file = "pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79"}, + {file = "pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098"}, + {file = "pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905"}, + {file = "pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a"}, + {file = "pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3"}, + {file = "pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a"}, + {file = "pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe"}, + {file = "pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee"}, + {file = "pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef"}, + {file = "pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9"}, + {file = "pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b"}, + {file = "pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2"}, + {file = "pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a"}, + {file = "pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b"}, + {file = "pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01"}, + 
{file = "pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e"}, + {file = "pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e"}, + {file = "pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9"}, + {file = "pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab"}, + {file = "pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b"}, + {file = "pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b"}, + {file = "pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0"}, + {file = "pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6"}, + {file = "pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca"}, + {file = "pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925"}, + {file = "pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8"}, + {file = "pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4"}, + {file = "pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52"}, + {file = "pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a"}, + {file = "pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c"}, + 
{file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76"}, + {file = "pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5"}, + {file = "pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +test-arrow = ["arro3-compute", "arro3-core", "nanoarrow", "pyarrow"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma (>=5)", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] +xmp = ["defusedxml"] + +[[package]] +name = "pillow-heif" +version = "1.1.1" +description = "Python interface for libheif library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pillow_heif-1.1.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:7048eb0ecae3b230f086ec913ab2798dcf21d0301edecf3061b34ed50a5d4411"}, + {file = "pillow_heif-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2a1eca5eca44336ea213a7acd9b50572e15d77b6065c962cc9f61137b6a5b55"}, + {file = "pillow_heif-1.1.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dcc25739ceff0701d32693de8e5b65ff92163638f1c3c2466e203b7b978b8ddc"}, + {file = "pillow_heif-1.1.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75d79393fcbcc50ef7a01b7dd5b716e08f78bd5542ded6e4c51121f59d5be8da"}, + {file = "pillow_heif-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03ba3f7e8b13b07636f5a87a6ec5ed1e39b2aa20d4b645b83c80d40be0abeb50"}, + {file = "pillow_heif-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c85fa6f9c0c2c572b1b146443813519ca9a942ef51e92858387c6dca2bbc42f9"}, + {file = "pillow_heif-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:36e98bee9cd707b1daa865fec0ff505431746587ce471b44b5eab61a115e800a"}, + {file = "pillow_heif-1.1.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:57c140368c7ddefa20ecb9b737b4af2d2d5ea0806d1d59be4c525e6a73e6aa72"}, + {file = "pillow_heif-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b6ae0b002ecc7873273ded99aaffa567f7806f4bc57ee1eff7ab5fe1f70e5e7"}, + {file = "pillow_heif-1.1.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39ea2fe8878e722bdfaf30a9b711629c3a4b8a0627b70a833f7381cbd3ef8e87"}, + {file = "pillow_heif-1.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a01644c3c4bc576437c05e1ece4b89814fc381684f5d7926850e01d6e9b6502"}, + {file = "pillow_heif-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5ebe3b7f707b984c8886f367697531d004967b7d8949a34645c7bc1c6a888fe6"}, + {file = "pillow_heif-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c8c8e1a561877006a5a0b654392e614c879d9e4db89d0786a94fe9f5773bcacb"}, + {file = "pillow_heif-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:3d296f874bea4dd17bab7309b843a766834d2b5df53c591eaf3f7cdc91a4c1a3"}, + {file = "pillow_heif-1.1.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = 
"sha256:b3a66c7d3a4ad2f9f6d08b81e102210e1b676039dbd2522b88b6957ada2186e3"}, + {file = "pillow_heif-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b31fd9b5b3c3f056f98f806e2ffe0f54710700045e28f68568753e56101d2ca"}, + {file = "pillow_heif-1.1.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dea5f8a304e6b7fee3f76ac7756962af72e51bafab1bba07993a8c8fc57d5a79"}, + {file = "pillow_heif-1.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34e36485409468816227fbbf59b8ae4c7567702e066ca6e2a8b5e423a7a2fe92"}, + {file = "pillow_heif-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:561e8086128df0aeb6ea68b4fd60bb18428a65099f95349a6674718e4f8132bd"}, + {file = "pillow_heif-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:da66c0c6b119042dda6efb67ca30fcb00f0715eb6205e5636ab487d76f1699ad"}, + {file = "pillow_heif-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:9af92c2a3492b9bb4625b1f6ec7da17ec185e6b77d519d71c06d7e79c65a6f9e"}, + {file = "pillow_heif-1.1.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:8269cae2e0232f73bda5128181a42fbbb562c29b76fbcced22fef70a61b94dbe"}, + {file = "pillow_heif-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:08787cc64b4a519789a348f137b914981ce520d4b906e09e2b8e974c87e3e215"}, + {file = "pillow_heif-1.1.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac0fc8523a77c351991f78766d41290241dd87fbe036b6f777c49f2bd3561119"}, + {file = "pillow_heif-1.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18db6c78b8fa52065339ffb69739f5c45748c0b5f836349f0aba786f7bb905ab"}, + {file = "pillow_heif-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c5db8a8ee7ee4b1311f81d223d32538d63a73adc2ece7610a9f19519856c8e68"}, + {file = "pillow_heif-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a2e6d4f7209aade2d55bbbcdbbbe623118722bcc7a12edef15cf4ee0d8586c3e"}, + {file = "pillow_heif-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:ff158ff338082d39864abd31c69ae2ee57de3f193c85ccbe365f4d7260712229"}, + {file = "pillow_heif-1.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:7f19389ffeb3866f95370eb917e6a32706c110a9fa670daefb63b5660948a82e"}, + {file = "pillow_heif-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8d5fa5539ff3c7bbe64aa446c10bf10f6d8c1604997a66b195bec02e2965eb10"}, + {file = "pillow_heif-1.1.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b08c81602ffd660cd27456fbfa3cbf396cf23bb39d3015cc7a6cd56ade82fd"}, + {file = "pillow_heif-1.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0f2d68af87d5e1f6af0db021b61f62e456f413eba98ea7723d7f49f2a6f1f01"}, + {file = "pillow_heif-1.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e486b15696a958a04178aa9ff7f7db4f803d1ec7bbded924671576125c052ed5"}, + {file = "pillow_heif-1.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a37999e53c0cd32401667303e0b34c43240c32530809827091fabc7eb04d7cad"}, + {file = "pillow_heif-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:1d35e973b2463b03f7b0bd5c898c7a424a46d69f7c20a9c251b322dfe4f45068"}, + {file = "pillow_heif-1.1.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:fe2567cd7e14cc50af9d44eab0d2a29a1579c803aa52c5b9065c0f370439eb87"}, + {file = "pillow_heif-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24416955115a31d704af9505056daab49197f6ce13bad6b092343b984f6c87f8"}, + {file = 
"pillow_heif-1.1.1-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88a150a28bd633014a6033f7f0172be5ab4ea05aa24c17e8496847fd07f87250"}, + {file = "pillow_heif-1.1.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:946d7a789dea87f09d18ec1a0c274d7821a556d7867a52d6f910ffd3bd33e465"}, + {file = "pillow_heif-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f2aa06fb0426d129526dcf6c0b10e1504d2de4b99f11f4e8dc029f186b53f4a3"}, + {file = "pillow_heif-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ac61bba9b67afa59f0872d3fd3dd54a28937acf2edc1cfcf18a71f89f2c3e760"}, + {file = "pillow_heif-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:207b067228b7b91cd48302281140f13cd149d2263866269e274053544ad6e930"}, + {file = "pillow_heif-1.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a3cdb939e4d6d4879f09edd9225d0813350ecae1901b8ea7a1172caf9e644ba5"}, + {file = "pillow_heif-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9c9f6911a6107f9ac4194d3793a36df0b43352ceb13dd379c8ecfbd24b6ca53f"}, + {file = "pillow_heif-1.1.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcebadabbce24134f602c50db0ebf5632d9ef80a04c5167964c419b3d2f14a5"}, + {file = "pillow_heif-1.1.1-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fa5253dc6491d457425db34326f78638ea65938be4a631edd4b7198d7d088ab"}, + {file = "pillow_heif-1.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aac2c3634ee420bb30e19090a1e1870e6ed12717020891cd6ffd34c3cca5c412"}, + {file = "pillow_heif-1.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0ea9c72f5cbe1b35229be883797eb7f113d2e7353dc21a66fd813a33d95a16b3"}, + {file = "pillow_heif-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:16f83a7e0ad3aa4209ae592db2842d35faab21b44d269fb3b1145e07ecbecebc"}, + {file = "pillow_heif-1.1.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fc8273124fe96d83fd6dee9476a5b58b6338cb41ffe97581fc2e8f17c97864c"}, + {file = "pillow_heif-1.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca64d2e83b28ae7f194640e1c6d5d842de8f061845a4fd700a4ab7efb9df15f9"}, + {file = "pillow_heif-1.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7520b37f183f5339c9a0dbdd4cae468cc7d7f191fff26fd18d8d96cf69089994"}, + {file = "pillow_heif-1.1.1.tar.gz", hash = "sha256:f60e8c8a8928556104cec4fff39d43caa1da105625bdb53b11ce3c89d09b6bde"}, +] + +[package.dependencies] +pillow = ">=11.1.0" + +[package.extras] +dev = ["coverage", "defusedxml", "numpy", "opencv-python (==4.12.0.88)", "packaging", "pre-commit", "pylint", "pympler", "pytest", "setuptools"] +docs = ["sphinx (>=4.4)", "sphinx-issues (>=3.0.1)", "sphinx-rtd-theme (>=1.0)"] +tests = ["defusedxml", "numpy", "packaging", "pympler", "pytest"] +tests-min = ["defusedxml", "packaging", "pytest"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +description = 
"psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f"}, + {file = 
"psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20e7fb94e20b03dcc783f76c0865f9da39559dcc0c28dd1a3fce0d01902a6b9c"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bdab48575b6f870f465b397c38f1b415520e9879fdf10a53ee4f49dcbdf8a21"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9d3a9edcfbe77a3ed4bc72836d466dfce4174beb79eda79ea155cc77237ed9e8"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:44fc5c2b8fa871ce7f0023f619f1349a0aa03a0857f2c96fbc01c657dcbbdb49"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c55460033867b4622cda1b6872edf445809535144152e5d14941ef591980edf"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2d11098a83cca92deaeaed3d58cfd150d49b3b06ee0d0852be466bf87596899e"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:691c807d94aecfbc76a14e1408847d59ff5b5906a04a23e12a89007672b9e819"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b81627b691f29c4c30a8f322546ad039c40c328373b11dff7490a3e1b517855"}, + {file = 
"psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:b637d6d941209e8d96a072d7977238eea128046effbf37d1d8b2c0764750017d"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:41360b01c140c2a03d346cec3280cf8a71aa07d94f3b1509fa0161c366af66b4"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:875039274f8a2361e5207857899706da840768e2a775bf8c65e82f60b197df02"}, +] + +[[package]] +name = "py-ubjson" +version = "0.16.1" +description = "Universal Binary JSON encoder/decoder" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "py-ubjson-0.16.1.tar.gz", hash = "sha256:b9bfb8695a1c7e3632e800fb83c943bf67ed45ddd87cd0344851610c69a5a482"}, +] + +[package.extras] +dev = ["Pympler (>=0.7,<0.8)", "coverage (>=4.5.3,<4.6)"] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.23" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = 
"pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = 
"sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = 
"pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + 
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809"}, + {file = "pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + 
+[[package]] +name = "pyopenssl" +version = "25.3.0" +description = "Python wrapper module around the OpenSSL library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6"}, + {file = "pyopenssl-25.3.0.tar.gz", hash = "sha256:c981cb0a3fd84e8602d7afc209522773b94c1c2446a3c710a75b06fe1beae329"}, +] + +[package.dependencies] +cryptography = ">=45.0.7,<47" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"] +test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.2.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.21" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090"}, + {file = "python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92"}, +] + +[[package]] +name = "pywin32" +version = "311" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, + {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, + {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, + {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, + {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, + {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, + {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, + {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, + {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = 
"sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, + {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, + {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, + {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, + {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, + {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, + {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, + {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, + {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, + {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, + {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, + {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, +] + +[[package]] +name = "redis" +version = "5.3.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97"}, + {file = "redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c"}, +] + +[package.dependencies] +PyJWT = ">=2.9.0" + +[package.extras] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + +[[package]] +name = "referencing" +version = "0.37.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"}, + {file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.30.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288"}, + {file = "rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221"}, + {file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7"}, + {file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139"}, + {file = "rpds_py-0.30.0-cp310-cp310-win32.whl", hash = "sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464"}, + {file = "rpds_py-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169"}, + {file = "rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425"}, + {file = "rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d"}, + {file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed"}, + {file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85"}, + {file = "rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c"}, + {file = "rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825"}, + {file = "rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229"}, + {file = "rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad"}, + {file = "rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6"}, + {file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51"}, + {file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5"}, + {file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e"}, + {file = 
"rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394"}, + {file = "rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf"}, + {file = "rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b"}, + {file = "rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e"}, + {file = "rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2"}, + {file = "rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e"}, + {file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31"}, + {file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95"}, + {file = "rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d"}, + {file = "rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15"}, + {file = "rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1"}, + {file = "rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a"}, + {file = "rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db"}, + {file = 
"rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9"}, + {file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08"}, + {file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27"}, + {file = "rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6"}, + {file = "rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d"}, + {file = "rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0"}, + {file = "rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07"}, + {file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f"}, + {file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65"}, + {file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f"}, + {file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53"}, + {file = "rpds_py-0.30.0-cp314-cp314-win32.whl", hash = 
"sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed"}, + {file = "rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950"}, + {file = "rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6"}, + {file = "rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb"}, + {file = "rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8"}, + {file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856"}, + {file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40"}, + {file = "rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0"}, + {file = "rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5"}, + 
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4"}, + {file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e"}, + {file = "rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84"}, +] + +[[package]] +name = "s3transfer" +version = "0.16.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe"}, + {file = "s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920"}, +] + +[package.dependencies] +botocore = ">=1.37.4,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] + +[[package]] +name = "service-identity" +version = "24.2.0" +description = "Service identity verification for pyOpenSSL & cryptography." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85"}, + {file = "service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09"}, +] + +[package.dependencies] +attrs = ">=19.1.0" +cryptography = "*" +pyasn1 = "*" +pyasn1-modules = "*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "idna", "mypy", "pyopenssl", "pytest", "types-pyopenssl"] +docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] +idna = ["idna"] +mypy = ["idna", "mypy", "types-pyopenssl"] +tests = ["coverage[toml] (>=5.0.2)", "pytest"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlparse" +version = "0.5.4" +description = "A non-validating SQL parser." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "sqlparse-0.5.4-py3-none-any.whl", hash = "sha256:99a9f0314977b76d776a0fcb8554de91b9bb8a18560631d6bc48721d07023dcb"}, + {file = "sqlparse-0.5.4.tar.gz", hash = "sha256:4396a7d3cf1cd679c1be976cf3dc6e0a51d0111e87787e7a8d780e7d5a998f9e"}, +] + +[package.extras] +dev = ["build"] +doc = ["sphinx"] + +[[package]] +name = "sse-starlette" +version = "3.1.1" +description = "SSE plugin for Starlette" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sse_starlette-3.1.1-py3-none-any.whl", hash = "sha256:bb38f71ae74cfd86b529907a9fda5632195dfa6ae120f214ea4c890c7ee9d436"}, + {file = "sse_starlette-3.1.1.tar.gz", hash = "sha256:bffa531420c1793ab224f63648c059bcadc412bf9fdb1301ac8de1cf9a67b7fb"}, +] + +[package.dependencies] +anyio = ">=4.7.0" +starlette = ">=0.49.1" + +[package.extras] +daphne = ["daphne (>=4.2.0)"] +examples = ["aiosqlite (>=0.21.0)", "fastapi (>=0.115.12)", "sqlalchemy[asyncio] (>=2.0.41)", "uvicorn (>=0.34.0)"] +granian = ["granian (>=2.3.1)"] +uvicorn = ["uvicorn (>=0.34.0)"] + +[[package]] +name = "starlette" +version = "0.50.0" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca"}, + {file = "starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "strawberry-graphql" +version = "0.287.3" +description = "A library for creating GraphQL APIs" +optional = false +python-versions = "<4.0,>=3.10" +groups = ["main"] +files = [ + {file = "strawberry_graphql-0.287.3-py3-none-any.whl", hash = "sha256:2bb1f9b122ef1213f82f01cf27a095eb0776fda78e12af9e60c54de6e543797c"}, + {file = "strawberry_graphql-0.287.3.tar.gz", hash = "sha256:c81126cc75102aa32417048f074429d6c5c8d096424aa939fdb8827b8c5f84a9"}, +] + +[package.dependencies] +graphql-core = ">=3.2.0,<3.4.0" +lia-web = ">=0.2.1" +packaging = ">=23" +python-dateutil = ">=2.7" +typing-extensions = ">=4.5.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.7.4.post0,<4)"] +asgi = ["python-multipart (>=0.0.7)", "starlette (>=0.18.0)"] +chalice = ["chalice (>=1.22,<2.0)"] +channels = ["asgiref (>=3.2,<4.0)", "channels (>=3.0.5)"] +cli = ["libcst", "pygments (>=2.3,<3.0)", "python-multipart (>=0.0.7)", "rich (>=12.0.0)", "starlette (>=0.18.0)", "typer (>=0.12.4)", "uvicorn (>=0.11.6)", "websockets (>=15.0.1,<16)"] +debug = ["libcst", "rich (>=12.0.0)"] +debug-server = ["libcst", "pygments (>=2.3)", "python-multipart (>=0.0.7)", "rich (>=12.0.0)", "starlette (>=0.18.0)", "typer (>=0.12.4)", "uvicorn (>=0.11.6)", "websockets (>=15.0.1,<16)"] +django = ["Django (>=3.2)", "asgiref (>=3.2,<4.0)"] +fastapi = ["fastapi (>=0.65.2)", "python-multipart (>=0.0.7)"] +flask = ["flask (>=1.1)"] +litestar = ["litestar (>=2) ; python_version ~= \"3.10\""] +opentelemetry = ["opentelemetry-api (<2)", "opentelemetry-sdk (<2)"] +pydantic = ["pydantic (>1.6.1)"] +pyinstrument = ["pyinstrument (>=4.0.0)"] +quart = ["quart (>=0.19.3)"] +sanic = ["sanic (>=20.12.2)"] + +[[package]] +name = "strawberry-graphql-django" +version = "0.70.1" +description = "Strawberry GraphQL Django extension" +optional = false 
+python-versions = "<4.0,>=3.10" +groups = ["main"] +files = [ + {file = "strawberry_graphql_django-0.70.1-py3-none-any.whl", hash = "sha256:4dbe72069406ce8f4242bde6030ef0bec54ef49db9cd140baac3cf6af6d5a9b3"}, + {file = "strawberry_graphql_django-0.70.1.tar.gz", hash = "sha256:19496d3d2adf464c538f480b61e2b1825b6dbf09485673b6b6438b7065351e94"}, +] + +[package.dependencies] +asgiref = ">=3.8" +django = ">=4.2" +strawberry-graphql = ">=0.283.2" + +[package.extras] +debug-toolbar = ["django-debug-toolbar (>=6.0.0)"] +enum = ["django-choices-field (>=2.2.2)"] + +[[package]] +name = "twisted" +version = "25.5.0" +description = "An asynchronous networking framework written in Python" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "twisted-25.5.0-py3-none-any.whl", hash = "sha256:8559f654d01a54a8c3efe66d533d43f383531ebf8d81d9f9ab4769d91ca15df7"}, + {file = "twisted-25.5.0.tar.gz", hash = "sha256:1deb272358cb6be1e3e8fc6f9c8b36f78eb0fa7c2233d2dbe11ec6fee04ea316"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +automat = ">=24.8.0" +constantly = ">=15.1" +hyperlink = ">=17.1.1" +idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} +incremental = ">=24.7.0" +pyopenssl = {version = ">=21.0.0", optional = true, markers = "extra == \"tls\""} +service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""} +typing-extensions = ">=4.2.0" +zope-interface = ">=5" + +[package.extras] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "wsproto", "wsproto"] +conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] +dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "pydoctor (>=24.11.1,<24.12.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] +dev-release = ["pydoctor (>=24.11.1,<24.12.0)", "pydoctor (>=24.11.1,<24.12.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; 
platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "wsproto", "wsproto"] +http2 = ["h2 (>=3.2,<5.0)", "priority (>=1.1.0,<2.0)"] +macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core (<11) ; python_version < \"3.9\"", "pyobjc-core (<11) ; python_version < \"3.9\"", "pyobjc-core ; python_version >= \"3.9\"", "pyobjc-core ; python_version >= \"3.9\"", "pyobjc-framework-cfnetwork (<11) ; python_version < \"3.9\"", "pyobjc-framework-cfnetwork (<11) ; python_version < \"3.9\"", "pyobjc-framework-cfnetwork ; python_version >= \"3.9\"", "pyobjc-framework-cfnetwork ; python_version >= \"3.9\"", "pyobjc-framework-cocoa (<11) ; python_version < \"3.9\"", "pyobjc-framework-cocoa (<11) ; python_version < \"3.9\"", "pyobjc-framework-cocoa ; python_version >= \"3.9\"", "pyobjc-framework-cocoa ; python_version >= \"3.9\"", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "wsproto", "wsproto"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (==1.10.1)", "mypy-zope (==1.0.6)", "priority (>=1.1.0,<2.0)", "pydoctor (>=24.11.1,<24.12.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools", "wsproto"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core (<11) ; python_version < \"3.9\"", "pyobjc-core (<11) ; python_version < \"3.9\"", "pyobjc-core ; python_version >= \"3.9\"", "pyobjc-core ; python_version >= \"3.9\"", "pyobjc-framework-cfnetwork (<11) ; python_version < \"3.9\"", "pyobjc-framework-cfnetwork (<11) ; python_version < \"3.9\"", "pyobjc-framework-cfnetwork ; python_version >= \"3.9\"", "pyobjc-framework-cfnetwork ; python_version >= \"3.9\"", "pyobjc-framework-cocoa (<11) ; python_version < \"3.9\"", "pyobjc-framework-cocoa (<11) ; python_version < \"3.9\"", "pyobjc-framework-cocoa ; python_version >= \"3.9\"", "pyobjc-framework-cocoa ; python_version >= \"3.9\"", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial 
(>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "wsproto", "wsproto"] +serial = ["pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\""] +test = ["cython-test-exception-raiser (>=1.0.2,<2)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] +tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] +websocket = ["wsproto"] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)", "wsproto", "wsproto"] + +[[package]] +name = "txaio" +version = "25.12.2" +description = "Compatibility API between asyncio/Twisted/Trollius" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "txaio-25.12.2-py3-none-any.whl", hash = "sha256:5f6cd6c6b397fc3305790d15efd46a2d5b91cdbefa96543b4f8666aeb56ba026"}, + {file = "txaio-25.12.2.tar.gz", hash = "sha256:9f232c21e12aa1ff52690e365b5a0ecfd42cc27a6ec86e1b92ece88f763f4b78"}, +] + +[package.extras] +all = ["twisted (>=22.10.0)", "zope-interface (>=5.2.0)"] +dev = ["build (>=1.0.0)", "coverage (>=7.0.0)", "pytest (>=8.0.0)", "pytest-cov (>=4.0.0)", "ruff (>=0.4.0)", "twine (>=5.0.0)", "wheel (>=0.42.0)"] +docs = ["furo (>=2024.7.0)", "linkify-it-py (>=2.0.0)", "myst-parser (>=2.0)", "pyenchant (>=3.2)", "scour (>=0.38)", "sphinx (>=8.2.3)", "sphinx-autoapi (>=2.1.0)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.5)", "sphinxcontrib-images (>=0.9)", "sphinxcontrib-spelling (>=8.0)", "sphinxext-opengraph (>=0.9)"] +twisted = ["twisted (>=22.10.0)", "zope-interface (>=5.2.0)"] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "tzdata" +version = "2025.3" +description = "Provider of IANA time zone data" 
+optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1"}, + {file = "tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7"}, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d"}, + {file = "tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + +[[package]] +name = "u-msgpack-python" +version = "2.8.0" +description = "A portable, lightweight MessagePack serializer and deserializer written in pure Python." +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_python_implementation != \"CPython\"" +files = [ + {file = "u-msgpack-python-2.8.0.tar.gz", hash = "sha256:b801a83d6ed75e6df41e44518b4f2a9c221dc2da4bcd5380e3a0feda520bc61a"}, + {file = "u_msgpack_python-2.8.0-py2.py3-none-any.whl", hash = "sha256:1d853d33e78b72c4228a2025b4db28cda81214076e5b0422ed0ae1b1b2bb586a"}, +] + +[[package]] +name = "ujson" +version = "5.11.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25"}, + {file = "ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89"}, + {file = "ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6"}, + {file = "ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb"}, + {file = "ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db"}, + {file = "ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c"}, + {file = "ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138"}, + {file = "ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915"}, + {file = "ujson-5.11.0-cp310-cp310-win32.whl", hash = "sha256:30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723"}, + {file = "ujson-5.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0"}, + {file = "ujson-5.11.0-cp310-cp310-win_arm64.whl", hash = "sha256:85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105"}, + {file = "ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f"}, + {file = 
"ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58"}, + {file = "ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26"}, + {file = "ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a"}, + {file = "ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6"}, + {file = "ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b"}, + {file = "ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba"}, + {file = "ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3"}, + {file = "ujson-5.11.0-cp311-cp311-win32.whl", hash = "sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34"}, + {file = "ujson-5.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01"}, + {file = "ujson-5.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835"}, + {file = "ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702"}, + {file = "ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d"}, + {file = "ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80"}, + {file = "ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc"}, + {file = "ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c"}, + {file = "ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5"}, + {file = "ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b"}, + {file = "ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc"}, + {file = "ujson-5.11.0-cp312-cp312-win32.whl", hash = "sha256:be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88"}, + {file = "ujson-5.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f"}, + {file = "ujson-5.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6"}, + {file = "ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53"}, + {file = "ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752"}, + {file = "ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50"}, + {file = "ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a"}, + {file = "ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03"}, + {file = "ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701"}, + {file = "ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60"}, + {file = "ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328"}, + {file = "ujson-5.11.0-cp313-cp313-win32.whl", hash = "sha256:8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241"}, + {file = "ujson-5.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0"}, + {file = "ujson-5.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9"}, + {file = "ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302"}, + {file = "ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d"}, + {file = "ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638"}, + {file = "ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c"}, + {file = "ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba"}, + {file = "ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018"}, + {file = "ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840"}, + {file = "ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c"}, + {file = "ujson-5.11.0-cp314-cp314-win32.whl", hash = "sha256:1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac"}, + {file = "ujson-5.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629"}, + {file = "ujson-5.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764"}, + {file = "ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433"}, + {file = "ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3"}, + {file = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823"}, + {file = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9"}, + {file = 
"ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076"}, + {file = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c"}, + {file = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746"}, + {file = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef"}, + {file = "ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5"}, + {file = "ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec"}, + {file = "ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab"}, + {file = "ujson-5.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65f3c279f4ed4bf9131b11972040200c66ae040368abdbb21596bf1564899694"}, + {file = "ujson-5.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99c49400572cd77050894e16864a335225191fd72a818ea6423ae1a06467beac"}, + {file = "ujson-5.11.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0654a2691fc252c3c525e3d034bb27b8a7546c9d3eb33cd29ce6c9feda361a6a"}, + {file = "ujson-5.11.0-cp39-cp39-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:6b6ec7e7321d7fc19abdda3ad809baef935f49673951a8bab486aea975007e02"}, + {file = "ujson-5.11.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f62b9976fabbcde3ab6e413f4ec2ff017749819a0786d84d7510171109f2d53c"}, + {file = "ujson-5.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f1a27ab91083b4770e160d17f61b407f587548f2c2b5fbf19f94794c495594a"}, + {file = "ujson-5.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ecd6ff8a3b5a90c292c2396c2d63c687fd0ecdf17de390d852524393cd9ed052"}, + {file = "ujson-5.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9aacbeb23fdbc4b256a7d12e0beb9063a1ba5d9e0dbb2cfe16357c98b4334596"}, + {file = "ujson-5.11.0-cp39-cp39-win32.whl", hash = "sha256:674f306e3e6089f92b126eb2fe41bcb65e42a15432c143365c729fdb50518547"}, + {file = "ujson-5.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c6618f480f7c9ded05e78a1938873fde68baf96cdd74e6d23c7e0a8441175c4b"}, + {file = "ujson-5.11.0-cp39-cp39-win_arm64.whl", hash = "sha256:5600202a731af24a25e2d7b6eb3f648e4ecd4bb67c4d5cf12f8fab31677469c9"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49"}, + {file = "ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04"}, + {file = "ujson-5.11.0.tar.gz", hash = "sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0"}, +] + +[[package]] +name = "urllib3" +version = "2.6.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd"}, + {file = "urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + +[[package]] +name = "uvicorn" +version = "0.40.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "sys_platform != \"emscripten\"" +files = [ + {file = "uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee"}, + {file = "uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "valkey" +version = "6.1.1" +description = "Python client for Valkey forked from redis-py" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "valkey-6.1.1-py3-none-any.whl", hash = "sha256:e2691541c6e1503b53c714ad9a35551ac9b7c0bbac93865f063dbc859a46de92"}, + {file = "valkey-6.1.1.tar.gz", hash = "sha256:5880792990c6c2b5eb604a5ed5f98f300880b6dd92d123819b66ed54bb259731"}, +] + +[package.extras] +libvalkey = ["libvalkey (>=4.0.1)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + +[[package]] +name = "vine" +version = "5.1.0" +description = "Python promises." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, + {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.14" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}, + {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, +] + +[[package]] +name = "whitenoise" +version = "6.11.0" +description = "Radically simplified static file serving for WSGI applications" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "whitenoise-6.11.0-py3-none-any.whl", hash = "sha256:b2aeb45950597236f53b5342b3121c5de69c8da0109362aee506ce88e022d258"}, + {file = "whitenoise-6.11.0.tar.gz", hash = "sha256:0f5bfce6061ae6611cd9396a8231e088722e4fc67bc13a111be74c738d99375f"}, +] + +[package.extras] +brotli = ["brotli"] + +[[package]] +name = "zope-interface" +version = "8.1.1" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "zope_interface-8.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c6b12b656c7d7e3d79cad8e2afc4a37eae6b6076e2c209a33345143148e435e"}, + {file = "zope_interface-8.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:557c0f1363c300db406e9eeaae8ab6d1ba429d4fed60d8ab7dadab5ca66ccd35"}, + {file = "zope_interface-8.1.1-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:127b0e4c873752b777721543cf8525b3db5e76b88bd33bab807f03c568e9003f"}, + {file = "zope_interface-8.1.1-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0892c9d2dd47b45f62d1861bcae8b427fcc49b4a04fff67f12c5c55e56654d7"}, + {file = "zope_interface-8.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff8a92dc8c8a2c605074e464984e25b9b5a8ac9b2a0238dd73a0f374df59a77e"}, + {file = "zope_interface-8.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:54627ddf6034aab1f506ba750dd093f67d353be6249467d720e9f278a578efe5"}, + {file = "zope_interface-8.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8a0fdd5048c1bb733e4693eae9bc4145a19419ea6a1c95299318a93fe9f3d72"}, + {file = "zope_interface-8.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4cb0ea75a26b606f5bc8524fbce7b7d8628161b6da002c80e6417ce5ec757c0"}, + {file = "zope_interface-8.1.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:c267b00b5a49a12743f5e1d3b4beef45479d696dab090f11fe3faded078a5133"}, + {file = "zope_interface-8.1.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e25d3e2b9299e7ec54b626573673bdf0d740cf628c22aef0a3afef85b438aa54"}, + {file = "zope_interface-8.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63db1241804417aff95ac229c13376c8c12752b83cc06964d62581b493e6551b"}, + {file = "zope_interface-8.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:9639bf4ed07b5277fb231e54109117c30d608254685e48a7104a34618bcbfc83"}, + 
{file = "zope_interface-8.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a16715808408db7252b8c1597ed9008bdad7bf378ed48eb9b0595fad4170e49d"}, + {file = "zope_interface-8.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce6b58752acc3352c4aa0b55bbeae2a941d61537e6afdad2467a624219025aae"}, + {file = "zope_interface-8.1.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:807778883d07177713136479de7fd566f9056a13aef63b686f0ab4807c6be259"}, + {file = "zope_interface-8.1.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50e5eb3b504a7d63dc25211b9298071d5b10a3eb754d6bf2f8ef06cb49f807ab"}, + {file = "zope_interface-8.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eee6f93b2512ec9466cf30c37548fd3ed7bc4436ab29cd5943d7a0b561f14f0f"}, + {file = "zope_interface-8.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:80edee6116d569883c58ff8efcecac3b737733d646802036dc337aa839a5f06b"}, + {file = "zope_interface-8.1.1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:84f9be6d959640de9da5d14ac1f6a89148b16da766e88db37ed17e936160b0b1"}, + {file = "zope_interface-8.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:531fba91dcb97538f70cf4642a19d6574269460274e3f6004bba6fe684449c51"}, + {file = "zope_interface-8.1.1-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:fc65f5633d5a9583ee8d88d1f5de6b46cd42c62e47757cfe86be36fb7c8c4c9b"}, + {file = "zope_interface-8.1.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efef80ddec4d7d99618ef71bc93b88859248075ca2e1ae1c78636654d3d55533"}, + {file = "zope_interface-8.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:49aad83525eca3b4747ef51117d302e891f0042b06f32aa1c7023c62642f962b"}, + {file = "zope_interface-8.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:71cf329a21f98cb2bd9077340a589e316ac8a415cac900575a32544b3dffcb98"}, + {file = "zope_interface-8.1.1-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:da311e9d253991ca327601f47c4644d72359bac6950fbb22f971b24cd7850f8c"}, + {file = "zope_interface-8.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3fb25fca0442c7fb93c4ee40b42e3e033fef2f648730c4b7ae6d43222a3e8946"}, + {file = "zope_interface-8.1.1-cp314-cp314-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:bac588d0742b4e35efb7c7df1dacc0397b51ed37a17d4169a38019a1cebacf0a"}, + {file = "zope_interface-8.1.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3d1f053d2d5e2b393e619bce1e55954885c2e63969159aa521839e719442db49"}, + {file = "zope_interface-8.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64a1ad7f4cb17d948c6bdc525a1d60c0e567b2526feb4fa38b38f249961306b8"}, + {file = "zope_interface-8.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:169214da1b82b7695d1a36f92d70b11166d66b6b09d03df35d150cc62ac52276"}, + {file = "zope_interface-8.1.1.tar.gz", hash = "sha256:51b10e6e8e238d719636a401f44f1e366146912407b58453936b781a19be19ec"}, +] + +[package.extras] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.13" +content-hash = 
"79174f4fab65f960be5d4f125f08b5ba60f2770b9dcc4b2467791cd0ff7adc83" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000..ab1033b --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..2e6c50b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[tool.poetry] +name = "nexus-5" +version = "0.1.0" +description = "" +authors = ["damien "] +package-mode = false + +[tool.poetry.dependencies] +python = "^3.13" +django = "^6.0" +celery = "^5.6.0" +strawberry-graphql-django = "^0.70.1" +channels = "^4.3.2" +daphne = "^4.2.1" +django-cors-headers = "^4.9.0" +python-dotenv = "^1.2.1" +whitenoise = "^6.11.0" +django-choices-field = "^3.1.1" +channels-valkey = "^0.3.0" +psycopg2-binary = "^2.9.11" +hvac = "^2.4.0" +django-valkey = "^0.2.0" +valkey = "^6.0.0" +redis = "^5.0.0" # Required by Celery's Redis/Sentinel backends +pillow = "^12.0.0" +pillow-heif = "^1.1.1" +djangorestframework = "^3.15.2" +ffmpeg-python = "^0.2.0" +requests = "^2.32.5" +celery-types = "^0.23.0" +django-storages = {extras = ["s3"], version = "^1.14"} +boto3 = "^1.35" +mcp = "^1.0.0" +httpx = "^0.28.0" +anthropic = "^0.75.0" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/setup.sh b/setup.sh new file mode 100644 index 0000000..69e650a --- /dev/null +++ b/setup.sh @@ -0,0 +1,51 @@ +#!/bin/bash +set -euo pipefail + +echo "--- RUNNING ONE-TIME SETUP (AGENT-POWERED) ---" + +# The entrypoint.sh has already fetched all credentials +# and exported them as environment variables. +# We just need to run the Django commands. + +# 1) Run migrations +# (Assumes settings.py's 'admin' db reads from env vars) +echo "=> Applying admin DB credentials and running migrations..." +poetry run python manage.py migrate --database=admin + +# 2) Fix table ownership for Vault +# (Assumes settings.py's 'admin' db reads from env vars) +echo "=> Fixing table ownership for Vault..." +poetry run python manage.py shell -c " +from django.db import connections +import sys + +print('Connecting to admin DB to fix table ownership...') +try: + admin_connection = connections['admin'] + with admin_connection.cursor() as cursor: + # Get all tables and change ownership + cursor.execute(\"\"\" + SELECT tablename FROM pg_tables WHERE schemaname = 'public' + \"\"\") + tables = cursor.fetchall() + for (table,) in tables: + print(f'Setting owner of table {table} to nexus5_owner...') + cursor.execute(f'ALTER TABLE public.{table} OWNER TO nexus5_owner;') + + # Get all sequences and change ownership + cursor.execute(\"\"\" + SELECT sequencename FROM pg_sequences WHERE schemaname = 'public' + \"\"\") + sequences = cursor.fetchall() + for (sequence,) in sequences: + print(f'Setting owner of sequence {sequence} to nexus5_owner...') + cursor.execute(f'ALTER SEQUENCE public.{sequence} OWNER TO nexus5_owner;') + + print('Table ownership fixed.') +except Exception as e: + print(f'Error fixing table ownership: {e}') + sys.exit(1) +" + +# Steps 1 and 4 from your old script are GONE. +echo '=> Setup complete.' 
\ No newline at end of file
diff --git a/vault/db-admin-template.hcl b/vault/db-admin-template.hcl
new file mode 100644
index 0000000..bebe552
--- /dev/null
+++ b/vault/db-admin-template.hcl
@@ -0,0 +1,4 @@
+{{ with secret "secret/data/nexus-5/admin-db" }}
+export DB_ADMIN_USER="{{ .Data.data.username }}"
+export DB_ADMIN_PASSWORD="{{ .Data.data.password }}"
+{{ end }}
\ No newline at end of file
diff --git a/vault/db-app-template.hcl b/vault/db-app-template.hcl
new file mode 100644
index 0000000..b08f1b4
--- /dev/null
+++ b/vault/db-app-template.hcl
@@ -0,0 +1,4 @@
+{{ with secret "database/creds/nexus5-app" }}
+export DB_USER="{{ .Data.username }}"
+export DB_PASSWORD="{{ .Data.password }}"
+{{ end }}
\ No newline at end of file
diff --git a/vault/vault-agent-config.hcl b/vault/vault-agent-config.hcl
new file mode 100644
index 0000000..5f3b38b
--- /dev/null
+++ b/vault/vault-agent-config.hcl
@@ -0,0 +1,25 @@
+pid_file = "/tmp/vault.pid"
+exit_after_auth = false # Keep running to renew leases
+vault {
+  address = "http://10.10.10.20:8200"
+}
+auto_auth {
+  method "approle" {
+    mount_path = "auth/approle"
+    config = {
+      role_id_file_path = "/vault/secrets/role_id"
+      secret_id_file_path = "/vault/secrets/secret_id"
+      remove_secret_id_file_after_read = true
+    }
+  }
+}
+template {
+  source = "/etc/vault/admin-template.hcl"
+  destination = "/vault/secrets/admin.env"
+  command = "touch /vault/secrets/.admin-ready"
+}
+template {
+  source = "/etc/vault/app-template.hcl"
+  destination = "/vault/secrets/app.env"
+  command = "touch /vault/secrets/.app-ready"
+}
\ No newline at end of file
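---

Note on how the pieces above fit together: the Vault agent renders the two templates into /vault/secrets/admin.env and /vault/secrets/app.env as shell `export` lines, and setup.sh assumes the entrypoint has sourced those files before Django runs. A minimal sketch of the settings-side consumption, assuming the credentials are simply read back out of the process environment — the helper name and the `update()` calls are illustrative, not part of this patch:

import os

# Sketch: pull the Vault-rendered credentials out of the environment.
# Assumes entrypoint.sh has already sourced /vault/secrets/app.env
# (DB_USER/DB_PASSWORD) and /vault/secrets/admin.env
# (DB_ADMIN_USER/DB_ADMIN_PASSWORD) before this module is imported.
def db_credentials(prefix="DB"):
    return {
        "USER": os.environ[f"{prefix}_USER"],
        "PASSWORD": os.environ[f"{prefix}_PASSWORD"],
    }

# e.g. in config/settings.py:
#   DATABASES["default"].update(db_credentials())
#   DATABASES["admin"].update(db_credentials("DB_ADMIN"))

Because the app-side credentials come from Vault's database secrets engine (database/creds/nexus5-app), they are short-lived leases; the agent's `exit_after_auth = false` keeps it running to renew them, and the ownership fix in setup.sh exists so that objects created by migrations remain usable by subsequently issued dynamic roles.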