From a909591f7365ed18e3e3d03272e4342e88fd25be Mon Sep 17 00:00:00 2001 From: Andrew Dickinson Date: Tue, 8 Oct 2024 13:12:56 -0400 Subject: [PATCH 1/6] Fix: import script creates empty "husk" members (#633) --- src/meshdb/utils/spreadsheet_import/main.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/meshdb/utils/spreadsheet_import/main.py b/src/meshdb/utils/spreadsheet_import/main.py index f0644e90..efdec44e 100644 --- a/src/meshdb/utils/spreadsheet_import/main.py +++ b/src/meshdb/utils/spreadsheet_import/main.py @@ -105,6 +105,17 @@ def main(): building = get_or_create_building(row, addr_parser, dropped_modifications.append) if not building: skipped[row.id] = "Unable to parse address" + if ( + new + and not member.all_email_addresses + and not member.all_phone_numbers + and not member.name + and not member.notes + ): + # If this member object stores no contact information, and is not going to be + # used for an install because of an invalid address, remove the member object + # to avoid cluttering the DB with "husk" members that are entirely blank + member.delete() continue node = get_or_create_node(row) From 7150786ddbd4b7fe84ad041ae47a0d76b6b596bf Mon Sep 17 00:00:00 2001 From: Andrew Dickinson Date: Tue, 8 Oct 2024 13:19:17 -0400 Subject: [PATCH 2/6] Fix: join form endpoint crashes due to custom serializer bug (#627) * Fix: join form endpoint crashes due to custom serializer bug * Formatting --- .../nested_key_object_related_field.py | 12 ++++++++- src/meshapi/tests/test_slack_notification.py | 26 +++++++++++++++++-- 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/src/meshapi/serializers/nested_key_object_related_field.py b/src/meshapi/serializers/nested_key_object_related_field.py index 32e7c3c9..69b8ca5c 100644 --- a/src/meshapi/serializers/nested_key_object_related_field.py +++ b/src/meshapi/serializers/nested_key_object_related_field.py @@ -1,5 +1,6 @@ import typing from typing import Any, Dict, Tuple, cast +from uuid import UUID from django.core.exceptions import ObjectDoesNotExist from django.db.models import Model @@ -42,7 +43,16 @@ def _get_key_fields(self) -> Tuple[str, ...]: return ("id",) + self.additional_keys def to_representation(self, value: Model) -> dict[str, Any]: - return {key: getattr(value, key) for key in self._get_key_fields()} + output = {} + for key in self._get_key_fields(): + output[key] = getattr(value, key) + + # Convert UUID objects to str so that the resulting data + # is trivially JSON serializable + if isinstance(output[key], UUID): + output[key] = str(output[key]) + + return output def to_internal_value(self, data: dict) -> Model: queryset = self.get_queryset() diff --git a/src/meshapi/tests/test_slack_notification.py b/src/meshapi/tests/test_slack_notification.py index 1f3789ea..34190b53 100644 --- a/src/meshapi/tests/test_slack_notification.py +++ b/src/meshapi/tests/test_slack_notification.py @@ -6,12 +6,26 @@ from django.test import RequestFactory, TestCase from requests import RequestException -from meshapi.models import Member +from meshapi.models import Building, Install, Member from meshapi.serializers import MemberSerializer +from meshapi.tests.sample_data import sample_building, sample_install, sample_member from meshapi.util.admin_notifications import notify_administrators_of_data_issue class TestSlackNotification(TestCase): + def setUp(self): + self.sample_install_copy = sample_install.copy() + self.building_1 = Building(**sample_building) + self.building_1.save() + self.sample_install_copy["building"] = 
self.building_1 + + self.sample_member = Member(**sample_member) + self.sample_member.save() + self.sample_install_copy["member"] = self.sample_member + + self.install = Install(**self.sample_install_copy) + self.install.save() + @requests_mock.Mocker() @patch("meshapi.util.admin_notifications.SLACK_ADMIN_NOTIFICATIONS_WEBHOOK_URL", "https://mock-slack-url") def test_slack_notification_for_name_change(self, requests_mocker): @@ -23,6 +37,9 @@ def test_slack_notification_for_name_change(self, requests_mocker): ) member.save() + self.install.member = member + self.install.save() + rf = RequestFactory() mock_join_form_request = rf.post("https://mock-meshdb-url.example/join-form/") @@ -56,7 +73,12 @@ def test_slack_notification_for_name_change(self, requests_mocker): ' "all_phone_numbers": [\n' ' "+1 212-555-5555"\n' " ],\n" - ' "installs": [],\n' + ' "installs": [\n' + " {\n" + f' "id": "{self.install.id}",\n' + f' "install_number": {self.install.install_number}\n' + " }\n" + " ],\n" ' "name": "Stacy Maidenname",\n' ' "primary_email_address": "stacy@example.com",\n' ' "stripe_email_address": null,\n' From 89e90cb5daadc6b53c91821def4f3fbf6911b6ae Mon Sep 17 00:00:00 2001 From: Andrew Dickinson Date: Tue, 8 Oct 2024 13:26:17 -0400 Subject: [PATCH 3/6] Fix: map render crash due to Nodes without installs (#628) * Fix: map render crash due to Nodes without installs * Fix: nodes without buildings causes map render crash --- src/meshapi/tests/test_map_endpoints.py | 80 +++++++++++++++++++++++++ src/meshapi/views/map.py | 36 ++++++++--- 2 files changed, 108 insertions(+), 8 deletions(-) diff --git a/src/meshapi/tests/test_map_endpoints.py b/src/meshapi/tests/test_map_endpoints.py index db74d5cb..7e506d2f 100644 --- a/src/meshapi/tests/test_map_endpoints.py +++ b/src/meshapi/tests/test_map_endpoints.py @@ -325,6 +325,54 @@ def test_install_data(self): ) ) + nodes.append( + Node( + network_number=9823, + status=Node.NodeStatus.ACTIVE, + latitude=40.724868, + longitude=-73.987881, + ) + ) + installs.append( + Install( + install_number=12381924, + status=Install.InstallStatus.PENDING, + request_date=datetime.date(2024, 1, 27), + roof_access=True, + building=buildings[-1], + node=nodes[-1], + member=member, + ) + ) + + nodes.append( + Node( + network_number=9821, + status=Node.NodeStatus.ACTIVE, + latitude=40.724868, + longitude=-73.987881, + ) + ) + + buildings.append( + Building( + address_truth_sources=[], + latitude=40.6962265, + longitude=-73.9917741, + altitude=66, + primary_node=nodes[-1], + ) + ) + + nodes.append( + Node( + network_number=9820, + status=Node.NodeStatus.ACTIVE, + latitude=40.724868, + longitude=-73.987881, + ) + ) + for node in nodes: node.save() @@ -391,6 +439,30 @@ def test_install_data(self): "roofAccess": True, "panoramas": [], }, + { + "coordinates": [-73.987881, 40.724868, None], + "id": 9820, + "panoramas": [], + "requestDate": None, + "roofAccess": True, + "status": "NN assigned", + }, + { + "coordinates": [-73.987881, 40.724868, None], + "id": 9821, + "panoramas": [], + "requestDate": None, + "roofAccess": True, + "status": "NN assigned", + }, + { + "coordinates": [-73.987881, 40.724868, None], + "id": 9823, + "panoramas": [], + "requestDate": 1706331600000, + "roofAccess": True, + "status": "NN assigned", + }, { "id": 9999, "status": "Installed", @@ -438,6 +510,14 @@ def test_install_data(self): "panoramas": [], "roofAccess": True, }, + { + "coordinates": [-73.9917741, 40.6962265, 66.0], + "id": 12381924, + "panoramas": [], + "requestDate": 1706331600000, + "roofAccess": 
True, + "status": "Interested", + }, { "id": 1123456, "name": "Northwest AP", diff --git a/src/meshapi/views/map.py b/src/meshapi/views/map.py index c012b364..228c7019 100644 --- a/src/meshapi/views/map.py +++ b/src/meshapi/views/map.py @@ -12,7 +12,7 @@ from rest_framework.response import Response from rest_framework.views import APIView -from meshapi.models import LOS, AccessPoint, Device, Install, Link, Node, Sector +from meshapi.models import LOS, AccessPoint, Building, Device, Install, Link, Node, Sector from meshapi.serializers import ( EXCLUDED_INSTALL_STATUSES, MapDataInstallSerializer, @@ -71,6 +71,7 @@ def get_queryset(self) -> List[Install]: # type: ignore[override] Node.objects.filter(~Q(status=Node.NodeStatus.INACTIVE)) .prefetch_related("devices") .prefetch_related("installs") + .prefetch_related("buildings") .prefetch_related( Prefetch( "installs", @@ -89,10 +90,27 @@ def get_queryset(self) -> List[Install]: # type: ignore[override] if node.network_number and node.network_number not in covered_nns: # Arbitrarily pick a representative install for the details of the "Fake" node, # preferring active installs if possible - representative_install = ( - node.active_installs # type: ignore[attr-defined] - or node.prefetched_installs # type: ignore[attr-defined] - )[0] + try: + representative_install = ( + node.active_installs # type: ignore[attr-defined] + or node.prefetched_installs # type: ignore[attr-defined] + )[0] + except IndexError: + representative_install = None + + if representative_install: + building = representative_install.building + else: + building = node.buildings.first() + + if not building: + # If we couldn't get a building from the install or node, + # make a faux one instead, to carry the lat/lon info into the serializer + building = Building( + latitude=node.latitude, + longitude=node.longitude, + altitude=node.altitude, + ) all_installs.append( Install( @@ -101,9 +119,11 @@ def get_queryset(self) -> List[Install]: # type: ignore[override] status=Install.InstallStatus.NN_REASSIGNED if node.status == node.NodeStatus.ACTIVE else Install.InstallStatus.REQUEST_RECEIVED, - building=representative_install.building, - request_date=representative_install.request_date, - roof_access=representative_install.roof_access, + building=building, + request_date=representative_install.request_date + if representative_install + else node.install_date, + roof_access=representative_install.roof_access if representative_install else True, ), ) covered_nns.add(node.network_number) From cc5f067f5fd83441f4b3d4876c9711c4bdf6b786 Mon Sep 17 00:00:00 2001 From: james-otten Date: Tue, 8 Oct 2024 23:46:02 -0400 Subject: [PATCH 4/6] Remove pgadmin (#585) --- .env.sample | 4 - .github/workflows/deploy-to-k8s.yaml | 3 - docker-compose.yaml | 10 --- infra/README.md | 2 - infra/helm/meshdb/charts/pgadmin/.helmignore | 23 ----- infra/helm/meshdb/charts/pgadmin/Chart.yaml | 24 ----- .../charts/pgadmin/templates/_helpers.tpl | 62 ------------- .../charts/pgadmin/templates/ingress.yaml | 45 ---------- .../charts/pgadmin/templates/pg_admin.yaml | 76 ---------------- infra/helm/meshdb/charts/pgadmin/values.yaml | 89 ------------------- infra/helm/meshdb/templates/configmap.yaml | 1 - .../meshdb/templates/pg_admin_secrets.yaml | 9 -- infra/helm/meshdb/templates/service.yaml | 16 ---- infra/helm/meshdb/values.yaml | 3 - src/meshdb/settings.py | 1 - 15 files changed, 368 deletions(-) delete mode 100644 infra/helm/meshdb/charts/pgadmin/.helmignore delete mode 100644 
infra/helm/meshdb/charts/pgadmin/Chart.yaml delete mode 100644 infra/helm/meshdb/charts/pgadmin/templates/_helpers.tpl delete mode 100644 infra/helm/meshdb/charts/pgadmin/templates/ingress.yaml delete mode 100644 infra/helm/meshdb/charts/pgadmin/templates/pg_admin.yaml delete mode 100644 infra/helm/meshdb/charts/pgadmin/values.yaml delete mode 100644 infra/helm/meshdb/templates/pg_admin_secrets.yaml diff --git a/.env.sample b/.env.sample index 01a0dd7a..201df04f 100644 --- a/.env.sample +++ b/.env.sample @@ -15,9 +15,6 @@ SMTP_PORT= SMTP_USER= SMTP_PASSWORD= -PGADMIN_EMAIL=admin@nycmesh.net -PGADMIN_PASSWORD=localdev - # Backups AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= @@ -61,7 +58,6 @@ UISP_PASS= ADMIN_MAP_BASE_URL=http://devadminmap.mesh.nycmesh.net MAP_BASE_URL=https://devmap.mesh.nycmesh.net LOS_URL=https://devlos.mesh.nycmesh.net -PG_ADMIN_URL=/pgadmin/ FORMS_URL=https://devforms.mesh.nycmesh.net OSTICKET_URL=https://support.nycmesh.net diff --git a/.github/workflows/deploy-to-k8s.yaml b/.github/workflows/deploy-to-k8s.yaml index 7954bbff..ccd4de62 100644 --- a/.github/workflows/deploy-to-k8s.yaml +++ b/.github/workflows/deploy-to-k8s.yaml @@ -57,9 +57,6 @@ jobs: --set meshweb.nn_assign_psk="${{ secrets.NN_ASSIGN_PSK }}" \ --set meshweb.query_psk="${{ secrets.QUERY_PSK }}" \ --set meshweb.pano_github_token="${{ secrets.GH_TOKEN }}" \ - --set pgadmin.default_email="${{ secrets.PGADMIN_EMAIL }}" \ - --set pgadmin.default_password="${{ secrets.PGADMIN_PASSWORD }}" \ - --set global.pgadmin.fqdn="${{ vars.PGADMIN_FQDN }}" \ --set uisp.url="${{ secrets.UISP_URL }}" \ --set uisp.user="${{ secrets.UISP_USER }}" \ --set uisp.psk="${{ secrets.UISP_PSK }}" \ diff --git a/docker-compose.yaml b/docker-compose.yaml index 993677e5..50db2977 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -33,16 +33,6 @@ services: ports: - 6379:6379 - pgadmin: - networks: - - api - expose: - - 443 - image: dpage/pgadmin4:8.9 - environment: - PGADMIN_DEFAULT_EMAIL: ${PGADMIN_EMAIL} - PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_PASSWORD} - pelias: networks: - api diff --git a/infra/README.md b/infra/README.md index 6a35dea3..d4f988ac 100644 --- a/infra/README.md +++ b/infra/README.md @@ -16,8 +16,6 @@ These instructions will set up a 4 node k3s cluster on proxmox. | `DJANGO_SECRET_KEY` | Django secret key | | `GH_TOKEN` | Github token for pulling down panoramas | | `NN_ASSIGN_PSK` | Legacy node number assign password | -| `PGADMIN_EMAIL` | Default username for pgadmin | -| `PGADMIN_PASSWORD` | Default password for pgadmin | | `PG_PASSWORD` | meshdb postgres database password | | `PROJECT_PATH` | Absolute file system path to the clone of meshdb, likely `/root/meshdb` | | `QUERY_PSK` | Legacy query password | diff --git a/infra/helm/meshdb/charts/pgadmin/.helmignore b/infra/helm/meshdb/charts/pgadmin/.helmignore deleted file mode 100644 index 0e8a0eb3..00000000 --- a/infra/helm/meshdb/charts/pgadmin/.helmignore +++ /dev/null @@ -1,23 +0,0 @@ -# Patterns to ignore when building packages. -# This supports shell glob matching, relative path matching, and -# negation (prefixed with !). Only one pattern per line. 
-.DS_Store -# Common VCS dirs -.git/ -.gitignore -.bzr/ -.bzrignore -.hg/ -.hgignore -.svn/ -# Common backup files -*.swp -*.bak -*.tmp -*.orig -*~ -# Various IDEs -.project -.idea/ -*.tmproj -.vscode/ diff --git a/infra/helm/meshdb/charts/pgadmin/Chart.yaml b/infra/helm/meshdb/charts/pgadmin/Chart.yaml deleted file mode 100644 index 4fd722d0..00000000 --- a/infra/helm/meshdb/charts/pgadmin/Chart.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: v2 -name: pgadmin -description: A Helm chart for Kubernetes - -# A chart can be either an 'application' or a 'library' chart. -# -# Application charts are a collection of templates that can be packaged into versioned archives -# to be deployed. -# -# Library charts provide useful utilities or functions for the chart developer. They're included as -# a dependency of application charts to inject those utilities and functions into the rendering -# pipeline. Library charts do not define any templates and therefore cannot be deployed. -type: application - -# This is the chart version. This version number should be incremented each time you make changes -# to the chart and its templates, including the app version. -# Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.0 - -# This is the version number of the application being deployed. This version number should be -# incremented each time you make changes to the application. Versions are not expected to -# follow Semantic Versioning. They should reflect the version the application is using. -# It is recommended to use it with quotes. -appVersion: "1.16.0" diff --git a/infra/helm/meshdb/charts/pgadmin/templates/_helpers.tpl b/infra/helm/meshdb/charts/pgadmin/templates/_helpers.tpl deleted file mode 100644 index 4b9c4abf..00000000 --- a/infra/helm/meshdb/charts/pgadmin/templates/_helpers.tpl +++ /dev/null @@ -1,62 +0,0 @@ -{{/* -Expand the name of the chart. -*/}} -{{- define "pgadmin.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "pgadmin.fullname" -}} -{{- if .Values.fullnameOverride }} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- $name := default .Chart.Name .Values.nameOverride }} -{{- if contains $name .Release.Name }} -{{- .Release.Name | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} -{{- end }} -{{- end }} -{{- end }} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "pgadmin.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Common labels -*/}} -{{- define "pgadmin.labels" -}} -helm.sh/chart: {{ include "pgadmin.chart" . }} -{{ include "pgadmin.selectorLabels" . }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} -app.kubernetes.io/managed-by: {{ .Release.Service }} -{{- end }} - -{{/* -Selector labels -*/}} -{{- define "pgadmin.selectorLabels" -}} -app.kubernetes.io/name: {{ include "pgadmin.name" . 
}} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- end }} - -{{/* -Create the name of the service account to use -*/}} -{{- define "pgadmin.serviceAccountName" -}} -{{- if .Values.serviceAccount.create }} -{{- default (include "pgadmin.fullname" .) .Values.serviceAccount.name }} -{{- else }} -{{- default "default" .Values.serviceAccount.name }} -{{- end }} -{{- end }} diff --git a/infra/helm/meshdb/charts/pgadmin/templates/ingress.yaml b/infra/helm/meshdb/charts/pgadmin/templates/ingress.yaml deleted file mode 100644 index feec1cc6..00000000 --- a/infra/helm/meshdb/charts/pgadmin/templates/ingress.yaml +++ /dev/null @@ -1,45 +0,0 @@ -{{- if .Values.ingress.enabled -}} -{{- $fullName := include "pgadmin.fullname" . -}} -{{- $svcPort := .Values.pgadmin.port -}} -{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} - {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} - {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} - {{- end }} -{{- end }} -{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} -apiVersion: networking.k8s.io/v1 -{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} -apiVersion: networking.k8s.io/v1beta1 -{{- else -}} -apiVersion: extensions/v1beta1 -{{- end }} -kind: Ingress -metadata: - name: {{ $fullName }} - labels: - {{- include "pgadmin.labels" . | nindent 4 }} - {{- with .Values.ingress.annotations }} - annotations: - {{- toYaml . | nindent 4 }} - {{- end }} -spec: - {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} - ingressClassName: {{ .Values.ingress.className }} - {{- end }} - rules: - - host: {{ .Values.global.pgadmin.fqdn | quote }} - http: - paths: - - path: / - pathType: Prefix - backend: - {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} - service: - name: {{ include "pgadmin.fullname" . }} - port: - number: {{ $svcPort }} - {{- else }} - serviceName: {{ include "pgadmin.fullname" . }} - servicePort: {{ $svcPort }} - {{- end }} -{{- end }} diff --git a/infra/helm/meshdb/charts/pgadmin/templates/pg_admin.yaml b/infra/helm/meshdb/charts/pgadmin/templates/pg_admin.yaml deleted file mode 100644 index b6fa1d07..00000000 --- a/infra/helm/meshdb/charts/pgadmin/templates/pg_admin.yaml +++ /dev/null @@ -1,76 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "pgadmin.fullname" . }}-pgadmin - namespace: {{ .Values.pgadmin_app_namespace }} - labels: - {{- include "pgadmin.labels" . | nindent 4 }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - {{- include "pgadmin.selectorLabels" . | nindent 6 }} - template: - metadata: - {{- with .Values.podAnnotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - app: meshdb-pgadmin-app - {{- include "pgadmin.labels" . | nindent 8 }} - {{- with .Values.podLabels }} - {{- toYaml . 
| nindent 8 }} - {{- end }} - spec: - securityContext: - {{- toYaml .Values.pgadmin.podSecurityContext | nindent 8 }} - containers: - - name: {{ .Chart.Name }}-pgadmin - securityContext: - {{- toYaml .Values.pgadmin.securityContext | nindent 12 }} - image: "{{ .Values.pgadmin.image.repository }}:{{ .Values.pgadmin.image.tag }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - ports: - - name: http - containerPort: {{ .Values.pgadmin.port }} - protocol: TCP - env: - - name: PGADMIN_DEFAULT_EMAIL - valueFrom: - secretKeyRef: - name: pgadmin-secrets - key: pgadmin-default-email - - name: PGADMIN_DEFAULT_PASSWORD - valueFrom: - secretKeyRef: - name: pgadmin-secrets - key: pgadmin-default-password - - name: PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION - value: "False" - {{ if eq .Values.pgadmin.liveness_probe "true" }} - livenessProbe: - exec: - command: - - curl - - http://127.0.0.1:{{ .Values.pgadmin.port }} - periodSeconds: 3 - initialDelaySeconds: 4 - timeoutSeconds: 3 - {{ end }} - readinessProbe: - {{- toYaml .Values.readinessProbe | nindent 12 }} - resources: - {{- toYaml .Values.pgadmin.resources | nindent 12 }} - {{- with .Values.pgadmin.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.pgadmin.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.pgadmin.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/infra/helm/meshdb/charts/pgadmin/values.yaml b/infra/helm/meshdb/charts/pgadmin/values.yaml deleted file mode 100644 index a292d7e2..00000000 --- a/infra/helm/meshdb/charts/pgadmin/values.yaml +++ /dev/null @@ -1,89 +0,0 @@ -replicaCount: 0 - -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" - -podAnnotations: {} -podLabels: {} - -podSecurityContext: {} - # fsGroup: 2000 - -securityContext: {} - -pgadmin: - port: 80 - image: - repository: docker.io/dpage/pgadmin4 - tag: 8.9 - liveness_probe: "false" - podSecurityContext: {} - securityContext: {} - resources: {} - nodeSelector: {} - affinity: {} - tolerations: [] - -ingress: - enabled: true - className: "" - annotations: {} - # kubernetes.io/ingress.class: nginx - # kubernetes.io/tls-acme: "true" - hosts: - - host: pgadmin.local # Will be overridden - paths: - - path: / - pathType: Prefix - tls: [] - -resources: {} - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. - # limits: - # cpu: 100m - # memory: 128Mi - # requests: - # cpu: 100m - # memory: 128Mi - -livenessProbe: - httpGet: - path: / - port: http -readinessProbe: - httpGet: - path: / - port: http - -image: - pullPolicy: Always - -autoscaling: - enabled: false - minReplicas: 1 - maxReplicas: 100 - targetCPUUtilizationPercentage: 80 - # targetMemoryUtilizationPercentage: 80 - -# Additional volumes on the output Deployment definition. -volumes: [] -# - name: foo -# secret: -# secretName: mysecret -# optional: false - -# Additional volumeMounts on the output Deployment definition. 
-volumeMounts: [] -# - name: foo -# mountPath: "/etc/foo" -# readOnly: true - -nodeSelector: {} - -tolerations: [] - -affinity: {} diff --git a/infra/helm/meshdb/templates/configmap.yaml b/infra/helm/meshdb/templates/configmap.yaml index f565986a..dea8b740 100644 --- a/infra/helm/meshdb/templates/configmap.yaml +++ b/infra/helm/meshdb/templates/configmap.yaml @@ -33,7 +33,6 @@ data: ADMIN_MAP_BASE_URL: {{ .Values.adminmap.base_url | quote }} MAP_BASE_URL: {{ .Values.map.base_url | quote }} LOS_URL: {{ .Values.meshweb.los_url | quote }} - PG_ADMIN_URL: https://{{ .Values.global.pgadmin.fqdn }} FORMS_URL: {{ .Values.meshweb.forms_url | quote }} SITE_BASE_URL: {{ .Values.meshdb.site_base_url | quote }} diff --git a/infra/helm/meshdb/templates/pg_admin_secrets.yaml b/infra/helm/meshdb/templates/pg_admin_secrets.yaml deleted file mode 100644 index 9b0acf33..00000000 --- a/infra/helm/meshdb/templates/pg_admin_secrets.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: pgadmin-secrets - namespace: {{ .Values.meshdb_app_namespace }} -type: Opaque -data: - pgadmin-default-email: {{ .Values.pgadmin.default_email | b64enc | quote }} - pgadmin-default-password: {{ .Values.pgadmin.default_password | b64enc | quote }} diff --git a/infra/helm/meshdb/templates/service.yaml b/infra/helm/meshdb/templates/service.yaml index 843b7982..de06cc8d 100644 --- a/infra/helm/meshdb/templates/service.yaml +++ b/infra/helm/meshdb/templates/service.yaml @@ -77,19 +77,3 @@ spec: name: meshweb-service selector: app: meshdb-meshweb-app ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "meshdb.fullname" . }}-pgadmin - namespace: {{ .Values.meshdb_app_namespace }} - labels: - {{- include "meshdb.labels" . | nindent 4 }} -spec: - ports: - - port: {{ .Values.pgadmin.port }} - targetPort: {{ .Values.pgadmin.port }} - protocol: TCP - name: pgadmin-service - selector: - app: meshdb-pgadmin-app diff --git a/infra/helm/meshdb/values.yaml b/infra/helm/meshdb/values.yaml index 1e6c4c4c..ae535868 100644 --- a/infra/helm/meshdb/values.yaml +++ b/infra/helm/meshdb/values.yaml @@ -52,9 +52,6 @@ meshweb: affinity: {} tolerations: [] -pgadmin: - port: 80 - nginx: port: 80 podSecurityContext: {} diff --git a/src/meshdb/settings.py b/src/meshdb/settings.py index fa7051a4..01010fef 100644 --- a/src/meshdb/settings.py +++ b/src/meshdb/settings.py @@ -52,7 +52,6 @@ LOS_URL = os.environ.get("LOS_URL", "https://devlos.mesh.nycmesh.net") MAP_URL = os.environ.get("MAP_BASE_URL", "https://devmap.mesh.nycmesh.net") -PG_ADMIN_URL = os.environ.get("PG_ADMIN_URL", "/pgadmin/") FORMS_URL = os.environ.get("FORMS_URL", "https://devforms.mesh.nycmesh.net") # SMTP Config for password reset emails From 6b2f203bee6faf65a75800714a38d4b2acb6e3a9 Mon Sep 17 00:00:00 2001 From: Andrew Dickinson Date: Sat, 12 Oct 2024 13:13:56 -0400 Subject: [PATCH 5/6] Add install-create event handlers (#608) * Add notifications on install create for slack and osticket * Fix: admin UI redirects aren't followed correctly * Formatting * Fix url typo * Move address one-line helper to Building * Gate integrations behind feature flags * Fix warning in flag decorator * Add vars for "Add install-create event handlers" (#617) * add vars * Add new ticket endpoint variable --------- Co-authored-by: Andrew Dickinson * Add tests for install event hooks * Formatting * Fix failing test * Add slack webhook retries & tests * Revert "Fix: admin UI redirects aren't followed correctly" This reverts commit 41d738bb25fcf950ac509b49fabd79031c2aceea. 
--------- Co-authored-by: james-otten --- .env.sample | 3 + .github/workflows/deploy-to-k8s.yaml | 3 + infra/helm/meshdb/templates/configmap.yaml | 2 + infra/helm/meshdb/templates/meshweb.yaml | 10 + infra/helm/meshdb/templates/secrets.yaml | 2 + src/meshapi/apps.py | 4 + src/meshapi/models/building.py | 17 ++ src/meshapi/tests/test_building.py | 23 ++ .../tests/test_install_create_signals.py | 205 ++++++++++++++++++ src/meshapi/util/django_flag_decorator.py | 22 ++ src/meshapi/util/events/__init__.py | 2 + .../events/join_requests_slack_channel.py | 54 +++++ src/meshapi/util/events/osticket_creation.py | 101 +++++++++ src/meshdb/settings.py | 2 + 14 files changed, 450 insertions(+) create mode 100644 src/meshapi/tests/test_building.py create mode 100644 src/meshapi/tests/test_install_create_signals.py create mode 100644 src/meshapi/util/django_flag_decorator.py create mode 100644 src/meshapi/util/events/__init__.py create mode 100644 src/meshapi/util/events/join_requests_slack_channel.py create mode 100644 src/meshapi/util/events/osticket_creation.py diff --git a/.env.sample b/.env.sample index 201df04f..9e21ff47 100644 --- a/.env.sample +++ b/.env.sample @@ -61,5 +61,8 @@ LOS_URL=https://devlos.mesh.nycmesh.net FORMS_URL=https://devforms.mesh.nycmesh.net OSTICKET_URL=https://support.nycmesh.net +OSTICKET_API_TOKEN= +OSTICKET_NEW_TICKET_ENDPOINT=https://devsupport.nycmesh.net/api/http.php/tickets.json SLACK_ADMIN_NOTIFICATIONS_WEBHOOK_URL= +SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL= diff --git a/.github/workflows/deploy-to-k8s.yaml b/.github/workflows/deploy-to-k8s.yaml index ccd4de62..a5a369a7 100644 --- a/.github/workflows/deploy-to-k8s.yaml +++ b/.github/workflows/deploy-to-k8s.yaml @@ -66,6 +66,9 @@ jobs: --set meshweb.forms_url="${{ vars.FORMS_URL }}" \ --set meshdb.site_base_url="${{ vars.SITE_BASE_URL }}" \ --set meshweb.slack_webhook="${{ secrets.SLACK_ADMIN_NOTIFICATIONS_WEBHOOK_URL }}" \ + --set meshweb.slack_join_webhook="${{ secrets.SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL }}" \ + --set meshweb.osticket_api_token="${{ secrets.OSTICKET_API_TOKEN }}" \ + --set meshweb.osticket_new_ticket_endpoint="${{ vars.OSTICKET_NEW_TICKET_ENDPOINT }}" \ --set meshweb.environment="${{ inputs.environment }}" \ --set ingress.hosts[0].host="${{ vars.INGRESS_HOST }}",ingress.hosts[0].paths[0].path=/,ingress.hosts[0].paths[0].pathType=Prefix \ --set ingress.hosts[1].host="${{ vars.INGRESS_HOST_LEGACY }}",ingress.hosts[1].paths[0].path=/,ingress.hosts[1].paths[0].pathType=Prefix diff --git a/infra/helm/meshdb/templates/configmap.yaml b/infra/helm/meshdb/templates/configmap.yaml index dea8b740..6a689c69 100644 --- a/infra/helm/meshdb/templates/configmap.yaml +++ b/infra/helm/meshdb/templates/configmap.yaml @@ -36,3 +36,5 @@ data: FORMS_URL: {{ .Values.meshweb.forms_url | quote }} SITE_BASE_URL: {{ .Values.meshdb.site_base_url | quote }} + + OSTICKET_NEW_TICKET_ENDPOINT: {{ .Values.meshweb.osticket_new_ticket_endpoint | quote }} \ No newline at end of file diff --git a/infra/helm/meshdb/templates/meshweb.yaml b/infra/helm/meshdb/templates/meshweb.yaml index 8a3c1174..20171d4b 100644 --- a/infra/helm/meshdb/templates/meshweb.yaml +++ b/infra/helm/meshdb/templates/meshweb.yaml @@ -95,6 +95,16 @@ spec: secretKeyRef: name: meshdb-secrets key: slack-webhook + - name: SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL + valueFrom: + secretKeyRef: + name: meshdb-secrets + key: slack-join-webhook + - name: OSTICKET_API_TOKEN + valueFrom: + secretKeyRef: + name: meshdb-secrets + key: osticket-api-token volumeMounts: - 
name: static-content-vol
              mountPath: /opt/meshdb/static
diff --git a/infra/helm/meshdb/templates/secrets.yaml b/infra/helm/meshdb/templates/secrets.yaml
index 30f88b6b..3b51d6cd 100644
--- a/infra/helm/meshdb/templates/secrets.yaml
+++ b/infra/helm/meshdb/templates/secrets.yaml
@@ -16,3 +16,5 @@ data:
   uisp-pass: {{ .Values.uisp.psk | b64enc | quote }}
   pano-github-token: {{ .Values.meshweb.pano_github_token | b64enc | quote }}
   slack-webhook: {{ .Values.meshweb.slack_webhook | b64enc | quote }}
+  slack-join-webhook: {{ .Values.meshweb.slack_join_webhook | b64enc | quote }}
+  osticket-api-token: {{ .Values.meshweb.osticket_api_token | b64enc | quote }}
diff --git a/src/meshapi/apps.py b/src/meshapi/apps.py
index 91a6c4ed..d7b5909d 100644
--- a/src/meshapi/apps.py
+++ b/src/meshapi/apps.py
@@ -4,3 +4,7 @@
 class MeshapiConfig(AppConfig):
     default_auto_field = "django.db.models.BigAutoField"
     name = "meshapi"
+
+    def ready(self) -> None:
+        # Implicitly connect signal handlers decorated with @receiver.
+        from meshapi.util import events  # noqa: F401
diff --git a/src/meshapi/models/building.py b/src/meshapi/models/building.py
index c4a8a6df..db34161d 100644
--- a/src/meshapi/models/building.py
+++ b/src/meshapi/models/building.py
@@ -112,6 +112,23 @@ def save(self, *args: Any, **kwargs: Any) -> None:
         if self.primary_node and self.primary_node not in self.nodes.all():
             self.nodes.add(self.primary_node)
 
+    @property
+    def one_line_complete_address(self) -> str:
+        addr_components = []
+        if self.street_address:
+            addr_components.append(self.street_address)
+        if self.city or self.state:
+            city_state = []
+            if self.city:
+                city_state.append(self.city)
+            if self.state:
+                city_state.append(self.state)
+            addr_components.append(" ".join(city_state))
+        if self.zip_code:
+            addr_components.append(self.zip_code)
+
+        return ", ".join(addr_components)
+
     def __str__(self) -> str:
         if self.street_address:
             addr_str = str(self.street_address)
diff --git a/src/meshapi/tests/test_building.py b/src/meshapi/tests/test_building.py
new file mode 100644
index 00000000..c3efb89c
--- /dev/null
+++ b/src/meshapi/tests/test_building.py
@@ -0,0 +1,23 @@
+from django.test import TestCase
+
+from meshapi.models import Building
+
+
+class TestBuilding(TestCase):
+    def test_building_address_single_line_str(self):
+        full_address_building = Building(
+            street_address="123 Chom Street",
+            city="Brooklyn",
+            state="NY",
+            zip_code="12345",
+            latitude=0,
+            longitude=0,
+        )
+        self.assertEqual(full_address_building.one_line_complete_address, "123 Chom Street, Brooklyn NY, 12345")
+
+        limited_address_building = Building(
+            street_address="123 Chom Street",
+            latitude=0,
+            longitude=0,
+        )
+        self.assertEqual(limited_address_building.one_line_complete_address, "123 Chom Street")
diff --git a/src/meshapi/tests/test_install_create_signals.py b/src/meshapi/tests/test_install_create_signals.py
new file mode 100644
index 00000000..1e9df8e6
--- /dev/null
+++ b/src/meshapi/tests/test_install_create_signals.py
@@ -0,0 +1,205 @@
+import json
+from unittest.mock import patch
+
+import requests_mock
+from django.test import TestCase
+from flags.state import disable_flag, enable_flag
+
+from meshapi.models import Building, Install, Member
+from meshapi.tests.sample_data import sample_building, sample_install, sample_member
+
+
+class TestInstallCreateSignals(TestCase):
+    def setUp(self):
+        self.sample_install_copy = sample_install.copy()
+        self.building_1 = Building(**sample_building)
+        self.building_1.save()
+        self.sample_install_copy["building"] = 
self.building_1 + + self.member = Member(**sample_member) + self.member.save() + self.sample_install_copy["member"] = self.member + + self.maxDiff = None + + @requests_mock.Mocker() + def test_no_events_happen_by_default(self, request_mocker): + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual(len(request_mocker.request_history), 0) + + @patch( + "meshapi.util.events.join_requests_slack_channel.SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL", + "http://example.com/test-url", + ) + @requests_mock.Mocker() + def test_constructing_install_triggers_slack_message(self, request_mocker): + request_mocker.post("http://example.com/test-url", text="data") + + enable_flag("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") + disable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual(len(request_mocker.request_history), 1) + self.assertEqual( + request_mocker.request_history[0].url, + "http://example.com/test-url", + ) + self.assertEqual( + json.loads(request_mocker.request_history[0].text), + { + "text": f"**\n" + f"Altitude not found · Roof access · No LoS Data Available" + }, + ) + + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_NEW_TICKET_ENDPOINT", + "http://example.com/test-url", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_API_TOKEN", + "mock-token", + ) + @requests_mock.Mocker() + def test_constructing_install_triggers_osticket(self, request_mocker): + request_mocker.post("http://example.com/test-url", text="00123456", status_code=201) + + disable_flag("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") + enable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual(len(request_mocker.request_history), 1) + self.assertEqual( + request_mocker.request_history[0].url, + "http://example.com/test-url", + ) + self.assertEqual( + json.loads(request_mocker.request_history[0].text), + { + "node": install.install_number, + "userNode": install.install_number, + "email": "john.smith@example.com", + "name": "John Smith", + "subject": f"NYC Mesh {install.install_number} Rooftop Install", + "message": f"date: 2022-02-27\r\nnode: {install.install_number}\r\nname: John Smith\r\nemail: john.smith@example.com\r\nphone: +1 555-555-5555\r\nlocation: 3333 Chom St, Brooklyn NY, 11111\r\nrooftop: Rooftop install\r\nagree to ncl: True", + "phone": "+1 555-555-5555", + "location": "3333 Chom St, Brooklyn NY, 11111", + "rooftop": "Rooftop install", + "ncl": True, + "ip": "*.*.*.*", + "locale": "en", + }, + ) + + install.refresh_from_db() + self.assertEqual(install.ticket_number, "00123456") + + @patch( + "meshapi.util.events.join_requests_slack_channel.SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL", + "", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_NEW_TICKET_ENDPOINT", + "", + ) + @requests_mock.Mocker() + def test_no_events_when_env_variables_unset(self, request_mocker): + enable_flag("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") + enable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual(len(request_mocker.request_history), 0) + + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_NEW_TICKET_ENDPOINT", + "http://example.com/test-url", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_API_TOKEN", + "", + ) + @requests_mock.Mocker() + def 
test_no_osticket_event_when_no_api_token(self, request_mocker): + enable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual(len(request_mocker.request_history), 0) + + @patch( + "meshapi.util.events.join_requests_slack_channel.SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL", + "http://example.com/test-url", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_NEW_TICKET_ENDPOINT", + "http://example.com/test-url", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_API_TOKEN", + "mock-token", + ) + @requests_mock.Mocker() + def test_no_events_for_install_edit(self, request_mocker): + install = Install(**self.sample_install_copy) + install.save() + + enable_flag("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") + enable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + + install.notes = "foo" + install.save() + + self.assertEqual(len(request_mocker.request_history), 0) + + @patch( + "meshapi.util.events.join_requests_slack_channel.SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL", + "http://example.com/test-url-slack", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_NEW_TICKET_ENDPOINT", + "http://example.com/test-url-os-ticket", + ) + @patch( + "meshapi.util.events.osticket_creation.OSTICKET_API_TOKEN", + "mock-token", + ) + @requests_mock.Mocker() + def test_many_retry_no_crash_on_integration_404(self, request_mocker): + request_mocker.post("http://example.com/test-url-slack", text="Not found", status_code=404) + request_mocker.post("http://example.com/test-url-os-ticket", text="Not found", status_code=404) + + enable_flag("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") + enable_flag("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") + + install = Install(**self.sample_install_copy) + install.save() + + self.assertEqual( + len( + [ + request + for request in request_mocker.request_history + if request.url == "http://example.com/test-url-os-ticket" + ] + ), + 4, + ) + self.assertEqual( + len( + [ + request + for request in request_mocker.request_history + if request.url == "http://example.com/test-url-slack" + ] + ), + 4, + ) diff --git a/src/meshapi/util/django_flag_decorator.py b/src/meshapi/util/django_flag_decorator.py new file mode 100644 index 00000000..894202e5 --- /dev/null +++ b/src/meshapi/util/django_flag_decorator.py @@ -0,0 +1,22 @@ +from functools import wraps +from typing import Any, Callable + +from flags.state import flag_state + + +def skip_if_flag_disabled(flag_name: str) -> Callable: + """ + Decorator that transforms the annotated function into a noop if the given flag name is disabled + :param flag_name: the flag to check + """ + + def decorator(func: Callable) -> Callable: + def inner(*args: list, **kwargs: dict) -> Any: + enabled = flag_state(flag_name) + + if enabled: + return func(*args, **kwargs) + + return wraps(func)(inner) + + return decorator diff --git a/src/meshapi/util/events/__init__.py b/src/meshapi/util/events/__init__.py new file mode 100644 index 00000000..055b6a34 --- /dev/null +++ b/src/meshapi/util/events/__init__.py @@ -0,0 +1,2 @@ +from .join_requests_slack_channel import send_join_request_slack_message +from .osticket_creation import create_os_ticket_for_install diff --git a/src/meshapi/util/events/join_requests_slack_channel.py b/src/meshapi/util/events/join_requests_slack_channel.py new file mode 100644 index 00000000..d2886308 --- /dev/null +++ b/src/meshapi/util/events/join_requests_slack_channel.py @@ -0,0 +1,54 @@ 
+import logging +import os +import time + +import requests +from django.db.models.base import ModelBase +from django.db.models.signals import post_save +from django.dispatch import receiver + +from meshapi.models import Install +from meshapi.util.django_flag_decorator import skip_if_flag_disabled + +SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL = os.environ.get("SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL") + + +@receiver(post_save, sender=Install, dispatch_uid="join_requests_slack_channel") +@skip_if_flag_disabled("INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES") +def send_join_request_slack_message(sender: ModelBase, instance: Install, created: bool, **kwargs: dict) -> None: + if not created: + return + + install: Install = instance + if not SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL: + logging.error( + f"Unable to send join request notification for install {str(install)}, did you set the " + f"SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL environment variable?" + ) + return + + building_height = str(int(install.building.altitude)) + "m" if install.building.altitude else "Altitude not found" + roof_access = "Roof access" if install.roof_access else "No roof access" + + attempts = 0 + while attempts < 4: + attempts += 1 + response = requests.post( + SLACK_JOIN_REQUESTS_CHANNEL_WEBHOOK_URL, + json={ + "text": f"**\n" + f"{building_height} · {roof_access} · No LoS Data Available" + }, + ) + + if response.status_code == 200: + break + + time.sleep(1) + + if response.status_code != 200: + logging.error( + f"Got HTTP {response.status_code} while sending install create notification to " + f"join-requests channel. HTTP response was {response.text}" + ) diff --git a/src/meshapi/util/events/osticket_creation.py b/src/meshapi/util/events/osticket_creation.py new file mode 100644 index 00000000..841ae66c --- /dev/null +++ b/src/meshapi/util/events/osticket_creation.py @@ -0,0 +1,101 @@ +import logging +import os +import time + +import requests +from django.db.models.base import ModelBase +from django.db.models.signals import post_save +from django.dispatch import receiver + +from meshapi.models import Install +from meshapi.util.django_flag_decorator import skip_if_flag_disabled + +OSTICKET_API_TOKEN = os.environ.get("OSTICKET_API_TOKEN") +OSTICKET_NEW_TICKET_ENDPOINT = os.environ.get("OSTICKET_NEW_TICKET_ENDPOINT") + + +@receiver(post_save, sender=Install, dispatch_uid="create_os_ticket_for_install") +@skip_if_flag_disabled("INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS") +def create_os_ticket_for_install(sender: ModelBase, instance: Install, created: bool, **kwargs: dict) -> None: + if not created: + return + + install: Install = instance + if not OSTICKET_API_TOKEN or not OSTICKET_NEW_TICKET_ENDPOINT: + logging.error( + f"Unable to create ticket for install {str(install)}, did you set the OSTICKET_API_TOKEN " + f"and OSTICKET_NEW_TICKET_ENDPOINT env vars?" + ) + return + + name = install.member.name + email = install.member.primary_email_address + phone = install.member.phone_number + location = install.building.one_line_complete_address + rooftop_access = install.roof_access + ncl = True + timestamp = install.request_date + id = install.install_number + + if not email: + logging.warning( + f"Not creating OSTicket for install {str(install)}. 
Member {str(install.member)} " + f"does not have a primary email address" + ) + return + + if rooftop_access: + rooftop = "Rooftop install" + emailTitle = f"NYC Mesh {id} Rooftop Install" + else: + rooftop = "Standard install" + emailTitle = f"NYC Mesh {id} Install" + + message = f"date: {timestamp}\r\n" + message += f"node: {id}\r\n" + message += f"name: {name}\r\n" + message += f"email: {email}\r\n" + message += f"phone: {phone}\r\n" + message += f"location: {location}\r\n" + message += f"rooftop: {rooftop}\r\n" + message += f"agree to ncl: {ncl}" + + data = { + "node": id, + "userNode": id, + "email": email, + "name": name, + "subject": emailTitle, + "message": message, + "phone": phone, + "location": location, + "rooftop": rooftop, + "ncl": ncl, + "ip": "*.*.*.*", + "locale": "en", + } + + attempts = 0 + while attempts < 4: + attempts += 1 + response = requests.post( + OSTICKET_NEW_TICKET_ENDPOINT, + json=data, + headers={"X-API-Key": OSTICKET_API_TOKEN}, + ) + + if response.status_code == 201: + break + + time.sleep(1) + + if response.status_code != 201: + logging.error( + f"Unable to create ticket for install {str(install)}. OSTicket returned " + f"HTTP {response.status_code}: {response.text}" + ) + return + + # If we got a good response, update the install object to reflect the ticket ID we just created + install.ticket_number = response.text + install.save() diff --git a/src/meshdb/settings.py b/src/meshdb/settings.py index 01010fef..b0cc51a5 100644 --- a/src/meshdb/settings.py +++ b/src/meshdb/settings.py @@ -46,6 +46,8 @@ FLAGS: Dict[str, Any] = { "MAINTENANCE_MODE": [], "EDIT_PANORAMAS": [], + "INTEGRATION_ENABLED_SEND_JOIN_REQUEST_SLACK_MESSAGES": [], + "INTEGRATION_ENABLED_CREATE_OSTICKET_TICKETS": [], } USE_X_FORWARDED_HOST = True From 7b21c21af71f93fb81cb2065e13f43879912af0c Mon Sep 17 00:00:00 2001 From: Andrew Dickinson Date: Sat, 12 Oct 2024 13:37:00 -0400 Subject: [PATCH 6/6] Inactivate Devices & Links that are deleted in UISP (#630) * Inactivate Devices & Links that are deleted in UISP * Add tests covering new behavior * Make UISP devices that haven't been seen go INACTIVE --- src/meshapi/tests/test_uisp_import.py | 66 ++++++++++++++++--- src/meshapi/util/uisp_import/sync_handlers.py | 34 ++++++++++ .../util/uisp_import/update_objects.py | 54 ++++++++------- 3 files changed, 122 insertions(+), 32 deletions(-) diff --git a/src/meshapi/tests/test_uisp_import.py b/src/meshapi/tests/test_uisp_import.py index 89ebac49..30d506c1 100644 --- a/src/meshapi/tests/test_uisp_import.py +++ b/src/meshapi/tests/test_uisp_import.py @@ -435,7 +435,7 @@ def test_update_link_many_changes(self, mock_get_last_seen): change_messages, [ "Changed connected device pair from [nycmesh-1234-dev1, nycmesh-5678-dev2] to [nycmesh-1234-dev1, nycmesh-9012-dev3]", - "Marked as Inactive due to being offline for more than 30 days", + "Marked as Inactive due to it being offline in UISP for more than 30 days", ], ) @@ -503,11 +503,11 @@ def test_update_link_unknown_offline_duration(self, mock_get_last_seen): self.link.refresh_from_db() self.assertEqual(self.link.from_device, self.device1) self.assertEqual(self.link.to_device, self.device2) - self.assertEqual(self.link.status, Link.LinkStatus.ACTIVE) + self.assertEqual(self.link.status, Link.LinkStatus.INACTIVE) self.assertEqual(self.link.type, Link.LinkType.FIVE_GHZ) self.assertEqual(self.link.abandon_date, None) - self.assertEqual(change_messages, []) + self.assertEqual(change_messages, ["Marked as Inactive due to it being offline in UISP"]) 
@patch("meshapi.util.uisp_import.update_objects.get_uisp_link_last_seen") def test_update_link_reactivate_old_device(self, mock_get_last_seen): @@ -535,7 +535,7 @@ def test_update_link_reactivate_old_device(self, mock_get_last_seen): self.assertEqual( change_messages, [ - "Marked as Active due to coming back online in UISP. Warning: this link was " + "Marked as Active due to it coming back online in UISP. Warning: this link was " "previously abandoned on 2018-11-14, if this link has been re-purposed, " "please make sure the device names and network numbers are updated to reflect the new location" ], @@ -590,7 +590,7 @@ def test_update_device_many_changes(self): [ 'Changed name from "nycmesh-1234-dev1" to "nycmesh-5678-dev1"', "Changed network number from 1234 to 5678", - "Marked as Inactive due to being offline for more than 30 days", + "Marked as Inactive due to it being offline in UISP for more than 30 days", ], ) @@ -606,12 +606,14 @@ def test_update_device_uncertain_offline_duration(self): self.device1.refresh_from_db() self.assertEqual(self.device1.name, "nycmesh-1234-dev1") self.assertEqual(self.device1.node, self.node1) - self.assertEqual(self.device1.status, Device.DeviceStatus.ACTIVE) + self.assertEqual(self.device1.status, Device.DeviceStatus.INACTIVE) self.assertEqual(self.device1.abandon_date, None) self.assertEqual( change_messages, - [], + [ + "Marked as Inactive due to it being offline in UISP", + ], ) def test_update_device_add_abandon_date(self): @@ -663,7 +665,7 @@ def test_update_device_reactivate_old_device(self): self.assertEqual( change_messages, [ - "Marked as Active due to coming back online in UISP. Warning: this device was " + "Marked as Active due to it coming back online in UISP. Warning: this device was " "previously abandoned on 2018-11-14, if this device has been re-purposed, " "please make sure the device name and network number are updated to reflect the new location " "and function" @@ -775,6 +777,14 @@ def setUp(self): ) self.device4.save() + self.device5 = Device( + node=self.node3, + status=Device.DeviceStatus.ACTIVE, + name="nycmesh-7890-dev5", + uisp_id="uisp-uuid-not-real-dont-match-me", + ) + self.device5.save() + self.link1 = Link( from_device=self.device1, to_device=self.device2, @@ -793,6 +803,15 @@ def setUp(self): ) self.link2.save() + self.link3 = Link( + from_device=self.device2, + to_device=self.device3, + status=Link.LinkStatus.ACTIVE, + type=Link.LinkType.FIVE_GHZ, + uisp_id="uisp-uuid-not-real-dont-match-me", + ) + self.link3.save() + @patch("meshapi.util.uisp_import.sync_handlers.notify_admins_of_changes") @patch("meshapi.util.uisp_import.sync_handlers.update_device_from_uisp_data") def test_import_and_sync_devices(self, mock_update_device, mock_notify_admins): @@ -934,6 +953,9 @@ def test_import_and_sync_devices(self, mock_update_device, mock_notify_admins): import_and_sync_uisp_devices(uisp_devices) + self.device5.refresh_from_db() + self.assertEqual(self.device5.status, Device.DeviceStatus.INACTIVE) + created_sector1 = Sector.objects.get(uisp_id="uisp-uuid99") created_sector2 = Sector.objects.get(uisp_id="uisp-uuid999") @@ -967,6 +989,20 @@ def test_import_and_sync_devices(self, mock_update_device, mock_notify_admins): ], created=True, ), + call( + self.device4, + [ + "Marked as inactive because there is no corresponding device in UISP, " + "it was probably deleted there", + ], + ), + call( + self.device5, + [ + "Marked as inactive because there is no corresponding device in UISP, " + "it was probably deleted there", + ], + ), ] ) @@ 
-1235,6 +1271,9 @@ def test_import_and_sync_links(self, mock_update_link, mock_notify_admins, mock_ ] ) + self.link3.refresh_from_db() + self.assertEqual(self.link3.status, Link.LinkStatus.INACTIVE) + created_link3 = Link.objects.get(uisp_id="uisp-uuid3") created_link5 = Link.objects.get(uisp_id="uisp-uuid5") @@ -1249,6 +1288,13 @@ def test_import_and_sync_links(self, mock_update_link, mock_notify_admins, mock_ ], created=True, ), + call( + self.link3, + [ + "Marked as inactive because there is no corresponding link in UISP, " + "it was probably deleted there", + ], + ), ] ) @@ -1475,6 +1521,7 @@ def test_sync_same_building_link_with_los(self): # Clear out the existing links so the only LOS is a building self-loop self.link1.delete() self.link2.delete() + self.link3.delete() link = Link( from_device=self.device3, @@ -1502,8 +1549,9 @@ def test_sync_fiber_link(self): self.link1.type = Link.LinkType.FIBER self.link1.save() self.link2.type = Link.LinkType.ETHERNET - self.link2.save() + self.link3.type = Link.LinkType.ETHERNET + self.link3.save() link3 = Link( from_device=self.device2, diff --git a/src/meshapi/util/uisp_import/sync_handlers.py b/src/meshapi/util/uisp_import/sync_handlers.py index bc5ae63b..037e61ae 100644 --- a/src/meshapi/util/uisp_import/sync_handlers.py +++ b/src/meshapi/util/uisp_import/sync_handlers.py @@ -164,6 +164,23 @@ def import_and_sync_uisp_devices(uisp_devices: List[UISPDevice]) -> None: device = Device(**device_fields) device.save() + with transaction.atomic(): + for device in Device.objects.filter(uisp_id__isnull=False): + uisp_uuid_set = {uisp_device["identification"]["id"] for uisp_device in uisp_devices} + + if device.uisp_id and device.uisp_id not in uisp_uuid_set and device.status != Device.DeviceStatus.INACTIVE: + # If this device has been removed from UISP, mark it as inactive + device.status = Device.DeviceStatus.INACTIVE + device.save() + + notify_admins_of_changes( + device, + [ + "Marked as inactive because there is no corresponding device in UISP, " + "it was probably deleted there", + ], + ) + def import_and_sync_uisp_links(uisp_links: List[UISPDataLink]) -> None: uisp_session = get_uisp_session() @@ -258,6 +275,23 @@ def import_and_sync_uisp_links(uisp_links: List[UISPDataLink]) -> None: created=True, ) + with transaction.atomic(): + for link in Link.objects.filter(uisp_id__isnull=False): + uisp_uuid_set = {uisp_link["id"] for uisp_link in uisp_links} + + if link.uisp_id and link.uisp_id not in uisp_uuid_set and link.status != Link.LinkStatus.INACTIVE: + # If this link has been removed from UISP, mark it as inactive + link.status = Link.LinkStatus.INACTIVE + link.save() + + notify_admins_of_changes( + link, + [ + "Marked as inactive because there is no corresponding link in UISP, " + "it was probably deleted there", + ], + ) + def sync_link_table_into_los_objects() -> None: for link in ( diff --git a/src/meshapi/util/uisp_import/update_objects.py b/src/meshapi/util/uisp_import/update_objects.py index fbc92822..ad0c2879 100644 --- a/src/meshapi/util/uisp_import/update_objects.py +++ b/src/meshapi/util/uisp_import/update_objects.py @@ -28,25 +28,28 @@ def update_device_from_uisp_data( existing_device.node = uisp_node if existing_device.status != uisp_status: - if uisp_status == Device.DeviceStatus.INACTIVE and uisp_last_seen is not None: + if uisp_status == Device.DeviceStatus.INACTIVE: # We wait 30 days to make sure this device is actually inactive, # and not just temporarily offline - if ( - datetime.datetime.now(datetime.timezone.utc) - 
uisp_last_seen - ) > UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE: - existing_device.abandon_date = uisp_last_seen.date() - existing_device.status = Device.DeviceStatus.INACTIVE + if uisp_last_seen is None or ( + (datetime.datetime.now(datetime.timezone.utc) - uisp_last_seen) + > UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE + ): + change_message = f"Marked as {Device.DeviceStatus.INACTIVE} due to it being offline in UISP" + if uisp_last_seen: + existing_device.abandon_date = uisp_last_seen.date() + change_message += ( + " for more than " + f"{int(UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE.total_seconds() / 60 / 60 / 24)} days" + ) - change_messages.append( - f"Marked as {Device.DeviceStatus.INACTIVE} due to being offline " - f"for more than " - f"{int(UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE.total_seconds() / 60 / 60 / 24)} days" - ) + existing_device.status = Device.DeviceStatus.INACTIVE + change_messages.append(change_message) if uisp_status == Device.DeviceStatus.ACTIVE: existing_device.status = Device.DeviceStatus.ACTIVE - change_message = f"Marked as {Device.DeviceStatus.ACTIVE} due to coming back online in UISP" + change_message = f"Marked as {Device.DeviceStatus.ACTIVE} due to it coming back online in UISP" if existing_device.abandon_date: change_message += ( ". Warning: this device was previously abandoned on " @@ -99,24 +102,29 @@ def update_link_from_uisp_data( ) if existing_link.status != uisp_status: - if uisp_status == Link.LinkStatus.INACTIVE and uisp_last_seen is not None: + if uisp_status == Link.LinkStatus.INACTIVE: # We wait 30 days to make sure this link is actually inactive, # and not just temporarily offline - if ( - datetime.datetime.now(datetime.timezone.utc) - uisp_last_seen - ) > UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE: - existing_link.abandon_date = uisp_last_seen.date() - existing_link.status = Link.LinkStatus.INACTIVE + if uisp_last_seen is None or ( + (datetime.datetime.now(datetime.timezone.utc) - uisp_last_seen) + > UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE + ): + change_message = f"Marked as {Link.LinkStatus.INACTIVE} due to it being offline in UISP" + + if uisp_last_seen: + existing_link.abandon_date = uisp_last_seen.date() + change_message += ( + " for more than " + f"{int(UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE.total_seconds() / 60 / 60 / 24)} days" + ) - change_messages.append( - f"Marked as {Link.LinkStatus.INACTIVE} due to being offline for more than " - f"{int(UISP_OFFLINE_DURATION_BEFORE_MARKING_INACTIVE.total_seconds() / 60 / 60 / 24)} days" - ) + existing_link.status = Link.LinkStatus.INACTIVE + change_messages.append(change_message) if uisp_status == Link.LinkStatus.ACTIVE: existing_link.status = Link.LinkStatus.ACTIVE - change_message = f"Marked as {Link.LinkStatus.ACTIVE} due to coming back online in UISP" + change_message = f"Marked as {Link.LinkStatus.ACTIVE} due to it coming back online in UISP" if existing_link.abandon_date: change_message += ( ". Warning: this link was previously abandoned on "