From d95a83a05f5ef1e5fb45ab81aecd3a46f9a1b90e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Fri, 14 Apr 2023 10:24:48 -0500 Subject: [PATCH 01/24] Models for Validated, Hourly, Daily and Monthly --- README.md | 1 + daily/__init__.py | 0 daily/apps.py | 20 ++ daily/filters.py | 30 +++ daily/migrations/__init__.py | 0 daily/models.py | 294 +++++++++++++++++++++ daily/serializers.py | 193 ++++++++++++++ daily/urls.py | 60 +++++ daily/views.py | 401 ++++++++++++++++++++++++++++ djangomain/settings.py | 4 + docker-compose.yml | 2 +- hourly/__init__.py | 0 hourly/apps.py | 20 ++ hourly/filters.py | 30 +++ hourly/migrations/__init__.py | 0 hourly/models.py | 302 +++++++++++++++++++++ hourly/serializers.py | 193 ++++++++++++++ hourly/urls.py | 60 +++++ hourly/views.py | 401 ++++++++++++++++++++++++++++ measurement/models.py | 4 +- monthly/__init__.py | 0 monthly/apps.py | 20 ++ monthly/filters.py | 30 +++ monthly/migrations/__init__.py | 0 monthly/models.py | 293 +++++++++++++++++++++ monthly/serializers.py | 193 ++++++++++++++ monthly/urls.py | 60 +++++ monthly/views.py | 401 ++++++++++++++++++++++++++++ station/models.py | 23 ++ validated/__init__.py | 0 validated/apps.py | 20 ++ validated/filters.py | 80 ++++++ validated/migrations/__init__.py | 0 validated/models.py | 290 +++++++++++++++++++++ validated/models_v1.py | 355 +++++++++++++++++++++++++ validated/others/__init__.py | 0 validated/others/forms.py | 82 ++++++ validated/others/functions.py | 124 +++++++++ validated/serializers.py | 193 ++++++++++++++ validated/urls.py | 60 +++++ validated/views.py | 432 +++++++++++++++++++++++++++++++ 41 files changed, 4669 insertions(+), 2 deletions(-) create mode 100755 daily/__init__.py create mode 100755 daily/apps.py create mode 100755 daily/filters.py create mode 100755 daily/migrations/__init__.py create mode 100755 daily/models.py create mode 100755 daily/serializers.py create mode 100755 daily/urls.py create mode 100755 daily/views.py create mode 100755 hourly/__init__.py create mode 100755 hourly/apps.py create mode 100755 hourly/filters.py create mode 100755 hourly/migrations/__init__.py create mode 100755 hourly/models.py create mode 100755 hourly/serializers.py create mode 100755 hourly/urls.py create mode 100755 hourly/views.py create mode 100755 monthly/__init__.py create mode 100755 monthly/apps.py create mode 100755 monthly/filters.py create mode 100755 monthly/migrations/__init__.py create mode 100755 monthly/models.py create mode 100755 monthly/serializers.py create mode 100755 monthly/urls.py create mode 100755 monthly/views.py create mode 100755 validated/__init__.py create mode 100755 validated/apps.py create mode 100755 validated/filters.py create mode 100755 validated/migrations/__init__.py create mode 100755 validated/models.py create mode 100755 validated/models_v1.py create mode 100755 validated/others/__init__.py create mode 100755 validated/others/forms.py create mode 100755 validated/others/functions.py create mode 100755 validated/serializers.py create mode 100755 validated/urls.py create mode 100755 validated/views.py diff --git a/README.md b/README.md index a3d83fcd..88f5a852 100755 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ If installing this system from scratch: - If you want to load initial data (variables, units, stations...): - In a separate terminal run `docker exec -it bash` e.g. `docker exec -it paricia_web_1 bash` to start a bash session in the container. 
You can find the name of the container in the Docker Desktop GUI, or by running `docker container ls`.
   - Run `python manage.py shell < utilities/load_initial_data.py`.
+  - Create an **admin** user by running `python manage.py createsuperuser`.
 
 ## Database Schema
 
diff --git a/daily/__init__.py b/daily/__init__.py
new file mode 100755
index 00000000..e69de29b
diff --git a/daily/apps.py b/daily/apps.py
new file mode 100755
index 00000000..e5f54372
--- /dev/null
+++ b/daily/apps.py
@@ -0,0 +1,20 @@
+########################################################################################
+# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos
+# (iMHEA)basada en los desarrollos realizados por:
+# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador.
+# Contacto: info@fonag.org.ec
+# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS),
+# Ecuador.
+# Contacto: paramh2o@aguaquito.gob.ec
+#
+# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones
+# creadoras, ya sea en uso total o parcial del código.
+########################################################################################
+
+from __future__ import unicode_literals
+
+from django.apps import AppConfig
+
+
+class DailyConfig(AppConfig):
+    name = "daily"
diff --git a/daily/filters.py b/daily/filters.py
new file mode 100755
index 00000000..ce28398e
--- /dev/null
+++ b/daily/filters.py
@@ -0,0 +1,30 @@
+from django_filters import rest_framework as filters
+from station.models import Station
+
+
+
+class DailyFilter(filters.FilterSet):
+    """
+    Filter class for daily records that are not Polar Wind, Discharge Curve, or Level
+    Function and that have no depth information.
+    """
+
+    date = filters.DateFilter(field_name="date", lookup_expr="exact")
+    min_date = filters.DateFilter(field_name="date", lookup_expr="gte")
+    max_date = filters.DateFilter(field_name="date", lookup_expr="lte")
+    value = filters.NumberFilter(field_name="value", lookup_expr="exact")
+    min_value = filters.NumberFilter(field_name="value", lookup_expr="gte")
+    max_value = filters.NumberFilter(field_name="value", lookup_expr="lte")
+    station_id = filters.NumberFilter(field_name="station_id", lookup_expr="exact")
+    used_for_monthly = filters.BooleanFilter(field_name="used_for_monthly", lookup_expr="exact")
+
+
+class DailyFilterDepth(DailyFilter):
+    """
+    Filter class for daily records that are not Polar Wind, Discharge Curve, or Level
+    Function and that have depth information.
+    """
+
+    depth = filters.NumberFilter(field_name="depth", lookup_expr="exact")
+    min_depth = filters.NumberFilter(field_name="depth", lookup_expr="gte")
+    max_depth = filters.NumberFilter(field_name="depth", lookup_expr="lte")
diff --git a/daily/migrations/__init__.py b/daily/migrations/__init__.py
new file mode 100755
index 00000000..e69de29b
diff --git a/daily/models.py b/daily/models.py
new file mode 100755
index 00000000..570cd6c5
--- /dev/null
+++ b/daily/models.py
@@ -0,0 +1,294 @@
+########################################################################################
+# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos
+# (iMHEA)basada en los desarrollos realizados por:
+# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador.
+# Contacto: info@fonag.org.ec
+# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS),
+# Ecuador.
+# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## +from __future__ import unicode_literals + +from typing import List, Type + +from django.db import models +from django.urls import reverse +from timescale.db.models.models import TimescaleModel +from django.core.exceptions import ValidationError + +from station.models import Station + +DAILYS: List[str] = [] +"""Available daily variables.""" + +# TODO check if PolarWind is needed in daily +class PolarWind(TimescaleModel): + """ + Polar Wind daily with a velocity and direction at a specific time. + """ + + speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True) + direction = models.DecimalField( + "Direction", max_digits=14, decimal_places=6, null=True + ) + + class Meta: + """ + Para que no se cree en la migracion. + + NOTE: Why don't we want this in the migration? + """ + + default_permissions = () + managed = False + + + +# TODO check functioning od startswith Dai/Day +class BaseDaily(TimescaleModel): + @classmethod + def __init_subclass__(cls, *args, **kwargs) -> None: + if not cls.__name__.startswith("_Dai") and cls.__name__ not in DAILYS: + DAILYS.append(cls.__name__) + + # TODO ask if "date" name is OK + # TODO ask if default=timezone.now is OK, + # date = models.DateField(default=timezone.now) + date = models.DateField("date") + station_id = models.PositiveIntegerField("station_id") + used_for_monthly = models.BooleanField("used_for_monthly", default=False) + completeness = models.DecimalField(max_digits=4, decimal_places=1) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["used_for_monthly"]), + models.Index(fields=["station_id", "time"]), + models.Index(fields=["time", "station_id"]), + ] + abstract = True + + +def create_Dai_model( + digits=14, decimals=6, fields=("Average") +) -> Type[TimescaleModel]: + num = len(DAILYS) + 1 + _fields = { + key.lower(): models.DecimalField( + key, + max_digits=digits, + decimal_places=decimals, + null=True, + ) + for key in fields + } + + class Meta: + abstract = True + + attrs = {"__module__": __name__, "Meta": Meta} + attrs.update(_fields) + + return type( + f"_Dai{num}", + (BaseDaily,), + attrs, + ) + + +class Precipitation(create_Dai_model(digits=6, decimals=2, fields=("Total",))): + """Precipitation.""" + + +class AirTemperature(create_Dai_model(digits=5, decimals=2)): + """Air temperature.""" + + +class Humidity(create_Dai_model()): + """Humidity.""" + + +class WindVelocity(create_Dai_model()): + """Wind velocity.""" + + +class WindDirection(create_Dai_model()): + """Wind direction.""" + + +class SoilMoisture(create_Dai_model()): + """Soil moisture.""" + + +class SolarRadiation(create_Dai_model()): + """Solar radiation.""" + + +class AtmosphericPressure(create_Dai_model()): + """Atmospheric pressure.""" + + +class WaterTemperature(create_Dai_model()): + """Water temperature.""" + + +class Flow(create_Dai_model()): + """Flow.""" + + +class WaterLevel(create_Dai_model()): + """Water level.""" + + +class BatteryVoltage(create_Dai_model()): + """Battery voltage.""" + + +class FlowManual(create_Dai_model()): + """Flow (manual).""" + + +# TODO Check if There id needed StripLevelReading daily. 
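+# NOTE (illustrative sketch, assuming the factory above works as intended):
+# create_Dai_model() returns an abstract TimescaleModel base named _Dai<N>, so a
+# concrete daily variable such as Humidity is roughly equivalent to:
+#
+#     class Humidity(BaseDaily):
+#         average = models.DecimalField("Average", max_digits=14, decimal_places=6, null=True)
+#
+# with BaseDaily contributing the date, station_id, used_for_monthly and completeness
+# columns.
+# TODO the default fields=("Average") in create_Dai_model is a bare string rather than a
+# one-element tuple, so iterating it yields single characters; it should probably read
+# fields=("Average",), as in hourly's create_hour_model().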
+class StripLevelReading(create_Dai_model(fields=("Value", "Uncertainty"))): + """Strip level reading.""" + + data_import_date = models.DateTimeField("Data import date") + data_start_date = models.DateTimeField("Data start date") + calibrated = models.BooleanField("Calibrated") + comments = models.CharField("Comments", null=True, max_length=250) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "data_import_date"]), + models.Index(fields=["station_id", "data_start_date", "time"]), + models.Index(fields=["data_import_date"]), + ] + + +class SoilTemperature(create_Dai_model()): + """Soil temperature.""" + + +class IndirectRadiation(create_Dai_model()): + """Indirect radiation.""" + + +# Variables created for buoy with different depths +class WaterTemperatureDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Water temperature (degrees celcius) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterAcidityDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Water acidity (pH) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class RedoxPotentialDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Redox potential (mV) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterTurbidityDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Water turbidity (NTU) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class ChlorineConcentrationDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Chlorine concentration (ug/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class OxygenConcentrationDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Oxygen concentration (mg/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PercentageOxygenConcentrationDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Percentage oxygen concentration (mg/l) at a depth in cm. + + HELPWANTED: Is this wrong? It's teh same as above, perhaps units should + be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. + """ + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PhycocyaninDepth( + create_Dai_model(digits=6, decimals=2,), +): + """Phycocyanin (?) 
at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] diff --git a/daily/serializers.py b/daily/serializers.py new file mode 100755 index 00000000..d6acafab --- /dev/null +++ b/daily/serializers.py @@ -0,0 +1,193 @@ +from rest_framework import serializers + +from .models import ( + AirTemperature, + AtmosphericPressure, + BatteryVoltage, + ChlorineConcentrationDepth, + # DischargeCurve, + Flow, + FlowManual, + Humidity, + IndirectRadiation, + # LevelFunction, + OxygenConcentrationDepth, + PercentageOxygenConcentrationDepth, + PhycocyaninDepth, + PolarWind, + Precipitation, + RedoxPotentialDepth, + SoilMoisture, + SoilTemperature, + SolarRadiation, + StripLevelReading, + WaterAcidityDepth, + WaterLevel, + WaterTemperature, + WaterTemperatureDepth, + WaterTurbidityDepth, + WindDirection, + WindVelocity, +) + + +class PolarWindSerializer(serializers.ModelSerializer): + class Meta: + model = PolarWind + exclude = [] + +# TODO Confirm if DischargeCurveSerializer is not needed in Validated Models +# class DischargeCurveSerializer(serializers.ModelSerializer): +# class Meta: +# model = DischargeCurve +# exclude = [] + + +# class LevelFunctionSerializer(serializers.ModelSerializer): +# class Meta: +# model = LevelFunction +# exclude = [] + + +class PrecipitationSerializer(serializers.ModelSerializer): + class Meta: + model = Precipitation + exclude = [] + + +class AirTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = AirTemperature + exclude = [] + + +class HumiditySerializer(serializers.ModelSerializer): + class Meta: + model = Humidity + exclude = [] + + +class WindVelocitySerializer(serializers.ModelSerializer): + class Meta: + model = WindVelocity + exclude = [] + + +class WindDirectionSerializer(serializers.ModelSerializer): + class Meta: + model = WindDirection + exclude = [] + + +class SoilMoistureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilMoisture + exclude = [] + + +class SolarRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = SolarRadiation + exclude = [] + + +class AtmosphericPressureSerializer(serializers.ModelSerializer): + class Meta: + model = AtmosphericPressure + exclude = [] + + +class WaterTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperature + exclude = [] + + +class FlowSerializer(serializers.ModelSerializer): + class Meta: + model = Flow + exclude = [] + + +class WaterLevelSerializer(serializers.ModelSerializer): + class Meta: + model = WaterLevel + exclude = [] + + +class BatteryVoltageSerializer(serializers.ModelSerializer): + class Meta: + model = BatteryVoltage + exclude = [] + + +class FlowManualSerializer(serializers.ModelSerializer): + class Meta: + model = FlowManual + exclude = [] + + +class StripLevelReadingSerializer(serializers.ModelSerializer): + class Meta: + model = StripLevelReading + exclude = [] + + +class SoilTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilTemperature + exclude = [] + + +class IndirectRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = IndirectRadiation + exclude = [] + + +class WaterTemperatureDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperatureDepth + exclude = [] + + +class WaterAcidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterAcidityDepth + exclude = [] + + +class 
RedoxPotentialDepthSerializer(serializers.ModelSerializer): + class Meta: + model = RedoxPotentialDepth + exclude = [] + + +class WaterTurbidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTurbidityDepth + exclude = [] + + +class ChlorineConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = ChlorineConcentrationDepth + exclude = [] + + +class OxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = OxygenConcentrationDepth + exclude = [] + + +class PercentageOxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PercentageOxygenConcentrationDepth + exclude = [] + + +class PhycocyaninDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PhycocyaninDepth + exclude = [] diff --git a/daily/urls.py b/daily/urls.py new file mode 100755 index 00000000..09417685 --- /dev/null +++ b/daily/urls.py @@ -0,0 +1,60 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from django.urls import path +from rest_framework.urlpatterns import format_suffix_patterns + +from . import views + +app_name = "hourly" +urlpatterns = [ + path("polarwind/", views.PolarWindList.as_view()), + # TODO Verify if it's not really needed + # path("dischargecurve/", views.DischargeCurveList.as_view()), + # path("levelfunction/", views.LevelFunctionList.as_view()), + path("precipitation/", views.PrecipitationList.as_view()), + path("airtemperature/", views.AirTemperatureList.as_view()), + path("humidity/", views.HumidityList.as_view()), + path("windvelocity/", views.WindVelocityList.as_view()), + path("winddirection/", views.WindDirectionList.as_view()), + path("soilmoisture/", views.SoilMoistureList.as_view()), + path("solarradiation/", views.SolarRadiationList.as_view()), + path("atmosphericpressure/", views.AtmosphericPressureList.as_view()), + path("watertemperature/", views.WaterTemperatureList.as_view()), + path("flow/", views.FlowList.as_view()), + path("waterlevel/", views.WaterLevelList.as_view()), + path("batteryvoltage/", views.BatteryVoltageList.as_view()), + path("flowmanual/", views.FlowManualList.as_view()), + path("striplevelreading/", views.StripLevelReadingList.as_view()), + path("soiltemperature/", views.SoilTemperatureList.as_view()), + path("indirectradiation/", views.IndirectRadiationList.as_view()), + path("watertemperature_depth/", views.WaterTemperatureDepthList.as_view()), + path("wateracidity_depth/", views.WaterAcidityDepthList.as_view()), + path("redoxpotential_depth/", views.RedoxPotentialDepthList.as_view()), + path("waterturbidity_depth/", views.WaterTurbidityDepthList.as_view()), + path( + "chlorineconcentration_depth/", + views.ChlorineConcentrationDepthList.as_view(), + ), + path( + "oxygenconcentration_depth/", + views.OxygenConcentrationDepthList.as_view(), + ), + path( + "percentageoxygen_depth/", + 
views.PercentageOxygenConcentrationDepthList.as_view(), + ), + path("phycocyanin_depth/", views.PhycocyaninDepthList.as_view()), +] + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/daily/views.py b/daily/views.py new file mode 100755 index 00000000..87742936 --- /dev/null +++ b/daily/views.py @@ -0,0 +1,401 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from __future__ import unicode_literals + +from django.contrib.auth.decorators import permission_required +from django.contrib.auth.mixins import PermissionRequiredMixin +from django.db import connection +from django.http import HttpResponseRedirect, JsonResponse +from django.shortcuts import render +from django.urls import reverse +from django.views.generic.detail import DetailView +from django.views.generic.edit import CreateView, DeleteView, UpdateView +from rest_framework import generics + +import daily.models as day +import daily.serializers as serializers +# from Daily.models import DischargeCurve, LevelFunction + +from .filters import ( + # DischargeCurveFilter, + # LevelFunctionFilter, + DailyFilter, + DailyFilterDepth, + # PolarWindFilter, +) + +# from Daily.others.functions import level_function_table + + + + +class DailyListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + DailyFilter class to filter the results. + """ + + filterset_class = DailyFilter + + +class DailyDepthListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + DailyFilterDepth class to filter the results. + """ + + filterset_class = DailyFilterDepth + + +class PrecipitationList(DailyListBase): + """ + List all measurements of Precipitation. + """ + + queryset = day.Precipitation.objects.all() + serializer_class = serializers.PrecipitationSerializer + + +class AirTemperatureList(DailyListBase): + """ + List all Dailys of Air Temperature. + """ + + queryset = day.AirTemperature.objects.all() + serializer_class = serializers.AirTemperatureSerializer + + +class HumidityList(DailyListBase): + """ + List all Dailys of Humidity. + """ + + queryset = day.Humidity.objects.all() + serializer_class = serializers.HumiditySerializer + + +class WindVelocityList(DailyListBase): + """ + List all Dailys of Wind Velocity. + """ + + queryset = day.WindVelocity.objects.all() + serializer_class = serializers.WindVelocitySerializer + + +class WindDirectionList(DailyListBase): + """ + List all Dailys of Wind Direction. + """ + + queryset = day.WindDirection.objects.all() + serializer_class = serializers.WindDirectionSerializer + + +class SoilMoistureList(DailyListBase): + """ + List all Dailys of Soil Moisture. + """ + + queryset = day.SoilMoisture.objects.all() + serializer_class = serializers.SoilMoistureSerializer + + +class SolarRadiationList(DailyListBase): + """ + List all Dailys of Solar Radiation. 
+ """ + + queryset = day.SolarRadiation.objects.all() + serializer_class = serializers.SolarRadiationSerializer + + +class AtmosphericPressureList(DailyListBase): + """ + List all Dailys of Atmospheric Pressure. + """ + + queryset = day.AtmosphericPressure.objects.all() + serializer_class = serializers.AtmosphericPressureSerializer + + +class WaterTemperatureList(DailyListBase): + """ + List all Dailys of Water Temperature. + """ + + queryset = day.WaterTemperature.objects.all() + serializer_class = serializers.WaterTemperatureSerializer + + +class FlowList(DailyListBase): + """ + List all Dailys of Flow. + """ + + queryset = day.Flow.objects.all() + serializer_class = serializers.FlowSerializer + + +class WaterLevelList(DailyListBase): + """ + List all Dailys of Water Level. + """ + + queryset = day.WaterLevel.objects.all() + serializer_class = serializers.WaterLevelSerializer + + +class BatteryVoltageList(DailyListBase): + """ + List all Dailys of Battery Voltage. + """ + + queryset = day.BatteryVoltage.objects.all() + serializer_class = serializers.BatteryVoltageSerializer + + +class FlowManualList(DailyListBase): + """ + List all Dailys of Flow Manual. + """ + + queryset = day.FlowManual.objects.all() + serializer_class = serializers.FlowManualSerializer + + +class StripLevelReadingList(DailyListBase): + """ + List all Dailys of Strip Level Reading. + """ + + queryset = day.StripLevelReading.objects.all() + serializer_class = serializers.StripLevelReadingSerializer + + +class SoilTemperatureList(DailyListBase): + """ + List all Dailys of Soil Temperature. + """ + + queryset = day.SoilTemperature.objects.all() + serializer_class = serializers.SoilTemperatureSerializer + + +class IndirectRadiationList(DailyListBase): + """ + List all Dailys of Indirect Radiation. + """ + + queryset = day.IndirectRadiation.objects.all() + serializer_class = serializers.IndirectRadiationSerializer + + +class WaterTemperatureDepthList(DailyDepthListBase): + """ + List all Dailys of Water Temperature Depth. + """ + + queryset = day.WaterTemperatureDepth.objects.all() + serializer_class = serializers.WaterTemperatureDepthSerializer + + +class WaterAcidityDepthList(DailyDepthListBase): + """ + List all Dailys of Water Acidity Depth. + """ + + queryset = day.WaterAcidityDepth.objects.all() + serializer_class = serializers.WaterAcidityDepthSerializer + + +class RedoxPotentialDepthList(DailyDepthListBase): + """ + List all Dailys of Redox Potential Depth. + """ + + queryset = day.RedoxPotentialDepth.objects.all() + serializer_class = serializers.RedoxPotentialDepthSerializer + + +class WaterTurbidityDepthList(DailyDepthListBase): + """ + List all Dailys of Water Turbidity Depth. + """ + + queryset = day.WaterTurbidityDepth.objects.all() + serializer_class = serializers.WaterTurbidityDepthSerializer + + +class ChlorineConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Chlorine Concentration Depth. + """ + + queryset = day.ChlorineConcentrationDepth.objects.all() + serializer_class = serializers.ChlorineConcentrationDepthSerializer + + +class OxygenConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Oxygen Concentration Depth. + """ + + queryset = day.OxygenConcentrationDepth.objects.all() + serializer_class = serializers.OxygenConcentrationDepthSerializer + + +class PercentageOxygenConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Percentage Oxygen Concentration Depth. 
+ """ + + queryset = day.PercentageOxygenConcentrationDepth.objects.all() + serializer_class = serializers.PercentageOxygenConcentrationDepthSerializer + + +class PhycocyaninDepthList(DailyDepthListBase): + """ + List all Dailys of Phycocyanin Depth. + """ + + queryset = day.PhycocyaninDepth.objects.all() + serializer_class = serializers.PhycocyaninDepthSerializer + + +######################################################################################## +# TODO: Revisit theses specialised views that use level_function_table() and create +# Django Rest Framework equivalents. +######################################################################################## + + +# class DischargeCurveDetail(PermissionRequiredMixin, DetailView): +# model = DischargeCurve +# permission_required = "Daily.view_dischargecurve" +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# dischargecurve_id = self.object.pk +# context["levelfunctiontable"] = level_function_table(dischargecurve_id) +# return context + + +# class LevelFunctionCreate(PermissionRequiredMixin, CreateView): +# permission_required = "Daily.add_dischargecurve" +# model = LevelFunction +# form_class = LevelFunctionForm +# +# def post(self, request, *args, **kwargs): +# dischargecurve_id = kwargs.get("id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# form = LevelFunctionForm(self.request.POST or None) +# try: +# # Verify if form is correct +# levelfunction = form.save(commit=False) +# except Exception: +# # If it is not, send an informative message. +# _levelfunctiontable = level_function_table(dischargecurve_id) +# new_levelfunction = render( +# request, +# "measurement/levelfunction_form.html", +# {"form": LevelFunctionForm(self.request.POST or None)}, +# ) +# return render( +# request, +# "measurement/dischargecurve_detail.html", +# { +# "dischargecurve": dischargecurve, +# "levelfunctiontable": _levelfunctiontable, +# "new_levelfunction": new_levelfunction.content.decode("utf-8"), +# }, +# ) +# levelfunction.dischargecurve = dischargecurve +# levelfunction.save() +# dischargecurve.requiere_recalculo_caudal = True +# dischargecurve.save() +# url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} +# ) +# return HttpResponseRedirect(url) +# +# def get_context_data(self, **kwargs): +# context = super(LevelFunctionCreate, self).get_context_data(**kwargs) +# context["title"] = "Create" +# dischargecurve_id = self.kwargs.get("id") +# context["url"] = reverse( +# "measurement:levelfunction_create", kwargs={"id": dischargecurve_id} +# ) +# return context +# +# +# class LevelFunctionUpdate(PermissionRequiredMixin, UpdateView): +# permission_required = "Daily.change_dischargecurve" +# model = LevelFunction +# fields = ["level", "function"] +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# context["title"] = "Modify" +# levelfunction_pk = self.kwargs.get("pk") +# context["url"] = reverse( +# "measurement:levelfunction_update", kwargs={"pk": levelfunction_pk} +# ) +# context["dischargecurve_id"] = self.object.dischargecurve.id +# return context +# +# def post(self, request, *args, **kwargs): +# data = request.POST.copy() +# dischargecurve_id = data.get("dischargecurve_id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.success_url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": 
dischargecurve_id} +# ) +# return super().post(data, **kwargs) +# +# +# class LevelFunctionDelete(PermissionRequiredMixin, DeleteView): +# permission_required = "Daily.delete_dischargecurve" +# model = LevelFunction +# +# def delete(self, request, *args, **kwargs): +# self.object = self.get_object() +# dischargecurve = self.object.dischargecurve +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.object.delete() +# return HttpResponseRedirect( +# reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve.id} +# ) +# ) +# +# +# @permission_required("Daily.add_dischargecurve") +# def recalculate_flow(request): +# dischargecurve_id = int(request.POST.get("dischargecurve_id", None)) +# sql = "SELECT calculate_flow(%s);" +# try: +# with connection.cursor() as cursor: +# cursor.execute(sql, [dischargecurve_id]) +# cursor.fetchone() +# except Exception: +# result = {"res": False} +# return JsonResponse(result) +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = False +# dischargecurve.save() +# result = {"res": True} +# return JsonResponse(result) diff --git a/djangomain/settings.py b/djangomain/settings.py index 978347aa..b595124f 100755 --- a/djangomain/settings.py +++ b/djangomain/settings.py @@ -57,6 +57,10 @@ "variable.apps.VariableConfig", "formatting.apps.FormattingConfig", "measurement.apps.MeasurementConfig", + "validated.apps.ValidatedConfig", + "hourly.apps.HourlyConfig", + "daily.apps.DailyConfig", + "monthly.apps.MonthlyConfig", "importing.apps.ImportingConfig", "bootstrap4", "django_extensions", diff --git a/docker-compose.yml b/docker-compose.yml index b6806b86..72d19fb9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,7 +4,7 @@ services: db: image: timescale/timescaledb-ha:pg14-latest volumes: - - ./data/db:/home/postgres/pgdata/data + - ./data/db:/var/lib/postgresql/data environment: - POSTGRES_DB=postgres - POSTGRES_USER=postgres diff --git a/hourly/__init__.py b/hourly/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/hourly/apps.py b/hourly/apps.py new file mode 100755 index 00000000..e5f54372 --- /dev/null +++ b/hourly/apps.py @@ -0,0 +1,20 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from __future__ import unicode_literals + +from django.apps import AppConfig + + +class HourlyConfig(AppConfig): + name = "hourly" diff --git a/hourly/filters.py b/hourly/filters.py new file mode 100755 index 00000000..5e40ce96 --- /dev/null +++ b/hourly/filters.py @@ -0,0 +1,30 @@ +from django_filters import rest_framework as filters +from station.models import Station + + + +class HourlyFilter(filters.FilterSet): + """ + Filter class for hourlys that are not Polar Wind, Discharge Curve, or Level Function + and that have no depth information. 
+    """
+
+    time = filters.DateTimeFilter(field_name="time", lookup_expr="exact")
+    min_time = filters.DateTimeFilter(field_name="time", lookup_expr="gte")
+    max_time = filters.DateTimeFilter(field_name="time", lookup_expr="lte")
+    value = filters.NumberFilter(field_name="value", lookup_expr="exact")
+    min_value = filters.NumberFilter(field_name="value", lookup_expr="gte")
+    max_value = filters.NumberFilter(field_name="value", lookup_expr="lte")
+    station_id = filters.NumberFilter(field_name="station_id", lookup_expr="exact")
+    used_for_daily = filters.BooleanFilter(field_name="used_for_daily", lookup_expr="exact")
+
+
+class HourlyFilterDepth(HourlyFilter):
+    """
+    Filter class for hourly records that are not Polar Wind, Discharge Curve, or Level
+    Function and that have depth information.
+    """
+
+    depth = filters.NumberFilter(field_name="depth", lookup_expr="exact")
+    min_depth = filters.NumberFilter(field_name="depth", lookup_expr="gte")
+    max_depth = filters.NumberFilter(field_name="depth", lookup_expr="lte")
diff --git a/hourly/migrations/__init__.py b/hourly/migrations/__init__.py
new file mode 100755
index 00000000..e69de29b
diff --git a/hourly/models.py b/hourly/models.py
new file mode 100755
index 00000000..e8ccc21f
--- /dev/null
+++ b/hourly/models.py
@@ -0,0 +1,302 @@
+########################################################################################
+# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos
+# (iMHEA)basada en los desarrollos realizados por:
+# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador.
+# Contacto: info@fonag.org.ec
+# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS),
+# Ecuador.
+# Contacto: paramh2o@aguaquito.gob.ec
+#
+# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones
+# creadoras, ya sea en uso total o parcial del código.
+########################################################################################
+from __future__ import unicode_literals
+
+from typing import List, Type
+
+from django.db import models
+from django.urls import reverse
+from timescale.db.models.models import TimescaleModel
+from django.core.exceptions import ValidationError
+
+from station.models import Station
+
+HOURLYS: List[str] = []
+"""Available hourly variables."""
+
+# TODO check if PolarWind is needed in Hourly
+class PolarWind(TimescaleModel):
+    """
+    Hourly polar wind record with a velocity and direction at a specific time.
+    """
+
+    speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True)
+    direction = models.DecimalField(
+        "Direction", max_digits=14, decimal_places=6, null=True
+    )
+
+    class Meta:
+        """
+        So that it is not created in the migration.
+
+        NOTE: Why don't we want this in the migration?
+ """ + + default_permissions = () + managed = False + + + + +class BaseHourly(TimescaleModel): + @classmethod + def __init_subclass__(cls, *args, **kwargs) -> None: + if not cls.__name__.startswith("_Hour") and cls.__name__ not in HOURLYS: + HOURLYS.append(cls.__name__) + + time = models.DateTimeField(precision=0) + station_id = models.PositiveIntegerField("station_id") + used_for_daily = models.BooleanField("used_for_daily", default=False) + completeness = models.DecimalField(max_digits=4, decimal_places=1) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["used_for_daily"]), + models.Index(fields=["station_id", "time"]), + models.Index(fields=["time", "station_id"]), + ] + abstract = True + + # TODO Check if this is wanted/needed + def clean(self): + super().clean() + if self.time.minute or self.time.second: + raise ValidationError('The information of minutes and seconds is not allowed in this field.') + + # TODO check if this is wanted/needed + def save(self, *args, **kwargs): + self.time = self.time.replace(minute=0, second=0, microsecond=0) + super().save(*args, **kwargs) + + +def create_hour_model( + digits=14, decimals=6, fields=("Average", "Maximum", "Minimum") +) -> Type[TimescaleModel]: + num = len(HOURLYS) + 1 + _fields = { + key.lower(): models.DecimalField( + key, + max_digits=digits, + decimal_places=decimals, + null=True, + ) + for key in fields + } + + class Meta: + abstract = True + + attrs = {"__module__": __name__, "Meta": Meta} + attrs.update(_fields) + + return type( + f"_Hour{num}", + (BaseHourly,), + attrs, + ) + + +class Precipitation(create_hour_model(digits=6, decimals=2, fields=("Total",))): + """Precipitation.""" + + +class AirTemperature(create_hour_model(digits=5, decimals=2)): + """Air temperature.""" + + +class Humidity(create_hour_model()): + """Humidity.""" + + +class WindVelocity(create_hour_model()): + """Wind velocity.""" + + +class WindDirection(create_hour_model()): + """Wind direction.""" + + +class SoilMoisture(create_hour_model()): + """Soil moisture.""" + + +class SolarRadiation(create_hour_model()): + """Solar radiation.""" + + +class AtmosphericPressure(create_hour_model()): + """Atmospheric pressure.""" + + +class WaterTemperature(create_hour_model()): + """Water temperature.""" + + +class Flow(create_hour_model()): + """Flow.""" + + +class WaterLevel(create_hour_model()): + """Water level.""" + + +class BatteryVoltage(create_hour_model()): + """Battery voltage.""" + +# TODO Check if this variable wouldn´t have table in hourly, daily and MOnthly +class FlowManual(create_hour_model(fields=("Value",))): + """Flow (manual).""" + + +# TODO Check if There is needed StripLevelReading hourly. 
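+# TODO verify (editor assumption): BaseHourly above declares
+# time = models.DateTimeField(precision=0), but Django's DateTimeField has no
+# precision argument and unexpected keyword arguments raise TypeError, so the
+# precision kwarg probably needs to be dropped; truncation to the hour is already
+# handled in BaseHourly.save().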
+class StripLevelReading(create_hour_model(fields=("Value", "Uncertainty"))): + """Strip level reading.""" + + data_import_date = models.DateTimeField("Data import date") + data_start_date = models.DateTimeField("Data start date") + calibrated = models.BooleanField("Calibrated") + comments = models.CharField("Comments", null=True, max_length=250) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "data_import_date"]), + models.Index(fields=["station_id", "data_start_date", "time"]), + models.Index(fields=["data_import_date"]), + ] + + +class SoilTemperature(create_hour_model()): + """Soil temperature.""" + + +class IndirectRadiation(create_hour_model()): + """Indirect radiation.""" + + +# Variables created for buoy with different depths +class WaterTemperatureDepth( + create_hour_model(digits=6, decimals=2,), +): + """Water temperature (degrees celcius) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterAcidityDepth( + create_hour_model(digits=6, decimals=2,), +): + """Water acidity (pH) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class RedoxPotentialDepth( + create_hour_model(digits=6, decimals=2,), +): + """Redox potential (mV) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterTurbidityDepth( + create_hour_model(digits=6, decimals=2,), +): + """Water turbidity (NTU) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class ChlorineConcentrationDepth( + create_hour_model(digits=6, decimals=2,), +): + """Chlorine concentration (ug/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class OxygenConcentrationDepth( + create_hour_model(digits=6, decimals=2,), +): + """Oxygen concentration (mg/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PercentageOxygenConcentrationDepth( + create_hour_model(digits=6, decimals=2,), +): + """Percentage oxygen concentration (mg/l) at a depth in cm. + + HELPWANTED: Is this wrong? It's teh same as above, perhaps units should + be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. + """ + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PhycocyaninDepth( + create_hour_model(digits=6, decimals=2,), +): + """Phycocyanin (?) 
at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] diff --git a/hourly/serializers.py b/hourly/serializers.py new file mode 100755 index 00000000..d6acafab --- /dev/null +++ b/hourly/serializers.py @@ -0,0 +1,193 @@ +from rest_framework import serializers + +from .models import ( + AirTemperature, + AtmosphericPressure, + BatteryVoltage, + ChlorineConcentrationDepth, + # DischargeCurve, + Flow, + FlowManual, + Humidity, + IndirectRadiation, + # LevelFunction, + OxygenConcentrationDepth, + PercentageOxygenConcentrationDepth, + PhycocyaninDepth, + PolarWind, + Precipitation, + RedoxPotentialDepth, + SoilMoisture, + SoilTemperature, + SolarRadiation, + StripLevelReading, + WaterAcidityDepth, + WaterLevel, + WaterTemperature, + WaterTemperatureDepth, + WaterTurbidityDepth, + WindDirection, + WindVelocity, +) + + +class PolarWindSerializer(serializers.ModelSerializer): + class Meta: + model = PolarWind + exclude = [] + +# TODO Confirm if DischargeCurveSerializer is not needed in Validated Models +# class DischargeCurveSerializer(serializers.ModelSerializer): +# class Meta: +# model = DischargeCurve +# exclude = [] + + +# class LevelFunctionSerializer(serializers.ModelSerializer): +# class Meta: +# model = LevelFunction +# exclude = [] + + +class PrecipitationSerializer(serializers.ModelSerializer): + class Meta: + model = Precipitation + exclude = [] + + +class AirTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = AirTemperature + exclude = [] + + +class HumiditySerializer(serializers.ModelSerializer): + class Meta: + model = Humidity + exclude = [] + + +class WindVelocitySerializer(serializers.ModelSerializer): + class Meta: + model = WindVelocity + exclude = [] + + +class WindDirectionSerializer(serializers.ModelSerializer): + class Meta: + model = WindDirection + exclude = [] + + +class SoilMoistureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilMoisture + exclude = [] + + +class SolarRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = SolarRadiation + exclude = [] + + +class AtmosphericPressureSerializer(serializers.ModelSerializer): + class Meta: + model = AtmosphericPressure + exclude = [] + + +class WaterTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperature + exclude = [] + + +class FlowSerializer(serializers.ModelSerializer): + class Meta: + model = Flow + exclude = [] + + +class WaterLevelSerializer(serializers.ModelSerializer): + class Meta: + model = WaterLevel + exclude = [] + + +class BatteryVoltageSerializer(serializers.ModelSerializer): + class Meta: + model = BatteryVoltage + exclude = [] + + +class FlowManualSerializer(serializers.ModelSerializer): + class Meta: + model = FlowManual + exclude = [] + + +class StripLevelReadingSerializer(serializers.ModelSerializer): + class Meta: + model = StripLevelReading + exclude = [] + + +class SoilTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilTemperature + exclude = [] + + +class IndirectRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = IndirectRadiation + exclude = [] + + +class WaterTemperatureDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperatureDepth + exclude = [] + + +class WaterAcidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterAcidityDepth + exclude = [] + + +class 
RedoxPotentialDepthSerializer(serializers.ModelSerializer): + class Meta: + model = RedoxPotentialDepth + exclude = [] + + +class WaterTurbidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTurbidityDepth + exclude = [] + + +class ChlorineConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = ChlorineConcentrationDepth + exclude = [] + + +class OxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = OxygenConcentrationDepth + exclude = [] + + +class PercentageOxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PercentageOxygenConcentrationDepth + exclude = [] + + +class PhycocyaninDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PhycocyaninDepth + exclude = [] diff --git a/hourly/urls.py b/hourly/urls.py new file mode 100755 index 00000000..09417685 --- /dev/null +++ b/hourly/urls.py @@ -0,0 +1,60 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from django.urls import path +from rest_framework.urlpatterns import format_suffix_patterns + +from . import views + +app_name = "hourly" +urlpatterns = [ + path("polarwind/", views.PolarWindList.as_view()), + # TODO Verify if it's not really needed + # path("dischargecurve/", views.DischargeCurveList.as_view()), + # path("levelfunction/", views.LevelFunctionList.as_view()), + path("precipitation/", views.PrecipitationList.as_view()), + path("airtemperature/", views.AirTemperatureList.as_view()), + path("humidity/", views.HumidityList.as_view()), + path("windvelocity/", views.WindVelocityList.as_view()), + path("winddirection/", views.WindDirectionList.as_view()), + path("soilmoisture/", views.SoilMoistureList.as_view()), + path("solarradiation/", views.SolarRadiationList.as_view()), + path("atmosphericpressure/", views.AtmosphericPressureList.as_view()), + path("watertemperature/", views.WaterTemperatureList.as_view()), + path("flow/", views.FlowList.as_view()), + path("waterlevel/", views.WaterLevelList.as_view()), + path("batteryvoltage/", views.BatteryVoltageList.as_view()), + path("flowmanual/", views.FlowManualList.as_view()), + path("striplevelreading/", views.StripLevelReadingList.as_view()), + path("soiltemperature/", views.SoilTemperatureList.as_view()), + path("indirectradiation/", views.IndirectRadiationList.as_view()), + path("watertemperature_depth/", views.WaterTemperatureDepthList.as_view()), + path("wateracidity_depth/", views.WaterAcidityDepthList.as_view()), + path("redoxpotential_depth/", views.RedoxPotentialDepthList.as_view()), + path("waterturbidity_depth/", views.WaterTurbidityDepthList.as_view()), + path( + "chlorineconcentration_depth/", + views.ChlorineConcentrationDepthList.as_view(), + ), + path( + "oxygenconcentration_depth/", + views.OxygenConcentrationDepthList.as_view(), + ), + path( + "percentageoxygen_depth/", + 
views.PercentageOxygenConcentrationDepthList.as_view(), + ), + path("phycocyanin_depth/", views.PhycocyaninDepthList.as_view()), +] + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/hourly/views.py b/hourly/views.py new file mode 100755 index 00000000..95996b5f --- /dev/null +++ b/hourly/views.py @@ -0,0 +1,401 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from __future__ import unicode_literals + +from django.contrib.auth.decorators import permission_required +from django.contrib.auth.mixins import PermissionRequiredMixin +from django.db import connection +from django.http import HttpResponseRedirect, JsonResponse +from django.shortcuts import render +from django.urls import reverse +from django.views.generic.detail import DetailView +from django.views.generic.edit import CreateView, DeleteView, UpdateView +from rest_framework import generics + +import hourly.models as hour +import hourly.serializers as serializers +# from Hourly.models import DischargeCurve, LevelFunction + +from .filters import ( + # DischargeCurveFilter, + # LevelFunctionFilter, + HourlyFilter, + HourlyFilterDepth, + # PolarWindFilter, +) + +# from Hourly.others.functions import level_function_table + + + + +class HourlyListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + HourlyFilter class to filter the results. + """ + + filterset_class = HourlyFilter + + +class HourlyDepthListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + HourlyFilterDepth class to filter the results. + """ + + filterset_class = HourlyFilterDepth + + +class PrecipitationList(HourlyListBase): + """ + List all measurements of Precipitation. + """ + + queryset = hour.Precipitation.objects.all() + serializer_class = serializers.PrecipitationSerializer + + +class AirTemperatureList(HourlyListBase): + """ + List all Hourlys of Air Temperature. + """ + + queryset = hour.AirTemperature.objects.all() + serializer_class = serializers.AirTemperatureSerializer + + +class HumidityList(HourlyListBase): + """ + List all Hourlys of Humidity. + """ + + queryset = hour.Humidity.objects.all() + serializer_class = serializers.HumiditySerializer + + +class WindVelocityList(HourlyListBase): + """ + List all Hourlys of Wind Velocity. + """ + + queryset = hour.WindVelocity.objects.all() + serializer_class = serializers.WindVelocitySerializer + + +class WindDirectionList(HourlyListBase): + """ + List all Hourlys of Wind Direction. + """ + + queryset = hour.WindDirection.objects.all() + serializer_class = serializers.WindDirectionSerializer + + +class SoilMoistureList(HourlyListBase): + """ + List all Hourlys of Soil Moisture. + """ + + queryset = hour.SoilMoisture.objects.all() + serializer_class = serializers.SoilMoistureSerializer + + +class SolarRadiationList(HourlyListBase): + """ + List all Hourlys of Solar Radiation. 
+ """ + + queryset = hour.SolarRadiation.objects.all() + serializer_class = serializers.SolarRadiationSerializer + + +class AtmosphericPressureList(HourlyListBase): + """ + List all Hourlys of Atmospheric Pressure. + """ + + queryset = hour.AtmosphericPressure.objects.all() + serializer_class = serializers.AtmosphericPressureSerializer + + +class WaterTemperatureList(HourlyListBase): + """ + List all Hourlys of Water Temperature. + """ + + queryset = hour.WaterTemperature.objects.all() + serializer_class = serializers.WaterTemperatureSerializer + + +class FlowList(HourlyListBase): + """ + List all Hourlys of Flow. + """ + + queryset = hour.Flow.objects.all() + serializer_class = serializers.FlowSerializer + + +class WaterLevelList(HourlyListBase): + """ + List all Hourlys of Water Level. + """ + + queryset = hour.WaterLevel.objects.all() + serializer_class = serializers.WaterLevelSerializer + + +class BatteryVoltageList(HourlyListBase): + """ + List all Hourlys of Battery Voltage. + """ + + queryset = hour.BatteryVoltage.objects.all() + serializer_class = serializers.BatteryVoltageSerializer + + +class FlowManualList(HourlyListBase): + """ + List all Hourlys of Flow Manual. + """ + + queryset = hour.FlowManual.objects.all() + serializer_class = serializers.FlowManualSerializer + + +class StripLevelReadingList(HourlyListBase): + """ + List all Hourlys of Strip Level Reading. + """ + + queryset = hour.StripLevelReading.objects.all() + serializer_class = serializers.StripLevelReadingSerializer + + +class SoilTemperatureList(HourlyListBase): + """ + List all Hourlys of Soil Temperature. + """ + + queryset = hour.SoilTemperature.objects.all() + serializer_class = serializers.SoilTemperatureSerializer + + +class IndirectRadiationList(HourlyListBase): + """ + List all Hourlys of Indirect Radiation. + """ + + queryset = hour.IndirectRadiation.objects.all() + serializer_class = serializers.IndirectRadiationSerializer + + +class WaterTemperatureDepthList(HourlyDepthListBase): + """ + List all Hourlys of Water Temperature Depth. + """ + + queryset = hour.WaterTemperatureDepth.objects.all() + serializer_class = serializers.WaterTemperatureDepthSerializer + + +class WaterAcidityDepthList(HourlyDepthListBase): + """ + List all Hourlys of Water Acidity Depth. + """ + + queryset = hour.WaterAcidityDepth.objects.all() + serializer_class = serializers.WaterAcidityDepthSerializer + + +class RedoxPotentialDepthList(HourlyDepthListBase): + """ + List all Hourlys of Redox Potential Depth. + """ + + queryset = hour.RedoxPotentialDepth.objects.all() + serializer_class = serializers.RedoxPotentialDepthSerializer + + +class WaterTurbidityDepthList(HourlyDepthListBase): + """ + List all Hourlys of Water Turbidity Depth. + """ + + queryset = hour.WaterTurbidityDepth.objects.all() + serializer_class = serializers.WaterTurbidityDepthSerializer + + +class ChlorineConcentrationDepthList(HourlyDepthListBase): + """ + List all Hourlys of Chlorine Concentration Depth. + """ + + queryset = hour.ChlorineConcentrationDepth.objects.all() + serializer_class = serializers.ChlorineConcentrationDepthSerializer + + +class OxygenConcentrationDepthList(HourlyDepthListBase): + """ + List all Hourlys of Oxygen Concentration Depth. + """ + + queryset = hour.OxygenConcentrationDepth.objects.all() + serializer_class = serializers.OxygenConcentrationDepthSerializer + + +class PercentageOxygenConcentrationDepthList(HourlyDepthListBase): + """ + List all Hourlys of Percentage Oxygen Concentration Depth. 
+ """ + + queryset = hour.PercentageOxygenConcentrationDepth.objects.all() + serializer_class = serializers.PercentageOxygenConcentrationDepthSerializer + + +class PhycocyaninDepthList(HourlyDepthListBase): + """ + List all Hourlys of Phycocyanin Depth. + """ + + queryset = hour.PhycocyaninDepth.objects.all() + serializer_class = serializers.PhycocyaninDepthSerializer + + +######################################################################################## +# TODO: Revisit theses specialised views that use level_function_table() and create +# Django Rest Framework equivalents. +######################################################################################## + + +# class DischargeCurveDetail(PermissionRequiredMixin, DetailView): +# model = DischargeCurve +# permission_required = "Hourly.view_dischargecurve" +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# dischargecurve_id = self.object.pk +# context["levelfunctiontable"] = level_function_table(dischargecurve_id) +# return context + + +# class LevelFunctionCreate(PermissionRequiredMixin, CreateView): +# permission_required = "Hourly.add_dischargecurve" +# model = LevelFunction +# form_class = LevelFunctionForm +# +# def post(self, request, *args, **kwargs): +# dischargecurve_id = kwargs.get("id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# form = LevelFunctionForm(self.request.POST or None) +# try: +# # Verify if form is correct +# levelfunction = form.save(commit=False) +# except Exception: +# # If it is not, send an informative message. +# _levelfunctiontable = level_function_table(dischargecurve_id) +# new_levelfunction = render( +# request, +# "measurement/levelfunction_form.html", +# {"form": LevelFunctionForm(self.request.POST or None)}, +# ) +# return render( +# request, +# "measurement/dischargecurve_detail.html", +# { +# "dischargecurve": dischargecurve, +# "levelfunctiontable": _levelfunctiontable, +# "new_levelfunction": new_levelfunction.content.decode("utf-8"), +# }, +# ) +# levelfunction.dischargecurve = dischargecurve +# levelfunction.save() +# dischargecurve.requiere_recalculo_caudal = True +# dischargecurve.save() +# url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} +# ) +# return HttpResponseRedirect(url) +# +# def get_context_data(self, **kwargs): +# context = super(LevelFunctionCreate, self).get_context_data(**kwargs) +# context["title"] = "Create" +# dischargecurve_id = self.kwargs.get("id") +# context["url"] = reverse( +# "measurement:levelfunction_create", kwargs={"id": dischargecurve_id} +# ) +# return context +# +# +# class LevelFunctionUpdate(PermissionRequiredMixin, UpdateView): +# permission_required = "Hourly.change_dischargecurve" +# model = LevelFunction +# fields = ["level", "function"] +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# context["title"] = "Modify" +# levelfunction_pk = self.kwargs.get("pk") +# context["url"] = reverse( +# "measurement:levelfunction_update", kwargs={"pk": levelfunction_pk} +# ) +# context["dischargecurve_id"] = self.object.dischargecurve.id +# return context +# +# def post(self, request, *args, **kwargs): +# data = request.POST.copy() +# dischargecurve_id = data.get("dischargecurve_id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.success_url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": 
dischargecurve_id}
+#         )
+#         return super().post(data, **kwargs)
+#
+#
+# class LevelFunctionDelete(PermissionRequiredMixin, DeleteView):
+#     permission_required = "Hourly.delete_dischargecurve"
+#     model = LevelFunction
+#
+#     def delete(self, request, *args, **kwargs):
+#         self.object = self.get_object()
+#         dischargecurve = self.object.dischargecurve
+#         dischargecurve.require_recalculate_flow = True
+#         dischargecurve.save()
+#         self.object.delete()
+#         return HttpResponseRedirect(
+#             reverse(
+#                 "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve.id}
+#             )
+#         )
+#
+#
+# @permission_required("Hourly.add_dischargecurve")
+# def recalculate_flow(request):
+#     dischargecurve_id = int(request.POST.get("dischargecurve_id", None))
+#     sql = "SELECT calculate_flow(%s);"
+#     try:
+#         with connection.cursor() as cursor:
+#             cursor.execute(sql, [dischargecurve_id])
+#             cursor.fetchone()
+#     except Exception:
+#         result = {"res": False}
+#         return JsonResponse(result)
+#     dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id)
+#     dischargecurve.require_recalculate_flow = False
+#     dischargecurve.save()
+#     result = {"res": True}
+#     return JsonResponse(result)
diff --git a/measurement/models.py b/measurement/models.py
index 0c0bc499..abfac0c5 100755
--- a/measurement/models.py
+++ b/measurement/models.py
@@ -156,7 +156,9 @@ class Meta:
         attrs,
     )
 
-
+# TODO: Note that the generated tables (e.g. measurement_precipitation) do not inherit
+# the multi-column indexes declared on the abstract base; they only get the primary-key
+# index. Models with explicit indexes (e.g. WaterTemperatureDepth) do show them in the database.
 class Precipitation(create_meas_model(digits=6, decimals=2, fields=("Value",))):
     """Precipitation."""
 
diff --git a/monthly/__init__.py b/monthly/__init__.py
new file mode 100755
index 00000000..e69de29b
diff --git a/monthly/apps.py b/monthly/apps.py
new file mode 100755
index 00000000..e5f54372
--- /dev/null
+++ b/monthly/apps.py
@@ -0,0 +1,20 @@
+########################################################################################
+# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos
+# (iMHEA)basada en los desarrollos realizados por:
+# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador.
+# Contacto: info@fonag.org.ec
+# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS),
+# Ecuador.
+# Contacto: paramh2o@aguaquito.gob.ec
+#
+# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones
+# creadoras, ya sea en uso total o parcial del código.
+########################################################################################
+
+from __future__ import unicode_literals
+
+from django.apps import AppConfig
+
+
+class MonthlyConfig(AppConfig):
+    name = "monthly"
diff --git a/monthly/filters.py b/monthly/filters.py
new file mode 100755
index 00000000..ce28398e
--- /dev/null
+++ b/monthly/filters.py
@@ -0,0 +1,30 @@
+from django_filters import rest_framework as filters
+from station.models import Station
+
+
+
+class DailyFilter(filters.FilterSet):
+    """
+    Filter class for monthly data that are not Polar Wind, Discharge Curve, or Level Function
+    and that have no depth information.
+ """ + + date = filters.DateFilter(field_name="date", lookup_expr="exact") + min_date = filters.DateFilter(field_name="date", lookup_expr="gte") + max_date = filters.DateFilter(field_name="date", lookup_expr="lte") + value = filters.NumberFilter(field_name="value", lookup_expr="exact") + min_value = filters.NumberFilter(field_name="value", lookup_expr="gte") + max_value = filters.NumberFilter(field_name="value", lookup_expr="lte") + station_id = filters.NumberFilter(field_name="station_id", lookup_expr="exact") + used_for_daily = filters.BooleanFilter(fieldname='used_for_daily', lookup_expr="exact") + + +class DailyFilterDepth(DailyFilter): + """ + Filter class for hourlys that are not Polar Wind, Discharge Curve, or Level Function + and that have depth information. + """ + + depth = filters.NumberFilter(field_name="depth", lookup_expr="exact") + min_depth = filters.NumberFilter(field_name="depth", lookup_expr="gte") + max_depth = filters.NumberFilter(field_name="depth", lookup_expr="lte") diff --git a/monthly/migrations/__init__.py b/monthly/migrations/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/monthly/models.py b/monthly/models.py new file mode 100755 index 00000000..2f4eaa5a --- /dev/null +++ b/monthly/models.py @@ -0,0 +1,293 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## +from __future__ import unicode_literals + +from typing import List, Type + +from django.db import models +from django.urls import reverse +from timescale.db.models.models import TimescaleModel +from django.core.exceptions import ValidationError + +from station.models import Station + +MONTHLYS: List[str] = [] +"""Available monthly variables.""" + +# TODO check if PolarWind is needed in monthly +class PolarWind(TimescaleModel): + """ + Polar Wind monthly with a velocity and direction at a specific time. + """ + + speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True) + direction = models.DecimalField( + "Direction", max_digits=14, decimal_places=6, null=True + ) + + class Meta: + """ + Para que no se cree en la migracion. + + NOTE: Why don't we want this in the migration? 
+        """
+
+        default_permissions = ()
+        managed = False
+
+
+
+
+class BaseMonthly(TimescaleModel):
+    @classmethod
+    def __init_subclass__(cls, *args, **kwargs) -> None:
+        if not cls.__name__.startswith("_Mon") and cls.__name__ not in MONTHLYS:
+            MONTHLYS.append(cls.__name__)
+
+    # TODO ask if "date" name is OK
+    # TODO ask if default=timezone.now is OK,
+    # date = models.DateField(default=timezone.now)
+    date = models.DateField("date")
+    station_id = models.PositiveIntegerField("station_id")
+    completeness = models.DecimalField(max_digits=4, decimal_places=1)
+
+    class Meta:
+        default_permissions = ()
+        indexes = [
+            models.Index(fields=["used_for_monthly"]),
+            models.Index(fields=["station_id", "time"]),
+            models.Index(fields=["time", "station_id"]),
+        ]
+        abstract = True
+
+
+def create_Mon_model(
+    digits=14, decimals=6, fields=("Average",)
+) -> Type[TimescaleModel]:
+    num = len(MONTHLYS) + 1
+    _fields = {
+        key.lower(): models.DecimalField(
+            key,
+            max_digits=digits,
+            decimal_places=decimals,
+            null=True,
+        )
+        for key in fields
+    }
+
+    class Meta:
+        abstract = True
+
+    attrs = {"__module__": __name__, "Meta": Meta}
+    attrs.update(_fields)
+
+    return type(
+        f"_Mon{num}",
+        (BaseMonthly,),
+        attrs,
+    )
+
+
+class Precipitation(create_Mon_model(digits=6, decimals=2, fields=("Total",))):
+    """Precipitation."""
+
+
+class AirTemperature(create_Mon_model(digits=5, decimals=2)):
+    """Air temperature."""
+
+
+class Humidity(create_Mon_model()):
+    """Humidity."""
+
+
+class WindVelocity(create_Mon_model()):
+    """Wind velocity."""
+
+
+class WindDirection(create_Mon_model()):
+    """Wind direction."""
+
+
+class SoilMoisture(create_Mon_model()):
+    """Soil moisture."""
+
+
+class SolarRadiation(create_Mon_model()):
+    """Solar radiation."""
+
+
+class AtmosphericPressure(create_Mon_model()):
+    """Atmospheric pressure."""
+
+
+class WaterTemperature(create_Mon_model()):
+    """Water temperature."""
+
+
+class Flow(create_Mon_model()):
+    """Flow."""
+
+
+class WaterLevel(create_Mon_model()):
+    """Water level."""
+
+
+class BatteryVoltage(create_Mon_model()):
+    """Battery voltage."""
+
+
+class FlowManual(create_Mon_model(fields=("Value",))):
+    """Flow (manual)."""
+
+
+# TODO Check whether a StripLevelReading monthly model is needed.
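+# Illustrative aside (not part of the original schema): because BaseMonthly registers
+# every concrete subclass in MONTHLYS, callers could iterate over the available monthly
+# variables generically; a minimal sketch, assuming this module is importable:
+#
+#     import monthly.models as monthly_models
+#
+#     for name in monthly_models.MONTHLYS:
+#         model = getattr(monthly_models, name)  # e.g. Precipitation, Flow, ...
+#         print(name, model.objects.count())     # number of stored monthly records
+#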
+class StripLevelReading(create_Mon_model(fields=("Value", "Uncertainty"))): + """Strip level reading.""" + + data_import_date = models.DateTimeField("Data import date") + data_start_date = models.DateTimeField("Data start date") + calibrated = models.BooleanField("Calibrated") + comments = models.CharField("Comments", null=True, max_length=250) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "data_import_date"]), + models.Index(fields=["station_id", "data_start_date", "time"]), + models.Index(fields=["data_import_date"]), + ] + + +class SoilTemperature(create_Mon_model()): + """Soil temperature.""" + + +class IndirectRadiation(create_Mon_model()): + """Indirect radiation.""" + + +# Variables created for buoy with different depths +class WaterTemperatureDepth( + create_Mon_model(digits=6, decimals=2,), +): + """Water temperature (degrees celcius) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterAcidityDepth( + create_Mon_model(digits=6, decimals=2,), +): + """Water acidity (pH) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class RedoxPotentialDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Redox potential (mV) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterTurbidityDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Water turbidity (NTU) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class ChlorineConcentrationDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Chlorine concentration (ug/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class OxygenConcentrationDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Oxygen concentration (mg/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PercentageOxygenConcentrationDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Percentage oxygen concentration (mg/l) at a depth in cm. + + HELPWANTED: Is this wrong? It's teh same as above, perhaps units should + be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. + """ + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PhycocyaninDepth( + create_Mon_model(digits=6, decimals=2, ), +): + """Phycocyanin (?) 
at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] diff --git a/monthly/serializers.py b/monthly/serializers.py new file mode 100755 index 00000000..d6acafab --- /dev/null +++ b/monthly/serializers.py @@ -0,0 +1,193 @@ +from rest_framework import serializers + +from .models import ( + AirTemperature, + AtmosphericPressure, + BatteryVoltage, + ChlorineConcentrationDepth, + # DischargeCurve, + Flow, + FlowManual, + Humidity, + IndirectRadiation, + # LevelFunction, + OxygenConcentrationDepth, + PercentageOxygenConcentrationDepth, + PhycocyaninDepth, + PolarWind, + Precipitation, + RedoxPotentialDepth, + SoilMoisture, + SoilTemperature, + SolarRadiation, + StripLevelReading, + WaterAcidityDepth, + WaterLevel, + WaterTemperature, + WaterTemperatureDepth, + WaterTurbidityDepth, + WindDirection, + WindVelocity, +) + + +class PolarWindSerializer(serializers.ModelSerializer): + class Meta: + model = PolarWind + exclude = [] + +# TODO Confirm if DischargeCurveSerializer is not needed in Validated Models +# class DischargeCurveSerializer(serializers.ModelSerializer): +# class Meta: +# model = DischargeCurve +# exclude = [] + + +# class LevelFunctionSerializer(serializers.ModelSerializer): +# class Meta: +# model = LevelFunction +# exclude = [] + + +class PrecipitationSerializer(serializers.ModelSerializer): + class Meta: + model = Precipitation + exclude = [] + + +class AirTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = AirTemperature + exclude = [] + + +class HumiditySerializer(serializers.ModelSerializer): + class Meta: + model = Humidity + exclude = [] + + +class WindVelocitySerializer(serializers.ModelSerializer): + class Meta: + model = WindVelocity + exclude = [] + + +class WindDirectionSerializer(serializers.ModelSerializer): + class Meta: + model = WindDirection + exclude = [] + + +class SoilMoistureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilMoisture + exclude = [] + + +class SolarRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = SolarRadiation + exclude = [] + + +class AtmosphericPressureSerializer(serializers.ModelSerializer): + class Meta: + model = AtmosphericPressure + exclude = [] + + +class WaterTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperature + exclude = [] + + +class FlowSerializer(serializers.ModelSerializer): + class Meta: + model = Flow + exclude = [] + + +class WaterLevelSerializer(serializers.ModelSerializer): + class Meta: + model = WaterLevel + exclude = [] + + +class BatteryVoltageSerializer(serializers.ModelSerializer): + class Meta: + model = BatteryVoltage + exclude = [] + + +class FlowManualSerializer(serializers.ModelSerializer): + class Meta: + model = FlowManual + exclude = [] + + +class StripLevelReadingSerializer(serializers.ModelSerializer): + class Meta: + model = StripLevelReading + exclude = [] + + +class SoilTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilTemperature + exclude = [] + + +class IndirectRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = IndirectRadiation + exclude = [] + + +class WaterTemperatureDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperatureDepth + exclude = [] + + +class WaterAcidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterAcidityDepth + exclude = [] + + +class 
RedoxPotentialDepthSerializer(serializers.ModelSerializer): + class Meta: + model = RedoxPotentialDepth + exclude = [] + + +class WaterTurbidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTurbidityDepth + exclude = [] + + +class ChlorineConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = ChlorineConcentrationDepth + exclude = [] + + +class OxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = OxygenConcentrationDepth + exclude = [] + + +class PercentageOxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PercentageOxygenConcentrationDepth + exclude = [] + + +class PhycocyaninDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PhycocyaninDepth + exclude = [] diff --git a/monthly/urls.py b/monthly/urls.py new file mode 100755 index 00000000..09417685 --- /dev/null +++ b/monthly/urls.py @@ -0,0 +1,60 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from django.urls import path +from rest_framework.urlpatterns import format_suffix_patterns + +from . import views + +app_name = "hourly" +urlpatterns = [ + path("polarwind/", views.PolarWindList.as_view()), + # TODO Verify if it's not really needed + # path("dischargecurve/", views.DischargeCurveList.as_view()), + # path("levelfunction/", views.LevelFunctionList.as_view()), + path("precipitation/", views.PrecipitationList.as_view()), + path("airtemperature/", views.AirTemperatureList.as_view()), + path("humidity/", views.HumidityList.as_view()), + path("windvelocity/", views.WindVelocityList.as_view()), + path("winddirection/", views.WindDirectionList.as_view()), + path("soilmoisture/", views.SoilMoistureList.as_view()), + path("solarradiation/", views.SolarRadiationList.as_view()), + path("atmosphericpressure/", views.AtmosphericPressureList.as_view()), + path("watertemperature/", views.WaterTemperatureList.as_view()), + path("flow/", views.FlowList.as_view()), + path("waterlevel/", views.WaterLevelList.as_view()), + path("batteryvoltage/", views.BatteryVoltageList.as_view()), + path("flowmanual/", views.FlowManualList.as_view()), + path("striplevelreading/", views.StripLevelReadingList.as_view()), + path("soiltemperature/", views.SoilTemperatureList.as_view()), + path("indirectradiation/", views.IndirectRadiationList.as_view()), + path("watertemperature_depth/", views.WaterTemperatureDepthList.as_view()), + path("wateracidity_depth/", views.WaterAcidityDepthList.as_view()), + path("redoxpotential_depth/", views.RedoxPotentialDepthList.as_view()), + path("waterturbidity_depth/", views.WaterTurbidityDepthList.as_view()), + path( + "chlorineconcentration_depth/", + views.ChlorineConcentrationDepthList.as_view(), + ), + path( + "oxygenconcentration_depth/", + views.OxygenConcentrationDepthList.as_view(), + ), + path( + "percentageoxygen_depth/", + 
views.PercentageOxygenConcentrationDepthList.as_view(), + ), + path("phycocyanin_depth/", views.PhycocyaninDepthList.as_view()), +] + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/monthly/views.py b/monthly/views.py new file mode 100755 index 00000000..87742936 --- /dev/null +++ b/monthly/views.py @@ -0,0 +1,401 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from __future__ import unicode_literals + +from django.contrib.auth.decorators import permission_required +from django.contrib.auth.mixins import PermissionRequiredMixin +from django.db import connection +from django.http import HttpResponseRedirect, JsonResponse +from django.shortcuts import render +from django.urls import reverse +from django.views.generic.detail import DetailView +from django.views.generic.edit import CreateView, DeleteView, UpdateView +from rest_framework import generics + +import daily.models as day +import daily.serializers as serializers +# from Daily.models import DischargeCurve, LevelFunction + +from .filters import ( + # DischargeCurveFilter, + # LevelFunctionFilter, + DailyFilter, + DailyFilterDepth, + # PolarWindFilter, +) + +# from Daily.others.functions import level_function_table + + + + +class DailyListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + DailyFilter class to filter the results. + """ + + filterset_class = DailyFilter + + +class DailyDepthListBase(generics.ListAPIView): + """ + Base class for the measurement list views that all use the + DailyFilterDepth class to filter the results. + """ + + filterset_class = DailyFilterDepth + + +class PrecipitationList(DailyListBase): + """ + List all measurements of Precipitation. + """ + + queryset = day.Precipitation.objects.all() + serializer_class = serializers.PrecipitationSerializer + + +class AirTemperatureList(DailyListBase): + """ + List all Dailys of Air Temperature. + """ + + queryset = day.AirTemperature.objects.all() + serializer_class = serializers.AirTemperatureSerializer + + +class HumidityList(DailyListBase): + """ + List all Dailys of Humidity. + """ + + queryset = day.Humidity.objects.all() + serializer_class = serializers.HumiditySerializer + + +class WindVelocityList(DailyListBase): + """ + List all Dailys of Wind Velocity. + """ + + queryset = day.WindVelocity.objects.all() + serializer_class = serializers.WindVelocitySerializer + + +class WindDirectionList(DailyListBase): + """ + List all Dailys of Wind Direction. + """ + + queryset = day.WindDirection.objects.all() + serializer_class = serializers.WindDirectionSerializer + + +class SoilMoistureList(DailyListBase): + """ + List all Dailys of Soil Moisture. + """ + + queryset = day.SoilMoisture.objects.all() + serializer_class = serializers.SoilMoistureSerializer + + +class SolarRadiationList(DailyListBase): + """ + List all Dailys of Solar Radiation. 
+ """ + + queryset = day.SolarRadiation.objects.all() + serializer_class = serializers.SolarRadiationSerializer + + +class AtmosphericPressureList(DailyListBase): + """ + List all Dailys of Atmospheric Pressure. + """ + + queryset = day.AtmosphericPressure.objects.all() + serializer_class = serializers.AtmosphericPressureSerializer + + +class WaterTemperatureList(DailyListBase): + """ + List all Dailys of Water Temperature. + """ + + queryset = day.WaterTemperature.objects.all() + serializer_class = serializers.WaterTemperatureSerializer + + +class FlowList(DailyListBase): + """ + List all Dailys of Flow. + """ + + queryset = day.Flow.objects.all() + serializer_class = serializers.FlowSerializer + + +class WaterLevelList(DailyListBase): + """ + List all Dailys of Water Level. + """ + + queryset = day.WaterLevel.objects.all() + serializer_class = serializers.WaterLevelSerializer + + +class BatteryVoltageList(DailyListBase): + """ + List all Dailys of Battery Voltage. + """ + + queryset = day.BatteryVoltage.objects.all() + serializer_class = serializers.BatteryVoltageSerializer + + +class FlowManualList(DailyListBase): + """ + List all Dailys of Flow Manual. + """ + + queryset = day.FlowManual.objects.all() + serializer_class = serializers.FlowManualSerializer + + +class StripLevelReadingList(DailyListBase): + """ + List all Dailys of Strip Level Reading. + """ + + queryset = day.StripLevelReading.objects.all() + serializer_class = serializers.StripLevelReadingSerializer + + +class SoilTemperatureList(DailyListBase): + """ + List all Dailys of Soil Temperature. + """ + + queryset = day.SoilTemperature.objects.all() + serializer_class = serializers.SoilTemperatureSerializer + + +class IndirectRadiationList(DailyListBase): + """ + List all Dailys of Indirect Radiation. + """ + + queryset = day.IndirectRadiation.objects.all() + serializer_class = serializers.IndirectRadiationSerializer + + +class WaterTemperatureDepthList(DailyDepthListBase): + """ + List all Dailys of Water Temperature Depth. + """ + + queryset = day.WaterTemperatureDepth.objects.all() + serializer_class = serializers.WaterTemperatureDepthSerializer + + +class WaterAcidityDepthList(DailyDepthListBase): + """ + List all Dailys of Water Acidity Depth. + """ + + queryset = day.WaterAcidityDepth.objects.all() + serializer_class = serializers.WaterAcidityDepthSerializer + + +class RedoxPotentialDepthList(DailyDepthListBase): + """ + List all Dailys of Redox Potential Depth. + """ + + queryset = day.RedoxPotentialDepth.objects.all() + serializer_class = serializers.RedoxPotentialDepthSerializer + + +class WaterTurbidityDepthList(DailyDepthListBase): + """ + List all Dailys of Water Turbidity Depth. + """ + + queryset = day.WaterTurbidityDepth.objects.all() + serializer_class = serializers.WaterTurbidityDepthSerializer + + +class ChlorineConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Chlorine Concentration Depth. + """ + + queryset = day.ChlorineConcentrationDepth.objects.all() + serializer_class = serializers.ChlorineConcentrationDepthSerializer + + +class OxygenConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Oxygen Concentration Depth. + """ + + queryset = day.OxygenConcentrationDepth.objects.all() + serializer_class = serializers.OxygenConcentrationDepthSerializer + + +class PercentageOxygenConcentrationDepthList(DailyDepthListBase): + """ + List all Dailys of Percentage Oxygen Concentration Depth. 
+ """ + + queryset = day.PercentageOxygenConcentrationDepth.objects.all() + serializer_class = serializers.PercentageOxygenConcentrationDepthSerializer + + +class PhycocyaninDepthList(DailyDepthListBase): + """ + List all Dailys of Phycocyanin Depth. + """ + + queryset = day.PhycocyaninDepth.objects.all() + serializer_class = serializers.PhycocyaninDepthSerializer + + +######################################################################################## +# TODO: Revisit theses specialised views that use level_function_table() and create +# Django Rest Framework equivalents. +######################################################################################## + + +# class DischargeCurveDetail(PermissionRequiredMixin, DetailView): +# model = DischargeCurve +# permission_required = "Daily.view_dischargecurve" +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# dischargecurve_id = self.object.pk +# context["levelfunctiontable"] = level_function_table(dischargecurve_id) +# return context + + +# class LevelFunctionCreate(PermissionRequiredMixin, CreateView): +# permission_required = "Daily.add_dischargecurve" +# model = LevelFunction +# form_class = LevelFunctionForm +# +# def post(self, request, *args, **kwargs): +# dischargecurve_id = kwargs.get("id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# form = LevelFunctionForm(self.request.POST or None) +# try: +# # Verify if form is correct +# levelfunction = form.save(commit=False) +# except Exception: +# # If it is not, send an informative message. +# _levelfunctiontable = level_function_table(dischargecurve_id) +# new_levelfunction = render( +# request, +# "measurement/levelfunction_form.html", +# {"form": LevelFunctionForm(self.request.POST or None)}, +# ) +# return render( +# request, +# "measurement/dischargecurve_detail.html", +# { +# "dischargecurve": dischargecurve, +# "levelfunctiontable": _levelfunctiontable, +# "new_levelfunction": new_levelfunction.content.decode("utf-8"), +# }, +# ) +# levelfunction.dischargecurve = dischargecurve +# levelfunction.save() +# dischargecurve.requiere_recalculo_caudal = True +# dischargecurve.save() +# url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} +# ) +# return HttpResponseRedirect(url) +# +# def get_context_data(self, **kwargs): +# context = super(LevelFunctionCreate, self).get_context_data(**kwargs) +# context["title"] = "Create" +# dischargecurve_id = self.kwargs.get("id") +# context["url"] = reverse( +# "measurement:levelfunction_create", kwargs={"id": dischargecurve_id} +# ) +# return context +# +# +# class LevelFunctionUpdate(PermissionRequiredMixin, UpdateView): +# permission_required = "Daily.change_dischargecurve" +# model = LevelFunction +# fields = ["level", "function"] +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# context["title"] = "Modify" +# levelfunction_pk = self.kwargs.get("pk") +# context["url"] = reverse( +# "measurement:levelfunction_update", kwargs={"pk": levelfunction_pk} +# ) +# context["dischargecurve_id"] = self.object.dischargecurve.id +# return context +# +# def post(self, request, *args, **kwargs): +# data = request.POST.copy() +# dischargecurve_id = data.get("dischargecurve_id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.success_url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": 
dischargecurve_id}
+#         )
+#         return super().post(data, **kwargs)
+#
+#
+# class LevelFunctionDelete(PermissionRequiredMixin, DeleteView):
+#     permission_required = "Daily.delete_dischargecurve"
+#     model = LevelFunction
+#
+#     def delete(self, request, *args, **kwargs):
+#         self.object = self.get_object()
+#         dischargecurve = self.object.dischargecurve
+#         dischargecurve.require_recalculate_flow = True
+#         dischargecurve.save()
+#         self.object.delete()
+#         return HttpResponseRedirect(
+#             reverse(
+#                 "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve.id}
+#             )
+#         )
+#
+#
+# @permission_required("Daily.add_dischargecurve")
+# def recalculate_flow(request):
+#     dischargecurve_id = int(request.POST.get("dischargecurve_id", None))
+#     sql = "SELECT calculate_flow(%s);"
+#     try:
+#         with connection.cursor() as cursor:
+#             cursor.execute(sql, [dischargecurve_id])
+#             cursor.fetchone()
+#     except Exception:
+#         result = {"res": False}
+#         return JsonResponse(result)
+#     dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id)
+#     dischargecurve.require_recalculate_flow = False
+#     dischargecurve.save()
+#     result = {"res": True}
+#     return JsonResponse(result)
diff --git a/station/models.py b/station/models.py
index 13709a5d..20544cf8 100755
--- a/station/models.py
+++ b/station/models.py
@@ -274,3 +274,26 @@ def get_absolute_url(self):
 
     class Meta:
         ordering = ("station_id",)
+
+
+# TODO Discuss if it's really necessary to implement multiple deltaTs for different dates
+class DeltaT(models.Model):
+    """
+    Delta T: interval of data acquisition (in minutes).
+    """
+    id = models.AutoField("Id", primary_key=True)
+    station = models.ForeignKey(
+        Station,
+        on_delete=models.SET_NULL, null=True,
+        verbose_name="Station",
+    )
+    delta_t = models.PositiveSmallIntegerField()
+
+    def __str__(self):
+        return str(self.station.station_code + ' - ' + str(self.delta_t))
+
+    def get_absolute_url(self):
+        return reverse("station:delta_t_detail", kwargs={"pk": self.pk})
+
+    class Meta:
+        ordering = ("id",)
\ No newline at end of file
diff --git a/validated/__init__.py b/validated/__init__.py
new file mode 100755
index 00000000..e69de29b
diff --git a/validated/apps.py b/validated/apps.py
new file mode 100755
index 00000000..58e52acf
--- /dev/null
+++ b/validated/apps.py
@@ -0,0 +1,20 @@
+########################################################################################
+# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos
+# (iMHEA)basada en los desarrollos realizados por:
+# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador.
+# Contacto: info@fonag.org.ec
+# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS),
+# Ecuador.
+# Contacto: paramh2o@aguaquito.gob.ec
+#
+# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones
+# creadoras, ya sea en uso total o parcial del código.
+########################################################################################
+
+from __future__ import unicode_literals
+
+from django.apps import AppConfig
+
+
+class ValidatedConfig(AppConfig):
+    name = "validated"
diff --git a/validated/filters.py b/validated/filters.py
new file mode 100755
index 00000000..f759693e
--- /dev/null
+++ b/validated/filters.py
@@ -0,0 +1,80 @@
+from django_filters import rest_framework as filters
+
+# from validated.models import DischargeCurve
+from station.models import Station
+
+
+# class PolarWindFilter(filters.FilterSet):
+#     """
+#     Filter class for the Polar Wind validated data.
+#     """
+#
+#     time = filters.DateTimeFilter(field_name="time", lookup_expr="exact")
+#     min_time = filters.DateTimeFilter(field_name="time", lookup_expr="gte")
+#     max_time = filters.DateTimeFilter(field_name="time", lookup_expr="lte")
+#     speed = filters.NumberFilter(field_name="speed", lookup_expr="exact")
+#     min_speed = filters.NumberFilter(field_name="speed", lookup_expr="gte")
+#     max_speed = filters.NumberFilter(field_name="speed", lookup_expr="lte")
+#     direction = filters.NumberFilter(field_name="direction", lookup_expr="exact")
+#     min_direction = filters.NumberFilter(field_name="direction", lookup_expr="gte")
+#     max_direction = filters.NumberFilter(field_name="direction", lookup_expr="lte")
+
+
+# class DischargeCurveFilter(filters.FilterSet):
+#     """
+#     Filter class for the Discharge Curve validated data.
+#     """
+#
+#     time = filters.DateTimeFilter(field_name="time", lookup_expr="exact")
+#     min_time = filters.DateTimeFilter(field_name="time", lookup_expr="gte")
+#     max_time = filters.DateTimeFilter(field_name="time", lookup_expr="lte")
+#     require_recalculate_flow = filters.BooleanFilter(
+#         field_name="require_recalculate_flow"
+#     )
+#     station = filters.ModelChoiceFilter(
+#         field_name="station", queryset=Station.objects.all()
+#     )
+
+
+# class LevelFunctionFilter(filters.FilterSet):
+#     """
+#     Filter class for the Level Function validated data.
+#     """
+#
+#     time = filters.DateTimeFilter(field_name="time", lookup_expr="exact")
+#     min_time = filters.DateTimeFilter(field_name="time", lookup_expr="gte")
+#     max_time = filters.DateTimeFilter(field_name="time", lookup_expr="lte")
+#     discharge_curve = filters.ModelChoiceFilter(
+#         field_name="discharge_curve", queryset=DischargeCurve.objects.all()
+#     )
+#     level = filters.NumberFilter(field_name="level", lookup_expr="exact")
+#     min_level = filters.NumberFilter(field_name="level", lookup_expr="gte")
+#     max_level = filters.NumberFilter(field_name="level", lookup_expr="lte")
+#     function = filters.CharFilter(field_name="function", lookup_expr="icontains")
+
+
+class ValidatedFilter(filters.FilterSet):
+    """
+    Filter class for validated data that are not Polar Wind, Discharge Curve, or Level Function
+    and that have no depth information.
+    """
+
+    time = filters.DateTimeFilter(field_name="time", lookup_expr="exact")
+    min_time = filters.DateTimeFilter(field_name="time", lookup_expr="gte")
+    max_time = filters.DateTimeFilter(field_name="time", lookup_expr="lte")
+    value = filters.NumberFilter(field_name="value", lookup_expr="exact")
+    min_value = filters.NumberFilter(field_name="value", lookup_expr="gte")
+    max_value = filters.NumberFilter(field_name="value", lookup_expr="lte")
+    station_id = filters.NumberFilter(field_name="station_id", lookup_expr="exact")
+    # TODO Include used_for_hourly
+    used_for_hourly = filters.BooleanFilter(field_name="used_for_hourly", lookup_expr="exact")
+
+class ValidatedFilterDepth(ValidatedFilter):
+    """
+    Filter class for validated data that are not Polar Wind, Discharge Curve, or Level Function
+    and that have depth information.
+ """ + + depth = filters.NumberFilter(field_name="depth", lookup_expr="exact") + min_depth = filters.NumberFilter(field_name="depth", lookup_expr="gte") + max_depth = filters.NumberFilter(field_name="depth", lookup_expr="lte") diff --git a/validated/migrations/__init__.py b/validated/migrations/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/validated/models.py b/validated/models.py new file mode 100755 index 00000000..f2bd32d7 --- /dev/null +++ b/validated/models.py @@ -0,0 +1,290 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## +from __future__ import unicode_literals + +from typing import List, Type + +from django.db import models +from django.urls import reverse +from timescale.db.models.models import TimescaleModel +import measurement.models as meas +from station.models import Station + +VALIDATEDS: List[str] = [] +"""Available validated variables.""" + + + + +class PolarWind(TimescaleModel): + """ + Polar Wind validated with a velocity and direction at a specific time. + """ + + speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True) + direction = models.DecimalField( + "Direction", max_digits=14, decimal_places=6, null=True + ) + + class Meta: + """ + Para que no se cree en la migracion. + + NOTE: Why don't we want this in the migration? 
+ """ + + default_permissions = () + managed = False + + +class BaseValidated(TimescaleModel): + @classmethod + def __init_subclass__(cls, *args, **kwargs) -> None: + if not cls.__name__.startswith("_Vali") and cls.__name__ not in VALIDATEDS: + VALIDATEDS.append(cls.__name__) + + station_id = models.PositiveIntegerField("station_id") + used_for_hourly = models.BooleanField("used_for_hourly", default=False) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["used_for_hourly"]), + models.Index(fields=["station_id", "time"]), + models.Index(fields=["time", "station_id"]), + ] + abstract = True + + +def create_vali_model( + digits=14, decimals=6, fields=("Value", "Maximum", "Minimum") +) -> Type[TimescaleModel]: + num = len(VALIDATEDS) + 1 + _fields = { + key.lower(): models.DecimalField( + key, + max_digits=digits, + decimal_places=decimals, + null=True, + ) + for key in fields + } + + class Meta: + abstract = True + + attrs = {"__module__": __name__, "Meta": Meta} + attrs.update(_fields) + + return type( + f"_Vali{num}", + (BaseValidated,), + attrs, + ) + +# TODO Copy the decimal_places and max_digits from a measurement model +# meas.Precipitation._meta.get_field('Value').max_digits +# meas.Precipitation._meta.get_field('Value').decimal_places +class Precipitation(create_vali_model(digits=6, decimals=2, fields=("Total",))): + """Precipitation.""" + + +class AirTemperature(create_vali_model(digits=5, decimals=2)): + """Air temperature.""" + + +class Humidity(create_vali_model()): + """Humidity.""" + + +class WindVelocity(create_vali_model()): + """Wind velocity.""" + + +class WindDirection(create_vali_model()): + """Wind direction.""" + + +class SoilMoisture(create_vali_model()): + """Soil moisture.""" + + +class SolarRadiation(create_vali_model()): + """Solar radiation.""" + + +class AtmosphericPressure(create_vali_model()): + """Atmospheric pressure.""" + + +class WaterTemperature(create_vali_model()): + """Water temperature.""" + + +class Flow(create_vali_model()): + """Flow.""" + + +class WaterLevel(create_vali_model()): + """Water level.""" + + +class BatteryVoltage(create_vali_model()): + """Battery voltage.""" + + +class FlowManual(create_vali_model(fields=("Value",))): + """Flow (manual).""" + + +# TODO Check if There id needed StripLevelReading validated. 
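+# Illustrative sketch for the digits TODO above (an assumption, not the adopted design):
+# the precision could be mirrored from the corresponding measurement model instead of
+# being hard-coded, e.g.
+#
+#     _value = meas.Precipitation._meta.get_field("value")  # factory lower-cases field names
+#     class Precipitation(
+#         create_vali_model(
+#             digits=_value.max_digits,
+#             decimals=_value.decimal_places,
+#             fields=("Total",),
+#         )
+#     ):
+#         """Precipitation."""
+#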
+class StripLevelReading(create_vali_model(fields=("Value", "Uncertainty"))): + """Strip level reading.""" + + data_import_date = models.DateTimeField("Data import date") + data_start_date = models.DateTimeField("Data start date") + calibrated = models.BooleanField("Calibrated") + comments = models.CharField("Comments", null=True, max_length=250) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "data_import_date"]), + models.Index(fields=["station_id", "data_start_date", "time"]), + models.Index(fields=["data_import_date"]), + ] + + +class SoilTemperature(create_vali_model()): + """Soil temperature.""" + + +class IndirectRadiation(create_vali_model()): + """Indirect radiation.""" + + +# Variables created for buoy with different depths +class WaterTemperatureDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water temperature (degrees celcius) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterAcidityDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water acidity (pH) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class RedoxPotentialDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Redox potential (mV) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterTurbidityDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water turbidity (NTU) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class ChlorineConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Chlorine concentration (ug/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class OxygenConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Oxygen concentration (mg/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PercentageOxygenConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Percentage oxygen concentration (mg/l) at a depth in cm. + + HELPWANTED: Is this wrong? It's teh same as above, perhaps units should + be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. + """ + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PhycocyaninDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Phycocyanin (?) 
at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] diff --git a/validated/models_v1.py b/validated/models_v1.py new file mode 100755 index 00000000..36cc5320 --- /dev/null +++ b/validated/models_v1.py @@ -0,0 +1,355 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## +from __future__ import unicode_literals + +from typing import List, Type + +from django.db import models +from django.urls import reverse +from timescale.db.models.models import TimescaleModel + +from station.models import Station + +VALIDATEDS: List[str] = [] +"""Available validated variables.""" + + +# class PermissionsValidated(models.Model): +# """ +# Model used to define the permission "validar". +# """ +# +# class Meta: +# managed = False +# default_permissions = () +# permissions = (("validar", "usar interfaz de validación"),) + + +class PolarWind(TimescaleModel): + """ + Polar Wind validated with a velocity and direction at a specific time. + """ + + speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True) + direction = models.DecimalField( + "Direction", max_digits=14, decimal_places=6, null=True + ) + + class Meta: + """ + Para que no se cree en la migracion. + + NOTE: Why don't we want this in the migration? + """ + + default_permissions = () + managed = False + + +# class DischargeCurve(TimescaleModel): +# """ +# Discharge curve. +# +# Relates a station and a time and a bool as to whether a flow recalculation is +# required. +# """ +# +# id = models.AutoField("Id", primary_key=True) +# station = models.ForeignKey( +# Station, on_delete=models.SET_NULL, null=True, verbose_name="Station" +# ) +# require_recalculate_flow = models.BooleanField( +# verbose_name="Requires re-calculate flow?", default=False +# ) +# +# def __str__(self): +# return self.id +# +# def get_absolute_url(self): +# return reverse("validated:dischargecurve_detail", kwargs={"pk": self.pk}) +# +# class Meta: +# ordering = ("station", "time") +# unique_together = ("station", "time") + + +# class LevelFunction(TimescaleModel): +# """ +# Function Level. Relates a discharge curve to a level (in cm) to a function. 
+# +# NOTE: No idea what this is -> Ask Pablo +# """ +# +# discharge_curve = models.ForeignKey(DischargeCurve, on_delete=models.CASCADE) +# level = models.DecimalField( +# "Level (cm)", max_digits=5, decimal_places=1, db_index=True +# ) +# function = models.CharField("Function", max_length=80) +# +# def __str__(self): +# return str(self.pk) +# +# def get_absolute_url(self): +# return reverse("validated:levelfunction_detail", kwargs={"pk": self.pk}) +# +# class Meta: +# default_permissions = () +# ordering = ( +# "discharge_curve", +# "level", +# ) + + +############################################################## + + +class BaseValidated(TimescaleModel): + @classmethod + def __init_subclass__(cls, *args, **kwargs) -> None: + if not cls.__name__.startswith("_Vali") and cls.__name__ not in VALIDATEDS: + VALIDATEDS.append(cls.__name__) + + station_id = models.PositiveIntegerField("station_id") + # TODO check + used_for_hourly = models.BooleanField("used_for_hourly", default=False) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["used_for_hourly"]), + models.Index(fields=["station_id", "time"]), + models.Index(fields=["time", "station_id"]), + ] + abstract = True + + +def create_vali_model( + digits=14, decimals=6, fields=("Value", "Maximum", "Minimum") +) -> Type[TimescaleModel]: + num = len(VALIDATEDS) + 1 + _fields = { + key.lower(): models.DecimalField( + key, + max_digits=digits, + decimal_places=decimals, + null=True, + ) + for key in fields + } + + class Meta: + abstract = True + + attrs = {"__module__": __name__, "Meta": Meta} + attrs.update(_fields) + + return type( + f"_Vali{num}", + (BaseValidated,), + attrs, + ) + + +class Precipitation(create_vali_model(digits=6, decimals=2, fields=("Value",))): + """Precipitation.""" + + +class AirTemperature(create_vali_model(digits=5, decimals=2)): + """Air temperature.""" + + +class Humidity(create_vali_model()): + """Humidity.""" + + +class WindVelocity(create_vali_model()): + """Wind velocity.""" + + +class WindDirection(create_vali_model()): + """Wind direction.""" + + +class SoilMoisture(create_vali_model()): + """Soil moisture.""" + + +class SolarRadiation(create_vali_model()): + """Solar radiation.""" + + +class AtmosphericPressure(create_vali_model()): + """Atmospheric pressure.""" + + +class WaterTemperature(create_vali_model()): + """Water temperature.""" + + +class Flow(create_vali_model()): + """Flow.""" + + +class WaterLevel(create_vali_model()): + """Water level.""" + + +class BatteryVoltage(create_vali_model()): + """Battery voltage.""" + + +class FlowManual(create_vali_model(fields=("Value",))): + """Flow (manual).""" + + +# TODO Check if There id needed StripLevelReading validated. 
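+# Illustrative note (an equivalence, not code that runs here): a call such as
+# create_vali_model(digits=6, decimals=2, fields=("Value",)) builds a dynamic abstract
+# base that is roughly equivalent to writing by hand:
+#
+#     class _ValiN(BaseValidated):  # "_ValiN" is a placeholder name
+#         value = models.DecimalField("Value", max_digits=6, decimal_places=2, null=True)
+#
+#         class Meta:
+#             abstract = True
+#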
+class StripLevelReading(create_vali_model(fields=("Value", "Uncertainty"))): + """Strip level reading.""" + + data_import_date = models.DateTimeField("Data import date") + data_start_date = models.DateTimeField("Data start date") + calibrated = models.BooleanField("Calibrated") + comments = models.CharField("Comments", null=True, max_length=250) + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "data_import_date"]), + models.Index(fields=["station_id", "data_start_date", "time"]), + models.Index(fields=["data_import_date"]), + ] + + +class SoilTemperature(create_vali_model()): + """Soil temperature.""" + + +class IndirectRadiation(create_vali_model()): + """Indirect radiation.""" + + +# Variables created for buoy with different depths +class WaterTemperatureDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water temperature (degrees celcius) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterAcidityDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water acidity (pH) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class RedoxPotentialDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Redox potential (mV) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class WaterTurbidityDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Water turbidity (NTU) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class ChlorineConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Chlorine concentration (ug/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class OxygenConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Oxygen concentration (mg/l) at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PercentageOxygenConcentrationDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Percentage oxygen concentration (mg/l) at a depth in cm. + + HELPWANTED: Is this wrong? It's teh same as above, perhaps units should + be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. + """ + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] + + +class PhycocyaninDepth( + create_vali_model(digits=6, decimals=2, fields=("Value",)), +): + """Phycocyanin (?) 
at a depth in cm.""" + + depth = models.PositiveSmallIntegerField("Depth") + + class Meta: + default_permissions = () + indexes = [ + models.Index(fields=["station_id", "depth", "time"]), + ] diff --git a/validated/others/__init__.py b/validated/others/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/validated/others/forms.py b/validated/others/forms.py new file mode 100755 index 00000000..d3c9ed5d --- /dev/null +++ b/validated/others/forms.py @@ -0,0 +1,82 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from django import forms +from django.core.exceptions import ValidationError +from django.db import connection + +from station.models import Station +from variable.models import Variable + +from .models import LevelFunction + + +class LevelFunctionForm(forms.ModelForm): + class Meta: + model = LevelFunction + fields = ["level", "function"] + + def clean_function(self): + function = self.cleaned_data["function"] + + # Verifica si tiene letra H + if "H" not in function: + raise ValidationError("It must include parameter H (water level)") + + # Verifica si la función devuelve resultado + test_func = function.replace("H", "10") + sql = "SELECT eval_math('" + test_func + "');" + try: + with connection.cursor() as cursor: + cursor.execute(sql) + len = cursor.rowcount + cursor.fetchall() + except Exception as err: + raise ValidationError(f"Formula syntax error. {err}") + + if len < 1: + raise ValidationError("Formula syntax error. 
No rows found!") + return function + + +class ValidationSearchForm(forms.Form): + station = forms.ModelChoiceField( + queryset=Station.objects.order_by("station_code").filter( + station_external=False, station_type__in=(1, 2, 3) + ), + empty_label="Station", + ) + variable = forms.ModelChoiceField( + queryset=Variable.objects.order_by("variable_id").exclude(variable_id="10"), + empty_label="Variable", + ) + start = forms.DateField( + widget=forms.TextInput(attrs={"autocomplete": "off"}), + input_formats=["%Y-%m-%d"], + label="Start date", + required=True, + ) + end = forms.DateField( + widget=forms.TextInput(attrs={"autocomplete": "off"}), + input_formats=["%Y-%m-%d"], + label="End date", + required=True, + ) + lower_limit = forms.IntegerField(required=False) + upper_limit = forms.IntegerField(required=False) + + def __init__(self, *args, **kwargs): + super(ValidationSearchForm, self).__init__(*args, **kwargs) + self.fields["station"].widget.attrs["placeholder"] = self.fields[ + "station" + ].label diff --git a/validated/others/functions.py b/validated/others/functions.py new file mode 100755 index 00000000..084f88d5 --- /dev/null +++ b/validated/others/functions.py @@ -0,0 +1,124 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## +from django.db import models + + +class ValidationReport(models.Model): + """ + NOTE: No idea what this one does. Why is there a model definition outside of + models.py, anyway? + """ + + id = models.BigAutoField(primary_key=True) + estado = models.BooleanField() + fecha = models.DateTimeField() + valor_seleccionado = models.DecimalField(max_digits=14, decimal_places=6, null=True) + valor = models.DecimalField(max_digits=14, decimal_places=6, null=True) + variacion_consecutiva = models.DecimalField( + max_digits=14, decimal_places=6, null=True + ) + comentario = models.CharField(max_length=350) + class_fila = models.CharField(max_length=30) + class_fecha = models.CharField(max_length=30) + class_validacion = models.CharField(max_length=30) + class_valor = models.CharField(max_length=30) + class_variacion_consecutiva = models.CharField(max_length=30) + class_stddev_error = models.CharField(max_length=30) + + class Meta: + managed = False + + +class LevelFunctionTable(models.Model): + """ + NOTE: No idea what this one does. Why is there a model definition outside of + models.py, anyway? + """ + + id = models.SmallIntegerField(primary_key=True) + funcion = models.CharField("Función", max_length=80) + level_inf = models.DecimalField("Level Inf. 
(cm)", max_digits=5, decimal_places=1) + level_1 = models.DecimalField("Level 1", max_digits=5, decimal_places=1) + level_2 = models.DecimalField("Level 2", max_digits=5, decimal_places=1) + level_3 = models.DecimalField("Level 3", max_digits=5, decimal_places=1) + level_4 = models.DecimalField("Level 4", max_digits=5, decimal_places=1) + level_5 = models.DecimalField("Level 5", max_digits=5, decimal_places=1) + level_sup = models.DecimalField("Level Sup. (cm)", max_digits=5, decimal_places=1) + flow_inf = models.DecimalField("Flow Inf. (cm)", max_digits=10, decimal_places=5) + flow_1 = models.DecimalField("Flow 1", max_digits=10, decimal_places=5) + flow_2 = models.DecimalField("Flow 2", max_digits=10, decimal_places=5) + flow_3 = models.DecimalField("Flow 3", max_digits=10, decimal_places=5) + flow_4 = models.DecimalField("Flow 4", max_digits=10, decimal_places=5) + flow_5 = models.DecimalField("Flow 5", max_digits=10, decimal_places=5) + flow_sup = models.DecimalField("Flow Sup. (cm)", max_digits=10, decimal_places=5) + + class Meta: + managed = False + default_permissions = () + ordering = ("level_inf",) + + +def level_function_table(curvadescarga_id): + sql = """ + WITH base AS ( + select nv.id, + nv.funcion, + coalesce( lag(nv.level) OVER (ORDER BY nv.level ASC), 0.0 ) AS level_inf, + nv.level AS level_sup + from measurement_levelfuncion nv + WHERE nv.curvadescarga_id = %s + ), + levels AS ( + select + b.id, + b.funcion, + b.level_inf, + (SELECT ROUND(b.level_inf + (b.level_sup - b.level_inf)/6.0, 1)) AS level1, + (SELECT ROUND(b.level_inf + 2*(b.level_sup - b.level_inf)/6.0, 1)) AS level2, + (SELECT ROUND(b.level_inf + 3*(b.level_sup - b.level_inf)/6.0, 1)) AS level3, + (SELECT ROUND(b.level_inf + 4*(b.level_sup - b.level_inf)/6.0, 1)) AS level4, + (SELECT ROUND(b.level_inf + 5*(b.level_sup - b.level_inf)/6.0, 1)) AS level5, + b.level_sup + from base b ORDER BY b.level_inf + ), + funciones AS ( + SELECT *, + replace(n.funcion, 'H', CAST(n.level_inf AS VarChar) ) AS f_inf, + replace(n.funcion, 'H', CAST(n.level1 AS VarChar) ) AS f1, + replace(n.funcion, 'H', CAST(n.level2 AS VarChar) ) AS f2, + replace(n.funcion, 'H', CAST(n.level3 AS VarChar) ) AS f3, + replace(n.funcion, 'H', CAST(n.level4 AS VarChar) ) AS f4, + replace(n.funcion, 'H', CAST(n.level5 AS VarChar) ) AS f5, + replace(n.funcion, 'H', CAST(n.level_sup AS VarChar) ) AS f_sup + from levels n + ) + select + f.id, + f.funcion, + f.level_inf, + f.level1, + f.level2, + f.level3, + f.level4, + f.level5, + f.level_sup, + (SELECT eval_math(f.f_inf)) AS flow_inf, + (SELECT eval_math(f.f1)) AS flow1, + (SELECT eval_math(f.f2)) AS flow2, + (SELECT eval_math(f.f3)) AS flow3, + (SELECT eval_math(f.f4)) AS flow4, + (SELECT eval_math(f.f5)) AS flow5, + (SELECT eval_math(f.f_sup)) AS flow_sup + FROM funciones f ORDER BY f.level_inf; + """ # noqa: W291 + return LevelFunctionTable.objects.raw(sql, [curvadescarga_id]) diff --git a/validated/serializers.py b/validated/serializers.py new file mode 100755 index 00000000..d0ab282b --- /dev/null +++ b/validated/serializers.py @@ -0,0 +1,193 @@ +from rest_framework import serializers + +from .models import ( + AirTemperature, + AtmosphericPressure, + BatteryVoltage, + ChlorineConcentrationDepth, + DischargeCurve, + Flow, + FlowManual, + Humidity, + IndirectRadiation, + LevelFunction, + OxygenConcentrationDepth, + PercentageOxygenConcentrationDepth, + PhycocyaninDepth, + PolarWind, + Precipitation, + RedoxPotentialDepth, + SoilMoisture, + SoilTemperature, + SolarRadiation, + StripLevelReading, + 
WaterAcidityDepth, + WaterLevel, + WaterTemperature, + WaterTemperatureDepth, + WaterTurbidityDepth, + WindDirection, + WindVelocity, +) + + +class PolarWindSerializer(serializers.ModelSerializer): + class Meta: + model = PolarWind + exclude = [] + +# TODO Confirm if DischargeCurveSerializer is not needed in Validated Models +# class DischargeCurveSerializer(serializers.ModelSerializer): +# class Meta: +# model = DischargeCurve +# exclude = [] + + +# class LevelFunctionSerializer(serializers.ModelSerializer): +# class Meta: +# model = LevelFunction +# exclude = [] + + +class PrecipitationSerializer(serializers.ModelSerializer): + class Meta: + model = Precipitation + exclude = [] + + +class AirTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = AirTemperature + exclude = [] + + +class HumiditySerializer(serializers.ModelSerializer): + class Meta: + model = Humidity + exclude = [] + + +class WindVelocitySerializer(serializers.ModelSerializer): + class Meta: + model = WindVelocity + exclude = [] + + +class WindDirectionSerializer(serializers.ModelSerializer): + class Meta: + model = WindDirection + exclude = [] + + +class SoilMoistureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilMoisture + exclude = [] + + +class SolarRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = SolarRadiation + exclude = [] + + +class AtmosphericPressureSerializer(serializers.ModelSerializer): + class Meta: + model = AtmosphericPressure + exclude = [] + + +class WaterTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperature + exclude = [] + + +class FlowSerializer(serializers.ModelSerializer): + class Meta: + model = Flow + exclude = [] + + +class WaterLevelSerializer(serializers.ModelSerializer): + class Meta: + model = WaterLevel + exclude = [] + + +class BatteryVoltageSerializer(serializers.ModelSerializer): + class Meta: + model = BatteryVoltage + exclude = [] + + +class FlowManualSerializer(serializers.ModelSerializer): + class Meta: + model = FlowManual + exclude = [] + + +class StripLevelReadingSerializer(serializers.ModelSerializer): + class Meta: + model = StripLevelReading + exclude = [] + + +class SoilTemperatureSerializer(serializers.ModelSerializer): + class Meta: + model = SoilTemperature + exclude = [] + + +class IndirectRadiationSerializer(serializers.ModelSerializer): + class Meta: + model = IndirectRadiation + exclude = [] + + +class WaterTemperatureDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTemperatureDepth + exclude = [] + + +class WaterAcidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterAcidityDepth + exclude = [] + + +class RedoxPotentialDepthSerializer(serializers.ModelSerializer): + class Meta: + model = RedoxPotentialDepth + exclude = [] + + +class WaterTurbidityDepthSerializer(serializers.ModelSerializer): + class Meta: + model = WaterTurbidityDepth + exclude = [] + + +class ChlorineConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = ChlorineConcentrationDepth + exclude = [] + + +class OxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = OxygenConcentrationDepth + exclude = [] + + +class PercentageOxygenConcentrationDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PercentageOxygenConcentrationDepth + exclude = [] + + +class PhycocyaninDepthSerializer(serializers.ModelSerializer): + class Meta: + model = PhycocyaninDepth + exclude = [] diff 
--git a/validated/urls.py b/validated/urls.py new file mode 100755 index 00000000..a93b491b --- /dev/null +++ b/validated/urls.py @@ -0,0 +1,60 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. +# Contacto: paramh2o@aguaquito.gob.ec +# +# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones +# creadoras, ya sea en uso total o parcial del código. +######################################################################################## + +from django.urls import path +from rest_framework.urlpatterns import format_suffix_patterns + +from . import views + +app_name = "validated" +urlpatterns = [ + path("polarwind/", views.PolarWindList.as_view()), + # TODO Verify if it's not really needed + # path("dischargecurve/", views.DischargeCurveList.as_view()), + # path("levelfunction/", views.LevelFunctionList.as_view()), + path("precipitation/", views.PrecipitationList.as_view()), + path("airtemperature/", views.AirTemperatureList.as_view()), + path("humidity/", views.HumidityList.as_view()), + path("windvelocity/", views.WindVelocityList.as_view()), + path("winddirection/", views.WindDirectionList.as_view()), + path("soilmoisture/", views.SoilMoistureList.as_view()), + path("solarradiation/", views.SolarRadiationList.as_view()), + path("atmosphericpressure/", views.AtmosphericPressureList.as_view()), + path("watertemperature/", views.WaterTemperatureList.as_view()), + path("flow/", views.FlowList.as_view()), + path("waterlevel/", views.WaterLevelList.as_view()), + path("batteryvoltage/", views.BatteryVoltageList.as_view()), + path("flowmanual/", views.FlowManualList.as_view()), + path("striplevelreading/", views.StripLevelReadingList.as_view()), + path("soiltemperature/", views.SoilTemperatureList.as_view()), + path("indirectradiation/", views.IndirectRadiationList.as_view()), + path("watertemperature_depth/", views.WaterTemperatureDepthList.as_view()), + path("wateracidity_depth/", views.WaterAcidityDepthList.as_view()), + path("redoxpotential_depth/", views.RedoxPotentialDepthList.as_view()), + path("waterturbidity_depth/", views.WaterTurbidityDepthList.as_view()), + path( + "chlorineconcentration_depth/", + views.ChlorineConcentrationDepthList.as_view(), + ), + path( + "oxygenconcentration_depth/", + views.OxygenConcentrationDepthList.as_view(), + ), + path( + "percentageoxygen_depth/", + views.PercentageOxygenConcentrationDepthList.as_view(), + ), + path("phycocyanin_depth/", views.PhycocyaninDepthList.as_view()), +] + +urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/validated/views.py b/validated/views.py new file mode 100755 index 00000000..e340b0fb --- /dev/null +++ b/validated/views.py @@ -0,0 +1,432 @@ +######################################################################################## +# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos +# (iMHEA)basada en los desarrollos realizados por: +# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. +# Contacto: info@fonag.org.ec +# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), +# Ecuador. 
+#    Contacto: paramh2o@aguaquito.gob.ec
+#
+# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones
+# creadoras, ya sea en uso total o parcial del código.
+########################################################################################
+
+from __future__ import unicode_literals
+
+from django.contrib.auth.decorators import permission_required
+from django.contrib.auth.mixins import PermissionRequiredMixin
+from django.db import connection
+from django.http import HttpResponseRedirect, JsonResponse
+from django.shortcuts import render
+from django.urls import reverse
+from django.views.generic.detail import DetailView
+from django.views.generic.edit import CreateView, DeleteView, UpdateView
+from rest_framework import generics
+
+import validated.models as vali
+import validated.serializers as serializers
+# from validated.models import DischargeCurve, LevelFunction
+
+from .filters import (
+    # DischargeCurveFilter,
+    # LevelFunctionFilter,
+    ValidatedFilter,
+    ValidatedFilterDepth,
+    PolarWindFilter,
+)
+from validated.others.forms import LevelFunctionForm
+from validated.others.functions import level_function_table
+
+
+class PolarWindList(generics.ListAPIView):
+    """
+    List all validated measurements of Polar Wind.
+    """
+
+    queryset = vali.PolarWind.objects.all()
+    serializer_class = serializers.PolarWindSerializer
+    filterset_class = PolarWindFilter
+
+
+# class DischargeCurveList(generics.ListAPIView):
+#     """
+#     List all measurements of Discharge Curve.
+#     """
+#
+#     queryset = vali.DischargeCurve.objects.all()
+#     serializer_class = serializers.DischargeCurveSerializer
+#     filterset_class = DischargeCurveFilter
+#
+#
+# class LevelFunctionList(generics.ListAPIView):
+#     """
+#     List all measurements of Level Function.
+#     """
+#
+#     queryset = vali.LevelFunction.objects.all()
+#     serializer_class = serializers.LevelFunctionSerializer
+#     filterset_class = LevelFunctionFilter
+#
+
+##############################################################
+
+
+class ValidatedListBase(generics.ListAPIView):
+    """
+    Base class for the measurement list views that all use the
+    ValidatedFilter class to filter the results.
+    """
+
+    filterset_class = ValidatedFilter
+
+
+class ValidatedDepthListBase(generics.ListAPIView):
+    """
+    Base class for the measurement list views that all use the
+    ValidatedFilterDepth class to filter the results.
+    """
+
+    filterset_class = ValidatedFilterDepth
+
+
+class PrecipitationList(ValidatedListBase):
+    """
+    List all validated measurements of Precipitation.
+    """
+
+    queryset = vali.Precipitation.objects.all()
+    serializer_class = serializers.PrecipitationSerializer
+
+
+class AirTemperatureList(ValidatedListBase):
+    """
+    List all validated measurements of Air Temperature.
+    """
+
+    queryset = vali.AirTemperature.objects.all()
+    serializer_class = serializers.AirTemperatureSerializer
+
+
+class HumidityList(ValidatedListBase):
+    """
+    List all validated measurements of Humidity.
+    """
+
+    queryset = vali.Humidity.objects.all()
+    serializer_class = serializers.HumiditySerializer
+
+
+class WindVelocityList(ValidatedListBase):
+    """
+    List all validated measurements of Wind Velocity.
+    """
+
+    queryset = vali.WindVelocity.objects.all()
+    serializer_class = serializers.WindVelocitySerializer
+
+
+class WindDirectionList(ValidatedListBase):
+    """
+    List all validated measurements of Wind Direction.
+    """
+
+    queryset = vali.WindDirection.objects.all()
+    serializer_class = serializers.WindDirectionSerializer
+
+
+class SoilMoistureList(ValidatedListBase):
+    """
+    List all validated measurements of Soil Moisture.
+ """ + + queryset = vali.SoilMoisture.objects.all() + serializer_class = serializers.SoilMoistureSerializer + + +class SolarRadiationList(ValidatedListBase): + """ + List all validateds of Solar Radiation. + """ + + queryset = vali.SolarRadiation.objects.all() + serializer_class = serializers.SolarRadiationSerializer + + +class AtmosphericPressureList(ValidatedListBase): + """ + List all validateds of Atmospheric Pressure. + """ + + queryset = vali.AtmosphericPressure.objects.all() + serializer_class = serializers.AtmosphericPressureSerializer + + +class WaterTemperatureList(ValidatedListBase): + """ + List all validateds of Water Temperature. + """ + + queryset = vali.WaterTemperature.objects.all() + serializer_class = serializers.WaterTemperatureSerializer + + +class FlowList(ValidatedListBase): + """ + List all validateds of Flow. + """ + + queryset = vali.Flow.objects.all() + serializer_class = serializers.FlowSerializer + + +class WaterLevelList(ValidatedListBase): + """ + List all validateds of Water Level. + """ + + queryset = vali.WaterLevel.objects.all() + serializer_class = serializers.WaterLevelSerializer + + +class BatteryVoltageList(ValidatedListBase): + """ + List all validateds of Battery Voltage. + """ + + queryset = vali.BatteryVoltage.objects.all() + serializer_class = serializers.BatteryVoltageSerializer + + +class FlowManualList(ValidatedListBase): + """ + List all validateds of Flow Manual. + """ + + queryset = vali.FlowManual.objects.all() + serializer_class = serializers.FlowManualSerializer + + +class StripLevelReadingList(ValidatedListBase): + """ + List all validateds of Strip Level Reading. + """ + + queryset = vali.StripLevelReading.objects.all() + serializer_class = serializers.StripLevelReadingSerializer + + +class SoilTemperatureList(ValidatedListBase): + """ + List all validateds of Soil Temperature. + """ + + queryset = vali.SoilTemperature.objects.all() + serializer_class = serializers.SoilTemperatureSerializer + + +class IndirectRadiationList(ValidatedListBase): + """ + List all validateds of Indirect Radiation. + """ + + queryset = vali.IndirectRadiation.objects.all() + serializer_class = serializers.IndirectRadiationSerializer + + +class WaterTemperatureDepthList(ValidatedDepthListBase): + """ + List all validateds of Water Temperature Depth. + """ + + queryset = vali.WaterTemperatureDepth.objects.all() + serializer_class = serializers.WaterTemperatureDepthSerializer + + +class WaterAcidityDepthList(ValidatedDepthListBase): + """ + List all validateds of Water Acidity Depth. + """ + + queryset = vali.WaterAcidityDepth.objects.all() + serializer_class = serializers.WaterAcidityDepthSerializer + + +class RedoxPotentialDepthList(ValidatedDepthListBase): + """ + List all validateds of Redox Potential Depth. + """ + + queryset = vali.RedoxPotentialDepth.objects.all() + serializer_class = serializers.RedoxPotentialDepthSerializer + + +class WaterTurbidityDepthList(ValidatedDepthListBase): + """ + List all validateds of Water Turbidity Depth. + """ + + queryset = vali.WaterTurbidityDepth.objects.all() + serializer_class = serializers.WaterTurbidityDepthSerializer + + +class ChlorineConcentrationDepthList(ValidatedDepthListBase): + """ + List all validateds of Chlorine Concentration Depth. + """ + + queryset = vali.ChlorineConcentrationDepth.objects.all() + serializer_class = serializers.ChlorineConcentrationDepthSerializer + + +class OxygenConcentrationDepthList(ValidatedDepthListBase): + """ + List all validateds of Oxygen Concentration Depth. 
+ """ + + queryset = vali.OxygenConcentrationDepth.objects.all() + serializer_class = serializers.OxygenConcentrationDepthSerializer + + +class PercentageOxygenConcentrationDepthList(ValidatedDepthListBase): + """ + List all validateds of Percentage Oxygen Concentration Depth. + """ + + queryset = vali.PercentageOxygenConcentrationDepth.objects.all() + serializer_class = serializers.PercentageOxygenConcentrationDepthSerializer + + +class PhycocyaninDepthList(ValidatedDepthListBase): + """ + List all validateds of Phycocyanin Depth. + """ + + queryset = vali.PhycocyaninDepth.objects.all() + serializer_class = serializers.PhycocyaninDepthSerializer + + +######################################################################################## +# TODO: Revisit theses specialised views that use level_function_table() and create +# Django Rest Framework equivalents. +######################################################################################## + + +# class DischargeCurveDetail(PermissionRequiredMixin, DetailView): +# model = DischargeCurve +# permission_required = "validated.view_dischargecurve" +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# dischargecurve_id = self.object.pk +# context["levelfunctiontable"] = level_function_table(dischargecurve_id) +# return context + + +# class LevelFunctionCreate(PermissionRequiredMixin, CreateView): +# permission_required = "validated.add_dischargecurve" +# model = LevelFunction +# form_class = LevelFunctionForm +# +# def post(self, request, *args, **kwargs): +# dischargecurve_id = kwargs.get("id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# form = LevelFunctionForm(self.request.POST or None) +# try: +# # Verify if form is correct +# levelfunction = form.save(commit=False) +# except Exception: +# # If it is not, send an informative message. 
+# _levelfunctiontable = level_function_table(dischargecurve_id) +# new_levelfunction = render( +# request, +# "measurement/levelfunction_form.html", +# {"form": LevelFunctionForm(self.request.POST or None)}, +# ) +# return render( +# request, +# "measurement/dischargecurve_detail.html", +# { +# "dischargecurve": dischargecurve, +# "levelfunctiontable": _levelfunctiontable, +# "new_levelfunction": new_levelfunction.content.decode("utf-8"), +# }, +# ) +# levelfunction.dischargecurve = dischargecurve +# levelfunction.save() +# dischargecurve.requiere_recalculo_caudal = True +# dischargecurve.save() +# url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} +# ) +# return HttpResponseRedirect(url) +# +# def get_context_data(self, **kwargs): +# context = super(LevelFunctionCreate, self).get_context_data(**kwargs) +# context["title"] = "Create" +# dischargecurve_id = self.kwargs.get("id") +# context["url"] = reverse( +# "measurement:levelfunction_create", kwargs={"id": dischargecurve_id} +# ) +# return context +# +# +# class LevelFunctionUpdate(PermissionRequiredMixin, UpdateView): +# permission_required = "validated.change_dischargecurve" +# model = LevelFunction +# fields = ["level", "function"] +# +# def get_context_data(self, **kwargs): +# context = super().get_context_data(**kwargs) +# context["title"] = "Modify" +# levelfunction_pk = self.kwargs.get("pk") +# context["url"] = reverse( +# "measurement:levelfunction_update", kwargs={"pk": levelfunction_pk} +# ) +# context["dischargecurve_id"] = self.object.dischargecurve.id +# return context +# +# def post(self, request, *args, **kwargs): +# data = request.POST.copy() +# dischargecurve_id = data.get("dischargecurve_id") +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.success_url = reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} +# ) +# return super().post(data, **kwargs) +# +# +# class LevelFunctionDelete(PermissionRequiredMixin, DeleteView): +# permission_required = "validated.delete_dischargecurve" +# model = LevelFunction +# +# def delete(self, request, *args, **kwargs): +# self.object = self.get_object() +# dischargecurve = self.object.dischargecurve +# dischargecurve.require_recalculate_flow = True +# dischargecurve.save() +# self.object.delete() +# return HttpResponseRedirect( +# reverse( +# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve.id} +# ) +# ) +# +# +# @permission_required("validated.add_dischargecurve") +# def recalculate_flow(request): +# dischargecurve_id = int(request.POST.get("dischargecurve_id", None)) +# sql = "SELECT calculate_flow(%s);" +# try: +# with connection.cursor() as cursor: +# cursor.execute(sql, [dischargecurve_id]) +# cursor.fetchone() +# except Exception: +# result = {"res": False} +# return JsonResponse(result) +# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) +# dischargecurve.require_recalculate_flow = False +# dischargecurve.save() +# result = {"res": True} +# return JsonResponse(result) From 509253903af2351a4ec4d0d64eee77456a3cd59a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Fri, 14 Apr 2023 22:23:54 -0500 Subject: [PATCH 02/24] Added initial migrations --- .../inspectionProfiles/profiles_settings.xml | 6 + .idea/misc.xml | 7 + .idea/paricia.iml | 7 + .idea/vcs.xml | 6 + Dockerfile | 0 LICENSE | 0 daily/apps.py | 4 +- daily/migrations/0001_initial.py | 533 +++++++ 
djangomain/renderers.py | 0 djangomain/views.py | 0 docker-compose.yml | 0 formatting/migrations/0001_initial.py | 348 +---- formatting/serializers.py | 0 hourly/migrations/0001_initial.py | 463 ++++++ hourly/models.py | 2 +- importing/migrations/0001_initial.py | 94 +- .../migrations/0002_auto_20220720_1247.py | 50 - importing/migrations/0002_initial.py | 33 + importing/serializers.py | 0 management/__init__.py | 0 management/admin.py | 0 management/apps.py | 0 management/initial_data.py | 0 management/migrations/0001_initial.py | 130 +- management/migrations/0002_initial_data.py | 20 - .../migrations/0003_alter_user_first_name.py | 20 - management/migrations/__init__.py | 0 management/models.py | 0 management/permissions.py | 0 management/serializers.py | 0 management/tests.py | 0 management/urls.py | 0 management/views.py | 0 measurement/filters.py | 0 measurement/migrations/0001_initial.py | 1344 ++++------------- measurement/serializers.py | 0 monthly/apps.py | 4 +- monthly/migrations/0001_initial.py | 505 +++++++ sensor/migrations/0001_initial.py | 91 +- sensor/serializers.py | 0 setup.cfg | 0 static/images/database_viz.png | Bin station/migrations/0001_initial.py | 347 +---- station/models.py | 2 +- station/serializers.py | 0 templates/menu_bar.html | 0 templates/table.html | 0 templates/table_base.html | 0 tests/__init__.py | 0 tests/formatting/__init__.py | 0 tests/formatting/test_models.py | 0 tests/importing/__init__.py | 0 tests/importing/test_functions.py | 0 tests/importing/test_models.py | 0 tests/measurement/__init__.py | 0 tests/measurement/test_models.py | 0 tests/sensor/__init__.py | 0 tests/sensor/test_models.py | 0 tests/station/__init__.py | 0 tests/station/test_models.py | 0 tests/test_data/iMHEA_HMT_01_HI_01_raw.csv | 0 tests/timescaledb/__init__.py | 0 tests/timescaledb/test_behaviour.py | 0 tests/variable/__init__.py | 0 tests/variable/test_models.py | 0 utilities/__init__.py | 0 utilities/data/formatting_association.json | 0 utilities/data/formatting_classification.json | 0 utilities/data/formatting_format.json | 0 utilities/frontend_menu/menu.json | 0 utilities/init.sh | 0 utilities/install_ssh.sh | 0 utilities/load_initial_data.py | 0 utilities/sshd_config | 0 .../maintenance/install_postgres_functions.py | 0 validated/migrations/0001_initial.py | 423 ++++++ validated/models_v1.py | 355 ----- validated/others/__init__.py | 0 validated/others/forms.py | 82 - validated/others/functions.py | 124 -- variable/migrations/0001_initial.py | 177 +-- variable/serializers.py | 0 82 files changed, 2492 insertions(+), 2685 deletions(-) create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/paricia.iml create mode 100644 .idea/vcs.xml mode change 100644 => 100755 Dockerfile mode change 100644 => 100755 LICENSE create mode 100644 daily/migrations/0001_initial.py mode change 100644 => 100755 djangomain/renderers.py mode change 100644 => 100755 djangomain/views.py mode change 100644 => 100755 docker-compose.yml mode change 100644 => 100755 formatting/serializers.py create mode 100644 hourly/migrations/0001_initial.py delete mode 100644 importing/migrations/0002_auto_20220720_1247.py create mode 100644 importing/migrations/0002_initial.py mode change 100644 => 100755 importing/serializers.py mode change 100644 => 100755 management/__init__.py mode change 100644 => 100755 management/admin.py mode change 100644 => 100755 management/apps.py mode change 100644 => 100755 management/initial_data.py delete mode 
100644 management/migrations/0002_initial_data.py delete mode 100644 management/migrations/0003_alter_user_first_name.py mode change 100644 => 100755 management/migrations/__init__.py mode change 100644 => 100755 management/models.py mode change 100644 => 100755 management/permissions.py mode change 100644 => 100755 management/serializers.py mode change 100644 => 100755 management/tests.py mode change 100644 => 100755 management/urls.py mode change 100644 => 100755 management/views.py mode change 100644 => 100755 measurement/filters.py mode change 100644 => 100755 measurement/serializers.py create mode 100644 monthly/migrations/0001_initial.py mode change 100644 => 100755 sensor/serializers.py mode change 100644 => 100755 setup.cfg mode change 100644 => 100755 static/images/database_viz.png mode change 100644 => 100755 station/serializers.py mode change 100644 => 100755 templates/menu_bar.html mode change 100644 => 100755 templates/table.html mode change 100644 => 100755 templates/table_base.html mode change 100644 => 100755 tests/__init__.py mode change 100644 => 100755 tests/formatting/__init__.py mode change 100644 => 100755 tests/formatting/test_models.py mode change 100644 => 100755 tests/importing/__init__.py mode change 100644 => 100755 tests/importing/test_functions.py mode change 100644 => 100755 tests/importing/test_models.py mode change 100644 => 100755 tests/measurement/__init__.py mode change 100644 => 100755 tests/measurement/test_models.py mode change 100644 => 100755 tests/sensor/__init__.py mode change 100644 => 100755 tests/sensor/test_models.py mode change 100644 => 100755 tests/station/__init__.py mode change 100644 => 100755 tests/station/test_models.py mode change 100644 => 100755 tests/test_data/iMHEA_HMT_01_HI_01_raw.csv mode change 100644 => 100755 tests/timescaledb/__init__.py mode change 100644 => 100755 tests/timescaledb/test_behaviour.py mode change 100644 => 100755 tests/variable/__init__.py mode change 100644 => 100755 tests/variable/test_models.py mode change 100644 => 100755 utilities/__init__.py mode change 100644 => 100755 utilities/data/formatting_association.json mode change 100644 => 100755 utilities/data/formatting_classification.json mode change 100644 => 100755 utilities/data/formatting_format.json mode change 100644 => 100755 utilities/frontend_menu/menu.json mode change 100644 => 100755 utilities/init.sh mode change 100644 => 100755 utilities/install_ssh.sh mode change 100644 => 100755 utilities/load_initial_data.py mode change 100644 => 100755 utilities/sshd_config mode change 100644 => 100755 utilities/unused_scripts/maintenance/install_postgres_functions.py create mode 100644 validated/migrations/0001_initial.py delete mode 100755 validated/models_v1.py delete mode 100755 validated/others/__init__.py delete mode 100755 validated/others/forms.py delete mode 100755 validated/others/functions.py mode change 100644 => 100755 variable/serializers.py diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..105ce2da --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..95111376 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/paricia.iml b/.idea/paricia.iml new file mode 100644 index 00000000..ec63674c --- /dev/null +++ b/.idea/paricia.iml @@ -0,0 +1,7 @@ + + 
+ + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..35eb1ddf --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/Dockerfile b/Dockerfile old mode 100644 new mode 100755 diff --git a/LICENSE b/LICENSE old mode 100644 new mode 100755 diff --git a/daily/apps.py b/daily/apps.py index e5f54372..1bd2e4be 100755 --- a/daily/apps.py +++ b/daily/apps.py @@ -16,5 +16,5 @@ from django.apps import AppConfig -class HourlyConfig(AppConfig): - name = "hourly" +class DailyConfig(AppConfig): + name = "daily" diff --git a/daily/migrations/0001_initial.py b/daily/migrations/0001_initial.py new file mode 100644 index 00000000..99f4a75b --- /dev/null +++ b/daily/migrations/0001_initial.py @@ -0,0 +1,533 @@ +# Generated by Django 3.2.14 on 2023-04-15 03:19 + +from django.db import migrations, models +import timescale.db.models.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PolarWind', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('speed', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Speed')), + ('direction', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Direction')), + ], + options={ + 'managed': False, + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='AirTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='AtmosphericPressure', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', 
models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='BatteryVoltage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ChlorineConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Flow', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FlowManual', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', 
timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Humidity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='IndirectRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='OxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, 
max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PercentageOxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PhycocyaninDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Precipitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('total', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Total')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='RedoxPotentialDepth', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='SoilMoisture', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SoilTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SolarRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_monthly', 
models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='StripLevelReading',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')),
+                ('uncertainty', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Uncertainty')),
+                ('data_import_date', models.DateTimeField(verbose_name='Data import date')),
+                ('data_start_date', models.DateTimeField(verbose_name='Data start date')),
+                ('calibrated', models.BooleanField(verbose_name='Calibrated')),
+                ('comments', models.CharField(max_length=250, null=True, verbose_name='Comments')),
+            ],
+            options={
+                'default_permissions': (),
+            },
+        ),
+        migrations.CreateModel(
+            name='WaterAcidityDepth',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')),
+                ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')),
+            ],
+            options={
+                'default_permissions': (),
+            },
+        ),
+        migrations.CreateModel(
+            name='WaterLevel',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='WaterTemperature',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='WaterTemperatureDepth',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')),
+                ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')),
+            ],
+            options={
+                'default_permissions': (),
+            },
+        ),
+        migrations.CreateModel(
+            name='WaterTurbidityDepth',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')),
+                ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')),
+            ],
+            options={
+                'default_permissions': (),
+            },
+        ),
+        migrations.CreateModel(
+            name='WindDirection',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='WindVelocity',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')),
+                ('date', models.DateField(verbose_name='date')),
+                ('station_id', models.PositiveIntegerField(verbose_name='station_id')),
+                ('used_for_monthly', models.BooleanField(default=False, verbose_name='used_for_monthly')),
+                ('completeness', models.DecimalField(decimal_places=1, max_digits=4)),
+                ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')),
+                ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')),
+                ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')),
+                ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')),
+                ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddIndex(
+            model_name='waterturbiditydepth',
+            index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_water_station_c4fe31_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='watertemperaturedepth',
+            index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_water_station_e502a4_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='wateraciditydepth',
+            index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_water_station_b03995_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='striplevelreading',
+            index=models.Index(fields=['station_id', 'data_import_date'], name='daily_strip_station_b85ccb_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='striplevelreading',
+            index=models.Index(fields=['station_id', 'data_start_date', 'time'], name='daily_strip_station_2224c7_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='striplevelreading',
+            index=models.Index(fields=['data_import_date'], name='daily_strip_data_im_e8bc79_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='redoxpotentialdepth',
+            index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_redox_station_ecdca1_idx'),
+        ),
+        migrations.AddIndex(
+            model_name='phycocyanindepth',
+            index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_phyco_station_1a5f4d_idx'),
+        ),
+        migrations.AddIndex(
model_name='percentageoxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_perce_station_d4e30d_idx'), + ), + migrations.AddIndex( + model_name='oxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_oxyge_station_025c07_idx'), + ), + migrations.AddIndex( + model_name='chlorineconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='daily_chlor_station_8c40d8_idx'), + ), + ] diff --git a/djangomain/renderers.py b/djangomain/renderers.py old mode 100644 new mode 100755 diff --git a/djangomain/views.py b/djangomain/views.py old mode 100644 new mode 100755 diff --git a/docker-compose.yml b/docker-compose.yml old mode 100644 new mode 100755 diff --git a/formatting/migrations/0001_initial.py b/formatting/migrations/0001_initial.py index d5e4158d..cf2ce844 100644 --- a/formatting/migrations/0001_initial.py +++ b/formatting/migrations/0001_initial.py @@ -1,7 +1,7 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated by Django 3.2.14 on 2023-04-15 03:19 -import django.db.models.deletion from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): @@ -9,328 +9,106 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ("variable", "0001_initial"), - ("station", "0001_initial"), + ('variable', '0001_initial'), + ('station', '0001_initial'), ] operations = [ migrations.CreateModel( - name="Date", + name='Date', fields=[ - ( - "date_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("date_format", models.CharField(max_length=20, verbose_name="Format")), - ("code", models.CharField(max_length=20, verbose_name="Code")), + ('date_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('date_format', models.CharField(max_length=20, verbose_name='Format')), + ('code', models.CharField(max_length=20, verbose_name='Code')), ], options={ - "ordering": ("date_id",), + 'ordering': ('date_id',), }, ), migrations.CreateModel( - name="Delimiter", + name='Delimiter', fields=[ - ( - "delimiter_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=100, verbose_name="Name")), - ( - "character", - models.CharField( - blank=True, max_length=10, verbose_name="Character" - ), - ), + ('delimiter_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=100, verbose_name='Name')), + ('character', models.CharField(blank=True, max_length=10, verbose_name='Character')), ], ), migrations.CreateModel( - name="Extension", + name='Extension', fields=[ - ( - "extension_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("value", models.CharField(max_length=5, verbose_name="Value")), + ('extension_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('value', models.CharField(max_length=5, verbose_name='Value')), ], ), migrations.CreateModel( - name="Time", + name='Time', fields=[ - ( - "time_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("time_format", models.CharField(max_length=20, verbose_name="Format")), - ("code", models.CharField(max_length=20, verbose_name="Code")), + ('time_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('time_format', models.CharField(max_length=20, 
verbose_name='Format')), + ('code', models.CharField(max_length=20, verbose_name='Code')), ], options={ - "ordering": ("time_id",), + 'ordering': ('time_id',), }, ), migrations.CreateModel( - name="Format", + name='Format', fields=[ - ( - "format_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="format_id" - ), - ), - ("name", models.CharField(max_length=35, verbose_name="Format name")), - ( - "description", - models.TextField(null=True, verbose_name="Description"), - ), - ( - "location", - models.CharField( - blank=True, max_length=300, null=True, verbose_name="Location" - ), - ), - ( - "file", - models.CharField( - blank=True, - help_text="Only applies to automatic transmission", - max_length=100, - null=True, - verbose_name="Archivo", - ), - ), - ("first_row", models.SmallIntegerField(verbose_name="First row")), - ( - "footer_rows", - models.SmallIntegerField( - blank=True, null=True, verbose_name="Number of footer rows" - ), - ), - ( - "utc_date", - models.BooleanField( - default=False, verbose_name="Is time UTC? (substract 5 hours)" - ), - ), - ("date_column", models.SmallIntegerField(verbose_name="Date column")), - ("time_column", models.SmallIntegerField(verbose_name="Time column")), - ( - "format_type", - models.CharField( - choices=[ - ("automatic", "automatic"), - ("conventional", "conventional"), - ], - max_length=25, - verbose_name="Format type", - ), - ), - ("status", models.BooleanField(default=True, verbose_name="Status")), - ( - "date", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Date", - verbose_name="Date format", - ), - ), - ( - "delimiter", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Delimiter", - verbose_name="Delimiter", - ), - ), - ( - "extension", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Extension", - verbose_name="File extension", - ), - ), - ( - "time", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Time", - verbose_name="Time format", - ), - ), + ('format_id', models.AutoField(primary_key=True, serialize=False, verbose_name='format_id')), + ('name', models.CharField(max_length=35, verbose_name='Format name')), + ('description', models.TextField(null=True, verbose_name='Description')), + ('location', models.CharField(blank=True, max_length=300, null=True, verbose_name='Location')), + ('file', models.CharField(blank=True, help_text='Only applies to automatic transmission', max_length=100, null=True, verbose_name='Archivo')), + ('first_row', models.SmallIntegerField(verbose_name='First row')), + ('footer_rows', models.SmallIntegerField(blank=True, null=True, verbose_name='Number of footer rows')), + ('utc_date', models.BooleanField(default=False, verbose_name='Is time UTC? 
(substract 5 hours)')), + ('date_column', models.SmallIntegerField(verbose_name='Date column')), + ('time_column', models.SmallIntegerField(verbose_name='Time column')), + ('format_type', models.CharField(choices=[('automatic', 'automatic'), ('conventional', 'conventional')], max_length=25, verbose_name='Format type')), + ('status', models.BooleanField(default=True, verbose_name='Status')), + ('date', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.date', verbose_name='Date format')), + ('delimiter', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.delimiter', verbose_name='Delimiter')), + ('extension', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.extension', verbose_name='File extension')), + ('time', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.time', verbose_name='Time format')), ], options={ - "ordering": ("-format_id",), + 'ordering': ('-format_id',), }, ), migrations.CreateModel( - name="Classification", + name='Classification', fields=[ - ( - "cls_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("value", models.SmallIntegerField(verbose_name="Value column")), - ( - "maximum", - models.SmallIntegerField( - blank=True, null=True, verbose_name="Maximum value column" - ), - ), - ( - "minimum", - models.SmallIntegerField( - blank=True, null=True, verbose_name="Minimum value column" - ), - ), - ( - "value_validator_column", - models.SmallIntegerField( - blank=True, null=True, verbose_name="Value validator column" - ), - ), - ( - "value_validator_text", - models.CharField( - blank=True, - max_length=10, - null=True, - verbose_name="Value validator text", - ), - ), - ( - "maximum_validator_column", - models.SmallIntegerField( - blank=True, - null=True, - verbose_name="Maximum value validator column", - ), - ), - ( - "maximum_validator_text", - models.CharField( - blank=True, - max_length=10, - null=True, - verbose_name="Maximum value validator text", - ), - ), - ( - "minimum_validator_column", - models.SmallIntegerField( - blank=True, - null=True, - verbose_name="Minimum value validator column", - ), - ), - ( - "minimum_validator_text", - models.CharField( - blank=True, - max_length=10, - null=True, - verbose_name="Minimum value validator text", - ), - ), - ( - "accumulate", - models.BooleanField( - default=False, verbose_name="Accumulate every 5 min?" - ), - ), - ( - "incremental", - models.BooleanField( - default=False, verbose_name="Is it an incremental counter?" - ), - ), - ( - "resolution", - models.DecimalField( - blank=True, - decimal_places=2, - max_digits=6, - null=True, - verbose_name="Resolution", - ), - ), - ( - "decimal_comma", - models.BooleanField( - default=False, verbose_name="Uses comma as decimal separator?" 
- ), - ), - ( - "format", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="formatting.Format", - verbose_name="Format", - ), - ), - ( - "variable", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="variable.Variable", - verbose_name="Variable", - ), - ), + ('cls_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('value', models.SmallIntegerField(verbose_name='Value column')), + ('maximum', models.SmallIntegerField(blank=True, null=True, verbose_name='Maximum value column')), + ('minimum', models.SmallIntegerField(blank=True, null=True, verbose_name='Minimum value column')), + ('value_validator_column', models.SmallIntegerField(blank=True, null=True, verbose_name='Value validator column')), + ('value_validator_text', models.CharField(blank=True, max_length=10, null=True, verbose_name='Value validator text')), + ('maximum_validator_column', models.SmallIntegerField(blank=True, null=True, verbose_name='Maximum value validator column')), + ('maximum_validator_text', models.CharField(blank=True, max_length=10, null=True, verbose_name='Maximum value validator text')), + ('minimum_validator_column', models.SmallIntegerField(blank=True, null=True, verbose_name='Minimum value validator column')), + ('minimum_validator_text', models.CharField(blank=True, max_length=10, null=True, verbose_name='Minimum value validator text')), + ('accumulate', models.BooleanField(default=False, verbose_name='Accumulate every 5 min?')), + ('incremental', models.BooleanField(default=False, verbose_name='Is it an incremental counter?')), + ('resolution', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True, verbose_name='Resolution')), + ('decimal_comma', models.BooleanField(default=False, verbose_name='Uses comma as decimal separator?')), + ('format', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='formatting.format', verbose_name='Format')), + ('variable', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='variable.variable', verbose_name='Variable')), ], options={ - "ordering": ("variable",), + 'ordering': ('variable',), }, ), migrations.CreateModel( - name="Association", + name='Association', fields=[ - ( - "association_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ( - "format", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Format", - verbose_name="Format", - ), - ), - ( - "station", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Station", - verbose_name="Station", - ), - ), + ('association_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('format', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.format', verbose_name='Format')), + ('station', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.station', verbose_name='Station')), ], options={ - "ordering": ("association_id",), - "unique_together": {("station", "format")}, + 'ordering': ('association_id',), + 'unique_together': {('station', 'format')}, }, ), ] diff --git a/formatting/serializers.py b/formatting/serializers.py old mode 100644 new mode 100755 diff --git a/hourly/migrations/0001_initial.py b/hourly/migrations/0001_initial.py new file mode 100644 index 00000000..31ec4aba --- /dev/null +++ 
b/hourly/migrations/0001_initial.py @@ -0,0 +1,463 @@ +# Generated by Django 3.2.14 on 2023-04-15 03:19 + +from django.db import migrations, models +import timescale.db.models.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PolarWind', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('speed', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Speed')), + ('direction', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Direction')), + ], + options={ + 'managed': False, + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='AirTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='AtmosphericPressure', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='BatteryVoltage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ChlorineConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, 
verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Flow', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FlowManual', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Humidity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='IndirectRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='OxygenConcentrationDepth', + 
fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PercentageOxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PhycocyaninDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Precipitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('total', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Total')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='RedoxPotentialDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', 
models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='SoilMoisture', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SoilTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SolarRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='StripLevelReading', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('uncertainty', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Uncertainty')), + ('data_import_date', 
models.DateTimeField(verbose_name='Data import date')), + ('data_start_date', models.DateTimeField(verbose_name='Data start date')), + ('calibrated', models.BooleanField(verbose_name='Calibrated')), + ('comments', models.CharField(max_length=250, null=True, verbose_name='Comments')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterAcidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterLevel', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperatureDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + 
('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterTurbidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Minimum')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WindDirection', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WindVelocity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', models.DateTimeField()), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_daily', models.BooleanField(default=False, verbose_name='used_for_daily')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('average', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Average')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddIndex( + model_name='waterturbiditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_wate_station_29158d_idx'), + ), + migrations.AddIndex( + model_name='watertemperaturedepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_wate_station_7c3f37_idx'), + ), + migrations.AddIndex( + model_name='wateraciditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_wate_station_b94319_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_import_date'], name='hourly_stri_station_442487_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_start_date', 'time'], name='hourly_stri_station_31d49c_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['data_import_date'], 
name='hourly_stri_data_im_f87faf_idx'), + ), + migrations.AddIndex( + model_name='redoxpotentialdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_redo_station_fe0e2d_idx'), + ), + migrations.AddIndex( + model_name='phycocyanindepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_phyc_station_120a69_idx'), + ), + migrations.AddIndex( + model_name='percentageoxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_perc_station_8b4449_idx'), + ), + migrations.AddIndex( + model_name='oxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_oxyg_station_2d6a10_idx'), + ), + migrations.AddIndex( + model_name='chlorineconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='hourly_chlo_station_f2be21_idx'), + ), + ] diff --git a/hourly/models.py b/hourly/models.py index e8ccc21f..5d299c0d 100755 --- a/hourly/models.py +++ b/hourly/models.py @@ -54,7 +54,7 @@ def __init_subclass__(cls, *args, **kwargs) -> None: if not cls.__name__.startswith("_Hour") and cls.__name__ not in HOURLYS: HOURLYS.append(cls.__name__) - time = models.DateTimeField(precision=0) + time = models.DateTimeField() station_id = models.PositiveIntegerField("station_id") used_for_daily = models.BooleanField("used_for_daily", default=False) completeness = models.DecimalField(max_digits=4, decimal_places=1) diff --git a/importing/migrations/0001_initial.py b/importing/migrations/0001_initial.py index 7ab1e72c..665bafb1 100644 --- a/importing/migrations/0001_initial.py +++ b/importing/migrations/0001_initial.py @@ -1,8 +1,8 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated by Django 3.2.14 on 2023-04-15 03:19 +from django.db import migrations, models import django.db.models.deletion import django.utils.timezone -from django.db import migrations, models class Migration(migrations.Migration): @@ -10,92 +10,36 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ("formatting", "0001_initial"), - ("station", "0001_initial"), + ('formatting', '0001_initial'), + ('station', '0001_initial'), ] operations = [ migrations.CreateModel( - name="DataImportFull", + name='DataImportFull', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("date", models.DateTimeField(auto_now_add=True, verbose_name="Date")), - ( - "filepath", - models.CharField( - blank=True, max_length=1024, null=True, verbose_name="File" - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date', models.DateTimeField(auto_now_add=True, verbose_name='Date')), + ('filepath', models.CharField(blank=True, max_length=1024, null=True, verbose_name='File')), ], options={ - "permissions": [ - ( - "download_original_file", - "Download the original file that was uploaded to the system.", - ) - ], + 'permissions': [('download_original_file', 'Download the original file that was uploaded to the system.')], }, ), migrations.CreateModel( - name="DataImportTemp", + name='DataImportTemp', fields=[ - ( - "data_import_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("date", models.DateTimeField(auto_now_add=True, verbose_name="Date")), - ( - "start_date", - models.DateTimeField( - default=django.utils.timezone.now, verbose_name="Start date" - ), - ), - ( - "end_date", - models.DateTimeField( - 
default=django.utils.timezone.now, verbose_name="End date" - ), - ), - ( - "observations", - models.TextField( - blank=True, null=True, verbose_name="Observations/Notes" - ), - ), - ("file", models.FileField(upload_to="files/tmp/", verbose_name="File")), - ( - "format", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="formatting.Format", - verbose_name="Format", - ), - ), - ( - "station", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Station", - verbose_name="Station", - ), - ), + ('data_import_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('date', models.DateTimeField(auto_now_add=True, verbose_name='Date')), + ('start_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Start date')), + ('end_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='End date')), + ('observations', models.TextField(blank=True, null=True, verbose_name='Observations/Notes')), + ('file', models.FileField(upload_to='files/tmp/', verbose_name='File')), + ('format', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='formatting.format', verbose_name='Format')), + ('station', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.station', verbose_name='Station')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), ] diff --git a/importing/migrations/0002_auto_20220720_1247.py b/importing/migrations/0002_auto_20220720_1247.py deleted file mode 100644 index 0ee0cc9c..00000000 --- a/importing/migrations/0002_auto_20220720_1247.py +++ /dev/null @@ -1,50 +0,0 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("importing", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="dataimporttemp", - name="user", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL, - verbose_name="User", - ), - ), - migrations.AddField( - model_name="dataimportfull", - name="import_temp", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="importing.DataImportTemp", - ), - ), - migrations.AddField( - model_name="dataimportfull", - name="user", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL, - verbose_name="User", - ), - ), - ] diff --git a/importing/migrations/0002_initial.py b/importing/migrations/0002_initial.py new file mode 100644 index 00000000..9d53d151 --- /dev/null +++ b/importing/migrations/0002_initial.py @@ -0,0 +1,33 @@ +# Generated by Django 3.2.14 on 2023-04-15 03:19 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('importing', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='dataimporttemp', + name='user', + field=models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='User'), + ), + migrations.AddField( + model_name='dataimportfull', + name='import_temp', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='importing.dataimporttemp'), + ), + migrations.AddField( + model_name='dataimportfull', + name='user', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='User'), + ), + ] diff --git a/importing/serializers.py b/importing/serializers.py old mode 100644 new mode 100755 diff --git a/management/__init__.py b/management/__init__.py old mode 100644 new mode 100755 diff --git a/management/admin.py b/management/admin.py old mode 100644 new mode 100755 diff --git a/management/apps.py b/management/apps.py old mode 100644 new mode 100755 diff --git a/management/initial_data.py b/management/initial_data.py old mode 100644 new mode 100755 diff --git a/management/migrations/0001_initial.py b/management/migrations/0001_initial.py index 6864667a..8d458348 100644 --- a/management/migrations/0001_initial.py +++ b/management/migrations/0001_initial.py @@ -1,9 +1,9 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated by Django 3.2.14 on 2023-04-15 03:19 import django.contrib.auth.models import django.contrib.auth.validators -import django.utils.timezone from django.db import migrations, models +import django.utils.timezone class Migration(migrations.Migration): @@ -11,122 +11,34 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ("auth", "0011_update_proxy_permissions"), + ('auth', '0012_alter_user_first_name_max_length'), ] operations = [ migrations.CreateModel( - name="User", + name='User', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("password", models.CharField(max_length=128, verbose_name="password")), - ( - "last_login", - models.DateTimeField( - blank=True, null=True, verbose_name="last login" - ), - ), - ( - "is_superuser", - models.BooleanField( - default=False, - help_text="Designates that this user has all permissions without explicitly assigning them.", - verbose_name="superuser status", - ), - ), - ( - "username", - models.CharField( - error_messages={ - "unique": "A user with that username already exists." - }, - help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.", - max_length=150, - unique=True, - validators=[ - django.contrib.auth.validators.UnicodeUsernameValidator() - ], - verbose_name="username", - ), - ), - ( - "first_name", - models.CharField( - blank=True, max_length=30, verbose_name="first name" - ), - ), - ( - "last_name", - models.CharField( - blank=True, max_length=150, verbose_name="last name" - ), - ), - ( - "email", - models.EmailField( - blank=True, max_length=254, verbose_name="email address" - ), - ), - ( - "is_staff", - models.BooleanField( - default=False, - help_text="Designates whether the user can log into this admin site.", - verbose_name="staff status", - ), - ), - ( - "is_active", - models.BooleanField( - default=True, - help_text="Designates whether this user should be treated as active. 
Unselect this instead of deleting accounts.", - verbose_name="active", - ), - ), - ( - "date_joined", - models.DateTimeField( - default=django.utils.timezone.now, verbose_name="date joined" - ), - ), - ( - "groups", - models.ManyToManyField( - blank=True, - help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.", - related_name="user_set", - related_query_name="user", - to="auth.Group", - verbose_name="groups", - ), - ), - ( - "user_permissions", - models.ManyToManyField( - blank=True, - help_text="Specific permissions for this user.", - related_name="user_set", - related_query_name="user", - to="auth.Permission", - verbose_name="user permissions", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), ], options={ - "verbose_name": "user", - "verbose_name_plural": "users", - "abstract": False, + 'verbose_name': 'user', + 'verbose_name_plural': 'users', + 'abstract': False, }, managers=[ - ("objects", django.contrib.auth.models.UserManager()), + ('objects', django.contrib.auth.models.UserManager()), ], ), ] diff --git a/management/migrations/0002_initial_data.py b/management/migrations/0002_initial_data.py deleted file mode 100644 index c5f7c043..00000000 --- a/management/migrations/0002_initial_data.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 3.0.11 on 2022-07-20 13:21 - -from django.db import migrations - -from ..initial_data import populate_groups - - -class Migration(migrations.Migration): - - dependencies = [ - ("station", "0001_initial"), - ("sensor", "0001_initial"), - ("formatting", "0001_initial"), - ("variable", "0001_initial"), - ("measurement", "0001_initial"), - ("importing", "0002_auto_20220720_1247"), - ("management", "0001_initial"), - ] - - operations = [migrations.RunPython(populate_groups)] diff --git a/management/migrations/0003_alter_user_first_name.py b/management/migrations/0003_alter_user_first_name.py deleted file mode 100644 index 594af1ef..00000000 --- a/management/migrations/0003_alter_user_first_name.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 3.2.14 on 2022-07-22 10:31 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("management", "0002_initial_data"), - ] - - operations = [ - migrations.AlterField( - model_name="user", - name="first_name", - field=models.CharField( - blank=True, max_length=150, verbose_name="first name" - ), - ), - ] diff --git a/management/migrations/__init__.py b/management/migrations/__init__.py old mode 100644 new mode 100755 diff --git a/management/models.py b/management/models.py old mode 100644 new mode 100755 diff --git a/management/permissions.py b/management/permissions.py old mode 100644 new mode 100755 diff --git a/management/serializers.py b/management/serializers.py old mode 100644 new mode 100755 diff --git a/management/tests.py b/management/tests.py old mode 100644 new mode 100755 diff --git a/management/urls.py b/management/urls.py old mode 100644 new mode 100755 diff --git a/management/views.py b/management/views.py old mode 100644 new mode 100755 diff --git a/measurement/filters.py b/measurement/filters.py old mode 100644 new mode 100755 diff --git a/measurement/migrations/0001_initial.py b/measurement/migrations/0001_initial.py index e5d97475..f950dc6c 100644 --- a/measurement/migrations/0001_initial.py +++ b/measurement/migrations/0001_initial.py @@ -1,8 +1,8 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated by Django 3.2.14 on 2023-04-15 03:19 +from django.db import migrations, models import django.db.models.deletion import timescale.db.models.fields -from django.db import migrations, models class Migration(migrations.Migration): @@ -10,1282 +10,488 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ("station", "0001_initial"), + ('station', '0001_initial'), ] operations = [ migrations.CreateModel( - 
name="LevelFunctionTable", + name='LevelFunctionTable', fields=[ - ("id", models.SmallIntegerField(primary_key=True, serialize=False)), - ("funcion", models.CharField(max_length=80, verbose_name="Función")), - ( - "level_inf", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level Inf. (cm)" - ), - ), - ( - "level_1", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level 1" - ), - ), - ( - "level_2", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level 2" - ), - ), - ( - "level_3", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level 3" - ), - ), - ( - "level_4", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level 4" - ), - ), - ( - "level_5", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level 5" - ), - ), - ( - "level_sup", - models.DecimalField( - decimal_places=1, max_digits=5, verbose_name="Level Sup. (cm)" - ), - ), - ( - "flow_inf", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow Inf. (cm)" - ), - ), - ( - "flow_1", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow 1" - ), - ), - ( - "flow_2", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow 2" - ), - ), - ( - "flow_3", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow 3" - ), - ), - ( - "flow_4", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow 4" - ), - ), - ( - "flow_5", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow 5" - ), - ), - ( - "flow_sup", - models.DecimalField( - decimal_places=5, max_digits=10, verbose_name="Flow Sup. (cm)" - ), - ), + ('id', models.SmallIntegerField(primary_key=True, serialize=False)), + ('funcion', models.CharField(max_length=80, verbose_name='Función')), + ('level_inf', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level Inf. (cm)')), + ('level_1', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level 1')), + ('level_2', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level 2')), + ('level_3', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level 3')), + ('level_4', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level 4')), + ('level_5', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level 5')), + ('level_sup', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Level Sup. (cm)')), + ('flow_inf', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow Inf. (cm)')), + ('flow_1', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow 1')), + ('flow_2', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow 2')), + ('flow_3', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow 3')), + ('flow_4', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow 4')), + ('flow_5', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow 5')), + ('flow_sup', models.DecimalField(decimal_places=5, max_digits=10, verbose_name='Flow Sup. 
(cm)')), ], options={ - "ordering": ("level_inf",), - "managed": False, - "default_permissions": (), + 'ordering': ('level_inf',), + 'managed': False, + 'default_permissions': (), }, ), migrations.CreateModel( - name="PermissionsMeasurement", + name='PermissionsMeasurement', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], options={ - "permissions": (("validar", "usar interfaz de validación"),), - "managed": False, - "default_permissions": (), + 'permissions': (('validar', 'usar interfaz de validación'),), + 'managed': False, + 'default_permissions': (), }, ), migrations.CreateModel( - name="PolarWind", + name='PolarWind', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ( - "speed", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Speed" - ), - ), - ( - "direction", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Direction", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('speed', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Speed')), + ('direction', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Direction')), ], options={ - "managed": False, - "default_permissions": (), + 'managed': False, + 'default_permissions': (), }, ), migrations.CreateModel( - name="ValidationReport", + name='ValidationReport', fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False)), - ("estado", models.BooleanField()), - ("fecha", models.DateTimeField()), - ( - "valor_seleccionado", - models.DecimalField(decimal_places=6, max_digits=14, null=True), - ), - ( - "valor", - models.DecimalField(decimal_places=6, max_digits=14, null=True), - ), - ( - "variacion_consecutiva", - models.DecimalField(decimal_places=6, max_digits=14, null=True), - ), - ("comentario", models.CharField(max_length=350)), - ("class_fila", models.CharField(max_length=30)), - ("class_fecha", models.CharField(max_length=30)), - ("class_validacion", models.CharField(max_length=30)), - ("class_valor", models.CharField(max_length=30)), - ("class_variacion_consecutiva", models.CharField(max_length=30)), - ("class_stddev_error", models.CharField(max_length=30)), + ('id', models.BigAutoField(primary_key=True, serialize=False)), + ('estado', models.BooleanField()), + ('fecha', models.DateTimeField()), + ('valor_seleccionado', models.DecimalField(decimal_places=6, max_digits=14, null=True)), + ('valor', models.DecimalField(decimal_places=6, max_digits=14, null=True)), + ('variacion_consecutiva', models.DecimalField(decimal_places=6, max_digits=14, null=True)), + ('comentario', models.CharField(max_length=350)), + ('class_fila', models.CharField(max_length=30)), + ('class_fecha', models.CharField(max_length=30)), + ('class_validacion', models.CharField(max_length=30)), + ('class_valor', models.CharField(max_length=30)), + ('class_variacion_consecutiva', models.CharField(max_length=30)), + ('class_stddev_error', models.CharField(max_length=30)), ], options={ - "managed": False, + 'managed': False, }, ), 
migrations.CreateModel( - name="AirTemperature", + name='AirTemperature', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=5, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=2, - max_digits=5, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=2, - max_digits=5, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="AtmosphericPressure", + name='AtmosphericPressure', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="BatteryVoltage", + name='BatteryVoltage', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="ChlorineConcentrationDepth", + name='ChlorineConcentrationDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="DischargeCurve", + name='DischargeCurve', fields=[ - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ( - "require_recalculate_flow", - models.BooleanField( - default=False, verbose_name="Requires re-calculate flow?" 
- ), - ), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('require_recalculate_flow', models.BooleanField(default=False, verbose_name='Requires re-calculate flow?')), ], options={ - "ordering": ("station", "time"), + 'ordering': ('station', 'time'), }, ), migrations.CreateModel( - name="Flow", + name='Flow', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="FlowManual", + name='FlowManual', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="Humidity", + name='Humidity', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', 
models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="IndirectRadiation", + name='IndirectRadiation', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="LevelFunction", + name='LevelFunction', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ( - "level", - models.DecimalField( - db_index=True, - decimal_places=1, - max_digits=5, - verbose_name="Level (cm)", - ), - ), - ("function", models.CharField(max_length=80, verbose_name="Function")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('level', models.DecimalField(db_index=True, decimal_places=1, max_digits=5, verbose_name='Level (cm)')), + ('function', models.CharField(max_length=80, verbose_name='Function')), ], options={ - "ordering": ("discharge_curve", "level"), - "default_permissions": (), + 'ordering': ('discharge_curve', 'level'), + 'default_permissions': (), }, ), migrations.CreateModel( - name="OxygenConcentrationDepth", + name='OxygenConcentrationDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', 
timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="PercentageOxygenConcentrationDepth", + name='PercentageOxygenConcentrationDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="PhycocyaninDepth", + name='PhycocyaninDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="Precipitation", + name='Precipitation', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="RedoxPotentialDepth", + name='RedoxPotentialDepth', fields=[ - ( - "id", - 
models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="SoilMoisture", + name='SoilMoisture', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="SoilTemperature", + name='SoilTemperature', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', 
models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="SolarRadiation", + name='SolarRadiation', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="StripLevelReading", + name='StripLevelReading', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "uncertainty", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Uncertainty", - ), - ), - ( - "data_import_date", - models.DateTimeField(verbose_name="Data import date"), - ), - ( - "data_start_date", - models.DateTimeField(verbose_name="Data start date"), - ), - ("calibrated", models.BooleanField(verbose_name="Calibrated")), - ( - "comments", - models.CharField( - max_length=250, null=True, verbose_name="Comments" - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('uncertainty', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Uncertainty')), + ('data_import_date', models.DateTimeField(verbose_name='Data import date')), + ('data_start_date', models.DateTimeField(verbose_name='Data start date')), + ('calibrated', models.BooleanField(verbose_name='Calibrated')), + ('comments', models.CharField(max_length=250, null=True, verbose_name='Comments')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="WaterAcidityDepth", + name='WaterAcidityDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - 
"time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="WaterLevel", + name='WaterLevel', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="WaterTemperature", + name='WaterTemperature', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': 
False, }, ), migrations.CreateModel( - name="WaterTemperatureDepth", + name='WaterTemperatureDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="WaterTurbidityDepth", + name='WaterTurbidityDepth', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=2, max_digits=6, null=True, verbose_name="Value" - ), - ), - ("depth", models.PositiveSmallIntegerField(verbose_name="Depth")), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), ], options={ - "default_permissions": (), + 'default_permissions': (), }, ), migrations.CreateModel( - name="WindDirection", + name='WindDirection', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.CreateModel( - name="WindVelocity", + 
name='WindVelocity', fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "time", - timescale.db.models.fields.TimescaleDateTimeField(interval="1 day"), - ), - ("station_id", models.PositiveIntegerField(verbose_name="station_id")), - ( - "value", - models.DecimalField( - decimal_places=6, max_digits=14, null=True, verbose_name="Value" - ), - ), - ( - "maximum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Maximum", - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=6, - max_digits=14, - null=True, - verbose_name="Minimum", - ), - ), + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), ], options={ - "abstract": False, + 'abstract': False, }, ), migrations.AddIndex( - model_name="waterturbiditydepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_0e9e93_idx", - ), + model_name='waterturbiditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_0e9e93_idx'), ), migrations.AddIndex( - model_name="watertemperaturedepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_f376f5_idx", - ), + model_name='watertemperaturedepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_f376f5_idx'), ), migrations.AddIndex( - model_name="wateraciditydepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_cc3299_idx", - ), + model_name='wateraciditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_cc3299_idx'), ), migrations.AddIndex( - model_name="striplevelreading", - index=models.Index( - fields=["station_id", "data_import_date"], - name="measurement_station_18e0d6_idx", - ), + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_import_date'], name='measurement_station_18e0d6_idx'), ), migrations.AddIndex( - model_name="striplevelreading", - index=models.Index( - fields=["station_id", "data_start_date", "time"], - name="measurement_station_d41b10_idx", - ), + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_start_date', 'time'], name='measurement_station_d41b10_idx'), ), migrations.AddIndex( - model_name="striplevelreading", - index=models.Index( - fields=["data_import_date"], name="measurement_data_im_0b6d2a_idx" - ), + model_name='striplevelreading', + index=models.Index(fields=['data_import_date'], name='measurement_data_im_0b6d2a_idx'), ), migrations.AddIndex( - model_name="redoxpotentialdepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_1946e9_idx", - ), + model_name='redoxpotentialdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_1946e9_idx'), ), migrations.AddIndex( - model_name="phycocyanindepth", - index=models.Index( - fields=["station_id", 
"depth", "time"], - name="measurement_station_97154b_idx", - ), + model_name='phycocyanindepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_97154b_idx'), ), migrations.AddIndex( - model_name="percentageoxygenconcentrationdepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_a69078_idx", - ), + model_name='percentageoxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_a69078_idx'), ), migrations.AddIndex( - model_name="oxygenconcentrationdepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_a4c0cc_idx", - ), + model_name='oxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_a4c0cc_idx'), ), migrations.AddField( - model_name="levelfunction", - name="discharge_curve", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="measurement.DischargeCurve", - ), + model_name='levelfunction', + name='discharge_curve', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='measurement.dischargecurve'), ), migrations.AddField( - model_name="dischargecurve", - name="station", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Station", - verbose_name="Station", - ), + model_name='dischargecurve', + name='station', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.station', verbose_name='Station'), ), migrations.AddIndex( - model_name="chlorineconcentrationdepth", - index=models.Index( - fields=["station_id", "depth", "time"], - name="measurement_station_7d34d2_idx", - ), + model_name='chlorineconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='measurement_station_7d34d2_idx'), ), migrations.AlterUniqueTogether( - name="dischargecurve", - unique_together={("station", "time")}, + name='dischargecurve', + unique_together={('station', 'time')}, ), ] diff --git a/measurement/serializers.py b/measurement/serializers.py old mode 100644 new mode 100755 diff --git a/monthly/apps.py b/monthly/apps.py index e5f54372..cc800373 100755 --- a/monthly/apps.py +++ b/monthly/apps.py @@ -16,5 +16,5 @@ from django.apps import AppConfig -class HourlyConfig(AppConfig): - name = "hourly" +class MonthlyConfig(AppConfig): + name = "monthly" diff --git a/monthly/migrations/0001_initial.py b/monthly/migrations/0001_initial.py new file mode 100644 index 00000000..d049fd17 --- /dev/null +++ b/monthly/migrations/0001_initial.py @@ -0,0 +1,505 @@ +# Generated by Django 3.2.14 on 2023-04-15 03:19 + +from django.db import migrations, models +import timescale.db.models.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PolarWind', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('speed', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Speed')), + ('direction', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Direction')), + ], + options={ + 'managed': False, + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='AirTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='AtmosphericPressure', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='BatteryVoltage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ChlorineConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, 
verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Flow', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FlowManual', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Humidity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='IndirectRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', 
models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='OxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PercentageOxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PhycocyaninDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Precipitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', 
models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('total', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Total')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='RedoxPotentialDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='SoilMoisture', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SoilTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SolarRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', 
models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='StripLevelReading', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('uncertainty', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Uncertainty')), + ('data_import_date', models.DateTimeField(verbose_name='Data import date')), + ('data_start_date', models.DateTimeField(verbose_name='Data start date')), + ('calibrated', models.BooleanField(verbose_name='Calibrated')), + ('comments', models.CharField(max_length=250, null=True, verbose_name='Comments')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterAcidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterLevel', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, 
null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperatureDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterTurbidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='e')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WindDirection', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', 
models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WindVelocity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('date', models.DateField(verbose_name='date')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('completeness', models.DecimalField(decimal_places=1, max_digits=4)), + ('v', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='v')), + ('r', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='r')), + ('a', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='a')), + ('g', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='g')), + ('e', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='e')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddIndex( + model_name='waterturbiditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_wat_station_202485_idx'), + ), + migrations.AddIndex( + model_name='watertemperaturedepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_wat_station_8be936_idx'), + ), + migrations.AddIndex( + model_name='wateraciditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_wat_station_06480a_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_import_date'], name='monthly_str_station_8b7a1c_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_start_date', 'time'], name='monthly_str_station_c0561a_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['data_import_date'], name='monthly_str_data_im_277dc3_idx'), + ), + migrations.AddIndex( + model_name='redoxpotentialdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_red_station_4a35df_idx'), + ), + migrations.AddIndex( + model_name='phycocyanindepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_phy_station_847000_idx'), + ), + migrations.AddIndex( + model_name='percentageoxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_per_station_d1f852_idx'), + ), + migrations.AddIndex( + model_name='oxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_oxy_station_4c4c64_idx'), + ), + migrations.AddIndex( + model_name='chlorineconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='monthly_chl_station_18483d_idx'), + ), + ] diff --git a/sensor/migrations/0001_initial.py b/sensor/migrations/0001_initial.py index 2a3bf847..9d6aa076 100644 --- a/sensor/migrations/0001_initial.py +++ b/sensor/migrations/0001_initial.py @@ -1,98 +1,47 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# 
Generated by Django 3.2.14 on 2023-04-15 03:19 -import django.db.models.deletion from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): initial = True - dependencies = [] + dependencies = [ + ] operations = [ migrations.CreateModel( - name="SensorBrand", + name='SensorBrand', fields=[ - ( - "brand_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=25, verbose_name="Brand name")), + ('brand_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=25, verbose_name='Brand name')), ], options={ - "ordering": ("brand_id",), + 'ordering': ('brand_id',), }, ), migrations.CreateModel( - name="SensorType", + name='SensorType', fields=[ - ( - "type_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=25, verbose_name="Sensor type")), + ('type_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=25, verbose_name='Sensor type')), ], ), migrations.CreateModel( - name="Sensor", + name='Sensor', fields=[ - ( - "sensor_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ( - "code", - models.CharField( - max_length=32, null=True, unique=True, verbose_name="Code" - ), - ), - ( - "model", - models.CharField( - blank=True, max_length=150, null=True, verbose_name="Model" - ), - ), - ( - "serial", - models.CharField( - blank=True, - max_length=20, - null=True, - verbose_name="Serial number", - ), - ), - ( - "status", - models.BooleanField(default=False, verbose_name="Status (active)"), - ), - ( - "sensor_brand", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="sensor.SensorBrand", - verbose_name="Sensor brand", - ), - ), - ( - "sensor_type", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="sensor.SensorType", - verbose_name="Sensor type", - ), - ), + ('sensor_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('code', models.CharField(max_length=32, null=True, unique=True, verbose_name='Code')), + ('model', models.CharField(blank=True, max_length=150, null=True, verbose_name='Model')), + ('serial', models.CharField(blank=True, max_length=20, null=True, verbose_name='Serial number')), + ('status', models.BooleanField(default=False, verbose_name='Status (active)')), + ('sensor_brand', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='sensor.sensorbrand', verbose_name='Sensor brand')), + ('sensor_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='sensor.sensortype', verbose_name='Sensor type')), ], options={ - "ordering": ("code", "sensor_type", "sensor_brand", "model"), + 'ordering': ('code', 'sensor_type', 'sensor_brand', 'model'), }, ), ] diff --git a/sensor/serializers.py b/sensor/serializers.py old mode 100644 new mode 100755 diff --git a/setup.cfg b/setup.cfg old mode 100644 new mode 100755 diff --git a/static/images/database_viz.png b/static/images/database_viz.png old mode 100644 new mode 100755 diff --git a/station/migrations/0001_initial.py b/station/migrations/0001_initial.py index 929d47e0..57e4c276 100644 --- a/station/migrations/0001_initial.py +++ b/station/migrations/0001_initial.py @@ -1,341 +1,138 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated 
by Django 3.2.14 on 2023-04-15 03:19 import django.core.validators -import django.db.models.deletion from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): initial = True - dependencies = [] + dependencies = [ + ] operations = [ migrations.CreateModel( - name="Basin", + name='Basin', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=40)), + ('image', models.FileField(blank=True, null=True, upload_to='station/basin_image/', verbose_name='Photography/Map')), + ('file', models.FileField(blank=True, null=True, upload_to='station/basin_file/', verbose_name='File(PDF)')), + ], + options={ + 'ordering': ('id',), + }, + ), + migrations.CreateModel( + name='Country', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=40)), - ( - "image", - models.FileField( - blank=True, - null=True, - upload_to="station/basin_image/", - verbose_name="Photography/Map", - ), - ), - ( - "file", - models.FileField( - blank=True, - null=True, - upload_to="station/basin_file/", - verbose_name="File(PDF)", - ), - ), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=32)), ], options={ - "ordering": ("id",), + 'ordering': ('id',), }, ), migrations.CreateModel( - name="Country", + name='Ecosystem', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=32)), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=32)), ], options={ - "ordering": ("id",), + 'ordering': ('id',), }, ), migrations.CreateModel( - name="Ecosystem", + name='Institution', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=32)), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=32)), ], options={ - "ordering": ("id",), + 'ordering': ('id',), }, ), migrations.CreateModel( - name="Institution", + name='Place', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=32)), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=40)), + ('image', models.FileField(blank=True, null=True, upload_to='station/place_image/', verbose_name='Photography/Map')), ], options={ - "ordering": ("id",), + 'ordering': ('id',), }, ), migrations.CreateModel( - name="Place", + name='PlaceBasin', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=40)), - ( - "image", - models.FileField( - blank=True, - null=True, - upload_to="station/place_image/", - verbose_name="Photography/Map", - ), - ), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('image', models.FileField(blank=True, null=True, upload_to='station/place_basin_image/', verbose_name='Photography/Map')), + ('basin', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.basin', verbose_name='Basin')), + ('place', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, 
to='station.place', verbose_name='Place')), ], options={ - "ordering": ("id",), + 'ordering': ('id',), + 'unique_together': {('place', 'basin')}, }, ), migrations.CreateModel( - name="PlaceBasin", + name='Region', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ( - "image", - models.FileField( - blank=True, - null=True, - upload_to="station/place_basin_image/", - verbose_name="Photography/Map", - ), - ), - ( - "basin", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Basin", - verbose_name="Basin", - ), - ), - ( - "place", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Place", - verbose_name="Place", - ), - ), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=32, verbose_name='Name')), + ('country', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.country', verbose_name='Country')), ], options={ - "ordering": ("id",), - "unique_together": {("place", "basin")}, + 'ordering': ('id',), }, ), migrations.CreateModel( - name="Region", + name='StationType', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=32, verbose_name="Name")), - ( - "country", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Country", - verbose_name="Country", - ), - ), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=40)), ], options={ - "ordering": ("id",), + 'ordering': ('id',), }, ), migrations.CreateModel( - name="StationType", + name='Station', fields=[ - ( - "id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=40)), + ('station_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('station_code', models.CharField(max_length=32, verbose_name='Code')), + ('station_name', models.CharField(blank=True, max_length=100, null=True, verbose_name='Description')), + ('station_state', models.BooleanField(default=True, verbose_name='Operational')), + ('station_latitude', models.DecimalField(blank=True, decimal_places=14, max_digits=17, null=True, verbose_name='Latitude')), + ('station_longitude', models.DecimalField(blank=True, decimal_places=14, max_digits=17, null=True, verbose_name='Longitude')), + ('station_altitude', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MaxValueValidator(6000), django.core.validators.MinValueValidator(0)], verbose_name='Altitude')), + ('station_file', models.FileField(blank=True, null=True, upload_to='station/station_file/', verbose_name='Photography/File')), + ('station_external', models.BooleanField(default=False, verbose_name='External')), + ('influence_km', models.DecimalField(blank=True, decimal_places=4, max_digits=12, null=True, verbose_name='Área of input (km)')), + ('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.country', verbose_name='Country')), + ('ecosystem', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.ecosystem', verbose_name='Ecosystem')), + ('institution', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, 
to='station.institution', verbose_name='Institution')), + ('place_basin', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.placebasin', verbose_name='Place-Basin')), + ('region', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.region', verbose_name='Region/Province/Department')), + ('station_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.stationtype', verbose_name='StationType')), ], options={ - "ordering": ("id",), + 'ordering': ('station_id',), }, ), migrations.CreateModel( - name="Station", + name='DeltaT', fields=[ - ( - "station_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("station_code", models.CharField(max_length=32, verbose_name="Code")), - ( - "station_name", - models.CharField( - blank=True, - max_length=100, - null=True, - verbose_name="Description", - ), - ), - ( - "station_state", - models.BooleanField(default=True, verbose_name="Operational"), - ), - ( - "station_latitude", - models.DecimalField( - blank=True, - decimal_places=14, - max_digits=17, - null=True, - verbose_name="Latitude", - ), - ), - ( - "station_longitude", - models.DecimalField( - blank=True, - decimal_places=14, - max_digits=17, - null=True, - verbose_name="Longitude", - ), - ), - ( - "station_altitude", - models.IntegerField( - blank=True, - null=True, - validators=[ - django.core.validators.MaxValueValidator(6000), - django.core.validators.MinValueValidator(0), - ], - verbose_name="Altitude", - ), - ), - ( - "station_file", - models.FileField( - blank=True, - null=True, - upload_to="station/station_file/", - verbose_name="Photography/File", - ), - ), - ( - "station_external", - models.BooleanField(default=False, verbose_name="External"), - ), - ( - "influence_km", - models.DecimalField( - blank=True, - decimal_places=4, - max_digits=12, - null=True, - verbose_name="Área of input (km)", - ), - ), - ( - "country", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Country", - verbose_name="Country", - ), - ), - ( - "ecosystem", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Ecosystem", - verbose_name="Ecosystem", - ), - ), - ( - "institution", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Institution", - verbose_name="Institution", - ), - ), - ( - "place_basin", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.PlaceBasin", - verbose_name="Place-Basin", - ), - ), - ( - "region", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Region", - verbose_name="Region/Province/Department", - ), - ), - ( - "station_type", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.StationType", - verbose_name="StationType", - ), - ), + ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('delta_t', models.PositiveSmallIntegerField()), + ('station', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='station.station', verbose_name='Station')), ], options={ - "ordering": ("station_id",), + 'ordering': ('id',), }, ), ] diff --git a/station/models.py b/station/models.py index 20544cf8..fc699b50 100755 --- 
a/station/models.py +++ b/station/models.py @@ -284,7 +284,7 @@ class DeltaT(models.Model): id = models.AutoField("Id", primary_key=True) station = models.ForeignKey( Station, - on_delete=models.SET_NULL, + on_delete=models.CASCADE, verbose_name="Station", ) delta_t = models.PositiveSmallIntegerField() diff --git a/station/serializers.py b/station/serializers.py old mode 100644 new mode 100755 diff --git a/templates/menu_bar.html b/templates/menu_bar.html old mode 100644 new mode 100755 diff --git a/templates/table.html b/templates/table.html old mode 100644 new mode 100755 diff --git a/templates/table_base.html b/templates/table_base.html old mode 100644 new mode 100755 diff --git a/tests/__init__.py b/tests/__init__.py old mode 100644 new mode 100755 diff --git a/tests/formatting/__init__.py b/tests/formatting/__init__.py old mode 100644 new mode 100755 diff --git a/tests/formatting/test_models.py b/tests/formatting/test_models.py old mode 100644 new mode 100755 diff --git a/tests/importing/__init__.py b/tests/importing/__init__.py old mode 100644 new mode 100755 diff --git a/tests/importing/test_functions.py b/tests/importing/test_functions.py old mode 100644 new mode 100755 diff --git a/tests/importing/test_models.py b/tests/importing/test_models.py old mode 100644 new mode 100755 diff --git a/tests/measurement/__init__.py b/tests/measurement/__init__.py old mode 100644 new mode 100755 diff --git a/tests/measurement/test_models.py b/tests/measurement/test_models.py old mode 100644 new mode 100755 diff --git a/tests/sensor/__init__.py b/tests/sensor/__init__.py old mode 100644 new mode 100755 diff --git a/tests/sensor/test_models.py b/tests/sensor/test_models.py old mode 100644 new mode 100755 diff --git a/tests/station/__init__.py b/tests/station/__init__.py old mode 100644 new mode 100755 diff --git a/tests/station/test_models.py b/tests/station/test_models.py old mode 100644 new mode 100755 diff --git a/tests/test_data/iMHEA_HMT_01_HI_01_raw.csv b/tests/test_data/iMHEA_HMT_01_HI_01_raw.csv old mode 100644 new mode 100755 diff --git a/tests/timescaledb/__init__.py b/tests/timescaledb/__init__.py old mode 100644 new mode 100755 diff --git a/tests/timescaledb/test_behaviour.py b/tests/timescaledb/test_behaviour.py old mode 100644 new mode 100755 diff --git a/tests/variable/__init__.py b/tests/variable/__init__.py old mode 100644 new mode 100755 diff --git a/tests/variable/test_models.py b/tests/variable/test_models.py old mode 100644 new mode 100755 diff --git a/utilities/__init__.py b/utilities/__init__.py old mode 100644 new mode 100755 diff --git a/utilities/data/formatting_association.json b/utilities/data/formatting_association.json old mode 100644 new mode 100755 diff --git a/utilities/data/formatting_classification.json b/utilities/data/formatting_classification.json old mode 100644 new mode 100755 diff --git a/utilities/data/formatting_format.json b/utilities/data/formatting_format.json old mode 100644 new mode 100755 diff --git a/utilities/frontend_menu/menu.json b/utilities/frontend_menu/menu.json old mode 100644 new mode 100755 diff --git a/utilities/init.sh b/utilities/init.sh old mode 100644 new mode 100755 diff --git a/utilities/install_ssh.sh b/utilities/install_ssh.sh old mode 100644 new mode 100755 diff --git a/utilities/load_initial_data.py b/utilities/load_initial_data.py old mode 100644 new mode 100755 diff --git a/utilities/sshd_config b/utilities/sshd_config old mode 100644 new mode 100755 diff --git 
a/utilities/unused_scripts/maintenance/install_postgres_functions.py b/utilities/unused_scripts/maintenance/install_postgres_functions.py old mode 100644 new mode 100755 diff --git a/validated/migrations/0001_initial.py b/validated/migrations/0001_initial.py new file mode 100644 index 00000000..abe4cef7 --- /dev/null +++ b/validated/migrations/0001_initial.py @@ -0,0 +1,423 @@ +# Generated by Django 3.2.14 on 2023-04-15 03:19 + +from django.db import migrations, models +import timescale.db.models.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PolarWind', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('speed', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Speed')), + ('direction', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Direction')), + ], + options={ + 'managed': False, + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='AirTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=5, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='AtmosphericPressure', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='BatteryVoltage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ChlorineConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Flow', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FlowManual', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Humidity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='IndirectRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='OxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', 
timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PercentageOxygenConcentrationDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='PhycocyaninDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='Precipitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('total', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Total')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='RedoxPotentialDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='SoilMoisture', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + 
('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SoilTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SolarRadiation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='StripLevelReading', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('uncertainty', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Uncertainty')), + ('data_import_date', models.DateTimeField(verbose_name='Data import date')), + ('data_start_date', models.DateTimeField(verbose_name='Data start date')), + ('calibrated', models.BooleanField(verbose_name='Calibrated')), + ('comments', models.CharField(max_length=250, null=True, verbose_name='Comments')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterAcidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterLevel', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + 
('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperature', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WaterTemperatureDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WaterTurbidityDepth', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=2, max_digits=6, null=True, verbose_name='Value')), + ('depth', models.PositiveSmallIntegerField(verbose_name='Depth')), + ], + options={ + 'default_permissions': (), + }, + ), + migrations.CreateModel( + name='WindDirection', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WindVelocity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('time', 
timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), + ('station_id', models.PositiveIntegerField(verbose_name='station_id')), + ('used_for_hourly', models.BooleanField(default=False, verbose_name='used_for_hourly')), + ('value', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Value')), + ('maximum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=6, max_digits=14, null=True, verbose_name='Minimum')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddIndex( + model_name='waterturbiditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_w_station_20474c_idx'), + ), + migrations.AddIndex( + model_name='watertemperaturedepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_w_station_076d3e_idx'), + ), + migrations.AddIndex( + model_name='wateraciditydepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_w_station_e5f7c3_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_import_date'], name='validated_s_station_e6c151_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['station_id', 'data_start_date', 'time'], name='validated_s_station_8349d5_idx'), + ), + migrations.AddIndex( + model_name='striplevelreading', + index=models.Index(fields=['data_import_date'], name='validated_s_data_im_1fb6d3_idx'), + ), + migrations.AddIndex( + model_name='redoxpotentialdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_r_station_45035c_idx'), + ), + migrations.AddIndex( + model_name='phycocyanindepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_p_station_455746_idx'), + ), + migrations.AddIndex( + model_name='percentageoxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_p_station_fd5bba_idx'), + ), + migrations.AddIndex( + model_name='oxygenconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_o_station_93f67e_idx'), + ), + migrations.AddIndex( + model_name='chlorineconcentrationdepth', + index=models.Index(fields=['station_id', 'depth', 'time'], name='validated_c_station_752351_idx'), + ), + ] diff --git a/validated/models_v1.py b/validated/models_v1.py deleted file mode 100755 index 36cc5320..00000000 --- a/validated/models_v1.py +++ /dev/null @@ -1,355 +0,0 @@ -######################################################################################## -# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos -# (iMHEA)basada en los desarrollos realizados por: -# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. -# Contacto: info@fonag.org.ec -# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), -# Ecuador. -# Contacto: paramh2o@aguaquito.gob.ec -# -# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones -# creadoras, ya sea en uso total o parcial del código. 
-######################################################################################## -from __future__ import unicode_literals - -from typing import List, Type - -from django.db import models -from django.urls import reverse -from timescale.db.models.models import TimescaleModel - -from station.models import Station - -VALIDATEDS: List[str] = [] -"""Available validated variables.""" - - -# class PermissionsValidated(models.Model): -# """ -# Model used to define the permission "validar". -# """ -# -# class Meta: -# managed = False -# default_permissions = () -# permissions = (("validar", "usar interfaz de validación"),) - - -class PolarWind(TimescaleModel): - """ - Polar Wind validated with a velocity and direction at a specific time. - """ - - speed = models.DecimalField("Speed", max_digits=14, decimal_places=6, null=True) - direction = models.DecimalField( - "Direction", max_digits=14, decimal_places=6, null=True - ) - - class Meta: - """ - Para que no se cree en la migracion. - - NOTE: Why don't we want this in the migration? - """ - - default_permissions = () - managed = False - - -# class DischargeCurve(TimescaleModel): -# """ -# Discharge curve. -# -# Relates a station and a time and a bool as to whether a flow recalculation is -# required. -# """ -# -# id = models.AutoField("Id", primary_key=True) -# station = models.ForeignKey( -# Station, on_delete=models.SET_NULL, null=True, verbose_name="Station" -# ) -# require_recalculate_flow = models.BooleanField( -# verbose_name="Requires re-calculate flow?", default=False -# ) -# -# def __str__(self): -# return self.id -# -# def get_absolute_url(self): -# return reverse("validated:dischargecurve_detail", kwargs={"pk": self.pk}) -# -# class Meta: -# ordering = ("station", "time") -# unique_together = ("station", "time") - - -# class LevelFunction(TimescaleModel): -# """ -# Function Level. Relates a discharge curve to a level (in cm) to a function. 
-# -# NOTE: No idea what this is -> Ask Pablo -# """ -# -# discharge_curve = models.ForeignKey(DischargeCurve, on_delete=models.CASCADE) -# level = models.DecimalField( -# "Level (cm)", max_digits=5, decimal_places=1, db_index=True -# ) -# function = models.CharField("Function", max_length=80) -# -# def __str__(self): -# return str(self.pk) -# -# def get_absolute_url(self): -# return reverse("validated:levelfunction_detail", kwargs={"pk": self.pk}) -# -# class Meta: -# default_permissions = () -# ordering = ( -# "discharge_curve", -# "level", -# ) - - -############################################################## - - -class BaseValidated(TimescaleModel): - @classmethod - def __init_subclass__(cls, *args, **kwargs) -> None: - if not cls.__name__.startswith("_Vali") and cls.__name__ not in VALIDATEDS: - VALIDATEDS.append(cls.__name__) - - station_id = models.PositiveIntegerField("station_id") - # TODO check - used_for_hourly = models.BooleanField("used_for_hourly", default=False) - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["used_for_hourly"]), - models.Index(fields=["station_id", "time"]), - models.Index(fields=["time", "station_id"]), - ] - abstract = True - - -def create_vali_model( - digits=14, decimals=6, fields=("Value", "Maximum", "Minimum") -) -> Type[TimescaleModel]: - num = len(VALIDATEDS) + 1 - _fields = { - key.lower(): models.DecimalField( - key, - max_digits=digits, - decimal_places=decimals, - null=True, - ) - for key in fields - } - - class Meta: - abstract = True - - attrs = {"__module__": __name__, "Meta": Meta} - attrs.update(_fields) - - return type( - f"_Vali{num}", - (BaseValidated,), - attrs, - ) - - -class Precipitation(create_vali_model(digits=6, decimals=2, fields=("Value",))): - """Precipitation.""" - - -class AirTemperature(create_vali_model(digits=5, decimals=2)): - """Air temperature.""" - - -class Humidity(create_vali_model()): - """Humidity.""" - - -class WindVelocity(create_vali_model()): - """Wind velocity.""" - - -class WindDirection(create_vali_model()): - """Wind direction.""" - - -class SoilMoisture(create_vali_model()): - """Soil moisture.""" - - -class SolarRadiation(create_vali_model()): - """Solar radiation.""" - - -class AtmosphericPressure(create_vali_model()): - """Atmospheric pressure.""" - - -class WaterTemperature(create_vali_model()): - """Water temperature.""" - - -class Flow(create_vali_model()): - """Flow.""" - - -class WaterLevel(create_vali_model()): - """Water level.""" - - -class BatteryVoltage(create_vali_model()): - """Battery voltage.""" - - -class FlowManual(create_vali_model(fields=("Value",))): - """Flow (manual).""" - - -# TODO Check if There id needed StripLevelReading validated. 
-class StripLevelReading(create_vali_model(fields=("Value", "Uncertainty"))): - """Strip level reading.""" - - data_import_date = models.DateTimeField("Data import date") - data_start_date = models.DateTimeField("Data start date") - calibrated = models.BooleanField("Calibrated") - comments = models.CharField("Comments", null=True, max_length=250) - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "data_import_date"]), - models.Index(fields=["station_id", "data_start_date", "time"]), - models.Index(fields=["data_import_date"]), - ] - - -class SoilTemperature(create_vali_model()): - """Soil temperature.""" - - -class IndirectRadiation(create_vali_model()): - """Indirect radiation.""" - - -# Variables created for buoy with different depths -class WaterTemperatureDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Water temperature (degrees celcius) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class WaterAcidityDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Water acidity (pH) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class RedoxPotentialDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Redox potential (mV) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class WaterTurbidityDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Water turbidity (NTU) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class ChlorineConcentrationDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Chlorine concentration (ug/l) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class OxygenConcentrationDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Oxygen concentration (mg/l) at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class PercentageOxygenConcentrationDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Percentage oxygen concentration (mg/l) at a depth in cm. - - HELPWANTED: Is this wrong? It's teh same as above, perhaps units should - be %? --> DIEGO: Looks identical to the previous one to me. It might be an error. - """ - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] - - -class PhycocyaninDepth( - create_vali_model(digits=6, decimals=2, fields=("Value",)), -): - """Phycocyanin (?) 
at a depth in cm.""" - - depth = models.PositiveSmallIntegerField("Depth") - - class Meta: - default_permissions = () - indexes = [ - models.Index(fields=["station_id", "depth", "time"]), - ] diff --git a/validated/others/__init__.py b/validated/others/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/validated/others/forms.py b/validated/others/forms.py deleted file mode 100755 index d3c9ed5d..00000000 --- a/validated/others/forms.py +++ /dev/null @@ -1,82 +0,0 @@ -######################################################################################## -# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos -# (iMHEA)basada en los desarrollos realizados por: -# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. -# Contacto: info@fonag.org.ec -# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), -# Ecuador. -# Contacto: paramh2o@aguaquito.gob.ec -# -# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones -# creadoras, ya sea en uso total o parcial del código. -######################################################################################## - -from django import forms -from django.core.exceptions import ValidationError -from django.db import connection - -from station.models import Station -from variable.models import Variable - -from .models import LevelFunction - - -class LevelFunctionForm(forms.ModelForm): - class Meta: - model = LevelFunction - fields = ["level", "function"] - - def clean_function(self): - function = self.cleaned_data["function"] - - # Verifica si tiene letra H - if "H" not in function: - raise ValidationError("It must include parameter H (water level)") - - # Verifica si la función devuelve resultado - test_func = function.replace("H", "10") - sql = "SELECT eval_math('" + test_func + "');" - try: - with connection.cursor() as cursor: - cursor.execute(sql) - len = cursor.rowcount - cursor.fetchall() - except Exception as err: - raise ValidationError(f"Formula syntax error. {err}") - - if len < 1: - raise ValidationError("Formula syntax error. 
No rows found!") - return function - - -class ValidationSearchForm(forms.Form): - station = forms.ModelChoiceField( - queryset=Station.objects.order_by("station_code").filter( - station_external=False, station_type__in=(1, 2, 3) - ), - empty_label="Station", - ) - variable = forms.ModelChoiceField( - queryset=Variable.objects.order_by("variable_id").exclude(variable_id="10"), - empty_label="Variable", - ) - start = forms.DateField( - widget=forms.TextInput(attrs={"autocomplete": "off"}), - input_formats=["%Y-%m-%d"], - label="Start date", - required=True, - ) - end = forms.DateField( - widget=forms.TextInput(attrs={"autocomplete": "off"}), - input_formats=["%Y-%m-%d"], - label="End date", - required=True, - ) - lower_limit = forms.IntegerField(required=False) - upper_limit = forms.IntegerField(required=False) - - def __init__(self, *args, **kwargs): - super(ValidationSearchForm, self).__init__(*args, **kwargs) - self.fields["station"].widget.attrs["placeholder"] = self.fields[ - "station" - ].label diff --git a/validated/others/functions.py b/validated/others/functions.py deleted file mode 100755 index 084f88d5..00000000 --- a/validated/others/functions.py +++ /dev/null @@ -1,124 +0,0 @@ -######################################################################################## -# Plataforma para la Iniciativa Regional de Monitoreo Hidrológico de Ecosistemas Andinos -# (iMHEA)basada en los desarrollos realizados por: -# 1) FONDO PARA LA PROTECCIÓN DEL AGUA (FONAG), Ecuador. -# Contacto: info@fonag.org.ec -# 2) EMPRESA PÚBLICA METROPOLITANA DE AGUA POTABLE Y SANEAMIENTO DE QUITO (EPMAPS), -# Ecuador. -# Contacto: paramh2o@aguaquito.gob.ec -# -# IMPORTANTE: Mantener o incluir esta cabecera con la mención de las instituciones -# creadoras, ya sea en uso total o parcial del código. -######################################################################################## -from django.db import models - - -class ValidationReport(models.Model): - """ - NOTE: No idea what this one does. Why is there a model definition outside of - models.py, anyway? - """ - - id = models.BigAutoField(primary_key=True) - estado = models.BooleanField() - fecha = models.DateTimeField() - valor_seleccionado = models.DecimalField(max_digits=14, decimal_places=6, null=True) - valor = models.DecimalField(max_digits=14, decimal_places=6, null=True) - variacion_consecutiva = models.DecimalField( - max_digits=14, decimal_places=6, null=True - ) - comentario = models.CharField(max_length=350) - class_fila = models.CharField(max_length=30) - class_fecha = models.CharField(max_length=30) - class_validacion = models.CharField(max_length=30) - class_valor = models.CharField(max_length=30) - class_variacion_consecutiva = models.CharField(max_length=30) - class_stddev_error = models.CharField(max_length=30) - - class Meta: - managed = False - - -class LevelFunctionTable(models.Model): - """ - NOTE: No idea what this one does. Why is there a model definition outside of - models.py, anyway? - """ - - id = models.SmallIntegerField(primary_key=True) - funcion = models.CharField("Función", max_length=80) - level_inf = models.DecimalField("Level Inf. 
(cm)", max_digits=5, decimal_places=1) - level_1 = models.DecimalField("Level 1", max_digits=5, decimal_places=1) - level_2 = models.DecimalField("Level 2", max_digits=5, decimal_places=1) - level_3 = models.DecimalField("Level 3", max_digits=5, decimal_places=1) - level_4 = models.DecimalField("Level 4", max_digits=5, decimal_places=1) - level_5 = models.DecimalField("Level 5", max_digits=5, decimal_places=1) - level_sup = models.DecimalField("Level Sup. (cm)", max_digits=5, decimal_places=1) - flow_inf = models.DecimalField("Flow Inf. (cm)", max_digits=10, decimal_places=5) - flow_1 = models.DecimalField("Flow 1", max_digits=10, decimal_places=5) - flow_2 = models.DecimalField("Flow 2", max_digits=10, decimal_places=5) - flow_3 = models.DecimalField("Flow 3", max_digits=10, decimal_places=5) - flow_4 = models.DecimalField("Flow 4", max_digits=10, decimal_places=5) - flow_5 = models.DecimalField("Flow 5", max_digits=10, decimal_places=5) - flow_sup = models.DecimalField("Flow Sup. (cm)", max_digits=10, decimal_places=5) - - class Meta: - managed = False - default_permissions = () - ordering = ("level_inf",) - - -def level_function_table(curvadescarga_id): - sql = """ - WITH base AS ( - select nv.id, - nv.funcion, - coalesce( lag(nv.level) OVER (ORDER BY nv.level ASC), 0.0 ) AS level_inf, - nv.level AS level_sup - from measurement_levelfuncion nv - WHERE nv.curvadescarga_id = %s - ), - levels AS ( - select - b.id, - b.funcion, - b.level_inf, - (SELECT ROUND(b.level_inf + (b.level_sup - b.level_inf)/6.0, 1)) AS level1, - (SELECT ROUND(b.level_inf + 2*(b.level_sup - b.level_inf)/6.0, 1)) AS level2, - (SELECT ROUND(b.level_inf + 3*(b.level_sup - b.level_inf)/6.0, 1)) AS level3, - (SELECT ROUND(b.level_inf + 4*(b.level_sup - b.level_inf)/6.0, 1)) AS level4, - (SELECT ROUND(b.level_inf + 5*(b.level_sup - b.level_inf)/6.0, 1)) AS level5, - b.level_sup - from base b ORDER BY b.level_inf - ), - funciones AS ( - SELECT *, - replace(n.funcion, 'H', CAST(n.level_inf AS VarChar) ) AS f_inf, - replace(n.funcion, 'H', CAST(n.level1 AS VarChar) ) AS f1, - replace(n.funcion, 'H', CAST(n.level2 AS VarChar) ) AS f2, - replace(n.funcion, 'H', CAST(n.level3 AS VarChar) ) AS f3, - replace(n.funcion, 'H', CAST(n.level4 AS VarChar) ) AS f4, - replace(n.funcion, 'H', CAST(n.level5 AS VarChar) ) AS f5, - replace(n.funcion, 'H', CAST(n.level_sup AS VarChar) ) AS f_sup - from levels n - ) - select - f.id, - f.funcion, - f.level_inf, - f.level1, - f.level2, - f.level3, - f.level4, - f.level5, - f.level_sup, - (SELECT eval_math(f.f_inf)) AS flow_inf, - (SELECT eval_math(f.f1)) AS flow1, - (SELECT eval_math(f.f2)) AS flow2, - (SELECT eval_math(f.f3)) AS flow3, - (SELECT eval_math(f.f4)) AS flow4, - (SELECT eval_math(f.f5)) AS flow5, - (SELECT eval_math(f.f_sup)) AS flow_sup - FROM funciones f ORDER BY f.level_inf; - """ # noqa: W291 - return LevelFunctionTable.objects.raw(sql, [curvadescarga_id]) diff --git a/variable/migrations/0001_initial.py b/variable/migrations/0001_initial.py index 7f9ed590..a53ea964 100644 --- a/variable/migrations/0001_initial.py +++ b/variable/migrations/0001_initial.py @@ -1,7 +1,7 @@ -# Generated by Django 3.0.11 on 2022-07-20 12:47 +# Generated by Django 3.2.14 on 2023-04-15 03:19 -import django.db.models.deletion from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): @@ -9,167 +9,56 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ("sensor", "0001_initial"), - ("station", "0001_initial"), + ('sensor', 
'0001_initial'), + ('station', '0001_initial'), ] operations = [ migrations.CreateModel( - name="Unit", + name='Unit', fields=[ - ( - "unit_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("name", models.CharField(max_length=50, verbose_name="Name")), - ("initials", models.CharField(max_length=10, verbose_name="Initials")), + ('unit_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('name', models.CharField(max_length=50, verbose_name='Name')), + ('initials', models.CharField(max_length=10, verbose_name='Initials')), ], options={ - "ordering": ["unit_id"], + 'ordering': ['unit_id'], }, ), migrations.CreateModel( - name="Variable", + name='Variable', fields=[ - ( - "variable_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ( - "variable_code", - models.CharField(max_length=100, verbose_name="Code"), - ), - ("name", models.CharField(max_length=50, verbose_name="Name")), - ( - "maximum", - models.DecimalField( - decimal_places=2, max_digits=7, verbose_name="Maximum" - ), - ), - ( - "minimum", - models.DecimalField( - decimal_places=2, max_digits=7, verbose_name="Minimum" - ), - ), - ( - "diff_warning", - models.DecimalField( - blank=True, - decimal_places=2, - max_digits=7, - null=True, - verbose_name="Difference warning", - ), - ), - ( - "diff_error", - models.DecimalField( - blank=True, - decimal_places=2, - max_digits=7, - null=True, - verbose_name="Difference error", - ), - ), - ( - "outlier_limit", - models.DecimalField( - blank=True, - decimal_places=2, - max_digits=7, - null=True, - verbose_name="Sigmas (outliers)", - ), - ), - ("is_active", models.BooleanField(default=True, verbose_name="Active")), - ( - "is_cumulative", - models.BooleanField( - default=True, - verbose_name="Cumulative (True) or Averaged (False)", - ), - ), - ( - "automatic_report", - models.BooleanField(default=True, verbose_name="Automatic report"), - ), - ( - "null_limit", - models.DecimalField( - decimal_places=1, - max_digits=4, - null=True, - verbose_name="Null limit (%)", - ), - ), - ( - "unit", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="variable.Unit", - verbose_name="Unit", - ), - ), + ('variable_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('variable_code', models.CharField(max_length=100, verbose_name='Code')), + ('name', models.CharField(max_length=50, verbose_name='Name')), + ('maximum', models.DecimalField(decimal_places=2, max_digits=7, verbose_name='Maximum')), + ('minimum', models.DecimalField(decimal_places=2, max_digits=7, verbose_name='Minimum')), + ('diff_warning', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='Difference warning')), + ('diff_error', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='Difference error')), + ('outlier_limit', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True, verbose_name='Sigmas (outliers)')), + ('is_active', models.BooleanField(default=True, verbose_name='Active')), + ('is_cumulative', models.BooleanField(default=True, verbose_name='Cumulative (True) or Averaged (False)')), + ('automatic_report', models.BooleanField(default=True, verbose_name='Automatic report')), + ('null_limit', models.DecimalField(decimal_places=1, max_digits=4, null=True, verbose_name='Null limit (%)')), + ('unit', models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.SET_NULL, to='variable.unit', verbose_name='Unit')), ], options={ - "ordering": ["variable_id"], + 'ordering': ['variable_id'], }, ), migrations.CreateModel( - name="SensorInstallation", + name='SensorInstallation', fields=[ - ( - "sensorinstallation_id", - models.AutoField( - primary_key=True, serialize=False, verbose_name="Id" - ), - ), - ("start_date", models.DateField(verbose_name="Start date")), - ( - "end_date", - models.DateField(blank=True, null=True, verbose_name="End date"), - ), - ("state", models.BooleanField(default=True, verbose_name="Active")), - ( - "sensor", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="sensor.Sensor", - verbose_name="Sensor", - ), - ), - ( - "station", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="station.Station", - verbose_name="Station", - ), - ), - ( - "variable", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="variable.Variable", - verbose_name="Variable", - ), - ), + ('sensorinstallation_id', models.AutoField(primary_key=True, serialize=False, verbose_name='Id')), + ('start_date', models.DateField(verbose_name='Start date')), + ('end_date', models.DateField(blank=True, null=True, verbose_name='End date')), + ('state', models.BooleanField(default=True, verbose_name='Active')), + ('sensor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='sensor.sensor', verbose_name='Sensor')), + ('station', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='station.station', verbose_name='Station')), + ('variable', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='variable.variable', verbose_name='Variable')), ], options={ - "ordering": ["station"], + 'ordering': ['station'], }, ), ] diff --git a/variable/serializers.py b/variable/serializers.py old mode 100644 new mode 100755 From a5d27f3a187d125594ea5354dea1a4d07f30f6b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Sat, 15 Apr 2023 22:44:22 -0500 Subject: [PATCH 03/24] Validation calculations and UI. 
First approach for integration --- .idea/misc.xml | 2 +- validated/forms.py | 59 + validated/functions.py | 1265 +++++++++ validated/models.py | 9 +- validated/scripts/tests.py | 6 + .../static/validated/daily_validation.js | 2449 +++++++++++++++++ validated/static/validated/js_ajax2.js | 289 ++ validated/templates/daily_validation.html | 697 +++++ validated/templates/datos_horarios.html | 30 + validated/templates/diario_table.html | 43 + validated/templates/periodos_validacion.html | 37 + validated/templates/validacion_diaria.html | 697 +++++ validated/urls.py | 15 + validated/views.py | 377 +++ 14 files changed, 5973 insertions(+), 2 deletions(-) create mode 100755 validated/forms.py create mode 100644 validated/functions.py create mode 100755 validated/scripts/tests.py create mode 100755 validated/static/validated/daily_validation.js create mode 100755 validated/static/validated/js_ajax2.js create mode 100755 validated/templates/daily_validation.html create mode 100755 validated/templates/datos_horarios.html create mode 100755 validated/templates/diario_table.html create mode 100755 validated/templates/periodos_validacion.html create mode 100755 validated/templates/validacion_diaria.html diff --git a/.idea/misc.xml b/.idea/misc.xml index 95111376..faee0d92 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -1,6 +1,6 @@ - + diff --git a/validated/forms.py b/validated/forms.py new file mode 100755 index 00000000..4f15891f --- /dev/null +++ b/validated/forms.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +from django.forms import ModelForm, Form, ModelChoiceField, DateTimeField +from django.forms import ModelForm +from validacion.models import Validacion +from estacion.models import Estacion +from station.models import Station +from variable.models import Variable + +from django import forms +from estacion.models import Tipo + + + +class DailyValidationForm(forms.Form): + station = forms.ModelChoiceField( + queryset=Station.objects.order_by('station_code'), + empty_label="Station" + ) + variable = forms.ModelChoiceField( + queryset=Variable.objects.order_by('variable_code'), + empty_label="Variable" + ) + start_date = forms.DateField( + input_formats=['%Y-%m-%d'], + label="Start date", + required=True, + widget=forms.TextInput(attrs={'autocomplete': 'off'}), + ) + end_date = forms.DateField( + input_formats=['%Y-%m-%d'], + label="End date", + required=True, + widget=forms.TextInput(attrs={'autocomplete': 'off'}), + ) + minimum = forms.DecimalField(required=False) + maximum = forms.DecimalField(required=False) + #revalidar = forms.BooleanField(label="Revalidar", help_text='Marcar si deseas borrar la última validacion') + def __init__(self, *args, **kwargs): + super(DailyValidationForm, self).__init__(*args, **kwargs) + self.fields['station'].widget.attrs['placeholder'] = self.fields['station'].label + +class ValidacionSearchForm(forms.Form): + estacion = forms.ModelChoiceField(queryset=Estacion.objects.order_by('est_codigo').filter(est_externa=False, tipo__in=(1,2,3)), empty_label="Estación") + variable = forms.ModelChoiceField(queryset=Variable.objects.order_by('var_id').exclude(var_id='10'), empty_label="Variable") + inicio = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Inicio", required=True) + fin = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Fin", required=True) + limite_inferior = forms.IntegerField(required=False) + limite_superior = 
forms.IntegerField(required=False) + #revalidar = forms.BooleanField(label="Revalidar", help_text='Marcar si deseas borrar la última validacion') + def __init__(self, *args, **kwargs): + super(ValidacionSearchForm, self).__init__(*args, **kwargs) + self.fields['estacion'].widget.attrs['placeholder'] = self.fields['estacion'].label + +# +# class BorrarForm(Form): +# estacion = ModelChoiceField(queryset=Estacion.objects.order_by('est_id').all(), empty_label="Estación") +# variable = ModelChoiceField(queryset=Variable.objects.order_by('var_id').all(), empty_label="Variable") +# inicio = DateTimeField(input_formats=['%Y-%m-%d %H:%M:%S'], label="Fecha de Inicio") +# fin = DateTimeField(input_formats=['%Y-%m-%d %H:%M:%S'], label="Fecha de Fin") diff --git a/validated/functions.py b/validated/functions.py new file mode 100644 index 00000000..1efb3018 --- /dev/null +++ b/validated/functions.py @@ -0,0 +1,1265 @@ +from variable.models import Variable +# from estacion.models import Estacion +from django.apps import apps +import pandas as pd +import numpy as np +from django.db.models import IntegerField, Value, BooleanField +# from diario.models import Var2Diario +import decimal as dec +from datetime import datetime, timedelta, time + + +def reporte_diario(station, variable, start_time, end_time, maximum, minimum): + reporte, series = reporte_diario_dataframes(station, variable, start_time, end_time, maximum, minimum) + # reporte, series = calculo_reporte_diario(station, variable, start_time, end_time, maximum, minimum) + reporte.rename( + columns={ + 'date':'fecha', + 'date_error':'fecha_error', + # 'repeated_values_count':'fecha_numero', + 'extra_data_count': 'fecha_numero', + 'avg_value':'valor', + 'max_maximum':'maximo', + 'min_minimum':'minimo', + 'data_existence_percentage':'porcentaje', + 'is_null':'porcentaje_error', + 'value_error':'valor_error', + 'maximum_error':'maximo_error', + 'minimum_error':'minimo_error', + 'suspicious_values_count':'valor_numero', + 'suspicious_maximums_count':'maximo_numero', + 'suspicious_minimums_count':'minimo_numero', + 'historic_diary_avg':'media_historica', + 'state':'estado', + 'all_validated':'validado', + 'value_difference_error_count':'c_varia_err', + }, + inplace=True + ) + + + # response = acumulado.to_dict(orient='list') + # response = _records.to_dict(orient='records') + if variable.var_id in [4, 5]: + reporte['n_valor'] = 0 + else: + reporte['n_valor'] = reporte['c_varia_err'] + num_fecha = len(reporte[reporte['fecha_error'].ne(1) & ~reporte['fecha_error'].isna()].index) + num_porcentaje = len(reporte[reporte['porcentaje_error'].eq(True)]) + num_valor = len(reporte[reporte['porcentaje_error'].eq(False) & ~reporte['valor_numero'].isna()]) + num_maximo = len(reporte[reporte['porcentaje_error'].eq(False) & ~reporte['maximo_numero'].isna()]) + num_minimo = len(reporte[reporte['porcentaje_error'].eq(False) & ~reporte['minimo_numero'].isna()]) + num_dias = len(reporte.index) + + data = {'estacion': [{ + 'est_id': station.est_id, + 'est_nombre': station.est_nombre, + }], + 'variable': [{ + 'var_id': variable.var_id, + 'var_nombre': variable.var_nombre, + 'var_maximo': variable.var_maximo, + 'var_minimo': variable.var_minimo, + 'var_unidad_sigla': variable.uni_id.uni_sigla, + 'var_unidad_sigla': variable.uni_id.uni_sigla, + 'es_acumulada': variable.es_acumulada, + }], + 'datos': reporte.fillna('').to_dict(orient='records'), + 'indicadores': [{ + 'num_fecha': num_fecha, + 'num_porcentaje': num_porcentaje, + 'num_valor': num_valor, + 'num_maximo': num_maximo, 
+ 'num_minimo': num_minimo, + 'num_dias': num_dias + }], + 'datos_grafico': series.fillna('').values.tolist(),#datos_grafico, + 'grafico': None,#grafico_msj, + 'curva': None,#mensaje + } + return data + + +# Basic calculations is the main functions for calculations +def basic_calculations(station, variable, start_time, end_time, inf_lim_variable, sup_lim_variable): + tx_period = 5 + try: + model_name = 'Var' + str(variable.var_id) + variable = Variable.objects.get(pk=variable.var_id) + station = Estacion.objects.get(pk=station.est_id) + except: + model_name = 'Var' + str(variable) + variable = Variable.objects.get(pk=variable) + station = Estacion.objects.get(pk=station) + + Measurement = apps.get_model(app_label='medicion', model_name=model_name+'Medicion') + Validated = apps.get_model(app_label='validacion', model_name=model_name+'Validado') + + + # filter_args = {} + # validated = pd.DataFrame.from_records(validated.values(**filter_args)) + validated = Validated.objects.filter( + estacion_id=station.est_id, + fecha__gte=start_time, + fecha__lte=end_time + ).annotate( + is_validated=Value(True, output_field=BooleanField()),# True is for 'validated' tables, False for raw 'measurement' + exists_in_validated=Value(True, output_field=BooleanField()), + null_value=Value(False, output_field=BooleanField()) + ).order_by('fecha') + validated = pd.DataFrame.from_records( + validated.values('id', 'fecha', 'is_validated', 'valor', 'maximo', 'minimo', 'exists_in_validated', 'null_value') + ) + # TODO: eliminar + validated = validated.rename(columns={'fecha':'time', 'valor':'value', 'maximo':'maximum', 'minimo':'minimum'}) + if validated.empty: + validated = pd.DataFrame(columns=['id', 'time', 'is_validated', 'value', 'maximum', 'minimum', + 'exists_in_validated', 'null_value']) + + # TODO WHich one is faster? + # # validated['time'] = validated['time'].dt.floor('min') + # validated['time'] = validated['time'].values.astype(' variable.minimum + # variable.var_maximo -> variable.maximum + + # daily['suspicious_values_count'] = daily_group.agg( + # suspicious=pd.NamedAgg( + # column='value', + # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + # )).to_numpy() + daily['suspicious_values_count'] = daily_group['suspicious_value'].count() + # daily['suspicious_maximums_count'] = daily_group.agg( + # suspicious=pd.NamedAgg( + # column='maximum', + # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + # )).to_numpy() + daily['suspicious_maximums_count'] = daily_group['suspicious_maximum'].count() + # daily['suspicious_minimums_count'] = daily_group.agg( + # suspicious=pd.NamedAgg( + # column='minimum', + # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + # )).to_numpy() + daily['suspicious_minimums_count'] = daily_group['suspicious_minimum'].count() + + # TODO check this for PARAMH2O (tabla_varia_erro) + # REF. NAME: tabla_varia_erro + # Calculating consecutive differences and check for errors. + # 'time_lapse_status' set to: + # 0 if 'time_lapse' < 'period' + # 1 if 'time_lapse' == 'period' + # 2 if 'time_lapse' > 'period' + # + + daily['value_difference_error_count'] = daily_group['value_difference_error'].sum(numeric_only=False).to_numpy() + + + # # REF. 
NAME: lapsos_dias + # # Generate a sequence of days following in the calendar to compare with data in database and note voids + # # TODO Analizar que esto pudiera ser incluído en la primera generación de daily + # calendar_day_seq = pd.DataFrame( + # pd.date_range(start=start_time, end=end_time).date, + # columns=['date'] + # ) + # daily = calendar_day_seq.merge(daily, on='date', how='left') + + if daily.empty: + daily = df = pd.DataFrame(columns=['id', 'date', 'data_count', 'avg_value', 'max_maximum', 'min_minimum', + 'all_validated', 'data_existence_percentage', 'is_null', + 'suspicious_values_count', 'suspicious_maximums_count', + 'suspicious_minimums_count', 'value_difference_error_count', + 'day_interval', 'date_error', 'extra_data_count', 'historic_diary_avg', + 'state', 'value_error', 'maximum_error', 'minimum_error'] + ) + return daily, selected[['time', 'value']] + + # REF. NAME: fecha_error o dia_error + daily['day_interval'] = (daily['date'] - daily['date'].shift(1)).dt.days + daily['day_interval'][0] = 1 + daily['date_error'] = np.where(daily['day_interval'].gt(1), 3, 1) + # TODO hacer un groupby de repeated_values_count por día, para pasar el valor total de repetidos por día + # posiblemente convenga hacer un solo cálculo arriba + + # fecha_numero: repeated_values_count + # # REF. NAME: tabla_duplicados + # # count_of_repeated + repeated_in_validated = validated.groupby(['time_truncated'])['time_truncated'].count() + repeated_in_validated = repeated_in_validated.reset_index(name="repeated_in_validated") + repeated_in_validated['repeated_in_validated'] = np.where( + repeated_in_validated['repeated_in_validated'].gt(0), + repeated_in_validated['repeated_in_validated'] - 1, + 0, + ) + + repeated_in_measurement = measurement.groupby(['time_truncated'])['time_truncated'].count() + repeated_in_measurement = repeated_in_measurement.reset_index(name="repeated_in_measurement") + repeated_in_measurement['repeated_in_measurement'] = np.where( + repeated_in_measurement['repeated_in_measurement'].gt(0), + repeated_in_measurement['repeated_in_measurement'] - 1, + 0, + ) + extra_data_count = pd.merge(repeated_in_validated, + repeated_in_measurement, + on=['time_truncated'], + how='outer', + indicator=False) + extra_data_count.fillna(0, inplace=True) + extra_data_count.sort_values(by=['time_truncated'], inplace=True) + extra_data_count['extra_values_count'] = extra_data_count['repeated_in_validated'] + \ + extra_data_count['repeated_in_measurement'] + extra_data_count['date'] = pd.to_datetime(extra_data_count['time_truncated']).dt.date + + extra_data_daily_group = extra_data_count[extra_data_count['extra_values_count'] > 0].groupby('date') + extra_data_daily = extra_data_daily_group['extra_values_count'].sum() + extra_data_daily = extra_data_daily.reset_index(name='extra_data_count') + daily = daily.merge(extra_data_daily, on='date', how='left') + daily['extra_data_count'].fillna(0, inplace=True) + + # porcentaje : data_existence_percentage + # porcentaje_error : null_value + # valor_error : (posiblemente no requiera) + # maximo_error : (posiblemente no requiera) + # minimo_error : (posiblemente no requiera) + # valor_numero : suspicious_values_count + # maximo_numero : suspicious_maximums_count + # minimo_numero : suspicious_minimums_count + # media_historica : historic_mean + # SELECT AVG(dp.valor) FROM diario_var2diario dp WHERE dp.estacion_id = 4 AND + # date_part('day',dp.fecha)= 13 AND date_part('month',dp.fecha)= 10 + # historic_diaries = 
Var2Diario.objects.filter(estacion_id=station.est_id, fecha__day=) + + + month_day_tuples = tuple(list(zip( + pd.DatetimeIndex(daily['date']).month, + pd.DatetimeIndex(daily['date']).day + ))) + historic_diary = Var2Diario.objects.filter(estacion_id=station.est_id).extra( + where=["(date_part('month', fecha), date_part('day', fecha)) in %s"], + params=[month_day_tuples] + ) + historic_diary = pd.DataFrame(list(historic_diary.values())) + historic_diary = historic_diary.rename(columns={'fecha':'date', 'valor':'value'}) + historic_diary['month-day'] = pd.DatetimeIndex(historic_diary['date']).month.astype(str) \ + + '-' + pd.DatetimeIndex(historic_diary['date']).day.astype(str) + historic_diary_group = historic_diary.groupby(['month-day']) + daily['historic_diary_avg'] = historic_diary_group['value'].mean().to_numpy() + + + # estado : state + daily['state'] = True + + # validado : + + # validated_only = selected[['date', 'is_validated']].loc[selected['is_validated'] == True] + # validated_count = validated_only.groupby('date')['date'].count().reset_index(name='validated_count') + # daily = daily.merge(validated_count, on='date', how='left') + # daily['validated_count'].fillna(0, inplace=True) + + # daily['all_validated'] = daily_group['is_validated'].all().to_numpy() + + # c_varia_err + + daily['data_count'].fillna(0, inplace=True) + daily['data_existence_percentage'].fillna(0, inplace=True) + daily['suspicious_values_count'].fillna(0, inplace=True) + daily['suspicious_maximums_count'].fillna(0, inplace=True) + daily['suspicious_minimums_count'].fillna(0, inplace=True) + daily['value_difference_error_count'].fillna(0, inplace=True) + + + ## + # TODO check, maybe it's not needed anymore + daily['value_error'] = np.where(daily['suspicious_values_count'].gt(0), True, False,) + daily['maximum_error'] = np.where(daily['suspicious_maximums_count'].gt(0), True, False, ) + daily['minimum_error'] = np.where(daily['suspicious_minimums_count'].gt(0), True, False, ) + # + ## + + # Round decimals + # TODO cambiar 'valor' por 'value' en pAricia + decimal_places = Measurement._meta.get_field('valor').decimal_places + daily['avg_value'] = daily['avg_value'].astype(np.float64).round(decimal_places) + daily['max_maximum'] = daily['max_maximum'].astype(np.float64).round(decimal_places) + daily['min_minimum'] = daily['min_minimum'].astype(np.float64).round(decimal_places) + daily['data_existence_percentage'] = daily['data_existence_percentage'].astype(np.float64).round(1) + daily.reset_index(names='id', inplace=True) + ## TODO Eliminar o corregir ids -> id + # + # daily.rename(columns={'id':'ids',}, inplace=True) + daily['ids'] = daily['id'] + # + ## + return daily, selected[['time', 'value']] + + + +def calculo_reporte_diario(station, variable, start_time, end_time, maximum, minimum): + # variable_id = 2 + # station_id = 4 + # start_time = '2022-10-11 00:00:00' + # end_time = '2022-10-25 23:59:59' + + # Period (or "frecuency") number of minutes between every captured data (e.g. 
5 minutes, 10 minutes) + tx_period = 5 + + + model_name = 'Var' + str(variable.var_id) + Measurement = apps.get_model(app_label='medicion', model_name=model_name+'Medicion') + Validated = apps.get_model(app_label='validacion', model_name=model_name+'Validado') + + # + variable = Variable.objects.get(pk=variable.var_id) + station = Estacion.objects.get(pk=station.est_id) + + # filter_args = {} + # validated = pd.DataFrame.from_records(validated.values(**filter_args)) + validated = Validated.objects.filter( + estacion_id=station.est_id, + fecha__gte=start_time, + fecha__lte=end_time + ).annotate( + is_validated=Value(True, output_field=BooleanField()),# True is for 'validated' tables, False for raw 'measurement' + exists_in_validated=Value(True, output_field=BooleanField()), + null_value=Value(False, output_field=BooleanField()) + ).order_by('fecha') + validated = pd.DataFrame.from_records( + validated.values('id', 'fecha', 'is_validated', 'valor', 'maximo', 'minimo', 'exists_in_validated', 'null_value') + ) + # TODO: eliminar + validated = validated.rename(columns={'fecha':'time', 'valor':'value', 'maximo':'maximum', 'minimo':'minimum'}) + if validated.empty: + validated = pd.DataFrame(columns=['id', 'time', 'is_validated', 'value', 'maximum', 'minimum', + 'exists_in_validated', 'null_value']) + + # TODO WHich one is faster? + # # validated['time'] = validated['time'].dt.floor('min') + # validated['time'] = validated['time'].values.astype(' variable.minimum + # variable.var_maximo -> variable.maximum + + daily['suspicious_values_count'] = daily_group.agg( + suspicious=pd.NamedAgg( + column='value', + aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + )).to_numpy() + daily['suspicious_maximums_count'] = daily_group.agg( + suspicious=pd.NamedAgg( + column='maximum', + aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + )).to_numpy() + daily['suspicious_minimums_count'] = daily_group.agg( + suspicious=pd.NamedAgg( + column='minimum', + aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() + )).to_numpy() + + # TODO check this for PARAMH2O (tabla_varia_erro) + # REF. NAME: tabla_varia_erro + # Calculating consecutive differences and check for errors. + # 'time_lapse_status' set to: + # 0 if 'time_lapse' < 'period' + # 1 if 'time_lapse' == 'period' + # 2 if 'time_lapse' > 'period' + # + + daily['value_difference_error_count'] = daily_group['value_difference_error'].sum(numeric_only=False).to_numpy() + + + # # REF. NAME: lapsos_dias + # # Generate a sequence of days following in the calendar to compare with data in database and note voids + # # TODO Analizar que esto pudiera ser incluído en la primera generación de daily + # calendar_day_seq = pd.DataFrame( + # pd.date_range(start=start_time, end=end_time).date, + # columns=['date'] + # ) + # daily = calendar_day_seq.merge(daily, on='date', how='left') + + if daily.empty: + daily = df = pd.DataFrame(columns=['id', 'date', 'data_count', 'avg_value', 'max_maximum', 'min_minimum', + 'all_validated', 'data_existence_percentage', 'is_null', + 'suspicious_values_count', 'suspicious_maximums_count', + 'suspicious_minimums_count', 'value_difference_error_count', + 'day_interval', 'date_error', 'extra_data_count', 'historic_diary_avg', + 'state', 'value_error', 'maximum_error', 'minimum_error'] + ) + return daily, selected[['time', 'value']] + + # REF. 
NAME: fecha_error o dia_error + daily['day_interval'] = (daily['date'] - daily['date'].shift(1)).dt.days + daily['day_interval'][0] = 1 + daily['date_error'] = np.where(daily['day_interval'].gt(1), 3, 1) + # TODO hacer un groupby de repeated_values_count por día, para pasar el valor total de repetidos por día + # posiblemente convenga hacer un solo cálculo arriba + + # fecha_numero: repeated_values_count + extra_data_daily_group = extra_data_count[extra_data_count['extra_values_count'] > 0].groupby('date') + extra_data_daily = extra_data_daily_group['extra_values_count'].sum() + extra_data_daily = extra_data_daily.reset_index(name='extra_data_count') + daily = daily.merge(extra_data_daily, on='date', how='left') + daily['extra_data_count'].fillna(0, inplace=True) + + # porcentaje : data_existence_percentage + # porcentaje_error : null_value + # valor_error : (posiblemente no requiera) + # maximo_error : (posiblemente no requiera) + # minimo_error : (posiblemente no requiera) + # valor_numero : suspicious_values_count + # maximo_numero : suspicious_maximums_count + # minimo_numero : suspicious_minimums_count + # media_historica : historic_mean + # SELECT AVG(dp.valor) FROM diario_var2diario dp WHERE dp.estacion_id = 4 AND + # date_part('day',dp.fecha)= 13 AND date_part('month',dp.fecha)= 10 + # historic_diaries = Var2Diario.objects.filter(estacion_id=station.est_id, fecha__day=) + + + month_day_tuples = tuple(list(zip( + pd.DatetimeIndex(daily['date']).month, + pd.DatetimeIndex(daily['date']).day + ))) + historic_diary = Var2Diario.objects.filter(estacion_id=station.est_id).extra( + where=["(date_part('month', fecha), date_part('day', fecha)) in %s"], + params=[month_day_tuples] + ) + historic_diary = pd.DataFrame(list(historic_diary.values())) + historic_diary = historic_diary.rename(columns={'fecha':'date', 'valor':'value'}) + historic_diary['month-day'] = pd.DatetimeIndex(historic_diary['date']).month.astype(str) \ + + '-' + pd.DatetimeIndex(historic_diary['date']).day.astype(str) + historic_diary_group = historic_diary.groupby(['month-day']) + daily['historic_diary_avg'] = historic_diary_group['value'].mean().to_numpy() + + + # estado : state + daily['state'] = True + + # validado : + + # validated_only = selected[['date', 'is_validated']].loc[selected['is_validated'] == True] + # validated_count = validated_only.groupby('date')['date'].count().reset_index(name='validated_count') + # daily = daily.merge(validated_count, on='date', how='left') + # daily['validated_count'].fillna(0, inplace=True) + + # daily['all_validated'] = daily_group['is_validated'].all().to_numpy() + + # c_varia_err + + daily['data_count'].fillna(0, inplace=True) + daily['data_existence_percentage'].fillna(0, inplace=True) + daily['suspicious_values_count'].fillna(0, inplace=True) + daily['suspicious_maximums_count'].fillna(0, inplace=True) + daily['suspicious_minimums_count'].fillna(0, inplace=True) + daily['value_difference_error_count'].fillna(0, inplace=True) + + + ## + # TODO check, maybe it's not needed anymore + daily['value_error'] = np.where(daily['suspicious_values_count'].gt(0), True, False,) + daily['maximum_error'] = np.where(daily['suspicious_maximums_count'].gt(0), True, False, ) + daily['minimum_error'] = np.where(daily['suspicious_minimums_count'].gt(0), True, False, ) + # + ## + + # Round decimals + # TODO cambiar 'valor' por 'value' en pAricia + decimal_places = Measurement._meta.get_field('valor').decimal_places + daily['avg_value'] = daily['avg_value'].astype(np.float64).round(decimal_places) + 
daily['max_maximum'] = daily['max_maximum'].astype(np.float64).round(decimal_places) + daily['min_minimum'] = daily['min_minimum'].astype(np.float64).round(decimal_places) + daily['data_existence_percentage'] = daily['data_existence_percentage'].astype(np.float64).round(1) + daily.reset_index(names='id', inplace=True) + ## TODO Eliminar o corregir ids -> id + # + # daily.rename(columns={'id':'ids',}, inplace=True) + daily['ids'] = daily['id'] + # + ## + return daily, selected[['time', 'value']] + + +# Consultar datos crudos y/o validados por estacion, variable y fecha de un día en específico +def detalle_diario(est_id, var_id, fecha_str, sup_lim_variable, inf_lim_variable): + # SQL fun : reporte_validacion_modelo + # SQL template: validacion_crudos_prom.sql + + start_time = datetime.strptime(fecha_str, '%Y-%m-%d') + end_time = datetime.combine(start_time.date(), time(23, 59, 59, 999999)) + variable = Variable.objects.get(var_id=var_id) + # period = 5 + + + measurement, validated, joined, selected, tx_period = basic_calculations(est_id, var_id, start_time, end_time, tx_period) + + + + joined['state'] = ~(joined['value'].isna() & joined['maximum'].isna() & joined['minimum'].isna()) + # Basic statistics + mean = selected['value'].mean(skipna=True) + std_dev = selected['value'].astype(float).std(skipna=True) + stddev_inf_limit = mean - (std_dev * float(variable.var_min)) + stddev_sup_limit = mean + (std_dev * float(variable.var_min)) + joined['stddev_error'] = ~joined['value'].between(stddev_inf_limit, stddev_sup_limit) + joined['comment'] = '' + + joined.fillna('', inplace=True) + + report = joined[['id_joined', 'time', 'value', 'maximum', 'minimum', 'is_validated', 'is_selected', 'state', + 'time_lapse_status', 'value_error', 'maximum_error', 'minimum_error', 'stddev_error', 'comment', + 'value_difference', 'value_difference_error']] + report.rename(columns={'id_joined': 'id'}, inplace=True) + ####### + joined['n_valor'] = joined['value_difference'] + _selected = joined[joined['is_selected']==True] + num_fecha = len(_selected[_selected['time_lapse_status'] != 1].index) + + # Only take into account 'value_error' when there's no error in timestamp lapse + _selected_NO_TIMELAPSE_ERROR = _selected[_selected['time_lapse_status'] == 1] + num_valor = len(_selected_NO_TIMELAPSE_ERROR[_selected_NO_TIMELAPSE_ERROR['value_error'] == True].index) + num_maximo = len(_selected[_selected['maximum_error'] == True].index) + num_minimo = len(_selected[_selected['minimum_error'] == True].index) + num_stddev = len(_selected[_selected['stddev_error'] == True].index) + + + # TODO check if this es expected number of data + num_datos = int(24 * (60/tx_period)) + + report.rename( + columns={ + # 'id': '', + 'time': 'fecha', + 'value': 'valor', + 'maximum': 'maximo', + 'minimum': 'minimo', + 'is_validated': 'validado', + 'is_selected': 'seleccionado', + 'state': 'estado', + 'time_lapse_status': 'fecha_error', + 'value_error': 'valor_error', + 'maximum_error': 'maximo_error', + 'minimum_error': 'minimo_error', + 'stddev_error': 'stddev_error', + 'comment': 'comentario', + 'value_difference': 'variacion_consecutiva', + 'value_difference_error': 'varia_error' + }, + inplace=True + ) + + + data = { + 'datos': report.to_dict(orient='records'), + 'indicadores': [{ + 'num_fecha': num_fecha, + 'num_valor': num_valor, + 'num_valor1':num_valor, + 'num_maximo': num_maximo, + 'num_minimo': num_minimo, + 'num_stddev': num_stddev, + 'num_datos': num_datos + }] + } + + return data + + +def get_condiciones(cambios_lista): + 
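    # Collects the dates edited in the UI into two comma-separated lists of quoted date
    # strings: 'where_fechas' gathers rows flagged as validado, and 'where_eliminar' gathers
    # rows whose estado flag is False; both are interpolated later into the raw SQL executed
    # by pasar_crudos_validados().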
fechas_condicion = [] + fechas_eliminar = [] + for fila in cambios_lista: + if fila['validado']: + fechas_condicion.append("'" + fila['fecha'] + "'") + if not fila['estado']: + fechas_eliminar.append("'" + fila['fecha'] + "'") + + fechas_condicion = set(fechas_condicion) + fechas_eliminar = set(fechas_eliminar) + + where_fechas = ",".join(fechas_condicion) + where_eliminar = ",".join(fechas_eliminar) + + condiciones = { + 'where_eliminar': where_eliminar, + 'where_fechas': where_fechas + } + return condiciones + + +# Pasar los datos crudos a validados +def pasar_crudos_validados(cambios_lista, variable, estacion_id, condiciones, limite_superior, limite_inferior): + model_name = 'Var' + str(variable.var_id) + Measurement = apps.get_model(app_label='medicion', model_name=model_name+'Medicion') + Validated = apps.get_model(app_label='validacion', model_name=model_name+'Validado') + # modelo = normalize(variable.var_nombre).replace(" de ", "") + # modelo = modelo.replace(" ", "") + variable_nombre = variable.var_codigo + fecha_inicio_dato = cambios_lista[0]['fecha'] + fecha_fin_dato = cambios_lista[-1]['fecha'] + # where_fechas = condiciones.get('where_fechas') + where_eliminar = condiciones.get('where_eliminar') + + # + reporte_recibido = pd.DataFrame.from_records(cambios_lista) + # + + if variable.var_id == 1: + query = "select * FROM reporte_crudos_precipitacion(%s, %s, %s, %s, %s);" + + elif variable.var_id == 4 or variable.var_id == 5: + query = "select * FROM reporte_crudos_viento(%s, %s, %s, %s, %s);" + else: + query = "select * FROM reporte_crudos_" + variable.var_modelo.lower() + "(%s, %s, %s, %s, %s);" + consulta = ReporteCrudos.objects.raw(query, + [estacion_id, fecha_inicio_dato, fecha_fin_dato, limite_superior, + limite_inferior]) + for fila in consulta: + delattr(fila, '_state') + + datos = [dict(fila.__dict__) for fila in consulta] + data_json = json.dumps(datos, cls=DjangoJSONEncoder) + with connection.cursor() as cursor: + if variable.var_id == 4 or variable.var_id == 5: + cursor.callproc('insertar_viento_validacion', [estacion_id, data_json]) + #elif variable.var_id == 10 or variable.var_id == 11: + # cursor.callproc('insertar_agua_validacion', [estacion_id, data_json]) + else: + print(data_json) + print(estacion_id) + cursor.callproc('insertar_' + variable.var_modelo.lower() + '_validacion', [estacion_id, data_json]) + resultado = cursor.fetchone()[0] + cursor.close() + + if len(where_eliminar) > 0: + if variable.var_id == 4 or variable.var_id == 5: + variable_nombre = 'viento' + #elif variable.var_id == 10 or variable.var_id == 11: + # variable_nombre = 'agua' + + if variable.var_id == 1: + sql = """UPDATE validacion_var%%modelo%%validado SET valor = NULL WHERE estacion_id = %%est_id%% + AND date_trunc('day',fecha) IN (%%condicion%%) """ + elif variable.var_id == 4 or variable.var_id == 5: + sql = """UPDATE validacion_viento SET valor = NULL, maximo = NULL, minimo = NULL, + direccion = null, categoria = null WHERE estacion_id = %%est_id%% + AND date_trunc('day',fecha) IN (%%condicion%%); + UPDATE validacion_var4validado SET valor = NULL, maximo = NULL, minimo = NULL + WHERE estacion_id = %%est_id%% + AND date_trunc('day',fecha) IN (%%condicion%%); + UPDATE validacion_var5validado SET valor = NULL, maximo = NULL, minimo = NULL + WHERE estacion_id = %%est_id%% + AND date_trunc('day',fecha) IN (%%condicion%%);""" + #elif variable.var_id == 10 or variable.var_id == 11: + # sql = """UPDATE validacion_var%%modelo%%validado SET nivel = NULL, caudal = NULL + # WHERE estacion_id = 
%%est_id%% AND date_trunc('day',fecha) IN (%%condicion%%)""" + else: + sql = """UPDATE validacion_var%%modelo%%validado SET valor = NULL, maximo = NULL, minimo = NULL + WHERE estacion_id = %%est_id%% + AND date_trunc('day',fecha) IN (%%condicion%%)""" + + sql_modificar = sql.replace("%%modelo%%", str(variable.var_id)).replace("%%est_id%%", str(estacion_id)) \ + .replace("%%condicion%%", where_eliminar) + print(sql_modificar) + with connection.cursor() as cursor: + cursor.execute(sql_modificar) + cursor.close() + + return resultado + + +def guardar_cambios_validacion (estacion_id, variable, tipo_transaccion, fecha_inicio, fecha_fin): + sincronizacion = Sincronizacion( + estacion_id=estacion_id, + variable=variable, + tipo_transaccion=tipo_transaccion, + fecha_inicio=fecha_inicio, + fecha_fin=fecha_fin + ) + sincronizacion.save() + +""" + +""" + + + + + + +""" + +CREATE OR REPLACE FUNCTION public.reporte_validacion_%%modelo%%( + _estacion_id integer, + _fecha_inicio timestamp with time zone, + _fecha_fin timestamp with time zone, + _var_maximo numeric, + _var_minimo numeric) + RETURNS TABLE(id bigint, fecha timestamp with time zone, valor numeric,maximo numeric, minimo numeric, + validado boolean, seleccionado boolean, estado boolean, fecha_error numeric, valor_error boolean, + maximo_error boolean, minimo_error boolean, stddev_error boolean, comentario character varying + , variacion_consecutiva numeric, varia_error boolean) + LANGUAGE 'plpgsql' + + COST 100 + VOLATILE + ROWS 1000 +AS $BODY$ +BEGIN + RETURN QUERY + WITH + estacion AS (SELECT * FROM estacion_estacion est WHERE est.est_id = 4), + variable AS (SELECT * FROM variable_variable var WHERE var.var_id = 2), + --Seleccionar los datos de la tabla validados + validacion AS ( + SELECT v.id, v.fecha, 0 AS tipo, v.valor, v.maximo, v.minimo, TRUE AS existe_en_validacion, FALSE as valor_vacio + FROM validacion_var2validado v WHERE v.estacion_id = (SELECT est_id FROM estacion) AND v.fecha >= '2022-10-13 00:00:00' AND v.fecha <= '2022-10-13 23:59:59' + ), + --Seleccionar los datos de la tabla medicion + medicion AS ( + SELECT m.id, m.fecha, 1 AS tipo, CASE WHEN m.valor = 'NaN' THEN NULL ELSE m.valor END, + CASE WHEN m.maximo = 'NaN' THEN NULL ELSE m.maximo END, + CASE WHEN m.minimo = 'NaN' THEN NULL ELSE m.minimo END, + EXISTS(SELECT * FROM validacion v WHERE v.fecha = m.fecha AND v.valor = m.valor) AS existe_en_validacion, + EXISTS(SELECT * FROM validacion v WHERE v.fecha = m.fecha AND v.valor IS NULL) AS valor_vacio + FROM medicion_var2medicion m WHERE m.estacion_id = (SELECT est_id FROM estacion) AND m.fecha >= '2022-10-13 00:00:00' AND m.fecha <= '2022-10-13 23:59:59' + ), + --unir las tablas medicion y validacion + union_med_val AS ( + SELECT * FROM validacion UNION SELECT * FROM medicion + ), + --revision de lapsos de tiempo entre fechas + lapsos_fechas AS ( + SELECT + ff.fecha, + row_number() OVER (ORDER BY ff.fecha ASC) as fecha_grupo, + EXTRACT(EPOCH FROM ff.fecha - lag(ff.fecha) OVER (ORDER BY ff.fecha ASC))/60 as lapso_tiempo, + (SELECT fre.fre_valor FROM frecuencia_frecuencia fre + WHERE fre.var_id_id = (SELECT var_id FROM variable) AND fre.est_id_id = (SELECT est_id FROM estacion) AND fre.fre_fecha_ini < ff.fecha + ORDER BY fre.fre_fecha_ini DESC LIMIT 1) AS periodo_esperado + FROM (SELECT DISTINCT(umv.fecha) FROM union_med_val umv) ff ORDER BY fecha ASC + ), + fechas AS ( + SELECT *, + CASE WHEN fecha_grupo = 1 THEN 1 ELSE + CASE WHEN lapso_tiempo < periodo_esperado - 0.13 THEN 0 + WHEN lapso_tiempo > periodo_esperado + 0.13 
THEN 3 + WHEN lapso_tiempo = 0 THEN 0 + WHEN LEAD(lapso_tiempo) OVER (ORDER by lf.fecha) > periodo_esperado + 0.13 THEN 2 + ELSE 1 + END + END AS fecha_valida + FROM lapsos_fechas lf + ), + tabla_base AS ( + SELECT + row_number() OVER (ORDER BY umv.fecha ASC, umv.tipo ASC, --umv.validacion DESC-- + umv.id DESC) as numero_fila, + * + FROM union_med_val umv WHERE NOT (umv.existe_en_validacion = TRUE AND umv.tipo = 1 OR umv.valor_vacio = TRUE) + ), + -- Excluir los datos duplicados + tabla_seleccion AS ( + SELECT *, + (SELECT fecha_grupo FROM fechas f WHERE f.fecha = tb.fecha) AS fecha_grupo, + CASE WHEN tb.numero_fila = 1 THEN TRUE ELSE CASE WHEN lag(tb.fecha) OVER (ORDER BY tb.numero_fila ASC) != tb.fecha THEN TRUE ELSE FALSE END END AS seleccionado + --(SELECT med.id FROM medicion med WHERE med.fecha = tb.fecha ORDER BY id ASC LIMIT 1) AS medicion_id + FROM tabla_base tb --WHERE tb.valor IS NOT NULL + ), + tabla_variacion AS ( + SELECT *, + (SELECT t1.valor - (SELECT tanterior.valor FROM tabla_seleccion tanterior + WHERE tanterior.fecha_grupo = t1.fecha_grupo - 1 + AND tanterior.seleccionado IS TRUE) ) AS variacion_consecutiva, + CASE WHEN t1.seleccionado THEN t1.valor ELSE NULL END AS valor_seleccionado + FROM tabla_seleccion t1 + ), + estadistica AS ( + SELECT e1.media AS media, e1.desv_est AS desv_est, + e1.media - (e1.desv_est * (SELECT var_min FROM variable)) AS lim_inf_stddev, + e1.media + (e1.desv_est * (SELECT var_min FROM variable)) AS lim_sup_stddev + FROM ( + SELECT AVG(ts.valor) AS media, STDDEV_SAMP(ts.valor) AS desv_est + FROM tabla_seleccion ts + WHERE ts.valor IS NOT NULL AND ts.seleccionado IS TRUE + ) e1 + ), + reporte AS ( + SELECT ts.numero_fila AS id, ts.fecha, ts.valor, ts.maximo, ts.minimo, ts.existe_en_validacion, ts.seleccionado, + CASE WHEN ts.valor is NULL AND ts.maximo is NULL and ts.maximo is NULL THEN FALSE ELSE TRUE END as estado, + (SELECT fecha_valida FROM fechas ff WHERE ff.fecha = ts.fecha)::numeric AS fecha_error, + ts.valor > 29 OR ts.valor < 2 AS valor_error, + ts.maximo > 29 OR ts.maximo < 2 AS maximo_error, + ts.minimo > 29 OR ts.minimo < 2 AS minimo_error, + ts.valor < (SELECT lim_inf_stddev FROM estadistica ) OR ts.valor > (SELECT lim_sup_stddev FROM estadistica) AS stddev_error, + CASE + WHEN ts.existe_en_validacion THEN + (SELECT vc.comentario FROM validacion_comentariovalidacion vc WHERE vc.estacion_id = (SELECT est_id FROM estacion) AND vc.variable_id = (SELECT var_id FROM variable) AND vc.validado_id = ts.id) + ELSE NULL + END AS comentario, + ts.variacion_consecutiva as variacion_consecutiva, + ts.variacion_consecutiva <= -(select var_err from variable where var_id = 2) as varia_error + FROM tabla_variacion ts + ) + + SELECT * FROM reporte; + + +END; +$BODY$; + + + + + + + + + + + + + + + + + + + + + + +--- PARAMH2O + WITH + estacion AS (SELECT * FROM estacion_estacion est WHERE est.est_id = 4), + variable AS (SELECT * FROM variable_variable var WHERE var.var_id = 2), + --Seleccionar los datos de la tabla validados + validacion AS ( + SELECT v.id, v.fecha, 0 AS tipo, v.valor, v.maximo, v.minimo, TRUE AS existe_en_validacion, FALSE as valor_vacio + FROM validacion_var2validado v WHERE v.estacion_id = (SELECT est_id FROM estacion) AND v.fecha >= '2022-10-13 00:00:00' AND v.fecha <= '2022-10-16 23:59:59' + + ), + --Seleccionar los datos de la tabla medicion + medicion AS ( + SELECT m.id, m.fecha, 1 AS tipo, CASE WHEN m.valor = 'NaN' THEN NULL ELSE m.valor END, + CASE WHEN m.maximo = 'NaN' THEN NULL ELSE m.maximo END, + CASE WHEN m.minimo = 'NaN' 
THEN NULL ELSE m.minimo END, + EXISTS(SELECT * FROM validacion v WHERE v.fecha = m.fecha AND v.valor = m.valor) AS existe_en_validacion, + EXISTS(SELECT * FROM validacion v WHERE v.fecha = m.fecha ) AS valor_vacio + FROM medicion_var2medicion m WHERE m.estacion_id = (SELECT est_id FROM estacion) AND m.fecha >= '2022-10-13 00:00:00' AND m.fecha <= '2022-10-16 23:59:59' + ), + --unir las tablas medicion y validacion en una tabla + union_med_val AS ( + SELECT * FROM validacion UNION SELECT * FROM medicion + ), + lapsos_fechas AS ( + SELECT + ff.fecha, + row_number() OVER (ORDER BY ff.fecha ASC) as fecha_grupo, + EXTRACT(EPOCH FROM ff.fecha - lag(ff.fecha) OVER (ORDER BY ff.fecha ASC))/60 as lapso_tiempo, + (SELECT fre.fre_valor FROM frecuencia_frecuencia fre + WHERE fre.var_id_id = (SELECT var_id FROM variable) AND fre.est_id_id = (SELECT est_id FROM estacion) AND fre.fre_fecha_ini < ff.fecha + ORDER BY fre.fre_fecha_ini DESC LIMIT 1) AS periodo_esperado + FROM (SELECT DISTINCT(umv.fecha) FROM union_med_val umv) ff ORDER BY fecha ASC + ), + fechas AS ( + SELECT *, + CASE WHEN fecha_grupo = 1 THEN 1 ELSE + CASE WHEN lapso_tiempo < periodo_esperado - 0.13 THEN 0 + WHEN lapso_tiempo > periodo_esperado + 0.13 THEN 3 + WHEN lapso_tiempo = 0 THEN 0 + WHEN LEAD(lapso_tiempo) OVER (ORDER by lf.fecha) > periodo_esperado + 0.13 THEN 2 + ELSE 1 + END + END AS fecha_valida + FROM lapsos_fechas lf + ), + --Seleccionar una serie unica de los validados y los crudos + tabla_base AS ( + SELECT + row_number() OVER (ORDER BY umv.fecha ASC, umv.tipo ASC, umv.id DESC) as numero_fila, + * + FROM union_med_val umv WHERE NOT (umv.existe_en_validacion = TRUE AND umv.tipo = 1 OR umv.valor_vacio = TRUE) + ), + tabla_seleccion AS ( + SELECT *, + (SELECT fecha_grupo FROM fechas f WHERE f.fecha = tb.fecha) AS fecha_grupo, + CASE WHEN tb.numero_fila = 1 THEN TRUE ELSE CASE + WHEN lag(tb.fecha) OVER (ORDER BY tb.numero_fila ASC) != tb.fecha THEN TRUE + ELSE FALSE END END AS seleccionado + --(SELECT med.id FROM medicion med WHERE med.fecha = tb.fecha ORDER BY id ASC LIMIT 1) AS medicion_id + FROM tabla_base tb --WHERE tb.valor IS NOT NULL + ), + tabla_variacion AS ( + SELECT *, + (SELECT t1.valor - (SELECT tanterior.valor FROM tabla_seleccion tanterior + WHERE tanterior.fecha_grupo = t1.fecha_grupo - 1 + AND tanterior.seleccionado IS TRUE) ) AS variacion_consecutiva, + CASE WHEN t1.seleccionado THEN t1.valor ELSE NULL END AS valor_seleccionado + FROM tabla_seleccion t1 + ), + -- valores duplicados por cada fecha + tabla_duplicados AS ( + + SELECT tb.fecha, date_trunc('day',tb.fecha) as dia, COUNT(*) AS num_duplicados + FROM tabla_base tb + GROUP BY tb.fecha + HAVING COUNT(*) > 1 + ORDER BY tb.fecha + ), + -- acumular los datos a diario + tabla_acumulada AS ( + SELECT date_trunc('day',tb.fecha) as dia, COUNT(tb.valor) numero_datos, + AVG(tb.valor) as valor, MAX(tb.maximo) as maximo, MIN(tb.minimo) as minimo, + bool_and(tb.existe_en_validacion) as existe_en_validacion + FROM tabla_base tb GROUP BY dia ORDER by dia + ), + -- Numero de datos esperados por d?a + tabla_datos_esperados AS ( + SELECT ta.dia, ta.numero_datos, + (SELECT CAST(1440/f.fre_valor AS INT) ndatos FROM frecuencia_frecuencia f WHERE f.fre_valor <= 60 + and f.est_id_id = (SELECT e.est_id FROM estacion e) AND f.var_id_id = (SELECT var_id FROM variable) + AND f.fre_fecha_ini <= ta.dia + AND (f.fre_fecha_fin >= ta.dia OR f.fre_fecha_fin IS NULL) + ORDER BY f.fre_fecha_ini DESC LIMIT 1) as numero_datos_esperado + FROM tabla_acumulada ta ORDER by ta.dia + ), + 
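    -- Daily ratio of received to expected records: 'porcentaje' is the percentage of data
    -- actually present for the day, and 'porcentaje_error' is raised when that percentage
    -- falls below the variable's umbral_completo threshold or exceeds 100.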
tabla_calculo AS ( + SELECT tde.dia, tde.numero_datos, tde.numero_datos_esperado, + ROUND((tde.numero_datos::decimal/tde.numero_datos_esperado)*100,2) as porcentaje, + CASE WHEN ROUND((tde.numero_datos::decimal/tde.numero_datos_esperado)*100,2) < (SELECT umbral_completo FROM variable) + OR ROUND((tde.numero_datos::decimal/tde.numero_datos_esperado)*100,2) > 100 THEN TRUE ELSE FALSE END AS porcentaje_error + FROM tabla_datos_esperados tde + ), + tabla_valores_sos AS ( + SELECT ta.dia, + (SELECT COUNT(tb.valor) nsvalor FROM tabla_base tb WHERE date(tb.fecha) = ta.dia + AND (tb.valor>29.0 OR tb.valor < 3.0 ) + )::numeric as numero_valor_sospechoso, + (SELECT COUNT(tb.maximo) nsvalor FROM tabla_base tb WHERE date(tb.fecha) = ta.dia + AND (tb.maximo>29.0 OR tb.maximo < 3.0 ) + )::numeric as numero_maximo_sospechoso, + (SELECT COUNT(tb.minimo) nsvalor FROM tabla_base tb WHERE date(tb.fecha) = ta.dia + AND (tb.minimo>29.0 OR tb.minimo < 3.0 ) + )::numeric as numero_minimo_sospechoso + FROM tabla_acumulada ta ORDER BY ta.dia + ), + tabla_varia_err AS ( + SELECT ta.dia, + (SELECT COUNT(tv.variacion_consecutiva) nsvalor FROM tabla_variacion tv WHERE date(tv.fecha) = ta.dia + AND (tv.variacion_consecutiva <= -(select abs(var_err) from variable where var_id = 2) ) + )::numeric as varia_error + FROM tabla_acumulada ta ORDER BY ta.dia + ), + -- revision de lapsos de tiempo entre fechas + lapsos_dias AS ( + SELECT + ff.dia, + row_number() OVER (ORDER BY ff.dia ASC) as fecha_grupo, + EXTRACT(EPOCH FROM ff.dia - lag(ff.dia) OVER (ORDER BY ff.dia ASC))/86400 as lapso_tiempo + FROM (SELECT tc.dia FROM tabla_calculo tc) ff ORDER BY dia ASC + ), + error_lapsos AS ( + SELECT *, + CASE WHEN fecha_grupo = 1 THEN 1 ELSE + CASE WHEN lapso_tiempo < 1 THEN 0 + WHEN lapso_tiempo > 1 THEN 3 + WHEN LEAD (lapso_tiempo) OVER (ORDER BY ld.dia) > 1 THEN 2 + ELSE 1 + END + END AS fecha_valida + FROM lapsos_dias ld + ), + reporte AS ( + SELECT + row_number() OVER (ORDER BY ta.dia ASC) as id, + ta.dia, + (SELECT el.fecha_valida FROM error_lapsos el WHERE el.dia = ta.dia)::numeric as dia_error, + (SELECT SUM(td.num_duplicados) FROM tabla_duplicados td WHERE td.dia = ta.dia)::numeric as fecha_numero, + ROUND(ta.valor,2)::numeric as valor, ROUND(ta.maximo,2)::numeric as maximo, ROUND(ta.minimo,2)::numeric as minimo, + (SELECT tc.porcentaje FROM tabla_calculo tc WHERE tc.dia = ta.dia) as porcentaje, + (SELECT tc.porcentaje_error FROM tabla_calculo tc WHERE tc.dia = ta.dia) as porcentaje_error, + --ta.valor > 29.0 OR ta.valor < 3.0 AS valor_error, + --ta.maximo > 29.0 OR ta.maximo < 3.0 AS maximo_error, + --ta.minimo > 29.0 OR ta.minimo < 3.0 AS minimo_error, + CASE WHEN (SELECT tvs.numero_valor_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia)> 0 THEN true ELSE false END as valor_error, + CASE WHEN (SELECT tvs.numero_maximo_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia)> 0 THEN true ELSE false END as valor_error, + CASE WHEN (SELECT tvs.numero_minimo_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia)> 0 THEN true ELSE false END as valor_error, + (SELECT tvs.numero_valor_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia) as valor_numero, + (SELECT tvs.numero_maximo_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia) as maximo_numero, + (SELECT tvs.numero_minimo_sospechoso FROM tabla_valores_sos tvs WHERE tvs.dia = ta.dia) as minimo_numero, + ROUND((SELECT AVG(dp.valor) FROM diario_var2diario dp WHERE dp.estacion_id = (SELECT est_id FROM estacion) AND + 
date_part('day',dp.fecha)= date_part('day', ta.dia) AND date_part('month',dp.fecha)= date_part('month',ta.dia)),2) as media_historica, + + TRUE as estado, + ta.existe_en_validacion as validado, + (select tve.varia_error from tabla_varia_err tve where tve.dia = ta.dia) as c_varia_err + FROM tabla_acumulada ta + + ) + SELECT * FROM reporte; + + +""" diff --git a/validated/models.py b/validated/models.py index f2bd32d7..04037796 100755 --- a/validated/models.py +++ b/validated/models.py @@ -23,7 +23,14 @@ VALIDATEDS: List[str] = [] """Available validated variables.""" - +# class Permissions(models.Model): +# +# class Meta: +# managed = False +# default_permissions = () +# permissions = ( +# ('daily_validation', 'Permission for validate raw data.') +# ) class PolarWind(TimescaleModel): diff --git a/validated/scripts/tests.py b/validated/scripts/tests.py new file mode 100755 index 00000000..94b973d3 --- /dev/null +++ b/validated/scripts/tests.py @@ -0,0 +1,6 @@ +from django.test import TestCase + +# Create your tests here. +from val2.functions import reporte_subhorario + +reporte_subhorario(1,2,3,4,5,30) \ No newline at end of file diff --git a/validated/static/validated/daily_validation.js b/validated/static/validated/daily_validation.js new file mode 100755 index 00000000..8ebb2c75 --- /dev/null +++ b/validated/static/validated/daily_validation.js @@ -0,0 +1,2449 @@ +var plot_orig_width = 0; +var plot_adjusted = true; +var gid = 0; + +var bargraph = function(g, series, ctx, cx, cy, color, radius) { + ctx.lineWidth = 1.5; // Change as needed. Could use radius + ctx.strokeStyle = 'blue'; + ctx.fillStyle = color; + ctx.beginPath(); + ctx.moveTo(cx, cy); + ctx.lineTo(cx, g.toDomYCoord(0)); + ctx.closePath(); + ctx.stroke(); + ctx.fill(); +}; + +function format_tuple(data){ + for (var i in data) { + data[i][0] = new Date(data[i][0]); + data[i][1] = parseFloat(data[i][1]); + } + return data; +} + + +function grafico_barras(data, append_to, variable){ + var graf_id= 'graf' + variable.var_id; + var element = "
"; + element = element.replace('graf_id', graf_id); + $(append_to).append("
" + element + "
"); + var g = new Dygraph( + document.getElementById(graf_id), + data, + { + title: variable.var_nombre + ' (' + variable.var_unidad_sigla + ')', + labels: ['fecha', 'valor'], + drawPoints:true, + drawPointCallback : bargraph, + strokeWidth: 0.0 + } + ); +} + +function grafico_dispersion(data, append_to, variable){ + var graf_id= 'graf' + variable.var_id; + var element = "
"; + element = element.replace('graf_id', graf_id); + $(append_to).append("
" + element + "
"); + var g = new Dygraph( + document.getElementById(graf_id), + data, + { + title: variable.var_nombre + ' (' + variable.var_unidad_sigla + ')', + labels: ['fecha', 'valor'], + drawPoints: true, + strokeWidth: 0, + pointSize: 1.5 + } + ); + +} + + +function plot_adjust(){ + var window_width = $("#div_informacion").width(); + Plotly.relayout(gid, {width: window_width }); + $("#" + gid).css("width", ""); + plot_adjusted = true; + $('#resize_plot').html("> Un pixel por dato <"); +} +var dateFormat = "yy-mm-dd"; + +function plot_orig(){ + Plotly.relayout(gid, {width: plot_orig_width }); + $("#" + gid).css("width", plot_orig_width + "px"); + plot_adjusted = false; + $('#resize_plot').html("> Ajustar a pantalla <"); +} + +function resizePlot(){ + if (plot_adjusted){ + plot_orig(); + }else{ + plot_adjust(); + } +} + +tr_ini = ''; +tr_fin = ''; +var datos_json = {}; +var data_fecha = []; +var data_valor = []; +var data_maximo = []; +var data_minimo = []; +//var num_dias = 0; +var num_datos = 0; + +var indicadores_crudos = { + num_fecha: 0, + num_valor: 0, + num_maximo: 0, + num_minimo:0, + num_stddev: 0, + num_datos:0 +} + +var indicadores_diarios = { + num_fecha: 0, + num_porcentaje: 0, + num_valor: 0, + num_maximo: 0, + num_minimo: 0, + num_dias: 0 +} + +var num_fecha = 0; + +// Valores de prueba +// M5077 TAI 2015-07-28, valores de -74 +// M5025 TAI 2008-05-02, serie de datos faltantes + + +function mostrar_label_dentro_de_select(){ + //// Elemento SELECT muestre label dentro de su caja + $('select.use-placeholder').each(function(){ + var op0 = $(this).children('option:first-child'); + if (op0.is(':selected')) { + op0.css( "display", "none" ); + $(this).addClass('placeholder'); + var label_text = $(this).parent('div').children('label').text(); + op0.text(label_text); + } + }); + + //// Elemento SELECT oculte o muestre selección nula + $('select.use-placeholder').change(function() { + var op0 = $(this).children('option:first-child'); + if (op0.is(':selected')) { + op0.css( "display", "none" ); + $(this).addClass('placeholder'); + var label_text = $(this).parent('div').children('label').text(); + op0.text(label_text); + } else { + op0.css( "display", "" ); + $(this).removeClass('placeholder'); + op0.text("---------"); + } + }); + } +$(document).ready(function() { + + $("#btn_nuevo_crudo").attr("disabled", true); + //mostrar_label_dentro_de_select(); + //$("#id_estacion").attr("placeholder", "Estación"); + //console.log($("#id_estacion").attr("placeholder")); + $('#div_grafico').on('hidden.bs.collapse', function () { + $("#btn_grafico").text("Mostrar Gráfico"); + }); + $('#div_grafico').on('show.bs.collapse', function () { + $("#btn_grafico").text("Ocultar Gráfico"); + }); + + + + $("#btn_buscar").click(actualizar_tabla_diario); + + //consultar los periodos de validacion + $("#btn_periodos_validacion").click(function(){ + $("#btn_periodos_validacion").attr("disabled", true); + + + }); + + // consultar los limites de la variable + $("#id_variable").change(function () { + //var codigo = $('#id_estacion option:selected').text(); + var variable = $(this).val(); + token = $("input[name='csrfmiddlewaretoken']").val(); + + $.ajax({ + url: '/variable/limites/', + dataType: 'json', + data: { + 'csrfmiddlewaretoken': token, + 'var_id': variable, + }, + type:'POST', + success: function (data) { + $("#id_limite_inferior").val(data.var_minimo); + $("#id_limite_superior").val(data.var_maximo); + } + }); + + }); + + + + + /*Filtros de la tablas*/ + $("#chk_porcentaje").change(filtrar_diario); + 
$("#chk_fecha").change(filtrar_diario); + $("#chk_numero").change(filtrar_diario); + + $("#chk_fecha_crudo").change(filtrar_crudo); + + $("#chk_valor_crudo").change(filtrar_crudo); + $("#chk_stddev").change(filtrar_crudo); + $("#chk_fila").change(filtrar_crudo); + $("#chk_varcon").change(filtrar_crudo); + + /*Control de los botones*/ + + var $btn_enviar = $('#btn_enviar'); + var $btn_guardar = $('#btn_enviar_crudo'); + + var $btn_mostrar = $('#btn_mostrar'); + var $btn_mostrar_crudo = $('#btn_mostrar_crudo'); + + var $btn_eliminar = $('#btn_eliminar'); + var $btn_eliminar_crudo = $('#btn_eliminar_crudo'); + + var $btn_seleccionar = $('#btn_seleccionar'); + var $btn_seleccionar_crudo = $('#btn_seleccionar_crudo'); + + var $btn_desmarcar = $('#btn_desmarcar'); + var $btn_desmarcar_crudo = $('#btn_desmarcar_crudo'); + + var $btn_nuevo_promedio = $('#btn_nuevo_promedio'); + var $btn_nuevo_acumulado = $('#btn_nuevo_acumulado'); + + var $btn_modificar_promedio = $('#btn_modificar_promedio'); + var $btn_modificar_acumulado = $('#btn_modificar_acumulado'); + var $btn_modificar_agua = $('#btn_modificar_agua'); + + var $btn_graficar = $('#btn_grafico'); + var $btn_graficar_crudo = $('#btn_grafico_crudo'); + + var $btn_historial = $('#btn_historial'); + + var $btn_desvalidar = $('#btn_desvalidar'); + + var $table = $('#table_diario'); + ///fondo blnco para la tabal diaria + //$table.css("background-color","white"); + + var $btn_eliminar_valor = $("#btn_eliminar_valor"); + + var $btn_nuevo_crudo = $("#btn_nuevo_crudo"); + + + $btn_enviar.click(guardar_validados); + + $btn_mostrar.click(mostrar); + $btn_mostrar_crudo.click(mostrar); + + $btn_eliminar.click(eliminar); + $btn_eliminar_crudo.click(eliminar); + + $btn_seleccionar.click(marcar); + $btn_desmarcar.click(desmarcar); + + $btn_seleccionar_crudo.click(marcar); + $btn_desmarcar_crudo.click(desmarcar); + + $btn_nuevo_promedio.click(nuevo_registro); + $btn_nuevo_acumulado.click(nuevo_registro); + + + $btn_modificar_acumulado.click(modificar); + $btn_modificar_promedio.click(modificar); + $btn_modificar_agua.click(modificar); + + $btn_guardar.click(guardar_crudos); + + $btn_graficar.click(graficar); + $btn_graficar_crudo.click(graficar); + + $btn_historial.click(periodos_validacion); + + $btn_eliminar_valor.click(eliminar_crudo); + + $btn_nuevo_crudo.click(abrir_formulario_nuevo); + + $btn_desvalidar.click(desvalidar_datos); + +}); + +//consultar el historial de validacion +function periodos_validacion(event){ + fecha_inicio = $("input[name='inicio']").val(); + fecha_fin = $("input[name='fin']").val(); + if (fecha_inicio == '' && fecha_fin == '') + { + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("#id_estacion").val(); + variable_id = $("#id_variable").val(); + + $.ajax({ + url: '/val2/', + data: $("#form_validacion").serialize(), + type:'POST', + beforeSend: function () { + activar_espera("historial"); + $("#div_modal_historial").modal("show"); + }, + success: function (data) { + $("#div_historial").html(data) + desactivar_espera("historial"); + }, + error: function () { + + mostrar_mensaje("historial"); + + } + }); + } + else { + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("#id_estacion").val(); + variable_id = $("#id_variable").val(); + + $.ajax({ + url: '/val2/periodos_validacion/', + data: $("#form_validacion").serialize(), + type:'POST', + beforeSend: function () { + activar_espera("historial"); + $("#div_modal_historial").modal("show"); + }, + success: function (data) { + 
$("#div_historial").html(data) + desactivar_espera("historial"); + }, + error: function () { + + mostrar_mensaje("historial"); + + } + }); + } + + +} + +// pasar los datos crudos a validados + +function guardar_validados(event){ + var $table = $('#table_diario'); + var $table_crudo = $('#table_crudo'); + //$table.css("background-color","white"); + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("#id_estacion").val(); + variable_id = $("#id_variable").val(); + limite_superior = $("#id_limite_superior").val(); + limite_inferior = $("#id_limite_inferior").val(); + fecha_inicio = $("input[name='inicio']").val(); + fecha_fin = $("input[name='fin']").val(); + + + //comentario_general = $("textarea[name='comentario_general']").val(); + cambios = JSON.stringify($table.bootstrapTable('getData',{unfiltered:true, includeHiddenRows: true})); + + + $.ajax({ + url: '/val2/guardarvalidados/', + data: { + 'csrfmiddlewaretoken': token, + 'estacion_id': estacion_id, + 'variable_id': variable_id, + 'fecha_inicio': fecha_inicio, + 'fecha_fin': fecha_fin, + 'limite_superior': limite_superior, + 'limite_inferior': limite_inferior, + //'comentario_general' : comentario_general, + 'cambios': cambios + }, + type:'POST', + beforeSend: function () { + $table.bootstrapTable('showLoading'); + $table_crudo.bootstrapTable('showLoading'); + }, + success: function (data) { + if (data.resultado == true){ + $("#div_body_mensaje").html('Datos Guardados') + $("#div_mensaje_validacion").modal("show"); + $("#resize_plot").hide(); + $("#div_informacion").hide(); + limpiar_filtros('diario'); + limpiar_filtros('crudo'); + $table_crudo.bootstrapTable('removeAll'); + $table.bootstrapTable('removeAll'); + + + } + else{ + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + + } + $table.bootstrapTable('hideLoading'); + $table_crudo.bootstrapTable('hideLoading'); + + + }, + error: function () { + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + $table.bootstrapTable('hideLoading'); + } + }); + +} + +//eliminar datos validados de la base de datos + +function eliminar_validados(event){ + debugger; + var $table = $('#table_diario'); + var $table_crudo = $('#table_crudo'); + //$table.css("background-color","white"); + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("#id_estacion").val(); + variable_id = $("#id_variable").val(); + //comentario_general = $("textarea[name='comentario_general']").val(); + cambios = JSON.stringify($table.bootstrapTable('getData',{unfiltered:true, includeHiddenRows: true})); + + + $.ajax({ + url: '/val2/eliminarvalidados/', + data: { + 'csrfmiddlewaretoken': token, + 'estacion_id': estacion_id, + 'variable_id': variable_id, + 'cambios': cambios + }, + type:'POST', + beforeSend: function () { + //$table.bootstrapTable('showLoading'); + + }, + success: function (data) { + if (data.resultado == true){ + //$("#div_body_mensaje").html('Datos Guardados') + //$("#div_mensaje_validacion").modal("show"); + + limpiar_filtros('diario'); + limpiar_filtros('crudo'); + //$table_crudo.bootstrapTable('removeAll'); + //$table.bootstrapTable('removeAll'); + actualizar_tabla_diario(); + + + } + else{ + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + + } + 
$table.bootstrapTable('hideLoading'); + $table_crudo.bootstrapTable('hideLoading'); + + + }, + error: function () { + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + $table.bootstrapTable('hideLoading'); + } + }); + +} + +// guardar los cambios en los datos crudos +function guardar_crudos(event){ + var $table = $('#table_crudo'); + //$table.css("background-color","white"); + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("#id_estacion").val(); + variable_id = $("#id_variable").val(); + fecha_inicio = $("input[name='inicio']").val(); + fecha_fin = $("input[name='fin']").val(); + //comentario_general = $("textarea[name='comentario_general']").val(); + cambios = JSON.stringify($table.bootstrapTable('getData',{unfiltered:true})); + //console.log($table.bootstrapTable('getData',{unfiltered:true, })) + //detalle_crudos(); + + + + $.ajax({ + url: '/val2/guardarcrudos/', + data: { + 'csrfmiddlewaretoken': token, + 'estacion_id': estacion_id, + 'variable_id': variable_id, + 'fecha_inicio': fecha_inicio, + 'fecha_fin': fecha_fin, + //'comentario_general' : comentario_general, + 'cambios': cambios + }, + type:'POST', + beforeSend: function () { + $table.bootstrapTable('showLoading'); + }, + success: function (data) { + console.log(typeof data.resultado) + if (data.resultado == true){ + $("#div_body_mensaje").html('Datos Guardados') + $("#div_mensaje_validacion").modal("show"); + detalle_crudos(); + modificar_fila(); + limpiar_filtros('crudo'); + } + else{ + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + $table.bootstrapTable('hideLoading'); + + } + + + }, + error: function () { + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + $table.bootstrapTable('hideLoading'); + } + }); + + +} + +// Consultar la serie de datos diarios desde el servidor de base de datos +function actualizar_tabla_diario(){ + var $table = $('#table_diario'); + var var_id = $("#id_variable").val(); + var flag_error = false; + var mensaje = ''; + $("#orig_variable_id").val(var_id); + $("#div_informacion").html('') + $("#resize_plot").hide(); + limpiar_filtros('diario'); + limpiar_filtros('crudo'); + //$("#table_crudo").bootstrapTable('removeAll'); + fecha_inicio = $("input[name='inicio']").val(); + fecha_fin = $("input[name='fin']").val(); + if( fecha_inicio == '' || fecha_fin == '') + { + $("#div_message_fechas").show("slow"); + $("#div_c").html(""); + } + else { + $("#div_message_fechas").hide(); + $("#div_c").html(""); + $.ajax({ + url: $("#form_validacion").attr('action'), + data: $("#form_validacion").serialize(), + type:'POST', + beforeSend: function () { + //activar_espera(); + $table.bootstrapTable('showLoading'); + + }, + success: function (data) { + $("#btn_buscar").attr("disabled", false); + for (var key in data){ + if (key == 'error'){ + flag_error = true; + mensaje = data.error; + } + } + if (flag_error == false){ + if (data.grafico === '

No hay datos

'){ + $("#div_informacion").show("slow"); + $("#div_informacion").html(data.grafico); + $("#resize_plot").hide(); + } + else { + $("#div_c").html(data.curva); + + var datos_grafico = format_tuple(data.datos_grafico); + if (data.variable[0].es_acumulada){ + grafico_barras(datos_grafico, "#div_informacion", data.variable[0]); + }else{ + grafico_dispersion(datos_grafico, "#div_informacion", data.variable[0]); + } + +// $("#resize_plot").show("slow"); +// $("#div_informacion").show("slow"); +// window.gid = $('.plotly-graph-div,.js-plotly-plot').attr('id'); +// window.plot_orig_width = $("#" + window.gid).width(); +// plot_adjust(); + habilitar_nuevo(); + var_id = data.variable[0]['var_id']; + variable = data.variable[0]; + estacion = data.estacion[0]; + datos_json = data.datos + //num_dias = data.indicadores[0]['num_dias']; + $table.bootstrapTable('destroy'); + for (const index in data.indicadores[0]){ + indicadores_diarios[index]= data.indicadores[0][index]; + } + var columns = get_columns_diario(var_id, data.indicadores[0]); + $table.bootstrapTable({ + columns:columns, + data: datos_json, + height: 458, + showFooter: true, + uniqueId: 'id', + rowStyle: style_fila + }); + + $table.bootstrapTable('hideLoading'); + $("#table_crudo").bootstrapTable('removeAll'); + } + + + } + else{ + $table.bootstrapTable('hideLoading'); + $("#resize_plot").hide(); + $("#div_body_mensaje").html(mensaje) + $("#div_mensaje_validacion").modal("show"); + + } + + }, + error: function () { + $table.bootstrapTable('hideLoading'); + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + //mostrar_mensaje() + + } + }); + + } + +} + +// actualizar una fila de la tabla de datos diarios +function modificar_fila(){ + var $table = $('#table_crudo'); + //$table.css("background-color","white"); + var $table_diario = $('#table_diario'); + + var id_diario = $("#orig_id_diario").val(); + var fecha = $("#orig_fecha_diario").val(); + var var_id = $("#orig_variable_id").val(); + + var limite_inferior = $("#id_limite_inferior").val(); + var limite_superior = $("#id_limite_superior").val(); + + var datos_crudos = $table.bootstrapTable('getData', {unfiltered:true}); + + var suma_valor = 0; + var suma_maximo = 0; + var suma_minimo = 0; + + var suma_nivel = 0; + var suma_caudal = 0; + + var avg_valor = 0; + var avg_maximo = 0; + var avg_minimo = 0; + + var avg_nivel = 0; + var avg_caudal = 0; + + var num_valor = 0; + var num_maximo = 0; + var num_minimo = 0; + + var num_nivel = 0; + var num_caudal = 0; + + var num_fecha = 0; + + var sum_datos = 0; + + $.each(datos_crudos, function(i, item) { + if (item.estado){ + + if (var_id == 10 || var_id == 11){ + if (typeof(parseFloat(item.nivel)) == "number"){ + suma_nivel = suma_nivel + parseFloat(item.nivel); + + if ((parseFloat(item.nivel)limite_superior)) + num_nivel += 1; + } + + if (typeof(parseFloat(item.caudal)) == "number"){ + suma_caudal = suma_caudal + parseFloat(item.caudal); + } + + } + else{ + if (typeof(parseFloat(item.valor)) == "number"){ + suma_valor = suma_valor + parseFloat(item.valor); + + if ((parseFloat(item.valor)limite_superior)) + num_valor += 1; + } + } + + + + if (var_id != 1 && var_id != 10 && var_id != 11){ + if (typeof(parseFloat(item.maximo)) == "number"){ + suma_maximo = suma_maximo + parseFloat(item.maximo); + if ((parseFloat(item.maximo)limite_superior)) + num_maximo += 1; + } + if (typeof(parseFloat(item.minimo)) == "number"){ + suma_minimo = suma_minimo + 
parseFloat(item.minimo); + if ((parseFloat(item.minimo)limite_superior)) + num_minimo += 1; + } + } + + if (item.seleccionado == false || item.estado == false) + num_fecha += 1; + + sum_datos += 1; + + } + }); + + + if (isNaN(suma_valor)) + avg_valor = null; + else + avg_valor = (suma_valor / sum_datos).toFixed(2); + + if (isNaN(suma_maximo)) + avg_maximo = null; + else + avg_maximo = (Math.max.apply(Math, datos_crudos.map(function(o) { + return o.maximo; + }))).toFixed(2); + + if (isNaN(suma_minimo)) + avg_minimo = null; + else + avg_minimo = (Math.min.apply(Math, datos_crudos.map(function(o) { + return o.minimo; + }))).toFixed(2); + + if (isNaN(suma_nivel)) + avg_nivel = null; + else + avg_nivel = (avg_nivel / sum_datos).toFixed(2); + + + if (isNaN(suma_caudal)) + avg_caudal = null; + else + avg_caudal = (avg_caudal / sum_datos).toFixed(2); + + var porcentaje = (sum_datos * 100 / indicadores_crudos['num_datos']).toFixed(2); + + var porcentaje_error = false + + if (var_id == 1){ + porcentaje_error = porcentaje < 80 || porcentaje > 100 ? true : false + } + else{ + porcentaje_error = porcentaje < 70 || porcentaje > 100 ? true : false + } + + + + + + if (var_id != 1){ + $table_diario.bootstrapTable('updateByUniqueId', { + id: id_diario, + row: {valor: avg_valor, maximo: avg_maximo, minimo: avg_minimo, validado: true, + valor_numero: num_valor, maximo_numero: num_maximo, minimo_numero: num_minimo, + porcentaje: porcentaje, porcentaje_error: porcentaje_error, fecha_numero: num_fecha, + valor_error: num_valor > 0 ? true : false, + maximo_error: num_maximo > 0 ? true: false, + minimo_error: num_minimo > 0 ? true: false + } + }); + + } + else if (var_id == 10 || var_id == 11){ + $table_diario.bootstrapTable('updateByUniqueId', { + id: id_diario, + row: {nivel: avg_nivel, caudal: avg_caudal, validado: true, + nivel_numero: num_nivel, + porcentaje: porcentaje, porcentaje_error: porcentaje_error, fecha_numero: num_fecha, + nivel_error: num_nivel > 0 ? true : false + } + }); + } + else { + $table_diario.bootstrapTable('updateByUniqueId', { + id: id_diario, + row: {valor: suma_valor.toFixed(2), validado: true, valor_numero: num_valor, porcentaje: porcentaje, + porcentaje_error: porcentaje_error, fecha_numero: num_fecha, + valor_error: num_valor > 0 ? 
true : false + } + }); + } + + +} + + + +// Graficar los datos crudos o diarios de la estacion +function graficar(event){ + var name = event.currentTarget.name; + if (name === 'crudo') + $table = $("#table_crudo"); + else + $table = $("#table_diario"); + + var data_fecha = []; + var data_valor = []; + var data_maximo = []; + var data_minimo = []; + var data_historica = []; + var data_error_fecha = []; + var data_error_valor = []; + var data_porcentaje_fecha = []; + var data_porcentaje_error = []; + var data_direccion =[]; + var data_nivel =[]; + var data_caudal =[]; + + + var var_id = $("#id_variable").val(); + var var_nombre = $('#id_variable option:selected').text(); + var est_nombre = $('#id_estacion option:selected').text(); + + var datos = $table.bootstrapTable('getData',{unfiltered:true}); + var width_graph = $(".container").width(); + + var data = []; + var layout = {}; + + + $.each(datos, function(i, item) { + if (item.estado){ + data_fecha.push(item.fecha); + + if (var_id == 1){ + data_valor.push(item.valor); + data_historica.push(item.media_historica) + } + else if ((var_id == 4) || (var_id == 5)){ + data_valor.push(item.valor); + data_direccion.push(item.direccion) + } + /*else if( var_id == 10 || var_id == 11){ + data_nivel.push(item.nivel); + data_caudal.push(item.caudal); + }*/ + else{ + data_valor.push(item.valor); + data_maximo.push(item.maximo); + data_minimo.push(item.minimo); + } + + if(item.fecha_error==2){ + console.log("fecha error") + data_valor.push(null); + data_fecha.push(null); + if (var_id != 1){ + data_maximo.push(null); + data_minimo.push(null); + } + } + + if (item.valor_error){ + data_error_fecha.push(item.fecha); + data_error_valor.push(item.valor) + } + + if ((name == 'diario') && (item.porcentaje_error)){ + data_porcentaje_fecha.push(item.fecha); + data_porcentaje_error.push(item.valor); + + } + + } + else{ + data_fecha.push(null); + data_valor.push(null); + if (var_id != 1){ + data_maximo.push(null); + data_minimo.push(null); + } + } + }); + + var error_valor = { + type: 'scatter', + x: data_error_fecha, + y: data_error_valor, + mode: 'markers', + name: 'Errores', + showlegend: true, + marker: { + color: '#dc3545', + line: {width: 3}, + //opacity: 0.5, + size: 12, + symbol: 'circle-open' + } + } + + + var error_porcentaje = { + type: 'scatter', + x: data_porcentaje_fecha, + y: data_porcentaje_error, + mode: 'markers', + name: '% bajos', + showlegend: true, + marker: { + color: '#ffc107', + line: {width: 3}, + //opacity: 0.5, + size: 12, + symbol: 'circle-open' + } + } + + if (var_id == 1){ + var trace1 = { + x:data_fecha, + y:data_valor, + name: 'Valor', + type: 'bar' + }; + var trace2 = { + x:data_fecha, + y:data_historica, + name: 'Media Histórica', + type: 'bar' + }; + data = [trace1, trace2, error_valor, error_porcentaje]; + } + else if ((var_id == 4) || (var_id == 5)){ + var trace1 = { + name: "Valores", + type: "scatterpolargl", + //type: "barpolar", + r: data_valor, + theta: data_direccion, + thetaunit: 'degrees', + mode: "markers", + marker: { + color: "rgb(217,95,2)", + size: 10, + line: { + color: "white" + }, + //opacity: 0.10 + }, + cliponaxis: false + }; + data = [trace1]; + + + + } + /*else if (var_id == 10 || var_id == 11){ + console.log(data_nivel, data_caudal); + var trace_nivel = { + x:data_fecha, + y:data_nivel, + name: 'Nivel de Agua', + type: 'scatter' + }; + var trace_caudal = { + x:data_fecha, + y:data_caudal, + name: 'Caudal', + yaxis: 'y2', + type: 'scatter' + }; + data = [trace_nivel, trace_caudal]; + }*/ + else{ + var trace1 = 
{ + x:data_fecha, + y:data_valor, + name: 'Valor', + type: 'scatter' + }; + var trace2 = { + x:data_fecha, + y:data_maximo, + name: 'Maximo', + type: 'scatter' + }; + var trace3 = { + x:data_fecha, + y:data_minimo, + name: 'Minimo', + type: 'scatter' + }; + data = [trace1, trace2, trace3, error_valor]; + } + + if ((var_id == 4) || (var_id == 5)){ + + layout = { + title: var_nombre + ' - '+ est_nombre, + showlegend: true, + polar: { + bgcolor: "rgb(233, 233, 233)", + + angularaxis: { + tickwidth: 2, + linewidth: 3, + direction: 'clockwise', + }, + radialaxis: { + side: "counterclockwise", + showline: true, + linewidth: 2, + tickwidth: 2, + gridcolor: "#FFF", + gridwidth: 2 + }, + }, + paper_bgcolor: "rgb(255, 255, 255)", + width: width_graph-40, + + } + + } + /*else if (var_id == 10 || var_id == 11){ + layout = { + title: 'Caudal y Nivel' + ' - '+ est_nombre, + yaxis: {title: 'Nivel de Agua (cm)'}, + yaxis2: { + title: 'Caudal (l/s)', + overlaying: 'y', + side: 'right' + }, + width: width_graph-40, + //showlegend: false + }; + + }*/ + else{ + layout = { + title: var_nombre + ' - '+ est_nombre, + width: width_graph-40, + //showlegend: false + }; + + } + + + Plotly.newPlot('div_grafico', data, layout); + // renombrar angular axis + var angulo_index = 0; + var angulo_dict = {1:'N', 2:'NE', 3:'E', 4:'SE', 5:'S', 6:'SO', 7:'O', 8:'NO'}; + $("g.polarsublayer").find("g.angularaxistick").each(function(){ + angulo_index++; + $(this).find("text").html(angulo_dict[angulo_index]); + }); + + $("#div_modal_grafico").modal("show"); + +} + +//Deshacer los cambios realizados en la tabla crudos/diarios +function mostrar(event){ + var name = event.currentTarget.name; + if (name === 'crudo') + $table = $("#table_crudo"); + else + $table = $("#table_diario"); + + setTimeout(function(){ + $table.bootstrapTable('showLoading'); + setTimeout(function(){ + //index comienza en 0 + //obtener los ids de todas las filas ocultas + var ids = $.map($table.bootstrapTable('getData',{unfiltered:true, includeHiddenRows: true}), function (row) { + if (row.estado == false){ + return row.id + } + }); + + //recorrer los ids y actualizar la columna estado + ids.map(function(id){ + $table.bootstrapTable('updateByUniqueId', { + id: id, + row: {estado: true} + }) + }); + + $table.bootstrapTable('uncheckBy', {field: 'id', values: ids}); + + //mostrar todas las columnas ocultas + $table.bootstrapTable('getHiddenRows', true); + + setTimeout(function(){ + $table.bootstrapTable('hideLoading'); + + },0 | Math.random() * 100); + },0 | Math.random() * 100); + },0 | Math.random() * 100); +} + +//Quitar filas de la tabla +function eliminar(event){ + debugger; + $(this).attr('disabled',true); + var name = event.currentTarget.name; + console.log(name) + if (name === 'crudo') + $table = $("#table_crudo"); + else + $table = $("#table_diario"); + + setTimeout(function(){ + $table.bootstrapTable('showLoading'); + setTimeout(function(){ + + var ids = $.map($table.bootstrapTable('getSelections'), function (row) { + return row.id + }); + ids.map(function(id){ + $table.bootstrapTable('updateByUniqueId', { + id: id, + row: {estado: false} + })/*.bootstrapTable('hideRow',{ + uniqueId:id + })*/ + + }); + + $table.bootstrapTable('uncheckBy', {field: 'id', values: ids}) + + setTimeout(function(){ + //$table.bootstrapTable('uncheckAll'); + $table.bootstrapTable('hideLoading'); + + },0 | Math.random() * 100); + },0 | Math.random() * 100); + },0 | Math.random() * 100); + + $(this).attr("disabled", false); +} + +// desvalidar datos +function 
desvalidar_datos(event){ + $(this).attr('disabled',true); + var name = event.currentTarget.name; + console.log(name) + if (name === 'crudo') + $table = $("#table_crudo"); + else + $table = $("#table_diario"); + + setTimeout(function(){ + $table.bootstrapTable('showLoading'); + setTimeout(function(){ + + var ids = $.map($table.bootstrapTable('getSelections'), function (row) { + return row.id + }); + ids.map(function(id){ + $table.bootstrapTable('updateByUniqueId', { + id: id, + row: {validado: false} + })/*.bootstrapTable('hideRow',{ + uniqueId:id + })*/ + + }); + + $table.bootstrapTable('uncheckBy', {field: 'id', values: ids}) + + + setTimeout(function(){ + //$table.bootstrapTable('uncheckAll'); + eliminar_validados(); + + //$table.bootstrapTable('hideLoading'); + + },0 | Math.random() * 100); + },0 | Math.random() * 100); + },0 | Math.random() * 100); + + $(this).attr("disabled", false); +} + + +//Crear un nuevo registro en la tabla de crudos +function nuevo_registro(event){ + + var limite_inferior = $("#id_limite_inferior").val(); + var limite_superior = $("#id_limite_superior").val(); + var variable_id = $("#id_variable").val(); + + var name = event.currentTarget.name; + + var $form = $("#form_nuevo_"+name); + var $form_name = "#form_nuevo_"+name; + $($form_name+',input[name="fecha"]').attr('disabled',false); + + var inputs =$form.serializeArray(); + var $modal = $("#modal_nuevo_"+name); + var data = {}; + var table = $table = $("#table_crudo"); + + $.each(inputs, function(i, field){ + data[field.name] = field.value; + }); + + var num_datos = $table.bootstrapTable('getData').length + data['id']= num_datos + 1; + console.log(data['fecha']); + data['fecha'] = data['fecha']+"T"+data['hora']+":00"; + delete data['hora']; + data['validado'] = false; + data['seleccionado'] = true; + data['estado'] = true; + data['maximo']=limite_superior; + data['minimo']=limite_inferior; + data['fecha_error'] = '1'; + data['stddev_error'] = false; + //data['varcon_error'] = false; + index = data['fila']; + delete data['fila']; + + if (variable_id == 1){ + data['valor_error'] = get_valor_error(data['valor']); + } + else{ + data['nivel_error'] = get_valor_error(data['nivel']); + } + console.log($table); + console.log(data); + datos = $table.bootstrapTable('getData') + var fila = get_existe_en_tabla(data['fecha'],datos); + if (fila.length == 0) { + $table.bootstrapTable('insertRow', { + index: index, + row: data + }); + + $modal.modal('hide'); + } + else{ + + + } + +} + +function get_existe_en_tabla(fecha, datos){ + var existe = false; + return datos.filter( + function(datos){ + //if(datos.fecha == fecha) + //existe = true + return datos.fecha == fecha + } + ) + + +} + + +// Cambiar valores modificados en la tabla crudos +function modificar(event){ + $('input[name="fecha"]').attr('disabled',false); + var variable_id = parseInt($("#id_variable").val()); + var name = event.currentTarget.name; + var $form = $("#form_"+name); + var inputs =$form.serializeArray(); + var $modal = $("#modal_"+name); + var data = {}; + var table = $table = $("#table_crudo"); + $.each(inputs, function(i, field){ + data[field.name] = field.value; + }); + id = data['id']; + delete data['id']; + data['estado'] = true; + if (variable_id == 10 || variable_id == 11){ + data['nivel_error'] = get_valor_error(data['nivel']); + } + else{ + data['valor_error'] = get_valor_error(data['valor']); + } + + + if (variable_id != 1 && variable_id != 10 && variable_id != 11){ + data['maximo_error'] = get_valor_error(data['maximo']); + data['minimo_error'] 
= get_valor_error(data['minimo']); + } + data['stddev_error'] = false; + console.log(data); + $table.bootstrapTable('updateByUniqueId',{ + id: id, + row: data + }); + $modal.modal('hide'); + +} +//Marcar filas por rango de ids en la tabla tabla crudos/diarios +function marcar(event){ + var name = event.currentTarget.name; + var cadena = ''; + var $table = ''; + + if (name === 'crudo'){ + $table = $("#table_crudo"); + cadena = $("#txt_seleccionar_crudo").val().toString(); + } + else{ + cadena = $("#txt_seleccionar").val().toString(); + $table = $("#table_diario"); + } + $table.bootstrapTable('showLoading'); + var arr_id = cadena.split(','); + var arr_range = cadena.split('-'); + var ids = [] + + if (arr_id.length>1){ + ids = arr_id.map(function(id){ + return parseInt(id) + }); + } + if (arr_range.length>0){ + var inicio = parseInt(arr_range[0]); + var fin = parseInt(arr_range[1]); + + for (var id = inicio; id <= fin; id++){ + ids.push(id); + } + } + + $table.bootstrapTable('checkBy', {field: 'id', values: ids}) + $table.bootstrapTable('hideLoading'); +} + +// Desmarcar filas seleccionadas tabla crudos/diarios +function desmarcar(event){ + var $table = ''; + var name = event.currentTarget.name; + if (name ==='crudo') + $table = $("#table_crudo"); + else + $table = $("#table_diario"); + + + $table.bootstrapTable('showLoading'); + var ids = $.map($table.bootstrapTable('getSelections'), function (row) { + return row.id + }); + $table.bootstrapTable('uncheckBy', {field: 'id', values: ids}); + $table.bootstrapTable('hideLoading'); + +} + +// generar la tabla de datos de validacion +function detalle_crudos(e, value, row){ + + var $table = $('#table_crudo'); + var estacion_id = $("#id_estacion").val(); + var variable_id = $("#id_variable").val(); + + var id_diario = 0; + var fecha = ''; + + var estado = row || false + + if (estado == false ){ + id_diario = $("#orig_id_diario").val(); + fecha = $("#orig_fecha_diario").val(); + } + else{ + id_diario = row.id; + fecha = row.fecha; + $("#orig_id_diario").val(id_diario); + $("#orig_fecha_diario").val(fecha); + } + + var var_maximo = $("#id_limite_superior").val(); + var var_minimo = $("#id_limite_inferior").val(); + + + enlace = '/val2/lista/' + estacion_id + '/' + variable_id + '/' + fecha + '/' + var_maximo + '/' + var_minimo; + + $.ajax({ + url: enlace, + type:'GET', + beforeSend: function () { + $table.bootstrapTable('showLoading'); + }, + success: function (data) { + datos_json = data.datos; + $table.bootstrapTable('destroy'); + for (const index in data.indicadores[0]){ + indicadores_crudos[index] = data.indicadores[0][index]; + $("#span_"+index+"_crudo").text(indicadores_crudos[index]); + } + /* this is an example for new snippet extension make by me xD */ + for (const element of datos_json) { + element["fecha"] = (element['fecha']).replace('T',' '); + } + var columns = get_column_validado(variable_id, data.indicadores[0]); + $table.bootstrapTable({columns:columns, data: datos_json, rowStyle: style_fila}) + //$table.bootstrapTable({columns:columns, data: datos_json}) + $table.bootstrapTable('hideLoading'); + }, + error: function () { + $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') + $("#div_mensaje_validacion").modal("show"); + + $table.bootstrapTable('hideLoading'); + } + }); + +}; + +//funcion para eliminar una fila de la tabla diario +function eliminar_diario(e, value, row, index){ + debugger; + console.log(row); + var $table = $('#table_diario'); + 
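// The row is not removed from the table data; it is only flagged estado=false so the footers, averages and the plot skip it.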
$table.bootstrapTable('updateRow', { + index: index, + row: { + estado: false + } + }); + $table.bootstrapTable('uncheckBy', {field: 'id', values: ids}); + + /*$table.bootstrapTable('hideRow', { + index: index + })*/ +} +//funcion para abrir el formulario de eliminar +function abrir_form_eliminar(e, value, row, index){ + var $form_modal = $('#modal_eliminar'); + var inputs = $("#form_eliminar").serializeArray(); + $.each(inputs, function(i, field){ + $('input[name="'+field.name+'"]').val(row[field.name]); + }); + $form_modal.modal("show"); +} + +//funcion para eliminar una fila de la tabla crudos +function eliminar_crudo(event){ + debugger; + //console.log("row", row); + var inputs = $("#form_eliminar").serializeArray(); + var $form_modal = $('#modal_eliminar'); + var table = $table = $("#table_crudo"); + var data = {}; + $.each(inputs, function(i, field){ + data[field.name] = field.value; + }); + + id = data['id']; + delete data['id']; + data['estado'] = false; + $table.bootstrapTable('updateByUniqueId',{ + id: id, + row: data + }); + $form_modal.modal('hide'); + + /*setTimeout(function(){ + },0 | Math.random() * 10);*/ + + + +} + +function abrir_formulario_nuevo(event){ + var fecha = $("#orig_fecha_diario").val(); + var variable_id = parseInt($("#id_variable").val()); + if (variable_id === 1){ + var $form_modal = $('#modal_nuevo_acumulado'); + var $form = "#form_nuevo_acumulado"; + var inputs = $("#form_nuevo_acumulado").serializeArray(); + + } + else{ + var $form_modal = $('#modal_nuevo_promedio'); + var $form = "#form_nuevo_promedio"; + var inputs = $("#form_nuevo_promedio").serializeArray(); + } + $($form+',input[name="fecha"]').val(fecha); + $($form+',input[name="fecha"]').attr('disabled',true); + $form_modal.modal("show"); +} + +//funcion para abrir un formulario de edicion de datos crudos +function abrir_formulario(e, value, row, index){ + var variable_id = parseInt( $("#id_variable").val()); + + + if (variable_id === 1){ + var $form_modal = $('#modal_acumulado'); + var inputs = $("#form_acumulado").serializeArray(); + + } + else if(variable_id == 10 || variable_id == 11){ + var $form_modal = $('#modal_agua'); + + var inputs = $("#form_agua").serializeArray(); + } + else{ + var $form_modal = $('#modal_promedio'); + var inputs = $("#form_promedio").serializeArray(); + } + + $.each(inputs, function(i, field){ + $('input[name="'+field.name+'"]').val(row[field.name]); + }); + + $('input[name="fecha"]').attr('disabled',true); + $form_modal.modal("show"); +} + + +//Generar las columnas de la tabla de datos diarios +function get_columns_diario(var_id){ + + var span = 'num'; + + var columns = []; + + var state = { + field:'state', + checkbox:true + }; + + var id = { + field:'id', + title:'Id', + cellStyle: style_id + }; + + var fecha = { + field:'fecha', + title: 'Fecha', + cellStyle: style_fecha, + formatter: format_valor, + footerFormatter: total_filas, + //filterControl: 'datepicker' + + }; + var porcentaje = { + field:'porcentaje', + title:'Porcentaje ', + cellStyle: style_porcentaje, + footerFormatter: footer_promedio, + //filterControl: 'input' + }; + + var accion = { + field: 'accion', + title: 'Acción', + formatter: operate_table_diario, + events: { + 'click .search': detalle_crudos, + 'click .delete': eliminar_diario, + //'click .update': abrir_formulario + + } + }; + var n_valor = { + field:'n_valor', + title:'Variación Consecutiva', + cellStyle: style_var_con, + footerFormatter: footer_variaConse_cont + }; + + columns.push(state); + columns.push(id); + columns.push(fecha); + 
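// Shared columns first (checkbox, id, fecha, porcentaje); the value columns pushed below vary with var_id (1 = accumulated sum, 4/5 = wind direction, otherwise mean/max/min).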
columns.push(porcentaje); + + + + if (var_id == 1) { + var valor = { + field:'valor', + title:'Valor ', + cellStyle: style_valor, + formatter: format_valor, + footerFormatter: footer_suma + }; + columns.push(valor); + columns.push(n_valor); + } + else if ((var_id == 4) || (var_id == 5)){ + + var valor = { + field:'valor', + title:'Valor ', + cellStyle: style_valor, + //formatter: format_valor, + footerFormatter: footer_promedio + }; + + var maximo = { + field:'maximo', + title:'Máximo ', + visible: false, + cellStyle: style_valor, + //formatter: format_valor, + footerFormatter: footer_promedio + }; + + var minimo= { + field:'minimo', + title:'Mínimo ', + visible: false, + cellStyle: style_valor, + //formatter: format_valor, + footerFormatter: footer_promedio + } + var direccion = { + field:'direccion', + title:'Dirección', + //cellStyle: style_valor, + //footerFormatter: footer_promedio + }; + var punto_cardinal = { + field:'categoria', + title:'Punto Cardinal', + //cellStyle: style_valor, + formatter: format_punto_cardinal, + //footerFormatter: footer_promedio + }; + columns.push(valor); + columns.push(maximo); + columns.push(minimo); + columns.push(direccion); + columns.push(punto_cardinal); + + } + + else{ + var valor = { + field:'valor', + title : 'Valor', + cellStyle: style_valor, + formatter: format_valor, + footerFormatter: footer_promedio + }; + var maximo = { + field:'maximo', + title:'Máximo ', + cellStyle: style_valor, + formatter: format_valor, + footerFormatter: footer_promedio + }; + + var minimo= { + field:'minimo', + title:'Mínimo ', + cellStyle: style_valor, + formatter: format_valor, + footerFormatter: footer_promedio + } + columns.push(valor); + columns.push(maximo); + columns.push(minimo); + columns.push(n_valor); + + + + } + + columns.push(accion); + + return columns + +} + +//generar las columnas para la tabla de datos crudos +function get_column_validado(var_id){ + var columns = []; + + var span = 'num'; + + var state = { + field:'state', + checkbox:true + }; + + var id = { + field:'id', + title:'Id', + cellStyle: style_id, + footerFormatter: footer_id + }; + + var fecha = { + field:'fecha', + title:'Fecha', + cellStyle: style_fecha, + footerFormatter: total_datos + }; + + var valor_atipico = { + field:'', + title:'Valores Atípicos', + cellStyle: style_stddev, + footerFormatter: footer_stddev + }; + + + var comentario = { + field:'comentario', + title:'Comentario' + }; + var n_valor = { + field:'n_valor', + title:'Variación Consecutiva', + cellStyle: style_varia_error, + footerFormatter: footer_variaConse + }; + + var accion = { + field: 'accion', + title: 'Acción', + formatter: operate_table_crudo, + events: { + 'click .delete_crudo': abrir_form_eliminar, + 'click .update': abrir_formulario + + } + }; + + columns.push(state); + columns.push(id); + columns.push(fecha); + + if (var_id =='1'){ + + + var valor = { + field:'valor', + title: 'Valor', + cellStyle: style_error_crudo, + footerFormatter: footer_suma + }; + columns.push(valor); + + } + else if ( (var_id == '4') || ( var_id == '5') ){ + var valor = { + field:'valor', + title:'Valor ', + cellStyle: style_valor, + //formatter: format_valor, + footerFormatter: footer_promedio + }; + + var maximo = { + field:'maximo', + title: 'Máximo', + visible: false, + cellStyle: style_error_crudo, + footerFormatter: footer_promedio + }; + + var minimo = { + field:'minimo', + title: 'Mínimo', + visible: false, + cellStyle: style_error_crudo, + footerFormatter: footer_promedio + }; + + + var punto_cardinal = { + 
field:'categoria', + title:'Punto Cardinal ', + //cellStyle: style_valor, + formatter: format_punto_cardinal, + //footerFormatter: footer_promedio + }; + + var direccion = { + field:'direccion', + title:'Dirección', + //cellStyle: style_valor, + //footerFormatter: footer_promedio + }; + columns.push(valor); + columns.push(maximo); + columns.push(minimo); + columns.push(direccion); + columns.push(punto_cardinal); + } + else{ + var valor = { + field:'valor', + title:'Valor', + cellStyle: style_error_crudo, + footerFormatter: footer_promedio + }; + + var maximo = { + field:'maximo', + title: 'Máximo', + cellStyle: style_error_crudo, + footerFormatter: footer_promedio + }; + + var minimo = { + field:'minimo', + title: 'Mínimo', + cellStyle: style_error_crudo, + footerFormatter: footer_promedio + }; + columns.push(valor); + columns.push(maximo); + columns.push(minimo); + + } + columns.push(valor_atipico); + columns.push(comentario); + columns.push(n_valor); + columns.push(accion); + return columns +} + +//Función para generar los iconos de acción de la tabla diario +function operate_table_diario(value, row, index) { + return [ + '', + '', + ' ', + '', + '', + ' ', + ].join('') +} + +//Función para generar los iconos de acción de la tabla crudos +function operate_table_crudo(value, row, index) { + return [ + '', + '', + ' ', + '', + '', + ' ', + ].join('') +} +/*Formatos para las tablas crudos/diario*/ +// Formato para el porcentaje de datos diarios +function style_porcentaje(value, row, index) { + var clase = '' + if (row.porcentaje_error == true) { + return { + classes: 'error' + } + } + else{ + return { + classes: 'normal' + } + } + +} +//Formato para el valor, maximo, minimo de la tabla crudos/diarios +function style_fila(row, index){ + if (row.estado == false) { + clase = 'error' + } + /*if (row.seleccionado == false){ + clase = 'no-seleccionado' + }*/ + else + clase = '' + return {classes: clase} +} +function style_valor(value, row, index, field){ + var clase = '' + field_numero = field+'_numero'; + limite_superior = $('#id_limite_superior').val(); + if (row[field_numero]>0 ) + clase = 'error'; + else + clase = 'normal'; + return { classes: clase} +} +//Formato para el error de la tabla crudos/diarios +function style_error_crudo(value, row, index, field){ + var clase = '' + field_error = field+'_error' + if (row[field_error] === true) + clase = 'error'; + else + clase = 'normal'; + return { classes: clase} +} +//Formato para la desviación estandar +function style_stddev(value, row, index){ + var clase = '' + if (row.stddev_error === true) + clase = 'error'; + else + clase = ''; + return { classes: clase} +} +function style_var_con(value, row, index){ + var clase = '' + if (row.c_varia_err >= 1) + clase = 'error'; + else + clase = ''; + return { classes: clase} +} +function style_varia_error(value, row, index){ + var clase = '' + if (row.varia_error === true) + clase = 'error'; + else + clase = ''; + return { classes: clase} +} + +//Formato para el formato de la fecha +function style_fecha(value, row, index){ + var clase = '' + if (row.fecha_error == 0 || row.fecha_error == 2 || row.fecha_error == 3 || row.fecha_numero > 0) + clase = 'error'; + else + clase = ''; + return { classes: clase} + +} +//Formato para la fila validada +function style_id(value, row, index){ + var clase = '' + if (row.validado == true) + clase = 'validado'; + ///else if (row.estado == false) + // clase = 'error'; + else if (row.seleccionado == false) + clase = 'error'; + else + clase = ''; + return { classes: clase} 
+ +} +/*Fomatos de celda para las tablas diario/crudos */ +// Poner el numero de errores en el día +function format_valor(value, row, index, field){ + var span = 'num'; + var content = '' + var field_numero = field + '_numero' + if (row[field_numero]>0 ){ + span = span.replace('num',row[field_numero].toString()); + content = value + ' ' + span; + } + else{ + //span = span.replace('num',0); + content = value; + } + return content +} + +function format_punto_cardinal(value, row, index, field){ + puntos_cardinales=['N', 'NE', 'E', 'SE', 'S', 'SO', 'O', 'NO']; + + return puntos_cardinales[parseInt(value-1)] + + +} + +/*Funciones para el footeer de la tabla*/ + +function footer_id(data){ + var span = 'num'; + var num_fecha = data.reduce(function(num, i){ + if (i['estado'] && i['seleccionado']==false) + return num +1; + else + return num; + }, 0); + + span = span.replace('num',num_fecha.toString()); + + return span; + +} + +// Obtener el promedio de los datos +function footer_promedio(data){ + var field = this.field; + var field_error = this.field + '_error'; + + + var span = 'num'; + + var promedio = 0; + var suma = data.reduce(function (sum, i) { + if (i['estado'] && i[field] != null) + return sum + parseFloat(i[field]) + else + return sum + }, 0); + var num_datos = data.reduce(function (sum, i) { + if (i['estado'] && i[field] != null) + return sum + 1 + else + return sum + }, 0); + + var num_valor = data.reduce (function (num, i){ + //console.log('field',i[field_error]) + if (i[field_error] && i['estado']) + return num + 1; + else + return num; + }, 0); + + span = span.replace('num', num_valor); + + if (isNaN(suma)) + promedio = '-'; + else + promedio = (suma / num_datos).toFixed(2); + return promedio + ' ' + span; +} +//obtener la suma de los datos +function footer_suma(data){ + var field = this.field; + var field_error = this.field + '_error'; + var span = 'num'; + var suma = data.reduce(function (sum, i) { + if (i['estado'] && i[field] != null){ + return sum + parseFloat(i[field]) + } + else{ + return sum + } + + }, 0); + var num_valor = data.reduce (function (sum, i){ + if (i[field_error] && i['estado']) + return sum +1 ; + else + return sum; + }, 0); + + span = span.replace('num', num_valor); + + return suma.toFixed(2) + ' ' + span; +} + +//total de dias +function total_filas(data){ + + var span = 'num'; + + var var_id = $("#id_variable").val(); + var fechas = []; + /*if ((var_id == 4) || (var_id == 5)) { + console.log(data); + $.map(data, function(row){ + fechas.push(row.fecha); + }); + console.log( fechas.unique() ); + }*/ + + $.map(data, function(row){ + fechas.push(row.fecha); + }); + + /*var suma = data.reduce(function (sum, i) { + if (i['estado']){ + return sum + 1 + } + else{ + return sum + } + + }, 0);*/ + + var suma = fechas.unique().length; + + var num_fecha = data.reduce(function(num, i){ + if ((i['fecha_error']==0) || (i['fecha_error']==2) || (i['fecha_error']==3)) + return num +1; + else + return num; + }, 0); + + span = span.replace('num',num_fecha.toString()); + + return suma + ' de ' + indicadores_diarios['num_dias'] + ' días ' + span; +} +//total de datos +function total_datos(data){ + var span = 'num'; + + var suma = data.reduce(function (sum, i) { + if (i['estado']){ + return sum + 1 + } + else{ + return sum + } + + }, 0); + + var num_fecha = data.reduce(function(num, i){ + if ( (i['fecha_error']==0) || (i['fecha_error']==2) || (i['fecha_error']==3)) + return num +1; + else + return num; + }, 0); + + span = span.replace('num',num_fecha.toString()); + + return suma + ' 
de ' + indicadores_crudos['num_datos'] + ' datos ' + span; +} +// valores atípicos +function footer_stddev(data){ + var span = 'num'; + var num_stddev = data.reduce(function(num, i){ + if (i['stddev_error'] && i['estado']) + return num +1; + else + return num; + }, 0); + + span = span.replace('num',num_stddev.toString()); + + return span; +} +function footer_variaConse(data){ + var span = 'num'; + var num_vcr = data.reduce(function(num, i){ + if (i['stddev_error'] && i['estado']) + return num +1; + else + return num; + }, 0); + + if (num_vcr > 1){ + num_vcr -= 1; + } + span = span.replace('num',num_vcr.toString()); + + return span; +} +function footer_variaConse_cont(data){ + var span = 'num'; + var num_vc= data.reduce(function(num, i){ + if (i['c_varia_err'] >= 1 ) + return num + 1; + else + return num; + }, 0); + + span = span.replace('num',num_vc); + return span; +} + + +/* Filtro de las Tablas */ +function filtrar_diario(){ + var fecha = $("#chk_fecha").val(); + var porcentaje = $("#chk_porcentaje").val(); + var numero = $("#chk_numero").val(); + + var filtro_fecha = get_filtro_fecha(fecha); + var filtro_porcentaje = get_filtro_porcentaje(porcentaje); + var filtro_valor = get_filtro_valor(numero); + + var var_id = $("#id_variable").val(); + + if (var_id == 10 || var_id == 11){ + $("#table_diario").bootstrapTable('filterBy', { + fecha_error: filtro_fecha, + porcentaje_error: filtro_porcentaje, + nivel_error: filtro_valor, + //estado:[true] + }); + + } + else { + $("#table_diario").bootstrapTable('filterBy', { + fecha_error: filtro_fecha, + porcentaje_error: filtro_porcentaje, + valor_error: filtro_valor, + //estado:[true] + }); + + } + + +} + +function get_filtro_fecha(fecha){ + var filtro_fecha = []; + if (fecha == 'error') + filtro_fecha = ['0','2', '3']; + else if (fecha == 'normal') + filtro_fecha = ['1']; + else + filtro_fecha = ['0','1', '2', '3']; + + return filtro_fecha +} + +function get_filtro_porcentaje(porcentaje){ + var filtro_porcentaje = []; + + if (porcentaje == 'error') + filtro_porcentaje = [true]; + else if (porcentaje == 'normal') + filtro_porcentaje = [false]; + else + filtro_porcentaje = [true, false]; + + return filtro_porcentaje +} + +function get_filtro_valor(numero){ + + var filtro_valor = []; + + if (numero == 'error') + filtro_valor = [true]; + else if (numero == 'normal') + filtro_valor = [false]; + else + filtro_valor = [true, false, null]; + + return filtro_valor +} + +function get_filtro_stddev(numero){ + + var filtro_valor = []; + + if (numero == 'error') + filtro_valor = [true]; + else if (numero == 'normal') + filtro_valor = [false]; + else + filtro_valor = [true, false, null]; + + return filtro_valor +} + +function get_filtro_estado(numero){ + + var filtro_valor = []; + + if (numero == 'error') + filtro_valor = [false]; + else if (numero == 'normal') + filtro_valor = [true]; + else + filtro_valor = [true, false]; + + return filtro_valor +} +function get_filtro_var_con(numero){ + console.log("Valorfiltro con ", numero); + var filtro_valor = []; + + if (numero == 'error') + filtro_valor = [true]; + else if (numero == 'normal') + filtro_valor = [false]; + else + filtro_valor = [true, false, null]; + + return filtro_valor +} +function filtrar_crudo(){ +console.log("Filtrar Crudo ") + var fecha = $("#chk_fecha_crudo").val(); + var valor = $("#chk_valor_crudo").val(); + var stddev = $("#chk_stddev").val(); + var fila = $("#chk_fila").val(); + var varconfil = $("#chk_varcon").val(); + + var filtro_fecha = get_filtro_fecha(fecha); + var filtro_valor = 
get_filtro_valor(valor); + var filtro_stddev = get_filtro_stddev(stddev); + var filtro_fila = get_filtro_estado(fila); + var filtro_varcon = get_filtro_var_con(varconfil); + var variable_id = $("#id_variable").val(); + + if (variable_id == 10 || variable_id == 11){ + $("#table_crudo").bootstrapTable('filterBy', { + fecha_error: filtro_fecha, + nivel_error: filtro_valor, + stddev_error: filtro_stddev, + estado:filtro_fila + }); + } + else{ + console.log("En el else 2269"); + $("#table_crudo").bootstrapTable('filterBy', { + fecha_error: filtro_fecha, + valor_error: filtro_valor, + stddev_error: filtro_stddev, + varia_error: filtro_varcon, + estado:filtro_fila + }); + + } +} + + +function limpiar_filtros(tipo){ + if (tipo == 'crudo'){ + $("#chk_fecha_crudo").prop('selectedIndex',0); + $("#chk_valor_crudo").prop('selectedIndex',0); + $("#chk_stddev").prop('selectedIndex',0); + $("#chk_fila").prop('selectedIndex',0); + $("#chk_varcon").prop('selectedIndex',0); + $("#txt_seleccionar_crudo").val(''); + + } + else{ + $("#chk_fecha").prop('selectedIndex',0); + $("#chk_porcentaje").prop('selectedIndex',0); + $("#chk_numero").prop('selectedIndex',0); + $("#txt_seleccionar").val(''); + } + + + +} + +function get_valor_error(valor){ + var limite_inferior = Number($("#id_limite_inferior").val()); + var limite_superior = Number($("#id_limite_superior").val()); + + var valor_error = false; + + + if (Number(valor) > limite_superior || Number(valor) < limite_inferior ) + valor_error = true; + else + valor_error = false; + + return valor_error; +} + + +function habilitar_nuevo(){ + var variable_id = $("#id_variable").val(); + + //if (variable_id == "1" || variable_id == "10" || variable_id == "11") + if ( variable_id != "11" || variable_id == "4" || variable_id == "5" ) + $("#btn_nuevo_crudo").attr("disabled", false); + else + $("#btn_nuevo_crudo").attr("disabled", true); + + +} + +function activar_espera(type){ + + var type = type || '' + if (type !== '') { + var $div_data = $('#div_'+type); + var $div_loading = $('#div_loading_'+type); + var $div_message = $('#div_message_'+type) + } + else{ + var $div_data = $('#div_informacion'); + var $div_loading = $('#div_loading'); + var $div_message = $('#div_error') + + } + $div_loading.show(); + $div_data.hide(); + $div_message.hide(); + $("#div_informacion").hide(); +} + +//función para quitar duplicados +Array.prototype.unique=function(a){ + return function(){return this.filter(a)}}(function(a,b,c){return c.indexOf(a,b+1)<0 +}); + + +function mostrar_mensaje(type){ + /*var message = ''*/ + + var type = type || '' + if (type !== ''){ + var $div_data = $('#div_'+type); + var $div_loading = $('#div_loading_'+type); + var $div_message = $('#div_message_'+type) + } + else{ + var $div_data = $('#div_informacion'); + var $div_loading = $('#div_loading'); + var $div_message = $('#div_error'); + + } + + $div_loading.hide(); + $div_data.hide(); + //$div_message.html(message); + $div_message.show(); + +} + +function desactivar_espera(type){ + var type = type || '' + if (type !== '') { + var $div_data = $('#div_'+type); + var $div_loading = $('#div_loading_'+type); + var $div_message = $('#div_message_'+type); + var $resize_plot = $('#resize_plot'+type); + } + else{ + var $div_data = $('#div_informacion'); + var $div_loading = $('#div_loading'); + var $div_message = $('#div_error'); + var $resize_plot = $('#resize_plot'); + + } + $div_loading.hide(); + $div_data.show(); + $div_message.hide(); + $resize_plot.show(); + $("#div_informacion").show(); +} diff --git 
a/validated/static/validated/js_ajax2.js b/validated/static/validated/js_ajax2.js new file mode 100755 index 00000000..ced37d0f --- /dev/null +++ b/validated/static/validated/js_ajax2.js @@ -0,0 +1,289 @@ + +$(document).ready(function() { + //activar tooltip + $('[data-toggle="tooltip"]').tooltip() + + + // Comparar Variables + $("#btn_graficar").click(function(){ + $(this).attr('disabled',true); + $.ajax({ + url: $("#form_consulta").attr('action'), + data: $("#form_consulta").serialize(), + type:'POST', + dataType: 'json', + beforeSend: function () { + $("#div_informacion").hide(); + $("#div_loading").show(); + $("#div_error").hide(); + }, + success: function (data) { + $("#div_informacion").show(); + var count = Object.keys(data.data[0].y).length; + if (count>0) { + Plotly.newPlot('div_informacion', data.data,data.layout); + + } + else{ + $("#div_informacion").html('') + } + $("#btn_graficar").removeAttr('disabled'); + + $("#div_loading").hide(); + + $("#div_error").hide(); + }, + error: function () { + //$("#div_informacion").hide(); + $("#div_loading").hide(); + $("#div_error").show(); + $("#btn_graficar").removeAttr('disabled'); + } + }); + }); + + + $("#btn_consultar").click(function(){ + $(this).attr('disabled',true); + $.ajax({ + url: $("#form_busqueda").attr('action'), + data: $("#form_busqueda").serialize(), + type:'POST', + dataType: 'json', + beforeSend: function () { + $("#div_informacion").hide(); + $("#div_loading").show(); + $("#div_error").hide(); + $("#div_mensaje").hide(); + }, + success: function (data) { + + if (Object.keys(data)=="mensaje"){ + $("#div_mensaje").html(data.mensaje); + $("#div_mensaje").show(); + } + else{ + $("#div_informacion").show(); + var count = Object.keys(data.data[0].y).length; + if (count>0) { + Plotly.newPlot('div_informacion', data.data,data.layout,{scrollZoom: true}); + } + else{ + //$("#div_informacion").html('') + $("#div_mensaje").html(''); + $("#div_mensaje").show(); + } + + } + + $("#btn_consultar").removeAttr('disabled'); + + $("#div_loading").hide(); + + $("#div_error").hide(); + }, + error: function () { + //$("#div_informacion").hide(); + $("#div_loading").hide(); + $("#div_error").show(); + $("#btn_consultar").removeAttr('disabled'); + } + }); + }); + + //formulario validacion + $("#btn_filtrar").click(function(){ + + //actualizar_lista(); + $(this).attr('disabled',true); + $.ajax({ + url: '/medicion/filter/', + data: $("#form_filter").serialize(), + type:'POST', + dataType: 'json', + beforeSend: function () { + $("#div_informacion").hide(); + $("#div_loading").show(); + $("#div_error").hide(); + $("#div_mensaje").hide(); + }, + success: function (data) { + if (Object.keys(data)=="mensaje"){ + $("#div_mensaje").html(data.mensaje); + $("#div_mensaje").show(); + } + else{ + $("#div_informacion").show(); + var count = Object.keys(data.data[0].y).length; + if (count>0) { + Plotly.newPlot('div_informacion', data.data,data.layout, {scrollZoom: true}); + } + else{ + //$("#div_informacion").html('') + $("#div_mensaje").html(''); + $("#div_mensaje").show(); + } + } + + $("#btn_filtrar").removeAttr('disabled'); + + $("#div_loading").hide(); + + $("#div_error").hide(); + }, + error: function () { + $("#btn_filtrar").removeAttr('disabled'); + $("#div_informacion").hide(); + $("#div_loading").hide(); + $("#div_error").show(); + } + }); + + $.ajax({ + url: '/medicion/datos_validacion/', + data: $("#form_filter").serialize(), + type:'POST', + beforeSend: function () { + $("#div_lista_datos").hide(); + $("#div_loading_datos").show(); + 
$("#div_error_datos").hide(); + $("#div_mensaje").hide(); + }, + success: function (data) { + $("#div_lista_datos").html(data); + $("#div_lista_datos").show(); + $("#btn_filtrar").removeAttr('disabled'); + $("#div_loading_datos").hide(); + $("#div_error_datos").hide(); + $("#div_mensaje").hide(); + + }, + error: function () { + + $("#btn_filtrar").removeAttr('disabled'); + $("#div_lista_datos").hide(); + $("#div_loading_datos").hide(); + $("#div_error_datos").show(); + } + }); + return false; + }); + + //consultar los periodos de validacion + $("#btn_periodos_validacion").click(function(){ + //periodos_validacion(); + }); + + + //Cargar variables por estacion + + $("#id_estacion").change(function () { + var estacion = $(this).val(); + $("#id_variable").find('option').remove().end() + $("#id_variable").append(''); + $.ajax({ + url: '/anuarios/variables/'+estacion, + dataType: 'json', + success: function (data) { + $.each(data, function(index, value) { + $("#id_variable").append(''); + }); + } + }); + + + }); + + //consulta y guarda la información + $("#btn_procesar").click(function(){ + $(this).attr('disabled',true); + $.ajax({ + url: $("#form_procesar").attr('action'), + data: $("#form_procesar").serialize(), + type:'POST', + beforeSend: function () { + activar_espera("#div_loading","#div_informacion","#div_error") + }, + success: function (data) { + $("#div_informacion").html(data) + $("#btn_procesar").removeAttr('disabled'); + desactivar_espera("#div_loading","#div_informacion","#div_error") + }, + error: function () { + mostrar_mensaje("#div_loading","#div_informacion","#div_error") + $("#btn_procesar").removeAttr('disabled'); + } + }); + }); + + + //datepicker con intervalo restringido + var dateFormat = "yy-mm-dd"; + $( "#id_inicio" ).datepicker({ + changeMonth: true, + changeYear: true, + dateFormat:"yy-mm-dd", + yearRange: '2000:'+(new Date).getFullYear() + }); + $( "#id_inicio" ).on( "change", function() { + $( "#id_fin" ).datepicker( "option", "minDate", getDate( this ) ); + }); + $( "#id_fin" ).datepicker({ + changeMonth: true, + changeYear: true, + dateFormat:"yy-mm-dd", + yearRange: '2000:'+(new Date).getFullYear() + }); + $( "#id_fin" ).on( "change", function() { + $( "#id_inicio" ).datepicker( "option", "maxDate", getDate( this ) ); + }); + + function getDate( element ) { + var date; + try { + date = $.datepicker.parseDate( dateFormat, element.value ); + } catch( error ) { + date = null; + } + return date; + } + + + function periodos_validacion(){ + token = $("input[name='csrfmiddlewaretoken']").val(); + estacion_id = $("input[name='orig_estacion_id']").val(); + variable_id = $("input[name='orig_variable_id']").val(); + + $.ajax({ + url: '/validacion_v2/periodos_validacion/', + data: $("#form_filter").serialize(), + type:'POST', + beforeSend: function () { + //$("#div_historial").hide(); + //$("#div_loading_historial").show(); + //$("#div_error_historial").hide(); + activar_espera("historial") + }, + success: function (data) { + $("#btn_periodos_validacion").attr("disabled", false); + $("#div_historial").html(data) + + + $("#div_historial").show(); + $("#div_loading_historial").hide(); + $("#div_error_historial").hide(); + + }, + error: function () { + $("#div_historial").hide(); + $("#div_loading_historial").hide(); + $("#div_error_historial").show(); + + } + }); + } + + + + +}); diff --git a/validated/templates/daily_validation.html b/validated/templates/daily_validation.html new file mode 100755 index 00000000..d758daf9 --- /dev/null +++ 
b/validated/templates/daily_validation.html @@ -0,0 +1,697 @@ +{% extends "base.html" %} +{% block content %} +{% load bootstrap4 %} +{% load static %} + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
Validación Datos Crudos por Frecuencia Diaria
+
+
+ Limpiar +
+
+ + +
+
+ +
+ +
+
+ {% csrf_token %} + {% for field in form%} + {% bootstrap_field field show_label=False form_group_class='col-lg-3 col-md-3 col-sm-6 mt-3' %} + {% endfor %} +
+ + +
+
+
+ + +
+ + +
+
+
+ +
+
+
+ + + + + +
+
+
+ + +
+
+ Filtro Fecha + +
+ +
+ Filtro Porcentaje + +
+ +
+ Filtro Valor + +
+
+ + + + +
+
+ +
+
+
+ + +
+
+ +
+
+ + + + + + +
+
+
+ + + + + + + + + + + + +
Id | Fecha | Porcentaje | Valor | Acción
+ + +
+
+
+ + +
+
+ + Filtro Fecha + +
+
+ Filtro Valor + +
+
+ Valores atípicos + +
+
+ Filtro Estado + +
+
+ Filtro Variación + +
+ +
+ + +
+
+ + +
+
+
+ + +
+ +
+ +
+
+ + + + + +
+
+
+ + + + + + + + + + + + + + + +
Id | Fecha | Valor | Variación consecutiva | Valores Atípicos | Comentario | Acción
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+{% endblock %} diff --git a/validated/templates/datos_horarios.html b/validated/templates/datos_horarios.html new file mode 100755 index 00000000..3130b17c --- /dev/null +++ b/validated/templates/datos_horarios.html @@ -0,0 +1,30 @@ +
+ + + + + + + + + + + + + + + {% for item in datos %} + + + + + + + + + {% endfor%} + + +
Fecha | Valor
{{ item.fecha|date:"Y-m-d H:i:s" }} {{ item.valor }}
+ +
\ No newline at end of file diff --git a/validated/templates/diario_table.html b/validated/templates/diario_table.html new file mode 100755 index 00000000..c5df9210 --- /dev/null +++ b/validated/templates/diario_table.html @@ -0,0 +1,43 @@ +
+ + + + + + {% if variable.var_id == 1 %} + + + {% else %} + + + + + {% endif %} + + + + + + + + {% for item in datos %} + + + {% if variable.var_id == 1 %} + + + {% else %} + + + + + {% endif %} + + + + {% endfor%} + + +
Fecha | Valor
{{ item.fecha|date:"Y-m-d H:i:s" }} {{ item.valor }} {{ item.fecha|date:"Y-m-d H:i:s" }} {{ item.valor }} {{ item.maximo }} {{ item.minimo }}
+ +
\ No newline at end of file diff --git a/validated/templates/periodos_validacion.html b/validated/templates/periodos_validacion.html new file mode 100755 index 00000000..d0acacf4 --- /dev/null +++ b/validated/templates/periodos_validacion.html @@ -0,0 +1,37 @@ +{% load bootstrap4 %} +{% load humanize %} + + +{% if intervalos %} +
+ + + + + + + + + {% for item in intervalos %} + + + + + + {% endfor%} + +
Fecha Inicio | Fecha Fin | Validado
{{item.fecha_inicio | date:"Y-m-d H:i:s"}} {{item.fecha_fin | date:"Y-m-d H:i:s"}} {% if item.validado %}Sí{% else %}No{% endif %}
+
+{% else %} +
+

No hay datos de mediciones!

+
+{% endif %} diff --git a/validated/templates/validacion_diaria.html b/validated/templates/validacion_diaria.html new file mode 100755 index 00000000..d758daf9 --- /dev/null +++ b/validated/templates/validacion_diaria.html @@ -0,0 +1,697 @@ +{% extends "base.html" %} +{% block content %} +{% load bootstrap4 %} +{% load static %} + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
Validación Datos Crudos por Frecuencia Diaria
+
+
+ Limpiar +
+
+ + +
+
+ +
+ +
+
+ {% csrf_token %} + {% for field in form%} + {% bootstrap_field field show_label=False form_group_class='col-lg-3 col-md-3 col-sm-6 mt-3' %} + {% endfor %} +
+ + +
+
+
+ + +
+ + +
+
+
+ +
+
+
+ + + + + +
+
+
+ + +
+
+ Filtro Fecha + +
+ +
+ Filtro Porcentaje + +
+ +
+ Filtro Valor + +
+
+ + + + +
+
+ +
+
+
+ + +
+
+ +
+
+ + + + + + +
+
+
+ + + + + + + + + + + + +
Id | Fecha | Porcentaje | Valor | Acción
+ + +
+
+
+ + +
+
+ + Filtro Fecha + +
+
+ Filtro Valor + +
+
+ Valores atípicos + +
+
+ Filtro Estado + +
+
+ Filtro Variación + +
+ +
+ + +
+
+ + +
+
+
+ + +
+ +
+ +
+
+ + + + + +
+
+
+ + + + + + + + + + + + + + + +
Id | Fecha | Valor | Variación consecutiva | Valores Atípicos | Comentario | Acción
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+{% endblock %} diff --git a/validated/urls.py b/validated/urls.py index a93b491b..8c604e47 100755 --- a/validated/urls.py +++ b/validated/urls.py @@ -55,6 +55,21 @@ views.PercentageOxygenConcentrationDepthList.as_view(), ), path("phycocyanin_depth/", views.PhycocyaninDepthList.as_view()), + + ################ + # # path('validacion_v2/periodos_validacion/', views.PeriodosValidacion.as_view(), name='v2_periodos_validacion'), + # # path('validacion_v2/borrar/', views.ValidacionBorrar.as_view(), name='v2_borrar'), + path('validated/daily_validation/', views.DailyValidation.as_view(), name='daily_validation'), + # path('val2/diaria/', views.ValidationReport.as_view(), name='diaria'), + # path('val2/lista//////', + # views.ListaValidacion.as_view(), + # name='horario'), + # # path('validacion_v2/guardarcrudos/', views.guardar_crudos, name='v2_guardar_crudos'), + # path('val2/guardarvalidados/', views.guardar_validados, name='guardar_validados'), + # # path('validacion_v2/eliminarvalidados/', views.eliminar_validados, name='v2_eliminar_validados'), + # + # # path('validacion_v2/', views.ValidacionList.as_view(), name='validacion_v2_index'), + ] urlpatterns = format_suffix_patterns(urlpatterns) diff --git a/validated/views.py b/validated/views.py index e340b0fb..94e55ae4 100755 --- a/validated/views.py +++ b/validated/views.py @@ -430,3 +430,380 @@ class PhycocyaninDepthList(ValidatedDepthListBase): # dischargecurve.save() # result = {"res": True} # return JsonResponse(result) + + + + +from django.shortcuts import render +# from medicion.forms import ValidacionSearchForm +from django.contrib.auth.mixins import LoginRequiredMixin +from django.contrib.auth.decorators import permission_required +from django.views.generic import ListView, FormView +# from val2 import functions +import json +from django.core.serializers.json import DjangoJSONEncoder +from django.http import JsonResponse,HttpResponse +# from val2.forms import BorrarForm +# from validacion import functions as funcvariable +from variable.models import Variable +from django.db import connection +from datetime import datetime,time +# from medicion.models import * +from django.db.models import Min, Max +from django.contrib.auth.mixins import PermissionRequiredMixin +# from estacion.views import listar_anio +# from val2.functions import guardar_cambios_validacion + +# from threading import Thread +# from validacion.serializers import * +from rest_framework.generics import ListAPIView +import pandas as pd +from .forms import DailyValidationForm + + +# class ValidationReport(PermissionRequiredMixin, FormView): +# class DailyValidation(PermissionRequiredMixin, FormView): +class DailyValidation(FormView): + # template_name = 'validacion_diaria.html' + template_name = 'daily_validation.html' + # form_class = ValidacionSearchForm + form_class = DailyValidationForm + # success_url = '/val2/diaria/' + success_url = '/validated/daily_validation/' + permission_required = 'validated.daily_validation' + + def post(self, request, *args, **kwargs): + form = DailyValidationForm(self.request.POST or None) + if form.is_valid(): + if self.request.is_ajax(): + modelo = 'Var' + form.data['variable'] + 'Medicion' + modelo = globals()[modelo] + fechaa = modelo.objects.filter(estacion_id__exact=form.data['estacion']).aggregate(Max('fecha'), + Min('fecha')) + if form.data['inicio'] == '': + inicio = fechaa['fecha__min'] + else: + inicio = datetime.combine(form.cleaned_data['inicio'], time(0, 0, 0, 0)) + if form.data['fin'] == '': + fin = fechaa['fecha__max'] + fin 
= datetime.combine(fin, time(23, 59, 59, 999999)) + else: + fin = datetime.combine(form.cleaned_data['fin'], time(23, 59, 59, 999999)) + variable = form.cleaned_data['variable'] + estacion = form.cleaned_data['estacion'] + maximo = form.cleaned_data['limite_superior'] + minimo = form.cleaned_data['limite_inferior'] + data = functions.reporte_diario(estacion, variable, inicio, fin, maximo, minimo) + data_json = json.dumps(data, allow_nan=True, cls=DjangoJSONEncoder) + return HttpResponse(data_json, content_type='application/json') + + return render(request, 'home/form_error.html', {'form': form}) + + +# Consulta de datos crudos y/o validados por estacion, variable y hora +class ListaValidacion(PermissionRequiredMixin, ListView): + template_name = 'home/mensaje.html' + permission_required = 'validacion_v2.validacion_diaria' + + def get(self, request, *args, **kwargs): + if self.request.is_ajax(): + est_id = kwargs.get('estacion') + var_id = kwargs.get('variable') + fecha_str = kwargs.get('fecha') + maximo = kwargs.get('maximo') + minimo = kwargs.get('minimo') + + datos = functions.detalle_diario(est_id, var_id, fecha_str, maximo, minimo) + data_json = json.dumps(datos, allow_nan=True, cls=DjangoJSONEncoder) + return HttpResponse(data_json, content_type='application/json') + mensaje = 'Ocurrio un problema con el procesamiento de la información, por favor contacte con el administrador' + return render(request, 'home/mensaje.html', {'mensaje': mensaje}) + +# +# # Consular los periodos de validacion por estacion y variable +# class PeriodosValidacion(PermissionRequiredMixin, FormView): +# template_name = 'validacion_v2/periodos_validacion.html' +# form_class = ValidacionSearchForm +# success_url = '/medicion/filter/' +# permission_required = 'validacion_v2.validacion_diaria' +# lista = [] +# +# def post(self, request, *args, **kwargs): +# estacion_id = None +# variable_id = None +# try: +# estacion_id = int(request.POST.get('estacion', None)) +# variable_id = int(request.POST.get('variable', None)) +# print(estacion_id) +# inicio = request.POST.get('inicio', None) +# variable = Variable.objects.get(var_id=variable_id) +# modelo = variable.var_modelo.lower() +# except: +# pass +# +# intervalos = functions.periodos_validacion(est_id=estacion_id, variable=variable, inicio=inicio) +# return render(request, self.template_name, {'intervalos': intervalos}) +# +# +# # TODO: Revisar el funcionamiento de esta vista +# class ValidacionBorrar(PermissionRequiredMixin, FormView): +# template_name = 'validacion/borrar.html' +# form_class = BorrarForm +# success_url = '/validacion/borrar/' +# permission_required = 'validacion_v2.validacion_diaria' +# resultado = None +# +# def form_valid(self, form): +# estacion_id = form.cleaned_data['estacion'].est_id +# variable = form.cleaned_data['variable'] +# inicio = form.cleaned_data['inicio'] +# fin = form.cleaned_data['fin'] +# +# filas_validado = 0 +# sql = "DELETE FROM validacion_var%%var_id%%validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s;" +# if variable.var_id == 4 or variable.var_id == 5: +# sql = """DELETE FROM validacion_viento WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# DELETE FROM validacion_var4validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# DELETE FROM validacion_var5validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# """ +# +# sql = sql.replace('%%var_id%%', str(variable.var_id)) +# with connection.cursor() as cursor: +# cursor.execute(sql, [estacion_id, inicio, fin]) +# filas_validado = 
cursor.rowcount +# +# filas_horario = 0 +# sql = "DELETE FROM horario_var%%var_id%%horario WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s);" +# if variable.var_id == 4 or variable.var_id == 5: +# sql = """DELETE FROM horario_var4horario WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s); +# DELETE FROM horario_var5horario WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s);""" +# +# sql = sql.replace('%%var_id%%', str(variable.var_id)) +# with connection.cursor() as cursor: +# cursor.execute(sql, [estacion_id, inicio, fin]) +# filas_horario = cursor.rowcount +# +# filas_diario = 0 +# sql = "DELETE FROM diario_var%%var_id%%diario WHERE estacion_id = %s AND fecha >= date_trunc('day', %s) AND fecha <= date_trunc('day', %s);" +# if variable.var_id == 4 or variable.var_id == 5: +# sql = """DELETE FROM diario_var4diario WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s); +# DELETE FROM diario_var5diario WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s);""" +# +# sql = sql.replace('%%var_id%%', str(variable.var_id)) +# with connection.cursor() as cursor: +# cursor.execute(sql, [estacion_id, inicio, fin]) +# filas_diario = cursor.rowcount +# +# filas_mensual = 0 +# sql = "DELETE FROM mensual_var%%var_id%%mensual WHERE estacion_id = %s AND fecha >= date_trunc('month', %s) AND fecha <= date_trunc('month', %s);" +# if variable.var_id == 4 or variable.var_id == 5: +# sql = """DELETE FROM mensual_var4mensual WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s); +# DELETE FROM mensual_var5mensual WHERE estacion_id = %s AND fecha >= date_trunc('hour', %s) AND fecha <= date_trunc('hour', %s);""" +# +# sql = sql.replace('%%var_id%%', str(variable.var_id)) +# with connection.cursor() as cursor: +# cursor.execute(sql, [estacion_id, inicio, fin]) +# filas_mensual = cursor.rowcount +# +# if self.request.is_ajax(): +# data = { +# 'filas_validado': filas_validado, +# 'filas_horario': filas_horario, +# 'filas_diario': filas_diario, +# 'filas_mensual': filas_mensual +# } +# cursor.close() +# return JsonResponse(data) +# else: +# cursor.close() +# return super().form_valid(form) +# +# +# @permission_required('validacion_v2.validacion_diaria') +# def guardar_crudos(request): +# # Verificando datos json para evitar inyeccion SQL +# estacion_id = int(request.POST.get('estacion_id', None)) +# variable_id = int(request.POST.get('variable_id', None)) +# cambios_json = request.POST.get('cambios', None) +# print('Guardar Crudos') +# variable = Variable.objects.get(var_id=variable_id) +# variable_nombre = str(variable_id) +# cambios_lista = json.loads(cambios_json) +# +# # print(cambios_json) +# +# fecha_inicio_dato = cambios_lista[0]['fecha'] +# fecha_fin_dato = cambios_lista[-1]['fecha'] +# # Borrar datos +# if variable.var_id == 4 or variable.var_id == 5: +# with connection.cursor() as cursor: +# sql = """DELETE FROM validacion_viento WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# """ +# cursor.execute(sql, [estacion_id, fecha_inicio_dato, fecha_fin_dato]) +# sql = """DELETE FROM validacion_var4validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# """ +# cursor.execute(sql, [estacion_id, fecha_inicio_dato, fecha_fin_dato]) +# sql = """DELETE FROM validacion_var5validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s; +# """ +# 
cursor.execute(sql, [estacion_id, fecha_inicio_dato, fecha_fin_dato]) +# cursor.close() +# else: +# with connection.cursor() as cursor: +# sql = "DELETE FROM validacion_var%%var_id%%validado WHERE estacion_id = %s AND fecha >= %s AND fecha <= %s;" +# sql = sql.replace('%%var_id%%', str(variable_nombre)) +# print(sql) +# cursor.execute(sql, [estacion_id, fecha_inicio_dato, fecha_fin_dato]) +# cursor.close() +# if variable.var_id == 4 or variable.var_id == 5: +# with connection.cursor() as cursor: +# cursor.callproc('insertar_viento_validacion', [estacion_id, cambios_json]) +# resultado = cursor.fetchone()[0] +# cursor.close() +# else: +# with connection.cursor() as cursor: +# modelo = functions.normalize(variable.var_nombre).replace(" de ", "") +# modelo = modelo.replace(" ", "") +# variable_nombre = str(modelo) +# cursor.callproc('insertar_' + variable.var_modelo.lower() + '_validacion', [estacion_id, cambios_json]) +# resultado = cursor.fetchone()[0] +# print(resultado) +# cursor.close() +# +# if resultado: +# lista = {'resultado': resultado} +# fecha_inicio = cambios_lista[0]['fecha'] +# fecha_fin = cambios_lista[-1]['fecha'] +# t = Thread(target=guardar_cambios_validacion, args=(estacion_id, variable, 'insert', fecha_inicio, fecha_fin)) +# t.start() +# else: +# lista = {'resultado': False} +# print(resultado) +# return JsonResponse(lista) +# +# + +# Pasar los datos crudos a validados +@permission_required('validacion_v2.validacion_diaria') +def guardar_validados(request): + # Verificando datos json para evitar inyeccion SQL + estacion_id = int(request.POST.get('estacion_id', None)) + variable_id = int(request.POST.get('variable_id', None)) + limite_superior = float(request.POST.get('limite_superior', None)) + limite_inferior = float(request.POST.get('limite_inferior', None)) + cambios_json = request.POST.get('cambios', None) + + variable = Variable.objects.get(var_id=variable_id) + cambios_lista = json.loads(cambios_json) + + + condiciones = functions.get_condiciones(cambios_lista) + + resultado = functions.pasar_crudos_validados(cambios_lista, variable, estacion_id, + condiciones, limite_superior, limite_inferior) + + if resultado: + fecha_inicio = cambios_lista[0]['fecha'] + fecha_fin = cambios_lista[-1]['fecha'] + lista = {'resultado': resultado} + t = Thread(target=guardar_cambios_validacion, args=(estacion_id, variable, 'insert', fecha_inicio, fecha_fin)) + t.start() + + else: + lista = {'resultado': False} + + return JsonResponse(lista) + +# # Pasar los datos crudos a validados +# @permission_required('validacion_v2.validacion_diaria') +# def guardar_validados(request): +# # Verificando datos json para evitar inyeccion SQL +# estacion_id = int(request.POST.get('estacion_id', None)) +# variable_id = int(request.POST.get('variable_id', None)) +# limite_superior = float(request.POST.get('limite_superior', None)) +# limite_inferior = float(request.POST.get('limite_inferior', None)) +# cambios_json = request.POST.get('cambios', None) +# +# variable = Variable.objects.get(var_id=variable_id) +# cambios_lista = json.loads(cambios_json) +# +# condiciones = functions.get_condiciones(cambios_lista) +# +# resultado = functions.pasar_crudos_validados(cambios_lista, variable, estacion_id, +# condiciones, limite_superior, limite_inferior) +# +# if resultado: +# fecha_inicio = cambios_lista[0]['fecha'] +# fecha_fin = cambios_lista[-1]['fecha'] +# lista = {'resultado': resultado} +# t = Thread(target=guardar_cambios_validacion, args=(estacion_id, variable, 'insert', fecha_inicio, fecha_fin)) +# 
t.start() +# +# else: +# lista = {'resultado': False} +# +# return JsonResponse(lista) + + +# # Permite eliminar los datos validados +# @permission_required('validacion_v2.validacion_diaria') +# def eliminar_validados(request): +# estacion_id = int(request.POST.get('estacion_id', None)) +# variable_id = int(request.POST.get('variable_id', None)) +# cambios_json = request.POST.get('cambios', None) +# +# variable = Variable.objects.get(var_id=variable_id) +# +# cambios_lista = json.loads(cambios_json) +# condiciones = functions.get_condiciones(cambios_lista) +# resultado = functions.eliminar_datos_validacion(cambios_lista, variable, estacion_id, condiciones) +# lista = {'resultado': resultado} +# fecha_inicio = cambios_lista[0]['fecha'] +# fecha_fin = cambios_lista[-1]['fecha'] +# t = Thread(target=guardar_cambios_validacion, args=(estacion_id, variable, 'delete', fecha_inicio, fecha_fin)) +# t.start() +# +# return JsonResponse(lista) +# +# +# # La funcion esta duplicada +# '''def eliminar_validados(request): +# estacion_id = int(request.POST.get('estacion_id', None)) +# variable_id = int(request.POST.get('variable_id', None)) +# cambios_json = request.POST.get('cambios', None) +# +# variable = Variable.objects.get(var_id=variable_id) +# +# cambios_lista = json.loads(cambios_json) +# condiciones = functions.get_condiciones(cambios_lista) +# resultado = functions.eliminar_datos_validacion(cambios_lista, variable, estacion_id, condiciones) +# lista = {'resultado': resultado} +# +# return JsonResponse(lista)''' +# +# +# class ValidacionList(PermissionRequiredMixin, FormView): +# template_name = 'validacion_v2/periodos_validacion.html' +# permission_required = 'validacion_v2.validacion_diaria' +# form_class = ValidacionSearchForm +# success_url = '/medicion/filter/' +# lista = [] +# +# def post(self, request, *args, **kwargs): +# estacion_id = None +# variable_id = None +# try: +# estacion_id = int(request.POST.get('estacion', None)) +# variable_id = int(request.POST.get('variable', None)) +# m = 'Var' + str(variable_id) + 'Medicion' +# m = globals()[m] +# fechaa = m.objects.filter(estacion_id__exact=estacion_id).aggregate(Max('fecha'), Min('fecha')) +# # print(fechaa['fecha__min']) +# inicio = str(fechaa['fecha__min']) +# # print(inicio) +# variable = Variable.objects.get(var_id=variable_id) +# # res = variable.var_modelo.lower() +# except: +# pass +# +# intervalos = functions.periodos_validacion2(estacion_id, variable, inicio) +# return render(request, self.template_name, {'intervalos': intervalos}) \ No newline at end of file From 02476a73eadfb9560e83f1dff9a4a700d2f054e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Sun, 16 Apr 2023 23:09:24 -0500 Subject: [PATCH 04/24] Some changes --- .idea/misc.xml | 5 +- README.md | 2 +- djangomain/urls.py | 1 + static/styles/global.css | 2 +- templates/base.html | 2 + validated/forms.py | 28 +++++----- validated/serializers.py | 4 +- .../static/validated/daily_validation.js | 4 ++ validated/static/validated/js_ajax2.js | 45 +++++++++++++--- validated/templates/daily_validation.html | 53 +++++++++++++------ validated/urls.py | 4 +- validated/views.py | 22 ++++---- 12 files changed, 116 insertions(+), 56 deletions(-) diff --git a/.idea/misc.xml b/.idea/misc.xml index faee0d92..22650b76 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -1,7 +1,10 @@ - + + + \ No newline at end of file diff --git a/README.md b/README.md index 88f5a852..62bff92d 100755 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ If installing this system from 
scratch: - If you want to load initial data (variables, units, stations...): - In a separate terminal run `docker exec -it bash` e.g. `docker exec -it paricia_web_1 bash` to start a bash session in the container. You can find the name of the container in the Docker Desktop GUI, or by running `docker container ls`. - Run `python manage.py shell < utilities/load_initial_data.py`. - - Create **admin** user running `python manage.py screatesuperuser`. + - Create **admin** user running `python manage.py createsuperuser`. ## Database Schema diff --git a/djangomain/urls.py b/djangomain/urls.py index d547fe2b..d93ce62e 100755 --- a/djangomain/urls.py +++ b/djangomain/urls.py @@ -55,6 +55,7 @@ path("measurement/", include("measurement.urls", namespace="measurement")), path("importing/", include("importing.urls", namespace="importing")), path("management/", include("management.urls", namespace="management")), + path("validated/", include("validated.urls", namespace="validated")), re_path( r"^swagger(?P\.json|\.yaml)$", schema_view.without_ui(cache_timeout=0), diff --git a/static/styles/global.css b/static/styles/global.css index e41bfa2b..795f0cc3 100755 --- a/static/styles/global.css +++ b/static/styles/global.css @@ -11,7 +11,7 @@ body{ */ body { - background: url('../images/fondo1.jpg') ; /* Referencia: static/styles/. */ + /* background: url('../images/fondo1.jpg') */ ; /* Referencia: static/styles/. */ background-color: rgb(127,127,127); background-blend-mode: screen; background-repeat: repeat; diff --git a/templates/base.html b/templates/base.html index 7f585a86..7968a817 100755 --- a/templates/base.html +++ b/templates/base.html @@ -44,6 +44,8 @@ + {% block extralibraries %} + {% endblock %} diff --git a/validated/forms.py b/validated/forms.py index 4f15891f..148560b4 100755 --- a/validated/forms.py +++ b/validated/forms.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- from django.forms import ModelForm, Form, ModelChoiceField, DateTimeField from django.forms import ModelForm -from validacion.models import Validacion -from estacion.models import Estacion +# from validacion.models import Validacion +# from estacion.models import Estacion from station.models import Station from variable.models import Variable from django import forms -from estacion.models import Tipo +# from estacion.models import Tipo @@ -39,17 +39,17 @@ def __init__(self, *args, **kwargs): super(DailyValidationForm, self).__init__(*args, **kwargs) self.fields['station'].widget.attrs['placeholder'] = self.fields['station'].label -class ValidacionSearchForm(forms.Form): - estacion = forms.ModelChoiceField(queryset=Estacion.objects.order_by('est_codigo').filter(est_externa=False, tipo__in=(1,2,3)), empty_label="Estación") - variable = forms.ModelChoiceField(queryset=Variable.objects.order_by('var_id').exclude(var_id='10'), empty_label="Variable") - inicio = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Inicio", required=True) - fin = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Fin", required=True) - limite_inferior = forms.IntegerField(required=False) - limite_superior = forms.IntegerField(required=False) - #revalidar = forms.BooleanField(label="Revalidar", help_text='Marcar si deseas borrar la última validacion') - def __init__(self, *args, **kwargs): - super(ValidacionSearchForm, self).__init__(*args, **kwargs) - self.fields['estacion'].widget.attrs['placeholder'] = 
self.fields['estacion'].label +# class ValidacionSearchForm(forms.Form): +# estacion = forms.ModelChoiceField(queryset=Estacion.objects.order_by('est_codigo').filter(est_externa=False, tipo__in=(1,2,3)), empty_label="Estación") +# variable = forms.ModelChoiceField(queryset=Variable.objects.order_by('var_id').exclude(var_id='10'), empty_label="Variable") +# inicio = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Inicio", required=True) +# fin = forms.DateField(widget=forms.TextInput(attrs={'autocomplete': 'off'}), input_formats=['%Y-%m-%d'], label="Fecha de Fin", required=True) +# limite_inferior = forms.IntegerField(required=False) +# limite_superior = forms.IntegerField(required=False) +# #revalidar = forms.BooleanField(label="Revalidar", help_text='Marcar si deseas borrar la última validacion') +# def __init__(self, *args, **kwargs): +# super(ValidacionSearchForm, self).__init__(*args, **kwargs) +# self.fields['estacion'].widget.attrs['placeholder'] = self.fields['estacion'].label # # class BorrarForm(Form): diff --git a/validated/serializers.py b/validated/serializers.py index d0ab282b..d6acafab 100755 --- a/validated/serializers.py +++ b/validated/serializers.py @@ -5,12 +5,12 @@ AtmosphericPressure, BatteryVoltage, ChlorineConcentrationDepth, - DischargeCurve, + # DischargeCurve, Flow, FlowManual, Humidity, IndirectRadiation, - LevelFunction, + # LevelFunction, OxygenConcentrationDepth, PercentageOxygenConcentrationDepth, PhycocyaninDepth, diff --git a/validated/static/validated/daily_validation.js b/validated/static/validated/daily_validation.js index 8ebb2c75..4e78421e 100755 --- a/validated/static/validated/daily_validation.js +++ b/validated/static/validated/daily_validation.js @@ -531,6 +531,7 @@ function guardar_crudos(event){ // Consultar la serie de datos diarios desde el servidor de base de datos function actualizar_tabla_diario(){ + debugger; var $table = $('#table_diario'); var var_id = $("#id_variable").val(); var flag_error = false; @@ -556,11 +557,13 @@ function actualizar_tabla_diario(){ data: $("#form_validacion").serialize(), type:'POST', beforeSend: function () { + debugger; //activar_espera(); $table.bootstrapTable('showLoading'); }, success: function (data) { + debugger; $("#btn_buscar").attr("disabled", false); for (var key in data){ if (key == 'error'){ @@ -625,6 +628,7 @@ function actualizar_tabla_diario(){ }, error: function () { + debugger; $table.bootstrapTable('hideLoading'); $("#div_body_mensaje").html('Ocurrio un problema con la validación por favor contacte con el administrador') $("#div_mensaje_validacion").modal("show"); diff --git a/validated/static/validated/js_ajax2.js b/validated/static/validated/js_ajax2.js index ced37d0f..cdf6db25 100755 --- a/validated/static/validated/js_ajax2.js +++ b/validated/static/validated/js_ajax2.js @@ -1,7 +1,7 @@ $(document).ready(function() { //activar tooltip - $('[data-toggle="tooltip"]').tooltip() + $('[data-toggle="tooltip"]').tooltip(); // Comparar Variables @@ -218,24 +218,54 @@ $(document).ready(function() { //datepicker con intervalo restringido +// var dateFormat = "yy-mm-dd"; +// $( "#id_inicio" ).datepicker({ +// changeMonth: true, +// changeYear: true, +// dateFormat:"yy-mm-dd", +// yearRange: '2000:'+(new Date).getFullYear() +// }); +// $( "#id_inicio" ).on( "change", function() { +// $( "#id_fin" ).datepicker( "option", "minDate", getDate( this ) ); +// }); +// $( "#id_fin" ).datepicker({ +// changeMonth: true, +// changeYear: true, +// 
dateFormat:"yy-mm-dd", +// yearRange: '2000:'+(new Date).getFullYear() +// }); +// $( "#id_fin" ).on( "change", function() { +// $( "#id_inicio" ).datepicker( "option", "maxDate", getDate( this ) ); +// }); +// +// function getDate( element ) { +// var date; +// try { +// date = $.datepicker.parseDate( dateFormat, element.value ); +// } catch( error ) { +// date = null; +// } +// return date; +// } + var dateFormat = "yy-mm-dd"; - $( "#id_inicio" ).datepicker({ + $( "#id_start_date" ).datepicker({ changeMonth: true, changeYear: true, dateFormat:"yy-mm-dd", yearRange: '2000:'+(new Date).getFullYear() }); - $( "#id_inicio" ).on( "change", function() { - $( "#id_fin" ).datepicker( "option", "minDate", getDate( this ) ); + $( "#id_start_date" ).on( "change", function() { + $( "#id_end_date" ).datepicker( "option", "minDate", getDate( this ) ); }); - $( "#id_fin" ).datepicker({ + $( "#id_end_date" ).datepicker({ changeMonth: true, changeYear: true, dateFormat:"yy-mm-dd", yearRange: '2000:'+(new Date).getFullYear() }); - $( "#id_fin" ).on( "change", function() { - $( "#id_inicio" ).datepicker( "option", "maxDate", getDate( this ) ); + $( "#id_end_date" ).on( "change", function() { + $( "#id_start_time" ).datepicker( "option", "maxDate", getDate( this ) ); }); function getDate( element ) { @@ -248,7 +278,6 @@ $(document).ready(function() { return date; } - function periodos_validacion(){ token = $("input[name='csrfmiddlewaretoken']").val(); estacion_id = $("input[name='orig_estacion_id']").val(); diff --git a/validated/templates/daily_validation.html b/validated/templates/daily_validation.html index d758daf9..fa2f0ed6 100755 --- a/validated/templates/daily_validation.html +++ b/validated/templates/daily_validation.html @@ -1,22 +1,42 @@ {% extends "base.html" %} -{% block content %} + {% load bootstrap4 %} {% load static %} - - + +{% block extralibraries %} + + + + + + + + + + + + + + + + + + - - - - - + + - - + + + + + + + - - +{% endblock %} + + +{% block content %} +
@@ -70,7 +91,7 @@
Validación Datos Crudos por Frecuencia Diaria
@@ -80,7 +101,7 @@
Validación Datos Crudos por Frecuencia Diaria
-
+
{% csrf_token %} {% for field in form%} diff --git a/validated/urls.py b/validated/urls.py index 8c604e47..5b4f7737 100755 --- a/validated/urls.py +++ b/validated/urls.py @@ -18,7 +18,7 @@ app_name = "validated" urlpatterns = [ - path("polarwind/", views.PolarWindList.as_view()), + # path("polarwind/", views.PolarWindList.as_view()), # TODO Verify if it's not really needed # path("dischargecurve/", views.DischargeCurveList.as_view()), # path("levelfunction/", views.LevelFunctionList.as_view()), @@ -59,7 +59,7 @@ ################ # # path('validacion_v2/periodos_validacion/', views.PeriodosValidacion.as_view(), name='v2_periodos_validacion'), # # path('validacion_v2/borrar/', views.ValidacionBorrar.as_view(), name='v2_borrar'), - path('validated/daily_validation/', views.DailyValidation.as_view(), name='daily_validation'), + path('daily_validation/', views.DailyValidation.as_view(), name='daily_validation'), # path('val2/diaria/', views.ValidationReport.as_view(), name='diaria'), # path('val2/lista//////', # views.ListaValidacion.as_view(), diff --git a/validated/views.py b/validated/views.py index 94e55ae4..9f488d21 100755 --- a/validated/views.py +++ b/validated/views.py @@ -32,20 +32,20 @@ # LevelFunctionFilter, ValidatedFilter, ValidatedFilterDepth, - PolarWindFilter, + # PolarWindFilter, ) -from .forms import LevelFunctionForm -from validated.others.functions import level_function_table +# from .forms import LevelFunctionForm +# from validated.others.functions import level_function_table -class PolarWindList(generics.ListAPIView): - """ - List all measurements of Polar Wind. - """ - - queryset = vali.PolarWind.objects.all() - serializer_class = serializers.PolarWindSerializer - filterset_class = PolarWindFilter +# class PolarWindList(generics.ListAPIView): +# """ +# List all measurements of Polar Wind. 
+# """ +# +# queryset = vali.PolarWind.objects.all() +# serializer_class = serializers.PolarWindSerializer +# filterset_class = PolarWindFilter # class DischargeCurveList(generics.ListAPIView): From ca9badf991c6eeea6945872874bf20abbc38335a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Thu, 20 Apr 2023 00:53:34 -0500 Subject: [PATCH 05/24] Updates for validation calculations --- daily/migrations/0001_initial.py | 0 formatting/migrations/0001_initial.py | 0 hourly/migrations/0001_initial.py | 0 importing/migrations/0001_initial.py | 0 importing/migrations/0002_initial.py | 0 management/migrations/0001_initial.py | 0 measurement/migrations/0001_initial.py | 0 monthly/migrations/0001_initial.py | 0 sensor/migrations/0001_initial.py | 0 station/migrations/0001_initial.py | 0 validated/functions.py | 136 ++++++++++++---------- validated/migrations/0001_initial.py | 0 validated/models.py | 2 +- validated/templates/daily_validation.html | 5 +- validated/views.py | 42 ++++--- variable/migrations/0001_initial.py | 0 16 files changed, 104 insertions(+), 81 deletions(-) mode change 100644 => 100755 daily/migrations/0001_initial.py mode change 100644 => 100755 formatting/migrations/0001_initial.py mode change 100644 => 100755 hourly/migrations/0001_initial.py mode change 100644 => 100755 importing/migrations/0001_initial.py mode change 100644 => 100755 importing/migrations/0002_initial.py mode change 100644 => 100755 management/migrations/0001_initial.py mode change 100644 => 100755 measurement/migrations/0001_initial.py mode change 100644 => 100755 monthly/migrations/0001_initial.py mode change 100644 => 100755 sensor/migrations/0001_initial.py mode change 100644 => 100755 station/migrations/0001_initial.py mode change 100644 => 100755 validated/functions.py mode change 100644 => 100755 validated/migrations/0001_initial.py mode change 100644 => 100755 variable/migrations/0001_initial.py diff --git a/daily/migrations/0001_initial.py b/daily/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/formatting/migrations/0001_initial.py b/formatting/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/hourly/migrations/0001_initial.py b/hourly/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/importing/migrations/0001_initial.py b/importing/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/importing/migrations/0002_initial.py b/importing/migrations/0002_initial.py old mode 100644 new mode 100755 diff --git a/management/migrations/0001_initial.py b/management/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/measurement/migrations/0001_initial.py b/measurement/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/monthly/migrations/0001_initial.py b/monthly/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/sensor/migrations/0001_initial.py b/sensor/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/station/migrations/0001_initial.py b/station/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/validated/functions.py b/validated/functions.py old mode 100644 new mode 100755 index 1efb3018..7728c36b --- a/validated/functions.py +++ b/validated/functions.py @@ -9,8 +9,9 @@ from datetime import datetime, timedelta, time -def reporte_diario(station, variable, start_time, end_time, maximum, minimum): - reporte, series = reporte_diario_dataframes(station, variable, start_time, end_time, maximum, minimum) +# def 
reporte_diario(station, variable, start_time, end_time, maximum, minimum): +def daily_validation(station, variable, start_time, end_time, minimum, maximum): + reporte, series = daily_report_dataframes(station, variable, start_time, end_time, minimum, maximum) # reporte, series = calculo_reporte_diario(station, variable, start_time, end_time, maximum, minimum) reporte.rename( columns={ @@ -40,7 +41,7 @@ def reporte_diario(station, variable, start_time, end_time, maximum, minimum): # response = acumulado.to_dict(orient='list') # response = _records.to_dict(orient='records') - if variable.var_id in [4, 5]: + if variable.variable_id in [4, 5]: reporte['n_valor'] = 0 else: reporte['n_valor'] = reporte['c_varia_err'] @@ -52,17 +53,16 @@ def reporte_diario(station, variable, start_time, end_time, maximum, minimum): num_dias = len(reporte.index) data = {'estacion': [{ - 'est_id': station.est_id, - 'est_nombre': station.est_nombre, + 'est_id': station.station_id, + 'est_nombre': station.station_name, }], 'variable': [{ - 'var_id': variable.var_id, - 'var_nombre': variable.var_nombre, - 'var_maximo': variable.var_maximo, - 'var_minimo': variable.var_minimo, - 'var_unidad_sigla': variable.uni_id.uni_sigla, - 'var_unidad_sigla': variable.uni_id.uni_sigla, - 'es_acumulada': variable.es_acumulada, + 'var_id': variable.variable_id, + 'var_nombre': variable.name, + 'var_maximo': variable.maximum, + 'var_minimo': variable.minimum, + 'var_unidad_sigla': variable.unit.initials, + 'es_acumulada': variable.is_cumulative, }], 'datos': reporte.fillna('').to_dict(orient='records'), 'indicadores': [{ @@ -82,39 +82,31 @@ def reporte_diario(station, variable, start_time, end_time, maximum, minimum): # Basic calculations is the main functions for calculations def basic_calculations(station, variable, start_time, end_time, inf_lim_variable, sup_lim_variable): + # TODO change tx_period for the code to query from model tx_period = 5 - try: - model_name = 'Var' + str(variable.var_id) - variable = Variable.objects.get(pk=variable.var_id) - station = Estacion.objects.get(pk=station.est_id) - except: - model_name = 'Var' + str(variable) - variable = Variable.objects.get(pk=variable) - station = Estacion.objects.get(pk=station) + Measurement = apps.get_model(app_label='measurement', model_name=variable.variable_code) + Validated = apps.get_model(app_label='validated', model_name=variable.variable_code) - Measurement = apps.get_model(app_label='medicion', model_name=model_name+'Medicion') - Validated = apps.get_model(app_label='validacion', model_name=model_name+'Validado') - - - # filter_args = {} - # validated = pd.DataFrame.from_records(validated.values(**filter_args)) validated = Validated.objects.filter( - estacion_id=station.est_id, - fecha__gte=start_time, - fecha__lte=end_time + station_id=station.station_id, + time__gte=start_time, + time__lte=end_time ).annotate( - is_validated=Value(True, output_field=BooleanField()),# True is for 'validated' tables, False for raw 'measurement' + # is_validated: True is for 'validated' tables, False for raw 'measurement' + is_validated=Value(True, output_field=BooleanField()), exists_in_validated=Value(True, output_field=BooleanField()), null_value=Value(False, output_field=BooleanField()) - ).order_by('fecha') + ).order_by('time') + + value_fields = ('value', 'minimum', 'maximum') + base_fields = ('id', 'time', 'is_validated', 'exists_in_validated', 'null_value') + fields = base_fields + value_fields validated = pd.DataFrame.from_records( - validated.values('id', 'fecha', 
'is_validated', 'valor', 'maximo', 'minimo', 'exists_in_validated', 'null_value') + validated.values(*fields) ) - # TODO: eliminar - validated = validated.rename(columns={'fecha':'time', 'valor':'value', 'maximo':'maximum', 'minimo':'minimum'}) + if validated.empty: - validated = pd.DataFrame(columns=['id', 'time', 'is_validated', 'value', 'maximum', 'minimum', - 'exists_in_validated', 'null_value']) + validated = pd.DataFrame(columns=fields) # TODO WHich one is faster? # # validated['time'] = validated['time'].dt.floor('min') @@ -124,20 +116,23 @@ def basic_calculations(station, variable, start_time, end_time, inf_lim_variable # validated['date'] = pd.to_datetime(validated['time']).dt.date measurement = Measurement.objects.filter( - estacion_id=station.est_id, - fecha__gte=start_time, - fecha__lte=end_time + station_id=station.station_id, + time__gte=start_time, + time__lte=end_time ).annotate( - is_validated=Value(False, output_field=BooleanField()),# True is for 'validated' tables, False for raw 'measurement' - ).order_by('fecha') + # is_validated: True is for 'validated' tables, False for raw 'measurement' + is_validated=Value(False, output_field=BooleanField()), + ).order_by('time') + + value_fields = ('value', 'minimum', 'maximum') + base_fields = ('id', 'time', 'is_validated') + fields = base_fields + value_fields measurement = pd.DataFrame.from_records( - measurement.values('id', 'fecha', 'is_validated', 'valor', 'maximo', 'minimo') + measurement.values(*fields) ) - # TODO eliminar - measurement = measurement.rename(columns={'fecha':'time', 'valor':'value', 'maximo':'maximum', 'minimo':'minimum'}) + if measurement.empty: - measurement = pd.DataFrame(columns=['id', 'time', 'is_validated', 'value', 'maximum', 'minimum', - 'exists_in_validated', 'null_value']) + measurement = pd.DataFrame(columns=fields) measurement['time_truncated'] = measurement['time'].values.astype(' 'period' # - daily['value_difference_error_count'] = daily_group['value_difference_error'].sum(numeric_only=False).to_numpy() + # TODO Check what would be the best option + # daily['value_difference_error_count'] = daily_group['value_difference_error'].sum(numeric_only=False).to_numpy() + daily['value_difference_error_count'] = daily_group_all['value_difference_error'].sum(numeric_only=False).to_numpy() # # REF. 
NAME: lapsos_dias @@ -357,16 +363,20 @@ def reporte_diario_dataframes(station, variable, start_time, end_time, minimum, pd.DatetimeIndex(daily['date']).month, pd.DatetimeIndex(daily['date']).day ))) - historic_diary = Var2Diario.objects.filter(estacion_id=station.est_id).extra( - where=["(date_part('month', fecha), date_part('day', fecha)) in %s"], + Daily = apps.get_model(app_label='daily', model_name=variable.variable_code) + historic_diary = Daily.objects.filter(station_id=station.station_id).extra( + where=["(date_part('month', date), date_part('day', date)) in %s"], params=[month_day_tuples] ) historic_diary = pd.DataFrame(list(historic_diary.values())) - historic_diary = historic_diary.rename(columns={'fecha':'date', 'valor':'value'}) - historic_diary['month-day'] = pd.DatetimeIndex(historic_diary['date']).month.astype(str) \ - + '-' + pd.DatetimeIndex(historic_diary['date']).day.astype(str) - historic_diary_group = historic_diary.groupby(['month-day']) - daily['historic_diary_avg'] = historic_diary_group['value'].mean().to_numpy() + if not historic_diary.empty: + historic_diary['month-day'] = pd.DatetimeIndex(historic_diary['date']).month.astype(str) \ + + '-' + pd.DatetimeIndex(historic_diary['date']).day.astype(str) + historic_diary_group = historic_diary.groupby(['month-day']) + daily['historic_diary_avg'] = historic_diary_group['value'].mean().to_numpy() + else: + daily['historic_diary_avg'] = np.nan + # estado : state @@ -389,7 +399,7 @@ def reporte_diario_dataframes(station, variable, start_time, end_time, minimum, daily['suspicious_maximums_count'].fillna(0, inplace=True) daily['suspicious_minimums_count'].fillna(0, inplace=True) daily['value_difference_error_count'].fillna(0, inplace=True) - + daily['historic_diary_avg'].fillna('', inplace=True) ## # TODO check, maybe it's not needed anymore @@ -401,12 +411,16 @@ def reporte_diario_dataframes(station, variable, start_time, end_time, minimum, # Round decimals # TODO cambiar 'valor' por 'value' en pAricia - decimal_places = Measurement._meta.get_field('valor').decimal_places + Measurement = apps.get_model(app_label='measurement', model_name=variable.variable_code) + decimal_places = Measurement._meta.get_field('value').decimal_places daily['avg_value'] = daily['avg_value'].astype(np.float64).round(decimal_places) daily['max_maximum'] = daily['max_maximum'].astype(np.float64).round(decimal_places) daily['min_minimum'] = daily['min_minimum'].astype(np.float64).round(decimal_places) daily['data_existence_percentage'] = daily['data_existence_percentage'].astype(np.float64).round(1) - daily.reset_index(names='id', inplace=True) + + # daily.reset_index(names='id', inplace=True) + daily.index.name = 'id' + daily.reset_index(inplace=True) ## TODO Eliminar o corregir ids -> id # # daily.rename(columns={'id':'ids',}, inplace=True) diff --git a/validated/migrations/0001_initial.py b/validated/migrations/0001_initial.py old mode 100644 new mode 100755 diff --git a/validated/models.py b/validated/models.py index 04037796..10a42032 100755 --- a/validated/models.py +++ b/validated/models.py @@ -102,7 +102,7 @@ class Meta: # TODO Copy the decimal_places and max_digits from a measurement model # meas.Precipitation._meta.get_field('Value').max_digits # meas.Precipitation._meta.get_field('Value').decimal_places -class Precipitation(create_vali_model(digits=6, decimals=2, fields=("Total",))): +class Precipitation(create_vali_model(digits=6, decimals=2, fields=("Value",))): """Precipitation.""" diff --git 
a/validated/templates/daily_validation.html b/validated/templates/daily_validation.html index fa2f0ed6..532f8d03 100755 --- a/validated/templates/daily_validation.html +++ b/validated/templates/daily_validation.html @@ -17,8 +17,7 @@ - - + @@ -36,6 +35,8 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +{% endblock %} + + +{% block content %} + +
+ + + +
+
+ Validation module +
+ +
+ +
+ + + +
+ +
+ + +
+ {% csrf_token %} + {% for field in form%} + {% bootstrap_field field show_label=False form_group_class='col-lg-3 col-md-3 col-sm-6 mt-3' %} + {% endfor %} +
+ + +
+
+ + + +
+ + +
+
+ +
+
+ + + + + +
+
+
+ + +
+
+ Filtro Fecha + +
+ +
+ Filtro Porcentaje + +
+ +
+ Filtro Valor + +
+
+ + + + +
+
+ +
+
+
+ + +
+
+ +
+
+ + + + + + +
+
+
+ + + + + + + + + + + + +
Id | Fecha | Porcentaje | Valor | Acción
+ + +
+
+
+ + +
+
+ + Filtro Fecha + +
+
+ Filtro Valor + +
+
+ Valores atípicos + +
+
+ Filtro Estado + +
+
+ Filtro Variación + +
+ +
+ + +
+
+ + +
+
+
+ + +
+ +
+ +
+
+ + + + + +
+
+
+ + + + + + + + + + + + + + + +
Id | Fecha | Valor | Variación consecutiva | Valores Atípicos | Comentario | Acción
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+{% endblock %} diff --git a/validated/views.py b/validated/views.py index 3fcd0a39..480e3157 100755 --- a/validated/views.py +++ b/validated/views.py @@ -49,29 +49,6 @@ # filterset_class = PolarWindFilter -# class DischargeCurveList(generics.ListAPIView): -# """ -# List all measurements of Discharge Curve. -# """ -# -# queryset = vali.DischargeCurve.objects.all() -# serializer_class = serializers.DischargeCurveSerializer -# filterset_class = DischargeCurveFilter -# -# -# class LevelFunctionList(generics.ListAPIView): -# """ -# List all measurements of Level Function. -# """ -# -# queryset = vali.LevelFunction.objects.all() -# serializer_class = serializers.LevelFunctionSerializer -# filterset_class = LevelFunctionFilter -# - -############################################################## - - class ValidatedListBase(generics.ListAPIView): """ Base class for the measurement list views that all use the @@ -312,108 +289,6 @@ class PhycocyaninDepthList(ValidatedDepthListBase): ######################################################################################## -# class DischargeCurveDetail(PermissionRequiredMixin, DetailView): -# model = DischargeCurve -# permission_required = "validated.view_dischargecurve" -# -# def get_context_data(self, **kwargs): -# context = super().get_context_data(**kwargs) -# dischargecurve_id = self.object.pk -# context["levelfunctiontable"] = level_function_table(dischargecurve_id) -# return context - - -# class LevelFunctionCreate(PermissionRequiredMixin, CreateView): -# permission_required = "validated.add_dischargecurve" -# model = LevelFunction -# form_class = LevelFunctionForm -# -# def post(self, request, *args, **kwargs): -# dischargecurve_id = kwargs.get("id") -# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) -# form = LevelFunctionForm(self.request.POST or None) -# try: -# # Verify if form is correct -# levelfunction = form.save(commit=False) -# except Exception: -# # If it is not, send an informative message. 
-# _levelfunctiontable = level_function_table(dischargecurve_id) -# new_levelfunction = render( -# request, -# "measurement/levelfunction_form.html", -# {"form": LevelFunctionForm(self.request.POST or None)}, -# ) -# return render( -# request, -# "measurement/dischargecurve_detail.html", -# { -# "dischargecurve": dischargecurve, -# "levelfunctiontable": _levelfunctiontable, -# "new_levelfunction": new_levelfunction.content.decode("utf-8"), -# }, -# ) -# levelfunction.dischargecurve = dischargecurve -# levelfunction.save() -# dischargecurve.requiere_recalculo_caudal = True -# dischargecurve.save() -# url = reverse( -# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} -# ) -# return HttpResponseRedirect(url) -# -# def get_context_data(self, **kwargs): -# context = super(LevelFunctionCreate, self).get_context_data(**kwargs) -# context["title"] = "Create" -# dischargecurve_id = self.kwargs.get("id") -# context["url"] = reverse( -# "measurement:levelfunction_create", kwargs={"id": dischargecurve_id} -# ) -# return context -# -# -# class LevelFunctionUpdate(PermissionRequiredMixin, UpdateView): -# permission_required = "validated.change_dischargecurve" -# model = LevelFunction -# fields = ["level", "function"] -# -# def get_context_data(self, **kwargs): -# context = super().get_context_data(**kwargs) -# context["title"] = "Modify" -# levelfunction_pk = self.kwargs.get("pk") -# context["url"] = reverse( -# "measurement:levelfunction_update", kwargs={"pk": levelfunction_pk} -# ) -# context["dischargecurve_id"] = self.object.dischargecurve.id -# return context -# -# def post(self, request, *args, **kwargs): -# data = request.POST.copy() -# dischargecurve_id = data.get("dischargecurve_id") -# dischargecurve = DischargeCurve.objects.get(pk=dischargecurve_id) -# dischargecurve.require_recalculate_flow = True -# dischargecurve.save() -# self.success_url = reverse( -# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve_id} -# ) -# return super().post(data, **kwargs) -# -# -# class LevelFunctionDelete(PermissionRequiredMixin, DeleteView): -# permission_required = "validated.delete_dischargecurve" -# model = LevelFunction -# -# def delete(self, request, *args, **kwargs): -# self.object = self.get_object() -# dischargecurve = self.object.dischargecurve -# dischargecurve.require_recalculate_flow = True -# dischargecurve.save() -# self.object.delete() -# return HttpResponseRedirect( -# reverse( -# "measurement:dischargecurve_detail", kwargs={"pk": dischargecurve.id} -# ) -# ) -# # # @permission_required("validated.add_dischargecurve") # def recalculate_flow(request): @@ -468,9 +343,7 @@ class PhycocyaninDepthList(ValidatedDepthListBase): class DailyValidation(FormView): template_name = "daily_validation.html" form_class = DailyValidationForm - # success_url = '/val2/diaria/' success_url = "/validated/daily_validation/" - permission_required = "validated.daily_validation" def post(self, request, *args, **kwargs): form = DailyValidationForm(self.request.POST or None) @@ -513,7 +386,6 @@ def get(self, request, *args, **kwargs): # Pasar los datos crudos a validados -@permission_required("validacion_v2.validacion_diaria") def guardar_validados(request): station_id = int(request.POST.get("station_id", None)) variable_id = int(request.POST.get("variable_id", None)) From b9720426103bac245e214d41c47702ac119d23b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Thu, 18 May 2023 23:28:32 -0500 Subject: [PATCH 10/24] Validation module GUI: Creating Tab panels for 
plot and tables --- .../static/validated/daily_validation.js | 28 +++- validated/templates/daily_validation.html | 150 +----------------- .../daily_validation_right_panel.html | 23 +++ .../templates/daily_validation_tab1.html | 40 +++++ .../templates/daily_validation_tab2.html | 2 + .../templates/daily_validation_tab3.html | 97 +++++++++++ 6 files changed, 195 insertions(+), 145 deletions(-) create mode 100644 validated/templates/daily_validation_right_panel.html create mode 100644 validated/templates/daily_validation_tab1.html create mode 100644 validated/templates/daily_validation_tab2.html create mode 100644 validated/templates/daily_validation_tab3.html diff --git a/validated/static/validated/daily_validation.js b/validated/static/validated/daily_validation.js index 76d1681f..54b85a49 100755 --- a/validated/static/validated/daily_validation.js +++ b/validated/static/validated/daily_validation.js @@ -123,7 +123,7 @@ function grafico_dispersion_plotly(series, append_to, variable){ }; const miDiv = document.querySelector("#" + append_to); - miDiv.style.height = "500px"; + miDiv.style.height = "450px"; Plotly.newPlot(append_to, data_array, layout, {renderer: 'webgl'}); } @@ -677,6 +677,8 @@ function actualizar_tabla_diario(){ grafico_dispersion_plotly(data.series, "div_informacion", data.variable[0]); } + + // $("#resize_plot").show("slow"); // $("#div_informacion").show("slow"); // window.gid = $('.plotly-graph-div,.js-plotly-plot').attr('id'); @@ -696,7 +698,7 @@ function actualizar_tabla_diario(){ $table.bootstrapTable({ columns:columns, data: datos_json, - height: 458, + height: 420, showFooter: true, uniqueId: 'id', rowStyle: style_fila @@ -728,9 +730,29 @@ function actualizar_tabla_diario(){ }); } - + + var tab1 = document.getElementById("tab1-tab"); + tab1.click(); } + +function getTab(evt, tabName) { + + var i, tabpane; + tabpane = document.getElementsByClassName("tab-pane"); + for (i = 0; i < tabpane.length; i++) { + tabpane[i].style.display = "none"; + tabpane[i].classList.remove("show"); + } + + var e = document.getElementById(tabName); + e.classList.add("show"); + e.style.display = ""; +} + + + + // actualizar una fila de la tabla de datos diarios function modificar_fila(){ var $table = $('#table_crudo'); diff --git a/validated/templates/daily_validation.html b/validated/templates/daily_validation.html index 0dd13096..ca45ff60 100755 --- a/validated/templates/daily_validation.html +++ b/validated/templates/daily_validation.html @@ -74,7 +74,7 @@ } - .panel-derecho { + .right-panel { height: calc(100vh - 150px); overflow: auto; } @@ -108,7 +108,7 @@
-
+
{% csrf_token %} @@ -118,53 +118,16 @@
-
+
+ {% include "daily_validation_right_panel.html" %} + + + + -
-
- -
-
-
- - -
-
-
-
- - - - - - -
-
-
- - - - - - - - - - - -
IdDayPercentageValueAction
- -
-
@@ -213,103 +176,6 @@
-
-
- - Filtro Fecha - -
-
- Filtro Valor - -
-
- Valores atípicos - -
-
- Filtro Estado - -
-
- Filtro Variación - -
- -
- - -
-
- - -
-
-
- - -
- -
- -
-
- - - - - -
-
-
- - - - - - - - - - - - - - - -
IdFechaValorVariación consecutivaValores AtípicosComentarioAcción
-
diff --git a/validated/templates/daily_validation_right_panel.html b/validated/templates/daily_validation_right_panel.html new file mode 100644 index 00000000..bf9593ef --- /dev/null +++ b/validated/templates/daily_validation_right_panel.html @@ -0,0 +1,23 @@ + + +
+
+ {% include "daily_validation_tab1.html" %} +
+
+ {% include "daily_validation_tab2.html" %} +
+
+ {% include "daily_validation_tab3.html" %} +
+
\ No newline at end of file diff --git a/validated/templates/daily_validation_tab1.html b/validated/templates/daily_validation_tab1.html new file mode 100644 index 00000000..1575aabf --- /dev/null +++ b/validated/templates/daily_validation_tab1.html @@ -0,0 +1,40 @@ +
+
+ +
+
+
+ + +
+
+ +
+
+ + + + + + +
+
+
+ + + + + + + + + + + +
IdDayPercentageValueAction
\ No newline at end of file diff --git a/validated/templates/daily_validation_tab2.html b/validated/templates/daily_validation_tab2.html new file mode 100644 index 00000000..a7755ded --- /dev/null +++ b/validated/templates/daily_validation_tab2.html @@ -0,0 +1,2 @@ +
+
\ No newline at end of file diff --git a/validated/templates/daily_validation_tab3.html b/validated/templates/daily_validation_tab3.html new file mode 100644 index 00000000..d9ce3e2c --- /dev/null +++ b/validated/templates/daily_validation_tab3.html @@ -0,0 +1,97 @@ + +
+
+ + Filtro Fecha + +
+
+ Filtro Valor + +
+
+ Valores atípicos + +
+
+ Filtro Estado + +
+
+ Filtro Variación + +
+ +
+ + +
+
+ + +
+
+
+ + +
+ +
+ +
+
+ + + + + +
+
+
+ + + + + + + + + + + + + + + +
IdFechaValorVariación consecutivaValores AtípicosComentarioAcción
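For context, the tab switching added in this patch works by hiding every .tab-pane and re-showing the pane whose id is passed to getTab(evt, tabName); after the daily table is rebuilt, the script clicks #tab1-tab so the plot tab is displayed first. A minimal sketch of how the three tab headers might be wired to that handler follows. The ids tab1/tab2/tab3 and tab1-tab/tab2-tab/tab3-tab follow the ones referenced in daily_validation.js, but the wiring shown here is an illustrative assumption, not a copy of the template's actual markup.

// Illustrative wiring only (assumption, not part of the patch): hook the three tab
// headers up to the getTab(evt, tabName) handler added in daily_validation.js.
["tab1", "tab2", "tab3"].forEach(function (name) {
  var header = document.getElementById(name + "-tab");
  if (header) {
    header.onclick = function (evt) {
      getTab(evt, name); // hides every .tab-pane, then shows the pane whose id is `name`
    };
  }
});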
From e28c6dc2e8cf76e9c30f35b1de7d639b9b1aeb33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20J=C3=A1come?= Date: Sun, 21 May 2023 19:48:32 -0500 Subject: [PATCH 11/24] Validation: GUI translation to english first steps. Some python code fixed --- validated/forms.py | 6 +- validated/functions.py | 222 ++++++--- .../static/validated/daily_validation.js | 462 ++++++++++++------ validated/templates/daily_validation.html | 5 +- .../daily_validation_right_panel.html | 6 +- .../templates/daily_validation_tab1.html | 2 + .../templates/daily_validation_tab3.html | 2 +- 7 files changed, 462 insertions(+), 243 deletions(-) diff --git a/validated/forms.py b/validated/forms.py index 18354606..45efb1c3 100755 --- a/validated/forms.py +++ b/validated/forms.py @@ -12,7 +12,9 @@ class DailyValidationForm(forms.Form): station = forms.ModelChoiceField( - queryset=Station.objects.order_by("station_code"), empty_label="Station" + queryset=Station.objects.order_by("station_code"), + empty_label="Station", + initial=1, ) variable = forms.ModelChoiceField( queryset=Variable.objects.order_by("variable_code"), empty_label="Variable" @@ -22,12 +24,14 @@ class DailyValidationForm(forms.Form): label="Start date", required=True, widget=forms.TextInput(attrs={"autocomplete": "off"}), + initial="2023-03-01", ) end_date = forms.DateField( input_formats=["%Y-%m-%d"], label="End date", required=True, widget=forms.TextInput(attrs={"autocomplete": "off"}), + initial="2023-03-31", ) minimum = forms.DecimalField(required=False) maximum = forms.DecimalField(required=False) diff --git a/validated/functions.py b/validated/functions.py index 299f3a72..101faa4c 100755 --- a/validated/functions.py +++ b/validated/functions.py @@ -27,91 +27,142 @@ def set_time_limits(start_time, end_time): def daily_validation(station, variable, start_time, end_time, minimum, maximum): - reporte, selected, measurement, validated = daily_report( + report, selected, measurement, validated = daily_report( station, variable, start_time, end_time, minimum, maximum ) # reporte, series = calculo_reporte_diario(station, variable, start_time, end_time, maximum, minimum) - reporte.rename( - columns={ - "date": "fecha", - "date_error": "fecha_error", - # 'repeated_values_count':'fecha_numero', - "extra_data_count": "fecha_numero", - "avg_value": "valor", - "max_maximum": "maximo", - "min_minimum": "minimo", - "data_existence_percentage": "porcentaje", - # TODO Confirm if "is_null" must be replaced for "porcentaje_error" - # "is_null": "porcentaje_error", - "percentage_error": "porcentaje_error", - "value_error": "valor_error", - "maximum_error": "maximo_error", - "minimum_error": "minimo_error", - "suspicious_values_count": "valor_numero", - "suspicious_maximums_count": "maximo_numero", - "suspicious_minimums_count": "minimo_numero", - "historic_diary_avg": "media_historica", - "state": "estado", - "all_validated": "validado", - "value_difference_error_count": "c_varia_err", - }, - inplace=True, - ) + # reporte.rename( + # columns={ + # "date": "fecha", + # "date_error": "fecha_error", + # # 'repeated_values_count':'fecha_numero', + # "extra_data_count": "fecha_numero", + # "avg_value": "valor", + # "max_maximum": "maximo", + # "min_minimum": "minimo", + # "percentage": "porcentaje", + # # TODO Confirm if "is_null" must be replaced for "porcentaje_error" + # # "is_null": "porcentaje_error", + # "percentage_error": "porcentaje_error", + # "value_error": "valor_error", + # "maximum_error": "maximo_error", + # "minimum_error": "minimo_error", + # 
"suspicious_values_count": "valor_numero", + # "suspicious_maximums_count": "maximo_numero", + # "suspicious_minimums_count": "minimo_numero", + # "historic_diary_avg": "media_historica", + # "state": "estado", + # "all_validated": "validado", + # "value_difference_error_count": "c_varia_err", + # }, + # inplace=True, + # ) - # response = acumulado.to_dict(orient='list') - # response = _records.to_dict(orient='records') + # # response = acumulado.to_dict(orient='list') + # # response = _records.to_dict(orient='records') + # if variable.variable_id in [4, 5]: + # report["n_valor"] = 0 + # else: + # report["n_valor"] = report["c_varia_err"] if variable.variable_id in [4, 5]: - reporte["n_valor"] = 0 + report["n_value"] = 0 else: - reporte["n_valor"] = reporte["c_varia_err"] - num_fecha = len( - reporte[reporte["fecha_error"].ne(1) & ~reporte["fecha_error"].isna()].index + report["n_value"] = report["value_difference_error_count"] + + # num_fecha = len( + # report[report["fecha_error"].ne(1) & ~report["fecha_error"].isna()].index + # ) + num_date = len( + report[report["date_error"].ne(1) & ~report["date_error"].isna()].index ) - num_porcentaje = len(reporte[reporte["porcentaje_error"].eq(True)]) - num_valor = len( - reporte[reporte["porcentaje_error"].eq(False) & ~reporte["valor_numero"].isna()] + + # num_porcentaje = len(report[report["porcentaje_error"].eq(True)]) + num_percentage = len(report[report["percentage_error"].eq(True)]) + + # num_valor = len( + # report[report["porcentaje_error"].eq(False) & ~report["valor_numero"].isna()] + # ) + num_value = len( + report[ + report["percentage_error"].eq(False) + & ~report["suspicious_values_count"].isna() + ] ) - num_maximo = len( - reporte[ - reporte["porcentaje_error"].eq(False) & ~reporte["maximo_numero"].isna() + + # num_maximo = len( + # report[ + # report["porcentaje_error"].eq(False) & ~report["maximo_numero"].isna() + # ] + # ) + num_maximum = len( + report[ + report["percentage_error"].eq(False) + & ~report["suspicious_maximums_count"].isna() ] ) - num_minimo = len( - reporte[ - reporte["porcentaje_error"].eq(False) & ~reporte["minimo_numero"].isna() + + # num_minimo = len( + # report[ + # report["porcentaje_error"].eq(False) & ~report["minimo_numero"].isna() + # ] + # ) + num_minimum = len( + report[ + report["percentage_error"].eq(False) + & ~report["suspicious_minimums_count"].isna() ] ) - num_dias = len(reporte.index) + + # num_dias = len(report.index) + num_days = len(report.index) data = { - "estacion": [ + # "estacion": [ + "station": [ { - "est_id": station.station_id, - "est_nombre": station.station_name, + # "est_id": station.station_id, + "id": station.station_id, + # "est_nombre": station.station_name, + "name": station.station_name, } ], "variable": [ { - "var_id": variable.variable_id, - "var_nombre": variable.name, - "var_maximo": variable.maximum, - "var_minimo": variable.minimum, - "var_unidad_sigla": variable.unit.initials, - "es_acumulada": variable.is_cumulative, + # "var_id": variable.variable_id, + "id": variable.variable_id, + # "var_nombre": variable.name, + "name": variable.name, + # "var_maximo": variable.maximum, + "maximum": variable.maximum, + # "var_minimo": variable.minimum, + "minimum": variable.minimum, + # "var_unidad_sigla": variable.unit.initials, + "unit_initials": variable.unit.initials, + # "es_acumulada": variable.is_cumulative, + "is_cumulative": variable.is_cumulative, } ], - "datos": reporte.fillna("").to_dict(orient="records"), - "indicadores": [ + # "datos": 
reporte.fillna("").to_dict(orient="records"), + "data": report.fillna("").to_dict(orient="records"), + # "indicadores": [ + "indicators": [ { - "num_fecha": num_fecha, - "num_porcentaje": num_porcentaje, - "num_valor": num_valor, - "num_maximo": num_maximo, - "num_minimo": num_minimo, - "num_dias": num_dias, + # "num_fecha": num_fecha, + "num_date": num_date, + # "num_porcentaje": num_porcentaje, + "num_percentage": num_percentage, + # "num_valor": num_valor, + "num_value": num_value, + # "num_maximo": num_maximo, + "num_maximum": num_maximum, + # "num_minimo": num_minimo, + "num_minimum": num_minimum, + # "num_dias": num_dias, + "num_days": num_days, } ], - "datos_grafico": selected.fillna("").values.tolist(), # datos_grafico, + # "datos_grafico": selected.fillna("").values.tolist(), # datos_grafico, + "plot_data": selected.fillna("").values.tolist(), "series": { "selected": selected.fillna("").to_dict("list"), "measurement": measurement.fillna("").to_dict("list"), @@ -210,13 +261,22 @@ def basic_calculations(station, variable, start_time, end_time, minimum, maximum minimum = float(minimum) maximum = float(maximum) joined["suspicious_value"] = np.where( - (joined["value"] < minimum) | (joined["value"] > maximum), True, False + (joined["value"] < minimum) | (joined["value"] > maximum), + True, + False + # (joined["value"] < minimum) | (joined["value"] > maximum), 1, 0 ) joined["suspicious_maximum"] = np.where( - (joined["maximum"] < minimum) | (joined["maximum"] > maximum), True, False + (joined["maximum"] < minimum) | (joined["maximum"] > maximum), + True, + False + # (joined["maximum"] < minimum) | (joined["maximum"] > maximum), 1, 0 ) joined["suspicious_minimum"] = np.where( - (joined["minimum"] < minimum) | (joined["minimum"] > maximum), True, False + (joined["minimum"] < minimum) | (joined["minimum"] > maximum), + True, + False + # (joined["minimum"] < minimum) | (joined["minimum"] > maximum), 1, 0 ) # selected @@ -309,16 +369,12 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): # REF. 
NAME: tabla_calculo # Percentage of data existence - daily["data_existence_percentage"] = ( - daily["data_count"] / expected_data_count - ) * 100.0 + daily["percentage"] = (daily["data_count"] / expected_data_count) * 100.0 # TODO escoger la correcta para PARICIA - daily["is_null"] = daily["data_existence_percentage"] < ( - 100.0 - float(variable.null_limit) - ) - # daily['is_null'] = daily['data_existence_percentage'] < variable.null_limit + daily["is_null"] = daily["percentage"] < (100.0 - float(variable.null_limit)) + # daily['is_null'] = daily['percentage'] < variable.null_limit - daily["percentage_error"] = ~daily["data_existence_percentage"].between( + daily["percentage_error"] = ~daily["percentage"].between( 100.0 - float(variable.null_limit), 100.0 ) @@ -333,19 +389,25 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): # column='value', # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() # )).to_numpy() - daily["suspicious_values_count"] = daily_group["suspicious_value"].count() + daily["suspicious_values_count"] = daily_group["suspicious_value"].sum().to_numpy() + # daily['suspicious_maximums_count'] = daily_group.agg( # suspicious=pd.NamedAgg( # column='maximum', # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() # )).to_numpy() - daily["suspicious_maximums_count"] = daily_group["suspicious_maximum"].count() + daily["suspicious_maximums_count"] = ( + daily_group["suspicious_maximum"].sum().to_numpy() + ) + # daily['suspicious_minimums_count'] = daily_group.agg( # suspicious=pd.NamedAgg( # column='minimum', # aggfunc=lambda x: (x < variable.var_minimo).sum() + (x > variable.var_maximo).sum() # )).to_numpy() - daily["suspicious_minimums_count"] = daily_group["suspicious_minimum"].count() + daily["suspicious_minimums_count"] = ( + daily_group["suspicious_minimum"].sum().to_numpy() + ) # TODO check this for PARAMH2O (tabla_varia_erro) # REF. 
NAME: tabla_varia_erro @@ -381,7 +443,7 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): "max_maximum", "min_minimum", "all_validated", - "data_existence_percentage", + "percentage", "is_null", "suspicious_values_count", "suspicious_maximums_count", @@ -461,7 +523,7 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): # Discuss if the team are agree daily["date_error"] = daily["extra_data_count"] - # porcentaje : data_existence_percentage + # porcentaje : percentage # porcentaje_error : null_value # valor_error : (posiblemente no requiera) # maximo_error : (posiblemente no requiera) @@ -514,7 +576,7 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): # c_varia_err daily["data_count"].fillna(0, inplace=True) - daily["data_existence_percentage"].fillna(0, inplace=True) + daily["percentage"].fillna(0, inplace=True) daily["suspicious_values_count"].fillna(0, inplace=True) daily["suspicious_maximums_count"].fillna(0, inplace=True) daily["suspicious_minimums_count"].fillna(0, inplace=True) @@ -550,9 +612,7 @@ def daily_report(station, variable, start_time, end_time, minimum, maximum): daily["avg_value"] = daily["avg_value"].astype(np.float64).round(decimal_places) daily["max_maximum"] = daily["max_maximum"].astype(np.float64).round(decimal_places) daily["min_minimum"] = daily["min_minimum"].astype(np.float64).round(decimal_places) - daily["data_existence_percentage"] = ( - daily["data_existence_percentage"].astype(np.float64).round(1) - ) + daily["percentage"] = daily["percentage"].astype(np.float64).round(1) # daily.reset_index(names='id', inplace=True) daily.index.name = "id" diff --git a/validated/static/validated/daily_validation.js b/validated/static/validated/daily_validation.js index 54b85a49..e3d97e66 100755 --- a/validated/static/validated/daily_validation.js +++ b/validated/static/validated/daily_validation.js @@ -564,7 +564,7 @@ function guardar_crudos(event){ // cambios = JSON.stringify($table.bootstrapTable('getData',{unfiltered:true})); // //console.log($table.bootstrapTable('getData',{unfiltered:true, })) // //detalle_crudos(); - + document.getElementById("tab3-tab").style.display = "none"; var $table = $('#table_crudo'); token = $("input[name='csrfmiddlewaretoken']").val(); station_id = $("#id_station").val(); @@ -671,7 +671,8 @@ function actualizar_tabla_diario(){ // GRAFICO con Plotly // debugger; - if (data.variable[0].es_acumulada){ +// if (data.variable[0].es_acumulada){ + if (data.variable[0].is_cumulative){ grafico_barras_plotly(data.series, "#div_informacion", data.variable[0]); }else{ grafico_dispersion_plotly(data.series, "div_informacion", data.variable[0]); @@ -685,23 +686,25 @@ function actualizar_tabla_diario(){ // window.plot_orig_width = $("#" + window.gid).width(); // plot_adjust(); habilitar_nuevo(); - var_id = data.variable[0]['var_id']; + var_id = data.variable[0]['id']; variable = data.variable[0]; - estacion = data.estacion[0]; - datos_json = data.datos + estacion = data.station[0]; +// datos_json = data.datos; + datos_json = data.data //num_dias = data.indicadores[0]['num_dias']; $table.bootstrapTable('destroy'); - for (const index in data.indicadores[0]){ - indicadores_diarios[index]= data.indicadores[0][index]; + for (const index in data.indicators[0]){ + indicadores_diarios[index]= data.indicators[0][index]; } - var columns = get_columns_diario(var_id, data.indicadores[0]); + debugger; + var columns = get_columns_diario(var_id, data.indicators[0]); 
$table.bootstrapTable({ columns:columns, data: datos_json, height: 420, showFooter: true, uniqueId: 'id', - rowStyle: style_fila + rowStyle: style_row }); $table.bootstrapTable('hideLoading'); @@ -1525,6 +1528,9 @@ function detalle_crudos(e, value, row){ $table.bootstrapTable('showLoading'); }, success: function (data) { + + document.getElementById("tab3-tab").style.display = "block"; + document.getElementById("tab3-tab").click(); datos_json = data.datos; $table.bootstrapTable('destroy'); for (const index in data.indicadores[0]){ @@ -1536,7 +1542,12 @@ function detalle_crudos(e, value, row){ element["fecha"] = (element['fecha']).replace('T',' '); } var columns = get_column_validado(variable_id, data.indicadores[0]); - $table.bootstrapTable({columns:columns, data: datos_json, rowStyle: style_fila}) + $table.bootstrapTable({ + columns:columns, + data: datos_json, + rowStyle: style_row, + height: 320, + }); //$table.bootstrapTable({columns:columns, data: datos_json}) $table.bootstrapTable('hideLoading'); }, @@ -1658,7 +1669,7 @@ function abrir_formulario(e, value, row, index){ //Generar las columnas de la tabla de datos diarios function get_columns_diario(var_id){ -// debugger; + var span = 'num'; var columns = []; @@ -1674,26 +1685,53 @@ function get_columns_diario(var_id){ cellStyle: style_id }; - var fecha = { - field:'fecha', - title: 'Fecha', - cellStyle: style_fecha, - formatter: format_valor, +// var fecha = { +// field:'fecha', +// title: 'Fecha', +// cellStyle: style_fecha, +// formatter: format_valor, +// footerFormatter: total_filas, +// //filterControl: 'datepicker' +// }; + var date = { + field:'date', + title: 'Date', + cellStyle: style_date, + formatter: format_value, footerFormatter: total_filas, //filterControl: 'datepicker' - }; - var porcentaje = { - field:'porcentaje', - title:'Porcentaje ', - cellStyle: style_porcentaje, + + +// var porcentaje = { +// field:'porcentaje', +// title:'Porcentaje ', +// cellStyle: style_porcentaje, +// footerFormatter: footer_promedio, +// //filterControl: 'input' +// }; + var percentage = { + field:'percentage', + title:'Percnt.', + cellStyle: style_percentage, footerFormatter: footer_promedio, //filterControl: 'input' }; - var accion = { - field: 'accion', - title: 'Acción', +// var accion = { +// field: 'accion', +// title: 'Acción', +// formatter: operate_table_diario, +// events: { +// 'click .search': detalle_crudos, +// 'click .delete': eliminar_diario, +// //'click .update': abrir_formulario +// +// } +// }; + var action = { + field: 'action', + title: 'Action', formatter: operate_table_diario, events: { 'click .search': detalle_crudos, @@ -1702,55 +1740,62 @@ function get_columns_diario(var_id){ } }; - var n_valor = { - field:'n_valor', - title:'Variación Consecutiva', + +// var n_valor = { +// field:'n_valor', +// title:'Variación Consecutiva', +// cellStyle: style_var_con, +// footerFormatter: footer_variaConse_cont +// }; + var n_value = { + field:'n_value', + title:'Diff. 
Err', cellStyle: style_var_con, footerFormatter: footer_variaConse_cont }; columns.push(state); columns.push(id); - columns.push(fecha); - columns.push(porcentaje); + columns.push(date); + columns.push(percentage); if (var_id == 1) { - var valor = { - field:'valor', - title:'Valor ', - cellStyle: style_valor, - formatter: format_valor, + var value = { + field:'value', + title:'Value ', + cellStyle: style_value, + formatter: format_value, footerFormatter: footer_suma }; - columns.push(valor); - columns.push(n_valor); + columns.push(value); + columns.push(n_value); } else if ((var_id == 4) || (var_id == 5)){ - var valor = { - field:'valor', - title:'Valor ', - cellStyle: style_valor, + var value = { + field:'value', + title:'Value ', + cellStyle: style_value, //formatter: format_valor, footerFormatter: footer_promedio }; - var maximo = { - field:'maximo', - title:'Máximo ', + var maximum = { + field:'maximum', + title:'Maximum ', visible: false, - cellStyle: style_valor, + cellStyle: style_value, //formatter: format_valor, footerFormatter: footer_promedio }; - var minimo= { - field:'minimo', - title:'Mínimo ', + var minimum= { + field:'minimum', + title:'Minimum ', visible: false, - cellStyle: style_valor, + cellStyle: style_value, //formatter: format_valor, footerFormatter: footer_promedio } @@ -1767,47 +1812,44 @@ function get_columns_diario(var_id){ formatter: format_punto_cardinal, //footerFormatter: footer_promedio }; - columns.push(valor); - columns.push(maximo); - columns.push(minimo); + columns.push(value); + columns.push(maximum); + columns.push(minimum); columns.push(direccion); columns.push(punto_cardinal); } else{ - var valor = { - field:'valor', - title : 'Valor', - cellStyle: style_valor, - formatter: format_valor, + var value = { + field:'avg_value', + title : 'Avg. Val.', + cellStyle: style_value, + formatter: format_value, footerFormatter: footer_promedio }; - var maximo = { - field:'maximo', - title:'Máximo ', - cellStyle: style_valor, - formatter: format_valor, + var maximum = { + field:'max_maximum', + title:'Max. of Maxs. ', + cellStyle: style_value, + formatter: format_value, footerFormatter: footer_promedio }; - var minimo= { - field:'minimo', - title:'Mínimo ', - cellStyle: style_valor, - formatter: format_valor, + var minimum= { + field:'min_minimum', + title:'Min. 
of Mins.', + cellStyle: style_value, + formatter: format_value, footerFormatter: footer_promedio } - columns.push(valor); - columns.push(maximo); - columns.push(minimo); - columns.push(n_valor); - - - + columns.push(value); + columns.push(maximum); + columns.push(minimum); + columns.push(n_value); } - columns.push(accion); + columns.push(action); return columns @@ -1835,7 +1877,7 @@ function get_column_validado(var_id){ var fecha = { field:'fecha', title:'Fecha', - cellStyle: style_fecha, + cellStyle: style_date, footerFormatter: total_datos }; @@ -1991,10 +2033,26 @@ function operate_table_crudo(value, row, index) { } /*Formatos para las tablas crudos/diario*/ // Formato para el porcentaje de datos diarios -function style_porcentaje(value, row, index) { +//function style_porcentaje(value, row, index) { +//// debugger; +// var clase = '' +// if (row.porcentaje_error == true) { +// return { +// classes: 'error' +// } +// } +// else{ +// return { +// classes: 'normal' +// } +// } +// +//} + +function style_percentage(value, row, index) { // debugger; - var clase = '' - if (row.porcentaje_error == true) { +// var class = ''; + if (row.percentage_error == true) { return { classes: 'error' } @@ -2006,30 +2064,43 @@ function style_porcentaje(value, row, index) { } } + + + + //Formato para el valor, maximo, minimo de la tabla crudos/diarios -function style_fila(row, index){ +//function style_fila(row, index){ +function style_row(row, index){ // debugger; - if (row.estado == false) { - clase = 'error' + var _class = ''; + if (row.state == false) { + _class = 'error'; } /*if (row.seleccionado == false){ clase = 'no-seleccionado' }*/ else - clase = '' - return {classes: clase} + _class = ''; + return {classes: _class} } -function style_valor(value, row, index, field){ -// debugger; - var clase = '' - field_numero = field+'_numero'; + + +//function style_valor(value, row, index, field){ +function style_value(value, row, index, field){ + + var _class = ''; + field_numero = "suspicious_" + field.split("_")[1] +"s_count"; + + // TODO maybe limite_superior could be removed limite_superior = $('#id_limite_superior').val(); if (row[field_numero]>0 ) - clase = 'error'; + _class = 'error'; else - clase = 'normal'; - return { classes: clase} + _class = 'normal'; + return { classes: _class} } + + //Formato para el error de la tabla crudos/diarios function style_error_crudo(value, row, index, field){ // debugger; @@ -2071,7 +2142,26 @@ function style_varia_error(value, row, index){ } //Formato para el formato de la fecha -function style_fecha(value, row, index){ +//function style_fecha(value, row, index){ +//// debugger; +// // TODO Verify what fecha_error means +//// var clase = '' +//// if (row.fecha_error == 0 || row.fecha_error == 2 || row.fecha_error == 3 || row.fecha_numero > 0) +//// clase = 'error'; +//// else +//// clase = ''; +//// return { classes: clase} +// var clase = '' +// if (row.fecha_error > 0) +// clase = 'error'; +// else +// clase = ''; +// return { classes: clase} +// +//} + + +function style_date(value, row, index){ // debugger; // TODO Verify what fecha_error means // var clase = '' @@ -2080,36 +2170,59 @@ function style_fecha(value, row, index){ // else // clase = ''; // return { classes: clase} - var clase = '' - if (row.fecha_error > 0) - clase = 'error'; + var _class = ''; + if (row.date_error > 0) + _class = 'error'; else - clase = ''; - return { classes: clase} + _class = ''; + return { classes: _class} } + //Formato para la fila validada function style_id(value, row, index){ // 
debugger; - var clase = '' - if (row.validado == true) - clase = 'validado'; + var _class = ''; +// if (row.validado == true) + if (row.all_validated == true) + _class = 'validado'; ///else if (row.estado == false) // clase = 'error'; + // TODO check row.seleccionado translation else if (row.seleccionado == false) - clase = 'error'; + _class = 'error'; else - clase = ''; - return { classes: clase} + _class = ''; + return { classes: _class} + +} +///*Fomatos de celda para las tablas diario/crudos */ +//// Poner el numero de errores en el día +//function format_valor(value, row, index, field){ +//// debugger; +// var span = 'num'; +// var content = '' +// var field_numero = field + '_numero' +//// field_numero = "suspicious_" + field +"s_count"; +// if (row[field_numero]>0 ){ +// span = span.replace('num',row[field_numero].toString()); +// content = value + ' ' + span; +// } +// else{ +// //span = span.replace('num',0); +// content = value; +// } +// return content +//} -} /*Fomatos de celda para las tablas diario/crudos */ // Poner el numero de errores en el día -function format_valor(value, row, index, field){ +function format_value(value, row, index, field){ // debugger; var span = 'num'; - var content = '' - var field_numero = field + '_numero' + var content = ''; +// var field_numero = field + '_numero' + var field_numero = "suspicious_" + field.split("_")[1] +"s_count"; if (row[field_numero]>0 ){ span = span.replace('num',row[field_numero].toString()); content = value + ' ' + span; @@ -2121,6 +2234,8 @@ function format_valor(value, row, index, field){ return content } + + function format_punto_cardinal(value, row, index, field){ // debugger; puntos_cardinales=['N', 'NE', 'E', 'SE', 'S', 'SO', 'O', 'NO']; @@ -2132,48 +2247,78 @@ function format_punto_cardinal(value, row, index, field){ /*Funciones para el footeer de la tabla*/ +//function footer_id(data){ +//// debugger; +// var span = 'num'; +// var num_fecha = data.reduce(function(num, i){ +// if (i['estado'] && i['seleccionado']==false) +// return num +1; +// else +// return num; +// }, 0); +// +// span = span.replace('num',num_fecha.toString()); +// +// return span; +// +//} + function footer_id(data){ -// debugger; + debugger; var span = 'num'; - var num_fecha = data.reduce(function(num, i){ - if (i['estado'] && i['seleccionado']==false) - return num +1; + var num_date = data.reduce(function(num, i){ + // TODO check translation: seleccionado + if (i['state'] && i['seleccionado']==false) + return num + 1; else return num; }, 0); - span = span.replace('num',num_fecha.toString()); + span = span.replace('num',num_date.toString()); return span; } + + // Obtener el promedio de los datos function footer_promedio(data){ -// debugger; + debugger; var field = this.field; - var field_error = this.field + '_error'; + var field_error = ''; + if ( this.field.includes("value") || this.field.includes("maximum") || this.field.includes("minimum")){ + field_error = this.field.split("_")[1] + '_error'; + }else{ + field_error = this.field + '_error'; + } var span = 'num'; var promedio = 0; var suma = data.reduce(function (sum, i) { - if (i['estado'] && i[field] != null) +// if (i['estado'] && i[field] != null) +// debugger; + if (i['state'] && i[field] != null) return sum + parseFloat(i[field]) else - return sum + return sum; }, 0); var num_datos = data.reduce(function (sum, i) { - if (i['estado'] && i[field] != null) - return sum + 1 +// debugger; +// if (i['estado'] && i[field] != null) + if (i['state'] && i[field] != null) + return sum + 1; else - 
return sum + return sum; }, 0); var num_valor = data.reduce (function (num, i){ //console.log('field',i[field_error]) - if (i[field_error] && i['estado']) +// if (i[field_error] && i['estado']) +// debugger; + if (i[field_error] && i['state']) return num + 1; else return num; @@ -2189,7 +2334,7 @@ function footer_promedio(data){ } //obtener la suma de los datos function footer_suma(data){ -// debugger; + debugger; var field = this.field; var field_error = this.field + '_error'; var span = 'num'; @@ -2216,34 +2361,35 @@ function footer_suma(data){ //total de dias function total_filas(data){ -// debugger; var span = 'num'; var var_id = $("#id_variable").val(); - var fechas = []; - /*if ((var_id == 4) || (var_id == 5)) { - console.log(data); - $.map(data, function(row){ - fechas.push(row.fecha); - }); - console.log( fechas.unique() ); - }*/ +// var fechas = []; + var dates = []; +// /*if ((var_id == 4) || (var_id == 5)) { +// console.log(data); +// $.map(data, function(row){ +// fechas.push(row.fecha); +// }); +// console.log( fechas.unique() ); +// }*/ $.map(data, function(row){ - fechas.push(row.fecha); + dates.push(row.date); }); - /*var suma = data.reduce(function (sum, i) { - if (i['estado']){ - return sum + 1 - } - else{ - return sum - } - - }, 0);*/ +// /*var suma = data.reduce(function (sum, i) { +// if (i['estado']){ +// return sum + 1 +// } +// else{ +// return sum +// } +// +// }, 0);*/ - var suma = fechas.unique().length; +// var suma = fechas.unique().length; + var suma = dates.unique().length; // var num_fecha = data.reduce(function(num, i){ // if ((i['fecha_error']==0) || (i['fecha_error']==2) || (i['fecha_error']==3)) @@ -2254,20 +2400,24 @@ function total_filas(data){ // debugger; var num_fecha = data.reduce(function(num, i){ - if (i['fecha_error']>0) +// if (i['fecha_error']>0) + if (i['date_error']>0) return num +1; else return num; }, 0); // TODO ask the team for prefferred behaviour - span = span.replace('num',num_fecha.toString()); +// span = span.replace('num',num_fecha.toString()); + span = span.replace('num', num_fecha.toString()); - return suma + ' de ' + indicadores_diarios['num_dias'] + ' días ' + span; + return suma + ' of ' + indicadores_diarios['num_days'] + ' days ' + span; +// return suma + ' of ' + daily_indicators['num_days'] + ' days ' + span; } + //total de datos function total_datos(data){ -// debugger; + debugger; var span = 'num'; var suma = data.reduce(function (sum, i) { @@ -2293,7 +2443,7 @@ function total_datos(data){ } // valores atípicos function footer_stddev(data){ -// debugger; + debugger; var span = 'num'; var num_stddev = data.reduce(function(num, i){ if (i['stddev_error'] && i['estado']) @@ -2307,7 +2457,7 @@ function footer_stddev(data){ return span; } function footer_variaConse(data){ -// debugger; + debugger; var span = 'num'; var num_vcr = data.reduce(function(num, i){ if (i['stddev_error'] && i['estado']) @@ -2326,21 +2476,21 @@ function footer_variaConse(data){ function footer_variaConse_cont(data){ // debugger; var span = 'num'; - var num_vc= data.reduce(function(num, i){ - if (i['c_varia_err'] >= 1 ) + var num_vd= data.reduce(function(num, i){ + if (i['value_difference_error_count'] >= 1 ) return num + 1; else return num; }, 0); - span = span.replace('num',num_vc); + span = span.replace('num',num_vd); return span; } /* Filtro de las Tablas */ function filtrar_diario(){ -// debugger; + debugger; var fecha = $("#chk_fecha").val(); var porcentaje = $("#chk_porcentaje").val(); var numero = $("#chk_numero").val(); @@ -2374,7 +2524,7 @@ 
function filtrar_diario(){ } function get_filtro_fecha(fecha){ -// debugger; + debugger; var filtro_fecha = []; if (fecha == 'error') filtro_fecha = ['0','2', '3']; @@ -2387,7 +2537,7 @@ function get_filtro_fecha(fecha){ } function get_filtro_porcentaje(porcentaje){ -// debugger; + debugger; var filtro_porcentaje = []; if (porcentaje == 'error') @@ -2401,7 +2551,7 @@ function get_filtro_porcentaje(porcentaje){ } function get_filtro_valor(numero){ -// debugger; + debugger; var filtro_valor = []; if (numero == 'error') @@ -2415,7 +2565,7 @@ function get_filtro_valor(numero){ } function get_filtro_stddev(numero){ -// debugger; + debugger; var filtro_valor = []; if (numero == 'error') @@ -2429,7 +2579,7 @@ function get_filtro_stddev(numero){ } function get_filtro_estado(numero){ -// debugger; + debugger; var filtro_valor = []; if (numero == 'error') @@ -2442,7 +2592,7 @@ function get_filtro_estado(numero){ return filtro_valor } function get_filtro_var_con(numero){ -// debugger; + debugger; console.log("Valorfiltro con ", numero); var filtro_valor = []; diff --git a/validated/templates/daily_validation.html b/validated/templates/daily_validation.html index ca45ff60..f1989484 100755 --- a/validated/templates/daily_validation.html +++ b/validated/templates/daily_validation.html @@ -79,6 +79,9 @@ overflow: auto; } + .validation-body { + overflow-y: hidden; + } - - - - @@ -28,16 +18,12 @@ - - - + - - - {% endblock %} @@ -104,7 +85,7 @@ Validation module
@@ -112,12 +93,12 @@
-
+ {% csrf_token %} {% bootstrap_form form show_label=True %} - +
@@ -134,7 +115,7 @@ -
+
@@ -149,14 +130,14 @@ -