diff --git a/backend/apps/account/admin.py b/backend/apps/account/admin.py
index 30b6a34d..e5b55e74 100644
--- a/backend/apps/account/admin.py
+++ b/backend/apps/account/admin.py
@@ -332,20 +332,20 @@ class RoleAdmin(admin.ModelAdmin):
class CareerAdmin(admin.ModelAdmin):
list_display = (
"account",
- "team",
+ "team_old",
"team_new",
- "role",
+ "role_old",
"role_new",
"level",
"start_at",
"end_at",
)
search_fields = (
- "account",
- "team",
- "team_new",
- "role",
- "role_new",
+ "account__email",
+ "team_old",
+ "team_new__name",
+ "role_old",
+ "role_new__name",
)
readonly_fields = ("created_at", "updated_at")
ordering = ["account", "start_at"]
diff --git a/backend/apps/account/migrations/0001_initial.py b/backend/apps/account/migrations/0001_initial.py
index e032df98..08eb76e2 100644
--- a/backend/apps/account/migrations/0001_initial.py
+++ b/backend/apps/account/migrations/0001_initial.py
@@ -84,7 +84,9 @@ class Migration(migrations.Migration):
),
(
"twitter",
- models.CharField(blank=True, max_length=255, null=True, verbose_name="Twitter"),
+ models.CharField(
+ blank=True, max_length=255, null=True, verbose_name="Twitter"
+ ),
),
(
"linkedin",
diff --git a/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py b/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py
index f56d57d1..7e656d4e 100644
--- a/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py
+++ b/backend/apps/account/migrations/0019_role_team_alter_career_end_at_alter_career_level_and_more.py
@@ -5,67 +5,88 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('account', '0018_account_gcp_email'),
+ ("account", "0018_account_gcp_email"),
]
operations = [
migrations.CreateModel(
- name='Role',
+ name="Role",
fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('slug', models.SlugField(unique=True)),
- ('name', models.CharField(max_length=100, unique=True, verbose_name='Name')),
- ('description', models.TextField(blank=True, null=True, verbose_name='Description')),
- ('created_at', models.DateTimeField(auto_now_add=True)),
- ('updated_at', models.DateTimeField(auto_now=True)),
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
+ ),
+ ),
+ ("slug", models.SlugField(unique=True)),
+ ("name", models.CharField(max_length=100, unique=True, verbose_name="Name")),
+ (
+ "description",
+ models.TextField(blank=True, null=True, verbose_name="Description"),
+ ),
+ ("created_at", models.DateTimeField(auto_now_add=True)),
+ ("updated_at", models.DateTimeField(auto_now=True)),
],
options={
- 'verbose_name': 'Role',
- 'verbose_name_plural': 'Roles',
- 'ordering': ['name'],
+ "verbose_name": "Role",
+ "verbose_name_plural": "Roles",
+ "ordering": ["name"],
},
),
migrations.CreateModel(
- name='Team',
+ name="Team",
fields=[
- ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('slug', models.SlugField(unique=True)),
- ('name', models.CharField(max_length=100, unique=True, verbose_name='Name')),
- ('description', models.TextField(blank=True, null=True, verbose_name='Description')),
- ('created_at', models.DateTimeField(auto_now_add=True)),
- ('updated_at', models.DateTimeField(auto_now=True)),
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
+ ),
+ ),
+ ("slug", models.SlugField(unique=True)),
+ ("name", models.CharField(max_length=100, unique=True, verbose_name="Name")),
+ (
+ "description",
+ models.TextField(blank=True, null=True, verbose_name="Description"),
+ ),
+ ("created_at", models.DateTimeField(auto_now_add=True)),
+ ("updated_at", models.DateTimeField(auto_now=True)),
],
options={
- 'verbose_name': 'Team',
- 'verbose_name_plural': 'Teams',
- 'ordering': ['name'],
+ "verbose_name": "Team",
+ "verbose_name_plural": "Teams",
+ "ordering": ["name"],
},
),
migrations.AlterField(
- model_name='career',
- name='end_at',
- field=models.DateField(blank=True, null=True, verbose_name='End at'),
+ model_name="career",
+ name="end_at",
+ field=models.DateField(blank=True, null=True, verbose_name="End at"),
),
migrations.AlterField(
- model_name='career',
- name='level',
- field=models.CharField(blank=True, max_length=40, verbose_name='Level'),
+ model_name="career",
+ name="level",
+ field=models.CharField(blank=True, max_length=40, verbose_name="Level"),
),
migrations.AlterField(
- model_name='career',
- name='role',
- field=models.CharField(blank=True, max_length=40, verbose_name='Role'),
+ model_name="career",
+ name="role",
+ field=models.CharField(blank=True, max_length=40, verbose_name="Role"),
),
migrations.AlterField(
- model_name='career',
- name='start_at',
- field=models.DateField(blank=True, null=True, verbose_name='Start at'),
+ model_name="career",
+ name="start_at",
+ field=models.DateField(blank=True, null=True, verbose_name="Start at"),
),
migrations.AddField(
- model_name='career',
- name='team_new',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='careers', to='account.team'),
+ model_name="career",
+ name="team_new",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.DO_NOTHING,
+ related_name="careers",
+ to="account.team",
+ ),
),
]
diff --git a/backend/apps/account/migrations/0020_career_role_new.py b/backend/apps/account/migrations/0020_career_role_new.py
index 86424a8c..5a0c7269 100644
--- a/backend/apps/account/migrations/0020_career_role_new.py
+++ b/backend/apps/account/migrations/0020_career_role_new.py
@@ -5,15 +5,20 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('account', '0019_role_team_alter_career_end_at_alter_career_level_and_more'),
+ ("account", "0019_role_team_alter_career_end_at_alter_career_level_and_more"),
]
operations = [
migrations.AddField(
- model_name='career',
- name='role_new',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='careers', to='account.role'),
+ model_name="career",
+ name="role_new",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.DO_NOTHING,
+ related_name="careers",
+ to="account.role",
+ ),
),
]
diff --git a/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py
new file mode 100644
index 00000000..d2578ae3
--- /dev/null
+++ b/backend/apps/account/migrations/0021_rename_role_career_role_old_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.18 on 2025-02-05 00:43
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('account', '0020_career_role_new'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='career',
+ old_name='role',
+ new_name='role_old',
+ ),
+ migrations.RenameField(
+ model_name='career',
+ old_name='team',
+ new_name='team_old',
+ ),
+ ]
diff --git a/backend/apps/account/models.py b/backend/apps/account/models.py
index 458926c8..40906888 100644
--- a/backend/apps/account/models.py
+++ b/backend/apps/account/models.py
@@ -211,7 +211,9 @@ class Account(BaseModel, AbstractBaseUser, PermissionsMixin):
uuid = models.UUIDField(primary_key=False, default=uuid4)
email = models.EmailField("Email", unique=True)
- gcp_email = models.EmailField("GCP email", null=True, blank=True) # Google Cloud Platform email
+ gcp_email = models.EmailField(
+ "GCP email", null=True, blank=True
+ ) # Google Cloud Platform email
username = models.CharField("Username", max_length=40, blank=True, null=True, unique=True)
first_name = models.CharField("Nome", max_length=40, blank=True)
@@ -466,6 +468,7 @@ class Meta:
def __str__(self):
return self.name
+
class Role(BaseModel):
slug = models.SlugField(unique=True)
name = models.CharField("Name", max_length=100, unique=True)
@@ -485,10 +488,14 @@ def __str__(self):
class Career(BaseModel):
id = models.UUIDField(primary_key=True, default=uuid4)
account = models.ForeignKey(Account, on_delete=models.DO_NOTHING, related_name="careers")
- team = models.CharField("Equipe", max_length=40, blank=True)
- team_new = models.ForeignKey(Team, on_delete=models.DO_NOTHING, related_name="careers", null=True, blank=True)
- role = models.CharField("Role", max_length=40, blank=True)
- role_new = models.ForeignKey(Role, on_delete=models.DO_NOTHING, related_name="careers", null=True, blank=True)
+ team_old = models.CharField("Equipe", max_length=40, blank=True)
+ team_new = models.ForeignKey(
+ Team, on_delete=models.DO_NOTHING, related_name="careers", null=True, blank=True
+ )
+ role_old = models.CharField("Role", max_length=40, blank=True)
+ role_new = models.ForeignKey(
+ Role, on_delete=models.DO_NOTHING, related_name="careers", null=True, blank=True
+ )
level = models.CharField("Level", max_length=40, blank=True)
start_at = models.DateField("Start at", null=True, blank=True)
end_at = models.DateField("End at", null=True, blank=True)
@@ -500,10 +507,10 @@ class Meta:
verbose_name_plural = "Careers"
def __str__(self):
- return f"{self.account.email} @{self.role}"
+ return f"{self.account.email} @{self.role_new.name}" if self.role_new else ""
def get_team(self):
- return self.team
+ return self.team_new.name if self.team_new else ""
get_team.short_description = "Team"
diff --git a/backend/apps/account/translation.py b/backend/apps/account/translation.py
index f967ce00..627aae88 100644
--- a/backend/apps/account/translation.py
+++ b/backend/apps/account/translation.py
@@ -3,12 +3,15 @@
from .models import Account, Team, Role
+
class TeamTranslationOptions(TranslationOptions):
fields = ("name", "description")
+
class RoleTranslationOptions(TranslationOptions):
fields = ("name", "description")
+
class AccountTranslationOptions(TranslationOptions):
fields = ("description",)
diff --git a/backend/apps/account_payment/webhooks.py b/backend/apps/account_payment/webhooks.py
index fb1a29a4..b5a299b4 100644
--- a/backend/apps/account_payment/webhooks.py
+++ b/backend/apps/account_payment/webhooks.py
@@ -267,7 +267,8 @@ def setup_intent_succeeded(event: Event, **kwargs):
return logger.info(f"Ignore setup intent from {backend_url}")
StripeCustomer.modify(
- customer.id, invoice_settings={"default_payment_method": setup_intent.get("payment_method")}
+ customer.id,
+ invoice_settings={"default_payment_method": setup_intent.get("payment_method")},
)
subscriptions = StripeSubscription.list(customer=customer.id)
diff --git a/backend/apps/api/v1/admin.py b/backend/apps/api/v1/admin.py
index 40b292f0..339dae66 100644
--- a/backend/apps/api/v1/admin.py
+++ b/backend/apps/api/v1/admin.py
@@ -4,29 +4,28 @@
from django import forms
from django.contrib import admin, messages
from django.contrib.admin import ModelAdmin
+from django.contrib.auth.admin import UserAdmin
+from django.contrib.auth.models import User
from django.core.management import call_command
from django.db.models.query import QuerySet
from django.http import HttpRequest
from django.shortcuts import render
-from django.utils.html import format_html
from django.urls import reverse
+from django.utils.html import format_html
from modeltranslation.admin import TabbedTranslationAdmin, TranslationStackedInline
from ordered_model.admin import OrderedInlineModelAdminMixin, OrderedStackedInline
-from django.contrib.auth.admin import UserAdmin
-from django.contrib.auth.models import User
from backend.apps.api.v1.filters import (
+ AreaAdministrativeLevelFilter,
+ AreaParentFilter,
DatasetOrganizationListFilter,
OrganizationImageListFilter,
TableCoverageListFilter,
TableDirectoryListFilter,
TableObservationListFilter,
TableOrganizationListFilter,
- AreaAdministrativeLevelFilter,
- AreaParentFilter,
)
from backend.apps.api.v1.forms import (
- CloudTableInlineForm,
ColumnInlineForm,
ColumnOriginalNameInlineForm,
CoverageInlineForm,
@@ -64,6 +63,7 @@
ObservationLevel,
Organization,
Pipeline,
+ Poll,
QualityCheck,
RawDataSource,
Status,
@@ -72,7 +72,6 @@
Tag,
Theme,
Update,
- Poll,
)
from backend.apps.api.v1.tasks import (
rebuild_search_index_task,
@@ -98,6 +97,7 @@ class MeasurementUnitInline(OrderedTranslatedInline):
extra = 0
show_change_link = True
+
class ColumnInline(OrderedTranslatedInline):
model = Column
form = ColumnInlineForm
@@ -152,11 +152,11 @@ class CloudTableInline(admin.TabularInline):
"gcp_table_id",
]
fields = readonly_fields
- template = 'admin/cloud_table_inline.html'
+ template = "admin/cloud_table_inline.html"
def has_add_permission(self, request, obj=None):
return False
-
+
def has_change_permission(self, request, obj=None):
return False
@@ -172,7 +172,7 @@ class ObservationLevelInline(OrderedStackedInline):
"move_up_down_links",
]
fields = readonly_fields
- template = 'admin/observation_level_inline.html'
+ template = "admin/observation_level_inline.html"
ordering = ["order"]
def get_formset(self, request, obj=None, **kwargs):
@@ -182,21 +182,22 @@ def get_formset(self, request, obj=None, **kwargs):
def get_ordering_prefix(self):
"""Return the appropriate ordering prefix based on parent model"""
if isinstance(self.parent_obj, Table):
- return 'table'
+ return "table"
elif isinstance(self.parent_obj, RawDataSource):
- return 'rawdatasource'
+ return "rawdatasource"
elif isinstance(self.parent_obj, InformationRequest):
- return 'informationrequest'
+ return "informationrequest"
elif isinstance(self.parent_obj, Analysis):
- return 'analysis'
+ return "analysis"
return super().get_ordering_prefix()
def has_add_permission(self, request, obj=None):
return False
-
+
def has_change_permission(self, request, obj=None):
return False
+
class TableInline(OrderedTranslatedInline):
model = Table
form = TableInlineForm
@@ -496,23 +497,23 @@ def reorder_observation_levels(modeladmin, request, queryset):
"To pass the names manually you must select only one parent.",
)
return
-
+
parent = queryset.first()
ordered_entities = form.cleaned_data["ordered_entities"].split()
-
+
# Get observation levels for this parent
- if hasattr(parent, 'observation_levels'):
+ if hasattr(parent, "observation_levels"):
obs_levels = parent.observation_levels.all()
-
+
# Create a mapping of entity names to observation levels
obs_by_entity = {ol.entity.name: ol for ol in obs_levels}
-
+
# Update order based on provided entity names
for i, entity_name in enumerate(ordered_entities):
if entity_name in obs_by_entity:
obs_by_entity[entity_name].order = i
obs_by_entity[entity_name].save()
-
+
messages.success(request, "Observation levels reordered successfully")
else:
messages.error(request, "Selected object has no observation levels")
@@ -524,6 +525,7 @@ def reorder_observation_levels(modeladmin, request, queryset):
{"title": "Reorder observation levels", "parents": queryset, "form": form},
)
+
reorder_observation_levels.short_description = "Alterar ordem dos níveis de observação"
@@ -618,11 +620,7 @@ class DatasetAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin):
"updated_at",
"related_objects",
]
- search_fields = [
- "name",
- "slug",
- "organizations__name"
- ]
+ search_fields = ["name", "slug", "organizations__name"]
filter_horizontal = [
"tags",
"themes",
@@ -645,6 +643,7 @@ class DatasetAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin):
def get_organizations(self, obj):
"""Display all organizations for the dataset"""
return ", ".join([org.name for org in obj.organizations.all()])
+
get_organizations.short_description = "Organizations"
def related_objects(self, obj):
@@ -655,16 +654,19 @@ def related_objects(self, obj):
obj.tables.count(),
"tables" if obj.tables.count() > 1 else "table",
)
+
related_objects.short_description = "Tables"
class CustomUserAdmin(UserAdmin):
- search_fields = ['username', 'first_name', 'last_name', 'email']
+ search_fields = ["username", "first_name", "last_name", "email"]
+
if User in admin.site._registry:
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
+
class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin):
form = TableForm
actions = [
@@ -703,13 +705,13 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin):
"dataset__name",
]
autocomplete_fields = [
- 'dataset',
- 'partner_organization',
- 'published_by',
- 'data_cleaned_by',
+ "dataset",
+ "partner_organization",
+ "published_by",
+ "data_cleaned_by",
]
filter_horizontal = [
- 'raw_data_source',
+ "raw_data_source",
]
list_display = [
"name",
@@ -729,16 +731,14 @@ class TableAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdmin):
def get_queryset(self, request):
"""Optimize queryset by prefetching related objects"""
- return super().get_queryset(request).prefetch_related(
- 'published_by',
- 'data_cleaned_by'
- )
+ return super().get_queryset(request).prefetch_related("published_by", "data_cleaned_by")
def get_publishers(self, obj):
"""Display all publishers for the table"""
# Convert to list to avoid multiple DB hits
publishers = list(obj.published_by.all())
return ", ".join(f"{pub.first_name} {pub.last_name}" for pub in publishers)
+
get_publishers.short_description = "Publishers"
def get_data_cleaners(self, obj):
@@ -746,6 +746,7 @@ def get_data_cleaners(self, obj):
# Convert to list to avoid multiple DB hits
cleaners = list(obj.data_cleaned_by.all())
return ", ".join(f"{cleaner.first_name} {cleaner.last_name}" for cleaner in cleaners)
+
get_data_cleaners.short_description = "Data Cleaners"
@@ -779,6 +780,7 @@ class MeasurementUnitCategoryAdmin(TabbedTranslationAdmin):
"name",
]
+
class MeasurementUnitAdmin(TabbedTranslationAdmin):
list_display = [
"slug",
@@ -857,7 +859,7 @@ def reset_observation_level_order(modeladmin, request, queryset):
by_raw_data_source = {}
by_information_request = {}
by_analysis = {}
-
+
for obs in queryset:
if obs.table_id:
by_table.setdefault(obs.table_id, []).append(obs)
@@ -876,6 +878,7 @@ def reset_observation_level_order(modeladmin, request, queryset):
obs_level.order = i
obs_level.save()
+
reset_observation_level_order.short_description = "Reiniciar ordem dos níveis de observação"
@@ -942,7 +945,7 @@ class InformationRequestAdmin(OrderedInlineModelAdminMixin, TabbedTranslationAdm
readonly_fields = ["id", "created_at", "updated_at"]
autocomplete_fields = ["dataset"]
inlines = [
- CoverageInline,
+ CoverageInline,
ObservationLevelInline,
PollInline,
]
@@ -1038,38 +1041,26 @@ def datetime_ranges_display(self, obj):
ranges = obj.datetime_ranges.all()
links = []
for dt_range in ranges:
- url = reverse('admin:v1_datetimerange_change', args=[dt_range.id])
- links.append(
-                format_html('<a href="{}">{}</a>', url, str(dt_range))
- )
-
+ url = reverse("admin:v1_datetimerange_change", args=[dt_range.id])
+            links.append(format_html('<a href="{}">{}</a>', url, str(dt_range)))
+
# Add link to add new datetime range
- add_url = reverse('admin:v1_datetimerange_add') + f'?coverage={obj.id}'
- links.append(
- format_html(
-                '<a href="{}">Add DateTime Range</a>',
- add_url
- )
- )
-
-        return format_html('<br>'.join(links))
-
+ add_url = reverse("admin:v1_datetimerange_add") + f"?coverage={obj.id}"
+        links.append(format_html('<a href="{}">Add DateTime Range</a>', add_url))
+
+        return format_html("<br>".join(links))
+
datetime_ranges_display.short_description = "DateTime Ranges"
def get_queryset(self, request):
"""Optimize queryset by prefetching related objects"""
- qs = super().get_queryset(request).select_related(
- 'table',
- 'column',
- 'raw_data_source',
- 'information_request',
- 'area'
+ qs = (
+ super()
+ .get_queryset(request)
+ .select_related("table", "column", "raw_data_source", "information_request", "area")
)
# Add prefetch for datetime_ranges and their units
- return qs.prefetch_related(
- 'datetime_ranges',
- 'datetime_ranges__units'
- )
+ return qs.prefetch_related("datetime_ranges", "datetime_ranges__units")
class EntityCategoryAdmin(TabbedTranslationAdmin):
@@ -1306,6 +1297,7 @@ class PollAdmin(admin.ModelAdmin):
"information_request",
]
+
class PipelineAdmin(admin.ModelAdmin):
readonly_fields = [
"id",
diff --git a/backend/apps/api/v1/filters.py b/backend/apps/api/v1/filters.py
index caf69a36..cf9070de 100644
--- a/backend/apps/api/v1/filters.py
+++ b/backend/apps/api/v1/filters.py
@@ -27,7 +27,7 @@ class DatasetOrganizationListFilter(admin.SimpleListFilter):
parameter_name = "organization"
def lookups(self, request, model_admin):
- organizations = Organization.objects.all().order_by('slug')
+ organizations = Organization.objects.all().order_by("slug")
return [(org.id, org.name) for org in organizations]
def queryset(self, request, queryset):
@@ -108,12 +108,12 @@ class AreaAdministrativeLevelFilter(admin.SimpleListFilter):
def lookups(self, request, model_admin):
return [
- (0, '0'),
- (1, '1'),
- (2, '2'),
- (3, '3'),
- (4, '4'),
- (5, '5'),
+ (0, "0"),
+ (1, "1"),
+ (2, "2"),
+ (3, "3"),
+ (4, "4"),
+ (5, "5"),
]
def queryset(self, request, queryset):
@@ -127,7 +127,7 @@ class AreaParentFilter(admin.SimpleListFilter):
def lookups(self, request, model_admin):
# Get all areas that have children, ordered by name
- parents = Area.objects.filter(children__isnull=False).distinct().order_by('name')
+ parents = Area.objects.filter(children__isnull=False).distinct().order_by("name")
return [(area.id, f"{area.name}") for area in parents]
def queryset(self, request, queryset):
diff --git a/backend/apps/api/v1/forms/__init__.py b/backend/apps/api/v1/forms/__init__.py
index 9ead2290..184f7b6b 100644
--- a/backend/apps/api/v1/forms/__init__.py
+++ b/backend/apps/api/v1/forms/__init__.py
@@ -12,5 +12,7 @@
UpdateInlineForm,
)
from backend.apps.api.v1.forms.reorder_columns_form import ReorderColumnsForm # noqa: F401
+from backend.apps.api.v1.forms.reorder_observation_levels_form import (
+ ReorderObservationLevelsForm, # noqa: F401
+)
from backend.apps.api.v1.forms.reorder_tables_form import ReorderTablesForm # noqa: F401
-from backend.apps.api.v1.forms.reorder_observation_levels_form import ReorderObservationLevelsForm # noqa: F401
\ No newline at end of file
diff --git a/backend/apps/api/v1/forms/admin_form.py b/backend/apps/api/v1/forms/admin_form.py
index 7dc470ed..84c45044 100644
--- a/backend/apps/api/v1/forms/admin_form.py
+++ b/backend/apps/api/v1/forms/admin_form.py
@@ -29,22 +29,22 @@ class Meta:
class TableForm(forms.ModelForm):
class Meta:
model = Table
- fields = '__all__'
-
+ fields = "__all__"
+
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.instance:
# Check both the saved instance and current form data
dataset_id = self.instance.dataset_id
if not dataset_id and self.data:
- dataset_id = self.data.get('dataset')
-
+ dataset_id = self.data.get("dataset")
+
if dataset_id:
- self.fields['raw_data_source'].queryset = RawDataSource.objects.filter(
+ self.fields["raw_data_source"].queryset = RawDataSource.objects.filter(
dataset_id=dataset_id
)
else:
- self.fields['raw_data_source'].queryset = RawDataSource.objects.none()
+ self.fields["raw_data_source"].queryset = RawDataSource.objects.none()
class TableInlineForm(UUIDHiddenIdForm):
@@ -147,6 +147,7 @@ class Meta(UUIDHiddenIdForm.Meta):
model = Update
fields = "__all__"
+
class PollInlineForm(forms.ModelForm):
class Meta:
model = Poll
@@ -154,4 +155,4 @@ class Meta:
"entity",
"frequency",
"latest",
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/forms/reorder_observation_levels_form.py b/backend/apps/api/v1/forms/reorder_observation_levels_form.py
index 46dd4ad9..77a1c18d 100644
--- a/backend/apps/api/v1/forms/reorder_observation_levels_form.py
+++ b/backend/apps/api/v1/forms/reorder_observation_levels_form.py
@@ -1,8 +1,9 @@
from django import forms
+
class ReorderObservationLevelsForm(forms.Form):
ordered_entities = forms.CharField(
required=False,
widget=forms.Textarea(attrs={"rows": 10, "cols": 40}),
help_text="Enter entity names one per line in desired order",
- )
\ No newline at end of file
+ )
diff --git a/backend/apps/api/v1/management/commands/reorder_tables.py b/backend/apps/api/v1/management/commands/reorder_tables.py
index db033da3..8b5b2eaf 100644
--- a/backend/apps/api/v1/management/commands/reorder_tables.py
+++ b/backend/apps/api/v1/management/commands/reorder_tables.py
@@ -10,7 +10,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("dataset_id", type=str, help="ID of the dataset")
- parser.add_argument("ordered_slugs", type=str, nargs="+", help="Ordered tables JSON string")
+ parser.add_argument(
+ "ordered_slugs", type=str, nargs="+", help="Ordered tables JSON string"
+ )
def handle(self, dataset_id, *args, **options):
ordered_slugs = options["ordered_slugs"]
diff --git a/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py b/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py
index f7b2d234..7306ad2d 100644
--- a/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py
+++ b/backend/apps/api/v1/migrations/0003_alter_column_is_closed_alter_dataset_is_closed_and_more.py
@@ -27,6 +27,8 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name="table",
name="is_closed",
- field=models.BooleanField(default=False, help_text="Table is for Pro subscribers only"),
+ field=models.BooleanField(
+ default=False, help_text="Table is for Pro subscribers only"
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0036_datetimerange_units.py b/backend/apps/api/v1/migrations/0036_datetimerange_units.py
index b70314a9..ecf50833 100644
--- a/backend/apps/api/v1/migrations/0036_datetimerange_units.py
+++ b/backend/apps/api/v1/migrations/0036_datetimerange_units.py
@@ -4,15 +4,16 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0035_alter_poll_entity_alter_poll_frequency_and_more'),
+ ("v1", "0035_alter_poll_entity_alter_poll_frequency_and_more"),
]
operations = [
migrations.AddField(
- model_name='datetimerange',
- name='units',
- field=models.ManyToManyField(blank=True, related_name='datetime_ranges', to='v1.column'),
+ model_name="datetimerange",
+ name="units",
+ field=models.ManyToManyField(
+ blank=True, related_name="datetime_ranges", to="v1.column"
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0037_area_entity_area_level_area_parent.py b/backend/apps/api/v1/migrations/0037_area_entity_area_level_area_parent.py
index 025b6dd5..8e61c6d7 100644
--- a/backend/apps/api/v1/migrations/0037_area_entity_area_level_area_parent.py
+++ b/backend/apps/api/v1/migrations/0037_area_entity_area_level_area_parent.py
@@ -1,29 +1,40 @@
# Generated by Django 4.2.16 on 2024-11-03 01:29
-from django.db import migrations, models
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0036_datetimerange_units'),
+ ("v1", "0036_datetimerange_units"),
]
operations = [
migrations.AddField(
- model_name='area',
- name='entity',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='areas', to='v1.entity'),
+ model_name="area",
+ name="entity",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="areas",
+ to="v1.entity",
+ ),
),
migrations.AddField(
- model_name='area',
- name='level',
+ model_name="area",
+ name="level",
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
- model_name='area',
- name='parent',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='children', to='v1.area'),
+ model_name="area",
+ name="parent",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="children",
+ to="v1.area",
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0038_rename_level_area_administrative_level.py b/backend/apps/api/v1/migrations/0038_rename_level_area_administrative_level.py
index e1511709..0676113a 100644
--- a/backend/apps/api/v1/migrations/0038_rename_level_area_administrative_level.py
+++ b/backend/apps/api/v1/migrations/0038_rename_level_area_administrative_level.py
@@ -4,15 +4,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0037_area_entity_area_level_area_parent'),
+ ("v1", "0037_area_entity_area_level_area_parent"),
]
operations = [
migrations.RenameField(
- model_name='area',
- old_name='level',
- new_name='administrative_level',
+ model_name="area",
+ old_name="level",
+ new_name="administrative_level",
),
]
diff --git a/backend/apps/api/v1/migrations/0039_dataset_organizations.py b/backend/apps/api/v1/migrations/0039_dataset_organizations.py
index 82b9b79c..663ec627 100644
--- a/backend/apps/api/v1/migrations/0039_dataset_organizations.py
+++ b/backend/apps/api/v1/migrations/0039_dataset_organizations.py
@@ -5,7 +5,7 @@
def migrate_organization_to_organizations(apps, schema_editor):
- Dataset = apps.get_model('v1', 'Dataset')
+ Dataset = apps.get_model("v1", "Dataset")
for dataset in Dataset.objects.all():
if dataset.organization:
dataset.organizations.add(dataset.organization)
@@ -13,29 +13,28 @@ def migrate_organization_to_organizations(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
- ('v1', '0038_rename_level_area_administrative_level'),
+ ("v1", "0038_rename_level_area_administrative_level"),
]
operations = [
# Add new ManyToMany field
migrations.AddField(
- model_name='dataset',
- name='organizations',
+ model_name="dataset",
+ name="organizations",
field=models.ManyToManyField(
- related_name='datasets',
- to='v1.organization',
- verbose_name='Organizations',
- help_text='Organizations associated with this dataset',
+ related_name="datasets",
+ to="v1.organization",
+ verbose_name="Organizations",
+ help_text="Organizations associated with this dataset",
),
),
# Run data migration
migrations.RunPython(
- migrate_organization_to_organizations,
- reverse_code=migrations.RunPython.noop
+ migrate_organization_to_organizations, reverse_code=migrations.RunPython.noop
),
# Remove old ForeignKey field
migrations.RemoveField(
- model_name='dataset',
- name='organization',
+ model_name="dataset",
+ name="organization",
),
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/migrations/0040_table_publishers_data_cleaners.py b/backend/apps/api/v1/migrations/0040_table_publishers_data_cleaners.py
index 884109a6..861ffb02 100644
--- a/backend/apps/api/v1/migrations/0040_table_publishers_data_cleaners.py
+++ b/backend/apps/api/v1/migrations/0040_table_publishers_data_cleaners.py
@@ -7,12 +7,12 @@
def migrate_publishers_and_cleaners(apps, schema_editor):
"""Migrate existing ForeignKey relationships to ManyToMany"""
- Table = apps.get_model('v1', 'Table')
+ Table = apps.get_model("v1", "Table")
for table in Table.objects.all():
# Store old ForeignKey values
- old_publisher = getattr(table, 'published_by_old', None)
- old_cleaner = getattr(table, 'data_cleaned_by_old', None)
-
+ old_publisher = getattr(table, "published_by_old", None)
+ old_cleaner = getattr(table, "data_cleaned_by_old", None)
+
# Add to new M2M fields if they existed
if old_publisher:
table.published_by.add(old_publisher)
@@ -23,56 +23,55 @@ def migrate_publishers_and_cleaners(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ('v1', '0039_dataset_organizations'),
+ ("v1", "0039_dataset_organizations"),
]
operations = [
# Rename old fields temporarily
migrations.RenameField(
- model_name='table',
- old_name='published_by',
- new_name='published_by_old',
+ model_name="table",
+ old_name="published_by",
+ new_name="published_by_old",
),
migrations.RenameField(
- model_name='table',
- old_name='data_cleaned_by',
- new_name='data_cleaned_by_old',
+ model_name="table",
+ old_name="data_cleaned_by",
+ new_name="data_cleaned_by_old",
),
# Add new M2M fields
migrations.AddField(
- model_name='table',
- name='published_by',
+ model_name="table",
+ name="published_by",
field=models.ManyToManyField(
blank=True,
- related_name='tables_published',
+ related_name="tables_published",
to=settings.AUTH_USER_MODEL,
- verbose_name='Published by',
- help_text='People who published the table',
+ verbose_name="Published by",
+ help_text="People who published the table",
),
),
migrations.AddField(
- model_name='table',
- name='data_cleaned_by',
+ model_name="table",
+ name="data_cleaned_by",
field=models.ManyToManyField(
blank=True,
- related_name='tables_cleaned',
+ related_name="tables_cleaned",
to=settings.AUTH_USER_MODEL,
- verbose_name='Data cleaned by',
- help_text='People who cleaned the data',
+ verbose_name="Data cleaned by",
+ help_text="People who cleaned the data",
),
),
# Run data migration
migrations.RunPython(
- migrate_publishers_and_cleaners,
- reverse_code=migrations.RunPython.noop
+ migrate_publishers_and_cleaners, reverse_code=migrations.RunPython.noop
),
# Remove old fields
migrations.RemoveField(
- model_name='table',
- name='published_by_old',
+ model_name="table",
+ name="published_by_old",
),
migrations.RemoveField(
- model_name='table',
- name='data_cleaned_by_old',
+ model_name="table",
+ name="data_cleaned_by_old",
),
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/migrations/0041_remove_table_raw_data_url_and_more.py b/backend/apps/api/v1/migrations/0041_remove_table_raw_data_url_and_more.py
index 8c6e9035..5ff3ca79 100644
--- a/backend/apps/api/v1/migrations/0041_remove_table_raw_data_url_and_more.py
+++ b/backend/apps/api/v1/migrations/0041_remove_table_raw_data_url_and_more.py
@@ -1,123 +1,244 @@
# Generated by Django 4.2.16 on 2024-11-05 23:04
-from django.db import migrations, models
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0040_table_publishers_data_cleaners'),
+ ("v1", "0040_table_publishers_data_cleaners"),
]
operations = [
migrations.RemoveField(
- model_name='table',
- name='raw_data_url',
- ),
- migrations.AlterField(
- model_name='analysis',
- name='analysis_type',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='analyses', to='v1.analysistype'),
- ),
- migrations.AlterField(
- model_name='area',
- name='administrative_level',
- field=models.IntegerField(blank=True, choices=[(0, '0'), (1, '1'), (2, '2'), (3, '3'), (4, '4'), (5, '5')], null=True),
- ),
- migrations.AlterField(
- model_name='area',
- name='entity',
- field=models.ForeignKey(blank=True, limit_choices_to={'category__slug': 'spatial'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='areas', to='v1.entity'),
- ),
- migrations.AlterField(
- model_name='area',
- name='parent',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='v1.area'),
- ),
- migrations.AlterField(
- model_name='column',
- name='bigquery_type',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='columns', to='v1.bigquerytype'),
- ),
- migrations.AlterField(
- model_name='column',
- name='directory_primary_key',
- field=models.ForeignKey(blank=True, limit_choices_to={'is_primary_key': True, 'table__is_directory': True}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='columns', to='v1.column'),
- ),
- migrations.AlterField(
- model_name='column',
- name='observation_level',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='columns', to='v1.observationlevel'),
- ),
- migrations.AlterField(
- model_name='column',
- name='status',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='columns', to='v1.status'),
- ),
- migrations.AlterField(
- model_name='coverage',
- name='area',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='coverages', to='v1.area'),
- ),
- migrations.AlterField(
- model_name='entity',
- name='category',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='entities', to='v1.entitycategory'),
- ),
- migrations.AlterField(
- model_name='informationrequest',
- name='status',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='information_requests', to='v1.status'),
- ),
- migrations.AlterField(
- model_name='observationlevel',
- name='entity',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='observation_levels', to='v1.entity'),
- ),
- migrations.AlterField(
- model_name='organization',
- name='area',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='organizations', to='v1.area'),
- ),
- migrations.AlterField(
- model_name='poll',
- name='entity',
- field=models.ForeignKey(blank=True, limit_choices_to={'category__slug': 'datetime'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='polls', to='v1.entity'),
- ),
- migrations.AlterField(
- model_name='qualitycheck',
- name='pipeline',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='quality_checks', to='v1.pipeline'),
- ),
- migrations.AlterField(
- model_name='rawdatasource',
- name='availability',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='raw_data_sources', to='v1.availability'),
- ),
- migrations.AlterField(
- model_name='rawdatasource',
- name='license',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='raw_data_sources', to='v1.license'),
- ),
- migrations.AlterField(
- model_name='table',
- name='license',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tables', to='v1.license'),
- ),
- migrations.AlterField(
- model_name='table',
- name='partner_organization',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='partner_tables', to='v1.organization'),
- ),
- migrations.AlterField(
- model_name='table',
- name='pipeline',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tables', to='v1.pipeline'),
- ),
- migrations.AlterField(
- model_name='update',
- name='entity',
- field=models.ForeignKey(blank=True, limit_choices_to={'category__slug': 'datetime'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='updates', to='v1.entity'),
+ model_name="table",
+ name="raw_data_url",
+ ),
+ migrations.AlterField(
+ model_name="analysis",
+ name="analysis_type",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="analyses",
+ to="v1.analysistype",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="area",
+ name="administrative_level",
+ field=models.IntegerField(
+ blank=True,
+ choices=[(0, "0"), (1, "1"), (2, "2"), (3, "3"), (4, "4"), (5, "5")],
+ null=True,
+ ),
+ ),
+ migrations.AlterField(
+ model_name="area",
+ name="entity",
+ field=models.ForeignKey(
+ blank=True,
+ limit_choices_to={"category__slug": "spatial"},
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="areas",
+ to="v1.entity",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="area",
+ name="parent",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="children",
+ to="v1.area",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="column",
+ name="bigquery_type",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="columns",
+ to="v1.bigquerytype",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="column",
+ name="directory_primary_key",
+ field=models.ForeignKey(
+ blank=True,
+ limit_choices_to={"is_primary_key": True, "table__is_directory": True},
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="columns",
+ to="v1.column",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="column",
+ name="observation_level",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="columns",
+ to="v1.observationlevel",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="column",
+ name="status",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="columns",
+ to="v1.status",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="coverage",
+ name="area",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="coverages",
+ to="v1.area",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="entity",
+ name="category",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="entities",
+ to="v1.entitycategory",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="informationrequest",
+ name="status",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="information_requests",
+ to="v1.status",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="observationlevel",
+ name="entity",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="observation_levels",
+ to="v1.entity",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="organization",
+ name="area",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="organizations",
+ to="v1.area",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="poll",
+ name="entity",
+ field=models.ForeignKey(
+ blank=True,
+ limit_choices_to={"category__slug": "datetime"},
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="polls",
+ to="v1.entity",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="qualitycheck",
+ name="pipeline",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="quality_checks",
+ to="v1.pipeline",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="rawdatasource",
+ name="availability",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="raw_data_sources",
+ to="v1.availability",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="rawdatasource",
+ name="license",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="raw_data_sources",
+ to="v1.license",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="table",
+ name="license",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="tables",
+ to="v1.license",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="table",
+ name="partner_organization",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="partner_tables",
+ to="v1.organization",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="table",
+ name="pipeline",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="tables",
+ to="v1.pipeline",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="update",
+ name="entity",
+ field=models.ForeignKey(
+ blank=True,
+ limit_choices_to={"category__slug": "datetime"},
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="updates",
+ to="v1.entity",
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0042_measurementunit.py b/backend/apps/api/v1/migrations/0042_measurementunit.py
index f70c38f1..e3da1bfc 100644
--- a/backend/apps/api/v1/migrations/0042_measurementunit.py
+++ b/backend/apps/api/v1/migrations/0042_measurementunit.py
@@ -1,28 +1,28 @@
# Generated by Django 4.2.16 on 2024-11-05 23:20
-from django.db import migrations, models
import uuid
+from django.db import migrations, models
+
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0041_remove_table_raw_data_url_and_more'),
+ ("v1", "0041_remove_table_raw_data_url_and_more"),
]
operations = [
migrations.CreateModel(
- name='MeasurementUnit',
+ name="MeasurementUnit",
fields=[
- ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
- ('slug', models.SlugField(unique=True)),
- ('name', models.CharField(max_length=255)),
+ ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
+ ("slug", models.SlugField(unique=True)),
+ ("name", models.CharField(max_length=255)),
],
options={
- 'verbose_name': 'Measurement Unit',
- 'verbose_name_plural': 'Measurement Units',
- 'db_table': 'measurement_unit',
- 'ordering': ['slug'],
+ "verbose_name": "Measurement Unit",
+ "verbose_name_plural": "Measurement Units",
+ "db_table": "measurement_unit",
+ "ordering": ["slug"],
},
),
]
diff --git a/backend/apps/api/v1/migrations/0043_add_measurement_unit_translations.py b/backend/apps/api/v1/migrations/0043_add_measurement_unit_translations.py
index e7eb73ba..ad211d04 100644
--- a/backend/apps/api/v1/migrations/0043_add_measurement_unit_translations.py
+++ b/backend/apps/api/v1/migrations/0043_add_measurement_unit_translations.py
@@ -23,4 +23,4 @@ class Migration(migrations.Migration):
name="name_es",
field=models.CharField(max_length=255, null=True),
),
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/migrations/0044_measurementunitcategory_measurementunit_tex_and_more.py b/backend/apps/api/v1/migrations/0044_measurementunitcategory_measurementunit_tex_and_more.py
index d79629e7..6a70b89c 100644
--- a/backend/apps/api/v1/migrations/0044_measurementunitcategory_measurementunit_tex_and_more.py
+++ b/backend/apps/api/v1/migrations/0044_measurementunitcategory_measurementunit_tex_and_more.py
@@ -1,42 +1,47 @@
# Generated by Django 4.2.16 on 2024-11-06 00:03
-from django.db import migrations, models
-import django.db.models.deletion
import uuid
+import django.db.models.deletion
+from django.db import migrations, models
+
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0043_add_measurement_unit_translations'),
+ ("v1", "0043_add_measurement_unit_translations"),
]
operations = [
migrations.CreateModel(
- name='MeasurementUnitCategory',
+ name="MeasurementUnitCategory",
fields=[
- ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
- ('slug', models.SlugField(unique=True)),
- ('name', models.CharField(max_length=255)),
- ('name_pt', models.CharField(max_length=255, null=True)),
- ('name_en', models.CharField(max_length=255, null=True)),
- ('name_es', models.CharField(max_length=255, null=True)),
+ ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
+ ("slug", models.SlugField(unique=True)),
+ ("name", models.CharField(max_length=255)),
+ ("name_pt", models.CharField(max_length=255, null=True)),
+ ("name_en", models.CharField(max_length=255, null=True)),
+ ("name_es", models.CharField(max_length=255, null=True)),
],
options={
- 'verbose_name': 'Measurement Unit Category',
- 'verbose_name_plural': 'Measurement Unit Categories',
- 'db_table': 'measurement_unit_category',
- 'ordering': ['slug'],
+ "verbose_name": "Measurement Unit Category",
+ "verbose_name_plural": "Measurement Unit Categories",
+ "db_table": "measurement_unit_category",
+ "ordering": ["slug"],
},
),
migrations.AddField(
- model_name='measurementunit',
- name='tex',
+ model_name="measurementunit",
+ name="tex",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
- model_name='measurementunit',
- name='category',
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='measurement_units', to='v1.measurementunitcategory'),
+ model_name="measurementunit",
+ name="category",
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="measurement_units",
+ to="v1.measurementunitcategory",
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0045_add_measurement_categories_and_units.py b/backend/apps/api/v1/migrations/0045_add_measurement_categories_and_units.py
index fbff222b..ccaf3bec 100644
--- a/backend/apps/api/v1/migrations/0045_add_measurement_categories_and_units.py
+++ b/backend/apps/api/v1/migrations/0045_add_measurement_categories_and_units.py
@@ -1,198 +1,627 @@
# -*- coding: utf-8 -*-
from django.db import migrations
+
def create_categories_and_units(apps, schema_editor):
- MeasurementUnitCategory = apps.get_model('v1', 'MeasurementUnitCategory')
- MeasurementUnit = apps.get_model('v1', 'MeasurementUnit')
-
+ MeasurementUnitCategory = apps.get_model("v1", "MeasurementUnitCategory")
+ MeasurementUnit = apps.get_model("v1", "MeasurementUnit")
+
# Create categories
categories = {
- 'distance': {
- 'name': 'Distância',
- 'name_pt': 'Distância',
- 'name_en': 'Distance',
- 'name_es': 'Distancia'
- },
- 'area': {
- 'name': 'Área',
- 'name_pt': 'Área',
- 'name_en': 'Area',
- 'name_es': 'Área'
- },
- 'mass': {
- 'name': 'Massa',
- 'name_pt': 'Massa',
- 'name_en': 'Mass',
- 'name_es': 'Masa'
- },
- 'volume': {
- 'name': 'Volume',
- 'name_pt': 'Volume',
- 'name_en': 'Volume',
- 'name_es': 'Volumen'
- },
- 'energy': {
- 'name': 'Energia',
- 'name_pt': 'Energia',
- 'name_en': 'Energy',
- 'name_es': 'Energía'
- },
- 'people': {
- 'name': 'Pessoas',
- 'name_pt': 'Pessoas',
- 'name_en': 'People',
- 'name_es': 'Personas'
- },
- 'currency': {
- 'name': 'Moeda',
- 'name_pt': 'Moeda',
- 'name_en': 'Currency',
- 'name_es': 'Moneda'
- },
- 'economics': {
- 'name': 'Economia',
- 'name_pt': 'Economia',
- 'name_en': 'Economics',
- 'name_es': 'Economía'
- },
- 'datetime': {
- 'name': 'Data/Hora',
- 'name_pt': 'Data/Hora',
- 'name_en': 'Date/Time',
- 'name_es': 'Fecha/Hora'
- },
- 'percentage': {
- 'name': 'Porcentagem',
- 'name_pt': 'Porcentagem',
- 'name_en': 'Percentage',
- 'name_es': 'Porcentaje'
- }
+ "distance": {
+ "name": "Distância",
+ "name_pt": "Distância",
+ "name_en": "Distance",
+ "name_es": "Distancia",
+ },
+ "area": {"name": "Área", "name_pt": "Área", "name_en": "Area", "name_es": "Área"},
+ "mass": {"name": "Massa", "name_pt": "Massa", "name_en": "Mass", "name_es": "Masa"},
+ "volume": {
+ "name": "Volume",
+ "name_pt": "Volume",
+ "name_en": "Volume",
+ "name_es": "Volumen",
+ },
+ "energy": {
+ "name": "Energia",
+ "name_pt": "Energia",
+ "name_en": "Energy",
+ "name_es": "Energía",
+ },
+ "people": {
+ "name": "Pessoas",
+ "name_pt": "Pessoas",
+ "name_en": "People",
+ "name_es": "Personas",
+ },
+ "currency": {
+ "name": "Moeda",
+ "name_pt": "Moeda",
+ "name_en": "Currency",
+ "name_es": "Moneda",
+ },
+ "economics": {
+ "name": "Economia",
+ "name_pt": "Economia",
+ "name_en": "Economics",
+ "name_es": "Economía",
+ },
+ "datetime": {
+ "name": "Data/Hora",
+ "name_pt": "Data/Hora",
+ "name_en": "Date/Time",
+ "name_es": "Fecha/Hora",
+ },
+ "percentage": {
+ "name": "Porcentagem",
+ "name_pt": "Porcentagem",
+ "name_en": "Percentage",
+ "name_es": "Porcentaje",
+ },
}
-
+
category_objects = {}
for slug, names in categories.items():
category = MeasurementUnitCategory.objects.create(
slug=slug,
- name=names['name'],
- name_pt=names['name_pt'],
- name_en=names['name_en'],
- name_es=names['name_es']
+ name=names["name"],
+ name_pt=names["name_pt"],
+ name_en=names["name_en"],
+ name_es=names["name_es"],
)
category_objects[slug] = category
# Define units with their categories and translations
units = {
# Distance
- 'kilometer': {'category': 'distance', 'name': 'Kilometer', 'name_pt': 'Quilômetro', 'name_en': 'Kilometer', 'name_es': 'Kilómetro', 'tex': 'km'},
- 'meter': {'category': 'distance', 'name': 'Meter', 'name_pt': 'Metro', 'name_en': 'Meter', 'name_es': 'Metro', 'tex': 'm'},
- 'centimeter': {'category': 'distance', 'name': 'Centimeter', 'name_pt': 'Centímetro', 'name_en': 'Centimeter', 'name_es': 'Centímetro', 'tex': 'cm'},
- 'mile': {'category': 'distance', 'name': 'Mile', 'name_pt': 'Milha', 'name_en': 'Mile', 'name_es': 'Milla', 'tex': 'mi'},
- 'foot': {'category': 'distance', 'name': 'Foot', 'name_pt': 'Pé', 'name_en': 'Foot', 'name_es': 'Pie', 'tex': 'pé'},
- 'inch': {'category': 'distance', 'name': 'Inch', 'name_pt': 'Polegada', 'name_en': 'Inch', 'name_es': 'Pulgada', 'tex': 'polegada'},
-
+ "kilometer": {
+ "category": "distance",
+ "name": "Kilometer",
+ "name_pt": "Quilômetro",
+ "name_en": "Kilometer",
+ "name_es": "Kilómetro",
+ "tex": "km",
+ },
+ "meter": {
+ "category": "distance",
+ "name": "Meter",
+ "name_pt": "Metro",
+ "name_en": "Meter",
+ "name_es": "Metro",
+ "tex": "m",
+ },
+ "centimeter": {
+ "category": "distance",
+ "name": "Centimeter",
+ "name_pt": "Centímetro",
+ "name_en": "Centimeter",
+ "name_es": "Centímetro",
+ "tex": "cm",
+ },
+ "mile": {
+ "category": "distance",
+ "name": "Mile",
+ "name_pt": "Milha",
+ "name_en": "Mile",
+ "name_es": "Milla",
+ "tex": "mi",
+ },
+ "foot": {
+ "category": "distance",
+ "name": "Foot",
+ "name_pt": "Pé",
+ "name_en": "Foot",
+ "name_es": "Pie",
+ "tex": "pé",
+ },
+ "inch": {
+ "category": "distance",
+ "name": "Inch",
+ "name_pt": "Polegada",
+ "name_en": "Inch",
+ "name_es": "Pulgada",
+ "tex": "polegada",
+ },
# Area
- 'kilometer2': {'category': 'area', 'name': 'Square Kilometer', 'name_pt': 'Quilômetro Quadrado', 'name_en': 'Square Kilometer', 'name_es': 'Kilómetro Cuadrado', 'tex': 'km^2'},
- 'meter2': {'category': 'area', 'name': 'Square Meter', 'name_pt': 'Metro Quadrado', 'name_en': 'Square Meter', 'name_es': 'Metro Cuadrado', 'tex': 'm^2'},
- 'centimeter2': {'category': 'area', 'name': 'Square Centimeter', 'name_pt': 'Centímetro Quadrado', 'name_en': 'Square Centimeter', 'name_es': 'Centímetro Cuadrado', 'tex': 'cm^2'},
- 'hectare': {'category': 'area', 'name': 'Hectare', 'name_pt': 'Hectare', 'name_en': 'Hectare', 'name_es': 'Hectárea', 'tex': 'ha'},
- 'acre': {'category': 'area', 'name': 'Acre', 'name_pt': 'Acre', 'name_en': 'Acre', 'name_es': 'Acre', 'tex': 'ac'},
- 'mile2': {'category': 'area', 'name': 'Square Mile', 'name_pt': 'Milha Quadrada', 'name_en': 'Square Mile', 'name_es': 'Milla Cuadrada', 'tex': 'mi^2'},
- 'foot2': {'category': 'area', 'name': 'Square Foot', 'name_pt': 'Pé Quadrado', 'name_en': 'Square Foot', 'name_es': 'Pie Cuadrado', 'tex': 'ft^2'},
- 'inch2': {'category': 'area', 'name': 'Square Inch', 'name_pt': 'Polegada Quadrada', 'name_en': 'Square Inch', 'name_es': 'Pulgada Cuadrada', 'tex': 'in^2'},
-
+ "kilometer2": {
+ "category": "area",
+ "name": "Square Kilometer",
+ "name_pt": "Quilômetro Quadrado",
+ "name_en": "Square Kilometer",
+ "name_es": "Kilómetro Cuadrado",
+ "tex": "km^2",
+ },
+ "meter2": {
+ "category": "area",
+ "name": "Square Meter",
+ "name_pt": "Metro Quadrado",
+ "name_en": "Square Meter",
+ "name_es": "Metro Cuadrado",
+ "tex": "m^2",
+ },
+ "centimeter2": {
+ "category": "area",
+ "name": "Square Centimeter",
+ "name_pt": "Centímetro Quadrado",
+ "name_en": "Square Centimeter",
+ "name_es": "Centímetro Cuadrado",
+ "tex": "cm^2",
+ },
+ "hectare": {
+ "category": "area",
+ "name": "Hectare",
+ "name_pt": "Hectare",
+ "name_en": "Hectare",
+ "name_es": "Hectárea",
+ "tex": "ha",
+ },
+ "acre": {
+ "category": "area",
+ "name": "Acre",
+ "name_pt": "Acre",
+ "name_en": "Acre",
+ "name_es": "Acre",
+ "tex": "ac",
+ },
+ "mile2": {
+ "category": "area",
+ "name": "Square Mile",
+ "name_pt": "Milha Quadrada",
+ "name_en": "Square Mile",
+ "name_es": "Milla Cuadrada",
+ "tex": "mi^2",
+ },
+ "foot2": {
+ "category": "area",
+ "name": "Square Foot",
+ "name_pt": "Pé Quadrado",
+ "name_en": "Square Foot",
+ "name_es": "Pie Cuadrado",
+ "tex": "ft^2",
+ },
+ "inch2": {
+ "category": "area",
+ "name": "Square Inch",
+ "name_pt": "Polegada Quadrada",
+ "name_en": "Square Inch",
+ "name_es": "Pulgada Cuadrada",
+ "tex": "in^2",
+ },
# Mass
- 'ton': {'category': 'mass', 'name': 'Ton', 'name_pt': 'Tonelada', 'name_en': 'Ton', 'name_es': 'Tonelada', 'tex': 'ton'},
- 'kilogram': {'category': 'mass', 'name': 'Kilogram', 'name_pt': 'Quilograma', 'name_en': 'Kilogram', 'name_es': 'Kilogramo', 'tex': 'kg'},
- 'gram': {'category': 'mass', 'name': 'Gram', 'name_pt': 'Grama', 'name_en': 'Gram', 'name_es': 'Gramo', 'tex': 'g'},
- 'miligram': {'category': 'mass', 'name': 'Milligram', 'name_pt': 'Miligrama', 'name_en': 'Milligram', 'name_es': 'Miligramo', 'tex': 'mg'},
- 'ounce': {'category': 'mass', 'name': 'Ounce', 'name_pt': 'Onça', 'name_en': 'Ounce', 'name_es': 'Onza', 'tex': 'oz'},
-
+ "ton": {
+ "category": "mass",
+ "name": "Ton",
+ "name_pt": "Tonelada",
+ "name_en": "Ton",
+ "name_es": "Tonelada",
+ "tex": "ton",
+ },
+ "kilogram": {
+ "category": "mass",
+ "name": "Kilogram",
+ "name_pt": "Quilograma",
+ "name_en": "Kilogram",
+ "name_es": "Kilogramo",
+ "tex": "kg",
+ },
+ "gram": {
+ "category": "mass",
+ "name": "Gram",
+ "name_pt": "Grama",
+ "name_en": "Gram",
+ "name_es": "Gramo",
+ "tex": "g",
+ },
+ "miligram": {
+ "category": "mass",
+ "name": "Milligram",
+ "name_pt": "Miligrama",
+ "name_en": "Milligram",
+ "name_es": "Miligramo",
+ "tex": "mg",
+ },
+ "ounce": {
+ "category": "mass",
+ "name": "Ounce",
+ "name_pt": "Onça",
+ "name_en": "Ounce",
+ "name_es": "Onza",
+ "tex": "oz",
+ },
# Volume
- 'gallon': {'category': 'volume', 'name': 'Gallon', 'name_pt': 'Galão', 'name_en': 'Gallon', 'name_es': 'Galón', 'tex': 'gal'},
- 'litre': {'category': 'volume', 'name': 'Litre', 'name_pt': 'Litro', 'name_en': 'Litre', 'name_es': 'Litro', 'tex': 'l'},
- 'militre': {'category': 'volume', 'name': 'Millilitre', 'name_pt': 'Mililitro', 'name_en': 'Millilitre', 'name_es': 'Mililitro', 'tex': 'ml'},
- 'meter3': {'category': 'volume', 'name': 'Cubic Meter', 'name_pt': 'Metro Cúbico', 'name_en': 'Cubic Meter', 'name_es': 'Metro Cúbico', 'tex': 'm^3'},
- 'mile3': {'category': 'volume', 'name': 'Cubic Mile', 'name_pt': 'Milha Cúbica', 'name_en': 'Cubic Mile', 'name_es': 'Milla Cúbica', 'tex': 'mi^3'},
- 'foot3': {'category': 'volume', 'name': 'Cubic Foot', 'name_pt': 'Pé Cúbico', 'name_en': 'Cubic Foot', 'name_es': 'Pie Cúbico', 'tex': 'ft^3'},
- 'inch3': {'category': 'volume', 'name': 'Cubic Inch', 'name_pt': 'Polegada Cúbica', 'name_en': 'Cubic Inch', 'name_es': 'Pulgada Cúbica', 'tex': 'in^3'},
- 'barrel': {'category': 'volume', 'name': 'Barrel', 'name_pt': 'Barril', 'name_en': 'Barrel', 'name_es': 'Barril', 'tex': 'barrel'},
- 'boe': {'category': 'volume', 'name': 'Barrel of Oil Equivalent', 'name_pt': 'Barril de Óleo Equivalente', 'name_en': 'Barrel of Oil Equivalent', 'name_es': 'Barril de Petróleo Equivalente', 'tex': 'barrel_e'},
- 'toe': {'category': 'volume', 'name': 'Tonne of Oil Equivalent', 'name_pt': 'Tonelada de Óleo Equivalente', 'name_en': 'Tonne of Oil Equivalent', 'name_es': 'Tonelada de Petróleo Equivalente', 'tex': 'ton_e'},
-
+ "gallon": {
+ "category": "volume",
+ "name": "Gallon",
+ "name_pt": "Galão",
+ "name_en": "Gallon",
+ "name_es": "Galón",
+ "tex": "gal",
+ },
+ "litre": {
+ "category": "volume",
+ "name": "Litre",
+ "name_pt": "Litro",
+ "name_en": "Litre",
+ "name_es": "Litro",
+ "tex": "l",
+ },
+ "militre": {
+ "category": "volume",
+ "name": "Millilitre",
+ "name_pt": "Mililitro",
+ "name_en": "Millilitre",
+ "name_es": "Mililitro",
+ "tex": "ml",
+ },
+ "meter3": {
+ "category": "volume",
+ "name": "Cubic Meter",
+ "name_pt": "Metro Cúbico",
+ "name_en": "Cubic Meter",
+ "name_es": "Metro Cúbico",
+ "tex": "m^3",
+ },
+ "mile3": {
+ "category": "volume",
+ "name": "Cubic Mile",
+ "name_pt": "Milha Cúbica",
+ "name_en": "Cubic Mile",
+ "name_es": "Milla Cúbica",
+ "tex": "mi^3",
+ },
+ "foot3": {
+ "category": "volume",
+ "name": "Cubic Foot",
+ "name_pt": "Pé Cúbico",
+ "name_en": "Cubic Foot",
+ "name_es": "Pie Cúbico",
+ "tex": "ft^3",
+ },
+ "inch3": {
+ "category": "volume",
+ "name": "Cubic Inch",
+ "name_pt": "Polegada Cúbica",
+ "name_en": "Cubic Inch",
+ "name_es": "Pulgada Cúbica",
+ "tex": "in^3",
+ },
+ "barrel": {
+ "category": "volume",
+ "name": "Barrel",
+ "name_pt": "Barril",
+ "name_en": "Barrel",
+ "name_es": "Barril",
+ "tex": "barrel",
+ },
+ "boe": {
+ "category": "volume",
+ "name": "Barrel of Oil Equivalent",
+ "name_pt": "Barril de Óleo Equivalente",
+ "name_en": "Barrel of Oil Equivalent",
+ "name_es": "Barril de Petróleo Equivalente",
+ "tex": "barrel_e",
+ },
+ "toe": {
+ "category": "volume",
+ "name": "Tonne of Oil Equivalent",
+ "name_pt": "Tonelada de Óleo Equivalente",
+ "name_en": "Tonne of Oil Equivalent",
+ "name_es": "Tonelada de Petróleo Equivalente",
+ "tex": "ton_e",
+ },
# Energy
- 'watt': {'category': 'energy', 'name': 'Watt', 'name_pt': 'Watt', 'name_en': 'Watt', 'name_es': 'Vatio', 'tex': 'W'},
- 'kilowatt': {'category': 'energy', 'name': 'Kilowatt', 'name_pt': 'Kilowatt', 'name_en': 'Kilowatt', 'name_es': 'Kilovatio', 'tex': 'kW'},
- 'megawatt': {'category': 'energy', 'name': 'Megawatt', 'name_pt': 'Megawatt', 'name_en': 'Megawatt', 'name_es': 'Megavatio', 'tex': 'mW'},
- 'gigawatt': {'category': 'energy', 'name': 'Gigawatt', 'name_pt': 'Gigawatt', 'name_en': 'Gigawatt', 'name_es': 'Gigavatio', 'tex': 'gW'},
- 'terawatt': {'category': 'energy', 'name': 'Terawatt', 'name_pt': 'Terawatt', 'name_en': 'Terawatt', 'name_es': 'Teravatio', 'tex': 'tW'},
- 'volt': {'category': 'energy', 'name': 'Volt', 'name_pt': 'Volt', 'name_en': 'Volt', 'name_es': 'Voltio', 'tex': 'V'},
- 'kilovolt': {'category': 'energy', 'name': 'Kilovolt', 'name_pt': 'Kilovolt', 'name_en': 'Kilovolt', 'name_es': 'Kilovoltio', 'tex': 'kV'},
- 'megavolt': {'category': 'energy', 'name': 'Megavolt', 'name_pt': 'Megavolt', 'name_en': 'Megavolt', 'name_es': 'Megavoltio', 'tex': 'mV'},
- 'gigavolt': {'category': 'energy', 'name': 'Gigavolt', 'name_pt': 'Gigavolt', 'name_en': 'Gigavolt', 'name_es': 'Gigavoltio', 'tex': 'gV'},
- 'teravolt': {'category': 'energy', 'name': 'Teravolt', 'name_pt': 'Teravolt', 'name_en': 'Teravolt', 'name_es': 'Teravoltio', 'tex': 'tV'},
-
+ "watt": {
+ "category": "energy",
+ "name": "Watt",
+ "name_pt": "Watt",
+ "name_en": "Watt",
+ "name_es": "Vatio",
+ "tex": "W",
+ },
+ "kilowatt": {
+ "category": "energy",
+ "name": "Kilowatt",
+ "name_pt": "Kilowatt",
+ "name_en": "Kilowatt",
+ "name_es": "Kilovatio",
+ "tex": "kW",
+ },
+ "megawatt": {
+ "category": "energy",
+ "name": "Megawatt",
+ "name_pt": "Megawatt",
+ "name_en": "Megawatt",
+ "name_es": "Megavatio",
+ "tex": "mW",
+ },
+ "gigawatt": {
+ "category": "energy",
+ "name": "Gigawatt",
+ "name_pt": "Gigawatt",
+ "name_en": "Gigawatt",
+ "name_es": "Gigavatio",
+ "tex": "gW",
+ },
+ "terawatt": {
+ "category": "energy",
+ "name": "Terawatt",
+ "name_pt": "Terawatt",
+ "name_en": "Terawatt",
+ "name_es": "Teravatio",
+ "tex": "tW",
+ },
+ "volt": {
+ "category": "energy",
+ "name": "Volt",
+ "name_pt": "Volt",
+ "name_en": "Volt",
+ "name_es": "Voltio",
+ "tex": "V",
+ },
+ "kilovolt": {
+ "category": "energy",
+ "name": "Kilovolt",
+ "name_pt": "Kilovolt",
+ "name_en": "Kilovolt",
+ "name_es": "Kilovoltio",
+ "tex": "kV",
+ },
+ "megavolt": {
+ "category": "energy",
+ "name": "Megavolt",
+ "name_pt": "Megavolt",
+ "name_en": "Megavolt",
+ "name_es": "Megavoltio",
+ "tex": "mV",
+ },
+ "gigavolt": {
+ "category": "energy",
+ "name": "Gigavolt",
+ "name_pt": "Gigavolt",
+ "name_en": "Gigavolt",
+ "name_es": "Gigavoltio",
+ "tex": "gV",
+ },
+ "teravolt": {
+ "category": "energy",
+ "name": "Teravolt",
+ "name_pt": "Teravolt",
+ "name_en": "Teravolt",
+ "name_es": "Teravoltio",
+ "tex": "tV",
+ },
# People
- 'person': {'category': 'people', 'name': 'Person', 'name_pt': 'Pessoa', 'name_en': 'Person', 'name_es': 'Persona', 'tex': 'per'},
- 'household': {'category': 'people', 'name': 'Household', 'name_pt': 'Domicílio', 'name_en': 'Household', 'name_es': 'Hogar', 'tex': 'dom'},
-
+ "person": {
+ "category": "people",
+ "name": "Person",
+ "name_pt": "Pessoa",
+ "name_en": "Person",
+ "name_es": "Persona",
+ "tex": "per",
+ },
+ "household": {
+ "category": "people",
+ "name": "Household",
+ "name_pt": "Domicílio",
+ "name_en": "Household",
+ "name_es": "Hogar",
+ "tex": "dom",
+ },
# Currency
- 'ars': {'category': 'currency', 'name': 'Argentine Peso', 'name_pt': 'Peso Argentino', 'name_en': 'Argentine Peso', 'name_es': 'Peso Argentino', 'tex': 'ARS'},
- 'brl': {'category': 'currency', 'name': 'Brazilian Real', 'name_pt': 'Real', 'name_en': 'Brazilian Real', 'name_es': 'Real Brasileño', 'tex': 'BRL'},
- 'cad': {'category': 'currency', 'name': 'Canadian Dollar', 'name_pt': 'Dólar Canadense', 'name_en': 'Canadian Dollar', 'name_es': 'Dólar Canadiense', 'tex': 'CAD'},
- 'clp': {'category': 'currency', 'name': 'Chilean Peso', 'name_pt': 'Peso Chileno', 'name_en': 'Chilean Peso', 'name_es': 'Peso Chileno', 'tex': 'CLP'},
- 'usd': {'category': 'currency', 'name': 'US Dollar', 'name_pt': 'Dólar Americano', 'name_en': 'US Dollar', 'name_es': 'Dólar Estadounidense', 'tex': 'USD'},
- 'eur': {'category': 'currency', 'name': 'Euro', 'name_pt': 'Euro', 'name_en': 'Euro', 'name_es': 'Euro', 'tex': 'EUR'},
- 'gbp': {'category': 'currency', 'name': 'British Pound', 'name_pt': 'Libra Esterlina', 'name_en': 'British Pound', 'name_es': 'Libra Esterlina', 'tex': 'GBP'},
- 'cny': {'category': 'currency', 'name': 'Chinese Yuan', 'name_pt': 'Yuan Chinês', 'name_en': 'Chinese Yuan', 'name_es': 'Yuan Chino', 'tex': 'CNY'},
- 'inr': {'category': 'currency', 'name': 'Indian Rupee', 'name_pt': 'Rupia Indiana', 'name_en': 'Indian Rupee', 'name_es': 'Rupia India', 'tex': 'INR'},
- 'jpy': {'category': 'currency', 'name': 'Japanese Yen', 'name_pt': 'Iene Japonês', 'name_en': 'Japanese Yen', 'name_es': 'Yen Japonés', 'tex': 'JPY'},
- 'zar': {'category': 'currency', 'name': 'South African Rand', 'name_pt': 'Rand Sul-Africano', 'name_en': 'South African Rand', 'name_es': 'Rand Sudafricano', 'tex': 'ZAR'},
-
+ "ars": {
+ "category": "currency",
+ "name": "Argentine Peso",
+ "name_pt": "Peso Argentino",
+ "name_en": "Argentine Peso",
+ "name_es": "Peso Argentino",
+ "tex": "ARS",
+ },
+ "brl": {
+ "category": "currency",
+ "name": "Brazilian Real",
+ "name_pt": "Real",
+ "name_en": "Brazilian Real",
+ "name_es": "Real Brasileño",
+ "tex": "BRL",
+ },
+ "cad": {
+ "category": "currency",
+ "name": "Canadian Dollar",
+ "name_pt": "Dólar Canadense",
+ "name_en": "Canadian Dollar",
+ "name_es": "Dólar Canadiense",
+ "tex": "CAD",
+ },
+ "clp": {
+ "category": "currency",
+ "name": "Chilean Peso",
+ "name_pt": "Peso Chileno",
+ "name_en": "Chilean Peso",
+ "name_es": "Peso Chileno",
+ "tex": "CLP",
+ },
+ "usd": {
+ "category": "currency",
+ "name": "US Dollar",
+ "name_pt": "Dólar Americano",
+ "name_en": "US Dollar",
+ "name_es": "Dólar Estadounidense",
+ "tex": "USD",
+ },
+ "eur": {
+ "category": "currency",
+ "name": "Euro",
+ "name_pt": "Euro",
+ "name_en": "Euro",
+ "name_es": "Euro",
+ "tex": "EUR",
+ },
+ "gbp": {
+ "category": "currency",
+ "name": "British Pound",
+ "name_pt": "Libra Esterlina",
+ "name_en": "British Pound",
+ "name_es": "Libra Esterlina",
+ "tex": "GBP",
+ },
+ "cny": {
+ "category": "currency",
+ "name": "Chinese Yuan",
+ "name_pt": "Yuan Chinês",
+ "name_en": "Chinese Yuan",
+ "name_es": "Yuan Chino",
+ "tex": "CNY",
+ },
+ "inr": {
+ "category": "currency",
+ "name": "Indian Rupee",
+ "name_pt": "Rupia Indiana",
+ "name_en": "Indian Rupee",
+ "name_es": "Rupia India",
+ "tex": "INR",
+ },
+ "jpy": {
+ "category": "currency",
+ "name": "Japanese Yen",
+ "name_pt": "Iene Japonês",
+ "name_en": "Japanese Yen",
+ "name_es": "Yen Japonés",
+ "tex": "JPY",
+ },
+ "zar": {
+ "category": "currency",
+ "name": "South African Rand",
+ "name_pt": "Rand Sul-Africano",
+ "name_en": "South African Rand",
+ "name_es": "Rand Sudafricano",
+ "tex": "ZAR",
+ },
# Economics
- 'minimum_wage': {'category': 'economics', 'name': 'Minimum Wage', 'name_pt': 'Salário Mínimo', 'name_en': 'Minimum Wage', 'name_es': 'Salario Mínimo', 'tex': 'sm'},
-
+ "minimum_wage": {
+ "category": "economics",
+ "name": "Minimum Wage",
+ "name_pt": "Salário Mínimo",
+ "name_en": "Minimum Wage",
+ "name_es": "Salario Mínimo",
+ "tex": "sm",
+ },
# Date-time
- 'year': {'category': 'datetime', 'name': 'Year', 'name_pt': 'Ano', 'name_en': 'Year', 'name_es': 'Año', 'tex': 'y'},
- 'semester': {'category': 'datetime', 'name': 'Semester', 'name_pt': 'Semestre', 'name_en': 'Semester', 'name_es': 'Semestre', 'tex': 'sem'},
- 'quarter': {'category': 'datetime', 'name': 'Quarter', 'name_pt': 'Trimestre', 'name_en': 'Quarter', 'name_es': 'Trimestre', 'tex': 'q'},
- 'bimester': {'category': 'datetime', 'name': 'Bimester', 'name_pt': 'Bimestre', 'name_en': 'Bimester', 'name_es': 'Bimestre', 'tex': 'bim'},
- 'month': {'category': 'datetime', 'name': 'Month', 'name_pt': 'Mês', 'name_en': 'Month', 'name_es': 'Mes', 'tex': 'm'},
- 'week': {'category': 'datetime', 'name': 'Week', 'name_pt': 'Semana', 'name_en': 'Week', 'name_es': 'Semana', 'tex': 'w'},
- 'day': {'category': 'datetime', 'name': 'Day', 'name_pt': 'Dia', 'name_en': 'Day', 'name_es': 'Día', 'tex': 'd'},
- 'hour': {'category': 'datetime', 'name': 'Hour', 'name_pt': 'Hora', 'name_en': 'Hour', 'name_es': 'Hora', 'tex': 'h'},
- 'minute': {'category': 'datetime', 'name': 'Minute', 'name_pt': 'Minuto', 'name_en': 'Minute', 'name_es': 'Minuto', 'tex': 'min'},
- 'second': {'category': 'datetime', 'name': 'Second', 'name_pt': 'Segundo', 'name_en': 'Second', 'name_es': 'Segundo', 'tex': 's'},
-
+ "year": {
+ "category": "datetime",
+ "name": "Year",
+ "name_pt": "Ano",
+ "name_en": "Year",
+ "name_es": "Año",
+ "tex": "y",
+ },
+ "semester": {
+ "category": "datetime",
+ "name": "Semester",
+ "name_pt": "Semestre",
+ "name_en": "Semester",
+ "name_es": "Semestre",
+ "tex": "sem",
+ },
+ "quarter": {
+ "category": "datetime",
+ "name": "Quarter",
+ "name_pt": "Trimestre",
+ "name_en": "Quarter",
+ "name_es": "Trimestre",
+ "tex": "q",
+ },
+ "bimester": {
+ "category": "datetime",
+ "name": "Bimester",
+ "name_pt": "Bimestre",
+ "name_en": "Bimester",
+ "name_es": "Bimestre",
+ "tex": "bim",
+ },
+ "month": {
+ "category": "datetime",
+ "name": "Month",
+ "name_pt": "Mês",
+ "name_en": "Month",
+ "name_es": "Mes",
+ "tex": "m",
+ },
+ "week": {
+ "category": "datetime",
+ "name": "Week",
+ "name_pt": "Semana",
+ "name_en": "Week",
+ "name_es": "Semana",
+ "tex": "w",
+ },
+ "day": {
+ "category": "datetime",
+ "name": "Day",
+ "name_pt": "Dia",
+ "name_en": "Day",
+ "name_es": "Día",
+ "tex": "d",
+ },
+ "hour": {
+ "category": "datetime",
+ "name": "Hour",
+ "name_pt": "Hora",
+ "name_en": "Hour",
+ "name_es": "Hora",
+ "tex": "h",
+ },
+ "minute": {
+ "category": "datetime",
+ "name": "Minute",
+ "name_pt": "Minuto",
+ "name_en": "Minute",
+ "name_es": "Minuto",
+ "tex": "min",
+ },
+ "second": {
+ "category": "datetime",
+ "name": "Second",
+ "name_pt": "Segundo",
+ "name_en": "Second",
+ "name_es": "Segundo",
+ "tex": "s",
+ },
# Percentage
- 'percent': {'category': 'percentage', 'name': 'Percentage', 'name_pt': 'Porcentagem', 'name_en': 'Percentage', 'name_es': 'Porcentaje', 'tex': '%'},
+ "percent": {
+ "category": "percentage",
+ "name": "Percentage",
+ "name_pt": "Porcentagem",
+ "name_en": "Percentage",
+ "name_es": "Porcentaje",
+ "tex": "%",
+ },
}
-
+
for slug, unit_data in units.items():
MeasurementUnit.objects.create(
slug=slug,
- name=unit_data['name'],
- name_pt=unit_data['name_pt'],
- name_en=unit_data['name_en'],
- name_es=unit_data['name_es'],
- tex=unit_data['tex'],
- category=category_objects[unit_data['category']]
+ name=unit_data["name"],
+ name_pt=unit_data["name_pt"],
+ name_en=unit_data["name_en"],
+ name_es=unit_data["name_es"],
+ tex=unit_data["tex"],
+ category=category_objects[unit_data["category"]],
)
+
def reverse_categories_and_units(apps, schema_editor):
- MeasurementUnitCategory = apps.get_model('v1', 'MeasurementUnitCategory')
- MeasurementUnit = apps.get_model('v1', 'MeasurementUnit')
-
+ MeasurementUnitCategory = apps.get_model("v1", "MeasurementUnitCategory")
+ MeasurementUnit = apps.get_model("v1", "MeasurementUnit")
+
MeasurementUnit.objects.all().delete()
MeasurementUnitCategory.objects.all().delete()
+
class Migration(migrations.Migration):
dependencies = [
- ('v1', '0044_measurementunitcategory_measurementunit_tex_and_more'),
+ ("v1", "0044_measurementunitcategory_measurementunit_tex_and_more"),
]
operations = [
- migrations.RunPython(
- create_categories_and_units,
- reverse_categories_and_units
- ),
- ]
\ No newline at end of file
+ migrations.RunPython(create_categories_and_units, reverse_categories_and_units),
+ ]
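Note: the data migration above seeds MeasurementUnitCategory rows first and then creates one MeasurementUnit per entry in the units dictionary, wiring each unit to its category through category_objects. A minimal sanity check that could be run in a Django shell after migrating (illustrative only; the import path follows the one used elsewhere in this diff):

    from backend.apps.api.v1.models import MeasurementUnit

    # Every seeded unit should resolve to an existing category.
    assert not MeasurementUnit.objects.filter(category__isnull=True).exists()

    # Example lookup mirroring the dictionary above: volume units and their TeX symbols.
    for unit in MeasurementUnit.objects.filter(category__slug="volume").order_by("slug"):
        print(unit.slug, unit.tex)
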
diff --git a/backend/apps/api/v1/migrations/0046_observationlevel_order.py b/backend/apps/api/v1/migrations/0046_observationlevel_order.py
index fbc194ef..cf15856b 100644
--- a/backend/apps/api/v1/migrations/0046_observationlevel_order.py
+++ b/backend/apps/api/v1/migrations/0046_observationlevel_order.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from django.db import migrations, models
+
class Migration(migrations.Migration):
dependencies = [
("v1", "0045_add_measurement_categories_and_units"),
@@ -15,4 +16,4 @@ class Migration(migrations.Migration):
),
preserve_default=False,
),
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/migrations/0047_initialize_observation_level_order.py b/backend/apps/api/v1/migrations/0047_initialize_observation_level_order.py
index b8425838..ac501f2a 100644
--- a/backend/apps/api/v1/migrations/0047_initialize_observation_level_order.py
+++ b/backend/apps/api/v1/migrations/0047_initialize_observation_level_order.py
@@ -1,42 +1,47 @@
from django.db import migrations
+
def initialize_observation_level_order(apps, schema_editor):
- ObservationLevel = apps.get_model('v1', 'ObservationLevel')
-
+ ObservationLevel = apps.get_model("v1", "ObservationLevel")
+
# Group by each possible parent type and set order
- for table_id in ObservationLevel.objects.values_list('table_id', flat=True).distinct():
+ for table_id in ObservationLevel.objects.values_list("table_id", flat=True).distinct():
if table_id:
for i, ol in enumerate(ObservationLevel.objects.filter(table_id=table_id)):
ol.order = i
ol.save()
-
- for rds_id in ObservationLevel.objects.values_list('raw_data_source_id', flat=True).distinct():
+
+ for rds_id in ObservationLevel.objects.values_list("raw_data_source_id", flat=True).distinct():
if rds_id:
for i, ol in enumerate(ObservationLevel.objects.filter(raw_data_source_id=rds_id)):
ol.order = i
ol.save()
-
- for ir_id in ObservationLevel.objects.values_list('information_request_id', flat=True).distinct():
+
+ for ir_id in ObservationLevel.objects.values_list(
+ "information_request_id", flat=True
+ ).distinct():
if ir_id:
for i, ol in enumerate(ObservationLevel.objects.filter(information_request_id=ir_id)):
ol.order = i
ol.save()
-
- for analysis_id in ObservationLevel.objects.values_list('analysis_id', flat=True).distinct():
+
+ for analysis_id in ObservationLevel.objects.values_list("analysis_id", flat=True).distinct():
if analysis_id:
for i, ol in enumerate(ObservationLevel.objects.filter(analysis_id=analysis_id)):
ol.order = i
ol.save()
+
def reverse_migration(apps, schema_editor):
- ObservationLevel = apps.get_model('v1', 'ObservationLevel')
+ ObservationLevel = apps.get_model("v1", "ObservationLevel")
ObservationLevel.objects.all().update(order=0)
+
class Migration(migrations.Migration):
dependencies = [
- ('v1', '0046_observationlevel_order'),
+ ("v1", "0046_observationlevel_order"),
]
operations = [
migrations.RunPython(initialize_observation_level_order, reverse_migration),
- ]
\ No newline at end of file
+ ]
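Note: the four loops in initialize_observation_level_order share one shape — group ObservationLevel rows by a parent foreign key and number them 0..n-1. A sketch of the same idea factored into a helper (illustrative refactor, assuming the historical model obtained via apps.get_model as above):

    def init_order_for_parent(ObservationLevel, parent_field):
        # Distinct non-null parent ids, then sequential order within each parent.
        parent_ids = (
            ObservationLevel.objects.exclude(**{f"{parent_field}__isnull": True})
            .values_list(parent_field, flat=True)
            .distinct()
        )
        for parent_id in parent_ids:
            for i, ol in enumerate(ObservationLevel.objects.filter(**{parent_field: parent_id})):
                ol.order = i
                ol.save()

    # init_order_for_parent(ObservationLevel, "table_id"), and likewise for
    # "raw_data_source_id", "information_request_id", and "analysis_id".
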
diff --git a/backend/apps/api/v1/migrations/0048_alter_observationlevel_options_and_more.py b/backend/apps/api/v1/migrations/0048_alter_observationlevel_options_and_more.py
index 4ae3ed76..5013868c 100644
--- a/backend/apps/api/v1/migrations/0048_alter_observationlevel_options_and_more.py
+++ b/backend/apps/api/v1/migrations/0048_alter_observationlevel_options_and_more.py
@@ -1,20 +1,22 @@
# Generated by Django 4.2.16 on 2024-11-04 03:54
from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
+from django.db import migrations
class Migration(migrations.Migration):
-
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ('v1', '0047_initialize_observation_level_order'),
+ ("v1", "0047_initialize_observation_level_order"),
]
operations = [
migrations.AlterModelOptions(
- name='observationlevel',
- options={'ordering': ['order'], 'verbose_name': 'Observation Level', 'verbose_name_plural': 'Observation Levels'},
+ name="observationlevel",
+ options={
+ "ordering": ["order"],
+ "verbose_name": "Observation Level",
+ "verbose_name_plural": "Observation Levels",
+ },
),
]
diff --git a/backend/apps/api/v1/migrations/0049_poll_pipeline.py b/backend/apps/api/v1/migrations/0049_poll_pipeline.py
index 2dc4b07d..412be9ea 100644
--- a/backend/apps/api/v1/migrations/0049_poll_pipeline.py
+++ b/backend/apps/api/v1/migrations/0049_poll_pipeline.py
@@ -1,19 +1,24 @@
# Generated by Django 4.2.16 on 2024-11-06 04:14
-from django.db import migrations, models
import django.db.models.deletion
+from django.db import migrations, models
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0048_alter_observationlevel_options_and_more'),
+ ("v1", "0048_alter_observationlevel_options_and_more"),
]
operations = [
migrations.AddField(
- model_name='poll',
- name='pipeline',
- field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='polls', to='v1.pipeline'),
+ model_name="poll",
+ name="pipeline",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="polls",
+ to="v1.pipeline",
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0050_table_is_deprecated.py b/backend/apps/api/v1/migrations/0050_table_is_deprecated.py
index c703d6ac..f171d55f 100644
--- a/backend/apps/api/v1/migrations/0050_table_is_deprecated.py
+++ b/backend/apps/api/v1/migrations/0050_table_is_deprecated.py
@@ -4,15 +4,17 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0049_poll_pipeline'),
+ ("v1", "0049_poll_pipeline"),
]
operations = [
migrations.AddField(
- model_name='table',
- name='is_deprecated',
- field=models.BooleanField(default=False, help_text='We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc.'),
+ model_name="table",
+ name="is_deprecated",
+ field=models.BooleanField(
+ default=False,
+ help_text="We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc.",
+ ),
),
]
diff --git a/backend/apps/api/v1/migrations/0051_add_new_field_dataset.py b/backend/apps/api/v1/migrations/0051_add_new_field_dataset.py
index 0fcd2b45..4b9840de 100644
--- a/backend/apps/api/v1/migrations/0051_add_new_field_dataset.py
+++ b/backend/apps/api/v1/migrations/0051_add_new_field_dataset.py
@@ -4,16 +4,16 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0050_table_is_deprecated'),
+ ("v1", "0050_table_is_deprecated"),
]
operations = [
migrations.AddField(
- model_name='dataset',
- name='usage_guide',
- field=models.TextField(blank=True, null=True, default='', max_length=255 , verbose_name='Guia de Uso')
+ model_name="dataset",
+ name="usage_guide",
+ field=models.TextField(
+ blank=True, null=True, default="", max_length=255, verbose_name="Guia de Uso"
+ ),
),
]
-
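Note on the usage_guide field added above: on a TextField, max_length only affects the auto-generated form widget; it is not enforced at the model or database level. If a hard 255-character limit is actually intended, a CharField would enforce it (sketch of the alternative, not something this migration does):

    from django.db import models

    usage_guide = models.CharField(
        blank=True, null=True, default="", max_length=255, verbose_name="Guia de Uso"
    )
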
diff --git a/backend/apps/api/v1/migrations/0052_remove_dataset_is_closed.py b/backend/apps/api/v1/migrations/0052_remove_dataset_is_closed.py
index e47b283f..56540aae 100644
--- a/backend/apps/api/v1/migrations/0052_remove_dataset_is_closed.py
+++ b/backend/apps/api/v1/migrations/0052_remove_dataset_is_closed.py
@@ -4,14 +4,13 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('v1', '0051_add_new_field_dataset'),
+ ("v1", "0051_add_new_field_dataset"),
]
operations = [
migrations.RemoveField(
- model_name='dataset',
- name='is_closed',
+ model_name="dataset",
+ name="is_closed",
),
]
diff --git a/backend/apps/api/v1/migrations/0053_rename_required_requires.py b/backend/apps/api/v1/migrations/0053_rename_required_requires.py
index cec684f3..7d5083fa 100644
--- a/backend/apps/api/v1/migrations/0053_rename_required_requires.py
+++ b/backend/apps/api/v1/migrations/0053_rename_required_requires.py
@@ -5,13 +5,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('v1', '0052_remove_dataset_is_closed'),
+ ("v1", "0052_remove_dataset_is_closed"),
]
operations = [
migrations.RenameField(
- model_name='rawdatasource',
- old_name='required_registration',
- new_name='requires_registration',
+ model_name="rawdatasource",
+ old_name="required_registration",
+ new_name="requires_registration",
),
- ]
\ No newline at end of file
+ ]
diff --git a/backend/apps/api/v1/models.py b/backend/apps/api/v1/models.py
index 4621662c..71cc7671 100644
--- a/backend/apps/api/v1/models.py
+++ b/backend/apps/api/v1/models.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+import logging
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
@@ -14,9 +15,8 @@
from backend.custom.storage import OverwriteStorage, upload_to, validate_image
from backend.custom.utils import check_kebab_case, check_snake_case
-import logging
+logger = logging.getLogger("django.request")
-logger = logging.getLogger('django.request')
class Area(BaseModel):
"""Area model"""
@@ -25,16 +25,16 @@ class Area(BaseModel):
slug = models.SlugField(unique=True)
name = models.CharField(max_length=255, blank=False, null=False)
administrative_level = models.IntegerField(
- null=True,
+ null=True,
blank=True,
choices=[
- (0, '0'),
- (1, '1'),
- (2, '2'),
- (3, '3'),
- (4, '4'),
- (5, '5'),
- ]
+ (0, "0"),
+ (1, "1"),
+ (2, "2"),
+ (3, "3"),
+ (4, "4"),
+ (5, "5"),
+ ],
)
entity = models.ForeignKey(
"Entity",
@@ -42,7 +42,7 @@ class Area(BaseModel):
null=True,
blank=True,
related_name="areas",
- limit_choices_to={'category__slug': 'spatial'}
+ limit_choices_to={"category__slug": "spatial"},
)
parent = models.ForeignKey(
"Area",
@@ -69,19 +69,21 @@ def clean(self):
"""Validate the model fields."""
errors = {}
if self.administrative_level is not None and self.administrative_level not in [0, 1, 2, 3]:
- errors['administrative_level'] = 'Administrative level must be 0, 1, 2, or 3'
-
- if self.entity and self.entity.category.slug != 'spatial':
- errors['entity'] = 'Entity must have category "spatial"'
-
- if self.parent and self.parent.slug != 'world':
+ errors["administrative_level"] = "Administrative level must be 0, 1, 2, or 3"
+
+ if self.entity and self.entity.category.slug != "spatial":
+ errors["entity"] = 'Entity must have category "spatial"'
+
+ if self.parent and self.parent.slug != "world":
if self.administrative_level is None:
- errors['administrative_level'] = 'Administrative level is required when parent is set'
+ errors[
+ "administrative_level"
+ ] = "Administrative level is required when parent is set"
elif self.parent.administrative_level is None:
- errors['parent'] = 'Parent must have an administrative level'
+ errors["parent"] = "Parent must have an administrative level"
elif self.parent.administrative_level != self.administrative_level - 1:
- errors['parent'] = 'Parent must have administrative level exactly one level above'
-
+ errors["parent"] = "Parent must have administrative level exactly one level above"
+
if errors:
raise ValidationError(errors)
return super().clean()
@@ -519,7 +521,7 @@ class Dataset(BaseModel):
"Organization",
related_name="datasets",
verbose_name="Organizations",
- help_text="Organizations associated with this dataset"
+ help_text="Organizations associated with this dataset",
)
themes = models.ManyToManyField(
"Theme",
@@ -548,7 +550,9 @@ class Dataset(BaseModel):
default=0,
help_text="Number of page views by Google Analytics",
)
- usage_guide = models.TextField(blank=True, null=True, default='', max_length=255 , verbose_name='Guia de Uso')
+ usage_guide = models.TextField(
+ blank=True, null=True, default="", max_length=255, verbose_name="Guia de Uso"
+ )
graphql_nested_filter_fields_whitelist = ["id", "slug"]
@@ -612,7 +616,7 @@ def spatial_coverage_name_pt(self) -> list[str]:
*self.raw_data_sources.all(),
*self.information_requests.all(),
]
- return sorted(list(get_spatial_coverage_name(resources, locale = 'pt')))
+ return sorted(list(get_spatial_coverage_name(resources, locale="pt")))
@property
def spatial_coverage_name_en(self) -> list[str]:
@@ -622,7 +626,7 @@ def spatial_coverage_name_en(self) -> list[str]:
*self.raw_data_sources.all(),
*self.information_requests.all(),
]
- return sorted(list(get_spatial_coverage_name(resources, locale = 'en')))
+ return sorted(list(get_spatial_coverage_name(resources, locale="en")))
@property
def spatial_coverage_name_es(self) -> list[str]:
@@ -632,7 +636,7 @@ def spatial_coverage_name_es(self) -> list[str]:
*self.raw_data_sources.all(),
*self.information_requests.all(),
]
- return sorted(list(get_spatial_coverage_name(resources, locale = 'es')))
+ return sorted(list(get_spatial_coverage_name(resources, locale="es")))
@property
def entities(self) -> list[dict]:
@@ -652,7 +656,11 @@ def entities(self) -> list[dict]:
def contains_open_data(self):
"""Returns true if there are tables or columns with open coverages"""
open_data = False
- tables = self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).all()
+ tables = (
+ self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .all()
+ )
for table in tables:
table_coverages = table.coverages.filter(is_closed=False)
if table_coverages:
@@ -663,7 +671,11 @@ def contains_open_data(self):
@property
def contains_closed_data(self):
"""Returns true if there are tables or columns with closed coverages, or if the uncompressed file size is above 1 GB"""
- for table in self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).all():
+ for table in (
+ self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .all()
+ ):
if table.contains_closed_data:
return True
return False
@@ -671,70 +683,70 @@ def contains_closed_data(self):
@property
def contains_tables(self):
"""Returns true if there are tables in the dataset"""
- return len(
- self.tables
- .exclude(status__slug="under_review")
+ return (
+ len(
+ self.tables.exclude(status__slug="under_review")
.exclude(slug__in=["dicionario", "dictionary"])
.all()
- ) > 0
+ )
+ > 0
+ )
@property
def contains_raw_data_sources(self):
"""Returns true if there are raw data sources in the dataset"""
- return len(
- self.raw_data_sources
- .exclude(status__slug="under_review")
- .all()
- ) > 0
+ return len(self.raw_data_sources.exclude(status__slug="under_review").all()) > 0
@property
def contains_information_requests(self):
"""Returns true if there are information requests in the dataset"""
- return len(
- self.information_requests
- .exclude(status__slug="under_review")
- .all()
- ) > 0
+ return len(self.information_requests.exclude(status__slug="under_review").all()) > 0
@property
def n_tables(self):
return len(
- self.tables
- .exclude(status__slug="under_review")
- .exclude(slug__in=["dicionario", "dictionary"])
- .all()
+ self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .all()
)
@property
def n_raw_data_sources(self):
- return len(
- self.raw_data_sources
- .exclude(status__slug="under_review")
- .all()
- )
+ return len(self.raw_data_sources.exclude(status__slug="under_review").all())
@property
def n_information_requests(self):
- return len(
- self.information_requests
- .exclude(status__slug="under_review")
- .all()
- )
+ return len(self.information_requests.exclude(status__slug="under_review").all())
@property
def first_table_id(self):
- if resource := self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).order_by("order").first():
+ if (
+ resource := self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .order_by("order")
+ .first()
+ ):
return resource.pk
@property
def first_open_table_id(self):
- for resource in self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).order_by("order").all():
+ for resource in (
+ self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .order_by("order")
+ .all()
+ ):
if resource.contains_open_data:
return resource.pk
@property
def first_closed_table_id(self):
- for resource in self.tables.exclude(status__slug="under_review").exclude(slug__in=["dicionario", "dictionary"]).order_by("order").all():
+ for resource in (
+ self.tables.exclude(status__slug="under_review")
+ .exclude(slug__in=["dicionario", "dictionary"])
+ .order_by("order")
+ .all()
+ ):
if resource.contains_closed_data:
return resource.pk
@@ -791,7 +803,7 @@ class Update(BaseModel):
null=True,
on_delete=models.SET_NULL,
related_name="updates",
- limit_choices_to={'category__slug': 'datetime'}
+ limit_choices_to={"category__slug": "datetime"},
)
frequency = models.IntegerField(blank=True, null=True)
lag = models.IntegerField(blank=True, null=True)
@@ -846,8 +858,8 @@ def clean(self) -> None:
"One and only one of 'table', "
"'raw_data_source', or 'information_request' must be set."
)
- if self.entity and self.entity.category.slug != 'datetime':
- errors['entity'] = 'Entity must have category "datetime"'
+ if self.entity and self.entity.category.slug != "datetime":
+ errors["entity"] = 'Entity must have category "datetime"'
if errors:
raise ValidationError(errors)
return super().clean()
@@ -861,7 +873,7 @@ class Poll(BaseModel):
null=True,
on_delete=models.SET_NULL,
related_name="polls",
- limit_choices_to={'category__slug': 'datetime'}
+ limit_choices_to={"category__slug": "datetime"},
)
frequency = models.IntegerField(blank=True, null=True)
latest = models.DateTimeField(blank=True, null=True)
@@ -907,8 +919,8 @@ def clean(self) -> None:
raise ValidationError(
"One and only one of 'raw_data_source'," " or 'information_request' must be set."
)
- if self.entity and self.entity.category.slug != 'datetime':
- errors['entity'] = 'Entity must have category "datetime"'
+ if self.entity and self.entity.category.slug != "datetime":
+ errors["entity"] = 'Entity must have category "datetime"'
if errors:
raise ValidationError(errors)
return super().clean()
@@ -933,7 +945,7 @@ class Table(BaseModel, OrderedModel):
)
is_deprecated = models.BooleanField(
default=False,
- help_text="We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc."
+ help_text="We stopped maintaining this table for some reason. Examples: raw data deprecated, new version elsewhere, etc.",
)
license = models.ForeignKey(
"License",
@@ -984,7 +996,9 @@ class Table(BaseModel, OrderedModel):
compressed_file_size = models.BigIntegerField(blank=True, null=True)
number_rows = models.BigIntegerField(blank=True, null=True)
number_columns = models.BigIntegerField(blank=True, null=True)
- is_closed = models.BooleanField(default=False, help_text="Table is for BD Pro subscribers only")
+ is_closed = models.BooleanField(
+ default=False, help_text="Table is for BD Pro subscribers only"
+ )
page_views = models.BigIntegerField(
default=0,
help_text="Number of page views by Google Analytics",
@@ -1064,9 +1078,10 @@ def contains_closed_data(self):
for column in self.columns.all():
if column.coverages.filter(is_closed=True).first():
return True
- if (self.uncompressed_file_size and
- self.uncompressed_file_size > 100 * 1024 * 1024 and
- self.uncompressed_file_size <= 1000 * 1024 * 1024
+ if (
+ self.uncompressed_file_size
+ and self.uncompressed_file_size > 100 * 1024 * 1024
+ and self.uncompressed_file_size <= 1000 * 1024 * 1024
):
return True
return False
@@ -1089,17 +1104,17 @@ def spatial_coverage(self) -> list[str]:
@property
def spatial_coverage_name_pt(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- return get_spatial_coverage_name([self], locale = 'pt')
+ return get_spatial_coverage_name([self], locale="pt")
@property
def spatial_coverage_name_en(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- return get_spatial_coverage_name([self], locale = 'en')
+ return get_spatial_coverage_name([self], locale="en")
@property
def spatial_coverage_name_es(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- return get_spatial_coverage_name([self], locale = 'es')
+ return get_spatial_coverage_name([self], locale="es")
@property
def neighbors(self) -> list[dict]:
@@ -1153,10 +1168,10 @@ def coverage_datetime_units(self) -> str:
for coverage in self.coverages.all():
for datetime_range in coverage.datetime_ranges.all():
units.extend([unit.name for unit in datetime_range.units.all()])
-
+
if not units:
return None
-
+
most_common_unit = list(set(units))
return most_common_unit
@@ -1271,10 +1286,10 @@ def clean(self):
datetime_ranges.sort(key=lambda x: x[0])
for i in range(1, len(datetime_ranges)):
if datetime_ranges[i - 1][1] > datetime_ranges[i][0]:
- errors['coverages_areas'] = f"Temporal coverages in area {area} overlap"
+ errors["coverages_areas"] = f"Temporal coverages in area {area} overlap"
except ValueError:
pass
-
+
if errors:
raise ValidationError(errors)
return super().clean()
@@ -1350,13 +1365,14 @@ class Meta:
verbose_name_plural = "BigQuery Types"
ordering = ["name"]
+
class MeasurementUnitCategory(BaseModel):
"""Model definition for MeasurementUnitCategory."""
id = models.UUIDField(primary_key=True, default=uuid4)
slug = models.SlugField(unique=True)
name = models.CharField(max_length=255)
-
+
graphql_nested_filter_fields_whitelist = ["id"]
def __str__(self):
@@ -1370,6 +1386,7 @@ class Meta:
verbose_name_plural = "Measurement Unit Categories"
ordering = ["slug"]
+
class MeasurementUnit(BaseModel):
"""Model definition for MeasurementUnit."""
@@ -1397,6 +1414,7 @@ class Meta:
verbose_name_plural = "Measurement Units"
ordering = ["slug"]
+
class Column(BaseModel, OrderedModel):
"""Model definition for Column."""
@@ -1416,7 +1434,7 @@ class Column(BaseModel, OrderedModel):
null=True,
related_name="columns",
blank=True,
- limit_choices_to={'is_primary_key': True, 'table__is_directory': True}
+ limit_choices_to={"is_primary_key": True, "table__is_directory": True},
)
measurement_unit = models.CharField(max_length=255, blank=True, null=True)
contains_sensitive_data = models.BooleanField(default=False, blank=True, null=True)
@@ -1479,25 +1497,25 @@ def spatial_coverage(self) -> list[str]:
@property
def spatial_coverage_name_pt(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- coverage = get_spatial_coverage_name([self], locale = 'pt')
+ coverage = get_spatial_coverage_name([self], locale="pt")
if not coverage:
- coverage = get_spatial_coverage_name([self.table], locale = 'pt')
+ coverage = get_spatial_coverage_name([self.table], locale="pt")
return coverage
-
+
@property
def spatial_coverage_name_en(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- coverage = get_spatial_coverage_name([self], locale = 'en')
+ coverage = get_spatial_coverage_name([self], locale="en")
if not coverage:
- coverage = get_spatial_coverage_name([self.table], locale = 'en')
+ coverage = get_spatial_coverage_name([self.table], locale="en")
return coverage
@property
def spatial_coverage_name_es(self) -> list[str]:
"""Union spatial coverage of all related resources"""
- coverage = get_spatial_coverage_name([self], locale = 'es')
+ coverage = get_spatial_coverage_name([self], locale="es")
if not coverage:
- coverage = get_spatial_coverage_name([self.table], locale = 'es')
+ coverage = get_spatial_coverage_name([self.table], locale="es")
return coverage
@property
@@ -1873,7 +1891,7 @@ class ObservationLevel(BaseModel, OrderedModel):
related_name="observation_levels",
)
- order_with_respect_to = ('table', 'raw_data_source', 'information_request', 'analysis')
+ order_with_respect_to = ("table", "raw_data_source", "information_request", "analysis")
graphql_nested_filter_fields_whitelist = ["id"]
@@ -1891,7 +1909,7 @@ class Meta:
def get_ordering_queryset(self):
"""Get queryset for ordering within the appropriate parent"""
qs = super().get_ordering_queryset()
-
+
# Filter by the appropriate parent field
if self.table_id:
return qs.filter(table_id=self.table_id)
@@ -1901,7 +1919,7 @@ def get_ordering_queryset(self):
return qs.filter(information_request_id=self.information_request_id)
elif self.analysis_id:
return qs.filter(analysis_id=self.analysis_id)
-
+
return qs
@@ -2015,17 +2033,17 @@ def clean(self):
errors = {}
try:
if self.since and self.until and self.since > self.until:
- errors['date_range'] = "Start date must be less than or equal to end date"
+ errors["date_range"] = "Start date must be less than or equal to end date"
if self.since and self.until and not self.interval:
- errors['interval'] = "Interval must exist in ranges with start and end dates"
-
+ errors["interval"] = "Interval must exist in ranges with start and end dates"
+
# Add validation for units
- #for unit in self.units.all():
+ # for unit in self.units.all():
# if unit.bigquery_type.name not in ['DATE', 'DATETIME', 'TIME', 'TIMESTAMP']:
# errors['units'] = f"Column '{unit.name}' is not a valid datetime unit"
except Exception as e:
- errors['general'] = f"An error occurred: {str(e)}"
-
+ errors["general"] = f"An error occurred: {str(e)}"
+
if errors:
raise ValidationError(errors)
return super().clean()
@@ -2206,9 +2224,10 @@ def get_full_temporal_coverage(resources: list) -> dict:
if paid_since.str and paid_until.str:
return [paid_since.as_dict, paid_until.as_dict]
+
def get_spatial_coverage(resources: list) -> list:
"""Get spatial coverage of resources by returning unique area slugs, keeping only the highest level in each branch
-
+
For example:
- If areas = [br_mg_3100104, br_mg_3100104] -> returns [br_mg_3100104]
- If areas = [br_mg_3100104, br_sp_3500105] -> returns [br_mg_3100104, br_sp_3500105]
@@ -2222,40 +2241,41 @@ def get_spatial_coverage(resources: list) -> list:
for coverage in resource.coverages.all():
if coverage.area:
all_areas.add(coverage.area.slug)
-
+
if not all_areas:
return []
-
+
# If 'world' is present, it encompasses everything
- if 'world' in all_areas:
- return ['world']
-
+ if "world" in all_areas:
+ return ["world"]
+
# Filter out areas that have a parent in the set
filtered_areas = set()
for area in all_areas:
- parts = area.split('_')
+ parts = area.split("_")
is_parent_present = False
-
+
# Check if any parent path exists in all_areas
for i in range(1, len(parts)):
- parent = '_'.join(parts[:i])
+ parent = "_".join(parts[:i])
if parent in all_areas:
is_parent_present = True
break
-
+
if not is_parent_present:
filtered_areas.add(area)
-
+
return sorted(list(filtered_areas))
-def get_spatial_coverage_name(resources: list, locale: str = 'pt') -> list:
- """Get spatial coverage of resources by returning unique area names in the specified locale,
+
+def get_spatial_coverage_name(resources: list, locale: str = "pt") -> list:
+ """Get spatial coverage of resources by returning unique area names in the specified locale,
keeping only the highest level in each branch
-
+
Args:
resources: List of resources to get coverage from
locale: Language code ('pt', 'en', etc). Defaults to 'pt'
-
+
For example:
- If areas = [br_mg_3100104, br_mg_3100104] -> returns [Belo Horizonte]
- If areas = [br_mg_3100104, br_sp_3500105] -> returns [Belo Horizonte, São Paulo]
@@ -2267,43 +2287,45 @@ def get_spatial_coverage_name(resources: list, locale: str = 'pt') -> list:
"""
# Translation mapping for special cases
translations = {
- 'world': {
- 'pt': 'Mundo',
- 'en': 'World',
- 'es': 'Mundo',
+ "world": {
+ "pt": "Mundo",
+ "en": "World",
+ "es": "Mundo",
}
}
-
+
# Collect all unique areas (both slug and name) across resources
all_areas = {}
for resource in resources:
for coverage in resource.coverages.all():
if coverage.area:
# Get localized name using getattr, fallback to default name if not found
- localized_name = getattr(coverage.area, f'name_{locale}', None) or coverage.area.name
+ localized_name = (
+ getattr(coverage.area, f"name_{locale}", None) or coverage.area.name
+ )
all_areas[coverage.area.slug] = localized_name
-
+
if not all_areas:
return []
-
+
# If 'world' is present, it encompasses everything
- if 'world' in all_areas:
- return [translations['world'].get(locale, translations['world']['pt'])]
-
+ if "world" in all_areas:
+ return [translations["world"].get(locale, translations["world"]["pt"])]
+
# Filter out areas that have a parent in the set
filtered_areas = set()
for area_slug in all_areas:
- parts = area_slug.split('_')
+ parts = area_slug.split("_")
is_parent_present = False
-
+
# Check if any parent path exists in all_areas
for i in range(1, len(parts)):
- parent = '_'.join(parts[:i])
+ parent = "_".join(parts[:i])
if parent in all_areas:
is_parent_present = True
break
-
+
if not is_parent_present:
filtered_areas.add(all_areas[area_slug])
-
+
return sorted(list(filtered_areas))
diff --git a/backend/apps/api/v1/search_indexes.py b/backend/apps/api/v1/search_indexes.py
index 1b46089b..6e256909 100644
--- a/backend/apps/api/v1/search_indexes.py
+++ b/backend/apps/api/v1/search_indexes.py
@@ -47,7 +47,7 @@ class DatasetIndex(indexes.SearchIndex, indexes.Indexable):
null=True,
indexed=False,
)
-
+
spatial_coverage = indexes.MultiValueField(
model_attr="spatial_coverage",
null=True,
@@ -62,7 +62,6 @@ class DatasetIndex(indexes.SearchIndex, indexes.Indexable):
indexed=True,
)
-
table_id = indexes.MultiValueField(
model_attr="tables__pk",
indexed=False,
@@ -233,7 +232,7 @@ class DatasetIndex(indexes.SearchIndex, indexes.Indexable):
faceted=True,
indexed=False,
)
-
+
contains_open_data = indexes.BooleanField(
model_attr="contains_open_data",
indexed=False,
@@ -318,32 +317,32 @@ def prepare_organization_picture(self, obj):
def get_field_mapping(self):
mapping = super().get_field_mapping()
- mapping['spatial_coverage'] = {
- 'type': 'keyword',
- 'store': True,
- 'index': True,
+ mapping["spatial_coverage"] = {
+ "type": "keyword",
+ "store": True,
+ "index": True,
}
return mapping
def prepare(self, obj):
data = super().prepare(obj)
-
+
organization_fields = [
- 'organization_id',
- 'organization_slug',
- 'organization_name',
- 'organization_name_pt',
- 'organization_name_en',
- 'organization_name_es',
- 'organization_picture',
- 'organization_website',
- 'organization_description_pt',
- 'organization_description_en',
- 'organization_description_es'
+ "organization_id",
+ "organization_slug",
+ "organization_name",
+ "organization_name_pt",
+ "organization_name_en",
+ "organization_name_es",
+ "organization_picture",
+ "organization_website",
+ "organization_description_pt",
+ "organization_description_en",
+ "organization_description_es",
]
-
+
for field in organization_fields:
if field in data and not isinstance(data[field], (list, tuple)):
data[field] = [data[field]] if data[field] is not None else []
-
+
return data
diff --git a/backend/apps/api/v1/search_views.py b/backend/apps/api/v1/search_views.py
index be54423d..c7306cce 100644
--- a/backend/apps/api/v1/search_views.py
+++ b/backend/apps/api/v1/search_views.py
@@ -7,7 +7,8 @@
from haystack.models import SearchResult
from haystack.query import SearchQuerySet
-from backend.apps.api.v1.models import Entity, Organization, Tag, Theme, Area
+from backend.apps.api.v1.models import Area, Entity, Organization, Tag, Theme
+
class DatasetSearchForm(FacetedSearchForm):
load_all: bool = True
@@ -30,12 +31,15 @@ def search(self):
sqs = self.searchqueryset.all()
# Debug print to see all form data
- print("DEBUG: Form data:", {
- 'spatial_coverage': self.spatial_coverage,
- 'theme': self.theme,
- 'organization': self.organization,
- 'tag': self.tag,
- })
+ print(
+ "DEBUG: Form data:",
+ {
+ "spatial_coverage": self.spatial_coverage,
+ "theme": self.theme,
+ "organization": self.organization,
+ "tag": self.tag,
+ },
+ )
# Text search if provided
if q := self.cleaned_data.get("q"):
@@ -64,28 +68,25 @@ def search(self):
coverage_queries = []
for coverage_list in self.spatial_coverage:
# Split the comma-separated values
- coverages = coverage_list.split(',')
- if 'world' in coverages:
+ coverages = coverage_list.split(",")
+ if "world" in coverages:
# If world is in the list, only look for world coverage
coverage_queries = ['spatial_coverage_exact:"world"']
break
else:
# Regular case: handle hierarchical patterns for each coverage
for coverage in coverages:
- parts = coverage.split('_')
- coverage_patterns = [
- '_'.join(parts[:i])
- for i in range(1, len(parts))
- ]
+ parts = coverage.split("_")
+ coverage_patterns = ["_".join(parts[:i]) for i in range(1, len(parts))]
coverage_patterns.append(coverage) # Add the full coverage too
-
+
# Build OR condition for all valid levels, including world
- patterns = ' OR '.join(
- f'spatial_coverage_exact:"{pattern}"'
- for pattern in coverage_patterns + ['world']
+ patterns = " OR ".join(
+ f'spatial_coverage_exact:"{pattern}"'
+ for pattern in coverage_patterns + ["world"]
)
- coverage_queries.append(f'({patterns})')
-
+ coverage_queries.append(f"({patterns})")
+
# Combine all coverage queries with AND
query = f'_exists_:spatial_coverage_exact AND {" AND ".join(coverage_queries)}'
sqs = sqs.raw_search(query)
@@ -126,7 +127,7 @@ def page_size(self):
@property
def locale(self):
- return self.request.GET.get('locale', 'pt')
+ return self.request.GET.get("locale", "pt")
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
@@ -181,10 +182,13 @@ def get_facets(self, sqs: SearchQuerySet, facet_size=200):
("entity_slug", "observation_levels", Entity),
]:
to_name = model.objects.values("slug", f"name_{self.locale}", "name")
- to_name = {e["slug"]: {
- "name": e[f"name_{self.locale}"] or e["name"] or e["slug"],
- "fallback": e[f"name_{self.locale}"] is None
- } for e in to_name.all()}
+ to_name = {
+ e["slug"]: {
+ "name": e[f"name_{self.locale}"] or e["name"] or e["slug"],
+ "fallback": e[f"name_{self.locale}"] is None,
+ }
+ for e in to_name.all()
+ }
facets[key_front] = facets.pop(key_back, None)
for field in facets[key_front] or []:
translated_name = to_name.get(field["key"], {})
@@ -195,16 +199,16 @@ def get_facets(self, sqs: SearchQuerySet, facet_size=200):
if "spatial_coverage" in facets:
spatial_coverages = []
coverage_counts = {} # Dictionary to track counts per slug
- coverage_data = {} # Dictionary to store the full data per slug
-
+ coverage_data = {} # Dictionary to store the full data per slug
+
for field in facets.pop("spatial_coverage") or []:
coverage = field["key"]
areas = Area.objects.filter(slug=coverage, administrative_level=0)
-
+
if coverage == "world":
field["name"] = "World"
field["fallback"] = False
-
+
# Add all top-level areas (administrative_level = 0)
top_level_areas = Area.objects.filter(administrative_level=0)
for child_area in top_level_areas:
@@ -212,8 +216,10 @@ def get_facets(self, sqs: SearchQuerySet, facet_size=200):
coverage_counts[slug] = coverage_counts.get(slug, 0) + field["count"]
coverage_data[slug] = {
"key": slug,
- "name": getattr(child_area, f'name_{self.locale}') or child_area.name or slug,
- "fallback": getattr(child_area, f'name_{self.locale}') is None
+ "name": getattr(child_area, f"name_{self.locale}")
+ or child_area.name
+ or slug,
+ "fallback": getattr(child_area, f"name_{self.locale}") is None,
}
elif areas.exists():
for area in areas:
@@ -221,20 +227,20 @@ def get_facets(self, sqs: SearchQuerySet, facet_size=200):
coverage_counts[slug] = coverage_counts.get(slug, 0) + field["count"]
coverage_data[slug] = {
"key": slug,
- "name": getattr(area, f'name_{self.locale}') or area.name or coverage,
- "fallback": getattr(area, f'name_{self.locale}') is None
+ "name": getattr(area, f"name_{self.locale}") or area.name or coverage,
+ "fallback": getattr(area, f"name_{self.locale}") is None,
}
-
+
# Create final list with collapsed counts and sort by count
spatial_coverages = []
for slug, count in coverage_counts.items():
entry = coverage_data[slug].copy()
entry["count"] = count
spatial_coverages.append(entry)
-
+
# Sort by count in descending order
spatial_coverages.sort(key=lambda x: x["count"], reverse=True)
-
+
facets["spatial_coverages"] = spatial_coverages
return facets
@@ -250,8 +256,7 @@ def key(r):
return [as_search_result(r, self.locale) for r in results[since:until]]
-def as_search_result(result: SearchResult, locale='pt'):
-
+def as_search_result(result: SearchResult, locale="pt"):
themes = []
for slug, name in zip(result.theme_slug or [], getattr(result, f"theme_name_{locale}") or []):
themes.append(
@@ -265,7 +270,10 @@ def as_search_result(result: SearchResult, locale='pt'):
for pk, slug, name, picture in zip(
result.organization_id or [],
result.organization_slug or [],
- getattr(result, f"organization_name_{locale}") or result.organization_name or result.organization_slug or [],
+ getattr(result, f"organization_name_{locale}")
+ or result.organization_name
+ or result.organization_slug
+ or [],
result.organization_picture or [],
):
picture = storage.url(picture) if picture else None
@@ -288,7 +296,9 @@ def as_search_result(result: SearchResult, locale='pt'):
)
entities = []
- for slug, name in zip(result.entity_slug or [], getattr(result, f"entity_name_{locale}") or []):
+ for slug, name in zip(
+ result.entity_slug or [], getattr(result, f"entity_name_{locale}") or []
+ ):
entities.append(
{
"slug": slug,
@@ -298,25 +308,27 @@ def as_search_result(result: SearchResult, locale='pt'):
# Add spatial coverage translations
spatial_coverages = []
- for coverage in (result.spatial_coverage or []):
+ for coverage in result.spatial_coverage or []:
area = Area.objects.filter(slug=coverage).first()
if area:
- spatial_coverages.append({
- 'slug': coverage,
- 'name': getattr(area, f'name_{locale}') or area.name or coverage
- })
+ spatial_coverages.append(
+ {
+ "slug": coverage,
+ "name": getattr(area, f"name_{locale}") or area.name or coverage,
+ }
+ )
else:
- spatial_coverages.append({
- 'slug': coverage,
- 'name': coverage
- })
+ spatial_coverages.append({"slug": coverage, "name": coverage})
return {
"updated_at": result.updated_at,
"id": result.dataset_id,
"slug": result.dataset_slug,
- "name": getattr(result, f"dataset_name_{locale}") or result.dataset_name or result.dataset_slug,
- "description": getattr(result, f"dataset_description_{locale}") or result.dataset_description,
+ "name": getattr(result, f"dataset_name_{locale}")
+ or result.dataset_name
+ or result.dataset_slug,
+ "description": getattr(result, f"dataset_description_{locale}")
+ or result.dataset_description,
"tags": tags,
"themes": themes,
"entities": entities,
diff --git a/backend/apps/core/management/commands/fetch_metabase.py b/backend/apps/core/management/commands/fetch_metabase.py
index c687822d..930def1c 100644
--- a/backend/apps/core/management/commands/fetch_metabase.py
+++ b/backend/apps/core/management/commands/fetch_metabase.py
@@ -62,7 +62,9 @@ def get_databases(self, token: str):
def get_tables(self, token: str, database_id: int):
headers = self.get_headers(token)
- response = requests.get(BASE_URL + f"/api/database/{database_id}/metadata", headers=headers)
+ response = requests.get(
+ BASE_URL + f"/api/database/{database_id}/metadata", headers=headers
+ )
json_data = response.json()
tables = []
diff --git a/backend/apps/core/management/commands/populate.py b/backend/apps/core/management/commands/populate.py
index e2216db4..807f96b6 100644
--- a/backend/apps/core/management/commands/populate.py
+++ b/backend/apps/core/management/commands/populate.py
@@ -154,11 +154,19 @@ def get_models_that_depends_on(self, models_to_populate, layer_models):
def sort_models_by_depedencies(self, models_to_populate, other_models):
sorted_models = []
- while len(models_to_populate) > 0:
+ # while len(models_to_populate) > 0:
+ for vezes in range(len(models_to_populate)):
for model in models_to_populate:
has_all_dependencies = True
+ for model in models_to_populate:
for field in model._meta.get_fields():
+ has_all_dependencies = True
+
+ print(
+                        f"Field: {field}\nModels to test: {len(models_to_populate)}\n{'#' *30}"
+ )
+
if isinstance(field, models.ForeignKey) or isinstance(
field, models.ManyToManyField
):
@@ -169,12 +177,15 @@ def sort_models_by_depedencies(self, models_to_populate, other_models):
and field.null is False
):
has_all_dependencies = False
- break
if has_all_dependencies:
sorted_models.append(model)
models_to_populate.remove(model)
+ sorted_models = sorted_models + models_to_populate
+ print(f"SORTED MODELS: {sorted_models}\n\n")
+ print(f"MODELS TO POPULATE: {models_to_populate}\n\n")
+
return sorted_models
def clean_database(self, _models):
@@ -203,52 +214,58 @@ def create_instance(self, model, item):
m2m_payload = {}
for field in model._meta.get_fields():
- if isinstance(field, models.ForeignKey):
- field_name = f"{field.name}_id"
- current_value = item.get(field_name)
+ try:
+ if isinstance(field, models.ForeignKey):
+ field_name = f"{field.name}_id"
+ current_value = item.get(field_name)
- if current_value is None:
- continue
+ if current_value is None:
+ continue
- reference = self.references.get(field.related_model._meta.db_table, current_value)
+ reference = self.references.get(
+ field.related_model._meta.db_table, current_value
+ )
- if reference:
- payload[field_name] = reference
+ if reference:
+ payload[field_name] = reference
+ else:
+ # If the field is required and the reference is missing, skip this item entirely
+ if field.null is False:
+ return
+
+ retry = {
+ "item": item,
+ "table_name": field.related_model._meta.db_table,
+ "field_name": field_name,
+ }
+ elif isinstance(field, models.ManyToManyField):
+ field_name = field.name
+ m2m_table_name = field.m2m_db_table()
+
+ current_model_name = f"{model.__name__.lower()}_id"
+ field_model_name = field.related_model.__name__.lower() + "_id"
+
+ m2m_related_data = self.get_m2m_data(
+ m2m_table_name, current_model_name, field_model_name, item["id"]
+ )
+
+ instances = [
+ self.references.get(field.related_model._meta.db_table, current_value)
+ for current_value in m2m_related_data
+ ]
+
+ if instances:
+ m2m_payload[field_name] = instances
else:
- # If the field is required and the reference is missing, we need to skip
- if field.null is False:
- return
-
- retry = {
- "item": item,
- "table_name": field.related_model._meta.db_table,
- "field_name": field_name,
- }
- elif isinstance(field, models.ManyToManyField):
- field_name = field.name
- m2m_table_name = field.m2m_db_table()
-
- current_model_name = f"{model.__name__.lower()}_id"
- field_model_name = field.related_model.__name__.lower() + "_id"
-
- m2m_related_data = self.get_m2m_data(
- m2m_table_name, current_model_name, field_model_name, item["id"]
- )
-
- instances = [
- self.references.get(field.related_model._meta.db_table, current_value)
- for current_value in m2m_related_data
- ]
-
- if instances:
- m2m_payload[field_name] = instances
- else:
- current_value = item.get(field.name)
+ current_value = item.get(field.name)
- if current_value is None:
- continue
+ if current_value is None:
+ continue
- payload[field.name] = current_value
+ payload[field.name] = current_value
+ except Exception as exc:
+ # Report and skip fields that cannot be resolved instead of dropping into the debugger.
+ print(f"Skipping field {field} on {model.__name__}: {exc}")
instance = model(**payload)
instance.save()
diff --git a/scripts/ai-database-translate.py b/scripts/ai-database-translate.py
index 4180bac2..d4507507 100755
--- a/scripts/ai-database-translate.py
+++ b/scripts/ai-database-translate.py
@@ -23,53 +23,63 @@
from pprint import pprint
from random import random
+
import dotenv
+
dotenv.load_dotenv()
import json
-import better_exceptions; better_exceptions.hook()
-import google.generativeai as genai
+
+import better_exceptions
+
+better_exceptions.hook()
import csv as _csv
-from io import StringIO
import os
import time
from collections import deque
from functools import wraps
+from io import StringIO
+
+import google.generativeai as genai
import psycopg2
from tqdm import tqdm
-import sys
genai.configure(api_key=os.getenv("API_KEY"))
-model = genai.GenerativeModel('gemini-1.5-flash-latest')
+model = genai.GenerativeModel("gemini-1.5-flash-latest")
# model = genai.GenerativeModel('gemini-1.0-pro-latest')
+
def main():
"""
Main function to execute the translation process for all specified tables and fields.
"""
for table, fields in FIELDS_TO_TRANSLATE:
- print(f"{table:<20}: {str(fields):<30} - Entries to process: {get_data(table, fields, count_only=True)}")
+ print(
+ f"{table:<20}: {str(fields):<30} - Entries to process: {get_data(table, fields, count_only=True)}"
+ )
print("Press Enter to continue...")
input()
for table, fields in FIELDS_TO_TRANSLATE:
treat_table(table, fields)
+
def get_new_fields(fields):
"""
Generate new field names for English and Spanish translations.
-
+
Args:
fields (list): Original field names.
-
+
Returns:
list: New field names with '_en' and '_es' suffixes.
"""
- return [f'{f}_en' for f in fields] + [f'{f}_es' for f in fields]
+ return [f"{f}_en" for f in fields] + [f"{f}_es" for f in fields]
+
def treat_table(table, fields):
"""
Process and translate data for a specific table and set of fields.
-
+
Args:
table (str): Name of the table to process.
fields (list): List of fields to translate.
@@ -79,7 +89,9 @@ def treat_table(table, fields):
data = [d | {f: None for f in get_new_fields(fields)} for d in data]
print(f"Processing {len(data)} entries!")
- new_fields_description_for_prompt = [f'{f}_en (english)' for f in fields] + [f'{f}_es (spanish)' for f in fields]
+ new_fields_description_for_prompt = [f"{f}_en (english)" for f in fields] + [
+ f"{f}_es (spanish)" for f in fields
+ ]
pbar = tqdm(total=len(data), smoothing=0)
# for batch in batchify(3, data):
@@ -89,7 +101,8 @@ def treat_table(table, fields):
response = None
try:
pprint(d)
- response = gen_content(f"""
+ response = gen_content(
+ f"""
We are an NGO translating metadata for an open public database. We have JSON data in Portuguese.
Would you please translate the following JSON, filling in the missing keys: {new_fields_description_for_prompt}. Just write the output JSON and nothing else.
@@ -98,14 +111,15 @@ def treat_table(table, fields):
```
-""")
+"""
+ )
print(response.text)
- res = json.loads(response.text.strip('\n`json'))
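+ # The model usually wraps its answer in a Markdown json code fence; strip the backticks and newlines before parsing.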
+ res = json.loads(response.text.strip("\n`json"))
for res_line, original_line in zip(res, d):
try:
for field in get_new_fields(fields):
assert field in res_line, f"Missing {field!r} column"
- assert res_line['id'] == original_line['id'], "Id not matching"
+ assert res_line["id"] == original_line["id"], "Id not matching"
write_response_to_db(res_line, table, fields)
print("Written!")
except Exception as e:
@@ -128,7 +142,7 @@ def treat_table(table, fields):
pass
# breakpoint()
db.commit()
- print(f'{len(errors)} ERRORS')
+ print(f"{len(errors)} ERRORS")
pprint(errors)
# breakpoint()
@@ -136,29 +150,32 @@ def treat_table(table, fields):
def connect_db():
"""
Establish a connection to the PostgreSQL database.
-
+
Returns:
psycopg2.connection: Database connection object.
"""
db = psycopg2.connect(
- dbname=os.getenv('DB_NAME'),
- user=os.getenv('DB_USER'),
- password=os.getenv('DB_PASSWORD'),
- host=os.getenv('DB_HOST'),
- port=os.getenv('DB_PORT')
+ dbname=os.getenv("DB_NAME"),
+ user=os.getenv("DB_USER"),
+ password=os.getenv("DB_PASSWORD"),
+ host=os.getenv("DB_HOST"),
+ port=os.getenv("DB_PORT"),
)
db.autocommit = True
return db
+
db = connect_db()
cursor = db.cursor()
+
+
def sql(q):
"""
Execute an SQL query and return the results.
-
+
Args:
q (str): SQL query to execute.
-
+
Returns:
list: Query results.
"""
@@ -166,8 +183,8 @@ def sql(q):
result = cursor.fetchall()
return result
+
def csv(data):
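+ """Render query rows as CSV text, using the global cursor's description for the header row."""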
- from io import StringIO
output = StringIO()
csv_writer = _csv.writer(output)
if cursor.description:
@@ -176,13 +193,14 @@ def csv(data):
csv_writer.writerows(data)
return output.getvalue()
+
def dictify(data):
"""
Convert query results to a list of dictionaries.
-
+
Args:
data: Query results to convert.
-
+
Returns:
list: List of dictionaries representing the data.
"""
@@ -192,29 +210,33 @@ def dictify(data):
out = data
field_names = [desc[0] for desc in cursor.description]
out = [dict(zip(field_names, d)) for d in out]
- if not isinstance(data, list): out = out[0]
+ if not isinstance(data, list):
+ out = out[0]
return out
def batchify(size, data):
"""Yield successive n-sized chunks from data."""
for i in range(0, len(data), size):
- yield data[i:i + size]
+ yield data[i : i + size]
+
def batch_by_token_size(max_tokens, max_batch, data):
"""
Yield chunks from data where the total token count of each chunk does not exceed max_tokens.
-
+
Args:
max_tokens (int): Maximum number of tokens per batch.
max_batch (int): Maximum number of items per batch.
data (list): Data to be batched.
-
+
Yields:
list: Batch of data items.
"""
+
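+ # Rough token estimate: whitespace-separated word count is enough for batching purposes.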
def token_counter(item):
return len(str(item).split())
+
batch = []
token_count = 0
for item in data:
@@ -232,7 +254,7 @@ def token_counter(item):
def write_response_to_db(res, table, fields):
"""
Write translated content back to the database.
-
+
Args:
res (dict): Translated data.
table (str): Name of the table to update.
@@ -242,38 +264,43 @@ def write_response_to_db(res, table, fields):
set_clause = ", ".join([f"{field} = %s" for field in new_fields])
values = [res[field] for field in new_fields]
# values = [field + ' ' if field else None for field in values]
- cursor.execute(f'UPDATE "{table}" SET {set_clause} WHERE id = %s', values + [res['id']])
+ cursor.execute(f'UPDATE "{table}" SET {set_clause} WHERE id = %s', values + [res["id"]])
def get_data(table, fields, count_only=False):
"""
Retrieve data from the database for translation.
-
+
Args:
table (str): Name of the table to query.
fields (list): List of fields to retrieve.
count_only (bool): If True, return only the count of rows to process.
-
+
Returns:
int or list: Count of rows or list of data to process.
"""
- pt_fields = ", ".join(f + '_pt' for f in fields)
+ pt_fields = ", ".join(f + "_pt" for f in fields)
if len(fields) == 1:
# Skip single fields that are NULL at the source. Doing this properly for multi-field sets is hard, so we don't.
- restriction = " AND ".join(f'{f}_en IS NULL AND {f}_es IS NULL AND {f}_pt IS NOT NULL' for f in fields)
+ restriction = " AND ".join(
+ f"{f}_en IS NULL AND {f}_es IS NULL AND {f}_pt IS NOT NULL" for f in fields
+ )
else:
- restriction = " AND ".join(f'{f}_en IS NULL AND {f}_es IS NULL' for f in fields)
- predicate = 'count(*)' if count_only else f'id, {pt_fields}'
+ restriction = " AND ".join(f"{f}_en IS NULL AND {f}_es IS NULL" for f in fields)
+ predicate = "count(*)" if count_only else f"id, {pt_fields}"
out = sql(f'SELECT {predicate} FROM "{table}" WHERE {restriction}')
return out[0][0] if count_only else out
+
# def get_data():
- # out = sql('SELECT id, name_pt, description_pt FROM dataset WHERE id NOT IN (SELECT id FROM translated_dataset)')
- # return out
+# out = sql('SELECT id, name_pt, description_pt FROM dataset WHERE id NOT IN (SELECT id FROM translated_dataset)')
+# return out
+
def rate_limiter(max_calls_per_minute):
interval = 60.0 / max_calls_per_minute
call_times = deque()
+
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
@@ -289,9 +316,12 @@ def wrapper(*args, **kwargs):
if sleep_time > 0:
time.sleep(sleep_time)
return wrapper(*args, **kwargs)
+
return wrapper
+
return decorator
+
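+ # NOTE: this second rate_limiter definition overrides the sliding-window version above, so only this simpler fixed-interval limiter is applied to gen_content.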
def rate_limiter(max_calls_per_minute):
min_interval = 60.0 / max_calls_per_minute
last_called = 0
@@ -307,23 +337,28 @@ def wrapper(*args, **kwargs):
ret = func(*args, **kwargs)
last_called = time.time()
return ret
+
return wrapper
+
return decorator
+
@rate_limiter(max_calls_per_minute=14)
def gen_content(c):
"""
Generate content using the AI model with rate limiting.
-
+
Args:
c (str): Prompt for content generation.
-
+
Returns:
genai.types.GenerateContentResponse: Generated content.
"""
return model.generate_content(c)
-EXAMPLE_DATA = [""" id | name_en | description_en
+
+EXAMPLE_DATA = [
+ """ id | name_en | description_en
----+----------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
1 | Censo Agropecuário | O Censo Agropecuário, realizado pelo Instituto Brasileiro de Geografia e Estatística (IBGE), é a principal e mais completa investigação estatística e territorial sobre a produção agropecuária do país. Visa obter informações sobre a estrutura, a dinâmica e o nível de produção da atividade agropecuária brasileira. +
| | +
@@ -334,7 +369,8 @@ def gen_content(c):
2 | Pesquisa Nacional de Saúde (PNS) | A Pesquisa Nacional de Saúde (PNS) é um inquérito de base domiciliar e âmbito nacional, realizada pelo Ministério da Saúde (MS) em parceria com o Instituto Brasileiro de Geografia e Estatística (IBGE), nos anos de 2013 e 2019. +
| | A população pesquisada corresponde aos moradores de domicílios particulares permanentes do Brasil, exceto os localizados nos setores censitários especiais (compostos por aglomerados subnormais; quartéis, bases militares etc.; alojamento, acampamentos etc.; embarcações, barcos, navios etc.; aldeia indígena; penitenciárias, colônias penais, presídios, cadeias etc.; asilos, orfanatos, conventos, hospitais etc.; e assentamentos rurais). +
| |
-"""]
+"""
+]
FIELDS_TO_TRANSLATE = [
("dataset", ["name"]),
diff --git a/scripts/database-clean-area-slug.py b/scripts/database-clean-area-slug.py
index 0d2372e9..e0ef6677 100644
--- a/scripts/database-clean-area-slug.py
+++ b/scripts/database-clean-area-slug.py
@@ -13,31 +13,34 @@
"""
import os
+
import dotenv
import psycopg2
from tqdm import tqdm
dotenv.load_dotenv()
+
def connect_db():
"""Establish a connection to the PostgreSQL database."""
db = psycopg2.connect(
- dbname=os.getenv('DB_NAME'),
- user=os.getenv('DB_USER'),
- password=os.getenv('DB_PASSWORD'),
- host=os.getenv('DB_HOST'),
- port=os.getenv('DB_PORT')
+ dbname=os.getenv("DB_NAME"),
+ user=os.getenv("DB_USER"),
+ password=os.getenv("DB_PASSWORD"),
+ host=os.getenv("DB_HOST"),
+ port=os.getenv("DB_PORT"),
)
db.autocommit = True
return db
+
def main():
"""Main function to execute the slug update process."""
db = connect_db()
cursor = db.cursor()
# # First, delete continent-only entries
- continent_codes = ['sa', 'na', 'eu', 'af', 'as', 'oc', 'an']
+ continent_codes = ["sa", "na", "eu", "af", "as", "oc", "an"]
# cursor.execute(
# 'DELETE FROM area WHERE slug = ANY(%s)',
# (continent_codes,)
@@ -45,91 +48,89 @@ def main():
# print(f"Deleted {cursor.rowcount} continent entries")
# Then, update remaining entries to remove continent prefix
- cursor.execute('SELECT id, slug FROM area')
+ cursor.execute("SELECT id, slug FROM area")
areas = cursor.fetchall()
print(f"Processing {len(areas)} areas...")
for area_id, slug in tqdm(areas):
try:
# Split the slug and remove the continent prefix
- parts = slug.split('_')
+ parts = slug.split("_")
if len(parts) > 1 and parts[0] in continent_codes:
- new_slug = '_'.join(parts[1:])
- cursor.execute(
- 'UPDATE area SET slug = %s WHERE id = %s',
- (new_slug, area_id)
- )
+ new_slug = "_".join(parts[1:])
+ cursor.execute("UPDATE area SET slug = %s WHERE id = %s", (new_slug, area_id))
except Exception as e:
print(f"Error updating area {area_id} with slug {slug}: {str(e)}")
continue
# Update administrative levels based on number of underscores in slug
- cursor.execute('SELECT id, slug FROM area')
+ cursor.execute("SELECT id, slug FROM area")
areas = cursor.fetchall()
-
+
print("Updating administrative levels...")
for area_id, slug in tqdm(areas):
- underscore_count = slug.count('_')
+ underscore_count = slug.count("_")
admin_level = underscore_count
cursor.execute(
- 'UPDATE area SET administrative_level = %s WHERE id = %s',
- (admin_level, area_id)
+ "UPDATE area SET administrative_level = %s WHERE id = %s", (admin_level, area_id)
)
# Add parent reference for top-level areas
- world_id = '21486514-209f-416f-b73c-30f41d07e059'
- country_id = 'b9bfd6a6-bc3f-460c-8a93-f695891d64d3'
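+ # Hard-coded ids: world_id becomes the parent of top-level areas, country_id their entity.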
+ world_id = "21486514-209f-416f-b73c-30f41d07e059"
+ country_id = "b9bfd6a6-bc3f-460c-8a93-f695891d64d3"
print("Setting parent reference for top-level areas...")
- cursor.execute(
- 'UPDATE area SET parent_id = %s WHERE administrative_level = 0',
- (world_id,)
- )
+ cursor.execute("UPDATE area SET parent_id = %s WHERE administrative_level = 0", (world_id,))
print("Setting parent reference for country-level areas...")
- cursor.execute(
- 'UPDATE area SET entity_id = %s WHERE administrative_level = 0',
- (country_id,)
- )
+ cursor.execute("UPDATE area SET entity_id = %s WHERE administrative_level = 0", (country_id,))
print(f"Updated {cursor.rowcount} top-level areas with world parent reference")
# First get Brazil's ID
- cursor.execute('SELECT id FROM area WHERE slug = %s', ('br',))
+ cursor.execute("SELECT id FROM area WHERE slug = %s", ("br",))
brazil_id = cursor.fetchone()[0]
- state_entity_id = '839765a7-9c7a-44bd-bb88-357cedba03f6'
+ state_entity_id = "839765a7-9c7a-44bd-bb88-357cedba03f6"
print("Setting parent reference for Brazilian states...")
- cursor.execute("""
+ cursor.execute(
+ """
UPDATE area
SET parent_id = %s,
entity_id = %s
WHERE slug LIKE 'br_%%'
AND slug NOT LIKE '%%\_%%\_%%'
- """, (brazil_id, state_entity_id))
-
+ """,
+ (brazil_id, state_entity_id),
+ )
+
print(f"Updated {cursor.rowcount} Brazilian state areas")
# First, let's check what we're working with
- cursor.execute("""
+ cursor.execute(
+ """
SELECT slug
FROM area
WHERE slug LIKE 'br\_%%\_%%'
LIMIT 5;
- """)
+ """
+ )
print("Sample municipality slugs:", cursor.fetchall())
- cursor.execute("""
+ cursor.execute(
+ """
SELECT slug
FROM area
WHERE slug LIKE 'br\_%%'
AND slug NOT LIKE 'br\_%%\_%%'
LIMIT 5;
- """)
+ """
+ )
print("Sample state slugs:", cursor.fetchall())
# Set municipality parents and entity
- municipality_entity_id = '460cf58b-63a7-4fb7-910f-4ca8ea58c25e' # entity ID for municipalities
+ municipality_entity_id = "460cf58b-63a7-4fb7-910f-4ca8ea58c25e" # entity ID for municipalities
print("Setting parent reference for Brazilian municipalities...")
-
- cursor.execute("""
+
+ cursor.execute(
+ """
UPDATE area AS municipality
SET parent_id = state.id,
entity_id = %s
@@ -137,12 +138,15 @@ def main():
WHERE municipality.slug LIKE 'br\_%%\_%%' -- Matches municipality pattern (2 underscores)
AND state.slug = split_part(municipality.slug, '_', 1) || '_' || split_part(municipality.slug, '_', 2) -- Gets state slug (e.g., 'br_sp')
AND municipality.slug NOT LIKE '%%\_%%\_%%\_%%' -- Ensures exactly 2 underscores
- """, (municipality_entity_id,))
-
+ """,
+ (municipality_entity_id,),
+ )
+
print(f"Updated {cursor.rowcount} Brazilian municipality areas")
db.commit()
print("All done!")
+
if __name__ == "__main__":
- main()
\ No newline at end of file
+ main()