diff --git a/controlpanel/api/aws.py b/controlpanel/api/aws.py
index 9e2ef0cc4..63d24be64 100644
--- a/controlpanel/api/aws.py
+++ b/controlpanel/api/aws.py
@@ -659,25 +659,7 @@ def create(self, bucket_name, is_data_warehouse=False):
# Set bucket lifecycle. Send non-current versions of files to glacier
# storage after 30 days.
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.put_bucket_lifecycle_configuration # noqa: E501
- lifecycle_id = f"{bucket_name}_lifecycle_configuration"
- s3_client.put_bucket_lifecycle_configuration(
- Bucket=bucket_name,
- LifecycleConfiguration={
- "Rules": [
- {
- "ID": lifecycle_id,
- "Status": "Enabled",
- "Prefix": "",
- "NoncurrentVersionTransitions": [
- {
- "NoncurrentDays": 30,
- "StorageClass": "GLACIER",
- },
- ],
- },
- ]
- },
- )
+ self.apply_lifecycle_config(bucket_name, s3_client)
if is_data_warehouse:
self._tag_bucket(bucket, {"buckettype": "datawarehouse"})
@@ -718,6 +700,35 @@ def create(self, bucket_name, is_data_warehouse=False):
self._apply_tls_restrictions(s3_client, bucket_name)
return bucket
+ def apply_lifecycle_config(self, bucket_name, s3_client=None):
+ """
+ Apply the standard lifecycle configuration to an existing bucket.
+
+ A single rule (ID "<bucket_name>_lifecycle_configuration", empty
+ prefix so it matches every object) transitions objects to the
+ INTELLIGENT_TIERING storage class immediately (Days=0). If the
+ bucket no longer exists, the NoSuchBucket error is logged as a
+ warning and swallowed so that bulk updates can continue.
+
+ :param bucket_name: name of the S3 bucket to configure
+ :param s3_client: optional boto3 S3 client; when not supplied one
+ is created from self.boto3_session
+ """
+ if not s3_client:
+ s3_client = self.boto3_session.client("s3")
+ lifecycle_id = f"{bucket_name}_lifecycle_configuration"
+
+ try:
+ s3_client.put_bucket_lifecycle_configuration(
+ Bucket=bucket_name,
+ LifecycleConfiguration={
+ "Rules": [
+ {
+ "ID": lifecycle_id,
+ "Status": "Enabled",
+ "Prefix": "",
+ "Transitions": [
+ {
+ "Days": 0,
+ "StorageClass": "INTELLIGENT_TIERING",
+ },
+ ],
+ },
+ ]
+ },
+ )
+ except s3_client.exceptions.NoSuchBucket:
+ log.warning(
+ f"Skipping creating lifecycle configuration for {bucket_name}: Does not exist"
+ )
+
def _apply_tls_restrictions(self, client, bucket_name):
"""it assumes that this is a new bucket with no policies & creates it"""
tls_statement = deepcopy(BUCKET_TLS_STATEMENT)
diff --git a/controlpanel/api/cluster.py b/controlpanel/api/cluster.py
index 5b4df3864..3d1a7aa13 100644
--- a/controlpanel/api/cluster.py
+++ b/controlpanel/api/cluster.py
@@ -741,6 +741,12 @@ def create(self, owner=AWSRoleCategory.user):
)
return self.aws_bucket_service.create(self.bucket.name, self.bucket.is_data_warehouse)
+ def apply_lifecycle_config(self, owner=AWSRoleCategory.user):
+ """
+ Assume the role for the given owner category, then apply the
+ standard lifecycle configuration to this entity's bucket.
+
+ NOTE: the only parameter is `owner` (an AWSRoleCategory used to
+ resolve the assume-role name) — the bucket name is taken from
+ self.bucket, so callers must not pass a bucket name here.
+ """
+ self.aws_bucket_service.assume_role_name = self.get_assume_role(
+ self.aws_service_class, aws_role_category=owner
+ )
+ return self.aws_bucket_service.apply_lifecycle_config(self.bucket.name)
+
def mark_for_archival(self):
self.aws_bucket_service.assume_role_name = self.get_assume_role(
self.aws_service_class, aws_role_category=self._get_assume_role_category()
diff --git a/controlpanel/frontend/jinja2/datasource-list.html b/controlpanel/frontend/jinja2/datasource-list.html
index 7c575bc6a..90e9dcc0b 100644
--- a/controlpanel/frontend/jinja2/datasource-list.html
+++ b/controlpanel/frontend/jinja2/datasource-list.html
@@ -19,6 +19,18 @@
{% block content %}
{{ page_title }}
+{% if all_datasources and request.user.has_perm('api.is_superuser') %}
+Update all bucket lifecycle configurations
+
+
+{% endif %}
+
{% if request.user.has_perm('api.list_s3bucket') %}
{{ datasource_list(buckets, datasource_type|default(""), request.user) }}
diff --git a/controlpanel/frontend/urls.py b/controlpanel/frontend/urls.py
index 13e6de562..e5f15d1b6 100644
--- a/controlpanel/frontend/urls.py
+++ b/controlpanel/frontend/urls.py
@@ -10,6 +10,11 @@
path("oidc/entraid/auth/", views.EntraIdAuthView.as_view(), name="entraid-auth"),
path("oidc/logout/", views.LogoutView.as_view(), name="oidc_logout"),
path("datasources/", views.AdminBucketList.as_view(), name="list-all-datasources"),
+ path(
+ "datasources/update-lifecycle-config",
+ views.UpdateDatasourceLifecycleConfig.as_view(),
+ name="update-lifecycle",
+ ),
path("datasources//", views.BucketDetail.as_view(), name="manage-datasource"),
path(
"datasources//access/",
diff --git a/controlpanel/frontend/views/__init__.py b/controlpanel/frontend/views/__init__.py
index 99c31075a..46fd2cb36 100644
--- a/controlpanel/frontend/views/__init__.py
+++ b/controlpanel/frontend/views/__init__.py
@@ -54,6 +54,7 @@
RevokeAccess,
RevokeIAMManagedPolicyAccess,
UpdateAccessLevel,
+ UpdateDatasourceLifecycleConfig,
UpdateIAMManagedPolicyAccessLevel,
WebappBucketList,
)
diff --git a/controlpanel/frontend/views/datasource.py b/controlpanel/frontend/views/datasource.py
index 2f4a895ce..595e79d5b 100644
--- a/controlpanel/frontend/views/datasource.py
+++ b/controlpanel/frontend/views/datasource.py
@@ -9,7 +9,7 @@
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.urls import reverse_lazy
-from django.views.generic.base import ContextMixin
+from django.views.generic.base import ContextMixin, View
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, FormMixin, UpdateView
from django.views.generic.list import ListView
@@ -402,3 +402,23 @@ def values(self, form):
"paths": form.cleaned_data["paths"],
"policy_id": form.cleaned_data["policy_id"],
}
+
+
+class UpdateDatasourceLifecycleConfig(
+ OIDCLoginRequiredMixin,
+ PermissionRequiredMixin,
+ View,
+):
+ """
+ Superuser-only POST endpoint that reapplies the standard S3
+ lifecycle configuration to every S3Bucket, then redirects back to
+ the datasource list with a success message.
+ """
+
+ permission_required = "api.is_superuser"
+
+ def get_success_url(self):
+ return reverse_lazy("list-all-datasources")
+
+ def post(self, *args, **kwargs):
+ buckets = S3Bucket.objects.all()
+
+ for bucket in buckets:
+ # Call with no arguments: the cluster method's only parameter
+ # is `owner` (an AWSRoleCategory). Passing bucket.name here
+ # would bind the bucket name to `owner` and corrupt the
+ # assume-role resolution; the method already uses
+ # self.bucket.name internally.
+ bucket.cluster.apply_lifecycle_config()
+
+ messages.success(self.request, "Successfully updated bucket lifecycle configurations")
+ return HttpResponseRedirect(self.get_success_url())
diff --git a/tests/api/cluster/test_bucket.py b/tests/api/cluster/test_bucket.py
index 2a00ac89d..2db33a563 100644
--- a/tests/api/cluster/test_bucket.py
+++ b/tests/api/cluster/test_bucket.py
@@ -61,6 +61,12 @@ def test_mark_for_archival(aws_tag_bucket, bucket):
aws_tag_bucket.assert_called_with(bucket.name, {"to-archive": "true"})
+def test_apply_lifecycle_config(bucket):
+ with patch("controlpanel.api.aws.AWSBucket.apply_lifecycle_config") as apply_lifecycle_config:
+ # Call with no arguments: the cluster method's only parameter is
+ # `owner` (an AWSRoleCategory), not the bucket name — passing
+ # bucket.name would bind it to `owner` by mistake.
+ cluster.S3Bucket(bucket).apply_lifecycle_config()
+ apply_lifecycle_config.assert_called_with(bucket.name)
+
+
def test_aws_folder_exists(bucket):
with patch("controlpanel.api.aws.AWSFolder.exists") as mock_exists:
mock_exists.return_value = False
diff --git a/tests/api/test_aws.py b/tests/api/test_aws.py
index d700d5dfc..e9f49e2e9 100644
--- a/tests/api/test_aws.py
+++ b/tests/api/test_aws.py
@@ -246,8 +246,8 @@ def test_create_bucket(logs_bucket, s3):
rule = versioning.rules[0]
assert rule["ID"].endswith("_lifecycle_configuration")
assert rule["Status"] == "Enabled"
- assert rule["NoncurrentVersionTransitions"][0]["NoncurrentDays"] == 30
- assert rule["NoncurrentVersionTransitions"][0]["StorageClass"] == "GLACIER"
+ assert rule["Transitions"][0]["Days"] == 0
+ assert rule["Transitions"][0]["StorageClass"] == "INTELLIGENT_TIERING"
# Check logging
assert (