introduce staging binary cache for migration
Mic92 committed Oct 16, 2024
1 parent 824d619 commit 22bda4c
Showing 8 changed files with 512 additions and 0 deletions.
terraform/cache-bucket/main.tf (124 additions, 0 deletions)
@@ -0,0 +1,124 @@
variable "bucket_name" {
type = string
}

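# Cache bucket: objects transition to the infrequent-access (STANDARD_IA)
# storage class after a year, and CORS allows GET requests from any origin.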
resource "aws_s3_bucket" "cache" {
provider = aws
bucket = var.bucket_name

lifecycle_rule {
enabled = true

transition {
days = 365
storage_class = "STANDARD_IA"
}
}

cors_rule {
allowed_headers = ["Authorization"]
allowed_methods = ["GET"]
allowed_origins = ["*"]
max_age_seconds = 3000
}
}

resource "aws_s3_bucket_public_access_block" "cache" {
bucket = aws_s3_bucket.cache.bucket

block_public_acls = false
block_public_policy = false
}

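# Seed the bucket with the static nix-cache-info and index.html objects.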
resource "aws_s3_bucket_object" "cache-nix-cache-info" {
provider = aws
depends_on = [ aws_s3_bucket_public_access_block.cache ]

bucket = aws_s3_bucket.cache.bucket
content_type = "text/x-nix-cache-info"
etag = filemd5("${path.module}/../cache-staging/nix-cache-info")
key = "nix-cache-info"
source = "${path.module}/../cache-staging/nix-cache-info"
}

resource "aws_s3_bucket_object" "cache-index-html" {
provider = aws
depends_on = [ aws_s3_bucket_public_access_block.cache ]

bucket = aws_s3_bucket.cache.bucket
content_type = "text/html"
etag = filemd5("${path.module}/../cache-staging/index.html")
key = "index.html"
source = "${path.module}/../cache-staging/index.html"
}

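# Public read for the whole bucket; the s3-upload-releases user may
# additionally upload under debuginfo/ and list the bucket.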
resource "aws_s3_bucket_policy" "cache" {
provider = aws
bucket = aws_s3_bucket.cache.id
depends_on = [ aws_s3_bucket_public_access_block.cache ]

# imported from existing
policy = <<EOF
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "AllowPublicRead",
"Effect": "Allow",
"Principal": {
"AWS": "*"
},
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::${var.bucket_name}/*"
},
{
"Sid": "AllowUploadDebuginfoWrite",
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::080433136561:user/s3-upload-releases"
},
"Action": [
"s3:PutObject",
"s3:PutObjectAcl"
],
"Resource": "arn:aws:s3:::${var.bucket_name}/debuginfo/*"
},
{
"Sid": "AllowUploadDebuginfoRead",
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::080433136561:user/s3-upload-releases"
},
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::${var.bucket_name}/*"
},
{
"Sid": "AllowUploadDebuginfoRead2",
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::080433136561:user/s3-upload-releases"
},
"Action": [
"s3:ListBucket",
"s3:GetBucketLocation"
],
"Resource": "arn:aws:s3:::${var.bucket_name}"
}
]
}
EOF
}

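# Data transfer out of the bucket is billed to the requester rather than the
# bucket owner.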
resource "aws_s3_bucket_request_payment_configuration" "cache" {
provider = aws
bucket = aws_s3_bucket.cache.id
payer = "Requester"
}

output "bucket" {
value = "${aws_s3_bucket.cache.bucket}"
}

output "bucket_domain_name" {
value = "${aws_s3_bucket.cache.bucket_domain_name}"
}
terraform/cache-bucket/providers.tf (7 additions, 0 deletions)
@@ -0,0 +1,7 @@
terraform {
  required_providers {
    aws = {
      source = "registry.terraform.io/hashicorp/aws"
    }
  }
}
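
For context on how this provider requirement is used: the module leaves the AWS provider configuration to its caller, so each instantiation can pin its bucket to a region by passing an aliased provider. A minimal, hypothetical sketch of the wiring (illustrative names only; the real instantiations follow in terraform/cache-staging.tf):

module "cache-staging-example" {            # hypothetical module name
  source      = "./cache-bucket"
  bucket_name = "nix-cache-staging-example" # hypothetical bucket name
  providers = {
    aws = aws.us # choose the bucket's region via an aliased AWS provider
  }
}
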
terraform/cache-staging.tf (194 additions, 0 deletions)
@@ -0,0 +1,194 @@
locals {
  cache_staging_domain = "cache-staging.nixos.org"
}

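# Two instances of the staging cache bucket: the existing 202010 bucket in the
# US and the new 202410 bucket in the EU that the migration targets.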
module "cache-staging-202010" {
source = "./cache-bucket"
bucket_name = "nix-cache-staging"
providers = {
aws = aws.us
}
}

module "cache-staging-202410" {
source = "./cache-bucket"
bucket_name = "nix-cache-staging-202410"
providers = {
# move the new bucket to EU
aws = aws
}
}

resource "fastly_service_vcl" "cache-staging" {
name = local.cache_staging_domain
default_ttl = 86400

backend {
address = "s3.amazonaws.com"
auto_loadbalance = false
between_bytes_timeout = 10000
connect_timeout = 5000
error_threshold = 0
first_byte_timeout = 15000
max_conn = 200
name = "s3.amazonaws.com"
override_host = module.cache-staging-202010.bucket_domain_name
port = 443
# TODO: move this to the EU after migration
shield = "iad-va-us"
ssl_cert_hostname = "s3.amazonaws.com"
ssl_check_cert = true
use_ssl = true
weight = 100
}

# Temporarily disabled due to nix-index bugs: see https://github.com/nix-community/nix-index/issues/249
#request_setting {
# name = "Redirect HTTP to HTTPS"
# force_ssl = true
#}

condition {
name = "is-404"
priority = 0
statement = "beresp.status == 404"
type = "CACHE"
}

condition {
name = "Match /"
priority = 10
statement = "req.url ~ \"^/$\""
type = "REQUEST"
}

domain {
name = "cache-staging.nixos.org"
}

header {
action = "set"
destination = "url"
ignore_if_set = false
name = "Landing page"
priority = 10
request_condition = "Match /"
source = "\"/index.html\""
type = "request"
}

  # Clean headers for caching
  header {
    destination = "http.x-amz-request-id"
    type        = "cache"
    action      = "delete"
    name        = "remove x-amz-request-id"
  }
  header {
    destination = "http.x-amz-version-id"
    type        = "cache"
    action      = "delete"
    name        = "remove x-amz-version-id"
  }
  header {
    destination = "http.x-amz-id-2"
    type        = "cache"
    action      = "delete"
    name        = "remove x-amz-id-2"
  }

  # Enable Streaming Miss.
  # https://docs.fastly.com/en/guides/streaming-miss
  # https://github.com/NixOS/nixos-org-configurations/issues/212#issuecomment-1187568233
  header {
    priority    = 20
    destination = "do_stream"
    type        = "cache"
    action      = "set"
    name        = "Enabling Streaming Miss"
    source      = "true"
  }

  # Allow CORS GET requests.
  header {
    destination = "http.access-control-allow-origin"
    type        = "response"
    action      = "set"
    name        = "CORS Allow"
    source      = "\"*\""
  }

  response_object {
    name            = "404-page"
    cache_condition = "is-404"
    content         = "404"
    content_type    = "text/plain"
    response        = "Not Found"
    status          = 404
  }

  # Authenticate Fastly<->S3 requests. See Fastly documentation:
  # https://docs.fastly.com/en/guides/amazon-s3#using-an-amazon-s3-private-bucket
  snippet {
    name     = "Authenticate S3 requests"
    type     = "miss"
    priority = 100
    content = templatefile("${path.module}/cache-staging/s3-authn.vcl", {
      aws_region     = aws_s3_bucket.cache.region
      backend_domain = aws_s3_bucket.cache.bucket_domain_name
      access_key     = local.cache-iam.key
      secret_key     = local.cache-iam.secret
    })
  }

  snippet {
    content  = "set req.url = querystring.remove(req.url);"
    name     = "Remove all query strings"
    priority = 50
    type     = "recv"
  }

  # Work around the 2GB size limit for large files
  #
  # See https://docs.fastly.com/en/guides/segmented-caching
  snippet {
    content  = <<-EOT
      if (req.url.path ~ "^/nar/") {
        set req.enable_segmented_caching = true;
      }
    EOT
    name     = "Enable segment caching for NAR files"
    priority = 60
    type     = "recv"
  }

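  # S3 responds with 403 rather than 404 for missing objects when the caller
  # is not allowed to list the bucket, so present those errors as plain 404s.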
  snippet {
    name     = "cache-errors"
    content  = <<-EOT
      if (beresp.status == 403) {
        set beresp.status = 404;
      }
    EOT
    priority = 100
    type     = "fetch"
  }

  logging_s3 {
    name              = "${local.cache_staging_domain}-to-s3"
    bucket_name       = local.fastlylogs["bucket_name"]
    compression_codec = "zstd"
    domain            = local.fastlylogs["s3_domain"]
    format            = local.fastlylogs["format"]
    format_version    = 2
    path              = "${local.cache_staging_domain}/"
    period            = local.fastlylogs["period"]
    message_type      = "blank"
    s3_iam_role       = local.fastlylogs["iam_role_arn"]
  }
}

resource "fastly_tls_subscription" "cache-staging" {
domains = [for domain in fastly_service_vcl.cache-staging.domain : domain.name]
configuration_id = local.fastly_tls12_sni_configuration_id
certificate_authority = "globalsign"
}
terraform/cache-staging/diagnostic.sh (54 additions, 0 deletions)
@@ -0,0 +1,54 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p bind.dnsutils -p mtr -p curl
# shellcheck shell=bash
# impure: needs ping
#
# Run this script if you are having issues with cache.nixos.org and paste the
# output URL in a new issue in the same repo.
#
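# Example invocation (the domain argument is optional and defaults to the
# staging cache):
#
#   ./diagnostic.sh cache-staging.nixos.org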

domain=${1:-cache-staging.nixos.org}

run() {
  echo "> $*"
  "$@" |& sed -e "s/^/ /"
  printf "Exit: %s\n\n\n" "$?"
}

curl_w="
time_namelookup: %{time_namelookup}
time_connect: %{time_connect}
time_appconnect: %{time_appconnect}
time_pretransfer: %{time_pretransfer}
time_redirect: %{time_redirect}
time_starttransfer: %{time_starttransfer}
time_total: %{time_total}
"

curl_test() {
  curl -w "$curl_w" -v -o /dev/null "$@"
}

ix() {
  url=$(cat | curl -F 'f:1=<-' ix.io 2>/dev/null)
  echo "Pasted at: $url"
}

(
  echo "domain=$domain"
  run dig -t A "$domain"
  run ping -c1 "$domain"
  run ping -4 -c1 "$domain"
  run ping -6 -c1 "$domain"
  run mtr -c 20 -w -r "$domain"
  run curl_test -4 "http://$domain/"
  run curl_test -6 "http://$domain/"
  run curl_test -4 "https://$domain/"
  run curl_test -6 "https://$domain/"
  run curl -I -4 "https://$domain/"
  run curl -I -4 "https://$domain/"
  run curl -I -4 "https://$domain/"
  run curl -I -6 "https://$domain/"
  run curl -I -6 "https://$domain/"
  run curl -I -6 "https://$domain/"
) | tee /dev/stderr | ix