diff --git a/.github/workflows/sqlfluff-lint.yaml b/.github/workflows/sqlfluff-lint.yaml
index d99fcdee..81eaa108 100644
--- a/.github/workflows/sqlfluff-lint.yaml
+++ b/.github/workflows/sqlfluff-lint.yaml
@@ -12,40 +12,25 @@ jobs:
       - name: Install Python
         uses: "actions/setup-python@v5"
         with:
-          python-version: "3.7"
+          python-version: "3.8"
       - name: install sqlfluff
         run: "pip install sqlfluff"
       - name: Get changed files
-        id: get_file_changes
-        uses: trilom/file-changes-action@v1.2.4
+        id: changed-files
+        uses: tj-actions/changed-files@v42
         with:
-          output: " "
-
+          files: |
+            src/queries/**/*.sql
+          files_ignore: |
+            **/common*.sql
       - name: Get changed .sql files in /src/queries to lint
         id: get_files_to_lint
-        shell: bash -l {0}
         run: |
-          # Set the command in the $() brackets as an output to use in later steps
-          echo "::set-output name=lintees::$(
-          # Issue where grep regular expressions don't work as expected on the
-          # Github Actions shell, check dbt/models/ folder
-          echo \
-          $(echo ${{ steps.get_file_changes.outputs.files_modified }} |
-          tr -s ' ' '\n' |
-          grep -E '^src/queries.*[.]sql$' |
-          # ignore files containing 'common' in the file name
-          grep -v 'common' |
-          tr -s '\n' ' ') \
-          $(echo ${{ steps.get_file_changes.outputs.files_added }} |
-          tr -s ' ' '\n' |
-          grep -E '^src/queries.*[.]sql$' |
-          tr -s '\n' ' ')
-          )"
-
+          LINTEES="${{ steps.changed-files.outputs.all_changed_files }}"
+          echo "lintees=${LINTEES}" >> $GITHUB_OUTPUT
       - name: Lint dbt models
         id: sqlfluff_json
         if: steps.get_files_to_lint.outputs.lintees != ''
-        shell: bash -l {0}
         run: sqlfluff lint --format github-annotation --annotation-level failure --nofail ${{ steps.get_files_to_lint.outputs.lintees }} > annotations.json
       - name: Annotate
         uses: yuzutech/annotations-action@v0.5.0
diff --git a/src/queries/rum-dashboard.sql b/src/queries/rum-dashboard.sql
index 2be958af..ee1c11d5 100644
--- a/src/queries/rum-dashboard.sql
+++ b/src/queries/rum-dashboard.sql
@@ -205,11 +205,12 @@ current_rum_by_url_and_weight AS (
                 NULL
             ) AS INT64
         ) AS ttfbbad,
-        CAST(APPROX_QUANTILES(lcp, 100)[OFFSET(75)] AS INT64) AS avglcp,
-        CAST(APPROX_QUANTILES(fid, 100)[OFFSET(75)] AS INT64) AS avgfid,
-        CAST(APPROX_QUANTILES(inp, 100)[OFFSET(75)] AS INT64) AS avginp,
-        ROUND(APPROX_QUANTILES(cls, 100)[OFFSET(75)], 3) AS avgcls,
-        CAST(APPROX_QUANTILES(ttfb, 100)[OFFSET(75)] AS INT64) AS avgttfb,
+        # 15th 20-quantile is equivalent to 75th 100-quantile, but cheaper to compute
+        CAST(APPROX_QUANTILES(lcp, 20)[OFFSET(15)] AS INT64) AS avglcp,
+        CAST(APPROX_QUANTILES(fid, 20)[OFFSET(15)] AS INT64) AS avgfid,
+        CAST(APPROX_QUANTILES(inp, 20)[OFFSET(15)] AS INT64) AS avginp,
+        ROUND(APPROX_QUANTILES(cls, 20)[OFFSET(15)], 3) AS avgcls,
+        CAST(APPROX_QUANTILES(ttfb, 20)[OFFSET(15)] AS INT64) AS avgttfb,
         COUNT(id) AS events
     FROM current_rum_by_id
     GROUP BY url, weight
@@ -290,11 +291,11 @@ previous_rum_by_url_and_weight AS (
                 NULL
             ) AS INT64
         ) AS ttfbbad,
-        CAST(APPROX_QUANTILES(lcp, 100)[OFFSET(75)] AS INT64) AS avglcp,
-        CAST(APPROX_QUANTILES(fid, 100)[OFFSET(75)] AS INT64) AS avgfid,
-        CAST(APPROX_QUANTILES(inp, 100)[OFFSET(75)] AS INT64) AS avginp,
-        ROUND(APPROX_QUANTILES(cls, 100)[OFFSET(75)], 3) AS avgcls,
-        CAST(APPROX_QUANTILES(ttfb, 100)[OFFSET(75)] AS INT64) AS avgttfb,
+        CAST(APPROX_QUANTILES(lcp, 20)[OFFSET(15)] AS INT64) AS avglcp,
+        CAST(APPROX_QUANTILES(fid, 20)[OFFSET(15)] AS INT64) AS avgfid,
+        CAST(APPROX_QUANTILES(inp, 20)[OFFSET(15)] AS INT64) AS avginp,
+        ROUND(APPROX_QUANTILES(cls, 20)[OFFSET(15)], 3) AS avgcls,
+        CAST(APPROX_QUANTILES(ttfb, 20)[OFFSET(15)] AS INT64) AS avgttfb,
         COUNT(id) AS events
     FROM previous_rum_by_id
     GROUP BY url, weight
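
Why the two quantile spellings agree, per the comment in the hunk above: APPROX_QUANTILES(x, n) returns n + 1 boundary values and OFFSET(k) reads the k/n point of the distribution, so OFFSET(15) of 20 quantiles and OFFSET(75) of 100 quantiles both land on the 75th percentile, since 15/20 = 75/100 = 0.75. A quick sanity check using exact quantiles (plain JavaScript for illustration; BigQuery's function is an approximation, not this algorithm):

    // Exact quantile over a sorted sample: index = (k / n) * (len - 1).
    const quantile = (sorted, k, n) => sorted[Math.round((k / n) * (sorted.length - 1))];

    const sample = Array.from({ length: 101 }, (_, i) => i * 10); // 0, 10, ..., 1000
    console.log(quantile(sample, 15, 20));  // 750 -> p75
    console.log(quantile(sample, 75, 100)); // 750 -> same p75
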
diff --git a/src/sendquery.js b/src/sendquery.js
index bc58759e..923a4411 100644
--- a/src/sendquery.js
+++ b/src/sendquery.js
@@ -182,6 +182,13 @@ export async function execute(email, key, project, query, _, params = {}, logger
   const [job] = await bq.createQueryJob({
     query: q,
     params: requestParams,
+    jobTimeoutMs: 29000,
+    configuration: {
+      query: {
+        useQueryCache: true,
+        priority: 'INTERACTIVE',
+      },
+    },
   });

   const stream = job.getQueryResultsStream();
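
On the new job options: jobTimeoutMs: 29000 asks BigQuery to abort jobs running longer than 29 seconds, presumably to stay under the roughly 30-second response limits of the gateways in front of this service; useQueryCache lets repeated identical queries be answered from BigQuery's result cache; and 'INTERACTIVE' priority runs the job immediately rather than queueing it as batch work. A minimal standalone sketch of the same call, assuming the @google-cloud/bigquery client already used in src/sendquery.js (runDashboardQuery is a hypothetical name):

    import { BigQuery } from '@google-cloud/bigquery';

    // Hypothetical isolation of the createQueryJob call from execute() above.
    export async function runDashboardQuery(query, params) {
      const bq = new BigQuery();
      const [job] = await bq.createQueryJob({
        query,
        params,
        jobTimeoutMs: 29000, // abort jobs that would outlive the HTTP gateway
        configuration: {
          query: {
            useQueryCache: true, // repeated identical queries can hit the result cache
            priority: 'INTERACTIVE',
          },
        },
      });
      return job.getQueryResultsStream(); // stream rows, as sendquery.js does
    }
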
diff --git a/test/post-deploy.test.js b/test/post-deploy.test.js
index 0917581c..053c2814 100644
--- a/test/post-deploy.test.js
+++ b/test/post-deploy.test.js
@@ -15,6 +15,32 @@ import { setTimeout } from 'node:timers/promises';
 import { fetch } from '@adobe/fetch';
 import { createTargets } from './post-deploy-utils.js';

+async function retryFetch(url, options, maxRetries = 3, initialDelay = 1000) {
+  const attempts = Array.from({ length: maxRetries }, (_, i) => i + 1);
+  const MAX_DELAY = 60000; // Cap the maximum delay at 60 seconds
+
+  for (const attempt of attempts) {
+    try {
+      // eslint-disable-next-line no-await-in-loop
+      const response = await fetch(url, options);
+      if (response.status !== 503) {
+        return response;
+      }
+      const backoffDelay = Math.min(initialDelay * (2 ** (attempt - 1)), MAX_DELAY);
+      console.log(`Attempt ${attempt}: Got 503, retrying in ${backoffDelay}ms...`);
+      // eslint-disable-next-line no-await-in-loop
+      await setTimeout(backoffDelay);
+    } catch (error) {
+      if (attempt === maxRetries) throw error;
+      const backoffDelay = Math.min(initialDelay * (2 ** (attempt - 1)), MAX_DELAY);
+      console.log(`Attempt ${attempt}: Failed with ${error.message}, retrying in ${backoffDelay}ms...`);
+      // eslint-disable-next-line no-await-in-loop
+      await setTimeout(backoffDelay);
+    }
+  }
+  throw new Error(`Failed after ${maxRetries} attempts`);
+}
+
 createTargets().forEach((target) => {
   describe(`Post-Deploy Tests (${target.title()}) ${target.host()}${target.urlPath()}`, () => {
     before(async function beforeAll() {
@@ -27,45 +53,42 @@ createTargets().forEach((target) => {
       }
     });

-    it('RUM Dashboard', async () => {
-      const path = `${target.urlPath()}/rum-dashboard`;
-      // eslint-disable-next-line no-console
+    it('Service reports status', async () => {
+      const path = `${target.urlPath()}/_status_check/healthcheck.json`;
       console.log(`testing ${target.host()}${path}`);
-      const response = await fetch(`${target.host()}${path}`, {
+      const response = await retryFetch(`${target.host()}${path}`, {
         headers: {
           Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
         },
       });
       assert.equal(response.status, 200, await response.text());
       assert.equal(response.headers.get('Content-Type'), 'application/json');
-      const body = await response.json();
-      assert.equal(body.meta.data.length, 49);
-    }).timeout(60000);
+    }).timeout(30000);

-    it('Daily Pageviews', async () => {
-      const path = `${target.urlPath()}/rum-pageviews?url=www.theplayers.com&offset=1`;
-      // eslint-disable-next-line no-console
+    it('RUM Dashboard', async () => {
+      const path = `${target.urlPath()}/rum-dashboard?url=www.adobe.com`;
       console.log(`testing ${target.host()}${path}`);
-      const response = await fetch(`${target.host()}${path}`, {
+      const response = await retryFetch(`${target.host()}${path}`, {
         headers: {
           Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
         },
-      });
+      }, 5, 1000); // Increase max retries to 5 for this endpoint
       assert.equal(response.status, 200, await response.text());
       assert.equal(response.headers.get('Content-Type'), 'application/json');
-    }).timeout(60000);
+      const body = await response.json();
+      assert.equal(body.meta.data.length, 49);
+    }).timeout(120000); // Double the timeout

-    it('Service reports status', async () => {
-      const path = `${target.urlPath()}/_status_check/healthcheck.json`;
-      // eslint-disable-next-line no-console
+    it('Daily Pageviews', async () => {
+      const path = `${target.urlPath()}/rum-pageviews?url=www.theplayers.com&offset=1`;
       console.log(`testing ${target.host()}${path}`);
-      const response = await fetch(`${target.host()}${path}`, {
+      const response = await retryFetch(`${target.host()}${path}`, {
         headers: {
           Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
         },
-      });
+      }, 5, 1000); // Increase max retries to 5 for this endpoint
       assert.equal(response.status, 200, await response.text());
       assert.equal(response.headers.get('Content-Type'), 'application/json');
-    }).timeout(10000);
-  }).timeout(60000);
+    }).timeout(120000); // Double the timeout
+  }).timeout(180000); // Increase suite timeout
 });
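
For reference, the backoff schedule retryFetch produces: the delay after attempt n is min(initialDelay * 2^(n - 1), MAX_DELAY), so the defaults (maxRetries = 3, initialDelay = 1000) wait 1s and 2s between attempts, while the dashboard tests' (5, 1000) wait at most 16s. A small illustrative sketch (backoffSchedule is not part of the test code):

    // Delays retryFetch sleeps after each failed attempt, capped at 60s.
    const backoffSchedule = (maxRetries, initialDelay = 1000, maxDelay = 60000) =>
      Array.from({ length: maxRetries }, (_, i) => Math.min(initialDelay * (2 ** i), maxDelay));

    console.log(backoffSchedule(3)); // [ 1000, 2000, 4000 ]
    console.log(backoffSchedule(5)); // [ 1000, 2000, 4000, 8000, 16000 ]
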