Skip to content

Commit

Permalink
Merge pull request #1219 from adobe/sqlfluff-updates
Browse files Browse the repository at this point in the history
ci(github): update outdated python version
  • Loading branch information
trieloff authored Jan 13, 2025
2 parents d3227cd + 7647085 commit 5833f5f
Show file tree
Hide file tree
Showing 4 changed files with 70 additions and 54 deletions.
33 changes: 9 additions & 24 deletions .github/workflows/sqlfluff-lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,40 +12,25 @@ jobs:
- name: Install Python
uses: "actions/setup-python@v5"
with:
python-version: "3.7"
python-version: "3.8"
- name: install sqlfluff
run: "pip install sqlfluff"
- name: Get changed files
id: get_file_changes
uses: trilom/[email protected]
id: changed-files
uses: tj-actions/changed-files@v42
with:
output: " "

files: |
src/queries/**/*.sql
files_ignore: |
**/common*.sql
- name: Get changed .sql files in /src/queries to lint
id: get_files_to_lint
shell: bash -l {0}
run: |
# Set the command in the $() brackets as an output to use in later steps
echo "::set-output name=lintees::$(
# Issue where grep regular expressions don't work as expected on the
# Github Actions shell, check dbt/models/ folder
echo \
$(echo ${{ steps.get_file_changes.outputs.files_modified }} |
tr -s ' ' '\n' |
grep -E '^src/queries.*[.]sql$' |
# ignore files containing 'common' in the file name
grep -v 'common' |
tr -s '\n' ' ') \
$(echo ${{ steps.get_file_changes.outputs.files_added }} |
tr -s ' ' '\n' |
grep -E '^src/queries.*[.]sql$' |
tr -s '\n' ' ')
)"
LINTEES="${{ steps.changed-files.outputs.all_changed_files }}"
echo "lintees=${LINTEES}" >> $GITHUB_OUTPUT
- name: Lint dbt models
id: sqlfluff_json
if: steps.get_files_to_lint.outputs.lintees != ''
shell: bash -l {0}
run: sqlfluff lint --format github-annotation --annotation-level failure --nofail ${{ steps.get_files_to_lint.outputs.lintees }} > annotations.json
- name: Annotate
uses: yuzutech/[email protected]
Expand Down
21 changes: 11 additions & 10 deletions src/queries/rum-dashboard.sql
Original file line number Diff line number Diff line change
Expand Up @@ -205,11 +205,12 @@ current_rum_by_url_and_weight AS (
NULL
) AS INT64
) AS ttfbbad,
CAST(APPROX_QUANTILES(lcp, 100)[OFFSET(75)] AS INT64) AS avglcp,
CAST(APPROX_QUANTILES(fid, 100)[OFFSET(75)] AS INT64) AS avgfid,
CAST(APPROX_QUANTILES(inp, 100)[OFFSET(75)] AS INT64) AS avginp,
ROUND(APPROX_QUANTILES(cls, 100)[OFFSET(75)], 3) AS avgcls,
CAST(APPROX_QUANTILES(ttfb, 100)[OFFSET(75)] AS INT64) AS avgttfb,
# 15th 20-quantile is equivalent to 75th 100-quantile, but cheaper to compute
CAST(APPROX_QUANTILES(lcp, 20)[OFFSET(15)] AS INT64) AS avglcp,
CAST(APPROX_QUANTILES(fid, 20)[OFFSET(15)] AS INT64) AS avgfid,
CAST(APPROX_QUANTILES(inp, 20)[OFFSET(15)] AS INT64) AS avginp,
ROUND(APPROX_QUANTILES(cls, 20)[OFFSET(15)], 3) AS avgcls,
CAST(APPROX_QUANTILES(ttfb, 20)[OFFSET(15)] AS INT64) AS avgttfb,
COUNT(id) AS events
FROM current_rum_by_id
GROUP BY url, weight
Expand Down Expand Up @@ -290,11 +291,11 @@ previous_rum_by_url_and_weight AS (
NULL
) AS INT64
) AS ttfbbad,
CAST(APPROX_QUANTILES(lcp, 100)[OFFSET(75)] AS INT64) AS avglcp,
CAST(APPROX_QUANTILES(fid, 100)[OFFSET(75)] AS INT64) AS avgfid,
CAST(APPROX_QUANTILES(inp, 100)[OFFSET(75)] AS INT64) AS avginp,
ROUND(APPROX_QUANTILES(cls, 100)[OFFSET(75)], 3) AS avgcls,
CAST(APPROX_QUANTILES(ttfb, 100)[OFFSET(75)] AS INT64) AS avgttfb,
CAST(APPROX_QUANTILES(lcp, 20)[OFFSET(15)] AS INT64) AS avglcp,
CAST(APPROX_QUANTILES(fid, 20)[OFFSET(15)] AS INT64) AS avgfid,
CAST(APPROX_QUANTILES(inp, 20)[OFFSET(15)] AS INT64) AS avginp,
ROUND(APPROX_QUANTILES(cls, 20)[OFFSET(15)], 3) AS avgcls,
CAST(APPROX_QUANTILES(ttfb, 20)[OFFSET(15)] AS INT64) AS avgttfb,
COUNT(id) AS events
FROM previous_rum_by_id
GROUP BY url, weight
Expand Down
7 changes: 7 additions & 0 deletions src/sendquery.js
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,13 @@ export async function execute(email, key, project, query, _, params = {}, logger
const [job] = await bq.createQueryJob({
query: q,
params: requestParams,
jobTimeoutMs: 29000,
configuration: {
query: {
useQueryCache: true,
priority: 'INTERACTIVE',
},
},
});
const stream = job.getQueryResultsStream();

Expand Down
63 changes: 43 additions & 20 deletions test/post-deploy.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,32 @@ import { setTimeout } from 'node:timers/promises';
import { fetch } from '@adobe/fetch';
import { createTargets } from './post-deploy-utils.js';

/**
 * Fetch `url`, retrying with exponential backoff while the service
 * responds 503 or the request itself throws.
 *
 * @param {string} url - URL to fetch
 * @param {object} options - options forwarded to fetch()
 * @param {number} [maxRetries=3] - total number of attempts
 * @param {number} [initialDelay=1000] - first backoff delay in milliseconds
 * @returns {Promise<Response>} the first non-503 response
 * @throws {Error} the last fetch error, or a generic error when every
 *   attempt returned 503
 */
async function retryFetch(url, options, maxRetries = 3, initialDelay = 1000) {
  const MAX_DELAY = 60000; // cap the exponential backoff at 60 seconds

  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    // exponential backoff: initialDelay * 2^(attempt - 1), capped at MAX_DELAY
    const backoffDelay = Math.min(initialDelay * (2 ** (attempt - 1)), MAX_DELAY);
    try {
      // eslint-disable-next-line no-await-in-loop
      const response = await fetch(url, options);
      if (response.status !== 503) {
        return response;
      }
      // Don't sleep after the last attempt — we are about to give up anyway.
      if (attempt === maxRetries) break;
      console.log(`Attempt ${attempt}: Got 503, retrying in ${backoffDelay}ms...`);
      // eslint-disable-next-line no-await-in-loop
      await setTimeout(backoffDelay);
    } catch (error) {
      // Out of attempts: surface the original failure, not a wrapper.
      if (attempt === maxRetries) throw error;
      console.log(`Attempt ${attempt}: Failed with ${error.message}, retrying in ${backoffDelay}ms...`);
      // eslint-disable-next-line no-await-in-loop
      await setTimeout(backoffDelay);
    }
  }
  throw new Error(`Failed after ${maxRetries} attempts`);
}

createTargets().forEach((target) => {
describe(`Post-Deploy Tests (${target.title()}) ${target.host()}${target.urlPath()}`, () => {
before(async function beforeAll() {
Expand All @@ -27,45 +53,42 @@ createTargets().forEach((target) => {
}
});

it('RUM Dashboard', async () => {
const path = `${target.urlPath()}/rum-dashboard`;
// eslint-disable-next-line no-console
it('Service reports status', async () => {
const path = `${target.urlPath()}/_status_check/healthcheck.json`;
console.log(`testing ${target.host()}${path}`);
const response = await fetch(`${target.host()}${path}`, {
const response = await retryFetch(`${target.host()}${path}`, {
headers: {
Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
},
});
assert.equal(response.status, 200, await response.text());
assert.equal(response.headers.get('Content-Type'), 'application/json');
const body = await response.json();
assert.equal(body.meta.data.length, 49);
}).timeout(60000);
}).timeout(30000);

it('Daily Pageviews', async () => {
const path = `${target.urlPath()}/rum-pageviews?url=www.theplayers.com&offset=1`;
// eslint-disable-next-line no-console
it('RUM Dashboard', async () => {
const path = `${target.urlPath()}/rum-dashboard?url=www.adobe.com`;
console.log(`testing ${target.host()}${path}`);
const response = await fetch(`${target.host()}${path}`, {
const response = await retryFetch(`${target.host()}${path}`, {
headers: {
Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
},
});
}, 5, 1000); // Increase max retries to 5 for this endpoint
assert.equal(response.status, 200, await response.text());
assert.equal(response.headers.get('Content-Type'), 'application/json');
}).timeout(60000);
const body = await response.json();
assert.equal(body.meta.data.length, 49);
}).timeout(120000); // Double the timeout

it('Service reports status', async () => {
const path = `${target.urlPath()}/_status_check/healthcheck.json`;
// eslint-disable-next-line no-console
it('Daily Pageviews', async () => {
const path = `${target.urlPath()}/rum-pageviews?url=www.theplayers.com&offset=1`;
console.log(`testing ${target.host()}${path}`);
const response = await fetch(`${target.host()}${path}`, {
const response = await retryFetch(`${target.host()}${path}`, {
headers: {
Authorization: `Bearer ${process.env.UNIVERSAL_TOKEN}`,
},
});
}, 5, 1000); // Increase max retries to 5 for this endpoint
assert.equal(response.status, 200, await response.text());
assert.equal(response.headers.get('Content-Type'), 'application/json');
}).timeout(10000);
}).timeout(60000);
}).timeout(120000); // Double the timeout
}).timeout(180000); // Increase suite timeout
});

0 comments on commit 5833f5f

Please sign in to comment.