Remove logging
ebidel committed Dec 5, 2018
1 parent 6b13aa2 commit 5dd8956
Showing 2 changed files with 12 additions and 10 deletions.
lighthouse-data.mjs: 11 additions & 9 deletions
@@ -437,7 +437,7 @@ export async function getMedianScoresOfAllUrls(
   // }
   // }
 
-  console.warn('No cached medians.');
+  // console.warn('No cached medians.');
 
   return {};
 }
@@ -640,7 +640,7 @@ async function fetchWithTimeout(method, url, timeout=10 * 1000) {
     size: 0, // Response body size. 0 disables it.
     compress: true,
   }).then(resp => {
-    console.log(resp.status, resp.statusText, resp.ok, url);
+    // console.log(resp.status, resp.statusText, resp.ok, url);
 
     // If server doesn't support HEAD request, consider it a valid URL.
     if (resp.status === 405) {
@@ -682,7 +682,9 @@ export async function removeNextSetOfInvalidUrls(limit = 1000) {
       .limit(limit);
 
   const snapshot = await query.get();
-  // If no more docs fetched, reset lastVerifiedRunOn to time far in the past.
+
+  // Reset lastVerifiedRunOn to time far in the past. This essentially will
+  // make the cron job loop around and start checking urls all over again.
   if (snapshot.empty) {
     await db.doc(`_data/meta`).set({
       lastVerifiedRunOn: new Date('2018-11-12T21:49:20.821Z'),
@@ -696,6 +698,12 @@
   console.info(`Verifying ${snapshot.size} urls.` +
       `From ${lastVerifiedRunOn.toJSON()} to ${lastDocVerifiedDate.toJSON()}`);
 
+  // Update lastVerifiedRunOn to lastVerified of the last doc in the query
+  // results. Cron picks up from here.
+  await db.doc(`_data/meta`).set({
+    lastVerifiedRunOn: lastDocVerifiedDate,
+  }, {merge: true});
+
   try {
     let numRemoved = 0;
 
@@ -719,12 +727,6 @@ export async function removeNextSetOfInvalidUrls(limit = 1000) {
     const results = await parallelLimit(
         async.reflectAll(tasks), MAX_CONCURRENT_REQUESTS);
 
-    // Reset lastVerifiedRunOn to time far in the past. This essentially will
-    // make the cron job loop around and start checking urls all over again.
-    await db.doc(`_data/meta`).set({
-      lastVerifiedRunOn: lastDocVerifiedDate,
-    }, {merge: true});
-
     return {numUrls: results.length, numRemoved};
   } catch (err) {
     console.error('Async task error', err);
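For reference, the fetchWithTimeout() hunk above validates URLs with a HEAD request and treats a 405 (method not allowed) response as valid, since some servers reject HEAD but serve GET fine. A minimal sketch of that check, assuming node-fetch v2 (whose timeout, size, and compress options match the ones visible in the diff); the helper name isReachable is illustrative, not from the source:

import fetch from 'node-fetch';

// Sketch: validate a URL with a HEAD request. Servers that don't
// support HEAD answer 405, which counts as valid per the diff's comment.
async function isReachable(url, timeout = 10 * 1000) {
  try {
    const resp = await fetch(url, {
      method: 'HEAD',
      timeout,        // node-fetch v2: ms before the request errors out
      size: 0,        // response body size; 0 disables buffering it
      compress: true, // accept gzip/deflate responses
    });
    return resp.ok || resp.status === 405;
  } catch (err) {
    return false;     // network error or timeout
  }
}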
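The removeNextSetOfInvalidUrls() hunks move the `_data/meta` cursor update ahead of the verification work, so the cron job records its progress before the slow removal tasks run. A sketch of the overall cursor pattern, assuming a Firestore `urls` collection ordered by a `lastVerified` field (both names are illustrative; the diff only shows the `_data/meta` doc and its `lastVerifiedRunOn` field, and the real query shape may differ):

import admin from 'firebase-admin';

admin.initializeApp();
const db = admin.firestore();

// Sketch: one cron tick of a cursor-based sweep over stored URLs.
async function sweepNextBatch(limit = 1000) {
  const meta = await db.doc('_data/meta').get();
  const cursor = meta.data().lastVerifiedRunOn;

  const snapshot = await db.collection('urls')
      .orderBy('lastVerified')
      .startAfter(cursor)
      .limit(limit)
      .get();

  // No more docs: reset the cursor to a date far in the past so the
  // next run wraps around and starts checking urls all over again.
  if (snapshot.empty) {
    await db.doc('_data/meta').set({
      lastVerifiedRunOn: new Date('2018-11-12T21:49:20.821Z'),
    }, {merge: true});
    return;
  }

  // Advance the cursor before doing the slow verification work, so a
  // timed-out or crashed run doesn't stall the sweep on this batch.
  const lastDoc = snapshot.docs[snapshot.docs.length - 1];
  await db.doc('_data/meta').set({
    lastVerifiedRunOn: lastDoc.data().lastVerified,
  }, {merge: true});

  // ...verify each URL (e.g. with isReachable above) and delete docs
  // whose URLs are no longer valid...
}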
queue.yaml: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ queue:
   retry_parameters:
     task_retry_limit: 0
 - name: remove-invalid-urls
-  rate: 100/m
+  rate: 2/m
   max_concurrent_requests: 1
   target: lighthouse
   retry_parameters:
