
Merge pull request #35 from mendableai/nsc/job-logs
Better logging
nickscamara authored Apr 20, 2024
2 parents d4e7774 + 6aa3cc3 commit acec766
Showing 4 changed files with 73 additions and 5 deletions.
12 changes: 8 additions & 4 deletions apps/api/src/main/runWebScraper.ts
@@ -3,7 +3,7 @@ import { CrawlResult, WebScraperOptions } from "../types";
 import { WebScraperDataProvider } from "../scraper/WebScraper";
 import { Progress } from "../lib/entities";
 import { billTeam } from "../services/billing/credit_billing";
-
+import { Document } from "../lib/entities";
 export async function startWebScraperPipeline({
   job,
 }: {
@@ -24,7 +24,7 @@ export async function startWebScraperPipeline({
       job.moveToFailed(error);
     },
     team_id: job.data.team_id,
-  })) as { success: boolean; message: string; docs: CrawlResult[] };
+  })) as { success: boolean; message: string; docs: Document[] };
 }
export async function runWebScraper({
url,
@@ -76,12 +76,12 @@ export async function runWebScraper({

   // remove docs with empty content
   const filteredDocs = docs.filter((doc) => doc.content.trim().length > 0);
-  onSuccess(filteredDocs);
+
   const { success, credit_usage } = await billTeam(
     team_id,
     filteredDocs.length
   );

   if (!success) {
     // throw new Error("Failed to bill team, no subscription was found");
     return {
@@ -91,7 +91,11 @@ export async function runWebScraper({
     };
   }

-  return { success: true, message: "", docs: filteredDocs as CrawlResult[] };
+  // This is where the return value from the job is set
+  onSuccess(filteredDocs);
+
+  // This return doesn't matter much for the job completion result
+  return { success: true, message: "", docs: filteredDocs };
 } catch (error) {
   console.error("Error running web scraper", error);
   onError(error);
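The relocated onSuccess call matters because of how Bull persists job results. A minimal, self-contained sketch of that mechanism (generic Bull usage under an assumed local Redis, not Firecrawl code): whatever the processor hands back becomes the job's stored returnvalue, which is what the new comments above refer to.

import Queue from "bull";

const queue = new Queue("web-scraper", "redis://127.0.0.1:6379");

// Whatever the processor resolves with is persisted as the job's
// returnvalue in Redis.
queue.process(async () => {
  const docs = [{ content: "scraped page" }];
  return { success: true, message: "", docs };
});

async function main() {
  const job = await queue.add({ url: "https://example.com" });
  // finished() resolves with the processor's return value
  const result = await job.finished();
  console.log(result.docs.length); // 1
}

main();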
33 changes: 33 additions & 0 deletions apps/api/src/services/logging/log_job.ts
@@ -0,0 +1,33 @@
+import { supabase_service } from "../supabase";
+import { FirecrawlJob } from "../../types";
+import "dotenv/config";
+
+export async function logJob(job: FirecrawlJob) {
+  try {
+    // Only log jobs in production
+    if (process.env.ENV !== "production") {
+      return;
+    }
+    const { data, error } = await supabase_service
+      .from("firecrawl_jobs")
+      .insert([
+        {
+          success: job.success,
+          message: job.message,
+          num_docs: job.num_docs,
+          docs: job.docs,
+          time_taken: job.time_taken,
+          team_id: job.team_id,
+          mode: job.mode,
+          url: job.url,
+          crawler_options: job.crawlerOptions,
+          page_options: job.pageOptions,
+        },
+      ]);
+    if (error) {
+      console.error("Error logging job:\n", error);
+    }
+  } catch (error) {
+    console.error("Error logging job:\n", error);
+  }
+}
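For context, here is a hypothetical read-back of the rows this file inserts, using the same supabase-js client. The column names match the insert above; the helper name, filter, and limit are assumptions, not part of this commit.

import { supabase_service } from "../supabase";

// Fetch a team's most recently logged jobs. Only columns written by
// logJob are selected; the team_id filter and limit are illustrative.
export async function recentJobsForTeam(team_id: string) {
  const { data, error } = await supabase_service
    .from("firecrawl_jobs")
    .select("success, num_docs, time_taken, url")
    .eq("team_id", team_id)
    .limit(10);
  if (error) {
    console.error("Error fetching jobs:\n", error);
    return [];
  }
  return data;
}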
19 changes: 18 additions & 1 deletion apps/api/src/services/queue-worker.ts
@@ -4,6 +4,7 @@ import "dotenv/config";
 import { logtail } from "./logtail";
 import { startWebScraperPipeline } from "../main/runWebScraper";
 import { callWebhook } from "./webhook";
+import { logJob } from "./logging/log_job";

 getWebScraperQueue().process(
   Math.floor(Number(process.env.NUM_WORKERS_PER_QUEUE ?? 8)),
@@ -15,8 +16,11 @@ getWebScraperQueue().process(
       current_step: "SCRAPING",
       current_url: "",
     });
+    const start = Date.now();
     const { success, message, docs } = await startWebScraperPipeline({ job });

+    const end = Date.now();
+    const timeTakenInSeconds = (end - start) / 1000;
     const data = {
       success: success,
       result: {
@@ -29,6 +33,19 @@
     };

     await callWebhook(job.data.team_id, data);
+
+    await logJob({
+      success: success,
+      message: message,
+      num_docs: docs.length,
+      docs: docs,
+      time_taken: timeTakenInSeconds,
+      team_id: job.data.team_id,
+      mode: "crawl",
+      url: job.data.url,
+      crawlerOptions: job.data.crawlerOptions,
+      pageOptions: job.data.pageOptions,
+    });
     done(null, data);
   } catch (error) {
     if (error instanceof CustomError) {
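The catch branch is elided above, so this hunk only logs successful runs. A hypothetical helper for the failure path, mirroring the success-path call (an assumption for illustration, not part of this commit):

import { logJob } from "./logging/log_job";

// Log a failed crawl with the same row shape as the success path.
// The zeroed num_docs/docs and use of error.message are assumptions.
async function logFailedCrawl(
  job: { data: any },
  error: Error,
  timeTakenInSeconds: number
) {
  await logJob({
    success: false,
    message: error.message,
    num_docs: 0,
    docs: [],
    time_taken: timeTakenInSeconds,
    team_id: job.data.team_id,
    mode: "crawl",
    url: job.data.url,
    crawlerOptions: job.data.crawlerOptions,
    pageOptions: job.data.pageOptions,
  });
}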
14 changes: 14 additions & 0 deletions apps/api/src/types.ts
@@ -25,6 +25,20 @@ export interface WebScraperOptions {
 }


+export interface FirecrawlJob {
+  success: boolean;
+  message: string;
+  num_docs: number;
+  docs: any[];
+  time_taken: number;
+  team_id: string;
+  mode: string;
+  url: string;
+  crawlerOptions?: any;
+  pageOptions?: any;
+}
+
+
 export enum RateLimiterMode {
   Crawl = "crawl",
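An example value conforming to the new interface (all field values illustrative; time_taken is in seconds, per the worker's (end - start) / 1000):

const exampleJob: FirecrawlJob = {
  success: true,
  message: "",
  num_docs: 2,
  docs: [{ content: "..." }, { content: "..." }],
  time_taken: 3.5,
  team_id: "team_123",       // hypothetical team id
  mode: "crawl",
  url: "https://example.com",
  crawlerOptions: {},         // typed `any`; shape not constrained here
  pageOptions: {},
};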
