Commit

fix: make calls to next server
jaypyles committed Nov 13, 2024
1 parent b3bf780 commit dc4d219
Showing 26 changed files with 654 additions and 105 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/docker-image.yml
@@ -7,7 +7,7 @@ on:

jobs:
build:
if: ${{ github.event.workflow_run.conclusion == 'success' && github.ref == 'refs/heads/master' }}
if: ${{ github.event.workflow_run.conclusion == 'success' && github.ref == 'refs/heads/master' && github.event_name != 'pull_request' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
46 changes: 44 additions & 2 deletions api/backend/job.py
@@ -64,10 +64,31 @@ async def average_elements_per_link(user: str):
collection = get_job_collection()
pipeline = [
{"$match": {"status": "Completed", "user": user}},
{
"$addFields": {
"time_created_date": {
"$cond": {
"if": {"$eq": [{"$type": "$time_created"}, "date"]},
"then": "$time_created",
"else": {
"$convert": {
"input": "$time_created",
"to": "date",
"onError": None,
"onNull": None,
}
},
}
}
}
},
{
"$project": {
"date": {
"$dateToString": {"format": "%Y-%m-%d", "date": "$time_created"}
"$dateToString": {
"format": "%Y-%m-%d",
"date": "$time_created_date",
}
},
"num_elements": {"$size": "$elements"},
}
@@ -100,10 +121,31 @@ async def get_jobs_per_day(user: str):
collection = get_job_collection()
pipeline = [
{"$match": {"status": "Completed", "user": user}},
{
"$addFields": {
"time_created_date": {
"$cond": {
"if": {"$eq": [{"$type": "$time_created"}, "date"]},
"then": "$time_created",
"else": {
"$convert": {
"input": "$time_created",
"to": "date",
"onError": None,
"onNull": None,
}
},
}
}
}
},
{
"$project": {
"date": {
"$dateToString": {"format": "%Y-%m-%d", "date": "$time_created"}
"$dateToString": {
"format": "%Y-%m-%d",
"date": "$time_created_date",
}
}
}
},
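
Both pipelines previously fed $time_created straight into $dateToString, which fails when the stored value is a string rather than a BSON date. The added $addFields stage keeps real dates unchanged and runs everything else through $convert, yielding null instead of raising when a value cannot be parsed, so the later $project stage always sees a usable date. For illustration only, the same coercion stage restated for the Node.js MongoDB driver (a hypothetical restatement; the project's actual pipeline is the Python code above) might look like:

// Illustrative restatement of the added coercion stage for the Node.js MongoDB driver.
const timeCreatedToDate = {
  $addFields: {
    time_created_date: {
      $cond: {
        // Already a BSON date: keep it as-is.
        if: { $eq: [{ $type: "$time_created" }, "date"] },
        then: "$time_created",
        // Otherwise try to convert (e.g. an ISO string); fall back to null on failure.
        else: {
          $convert: { input: "$time_created", to: "date", onError: null, onNull: null },
        },
      },
    },
  },
};

// Usage sketch: prepend the stage so later stages can rely on a real date value.
// const docs = await collection.aggregate([matchStage, timeCreatedToDate, projectStage]).toArray();
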
4 changes: 3 additions & 1 deletion docker-compose.yml
@@ -1,13 +1,15 @@
services:
scraperr:
depends_on:
- scraperr_api
image: jpyles0524/scraperr:latest
build:
context: .
dockerfile: docker/frontend/Dockerfile
container_name: scraperr
command: ["npm", "run", "start"]
environment:
- NEXT_PUBLIC_API_URL=http://localhost:8000 # your API URL
- NEXT_PUBLIC_API_URL=http://scraperr_api:8000 # your API URL
- SERVER_URL=http://scraperr_api:8000 # your docker container API URL
ports:
- 80:3000
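
Note that http://scraperr_api:8000 only resolves inside the Docker network, not from the user's browser. The change works because the pages now call relative /api/* paths (see the component changes below) and Next.js API routes forward those requests server-side, where the container hostname is reachable. As a rough sketch of that forwarding pattern — the real route files are among the 26 changed files but not shown on this page, so the route name and the Authorization handling here are assumptions modeled on the src/pages/api/ai/check.ts handler further down:

// Hypothetical src/pages/api/retrieve.ts, sketched after the handlers shown below.
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // The client wraps its payload as { data: ... }; unwrap it before forwarding.
  const { data } = req.body;

  try {
    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/retrieve-scrape-jobs`,
      {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          // Pass the caller's Authorization header through to the backend.
          Authorization: req.headers.authorization ?? "",
        },
        body: JSON.stringify(data),
      }
    );

    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }

    res.status(200).json(await response.json());
  } catch (error) {
    console.error("Error proxying request:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}
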
30 changes: 12 additions & 18 deletions src/components/jobs/JobTable.tsx
@@ -48,14 +48,11 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
const router = useRouter();

const handleDownload = async (ids: string[]) => {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/download`,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ ids: ids }),
}
);
const response = await fetch("/api/download", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ data: { ids: ids } }),
});

if (response.ok) {
const blob = await response.blob();
@@ -107,14 +104,11 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
};

const handleDeleteSelected = async () => {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/delete-scrape-jobs`,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ ids: Array.from(selectedJobs) }),
}
);
const response = await fetch("/api/delete", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ data: { ids: Array.from(selectedJobs) } }),
});

if (response.ok) {
setJobs((jobs) =>
@@ -148,13 +142,13 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
value: value,
};

await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/update`, {
await fetch("/api/update", {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(postBody),
body: JSON.stringify({ data: postBody }),
});
};

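
handleDownload still expects a binary response (response.blob() above), so the Next.js route behind /api/download has to forward the file body rather than JSON. A possible sketch of such a route — hypothetical, since the actual file is among the changes not shown on this page:

// Hypothetical src/pages/api/download.ts: forwards the request and returns the file body.
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const { data } = req.body;

  try {
    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/download`,
      {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(data),
      }
    );

    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }

    // Copy the backend's content headers so the browser still sees a file download.
    const contentType = response.headers.get("content-type");
    if (contentType) res.setHeader("Content-Type", contentType);
    const disposition = response.headers.get("content-disposition");
    if (disposition) res.setHeader("Content-Disposition", disposition);

    // Buffer the body and send it back as binary data.
    const buffer = Buffer.from(await response.arrayBuffer());
    res.status(200).send(buffer);
  } catch (error) {
    console.error("Error proxying download:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}
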
9 changes: 7 additions & 2 deletions src/components/logs/log-container/log-container.tsx
@@ -14,19 +14,24 @@ export const LogContainer: React.FC<LogContainerProps> = ({ initialLogs }) => {
const logsContainerRef = useRef<HTMLDivElement | null>(null);

useEffect(() => {
const eventSource = new EventSource(`${Constants.DOMAIN}/api/logs`);
const eventSource = new EventSource(`/api/logs`);

setLogs("");

eventSource.onmessage = (event) => {
setLogs((prevLogs) => prevLogs + event.data + "\n");

if (logsContainerRef.current) {
logsContainerRef.current.scrollTop =
logsContainerRef.current.scrollHeight;
}
};

eventSource.onerror = () => {
eventSource.onopen = (e) => {
};

eventSource.onerror = (error) => {
console.error("EventSource failed:", error);
eventSource.close();
};

18 changes: 7 additions & 11 deletions src/contexts/AuthContext.tsx
@@ -25,7 +25,7 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
const token = Cookies.get("token");
if (token) {
axios
.get(`${process.env.NEXT_PUBLIC_API_URL}/api/auth/users/me`, {
.get(`/api/me`, {
headers: { Authorization: `Bearer ${token}` },
})
.then((response) => {
@@ -42,23 +42,19 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
const params = new URLSearchParams();
params.append("username", email);
params.append("password", password);
const response = await axios.post(
`${process.env.NEXT_PUBLIC_API_URL}/api/auth/token`,
params
);
const response = await axios.post(`/api/token`, params);

Cookies.set("token", response.data.access_token, {
expires: 7,
path: "/",
domain: "localhost",
secure: false,
sameSite: "Lax",
});
const userResponse = await axios.get(
`${process.env.NEXT_PUBLIC_API_URL}/api/auth/users/me`,
{
headers: { Authorization: `Bearer ${response.data.access_token}` },
}
);

const userResponse = await axios.get(`/api/me`, {
headers: { Authorization: `Bearer ${response.data.access_token}` },
});
setUser(userResponse.data);
setIsAuthenticated(true);
};
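
Two details here: dropping domain: "localhost" from Cookies.set lets the cookie default to whatever host actually serves the app, and the login call now posts URL-encoded credentials to /api/token, so the proxy route behind it has to forward form data rather than JSON (while /api/me presumably just forwards the bearer token, as the src/pages/api/ai/check.ts handler below does). A sketch of the token route, hypothetical since the real file is not shown here:

// Hypothetical src/pages/api/token.ts: re-encodes the parsed body as form data
// and forwards it to the backend's /api/auth/token endpoint.
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    // Next.js parses application/x-www-form-urlencoded bodies into an object;
    // rebuild URLSearchParams before forwarding.
    const params = new URLSearchParams(req.body as Record<string, string>);

    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/auth/token`,
      {
        method: "POST",
        headers: { "Content-Type": "application/x-www-form-urlencoded" },
        body: params,
      }
    );

    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }

    // Return the token payload (e.g. { access_token, ... }) unchanged.
    res.status(200).json(await response.json());
  } catch (error) {
    console.error("Error proxying token request:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}
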
38 changes: 16 additions & 22 deletions src/lib/utils.ts
@@ -11,13 +11,13 @@ export const fetchJobs = async (
fetchOptions: fetchOptions = {}
) => {
const token = Cookies.get("token");
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/retrieve-scrape-jobs`, {
await fetch("/api/retrieve", {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(fetchOptions),
body: JSON.stringify({ data: fetchOptions }),
})
.then((response) => response.json())
.then((data) => setJobs(data))
@@ -29,15 +29,12 @@
export const fetchJob = async (id: string) => {
const token = Cookies.get("token");
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/job/${id}`,
{
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const response = await fetch(`/api/job/${id}`, {
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
});
const data = await response.json();
return data;
} catch (error) {
@@ -51,15 +48,12 @@
) => {
const token = Cookies.get("token");
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/ai/check`,
{
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const response = await fetch("/api/ai/check", {
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
});
const data = await response.json();
setAiEnabled(data);
} catch (error) {
@@ -75,13 +69,13 @@ export const updateJob = async (ids: string[], field: string, value: any) => {
field: field,
value: value,
};
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/update`, {
await fetch("/api/update", {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(postBody),
body: JSON.stringify({ data: postBody }),
}).catch((error) => {
console.error("Error fetching jobs:", error);
});
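
fetchJob now hits /api/job/${id}, which in the Next.js pages router maps onto a dynamic route file. A sketch of what that route might look like — hypothetical, the actual file is among the changes not shown on this page:

// Hypothetical src/pages/api/job/[id].ts: dynamic route that forwards the job lookup.
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // The [id] segment of the URL arrives in req.query.
  const { id } = req.query;

  try {
    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/job/${id}`,
      {
        headers: {
          "Content-Type": "application/json",
          // Forward the caller's bearer token to the backend.
          Authorization: req.headers.authorization ?? "",
        },
      }
    );

    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }

    res.status(200).json(await response.json());
  } catch (error) {
    console.error("Error proxying job request:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}
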
30 changes: 30 additions & 0 deletions src/pages/api/ai/check.ts
@@ -0,0 +1,30 @@
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
try {
const headers = new Headers(req.headers as Record<string, string>);
headers.set("content-type", "application/json");
headers.set("Authorization", `Bearer ${req.headers.authorization}`);

const response = await fetch(
`${global.process.env.NEXT_PUBLIC_API_URL}/api/ai/check`,
{
method: "GET",
headers,
}
);

if (!response.ok) {
throw new Error(`Error: ${response.statusText}`);
}

const result = await response.json();
res.status(200).json(result);
} catch (error) {
console.error("Error submitting scrape job:", error);
res.status(500).json({ error: "Internal Server Error" });
}
}
56 changes: 56 additions & 0 deletions src/pages/api/ai/index.ts
@@ -0,0 +1,56 @@
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
const { data } = req.body;

try {
const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/ai`, {
method: "POST",
headers: {
Accept: "text/event-stream",
"Content-Type": "application/json",
},
body: JSON.stringify(data),
});

if (!response.ok) {
const errorDetails = await response.text();
if (response.status === 422) {
console.error(`422 Error: ${errorDetails}`);
}
throw new Error(
`Error fetching logs: ${response.statusText} - ${errorDetails}`
);
}

if (!response.body) {
throw new Error(`No response body from API`);
}

res.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
Connection: "keep-alive",
"Transfer-Encoding": "chunked",
});

let responseStream = response.body;
const reader = responseStream.getReader();
const decoder = new TextDecoder();

while (true) {
const { done, value } = await reader.read();
if (done) break;
const chunk = decoder.decode(value, { stream: true });
res.write(`${chunk}`);
}

res.end();
} catch (error) {
console.error("Error streaming logs:", error);
res.status(500).json({ error: "Internal Server Error" });
}
}
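
On the client side, a component consuming this route can read the forwarded stream incrementally with the same reader/decoder pattern used above instead of waiting for the full response. An illustrative consumer — hypothetical helper name, since the calling component is not part of the files shown in this commit:

// Hypothetical client-side helper that streams /api/ai output chunk by chunk.
export async function streamAi(
  data: unknown,
  onChunk: (text: string) => void
): Promise<void> {
  const response = await fetch("/api/ai", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ data }),
  });

  if (!response.ok || !response.body) {
    throw new Error(`AI request failed: ${response.statusText}`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();

  // Read until the proxy closes the stream, handing each decoded chunk to the caller.
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    onChunk(decoder.decode(value, { stream: true }));
  }
}
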
