Skip to content

Commit

Permalink
refactor(app): Decouple from @openneuro/client
Browse files Browse the repository at this point in the history
The client will no longer be needed with the move to deno CLI and this allows for frontend specific client configuration.

Fixes error handling not bubbling up properly across the React app.
  • Loading branch information
nellh committed Nov 7, 2024
1 parent 0d7bdd7 commit 9e65900
Show file tree
Hide file tree
Showing 8 changed files with 268 additions and 32 deletions.
1 change: 0 additions & 1 deletion packages/openneuro-app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
"@emotion/react": "11.11.1",
"@emotion/styled": "11.11.0",
"@niivue/niivue": "0.45.1",
"@openneuro/client": "^4.28.3",
"@openneuro/components": "^4.28.3",
"@sentry/react": "^8.25.0",
"@tanstack/react-table": "^8.9.3",
Expand Down
32 changes: 15 additions & 17 deletions packages/openneuro-app/src/client.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,12 @@
*/
import "./scripts/utils/global-polyfill"
import "./scripts/sentry"
import { ApolloProvider, InMemoryCache } from "@apollo/client"
import { createClient } from "@openneuro/client"
import { ApolloClient, ApolloProvider, InMemoryCache } from "@apollo/client"
import React from "react"
import { createRoot } from "react-dom/client"
import { BrowserRouter, Route, Routes } from "react-router-dom"
import App from "./scripts/app"
import Index from "./scripts/index"
import { version } from "./lerna.json"
import { config } from "./scripts/config"
import * as gtag from "./scripts/utils/gtag"
import { relayStylePagination } from "@apollo/client/utilities"
Expand All @@ -20,22 +18,22 @@ gtag.initialize(config.analytics.trackingIds)

const mainElement = document.getElementById("main")
const container = createRoot(mainElement)
// Apollo cache; advancedSearch uses relay-style (cursor) pagination so
// paged search results merge instead of replacing each other
const apolloCache = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        advancedSearch: relayStylePagination(),
      },
    },
  },
})

// Frontend-configured Apollo client pointed at the CRN GraphQL endpoint
const client = new ApolloClient({
  uri: `${config.url}/crn/graphql`,
  cache: apolloCache,
})

container.render(
<App>
<ApolloProvider
client={createClient(`${config.url}/crn/graphql`, {
clientVersion: version,
cache: new InMemoryCache({
typePolicies: {
Query: {
fields: {
advancedSearch: relayStylePagination(),
},
},
},
}),
})}
>
<ApolloProvider client={client}>
<BrowserRouter>
<Routes>
<Route path="*" element={<Index />} />
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,43 @@
import { datasets } from "@openneuro/client"
import { gql } from "@apollo/client"

// Query for a dataset draft's file listing (optionally scoped to a
// subtree); `urls` carries the direct download locations for each file.
export const DOWNLOAD_DATASET = gql`
query downloadDraft($datasetId: ID!, $tree: String) {
dataset(id: $datasetId) {
id
draft {
id
files(tree: $tree) {
id
directory
filename
size
urls
}
}
}
}
`

// Same file listing as DOWNLOAD_DATASET but for a tagged snapshot
// instead of the mutable draft.
export const DOWNLOAD_SNAPSHOT = gql`
query downloadSnapshot($datasetId: ID!, $tag: String!, $tree: String) {
snapshot(datasetId: $datasetId, tag: $tag) {
id
files(tree: $tree) {
id
directory
filename
size
urls
}
}
}
`

export const downloadDataset =
(client) => async ({ datasetId, snapshotTag, tree = null }) => {
if (snapshotTag) {
const { data } = await client.query({
query: datasets.downloadSnapshot,
query: DOWNLOAD_SNAPSHOT,
variables: {
datasetId,
tag: snapshotTag,
Expand All @@ -14,7 +47,7 @@ export const downloadDataset =
return data.snapshot.files
} else {
const { data } = await client.query({
query: datasets.downloadDataset,
query: DOWNLOAD_DATASET,
variables: {
datasetId,
tree,
Expand Down
122 changes: 122 additions & 0 deletions packages/openneuro-app/src/scripts/uploader/file-upload-parallel.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
/**
 * Convert a URL-safe encoded path back to a regular path
 * (":" separators become "/")
 * @param {String} path
 */
export const decodeFilePath = (path) => {
  return path.split(":").join("/")
}

/**
 * Determine parallelism based on Request list
 *
 * Roughly one parallel slot per 512KB of average request size,
 * clamped to the range [2, 8].
 * @param {Array<Request>} requests
 * @param {number} bytes expected total size of all requests
 * @returns {number}
 */
export function uploadParallelism(requests, bytes) {
  const averageBytes = bytes / requests.length
  const slots = Math.round(averageBytes / 524288) // 512KB
  return Math.min(8, Math.max(2, slots))
}

/**
 * Extract the encoded filename from the last segment of a Request URL
 * and decode it (":" separators become "/")
 * @param {string} url .../a:path:to:a:file
 */
export function parseFilename(url) {
  const lastSegment = url.slice(url.lastIndexOf("/") + 1)
  return lastSegment.replace(/:/g, "/")
}

/**
 * Control retry delay for upload file requests
 *
 * Sleeps with quadratic backoff (1s, 4s, 9s, 16s) for the first four
 * attempts and throws once the attempt budget is exhausted.
 * @param {number} step Attempt number
 * @param {Request} request Active request
 * @throws {Error} after more than four attempts
 */
export async function retryDelay(step, request) {
  if (step > 4) {
    throw new Error(
      `Failed to upload file after ${step} attempts - "${request.url}"`,
    )
  }
  await new Promise((resolve) => setTimeout(resolve, step ** 2 * 1000))
}

/**
 * Repeatable function for single file upload fetch request
 * @param {object} uploadProgress Progress controller instance
 * @param {typeof fetch} fetch Fetch implementation to use - useful for environments without a native fetch
 * @returns {function (Request, number): Promise<Response|void>}
 */
export const uploadFile =
  (uploadProgress, fetch) => async (request, attempt = 1) => {
    const filename = parseFilename(request.url)
    // Create a retry function with attempts incremented; rethrows once
    // retryDelay() exhausts the attempt budget so callers see the failure
    const handleFailure = async (failure) => {
      // NOTE(review): clone() may throw if this request's body was already
      // consumed by fetch on this attempt - TODO confirm retry of body-bearing
      // requests works in strict fetch implementations
      const retryClone = request.clone()
      // eslint-disable-next-line no-console
      console.warn(`\nRetrying upload for ${filename}: ${failure}`)
      try {
        await retryDelay(attempt, request)
        return uploadFile(uploadProgress, fetch)(retryClone, attempt + 1)
      } catch (err) {
        if ("failUpload" in uploadProgress) {
          uploadProgress.failUpload(filename)
        }
        throw err
      }
    }
    // This is needed to cancel the request in case of client errors
    if ("startUpload" in uploadProgress) {
      uploadProgress.startUpload(filename)
    }
    try {
      // Clone before using the request to allow retries to reuse the body
      const response = await fetch(request)
      if (response.status === 200) {
        // We need to wait for the response body or fetch-h2 may leave the connection open
        await response.json()
        if ("finishUpload" in uploadProgress) {
          uploadProgress.finishUpload(filename)
        }
        uploadProgress.increment()
      } else {
        // Any non-200 status (including other 2xx) is treated as retryable
        await handleFailure(response.statusText)
      }
    } catch (err) {
      await handleFailure(err)
    }
  }

/**
 * Upload a list of file Requests in batches of bounded parallelism
 * @param {Request[]} requests
 * @param {number} totalSize expected total size of all requests
 * @param {object} uploadProgress Progress controller instance
 * @param {typeof fetch} fetch Fetch implementation to use
 */
export async function uploadParallel(
  requests,
  totalSize,
  uploadProgress,
  fetch,
) {
  // Array stride of parallel requests
  const parallelism = uploadParallelism(requests, totalSize)
  const upload = uploadFile(uploadProgress, fetch)
  for (
    let rIndex = 0;
    rIndex < requests.length;
    rIndex = rIndex + parallelism
  ) {
    // allSettled keeps one failed upload from aborting the rest of its batch.
    // Wrap the callback so Array.map's index argument is not passed through
    // as uploadFile's `attempt` counter (which would skew retry budgets).
    await Promise.allSettled(
      requests
        .slice(rIndex, rIndex + parallelism)
        .map((request) => upload(request)),
    )
  }
}
6 changes: 3 additions & 3 deletions packages/openneuro-app/src/scripts/uploader/file-upload.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { config } from "../config"
import { uploads } from "@openneuro/client"
import { uploadParallel } from "./file-upload-parallel"

/**
* Trim the webkitRelativePath value to only include the dataset relative path
Expand Down Expand Up @@ -38,7 +38,7 @@ export const getRelativePath = (
*/
export const encodeFilePath = (file, options = { stripRelativePath: false }) =>
  file.webkitRelativePath
    // ":" stands in for "/" so the whole path fits in one URL segment
    ? getRelativePath(file, options).replace(new RegExp("/", "g"), ":")
    : file.name

/**
Expand Down Expand Up @@ -85,5 +85,5 @@ export async function uploadFiles({

// No background fetch
// Parallelism is handled by the client in this case
return uploads.uploadParallel(requests, totalSize, uploadProgress, fetch)
return uploadParallel(requests, totalSize, uploadProgress, fetch)
}
36 changes: 36 additions & 0 deletions packages/openneuro-app/src/scripts/uploader/hash-file-list.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
/**
 * Java hashcode implementation for browser and Node.js
 *
 * Iterates UTF-16 code units; `| 0` keeps the accumulator in signed
 * 32-bit range, matching Java's int overflow behavior.
 * @param {string} str
 */
function hashCode(str) {
  let hash = 0
  for (let index = 0; index < str.length; index += 1) {
    hash = ((hash << 5) - hash + str.charCodeAt(index)) | 0
  }
  return hash
}

/**
 * Calculate a hash from a list of files to upload
 *
 * Each file contributes "path:size"; entries are sorted so the hash is
 * order-independent, and the datasetId namespaces the result.
 * @param {string} datasetId Dataset namespace for this hash
 * @param {Array<object>} files Files being uploaded
 * @returns {string} Hex string identity hash
 */
export function hashFileList(datasetId, files) {
  const entries = files.map((file) => {
    const path = "webkitRelativePath" in file
      ? file.webkitRelativePath
      : file.filename
    return `${path}:${file.size}`
  })
  const identity = datasetId + entries.sort().join(":")
  return Math.abs(hashCode(identity)).toString(16)
}
36 changes: 32 additions & 4 deletions packages/openneuro-app/src/scripts/uploader/upload-mutation.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,34 @@
import { datasets, uploads } from "@openneuro/client"
import { gql } from "@apollo/client"
import { SUBMIT_METADATA } from "../dataset/mutations/submit-metadata.jsx"

// Create a new dataset; the deface/consent affirmations are recorded at
// creation time and only the new dataset id is returned.
export const CREATE_DATASET = gql`
mutation createDataset($affirmedDefaced: Boolean, $affirmedConsent: Boolean) {
createDataset(
affirmedDefaced: $affirmedDefaced
affirmedConsent: $affirmedConsent
) {
id
}
}
`

// Prepare an upload session for a dataset; returns the token and
// endpoint the uploader needs to transfer files.
export const PREPARE_UPLOAD = gql`
mutation prepareUpload($datasetId: ID!, $uploadId: ID!) {
prepareUpload(datasetId: $datasetId, uploadId: $uploadId) {
id
datasetId
token
endpoint
}
}
`

// Mark an upload session as complete once all files are transferred.
export const FINISH_UPLOAD = gql`
mutation finishUpload($uploadId: ID!) {
finishUpload(uploadId: $uploadId)
}
`

/**
* Create a dataset and update the label
* @param {object} client Apollo client
Expand All @@ -9,7 +37,7 @@ export const createDataset =
(client) => ({ affirmedDefaced, affirmedConsent }) => {
return client
.mutate({
mutation: datasets.createDataset,
mutation: CREATE_DATASET,
variables: { affirmedDefaced, affirmedConsent },
errorPolicy: "all",
})
Expand All @@ -22,7 +50,7 @@ export const createDataset =
*/
/**
 * Run the PREPARE_UPLOAD mutation for a dataset upload session
 * @param {object} client Apollo client
 */
export const prepareUpload = (client) => ({ datasetId, uploadId }) => {
  return client.mutate({
    mutation: PREPARE_UPLOAD,
    variables: { datasetId, uploadId },
  })
}
Expand All @@ -33,7 +61,7 @@ export const prepareUpload = (client) => ({ datasetId, uploadId }) => {
*/
/**
 * Run the FINISH_UPLOAD mutation to close out an upload session
 * @param {object} client Apollo client
 */
export const finishUpload = (client) => (uploadId) => {
  return client.mutate({
    mutation: FINISH_UPLOAD,
    variables: { uploadId },
  })
}
Expand Down
Loading

0 comments on commit 9e65900

Please sign in to comment.