diff --git a/README.md b/README.md
index 6fa05fd07..66459cd24 100644
--- a/README.md
+++ b/README.md
@@ -80,26 +80,6 @@ GATSBY_MANIFEST_PATH=/path/to/zipped/ast/file.zip
 GATSBY_SNOOTY_DEV=true
 ```
 
-### Running with Gatsby Cloud preview
-
-Snooty uses Gatsby Cloud to perform content staging builds for MongoDB documentation. These builds source their ASTs through the [Snooty Data API](https://github.com/mongodb/snooty-data-api) and require additional environment variables for setup. To emulate a Gatsby Cloud preview build locally, include the following in your `.env.development` file:
-
-```
-GATSBY_CLOUD_SITE_USER=
-```
-
-Since building with the Gatsby Cloud preview source plugin expects build data to be present in our team's database, please use the Autobuilder to perform one or more builds prior to running the frontend. Otherwise, use a different `GATSBY_CLOUD_SITE_USER`.
-
-When ready, run the following command:
-
-```shell
-npm run develop:preview
-```
-
-This command will run the `gatsby-source-snooty-preview` source plugin, where all AST data for the specified `GATSBY_CLOUD_SITE_USER` will be used to mimic a Gatsby Cloud site locally. All build data across every unique project + branch combination for that user will be built on the single site. To access the built content, go to `http://localhost:8000///`.
-
-Note that this process assumes that the default public Snooty Data API endpoint is being used. If the staging instance of the API is desired, you will need to set the `API_BASE` env to the staging URL, include the expected client credentials as environment variables (found in Parameter Store), and then run the build on the office VPN. Please see the team's Gatsby Cloud template sites as examples.
-
 ## Staging
 
 Install libxml2 with `brew install libxml2` on mac and `apt-get install libxml2` on linux
@@ -112,10 +92,6 @@ npm run build:clean:stage
 
 :warning: Note: This will promote the contents of your local public directory. Your instance in staging may break or be outdated if you haven't run `npm run build` before `make stage`.
 
-### Staging with Gatsby Cloud preview
-
-If your changes specifically affect Gatsby Cloud preview builds, set up and use your own Gatsby Cloud site (denoted by GitHub username) in our team's organization. The feature branch can be assigned to the Gatsby Cloud site. Multiple feature branches in parallel may require the use of multiple Gatsby Cloud sites. See this [wiki page](https://wiki.corp.mongodb.com/display/DE/How+to+Set+Up+a+New+Gatsby+Cloud+Site) for help with setup.
-
 ## Releasing
 
 We have configured an automatic release process using [GitHub Actions](https://github.com/features/actions) that is triggered by [npm-version](https://docs.npmjs.com/cli/version). To release a version, you must have admin privileges in this repo. Then proceed as follows:
@@ -126,25 +102,6 @@ We have configured an automatic release process using [GitHub Actions](https://g
 
 :warning: This process cannot be completed if the releaser's `origin` points to a fork.
 
-### Gatsby Cloud
-
-Gatsby Cloud uses set GitHub branches to build sites. Right now, we have:
-
-1. `gatsby-cloud-latest` - Used by the Gatsby Cloud sites for the docs team. This branch should typically have the latest production release tag used by the Autobuilder.
-2. `gatsby-cloud-rc` - Used by Gatsby Cloud sites designated for pre-production. This branch should be used for testing release candidates end-to-end with the Autobuilder in preprd.
-
-When a new frontend release tag is made, use the commands below to update the desired Gatsby Cloud branch.
-
-:warning: Note that the following commands include a force push to make it easy to update and rollback the branch as needed.
-
-```sh
-git fetch origin --tags
-git checkout tags/
-git push -f origin HEAD:
-```
-
-Once the branch is updated with a given tag, all Gatsby Cloud sites using that branch will be rebuilt. Ideally, `gatsby-cloud-latest` is only updated after the Autobuilder has completed its latest release, to ensure versions of the frontend and parser are compatible.
-
 ## Testing
 
 Tests can be run using:
diff --git a/gatsby-config.js b/gatsby-config.js
index ec7230f3f..8672c1e5b 100644
--- a/gatsby-config.js
+++ b/gatsby-config.js
@@ -1,16 +1,15 @@
 const { generatePathPrefix } = require('./src/utils/generate-path-prefix');
 const { siteMetadata } = require('./src/utils/site-metadata');
-const { isGatsbyPreview } = require('./src/utils/is-gatsby-preview');
 
-const isPreview = isGatsbyPreview();
-const pathPrefix = !isPreview ? generatePathPrefix(siteMetadata) : undefined;
+const pathPrefix = generatePathPrefix(siteMetadata);
+const layoutComponentRelativePath = `./src/layouts/index.js`;
 
 console.log('PATH PREFIX', pathPrefix);
 
 // Specifies which plugins to use depending on build environment
 // Keep our main plugin at top to include file saving before image plugins
 const plugins = [
-  isPreview ? 'gatsby-source-snooty-preview' : 'gatsby-source-snooty-prod',
+  'gatsby-source-snooty-prod',
   `gatsby-plugin-image`,
   `gatsby-plugin-sharp`,
   `gatsby-transformer-sharp`, // Needed for dynamic images
@@ -22,20 +21,14 @@ const plugins = [
     },
   },
   'gatsby-plugin-emotion',
-];
-// PRODUCTION DEPLOYMENTS --
-// If not a preview build, use the layout that includes the
-// consistent navbar and footer and generate a sitemap.
-if (!isPreview) {
-  plugins.push(`gatsby-plugin-sitemap`);
-  const layoutComponentRelativePath = `./src/layouts/index.js`;
-  plugins.push({
+  'gatsby-plugin-sitemap',
+  {
     resolve: 'gatsby-plugin-layout',
     options: {
      component: require.resolve(layoutComponentRelativePath),
     },
-  });
-}
+  },
+];
 
 module.exports = {
   plugins,
diff --git a/netlify/README.md b/netlify/README.md
deleted file mode 100644
index f79349a76..000000000
--- a/netlify/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Content in this folder is intended to be automatically sourced and used by builds on Netlify.
-
-The functions folder exists to host custom functions hosted by Netlify. Currently, the ones present are automatically
-triggered when one of Netlify's events match the name of the function file.
-See: https://docs.netlify.com/functions/trigger-on-events/ for more information. The purpose of these triggered events
-is to have a more accurate metric of when a build is actually complete, compared to Gatsby plugins' native setup.
diff --git a/netlify/functions/deploy-failed.js b/netlify/functions/deploy-failed.js
deleted file mode 100644
index a80277185..000000000
--- a/netlify/functions/deploy-failed.js
+++ /dev/null
@@ -1,7 +0,0 @@
-import { callPostBuildWebhook } from '../../plugins/gatsby-source-snooty-preview/utils/post-build';
-import { constructResPayload } from '../utils';
-
-export async function handler(event, _context) {
-  const resPayload = constructResPayload(event);
-  await callPostBuildWebhook(resPayload, 'failed');
-}
diff --git a/netlify/functions/deploy-succeeded.js b/netlify/functions/deploy-succeeded.js
deleted file mode 100644
index 8e254360e..000000000
--- a/netlify/functions/deploy-succeeded.js
+++ /dev/null
@@ -1,7 +0,0 @@
-import { callPostBuildWebhook } from '../../plugins/gatsby-source-snooty-preview/utils/post-build';
-import { constructResPayload } from '../utils';
-
-export async function handler(event, _context) {
-  const resPayload = constructResPayload(event);
-  await callPostBuildWebhook(resPayload, 'completed');
-}
diff --git a/netlify/netlify.toml b/netlify/netlify.toml
deleted file mode 100644
index 5b21eff98..000000000
--- a/netlify/netlify.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-[functions."deploy-*"]
-  # build-hook.txt is a temporary file created at build time for Netlify builds
-  included_files = ['build-hook.txt', 'plugins/gatsby-source-snooty-preview/utils/post-build.js', 'netlify/utils.js']
diff --git a/netlify/utils.js b/netlify/utils.js
deleted file mode 100644
index 51f4534f6..000000000
--- a/netlify/utils.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-
-/**
- * Parses build hook data from the expected temporary location
- * @returns {object | undefined}
- */
-const parseBuildHookData = () => {
-  // This file does not currently exist, but should be created on Netlify builds as part of `npm run build:netlify`.
-  // The INCOMING_HOOK_BODY env var is not automatically passed along to functions, so we use a txt file to save it
-  const relativeFilePath = '../build-hook.txt';
-  const buildHookDataString = fs.readFileSync(path.resolve(__dirname, relativeFilePath), 'utf-8');
-  if (!buildHookDataString) {
-    console.log('No build hook data found.');
-    return;
-  }
-  return JSON.parse(buildHookDataString);
-};
-
-export const constructResPayload = (event) => {
-  const buildHookData = parseBuildHookData();
-  const parsedEventBody = JSON.parse(event.body);
-  // This is Netlify's default post-deployment payload. We include it with our custom data in case
-  // we want to process any information
-  const netlifyPayload = parsedEventBody.payload;
-  return {
-    netlifyPayload,
-    ...buildHookData,
-  };
-};
diff --git a/package.json b/package.json
index b7cc43168..705244ebb 100644
--- a/package.json
+++ b/package.json
@@ -45,7 +45,6 @@
     "build:no-prefix": "gatsby build",
     "clean": "gatsby clean",
     "develop": "gatsby develop",
-    "develop:preview": "GATSBY_IS_PREVIEW=true gatsby develop",
     "ensure-main": "node scripts/ensure-main.js",
     "format": "npm run prettier -- --check",
     "format:fix": "npm run prettier -- --write",
diff --git a/plugins/gatsby-source-snooty-preview/gatsby-node.js b/plugins/gatsby-source-snooty-preview/gatsby-node.js
deleted file mode 100644
index 58901f86a..000000000
--- a/plugins/gatsby-source-snooty-preview/gatsby-node.js
+++ /dev/null
@@ -1,305 +0,0 @@
-const { getDataStore } = require('gatsby/dist/datastore');
-const path = require('path');
-const stream = require('stream');
-const { promisify } = require('util');
-const pipeline = promisify(stream.pipeline);
-const got = require(`got`);
-const { parser } = require(`stream-json/jsonl/Parser`);
-const { sourceNodes } = require(`./other-things-to-source`);
-const { fetchClientAccessToken } = require('./utils/kanopy-auth.js');
-const { callPostBuildWebhook } = require('./utils/post-build.js');
-const {
-  consumeData,
-  createSnootyMetadataId,
-  KEY_LAST_FETCHED,
-  KEY_LAST_CLIENT_ACCESS_TOKEN,
-} = require('./utils/data-consumer.js');
-
-// Global variable to allow webhookBody from sourceNodes step to be passed down
-// to other Gatsby build steps that might not pass webhookBody natively.
-let currentWebhookBody = {};
-// Flag if the build is parsing data from Netlify or Gatsby Cloud, as there may be a difference
-let isNetlifyBuild = false;
-
-exports.createSchemaCustomization = async ({ actions }) => {
-  const { createTypes } = actions;
-  const typeDefs = `
-  type Page implements Node @dontInfer {
-    page_id: String
-    branch: String
-    pagePath: String
-    ast: JSON!
-    metadata: SnootyMetadata @link
-    componentNames: [String!]
-  }
-
-  type PagePath implements Node @dontInfer {
-    page_id: String!
-    branch: String!
-    project: String!
-    pageNodeId: String!
-  }
-
-  type SnootyMetadata implements Node @dontInfer {
-    metadata: JSON
-    branch: String
-    project: String
-  }
-
-  type RemoteMetadata implements Node @dontInfer {
-    remoteMetadata: JSON
-  }
-
-  type ChangelogData implements Node @dontInfer {
-    changelogData: JSON
-  }
-
-  type PageImage implements Node @dontInfer {
-    slug: String
-    images: [File] @link(by: "relativePath", from: "pageAssets")
-  }
-
-  type AssociatedProduct implements Node @dontInfer {
-    productName: String
-  }
-
-  type Breadcrumb implements Node @dontInfer {
-    breadcrumbs: JSON
-    propertyUrl: String
-  }
-
-  `;
-  createTypes(typeDefs);
-};
-
-const APIBase = process.env.API_BASE || `https://snooty-data-api.mongodb.com`;
-const GATSBY_CLOUD_SITE_USER = process.env.GATSBY_CLOUD_SITE_USER;
-
-/**
- * Attempts to parse Netlify's build webhook payload.
- * @returns {object | undefined} The parsed payload, if valid, or `undefined` otherwise
- */
-const getNetlifyHookBody = () => {
-  // Netlify adds webhook body payloads to env
-  const incomingHookBody = process.env.INCOMING_HOOK_BODY;
-  if (!incomingHookBody) {
-    return;
-  }
-
-  isNetlifyBuild = true;
-  try {
-    const parsedPayload = JSON.parse(incomingHookBody);
-    return parsedPayload;
-  } catch (e) {
-    console.error(`Error parsing INCOMING_HOOK_BODY: ${incomingHookBody}. ${e}`);
-  }
-};
-
-let isFirstRun = true;
-exports.sourceNodes = async ({
-  actions,
-  createNodeId,
-  getNode,
-  getNodesByType,
-  reporter,
-  createContentDigest,
-  cache,
-  webhookBody,
-}) => {
-  // Netlify and Gatsby Cloud have different ways of sending webhooks, with Gatsby's having a default value of {}.
-  currentWebhookBody = getNetlifyHookBody() || webhookBody;
-  console.log({ currentWebhookBody });
-  let hasOpenAPIChangelog = false;
-  const { createNode, touchNode } = actions;
-
-  const fileWritePromises = [];
-  const lastFetched = (await cache.get(KEY_LAST_FETCHED)) || 0;
-  const lastClientAccessToken = await cache.get(KEY_LAST_CLIENT_ACCESS_TOKEN);
-  console.log({ lastFetched });
-
-  if (isFirstRun && lastFetched) {
-    // nodes of following types are managed statefully:
-    // SnootyMetadata, Page, PagePath
-    // we need to touch on them on delta updates on first run of a process to prevent them from being garbage collected
-    const datastore = getDataStore();
-    for (const nodeType of ['SnootyMetadata', 'Page', 'PagePath']) {
-      for (const node of datastore.iterateNodesByType(nodeType)) {
-        touchNode(node);
-      }
-    }
-  }
-
-  try {
-    if (!GATSBY_CLOUD_SITE_USER) {
-      throw new Error('Missing GATSBY_CLOUD_SITE_USER');
-    }
-
-    // Generate client access token only if trying to access Snooty Data API's staging instance
-    const clientAccessToken = APIBase.includes('.staging') ? await fetchClientAccessToken(lastClientAccessToken) : '';
-    let url;
-    if (lastFetched) {
-      url = `${APIBase}/user/${GATSBY_CLOUD_SITE_USER}/documents?updated=${lastFetched}`;
-    } else {
-      url = `${APIBase}/user/${GATSBY_CLOUD_SITE_USER}/documents`;
-    }
-
-    const headers = {};
-    if (clientAccessToken) {
-      headers['Authorization'] = `Bearer ${clientAccessToken}`;
-    }
-    const httpStream = got.stream(url, { headers });
-
-    let pageCount = 0;
-    // Callback function to be ran after a valid page has been found and handled.
-    // Tracks and updates information that spans across several data entries
-    const onHandlePage = (pageTemplate, pageId, pageNodeId) => {
-      if (pageTemplate === 'changelog') hasOpenAPIChangelog = true;
-      pageCount += 1;
-      if (pageCount % 1000 === 0) {
-        console.log({ pageCount, page_id: pageId, id: pageNodeId });
-      }
-    };
-
-    // Since there's a lot of data incoming from the Snooty Data API, we stream
-    // the data in chunks and parse them as they come instead of fetching everything
-    // as a single JSON response
-    const decode = parser();
-    decode.on('data', async (_entry) => {
-      // Un-nest data
-      const entry = _entry.value;
-      await consumeData(entry, {
-        actions,
-        cache,
-        clientAccessToken,
-        createContentDigest,
-        createNodeId,
-        fileWritePromises,
-        getNode,
-        onHandlePage,
-      });
-    });
-
-    console.time(`source updates`);
-    // Wait for HTTP connection to close.
-    await pipeline(httpStream, decode);
-    console.timeEnd(`source updates`);
-  } catch (error) {
-    if (!isNetlifyBuild) {
-      await callPostBuildWebhook(currentWebhookBody, 'failed');
-    }
-    reporter.panic('There was an issue sourcing nodes', error);
-  }
-
-  // Wait for all assets to be written.
-  await Promise.all(fileWritePromises);
-
-  // Source old nodes.
-  console.time(`old source nodes`);
-  await sourceNodes({
-    hasOpenAPIChangelog,
-    github_username: GATSBY_CLOUD_SITE_USER,
-    createNode,
-    createContentDigest,
-    createNodeId,
-    getNodesByType,
-  });
-  console.timeEnd(`old source nodes`);
-  isFirstRun = false;
-};
-
-// Prevent errors when running gatsby build caused by browser packages run in a node environment.
-exports.onCreateWebpackConfig = ({ plugins, actions }) => { - const providePlugins = { - Buffer: ['buffer', 'Buffer'], - process: require.resolve('../../stubs/process.js'), - }; - - const fallbacks = { stream: require.resolve('stream-browserify'), buffer: require.resolve('buffer/') }; - - actions.setWebpackConfig({ - plugins: [plugins.provide(providePlugins)], - resolve: { - fallback: fallbacks, - alias: { - process: 'process/browser', - }, - }, - }); -}; - -exports.createPages = async ({ actions, createNodeId, getNode, graphql, reporter }) => { - const { createPage } = actions; - const templatePath = path.join(__dirname, `../../src/components/DocumentBodyPreview.js`); - const result = await graphql(` - query { - allPagePath { - totalCount - nodes { - pageNodeId - branch - page_id - project - } - allProjects: distinct(field: { project: SELECT }) - } - } - `); - - if (result.errors) { - if (!isNetlifyBuild) { - await callPostBuildWebhook(currentWebhookBody, 'failed'); - } - reporter.panic('There was an error in the graphql query', result.errors); - } - - try { - result.data.allPagePath.nodes.forEach((node) => { - const pagePath = path.join(node.project, node.branch, node.page_id); - let slug = node.page_id; - // Slices off leading slash to ensure slug matches an entry within the toctreeOrder and renders InternalPageNav components - if (slug !== '/' && slug[0] === '/') slug = slug.slice(1); - - const metadataNodeId = createSnootyMetadataId({ createNodeId, branch: node.branch, project: node.project }); - if (!getNode(metadataNodeId)) { - // Take into account the possibility of having new page data available through the API, - // but no metadata yet due to async uploads - console.warn( - `Skipping node creation for page "${node.page_id}", in project "${node.project}" on branch "${node.branch}". No metadata node "${metadataNodeId}" found.` - ); - return; - } - - createPage({ - path: pagePath, - component: templatePath, - context: { - page_id: node.pageNodeId, - id: node.pageNodeId, - slug, - // Hardcode static/safe values to prevent incremental builds from rebuilding versioned preview pages - repoBranches: {}, - associatedReposInfo: {}, - isAssociatedProduct: false, - project: node.project, - }, - }); - }); - } catch (err) { - if (!isNetlifyBuild) { - await callPostBuildWebhook(currentWebhookBody, 'failed'); - } - reporter.panic('Could not build pages off of graphl query', err); - } -}; - -// `onPostBuild` is run by Gatsby Cloud after everything is built, but before the -// content is deployed to the preview site. This can result in a short delay between -// when the post-build webhook is called and when the content is updated. -// Ideally, we would use Gatsby Cloud's Outgoing Notifications feature once it can -// support passing through custom data from the preview webhook's body (to include the -// Autobuilder job ID associated with the GC build). 
-exports.onPostBuild = async () => { - if (!isNetlifyBuild) { - await callPostBuildWebhook(currentWebhookBody, 'completed'); - } -}; diff --git a/plugins/gatsby-source-snooty-preview/other-things-to-source.js b/plugins/gatsby-source-snooty-preview/other-things-to-source.js deleted file mode 100644 index d8a153e47..000000000 --- a/plugins/gatsby-source-snooty-preview/other-things-to-source.js +++ /dev/null @@ -1,24 +0,0 @@ -const { siteMetadata } = require('../../src/utils/site-metadata'); -const { realmDocumentDatabase } = require('../../src/init/DocumentDatabase.js'); -const { createOpenAPIChangelogNode } = require('../utils/openapi'); -const { createProductNodes } = require('../utils/products'); -const { createDocsetNodes } = require('../utils/docsets'); -const { createBreadcrumbNodes } = require('../utils/breadcrumbs'); - -// Sources nodes for the preview plugin that are not directly related to data -// from the Snooty Data API -exports.sourceNodes = async ({ - hasOpenAPIChangelog, - createNode, - createContentDigest, - createNodeId, - getNodesByType, -}) => { - let db = realmDocumentDatabase; - await db.connect(); - await createProductNodes({ db, createNode, createNodeId, createContentDigest }); - await createDocsetNodes({ db, createNode, createNodeId, createContentDigest }); - await createBreadcrumbNodes({ db, createNode, createNodeId, createContentDigest }); - if (hasOpenAPIChangelog) - await createOpenAPIChangelogNode({ createNode, createNodeId, createContentDigest, siteMetadata, db }); -}; diff --git a/plugins/gatsby-source-snooty-preview/package.json b/plugins/gatsby-source-snooty-preview/package.json deleted file mode 100644 index bd5f9e09b..000000000 --- a/plugins/gatsby-source-snooty-preview/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "name": "gatsby-source-snooty-preview" -} diff --git a/plugins/gatsby-source-snooty-preview/utils/data-consumer.js b/plugins/gatsby-source-snooty-preview/utils/data-consumer.js deleted file mode 100644 index 0e281830b..000000000 --- a/plugins/gatsby-source-snooty-preview/utils/data-consumer.js +++ /dev/null @@ -1,181 +0,0 @@ -const { transformBreadcrumbs } = require('../../../src/utils/setup/transform-breadcrumbs.js'); -const { saveStaticFiles, saveFile } = require('../../../src/utils/setup/save-asset-files'); -const { getNestedValue } = require('../../../src/utils/get-nested-value'); - -const KEY_LAST_FETCHED = 'lastFetched'; -const KEY_LAST_CLIENT_ACCESS_TOKEN = 'lastClientAccessToken'; - -function createSnootyMetadataId({ branch, project, createNodeId }) { - return createNodeId(`metadata-${branch}-${project}`); -} - -// Syncs the plugin with timestamp of data being returned from the API. 
-const handleTimestamp = (data, { cache, clientAccessToken }) => { - cache.set(KEY_LAST_FETCHED, data); - cache.set(KEY_LAST_CLIENT_ACCESS_TOKEN, clientAccessToken); -}; - -const handleAsset = (data, { fileWritePromises }) => { - const { filenames, assetData } = data; - - // Incorrect asset format should not be acceptable - if (!filenames || !filenames.length) { - throw new Error('No filenames found for asset'); - } - if (!assetData) { - throw new Error('Missing asset data'); - } - - filenames.forEach((filePath) => { - // These promises will be resolved once all data is consumed - fileWritePromises.push(saveFile(filePath, Buffer.from(assetData, 'base64'))); - }); -}; - -const handleMetadata = async ( - data, - { createContentDigest, createNode, createNodeId, deleteNode, getNode, shouldDeleteContentNode } -) => { - const { _id, build_id, created_at, static_files: staticFiles, ...metadataMinusStatic } = data; - const { parentPaths, slugToBreadcrumbLabel, branch, project } = metadataMinusStatic; - - const nodeId = createSnootyMetadataId({ createNodeId, branch, project }); - - if (shouldDeleteContentNode) { - deleteNode(getNode(nodeId)); - return; - } - - if (parentPaths) { - transformBreadcrumbs(parentPaths, slugToBreadcrumbLabel); - } - - // Save files in the static_files field of metadata document, including intersphinx inventories. - if (staticFiles) { - await saveStaticFiles(staticFiles); - } - - createNode({ - children: [], - id: nodeId, - internal: { - contentDigest: createContentDigest(metadataMinusStatic), - type: 'SnootyMetadata', - }, - branch, - project, - parent: null, - metadata: metadataMinusStatic, - }); -}; - -const handlePage = ( - data, - { createContentDigest, createNode, createNodeId, deleteNode, getNode, onHandlePage, shouldDeleteContentNode } -) => { - // Strip source string, in case it exists. We don't need the raw source of the AST - const { source, ...page } = data; - - const filename = getNestedValue(['filename'], page) || ''; - // There can be ASTs for included .rst files as well. We should skip these, in case - // we encounter them - if (!filename || !filename.endsWith('.txt')) { - console.warn(`Found an AST that is not for a page: ${filename}`); - return; - } - - const branch = page.page_id.split('/')[2]; - const raw_page_id = page.page_id.split('/').slice(3).join('/'); - const page_id = raw_page_id === 'index' ? '/' : `/${raw_page_id}`; - const project = page.page_id.split('/')[0]; - - const pageNodeId = createNodeId(page_id + project + branch); - const pagePathNodeId = pageNodeId + '/path'; - - if (shouldDeleteContentNode) { - deleteNode(getNode(pageNodeId)); - deleteNode(getNode(pagePathNodeId)); - return; - } - - page.page_id = page_id; - page.metadata = createSnootyMetadataId({ createNodeId, branch, project }); - page.id = pageNodeId; - page.internal = { - type: 'Page', - contentDigest: createContentDigest(page), - }; - - const pagePathNode = { - id: pagePathNodeId, - page_id, - branch, - project, - pageNodeId: page.id, - internal: { - type: 'PagePath', - contentDigest: page.internal.contentDigest, - }, - }; - - createNode(page); - createNode(pagePathNode); - - const pageTemplate = data.ast?.options?.template; - onHandlePage(pageTemplate, page_id, pageNodeId); -}; - -/** - * Handles incoming data accordingly based on its data type. Notably, this handles - * converting build data from the Snooty Data API into nodes and assets that the - * Gatsby site will need to render pages. 
- * @param {*} entry - A single data entry obtained from the Snooty Data API - * @param {*} options - Gatsby functions and other utilities to be used by handlers - */ -const consumeData = async ( - entry, - { actions, cache, createNodeId, createContentDigest, getNode, fileWritePromises, clientAccessToken, onHandlePage } -) => { - const { type, data } = entry; - - // Shape and format should be consistent across all data - if (!type) { - throw new Error('Data entry is missing data type'); - } - if (!data) { - throw new Error('Data entry is missing data'); - } - - const shouldDeleteContentNode = data.deleted; - const { createNode, deleteNode } = actions; - - if (type === 'timestamp') { - handleTimestamp(data, { cache, clientAccessToken }); - } else if (type === 'asset') { - handleAsset(data, { fileWritePromises }); - } else if (type === 'metadata') { - await handleMetadata(data, { - createContentDigest, - createNode, - createNodeId, - deleteNode, - getNode, - shouldDeleteContentNode, - }); - } else if (type === 'page') { - handlePage(data, { - createContentDigest, - createNode, - createNodeId, - deleteNode, - getNode, - onHandlePage, - shouldDeleteContentNode, - }); - } else { - // Shouldn't affect current builds - console.warn(`Unexpected data type: ${type}`); - } -}; - -module.exports = { consumeData, createSnootyMetadataId, KEY_LAST_FETCHED, KEY_LAST_CLIENT_ACCESS_TOKEN }; diff --git a/plugins/gatsby-source-snooty-preview/utils/kanopy-auth.js b/plugins/gatsby-source-snooty-preview/utils/kanopy-auth.js deleted file mode 100644 index 51fde316c..000000000 --- a/plugins/gatsby-source-snooty-preview/utils/kanopy-auth.js +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Checks if the configuration property exists, and throws an error otherwise. - * @param configProp - * @param configType - */ -const validateConfigType = (configProp, configType) => { - if (!configProp) { - throw new Error(`Missing ${configType} for Snooty Data API access`); - } -}; - -/** - * Generates the authZ token needed for requesting an access token to Kanopy. - */ -const getClientCredentialsHeader = () => { - const clientId = process.env.OAUTH_CLIENT_ID; - validateConfigType(clientId, 'client ID'); - const clientSecret = process.env.OAUTH_CLIENT_SECRET; - validateConfigType(clientSecret, 'client secret'); - return Buffer.from(`${clientId}:${clientSecret}`, 'utf-8').toString('base64'); -}; - -/** - * Generates a new access token to allow for authentication against Kanopy services. 
- */ -const generateNewAccessToken = async () => { - const grantType = process.env.OAUTH_GRANT_TYPE; - validateConfigType(grantType, 'grant type'); - const scope = process.env.OAUTH_SCOPE; - validateConfigType(scope, 'scope'); - const authUrl = process.env.OAUTH_TOKEN_AUTH_URL; - validateConfigType(authUrl, 'auth token url'); - - // Request a new access token from Kanopy's token authentication endpoint - const authRequestUrl = `${authUrl}?grant_type=${grantType}&scope=${scope}`; - const headers = { - authorization: `Basic ${getClientCredentialsHeader()}`, - accept: 'application/json', - 'Content-Type': 'application/x-www-form-urlencoded', - 'cache-control': 'no-cache', - }; - const res = await fetch(authRequestUrl, { method: 'POST', headers }); - if (!res.ok) { - throw new Error('Error trying to request new access token'); - } - - const data = await res.json(); - const token = data['access_token']; - if (!token) { - throw new Error('Could not find new access token'); - } - - return token; -}; - -/** - * Returns a valid client access token that can be used for machine-machine communication - * between a client and a service hosted on Kanopy. - * See: https://kanopy.corp.mongodb.com/docs/development/authentication_and_authorization/ - * @param prevToken - */ -const fetchClientAccessToken = async (prevToken) => { - if (!prevToken) { - return generateNewAccessToken(); - } - // Decoded value is the JSON object representation of the token string, with token's expiration date - const decodedValue = JSON.parse(Buffer.from(prevToken.split('.')[1], 'base64').toString('ascii')); - // Check if token is expired, or near expiration - if (decodedValue.exp < Date.now() / 1000) { - return generateNewAccessToken(); - } - return prevToken; -}; - -module.exports = { fetchClientAccessToken }; diff --git a/plugins/gatsby-source-snooty-preview/utils/post-build.js b/plugins/gatsby-source-snooty-preview/utils/post-build.js deleted file mode 100644 index fbca2f34e..000000000 --- a/plugins/gatsby-source-snooty-preview/utils/post-build.js +++ /dev/null @@ -1,63 +0,0 @@ -const crypto = require('crypto'); - -/** - * Constructs a signature using the payload and Snooty's secret. The signature - * can be used to help webhooks be more confident that the caller is Snooty. - * @param {string} payloadString - */ -const constructSnootyHeader = (payloadString) => - crypto.createHmac('sha256', process.env.SNOOTY_SECRET).update(payloadString).digest('hex'); - -/** - * Calls the post-build webhook to let the Autobuilder know that the Gatsby Cloud - * build is finished. - * @param {object} webhookBody - The webhook body passed to the source plugin to - * initiate the preview build. - * @param {'completed' | 'failed'} status - The status of the build, typically "completed" or "failed". - * This value should coincide with the Autobuilder's job statuses. - */ -const callPostBuildWebhook = async (webhookBody, status) => { - // Webhook body could be empty if the Gatsby Cloud site is doing a fresh build - // that was not called by the preview webhook - if (!webhookBody || !Object.keys(webhookBody).length) { - console.log('No webhookBody found. This build will not call the post-build webhook.'); - return; - } - - // Avoids completely throwing an error if a build is triggered with a custom payload outside the automated build process - if (!webhookBody.jobId) { - console.log( - 'No Autobuilder job ID included in the webhook payload. This build will not call the post-build webhook.' 
- ); - return; - } - - const supportedStatuses = ['completed', 'failed']; - if (!supportedStatuses.includes(status)) { - console.log(`Post-build webhook call does not support status "${status}".`); - return; - } - - const payload = { - ...webhookBody, - status, - }; - const body = JSON.stringify(payload); - const headers = { - 'x-snooty-signature': constructSnootyHeader(body), - }; - - console.log(`Calling post-build webhook with status "${status}".`); - const res = await fetch(process.env.AUTOBUILDER_POST_BUILD_WEBHOOK, { method: 'POST', body, headers }); - // Calling the webhook from this function should assume we are fulfilling the requirements of the call. - // Any error thrown here is definitely unexpected. - if (!res.ok) { - const errMessage = await res.text(); - throw new Error( - `There was an issue calling the Autobuilder post-build webhook. Please have the DOP team check CloudWatch logs. ${errMessage}` - ); - } - console.log('Post-build webhook was successfully called!'); -}; - -module.exports = { callPostBuildWebhook }; diff --git a/src/components/Breadcrumbs/CollapsedBreadcrumbs.js b/src/components/Breadcrumbs/CollapsedBreadcrumbs.js index 7217a28ff..b1178c479 100644 --- a/src/components/Breadcrumbs/CollapsedBreadcrumbs.js +++ b/src/components/Breadcrumbs/CollapsedBreadcrumbs.js @@ -3,18 +3,12 @@ import PropTypes from 'prop-types'; import { Menu, MenuItem } from '@leafygreen-ui/menu'; import IconButton from '@leafygreen-ui/icon-button'; import { withPrefix } from 'gatsby'; -import { useLocation } from '@gatsbyjs/reach-router'; import Icon from '@leafygreen-ui/icon'; import { formatText } from '../../utils/format-text'; -import { isGatsbyPreview } from '../../utils/is-gatsby-preview'; -import { getGatsbyPreviewLink } from '../../utils/get-gatsby-preview-link'; const CollapsedBreadcrumbs = ({ crumbs }) => { - const location = useLocation(); - const menuItems = crumbs.map((crumb, index) => { - let to = withPrefix(crumb.path); - if (isGatsbyPreview()) to = getGatsbyPreviewLink(to, location); + const to = withPrefix(crumb.path); return ( diff --git a/src/components/DocumentBodyPreview.js b/src/components/DocumentBodyPreview.js deleted file mode 100644 index 55dfba493..000000000 --- a/src/components/DocumentBodyPreview.js +++ /dev/null @@ -1,141 +0,0 @@ -import React, { useState } from 'react'; -import { graphql } from 'gatsby'; -import PropTypes from 'prop-types'; -import { findAllKeyValuePairs } from '../utils/find-all-key-value-pairs'; -import { getNestedValue } from '../utils/get-nested-value'; -import { getPlaintext } from '../utils/get-plaintext'; -import { getTemplate } from '../utils/get-template'; -import Layout from '../layouts/preview-layout'; -import { PageContext } from '../context/page-context'; -import SEO from './SEO'; -import FootnoteContext from './Footnote/footnote-context'; -import ComponentFactory from './ComponentFactory'; -import { InstruqtProvider } from './Instruqt/instruqt-context'; -import { TabProvider } from './Tabs/tab-context'; - -// Identify the footnotes on a page and all footnote_reference nodes that refer to them. 
-// Returns a map wherein each key is the footnote name, and each value is an object containing: -// - labels: the numerical label for the footnote -// - references: a list of the footnote reference ids that refer to this footnote -const getFootnotes = (nodes) => { - const footnotes = findAllKeyValuePairs(nodes, 'type', 'footnote'); - const footnoteReferences = findAllKeyValuePairs(nodes, 'type', 'footnote_reference'); - const numAnonRefs = footnoteReferences.filter( - (node) => !Object.prototype.hasOwnProperty.call(node, 'refname') - ).length; - // We label our footnotes by their index, regardless of their names to - // circumvent cases such as [[1], [#], [2], ...] - return footnotes.reduce((map, footnote, index) => { - if (footnote.name) { - // Find references associated with a named footnote - // eslint-disable-next-line no-param-reassign - map[footnote.name] = { - label: index + 1, - references: getNamedFootnoteReferences(footnoteReferences, footnote.name), - }; - } else { - // Find references associated with an anonymous footnote - // Replace potentially broken anonymous footnote ids - footnote.id = `${index + 1}`; - // eslint-disable-next-line no-param-reassign - map[footnote.id] = { - label: index + 1, - references: getAnonymousFootnoteReferences(index, numAnonRefs), - }; - } - return map; - }, {}); -}; - -// Find all footnote_reference node IDs associated with a given footnote by -// that footnote's refname -const getNamedFootnoteReferences = (footnoteReferences, refname) => { - return footnoteReferences.filter((node) => node.refname === refname).map((node) => node.id); -}; - -// They are used infrequently, but here we match an anonymous footnote to its reference. -// The nth footnote on a page is associated with the nth reference on the page. Since -// anon footnotes and footnote references are anonymous, we assume a 1:1 pairing, and -// have no need to query nodes. If there are more anonymous footnotes than references, -// we may return an empty array -const getAnonymousFootnoteReferences = (index, numAnonRefs) => { - return index > numAnonRefs ? [] : [`id${index + 1}`]; -}; - -const DocumentBody = (props) => { - const { - pageContext: { slug }, - data, - } = props; - const page = data.page.ast; - const metadata = data.page.metadata.metadata; - const template = page?.options?.template; - // Adds page to pageContext to mimic current behavior of passing entire page AST down - // pageContext for templates - props.pageContext.page = page; - const initialization = () => { - const pageNodes = getNestedValue(['children'], page) || []; - const footnotes = getFootnotes(pageNodes); - - return { pageNodes, footnotes }; - }; - - const [{ pageNodes, footnotes }] = useState(initialization); - - const lookup = slug === '/' ? 'index' : slug; - const pageTitle = getPlaintext(getNestedValue(['slugToTitle', lookup], metadata)) || 'MongoDB Documentation'; - const siteTitle = getNestedValue(['title'], metadata) || ''; - const { Template, useChatbot } = getTemplate(template); - - return ( - - - - - - - - - - - -
-
- ); -}; - -DocumentBody.propTypes = { - location: PropTypes.object.isRequired, - pageContext: PropTypes.shape({ - page: PropTypes.shape({ - children: PropTypes.array, - }).isRequired, - slug: PropTypes.string.isRequired, - }), -}; - -export default DocumentBody; - -export const query = graphql` - query ($id: String) { - page(id: { eq: $id }) { - ast - metadata { - metadata - } - } - } -`; diff --git a/src/components/Link.js b/src/components/Link.js index 6e17693af..697418982 100644 --- a/src/components/Link.js +++ b/src/components/Link.js @@ -1,6 +1,5 @@ import React from 'react'; import PropTypes from 'prop-types'; -import { useLocation } from '@gatsbyjs/reach-router'; import { Link as GatsbyLink } from 'gatsby'; import { css, cx } from '@leafygreen-ui/emotion'; import { Link as LGLink } from '@leafygreen-ui/typography'; @@ -9,9 +8,7 @@ import { palette } from '@leafygreen-ui/palette'; import ArrowRightIcon from '@leafygreen-ui/icon/dist/ArrowRight'; import { isRelativeUrl } from '../utils/is-relative-url'; import { joinClassNames } from '../utils/join-class-names'; -import { isGatsbyPreview } from '../utils/is-gatsby-preview'; import { validateHTMAttributes } from '../utils/validate-element-attributes'; -import { getGatsbyPreviewLink } from '../utils/get-gatsby-preview-link'; /* * Note: This component is not suitable for internal page navigation: @@ -97,7 +94,6 @@ const Link = ({ hideExternalIcon: hideExternalIconProp, ...other }) => { - const location = useLocation(); if (!to) to = ''; const anchor = to.startsWith('#'); @@ -121,8 +117,6 @@ const Link = ({ // Ensure trailing slash to = to.replace(/\/?(\?|#|$)/, '/$1'); - if (isGatsbyPreview()) to = getGatsbyPreviewLink(to, location); - return ( { const metadata = useSiteMetadata(); - const { openapi_pages, project } = useSnootyMetadata(); - const resourceLinkUrl = getResourceLinkUrl(metadata, project, tag, operationId, openapi_pages); + const { openapi_pages } = useSnootyMetadata(); + const resourceLinkUrl = getResourceLinkUrl(metadata, tag, operationId, openapi_pages); const { darkMode } = useDarkMode(); const allResourceChanges = diff --git a/src/components/OpenAPIChangelog/utils/getResourceLinkUrl.js b/src/components/OpenAPIChangelog/utils/getResourceLinkUrl.js index 9e028b0d0..b61bd4304 100644 --- a/src/components/OpenAPIChangelog/utils/getResourceLinkUrl.js +++ b/src/components/OpenAPIChangelog/utils/getResourceLinkUrl.js @@ -1,8 +1,8 @@ import { generatePathPrefix } from '../../../utils/generate-path-prefix'; import { normalizePath } from '../../../utils/normalize-path'; -const getResourceLinkUrl = (metadata, project, tag, operationId, openapi_pages = {}) => { - const pathPrefix = generatePathPrefix(metadata, project); +const getResourceLinkUrl = (metadata, tag, operationId, openapi_pages = {}) => { + const pathPrefix = generatePathPrefix(metadata); const resourceTag = `#tag/${tag.split(' ').join('-')}/operation/${operationId}`; const oaSpecPageRoute = Object.keys(openapi_pages).find((page) => page.includes('v2')) || diff --git a/src/context/toc-context.js b/src/context/toc-context.js index 2e8e1d879..74cf67d46 100644 --- a/src/context/toc-context.js +++ b/src/context/toc-context.js @@ -4,7 +4,6 @@ import { METADATA_COLLECTION } from '../build-constants'; import { useSiteMetadata } from '../hooks/use-site-metadata'; import { fetchDocument } from '../utils/realm'; import useSnootyMetadata from '../utils/use-snooty-metadata'; -import { isGatsbyPreview } from '../utils/is-gatsby-preview'; import { VersionContext } from 
'./version-context'; const TocContext = createContext({ @@ -21,12 +20,6 @@ const TocContextProvider = ({ children, remoteMetadata }) => { const [isLoaded, setIsLoaded] = useState(false); const getTocMetadata = useCallback(async () => { - // Embedded versioning is not expected to work in staging builds, so we should - // be able to safely return the default toctree. - if (isGatsbyPreview()) { - return toctree; - } - try { const filter = { project, diff --git a/src/layouts/preview-layout.js b/src/layouts/preview-layout.js deleted file mode 100644 index 3f4f5e9cc..000000000 --- a/src/layouts/preview-layout.js +++ /dev/null @@ -1,196 +0,0 @@ -import React from 'react'; -import PropTypes from 'prop-types'; -import { Global, css } from '@emotion/react'; -import styled from '@emotion/styled'; -import ContentTransition from '../components/ContentTransition'; -import PreviewHeader from '../components/Header/preview-header'; -import { Sidenav } from '../components/Sidenav'; -import RootProvider from '../components/RootProvider'; -import { getTemplate } from '../utils/get-template'; -import { theme } from '../theme/docsTheme'; -import { MetadataProvider } from '../utils/use-snooty-metadata'; -import ActionBar from '../components/ActionBar/ActionBar'; -import { StyledContentContainer } from '.'; - -// These fonts are ported over from @mdb/flora design system repo -// They are used on the content areas and are not included in Snooty itself -// Without consistent-nav, this is needed to keep the experience the same for the content writers' previews -const EUCLID_CIRCULAR_FAMILY = 'Euclid Circular A'; -const MONGODB_VALUE_SERIF_FAMILY = 'MongoDB Value Serif'; -const SOURCE_CODE_PRO_FAMILY = 'Source Code Pro'; - -const FONT_SRCS = { - [EUCLID_CIRCULAR_FAMILY]: { - regular: 'https://static.mongodb.com/com/fonts/EuclidCircularA-Regular-WebXL.woff2', - medium: 'https://static.mongodb.com/com/fonts/EuclidCircularA-Medium-WebXL.woff2', - }, - [MONGODB_VALUE_SERIF_FAMILY]: { - regular: 'https://static.mongodb.com/com/fonts/MongoDBValueSerif-Regular.woff2', - medium: 'https://static.mongodb.com/com/fonts/MongoDBValueSerif-Medium.woff2', - bold: 'https://static.mongodb.com/com/fonts/MongoDBValueSerif-Bold.woff2', - }, - [SOURCE_CODE_PRO_FAMILY]: { - regular: 'https://static.mongodb.com/com/fonts/SourceCodePro-Regular.ttf', - medium: 'https://static.mongodb.com/com/fonts/SourceCodePro-Medium.ttf', - }, -}; - -const globalCSS = css` - body { - font-size: 16px; - line-height: 24px; - } - - .hidden { - display: inherit !important; - height: 0; - margin: 0; - padding: 0; - visibility: hidden !important; - width: 0; - } - - .header-buffer { - scroll-margin-top: ${theme.header.navbarScrollOffset}; - } - - ${'' /* Originally from docs-tools navbar.css */} - img.hide-medium-and-up, - img.show-medium-and-up { - max-width: 100%; - } - .hide-medium-and-up { - display: none !important; - } - .show-medium-and-up { - display: block !important; - } - @media (max-width: 767px) { - .hide-medium-and-up { - display: block !important; - } - .show-medium-and-up { - display: none !important; - } - } - - /* Fonts that are usually provided by flora through consistent-nav */ - @font-face { - font-family: ${EUCLID_CIRCULAR_FAMILY}; - src: url(${FONT_SRCS[EUCLID_CIRCULAR_FAMILY].regular}) format('woff2'); - font-weight: normal; - font-display: swap; - } - - @font-face { - font-family: ${EUCLID_CIRCULAR_FAMILY}; - src: url(${FONT_SRCS[EUCLID_CIRCULAR_FAMILY].medium}) format('woff2'); - font-weight: 500; - font-display: swap; - } - - 
@font-face { - font-family: ${MONGODB_VALUE_SERIF_FAMILY}; - src: url(${FONT_SRCS[MONGODB_VALUE_SERIF_FAMILY].regular}) format('woff2'); - font-weight: normal; - font-display: swap; - } - - @font-face { - font-family: ${MONGODB_VALUE_SERIF_FAMILY}; - src: url(${FONT_SRCS[MONGODB_VALUE_SERIF_FAMILY].medium}) format('woff2'); - font-weight: 500; - font-display: swap; - } - - @font-face { - font-family: ${MONGODB_VALUE_SERIF_FAMILY}; - src: url(${FONT_SRCS[MONGODB_VALUE_SERIF_FAMILY].bold}) format('woff2'); - font-weight: bold; - font-display: swap; - } - - @font-face { - font-family: ${SOURCE_CODE_PRO_FAMILY}; - src: url(${FONT_SRCS[SOURCE_CODE_PRO_FAMILY].regular}) format('truetype'); - font-weight: normal; - font-display: swap; - } - - @font-face { - font-family: ${SOURCE_CODE_PRO_FAMILY}; - src: url(${FONT_SRCS[SOURCE_CODE_PRO_FAMILY].medium}) format('truetype'); - font-weight: 500; - font-display: swap; - } -`; - -const GlobalGrid = styled('div')` - display: grid; - grid-template-areas: - 'header header' - 'sidenav contents'; - grid-template-columns: auto 1fr; - grid-template-rows: auto 1fr; -`; - -const DefaultLayout = ({ - children, - pageContext: { page, slug, repoBranches, template, associatedReposInfo, isAssociatedProduct }, - metadata, -}) => { - const { sidenav } = getTemplate(template); - const { chapters, guides, slugToTitle, toctree, eol } = metadata; - - const pageTitle = React.useMemo(() => page?.options?.title || slugToTitle?.[slug === '/' ? 'index' : slug], [slug]); // eslint-disable-line react-hooks/exhaustive-deps - - return ( - <> - - - - - - {sidenav && ( - - )} - - - {children} - - - - - - ); -}; - -DefaultLayout.propTypes = { - children: PropTypes.oneOfType([PropTypes.arrayOf(PropTypes.node), PropTypes.node]).isRequired, - pageContext: PropTypes.shape({ - chapters: PropTypes.object, - guides: PropTypes.object, - page: PropTypes.shape({ - options: PropTypes.object, - }).isRequired, - slug: PropTypes.string, - template: PropTypes.string, - }).isRequired, -}; - -export default DefaultLayout; diff --git a/src/utils/generate-path-prefix.js b/src/utils/generate-path-prefix.js index 6eda621f4..1793b01b0 100644 --- a/src/utils/generate-path-prefix.js +++ b/src/utils/generate-path-prefix.js @@ -1,9 +1,6 @@ const { normalizePath } = require('./normalize-path'); -const generatePathPrefix = ( - { commitHash, parserBranch, patchId, pathPrefix, project: parserProject, snootyBranch, user }, - project -) => { +const generatePathPrefix = ({ commitHash, parserBranch, patchId, pathPrefix, project, snootyBranch, user }) => { // If user specified a PATH_PREFIX environment variable, ensure it begins with a prefix and use if (pathPrefix) { if (pathPrefix.startsWith('/')) { @@ -16,16 +13,12 @@ const generatePathPrefix = ( if (commitHash) prefix += `${commitHash}`; if (patchId) prefix += `/${patchId}`; - // Uses the passed in project value if siteMetadata's project is undefined. - // This is to maintain usability for both local/prod builds (uses siteMetadata) and Gatsby Cloud builds - // (uses Snooty metadata for individual project + branch combination). - const projectSlug = parserProject ?? project; // Include the Snooty branch in pathPrefix for Snooty developers. mut automatically // includes the git branch of the repo where it is called, so this parameter must // be present in the URL's path prefix in order to be mut-compatible. 
   //
   // TODO: Simplify this logic when Snooty development is staged in integration environment
-  const base = `${projectSlug}/${user}`;
+  const base = `${project}/${user}`;
   const path = process.env.GATSBY_SNOOTY_DEV
     ? `/${prefix}/${parserBranch}/${base}/${snootyBranch}`
     : `/${prefix}/${base}/${parserBranch}`;
diff --git a/src/utils/get-gatsby-preview-link.js b/src/utils/get-gatsby-preview-link.js
deleted file mode 100644
index e1e2f8bdf..000000000
--- a/src/utils/get-gatsby-preview-link.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * If we're in preview mode, we build the pages of each project and branch of the site within
- * its own namespace so each author can preview their own pages e.g.
- * /project1/branch1/doc-path
- * /project2/branch2/doc-path
- *
- * So to navigate with the namespaced site, we add to each link the current project and branch
- * the user is browsing in.
- */
-const getGatsbyPreviewLink = (to, location) => {
-  const projectAndBranchPrefix = `/` + location.pathname.split(`/`).slice(1, 3).join(`/`);
-  if (!to.startsWith(projectAndBranchPrefix)) {
-    to = projectAndBranchPrefix + to;
-  }
-  return to;
-};
-
-module.exports = { getGatsbyPreviewLink };
diff --git a/src/utils/is-gatsby-preview.js b/src/utils/is-gatsby-preview.js
deleted file mode 100644
index f6e1c7b65..000000000
--- a/src/utils/is-gatsby-preview.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/**
- * Returns `true` if the build is a preview build for Gatsby Cloud.
- */
-const isGatsbyPreview = () => process.env.GATSBY_IS_PREVIEW === 'true';
-
-module.exports = { isGatsbyPreview };
diff --git a/src/utils/setup/save-asset-files.js b/src/utils/setup/save-asset-files.js
index 08633345e..57645a71a 100644
--- a/src/utils/setup/save-asset-files.js
+++ b/src/utils/setup/save-asset-files.js
@@ -1,9 +1,7 @@
 const fs = require('fs').promises;
 const path = require('path');
-const { isGatsbyPreview } = require('../is-gatsby-preview');
 
 const GATSBY_IMAGE_EXTENSIONS = ['webp', 'png', 'avif'];
-const isPreview = isGatsbyPreview();
 
 const saveFile = async (file, data) => {
   // save files both to "public" and "src/images" directories
@@ -13,8 +11,7 @@ const saveFile = async (file, data) => {
   });
   await fs.writeFile(path.join('public', file), data, 'binary');
 
-  const pathList =
-    !isPreview && GATSBY_IMAGE_EXTENSIONS.some((ext) => file.endsWith(ext)) ? ['src', 'images'] : ['public'];
+  const pathList = GATSBY_IMAGE_EXTENSIONS.some((ext) => file.endsWith(ext)) ? ['src', 'images'] : ['public'];
   await fs.mkdir(path.join(...pathList, path.dirname(file)), {
     recursive: true,
   });
diff --git a/tests/unit/ChangeList.test.js b/tests/unit/ChangeList.test.js
index 3b3f90382..79b33e025 100644
--- a/tests/unit/ChangeList.test.js
+++ b/tests/unit/ChangeList.test.js
@@ -7,7 +7,6 @@ import { mockChangelog, mockDiff } from './data/OpenAPIChangelog';
 
 jest.mock('../../src/utils/use-snooty-metadata', () => () => ({
   openapi_pages: ['reference/api-resources-spec/v2'],
-  project: '',
 }));
 
 const useStaticQuery = jest.spyOn(Gatsby, 'useStaticQuery');
@@ -18,6 +17,7 @@ useStaticQuery.mockImplementation(() => ({
       parserBranch: '',
       patchId: '',
       pathPrefix: '',
+      project: '',
       snootyBranch: '',
       user: '',
     },
diff --git a/tests/unit/OpenAPIChangelog.test.js b/tests/unit/OpenAPIChangelog.test.js
index ba3866d6f..cb1cfb33d 100644
--- a/tests/unit/OpenAPIChangelog.test.js
+++ b/tests/unit/OpenAPIChangelog.test.js
@@ -29,7 +29,6 @@ const getComboboxOptionStrings = (o) => {
 
 jest.mock('../../src/utils/use-snooty-metadata', () => () => ({
   openapi_pages: ['reference/api-resources-spec/v2'],
-  project: '',
 }));
 
 /* Aggregate all Resources in changelog for frontend filter */
@@ -47,6 +46,7 @@ useStaticQuery.mockImplementation(() => ({
       parserBranch: '',
       patchId: '',
       pathPrefix: '',
+      project: '',
       snootyBranch: '',
       user: '',
       snootyEnv: 'production',
diff --git a/tests/unit/utils/generate-path-prefix.test.js b/tests/unit/utils/generate-path-prefix.test.js
index b686d0422..9579865cc 100644
--- a/tests/unit/utils/generate-path-prefix.test.js
+++ b/tests/unit/utils/generate-path-prefix.test.js
@@ -11,6 +11,7 @@ describe('path prefix testing', () => {
   const pathPrefixSlash = '/PATH_PREFIX_SLASH';
   const siteMetadata = {
     parserBranch,
+    project,
     snootyBranch,
     user,
   };
@@ -19,7 +20,7 @@ describe('path prefix testing', () => {
     let prefix;
 
     it('should generate a prefix when none is provided', () => {
-      prefix = generatePathPrefix(siteMetadata, project);
+      prefix = generatePathPrefix(siteMetadata);
       expect(prefix).toBe(`/${project}/${user}/${parserBranch}`);
     });
 
@@ -34,7 +35,7 @@ describe('path prefix testing', () => {
 
     it('should generate a different prefix if GATSBY_SNOOTY_DEV is enabled', () => {
       expect(process.env.GATSBY_SNOOTY_DEV).toBe('true');
-      prefix = generatePathPrefix(siteMetadata, project);
+      prefix = generatePathPrefix(siteMetadata);
       expect(prefix).toBe(`/${parserBranch}/${project}/${user}/${snootyBranch}`);
     });
   });
@@ -42,26 +43,26 @@ describe('path prefix testing', () => {
   it('should included the commit hash, if specified', () => {
     expect(process.env.GATSBY_SNOOTY_DEV).toBeUndefined();
     siteMetadata.commitHash = commitHash;
-    prefix = generatePathPrefix(siteMetadata, project);
+    prefix = generatePathPrefix(siteMetadata);
     expect(prefix).toBe(`/${commitHash}/${project}/${user}/${parserBranch}`);
   });
 
   it('should included the patch ID, if specified', () => {
     siteMetadata.patchId = patchId;
-    prefix = generatePathPrefix(siteMetadata, project);
+    prefix = generatePathPrefix(siteMetadata);
     expect(prefix).toBe(`/${commitHash}/${patchId}/${project}/${user}/${parserBranch}`);
   });
 
   describe('when using a defined path prefix vairable', () => {
     it('should prepend a slash if the variable does not include one', () => {
       siteMetadata.pathPrefix = pathPrefix;
-      prefix = generatePathPrefix(siteMetadata, project);
+      prefix = generatePathPrefix(siteMetadata);
       expect(prefix).toBe(`/${pathPrefix}`);
     });
 
     it('should not prepend an additional slash', () => {
       siteMetadata.pathPrefix = pathPrefixSlash;
-      prefix = generatePathPrefix(siteMetadata, project);
+      prefix = generatePathPrefix(siteMetadata);
       expect(prefix).toBe(pathPrefixSlash);
     });
   });