diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index 1307600..0000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,10 +0,0 @@

stages:
  - build

rebuild:portfolio-site:
  stage: build
  image: curlimages/curl
  only:
    - master
  script:
    - "curl -X POST -F token=${TRIGGER_TOKEN} -F ref=master https://gitlab.com/api/v4/projects/19260161/trigger/pipeline"

diff --git a/1. Expo with VirtualBox and Genymotion/README.md b/1. Expo with VirtualBox and Genymotion/README.md deleted file mode 100644 index 4520242..0000000 --- a/1. Expo with VirtualBox and Genymotion/README.md +++ /dev/null @@ -1,82 +0,0 @@

---
title: "React Native/Expo with VirtualBox and Genymotion"
tags: ["android", "react-native", "virtualbox", "expo"]
license: "public-domain"
slug: "react-native-with-virtualbox"
canonical_url: "https://haseebmajid.dev/blog/react-native-with-virtualbox"
date: "2018-09-03"
published: true
cover_image: "images/cover.jpg"
---

My home PC runs Windows for various conveniences, such as gaming. However, for development, I run an Ubuntu virtual machine (VM) and Genymotion (on Windows) for testing my app. Genymotion also uses VirtualBox to run its Android emulators. So we need to work out how to let two VMs running on the same host (Ubuntu and the Android emulator) communicate with each other.

**Please Note:** This will also work for VMware Player.

## Solution

There are a few networking options we can choose from when setting up a VM.

- **NAT**: Allows your VMs to communicate with the outside world (outbound network), but your host machine (Windows) has no way to access this network.

- **Bridged**: Allows your VMs to access a network using the host computer’s Ethernet adapter. However, this adds a bit of complexity, as your VM needs its own identity on the network; essentially, it's treated like another physical machine.

- **Host-only**: Acts as a hybrid between the other two options. It allows your VM to communicate with your host machine. So essentially all we need for multiple VMs to communicate with each other is to turn on Host-only networking (on all VMs). This allows them to communicate with the host machine, which will act as a “bridge” (no pun intended).

![Network Diagram](images/network.png)

## Prerequisites

- [VirtualBox](https://www.virtualbox.org/wiki/Downloads) installed on the host machine
- An [Expo](https://docs.expo.io/versions/latest/workflow/create-react-native-app) app in the (Ubuntu) development VM
- [Genymotion](https://www.genymotion.com/desktop/) installed and set up on the host machine

## VirtualBox

- Open up VirtualBox
- Click on the development VM and open the “Settings”
- Go to “Network” and set the following two adapters, as shown in Figure 1
- Adapter 1: “Host-only Adapter”
- Adapter 2: “NAT”

NAT allows the Ubuntu VM to use the host machine's internet access; it lets it communicate with the outside world. The Host-only Adapter allows the Ubuntu VM to communicate with other VMs, such as the Genymotion Android emulator.

![Figure 1: Ubuntu VM network settings](images/ubuntu_network1.png)
![Figure 1: Ubuntu VM network settings](images/ubuntu_network2.png)

## Genymotion

Genymotion's network settings are pretty much the same as the Ubuntu VM's, as shown in Figure 2. Please note that the Host-only adapters can be the same (i.e. #7).
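If you'd rather script these settings than click through the GUI, VirtualBox's `VBoxManage` CLI can apply the same configuration. A rough sketch, assuming the VM is powered off; the VM name "Ubuntu" and the adapter name `vboxnet0` are placeholders for your own values:

```bash
# List existing host-only interfaces (e.g. vboxnet0 on Linux,
# "VirtualBox Host-Only Ethernet Adapter" on Windows).
VBoxManage list hostonlyifs

# Create a new host-only interface if none exists yet.
VBoxManage hostonlyif create

# Mirror the settings from Figure 1: adapter 1 host-only, adapter 2 NAT.
VBoxManage modifyvm "Ubuntu" --nic1 hostonly --hostonlyadapter1 "vboxnet0"
VBoxManage modifyvm "Ubuntu" --nic2 nat
```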
- -![Figure 2: Android emulator VM network settings](images/phone_network1.png) -![Figure 2: Android emulator VM network settings](images/phone_network2.png) - -## Expo - -- Start your Genymotion Android emulator. -- Start your Ubuntu VM and open a terminal. -- Replace the IP Address with your own one, you can find your IP address on the Android emulator. As shown in Figure 3. - -```bash -adb connect 192.168.112.101 -exp start -exp android -``` - -![Figure 3: Genymotion Android emulator’s IP address](images/genymotion_ip.png) - -That’s it you should see your app running on the Genymotion Android emulator now. You can see how to start the -application below in Figure 4. - -![Figure 4: Starting Expo application on Android emulator](images/connecting.gif) - -## Appendix - -- GIFs created with [screentogif](https://www.screentogif.com/) -- [Detailed post about VirtualBox Networking](http://bertvv.github.io/notes-to-self/2015/09/29/virtualbox-networking-an-overview/) -- Drawing made with [draw.io](https://www.draw.io/) diff --git a/1. Expo with VirtualBox and Genymotion/images/connecting.gif b/1. Expo with VirtualBox and Genymotion/images/connecting.gif deleted file mode 100755 index e20a9bc..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/connecting.gif and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/cover.jpg b/1. Expo with VirtualBox and Genymotion/images/cover.jpg deleted file mode 100644 index 7d83012..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/cover.jpg and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/cover.png b/1. Expo with VirtualBox and Genymotion/images/cover.png deleted file mode 100755 index 332e47b..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/cover.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/genymotion_ip.png b/1. Expo with VirtualBox and Genymotion/images/genymotion_ip.png deleted file mode 100755 index bd95b52..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/genymotion_ip.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/network.png b/1. Expo with VirtualBox and Genymotion/images/network.png deleted file mode 100755 index 9b085bc..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/network.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/phone_network1.png b/1. Expo with VirtualBox and Genymotion/images/phone_network1.png deleted file mode 100755 index 04773a3..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/phone_network1.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/phone_network2.png b/1. Expo with VirtualBox and Genymotion/images/phone_network2.png deleted file mode 100755 index 390d5c5..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/phone_network2.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/ubuntu_network1.png b/1. Expo with VirtualBox and Genymotion/images/ubuntu_network1.png deleted file mode 100755 index 404cf65..0000000 Binary files a/1. Expo with VirtualBox and Genymotion/images/ubuntu_network1.png and /dev/null differ diff --git a/1. Expo with VirtualBox and Genymotion/images/ubuntu_network2.png b/1. Expo with VirtualBox and Genymotion/images/ubuntu_network2.png deleted file mode 100755 index bcbe75b..0000000 Binary files a/1. 
Expo with VirtualBox and Genymotion/images/ubuntu_network2.png and /dev/null differ diff --git a/10. Redux with React Navigation/README.md b/10. Redux with React Navigation/README.md deleted file mode 100644 index 4563800..0000000 --- a/10. Redux with React Navigation/README.md +++ /dev/null @@ -1,245 +0,0 @@

---
title: "Theme your Expo app with Redux and React Navigation"
tags: ["redux", "react-native", "react-navigation", "expo"]
license: "public-domain"
slug: "theme-expo-app-with-redux-and-react-navigation"
canonical_url: "https://haseebmajid.dev/blog/theme-expo-app-with-redux-and-react-navigation"
date: "2018-12-23"
published: true
cover_image: "images/cover.jpg"
---

Recently, whilst developing a React Native app (with Expo), I built a simple tab navigator using the React Navigation library. I wanted to theme the app so that the header would change colour depending on which page you're on. For example, on the primary page it would be red, and on the secondary page, when you change tabs, the header would become blue.

I ended up being able to do it using Redux. In this article, I will show you how you can theme your Expo app using Redux with React Navigation.

This article is written in British English; however, the code is written in American English to keep it consistent (colour vs color).

## React Navigation

React Navigation is a library that helps you simplify app navigation. The main reason for using this library is that it's written purely in JavaScript, so no native code (Swift/Android) is required to make it work. Also, it's the [recommended navigation library](https://docs.expo.io/versions/latest/guides/routing-and-navigation) for Expo.

## Redux

[Redux](https://redux.js.org/) is used to manage global state across a React app; it can be used with both React and React Native. Of course, in this example, we will be using Redux with a React Native app.

![Redux Store, from https://css-tricks.com/learning-react-redux/](images/redux-store.png)

The diagram above explains pretty well why you may need to use Redux. Passing state can be tricky in more complicated React Native apps, with lots of components and sub-components. Using Redux, we can change the state in one component and Redux will update the state in all the other components.

There are many great tutorials about Redux; I particularly liked this [one](https://www.youtube.com/watch?v=KcC8KZ_Ga2M). Here is a brief summary of how Redux will work with this app:

- Redux sets the initial (state) colour to red
- Change tabs from main to the secondary page
- This dispatches an action to the Redux store (the action is called `toggleTheme`)
- The store then calls a reducer
- The reducer (also called `toggleTheme`) updates the old state to a new state, changing the colour from red to blue
- Redux updates the state in all the components

The sketch below boils this cycle down to plain JavaScript, and the GIF that follows might help clear up how it works.
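Here is a rough, self-contained sketch of that cycle; it mirrors the names used in this app, but it is an illustration rather than the app's actual files:

```js
const { createStore } = require("redux");

const TOGGLE_THEME = "TOGGLE_THEME";
const RED = { name: "red", hexCode: "#DE5448" };
const BLUE = { name: "blue", hexCode: "#498AF4" };

// Action creator: wraps the current theme in a payload.
const toggleTheme = (theme) => ({ type: TOGGLE_THEME, payload: theme });

// Reducer: swaps red <-> blue whenever TOGGLE_THEME is dispatched.
const themeReducer = (state = RED, action) => {
  if (action.type !== TOGGLE_THEME) return state;
  return action.payload.name === "red" ? BLUE : RED;
};

const store = createStore(themeReducer);
store.subscribe(() => console.log("Theme is now", store.getState().name));

// A tab change would dispatch this, flipping the stored colour to blue.
store.dispatch(toggleTheme(store.getState()));
```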
- -![Redux Flow, from http://slides.com/jenyaterpil/redux-from-twitter-hype-to-production#](https://camo.githubusercontent.com/5aba89b6daab934631adffc1f301d17bb273268b/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6d656469612d702e736c69642e65732f75706c6f6164732f3336343831322f696d616765732f323438343535322f415243482d5265647578322d7265616c2e676966) - -## Prerequisite - -- An Expo/React Native app -- An Android emulator/device to test on -- Install the following dependencies - -```text -"react-navigation": "^2.18.0", -"react-redux": "^5.1.1", -"redux": "^4.0.1", -``` - -## Solution - -Now to the interesting part of this article let's take a look at the code. - -### Structure - -The project structure will look like this - -```text -├── src -│ ├── actions -│ ├── components -│ ├── containers -│ ├── reducers -│ ├── screens -│ ├── store -| └── themes -├── package.json -├── App.js -``` - -### themes - -```js -export const COLORS = { - red: { name: "red", hexCode: "#DE5448" }, - blue: { name: "blue", hexCode: "#498AF4" }, -}; -``` - -Here we define our two colours that will be used as themes, red and blue. I have given each colour a name because it makes the toggle logic easier to follow. - -### reducers - -```js:title=src/reducers/Theme.js file=./source_code/src/reducers/Theme.js - -``` - -A reducer is a pure function which takes some state and returns a new state. In this example -it gets passed the current theme colour and swaps it to the new theme colour. - -In this example if the action is `TOGGLE_THEME`, we get the colour name from the payload and using a switch -statement we swap the colours over. So if the current colour is red we update the state (`colorData`) to be -blue. - -```js:title=src/reducers/index.js file=./source_code/src/reducers/index.js - -``` - -Here we combine all of our reducers, in this example, we are only using the one reducer but if we had multiple reducers, The `combineReducers` function to would be necessary to combine them together. In this -example we can simply add new reducers to the function as and when we need them. - -### actions - -```js -export const TOGGLE_THEME = "TOGGLE_THEME"; -``` - -This file defines all the actions we can dispatch to our store. In this example, we only need one action to -toggle our theme. - -```js:title=src/actions/index.js file=./source_code/src/actions/index.js - -``` - -In this file, we define our actions, so here we have a single action `toggleTheme`, which takes a -theme as input and passes it as our payload, hence to access the name of the colour we use `action.payload.name` in our reducer. - -### store - -```js:title=src/store/index.js file=./source_code/src/store/index.js - -``` - -The Redux store is used to store the current state for our app, we have to link our store with our reducers we can do this using the `createStore` function and import the reducers from `reducers/index.js`. - -### App.js - -```js:title=App.js file=./source_code/App.js - -``` - -This acts as the main file for our app, to use Redux with our app we must wrap around `Provider` tags and -set the store props to our `store/index.js` file. The `` contains the logic for our two screens and the tab navigator. - -### components - -```js:title=src/components/CustomTabNavigator.js file=./source_code/src/components/CustomTabNavigator.js - -``` - -Here is where we use react navigation to create our tab navigator. We define two screens called A and B, -each screen has a different tab colour, red for A and blue for B. 
The main part of this file is the following:

```jsx
tabBarOnPress: ({ defaultHandler }) => {
  store.dispatch(toggleTheme(COLORS.red));
  defaultHandler();
},
```

On tab change (from A -> B), we detect the tab press and dispatch the `toggleTheme` action to the Redux store. So when we change tabs from A -> B, the colour in the store will change from Red -> Blue, and vice versa for B -> A.

One other thing to note: the tab colour is set using the following function, and the colour is passed in via `tabBarOptions`.

```jsx
const commonTabOptions = (color) => ({
  activeTintColor: "white",
  pressColor: "#fff",
  inactiveTintColor: "#ddd",
  style: {
    backgroundColor: color,
  },
});
```

```jsx
tabBarOptions: commonTabOptions(COLORS.red.hexCode);
```

```js:title=src/components/ToggleTheme.js file=./source_code/src/components/ToggleTheme.js

```

This file does most of the heavy lifting for this app; this is where most of the logic sits. So, first of all, we have a button, which calls `toggleTheme` on a press of the button and passes the current colour (state) as an argument.

```jsx

);
```

Then in the `makeRequest()` function we use [react-native-firebase](https://invertase.io/oss/react-native-firebase/) for the authentication (optional), if you set up the authentication middleware in the Firebase functions. You can use the [following tutorial](https://invertase.io/oss/react-native-firebase/quick-start/existing-project) to get started with the library. The following allows any user of our app to get a token we can send with our HTTP request.

```tsx:title=react_native/ExampleApp/App.tsx
const userCredentials = await firebase.auth().signInAnonymously();
const token = await userCredentials.user.getIdToken();
```

We use `apisauce` to make HTTP requests, but first we must "create" an API object. Here is where we pass our auth token.

**NOTE**: Remember to replace `baseURL` with your URL.

```tsx:title=react_native/ExampleApp/App.tsx
const api = create({
  baseURL: "https://us-central1-exampleapp.cloudfunctions.net",
  headers: { Authorization: `Bearer ${token}` },
  timeout: 10000,
});
```

Then we specify the `/hello` endpoint. The response contains a few parameters; if `ok` is set to `true`, then the request was successful (`2xx` HTTP code).

We then log the response from the server. In reality, you will want to do something more useful than that, but this is just a simple example. All of this code is surrounded by a try/catch, so if a rejected promise is returned, it will be captured by the `catch`.

```tsx:title=react_native/ExampleApp/App.tsx
const response: ApiResponse<{ hello: string }> = await api.post("/hello", {
  name: "Haseeb",
});

const { data, ok, status } = response;
if (ok) {
  console.log("Success", status, data);
} else {
  console.error("error", status);
}
```

**Note**: Sometimes your Cloud Functions may run a bit slower the first time you call them in a while. You need to keep your functions "warm", as they say: as long as the functions are being run, the container they run in stays alive; after a period of inactivity it is destroyed and needs to be recreated, hence a function that hasn't been called for a long time may well be a few seconds slower on its first call.

That's it! We successfully set up a React Native application to use Cloud Functions we deployed on Firebase (with authentication).
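If you want to sanity-check the deployed function outside of the app, you can also hit the endpoint from a terminal. A quick sketch; the URL reuses the placeholder `baseURL` from above, and the token must be a real Firebase ID token obtained by a signed-in client:

```bash
# Placeholder values: substitute your Cloud Functions URL and a real ID token.
TOKEN="<firebase-id-token>"
URL="https://us-central1-exampleapp.cloudfunctions.net/hello"

# Mirrors the POST request made in App.tsx.
curl -X POST "$URL" \
  -H "Authorization: Bearer ${TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"name": "Haseeb"}'
```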
- -## Appendix - -- [Example Firebase source code](https://gitlab.com/hmajid2301/medium/-/tree/master/20.%20React%20Native%20with%20Firebase%20Cloud%20Functions%20and%20Gitlab%C2%A0CI/source_code/firebase) -- [Example React Native source code](https://gitlab.com/hmajid2301/medium/-/tree/master/20.%20React%20Native%20with%20Firebase%20Cloud%20Functions%20and%20Gitlab%C2%A0CI/source_code/react_native/ExampleApp) -- [Example Firebase project](https://gitlab.com/hmajid2301/stegappasaurus-api) diff --git "a/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/cover.jpg" "b/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/cover.jpg" deleted file mode 100644 index f7acaf9..0000000 Binary files "a/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/cover.jpg" and /dev/null differ diff --git "a/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/firebase.gif" "b/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/firebase.gif" deleted file mode 100644 index 43eb060..0000000 Binary files "a/20. React Native with Firebase Cloud Functions and Gitlab\302\240CI/images/firebase.gif" and /dev/null differ diff --git a/21. A Gitlab CI for React Native Gitlab/README.md b/21. A Gitlab CI for React Native Gitlab/README.md deleted file mode 100644 index fbd617d..0000000 --- a/21. A Gitlab CI for React Native Gitlab/README.md +++ /dev/null @@ -1,484 +0,0 @@ ---- -title: "A Gitlab CI file for React Native Apps" -tags: ["react-native", "gitlab", "ci"] -published: true -slug: "gitlab-ci-for-react-native" -canonical_url: "https://haseebmajid.dev/blog/gitlab-ci-for-react-native/" -date: "2020-02-23" -license: "public-domain" -cover_image: images/cover.jpg ---- - -A bit of backstory when I first started developing React Native applications (apps), I found there weren't -any good example of Gitlab CI files. So in this article, I will show you an example `.gitlab-ci.yml` -file you can use with your React Native app. You can of course tweak and makes changes as required by your -project. - -## CI/CD - -> Before we dive straight into the CI file itself, let's do a quicker refresher on some basic concepts. Feel free to skip this section if you are already familiar with CI/CD, Git and Gitlab CI. - -Continuous Integration (CI), is typically defined as making sure all code being integrated into a codebase works. -It usually involves running a set of jobs referred to as a CI pipeline. Some jobs we may run include linting our -code and running unit tests. This is usually done automatically using a tool such as Travis, Circle or even Gitlab. - -One particularly useful use case for this is when others are adding new features to our codebase and we want to check it -still works. We can create a CI pipeline that will run unit tests against the new code automatically when a pull request -(GitHub) or merge request (Gitlab) is opened. This saves us a lot of time, rather than having to copy the new -features/code and then run the tests our selves on our machine. - -Continuous Delivery (CD), is typically an extension of CI to make sure that you can release new changes quickly. -This means automating your release process, such that you can deploy your application at any point of time just -by clicking on a button. - -Continuous Deployment takes CD one step further by requiring no human intervention in deploying our application. 
-You can read more about [this here](https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment). - -### Git - -[Git](https://guides.github.com/introduction/git-handbook/) is a version control system (VCS), it is heavily tied in with CI. -In git, we can make "commits" which are snapshots of our project at its current state. We can later revert back to older commits and -compare files between commits (and much more). Usually every commit we push to Gitlab triggers a CI pipeline run against that current commit. -Git also has this concept of branches, where usually the `master` branch contains our production-ready code and the other branches have -new features being worked on. When our feature branches are ready they are merged into the master branch. Usually, the CI pipeline needs -to be successfully running (green ticks) before this can happen, however. - -### Gitlab CI - -[Gitlab CI](https://docs.gitlab.com/ee/ci/), is defined as a YAML file. In the file, we define "jobs" which can do various -different task. You can read more [here](https://docs.gitlab.com/ee/user/project/pages/getting_started_part_four.html). -Full [reference docs here](https://docs.gitlab.com/ee/ci/yaml/README.html), which details all the different parameters we -can use. To use Gitlab CI within our projects is very straight forward, create a new file `.gitlab-ci.yml` in our project -root and then define our jobs (we will see this a bit later in the article). - -### Example - -![https://docs.gitlab.com/ee/ci/introduction/](https://docs.gitlab.com/ee/ci/introduction/img/gitlab_workflow_example_11_9.png) - -The image above shows an example of a workflow we may use. So we create a new branch for our feature called `feature/add-x`. -We then create our commits (with our new code) and push them to Gitlab. Open a merge request, this triggers the CI -pipeline (from the `.gitlab-ci.yml`) file. In this example, the pipeline fails, perhaps because a unit test failed. This -causes the whole pipeline to fail. - -We then fix our code so the unit tests pass and then create more commits and push them. This then triggers the -pipeline to run again, this time it passes. Now our code is ready to be reviewed and merged into the main branch. -After the code review, it will be merged onto the `master` (main) branch. Then we will trigger the deployment process, -this can also be defined within our CI file. - -## .gitlab-ci.yml - -Now onto the real meat and potatoes of this article, our `.gitlab-ci.yml` file for React Native apps. -Taking a look at an [example application](https://gitlab.com/hmajid2301/stegappasaurus). You can find the `.gitlab-ci.yml` -and `package.json` in the appendix below or follow the link above. Now let's take a look the `.gitlab-ci.yml` file. - -### setup - -```yaml -image: node:8 - -stages: - - pre - - test - - publish - - post - -cache: - key: ${CI_COMMIT_REF_SLUG} - paths: - - node_modules/ - -variables: - DOCKER_DRIVER: overlay2 - -before_script: - - yarn generate-dotenv - - yarn -``` - -First, we specify an `image`, this is the default docker image we will use for our "jobs". -Unless a job specifies an image explicitly in its definition it will use this one. In this -example, we will use `node 8` it already has node, npm and yarn installed. We could probably -upgrade this to `node 10` or even `node 12` (long term releases of node). - -Next, we define all the `stages` of our pipeline, any jobs in the same stage will run -in parallel (at the same time). 
If a job in an earlier stage fails the pipeline won't carry -on to the next and will stop running at the current stage. The stages defined first such -as `pre` and `test` run before stages defined later such as `publish`. -Each job **must** be given a `stage`. - -Next, we define a `cache`, we will cache the `node_modules` for -future jobs (in this pipeline). Gitlab CI injects some -[predefined environment variables](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html), -one of them being `CI_COMMIT_REF_SLUG`. - -We then define a variable `DOCKER_DRIVER: overlay2`, this helps speed our -docker containers a bit because by default it uses `vfs` which is slower -[learn more here](https://docs.gitlab.com/ce/ci/docker/using_docker_build.html#using-the-overlayfs-driver). - -Finally, we define `before_script` which will run before every job unless we specify a -`before_script` within the jobs themselves. In this example, we install our node_modules -using `yarn` and create a `.env` file, we need the `.env` file for a few our the jobs. -The `.env` file is used by React Native to set configuration within the app. - -```json:title=package.json -{ - "name": "stegappasaurus", - "scripts": { - "generate-dotenv": "sh util/generate-dotenv.sh > .env" - } -} -``` - -Where `BUGSNAG_API_KEY` and `CAT_API_KEY` are environment variables which are injected by -Gitlab [more information here](https://docs.gitlab.com/ee/ci/variables/). - -```bash:title=util/generate-dotenv.sh -#!/usr/bin/env bash - -cat << EOF -BUGSNAG_API_KEY=${BUGSNAG_API_KEY} -CAT_API_KEY=${CAT_API_KEY} -EOF -``` - -Where the generated `.env` file will look like. - -```bash -BUGSNAG_API_KEY=1232541 -CAT_API_KEY=abxc-71379991 -``` - -### jobs - -**Note:** For the example application I am showing it has two branches `production` (main) and `master`. - -#### pre - -Now, let's take a look at our jobs in the CI file. The first job is used to close -issues automatically on Gitlab if there is an issue number in the git commit. -It uses the following tool -[gitlab-auto-close-issue](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-close-issue). -Which provides a docker image which contains the script to auto-close your issues. It will also -remove labels from the issue if you want such as "Doing". -This job is only run on the master branch of our project. - -Since we don't need to install any dependencies to run the job `before_script: []` is an empty -list, therefore the default `before_script` defined above won't run in this job. Also since -we define a docker image within the job we don't use the default docker image `node:8`. - -```yaml -close:issue: - image: registry.gitlab.com/gitlab-automation-toolkit/gitlab-auto-close-issue - stage: pre - before_script: [] - only: - - master - script: - - apk add --no-cache --upgrade grep - - ISSUE=$(echo $CI_COMMIT_MESSAGE | grep -oP "(?<=Fixes \#)[0-9]+" || echo '1') - - gitlab_auto_close_issue --issue $ISSUE --remove-label "Doing" --remove-label "To Do" -``` - -The next job automatically creates a merge request (MR) if the commits are not being pushed to `master` or `production` branches. -It will create an MR as WIP with a template we defined in the `.gitlab` folder. We also set the option `--use-issue-name` -where if we have a branch called say `feature/#211` where `#211` is an issue number (for that project). It will take -certain bits of information from that issue and set it on the MR such as `labels`. 
-More information about the tool [gitlab-auto-mr](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr). - -```yaml -create:merge-request: - image: registry.gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr - stage: pre - before_script: [] - except: - - production - - master - - tags - script: - - gitlab_auto_mr -t master -c WIP -d .gitlab/merge_request_templates/merge_request.md -r -s --use-issue-name -``` - -Where the template could look something like this. - -```markdown:title=.gitlab/merge_request_templates/merge_request.md -# Description - - - -## Type - -- [ ] Bug Fix -- [ ] Improvement -- [ ] New Feature - -Fixes # -``` - -### test - -This job called `lint` only runs on MRs not on the master branch i.e. it won't run if create an MR from master to production. -Hence the `except` clause. Finally we run the `lint` command which is defined in our `package.json` file as `eslint src/**/*.{ts,tx,tsx}`. -This will run eslint against all of the code within our `src` folder. - -```yaml -lint: - stage: test - only: - - merge_requests - except: - variables: - - $CI_COMMIT_REF_NAME =~ /^master/ - script: - - yarn lint -``` - -Then `lint:code-formatter` checks our code against `prettier` and see's if it's compliant with the code formatter. - -```yaml -lint:code-formatter: - stage: test - only: - - merge_requests - except: - variables: - - $CI_COMMIT_REF_NAME =~ /^master/ - script: - - yarn code-formatter-check -``` - -Then we check all of our TS is valid, by running `tsc --project . --noEmit --pretty --skipLibCheck`. -To make sure there aren't any type mismatches. - -```yaml -lint:types: - stage: test - only: - - merge_requests - except: - variables: - - $CI_COMMIT_REF_NAME =~ /^master/ - script: - - yarn types-check -``` - -We run our unit tests using `jest` (our test runner). We also use the `--silent` flag to -hide various warnings raised by components we are testing. Like all the other jobs in this `stage` we -only run this job in an MR. - -```yaml -tests:unit: - stage: test - only: - - merge_requests - except: - variables: - - $CI_COMMIT_REF_NAME =~ /^master/ - script: - - yarn tests --silent -``` - -Finally, almost the same as the job above, except it only runs on the `master` branch it gets the -code coverage from unit tests and stores the result using `coverage` (with some Regex). Where the `coverage` script is -defined as `jest --coverage` in `package.json`. [More information here](https://docs.gitlab.com/ee/user/project/pipelines/settings.html#test-coverage-parsing). -The code coverage can be shown on a badge, such as [here](https://gitlab.com/hmajid2301/stegappasaurus/badges/master/coverage.svg). - -```yaml -tests:coverage: - stage: test - only: - - master - script: - - yarn coverage --silent - coverage: /All\sfiles.*?\s+(\d+.\d+)/ -``` - -### publish - -Then on to our next stage. The job below actually publishes our app to the Play Store. It will only run when we've -tagged one of our commits for release i.e. `release/1.0.0`. This will only be done on the `production` branch. -We are also using another docker image which has Android and various our dependencies need for our React Native app. - -I won't do a massive deep dive into this job because I've already written an article about it -[here](https://dev.to/hmajid2301/auto-publish-react-native-app-to-android-play-store-using-gitlab-ci-44mc). 
But essentially what happens is we have various variables defined in our project in Gitlab, such as our keystore stored in base64 and the keystore settings such as the username and password. To use the tool to auto-publish our app I need to have a `play-store.json` file, and because my app uses `react-native-firebase` I need a `google-services.json` file.

I then generate a `licenses.json` file using the following command: `npm-license-crawler --onlyDirectDependencies --omitVersion --json src/data/licenses.json`. There is a licenses view within my application which lists all of the main dependencies, so I can properly credit those libraries; this task generates the file that view reads.

I then generate a `gradle.properties` file using `sh util/generate-gradle-properties.sh > android/gradle.properties`, very similar to the `.env` script we looked at above. The generator script looks something like:

```bash:title=util/generate-gradle-properties.sh
#!/usr/bin/env bash

cat << EOF
android.useAndroidX=true
android.enableJetifier=true
org.gradle.jvmargs=-Xms1g
MYAPP_RELEASE_STORE_FILE=stegappasaurus.keystore
MYAPP_RELEASE_STORE_PASSWORD=${ANDROID_KEYSTORE_PASSWORD}
MYAPP_RELEASE_KEY_ALIAS=${ANDROID_KEYSTORE_ALIAS}
MYAPP_RELEASE_KEY_PASSWORD=${ANDROID_KEYSTORE_KEY_PASSWORD}
EOF
```

This means we can reference the variables for the keystore within our `build.gradle` files without needing to hardcode the values, and once again this file is generated from CI variables stored on the project itself. For example, in `app/build.gradle` I have the following defined.

```groovy:title=android/app/build.gradle
android {
  signingConfigs {
    release {
      if (project.hasProperty("MYAPP_RELEASE_STORE_FILE")) {
        storeFile file(MYAPP_RELEASE_STORE_FILE)
        storePassword MYAPP_RELEASE_STORE_PASSWORD
        keyAlias MYAPP_RELEASE_KEY_ALIAS
        keyPassword MYAPP_RELEASE_KEY_PASSWORD
      }
    }
  }
}
```

We then publish the application using the `publish-package` script, which runs `yarn run bundle && bash util/publish-package.sh`, where `publish-package.sh` looks like:

```bash:title=util/publish-package.sh
#!/usr/bin/env bash

echo $CI_COMMIT_TAG

if [[ $CI_COMMIT_TAG == *"alpha"* ]]; then
  echo "Publishing Package: Alpha"
  cd android && ./gradlew publish --track alpha
elif [[ $CI_COMMIT_TAG == *"beta"* ]]; then
  echo "Publishing Package: Beta"
  cd android && ./gradlew publish --track beta
elif [[ $CI_COMMIT_TAG == *"release"* ]]; then
  echo "Publishing Package: Production"
  cd android && ./gradlew publish --track production
else
  echo "Publishing Package: Internal"
  cd android && ./gradlew publish --track internal
fi
```

If the git tag is `release/1.0.0` then we will publish this directly onto the production track. It also checks if the tag contains `alpha` or `beta`; if so, we publish it to the corresponding track.

```bash
echo "Publishing Package: Production"
cd android && ./gradlew publish --track production
```

Finally, we make the `assets` and `build` folders available as [artifacts](https://docs.gitlab.com/ee/user/project/pipelines/job_artifacts.html) for jobs in future stages.
- -```yaml -publish:android:package: - stage: publish - image: reactnativecommunity/react-native-android - only: - - /^release/.*$/ - script: - - echo fs.inotify.max_user_watches=524288 | tee -a /etc/sysctl.conf && sysctl -p - - cd android - - base64 -d $ANDROID_KEYSTORE > app/stegappasaurus.keystore - - export VERSION=$(cat app.json | jq -r .version) - - cat $PLAY_STORE_JSON > app/play-store.json - - cat $FIREBASE_GOOGLE_SERVICES_JSON > app/google-services.json - - yarn generate-licenses - - yarn generate-gradle-properties - - yarn publish-package --no-daemon - artifacts: - paths: - - ./android/app/build/ - - ./android/app/src/main/assets/ -``` - -### post - -Onto our final stage, the first job creates a Gitlab release. This job is again only run on release tags, but only -for "final" release hence the `except` clause. It won't run if the git tag contains `beta` or `alpha` in its name. -The [gitlab-auto-release](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-release) tool much like the other -tools above is used to automate this part of the Gitlab workflow. - -The script also can use `CHANGELOG.md`, if it follows -[keepachangelog](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-release) format. It takes the changelog from that -file and copies into our release. Only for the matching version name i.e. `release/1.0.0`, would look for `1.0.0` in our changelog file. -You can find an example release created by this script [here](https://gitlab.com/hmajid2301/stegappasaurus/-/tags/release%2F1.0.1). -Also if you specify a job name after the `--artifacts` argument it will link that jobs artifacts in this release (if it was run in the -same pipeline as this job). In this example, we want to include our Android app build (APK/AAB). - -```yaml -create:gitlab:release: - image: registry.gitlab.com/gitlab-automation-toolkit/gitlab-auto-release - stage: post - only: - - /^release/.*$/ - except: - variables: - - $CI_COMMIT_TAG =~ /beta/ - - $CI_COMMIT_TAG =~ /alpha/ - before_script: [] - script: - - gitlab_auto_release -c CHANGELOG.md -d "This was auto-generated by the gitlab-auto-release tool, https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-release." --artifacts "publish:android:package" -``` - -Our final job in this stage again only runs on release tags. It publishes our source maps to [Bugsnag](https://www.bugsnag.com/). -Which is a bug tracking tool. When our app is published to the Play store the JavaScript is minified and so Bugsnag cannot -give us a proper stack trace without the source maps. We must "tag" each upload with a version, hence we look in `app.json` file -for the current app version. This job requires artifacts from the previous android publishing job `publish:android:package`, -hence we mark it a dependency in `dependencies`. We need access to the bundle generated in the `assets` folder from the -previous job. Rather than repeat the same "actions" here to generate the files we need. -To speed up our CI we will just "copy" the files into the job by using artifacts. 
```yaml
publish:bugsnag:sourcemaps:
  stage: post
  only:
    - /^release/.*$/
  script:
    - apt update && apt install -y jq
    - export VERSION=$(cat app.json | jq -r .version)
    - curl https://upload.bugsnag.com/react-native-source-map -F apiKey=${BUGSNAG_API_KEY} -F appVersion=${VERSION} -F dev=false -F platform=android -F sourceMap=@android/app/src/main/assets/index.map -F bundle=@android/app/src/main/assets/index.bundle -F projectRoot=`pwd`
    - yarn run bugsnag-sourcemaps upload --api-key=${BUGSNAG_API_KEY} --app-version=${VERSION} --minifiedFile=android/app/build/generated/assets/react/release/index.android.bundle --source-map=android/app/build/generated/sourcemaps/react/release/index.android.bundle.map --minified-url=index.android.bundle --upload-sources
  dependencies:
    - publish:android:package
```

### other

Finally, we have a Gitlab-defined job called `pages`, which uses Gitlab Pages to publish documentation for this application. It publishes the static website present in the `public` folder. The documentation is built using [docz](https://www.docz.site). By default, you can access pages at https://hmajid2301.gitlab.io/stegappasaurus, i.e. username.gitlab.io/project_name, but I have a Google domain and, using a `CNAME`, you can also view the website at `https://stegappasaurus.haseebmajid.dev/`.

Since this is a special job, it runs at the very end of our pipeline, and it only runs on the `master` branch.

```yaml
pages:
  only:
    - master
  before_script:
    - yarn
  script:
    - yarn docs-build
    - mv .docz/dist/* public/
  artifacts:
    paths:
      - public
```

That's it! That is one example of a `.gitlab-ci.yml` file you can use for your React Native projects.

## Appendix

- [Example Project](https://gitlab.com/hmajid2301/stegappasaurus)
- [Cover image](https://about.gitlab.com/images/ci/ci-cd-test-deploy-illustration_2x.png)

diff --git a/21. A Gitlab CI for React Native Gitlab/images/cover.jpg b/21. A Gitlab CI for React Native Gitlab/images/cover.jpg deleted file mode 100644 index 7761922..0000000 Binary files a/21. A Gitlab CI for React Native Gitlab/images/cover.jpg and /dev/null differ diff --git a/21. A Gitlab CI for React Native Gitlab/images/cover_image.png b/21. A Gitlab CI for React Native Gitlab/images/cover_image.png deleted file mode 100644 index c785ef9..0000000 Binary files a/21. A Gitlab CI for React Native Gitlab/images/cover_image.png and /dev/null differ diff --git a/22. Deploy Docz on Gitlab Pages/README.md b/22. Deploy Docz on Gitlab Pages/README.md deleted file mode 100644 index a8e5a75..0000000 --- a/22. Deploy Docz on Gitlab Pages/README.md +++ /dev/null @@ -1,221 +0,0 @@

---
title: "Deploy Docz on Gitlab Pages"
tags: ["docz", "gitlab", "ci"]
license: "public-domain"
slug: "deploy-docz-gitlab-page"
canonical_url: "https://haseebmajid.dev/blog/deploy-docz-gitlab-page"
date: "2020-03-28"
published: true
cover_image: "images/cover.jpg"
---

In this article, I will show you how you can deploy a Docz website on Gitlab Pages using `.gitlab-ci.yml`. Most of this article should be applicable to GitHub Pages as well.

## Docz

[Docz](https://www.docz.site/) is a tool powered by Gatsby; it aims to make it easier to document your project. It uses a language called `mdx`, which is like normal markdown with some extra features, i.e. `md + jsx`. The main advantage of using Docz is that you can render components "live" if you put them within the `<Playground>` tags.
A basic example may look like this:

```md
---
name: Button
route: /
---

import { Playground, Props } from 'docz'
import { Button } from './'

# Button

<Props of={Button} />

## Basic usage

<Playground>
  <Button>Click me</Button>
</Playground>
```

I chose to use Docz because it was simple to set up and looks very nice. I was already writing my documentation in markdown, so it seemed like a perfect fit, even though in my use case I didn't use the `Playground` to render components.

## Example

You can find an example project using [Docz here](https://gitlab.com/hmajid2301/stegappasaurus/-/tree/release/1.0.2/). This is one of my projects where I deployed the documentation using Gitlab Pages. You can find [it here](https://stegappasaurus.haseebmajid.dev/).

## Getting Started

Ok, now let's get into how we can add Docz to an existing project. We also need to have `react-dom` and `react` installed.

```bash
yarn add docz

# or

npm install docz
```

So our `package.json` looks like:

```json:title=package.json
{
  "name": "example_app",
  "scripts": {
    "docs-dev": "docz dev",
    "docs-build": "docz build"
  },
  "dependencies": {
    "docz": "2.2.0",
    "react": "16.9.0",
    "react-dom": "16.8.0"
  }
}
```

We then need to create our `doczrc.js` configuration file, like so:

```js:title=doczrc.js
export default {
  src: "docs",
  description: "Example Documentation",
  menu: ["Introduction", "Contributing"],
  themeConfig: {
    initialColorMode: "dark",
  },
};
```

We will keep all our `.mdx` files in a folder called `docs`, hence `src: 'docs'`. Each file will be shown as a page on our website. The `description` option will be shown as the name of our website in an open browser tab. The `menu` option is the order in which our pages will show up in the sidebar (on the left-hand side by default). The names used in this menu option must match the `name` used in the front matter of our `mdx` pages. Finally, the last option says I want to use the `dark` mode by default.

```md
---
name: Button
route: /
---
```

We can view our current `docz` website by running `yarn docs-dev` and then going to `localhost:3000` on our dev machine.

### Adding Pages

Ok, now let's add our actual "pages" to our Docz website. First, create a new folder called `docs` in your project root. Then we will create our first page called `Introduction.mdx` (this name doesn't matter so much), where the page's contents look something like:

```md:title=Introduction.mdx
---
name: Introduction
route: /
---

# Example

Welcome to this example app, in this app we will show examples.
```

> Note: The front matter `name` must match the name we defined in the `doczrc.js` menu option.

The route defines the path the user will see, i.e. in the `stegappasaurus` example this page will be shown on `https://stegappasaurus.haseebmajid.dev/`.

Next, let's create a second page called `Contributing.mdx`, which looks like:

```md:title=Contributing.mdx
---
name: Contributing
route: /contributing
---

# Contributing

Three main ways to contribute to this project are;

- **Adding a new feature:** Adding a new feature to the project, such as allow encoding of audio files alongside images
- **Improving a feature:** Extend/improve an existing feature, such as a small UI change
- **Fix an issue:** We have a list of [issues](https://gitlab.com/hmajid2301/stegappasaurus/issues), or you can fix your issue.
```

This page can be found on `/contributing`, i.e.
`https://stegappasaurus.haseebmajid.dev/contributing`. The page may look something like the image below. The titles are shown as sub-menus.

![Example App](images/docs.png)

## Gitlab Pages

Now that we have our Docz website and it is working locally, how can we deploy it on Gitlab Pages for all the world to see? Well, first we need to add a job titled `pages` to our `.gitlab-ci.yml` file; then we need to store all of our static assets in a folder called `public` and make that an `artifact` of this job. This will tell Gitlab CI that we want to publish this "website" to Gitlab Pages. Here is an example of what it may look like:

```yaml:title=.gitlab-ci.yml
pages:
  only:
    - master
  script:
    - yarn
    - yarn docs-build
    - mv .docz/dist/* public/
  artifacts:
    paths:
      - public
```

This will publish our "website" to https://<username>.gitlab.io/<project_name>. So, for example, my username is `hmajid2301` and my project name is `stegappasaurus`, hence the website URL is https://hmajid2301.gitlab.io/stegappasaurus.

### Custom Domain (Optional)

If you have your own custom domain, you can "host" the page under two URLs. In my case, I own `haseebmajid.dev` and I wanted to host it under a subdomain within that domain, like so: `https://stegappasaurus.haseebmajid.dev/`. Gitlab makes this surprisingly easy to do:

> Note: In this example, I am assuming we are using a subdomain and not the root domain, i.e. example_app.haseebmajid.dev, not haseebmajid.dev.

- First, go to your project on Gitlab
- Next, Settings (left sidebar) > Pages > New Domain (top right-hand corner)
- Enter the domain name you'd like to use, i.e. example_app.haseebmajid.dev
- Press `Create New Domain`
- Copy the `TXT` record to verify ownership.
- Next, go to your domain provider; in my case it's [Google](https://domains.google.com)
- Go to your DNS settings for that domain

We will now create two DNS records. The first is a TXT record, which verifies ownership of the domain. Go to the settings for "pages", which can be found at `https://gitlab.com/<username>/<project_name>/pages`. Then copy the data next to `Verification status` into your DNS settings.

Next, we will create a `CNAME` record. A CNAME is used to point one domain record to another, i.e. a user comes to `stegappasaurus.haseebmajid.dev`, the CNAME points to `hmajid2301.gitlab.io`, and that resolves to an IP address. Then the browser will take the user to the correct IP address. You can copy the CNAME data next to the `DNS` field.

After we've added the details, it will take a few minutes for verification and for Let's Encrypt to create a certificate for our website. Then you should be able to view your Docz website using both domains listed under the page settings of your project, i.e. the `gitlab.io` URL and your custom domain.

You can find more information [here](https://docs.gitlab.com/ee/user/project/pages/custom_domains_ssl_tls_certification/#3-set-up-dns-records-for-pages), with regards to Gitlab Pages and custom domains.

> Note: `.dev` domains always need to be HTTPS encrypted (need a certificate).

> Note: For Google Domains, this data must be added under DNS > Custom resource records.

![pages](images/pages.png)

That's it. We have deployed a `docz` website using Gitlab CI onto Gitlab Pages and even added our custom domain to it.

## Appendix

- [Example Project](https://gitlab.com/hmajid2301/stegappasaurus/-/tree/release/1.0.2)
- [Gitlab Tutorial](https://docs.gitlab.com/ee/user/project/pages/custom_domains_ssl_tls_certification/#3-set-up-dns-records-for-pages)

diff --git a/22.
Deploy Docz on Gitlab Pages/images/cover.jpg b/22. Deploy Docz on Gitlab Pages/images/cover.jpg deleted file mode 100644 index 3358991..0000000 Binary files a/22. Deploy Docz on Gitlab Pages/images/cover.jpg and /dev/null differ diff --git a/22. Deploy Docz on Gitlab Pages/images/cover.png b/22. Deploy Docz on Gitlab Pages/images/cover.png deleted file mode 100644 index 603e7d5..0000000 Binary files a/22. Deploy Docz on Gitlab Pages/images/cover.png and /dev/null differ diff --git a/22. Deploy Docz on Gitlab Pages/images/docs.png b/22. Deploy Docz on Gitlab Pages/images/docs.png deleted file mode 100644 index 377c7f0..0000000 Binary files a/22. Deploy Docz on Gitlab Pages/images/docs.png and /dev/null differ diff --git a/22. Deploy Docz on Gitlab Pages/images/pages.png b/22. Deploy Docz on Gitlab Pages/images/pages.png deleted file mode 100644 index e391463..0000000 Binary files a/22. Deploy Docz on Gitlab Pages/images/pages.png and /dev/null differ diff --git a/23. React Hooks, Context & Local Storage/README.md b/23. React Hooks, Context & Local Storage/README.md index 2e0ab31..a1ad880 100644 --- a/23. React Hooks, Context & Local Storage/README.md +++ b/23. React Hooks, Context & Local Storage/README.md @@ -40,7 +40,7 @@ We will use a project structure like so: Our `package.json` file looks like this: -```json:title=package.json +```json { "name": "ExampleApp", "version": "1.0.0", @@ -83,7 +83,65 @@ First, let's create our React context that will store the current theme the user give us a function that other components can use to update the theme. Finally, after any change has been made it will update the local storage with the users latest settings. -```tsx:title=./src/providers/DarkModeProvider.tsx file=./source_code/src/providers/DarkModeProvider.tsx +```tsx +import React, { Context, createContext, useReducer, useEffect } from "react"; + +export const LIGHT_THEME: Theme = { + background: "#fafafa" as BackgroundColors, + color: "#000000" as ForegroundColors, + isDark: false +}; + +export const DARK_THEME: Theme = { + background: "#333333" as BackgroundColors, + color: "#fafafa" as ForegroundColors, + isDark: true +}; + +export type BackgroundColors = "#333333" | "#fafafa"; +export type ForegroundColors = "#000000" | "#fafafa"; + +export interface Theme { + background: BackgroundColors; + color: ForegroundColors; + isDark: boolean; +} + +interface DarkModeContext { + mode: Theme; + dispatch: React.Dispatch; +} + +const darkModeReducer = (_: any, isDark: boolean) => + isDark ? DARK_THEME : LIGHT_THEME; + +const DarkModeContext: Context = createContext( + {} as DarkModeContext +); + +const initialState = + JSON.parse(localStorage.getItem("DarkMode") as string) || LIGHT_THEME; + +const DarkModeProvider: React.FC = ({ children }) => { + const [mode, dispatch] = useReducer(darkModeReducer, initialState); + + useEffect(() => { + localStorage.setItem("DarkMode", JSON.stringify(mode)); + }, [mode]); + + return ( + + {children} + + ); +}; + +export { DarkModeProvider, DarkModeContext }; ``` @@ -91,7 +149,7 @@ Next, we will import all of the modules we will need to use then. We will define and `DARK_THEME`. Then finally because we are using Typescript we will define types for the Themes and the context we will use. -```tsx:title=./src/providers/DarkModeProvider.tsx +```tsx const darkModeReducer = (_: any, isDark: boolean) => isDark ? DARK_THEME : LIGHT_THEME; ``` @@ -101,7 +159,7 @@ current app so it cannot have any unintended side-effects. 
Exactly the same func would define if we were using Redux. In this case, the reducer just returns the `DARK_THEME` if the `isDark` argument is `true` else it returns the `LIGHT_THEME`. -```tsx:title=./src/providers/DarkModeProvider.tsx +```tsx const DarkModeContext: Context = createContext( {} as DarkModeContext ); @@ -114,7 +172,7 @@ After this, we create our React context called `DarkModeContext` and we give it (we don't really mind too much). We then define the default value. It tries to check the value stored in `localstorage`. If there is none, then we use the `LIGHT_THEME`. After which we define the provider. -```tsx:title=./src/providers/DarkModeProvider.tsx +```tsx const DarkModeProvider: React.FC = ({ children }) => { const [mode, dispatch] = useReducer(darkModeReducer, initialState); @@ -142,7 +200,7 @@ we use the `useReducer` hook and give it our `darkModeReducer` with the initial reducer will then return a `mode` which is the current theme data and a function `dispatch` which will be used to update the current theme. Breaking it down a bit further we see: -```tsx:title=./src/providers/DarkModeProvider.tsx +```tsx useEffect(() => { localStorage.setItem("DarkMode", JSON.stringify(mode)); }, [mode]); @@ -154,7 +212,7 @@ stores the current theme into the user's local storage under the key `DarkMode`. this was changed from light -> dark and then the user comes back to the site, the initial value we would get from `localstorage.getItem("DarkMode")` would not, of course, be the dark theme. -```tsx:title=./src/providers/DarkModeProvider.tsx +```tsx return ( Warning: The provider needs to be in a separate component to those that access the React Hook. Hence we import the `MainApp` component rather than including all of the `MainApp.tsx` in `App.tsx`. -```tsx:title=src/App.tsx file=./source_code/src/App.tsx +```tsx import React from "react"; import { DarkModeProvider } from "~/providers/DarkModeProvider"; @@ -205,8 +263,35 @@ Now the MainApp is a very basic page: it contains a single button which is used for dark to light and vice versa. Here we use React hooks with React context to be able to update and retrieve the theme. -```tsx:title=src/views/MainApp.tsx file=./source_code/src/views/MainApp.tsx +```tsx +import React, { useContext } from "react"; + +import { DarkModeContext } from "~/providers/DarkModeProvider"; + +const MainApp = () => { + const theme = useContext(DarkModeContext); + const { background, color, isDark } = theme.mode; + + return ( +
+    <div style={{ background, color }}>
+      <h1>Theme is {isDark ? "Dark" : "Light"}</h1>
+      <button onClick={() => setTheme(theme)}>Toggle Theme</button>
+    </div>
+ ); +}; + +const setTheme = (darkMode: DarkModeContext) => { + const isDark = darkMode.mode.isDark; + darkMode.dispatch(!isDark); +}; +export default MainApp; ``` #### useContext @@ -215,7 +300,7 @@ The `useContext` is an example of a React Hook. It allows users to access a spec component, a component which is not a class. The context has a mode property which stores the current theme we should display light or dark. Such as `background` and `color`. -```tsx:title=src/views/MainApp.tsx +```tsx const theme = useContext(DarkModeContext); const { background, color, isDark } = theme.mode; ``` @@ -230,7 +315,7 @@ has an `onClick` event. The `setTheme` function gets the current theme from the It then calls the `dispatch` function we have defined in the context to change to the theme to the opposite it is at the moment. So light theme -> dark theme and dark theme -> light theme. -```tsx:title=src/views/MainApp.tsx +```tsx ; //... diff --git a/23. React Hooks, Context & Local Storage/images/cover.jpg b/23. React Hooks, Context & Local Storage/images/cover.jpg deleted file mode 100644 index 1976e8b..0000000 Binary files a/23. React Hooks, Context & Local Storage/images/cover.jpg and /dev/null differ diff --git a/24. Gitlab CI DinD/README.md b/24. Gitlab CI DinD/README.md deleted file mode 100644 index 9f0a261..0000000 --- a/24. Gitlab CI DinD/README.md +++ /dev/null @@ -1,201 +0,0 @@ ---- -title: "DinD with Gitlab CI" -tags: ["docker", "gitlab", "ci"] -license: "public-domain" -slug: "dind-and-gitlab-ci" -canonical_url: "https://haseebmajid.dev/blog/dind-and-gitlab-ci/" -date: "2020-05-01" -published: true -cover_image: "images/cover.jpg" ---- - -Like most developers, we want to be able to automate as many and as much of processes as possible. Pushing Docker -images to a registry is a task that can easily be automated. In this article, we will cover how you can use -Gitlab CI to build and publish your Docker images, to the Gitlab registry. However, you can also very easily -edit this to push your images to DockerHub as well. - -A quick aside on terminology related to Docker: - -- container: An instance of an image is called a container (`docker run`) -- image: A set of immutable layers (`docker build`) -- hub: The official registry where you can get more Docker images from (`docker pull`) - -## Example - -Here is an example `.gitlab-ci.yml` file which can be used to build and push your Docker images to the Gitlab registry. - -```yaml:title=.gitlab-ci.yml -variables: - DOCKER_DRIVER: overlay2 - -services: - - docker:dind - -stages: - - publish - -publish-docker: - stage: publish - image: docker - script: - - export VERSION_TAG=v1.2.3 - - docker login ${CI_REGISTRY} -u gitlab-ci-token -p ${CI_BUILD_TOKEN} - - docker build -t ${CI_REGISTRY_IMAGE}:latest -t ${CI_REGISTRY_IMAGE}:${VERSION_TAG} . - - docker push ${CI_REGISTRY_IMAGE}:latest - - docker push ${CI_REGISTRY_IMAGE}:${VERSION_TAG} -``` - -## Explained - -The code above may be a bit confusing, it might be a lot to take in. So now we will break it down line by line. - -```yaml:title=.gitlab-ci.yml -variables: - DOCKER_DRIVER: overlay2 -``` - -In our first couple of lines, we define some variables which will be used by all our jobs (the variables are global). -We define a variable `DOCKER_DRIVER: overlay2`, this helps speed our Docker containers a bit because by default it -uses `vfs` which is slower -[learn more here](https://docs.gitlab.com/ce/ci/docker/using_docker_build.html#using-the-overlayfs-driver). 
```yaml
random-job:
  stage: publish
  variables:
    DOCKER_DRIVER: overlay2
  script:
    - echo "HELLO"
```

> Note: we could just as easily define `variables` within a single job, like you see in the example above.

```yaml:title=.gitlab-ci.yml
services:
  - docker:dind
```

The next couple of lines define a service. A service is a Docker image which is linked to our job(s). Again in this example, it is defined globally and will be linked to all of our jobs. We could very easily define it within our job, just like in the `variables` example. The [`docker:dind`](https://github.com/docker-library/docker/blob/157869f94ea90e2acb4d0f77045d99079ead821c/18.02/dind/dockerd-entrypoint.sh) image automatically starts a Docker daemon via its `entrypoint`. We need to use this daemon to build/push our Docker images within CI.

The `docker:dind` (dind = Docker in Docker) image is almost identical to the `docker` image, the difference being the dind image starts a Docker daemon. In this example, the job will use the `docker` image as the client and connect to the daemon running in this container.

We could also just use the `dind` image in our job and simply start `dockerd` in the first line (& = in the background). The `dockerd` command starts the Docker daemon itself, which our `docker` client commands can then communicate with. It would achieve the same outcome. I think the service approach is a bit cleaner, but as already stated, either approach would work.

```yaml:title=.gitlab-ci.yml
publish-docker:
  stage: publish
  image: docker:dind
  script:
    - dockerd &
    ...
    - docker push ${CI_REGISTRY_IMAGE}:${VERSION_TAG}
```

> Info: One common use case of Gitlab CI services is to spin up databases like MySQL. We can then connect to them within our job and run our tests. It can simplify our jobs by quite a bit.

> Note: There are several other ways we could also build/push our images. This is the [recommended approach](https://gitlab.com/gitlab-examples/docker/blob/master/.gitlab-ci.yml).

```yaml:title=.gitlab-ci.yml
stages:
  - publish
```

Next, we define our stages and give them names. Each job must have a valid stage attached to it. Stages are used to determine when a job will be run in our CI pipeline. If two jobs have the same stage, then they will run in parallel. The stages defined earlier will run first, so order does matter. However in this example, we only have one stage and one job, so this isn't super important, more just something to keep in mind.

```yaml:title=.gitlab-ci.yml
publish-docker:
  stage: publish
  ...
```

Now we define our job, where `publish-docker` is the name of our job on the Gitlab CI pipeline. We then define what `stage` the job should run in; in this case, this job will run during the `publish` stage.

```yaml:title=.gitlab-ci.yml
publish-docker:
  ...
  image: docker
  ...
```

Then we define what Docker image to use in this job. In this job, we will use the `docker` image. This image has all the commands we need to `build` and `push` our Docker images. It will act as the client making requests to the `dind` daemon.

```yaml:title=.gitlab-ci.yml
script:
  - export VERSION_TAG=v1.2.3
  - docker login ${CI_REGISTRY} -u gitlab-ci-token -p ${CI_BUILD_TOKEN}
  - docker build -t ${CI_REGISTRY_IMAGE}:latest -t ${CI_REGISTRY_IMAGE}:${VERSION_TAG} .
  - docker push ${CI_REGISTRY_IMAGE}:latest
  - docker push ${CI_REGISTRY_IMAGE}:${VERSION_TAG}
```

Finally, we get to the real meat and potatoes of the CI file.
The bit of code that builds and pushes our Docker images to the registry:

```yaml:title=.gitlab-ci.yml
- export VERSION_TAG=v1.2.3
```

It is often a good idea to tag our images; in this case, I'm using a release name. You could get this from, say, your `setup.py` or `package.json` file as well. In my Python projects I usually use the command `export VERSION_TAG=$(cat setup.py | grep version | head -1 | awk -F= '{ print $2 }' | sed 's/[",]//g' | tr -d "'")` to parse my `setup.py` for the version number. But this can be whatever you want it to be. Here we have just kept it static to make things simpler, but in reality you'll probably want to retrieve the version number programmatically.

```yaml:title=.gitlab-ci.yml
- docker login ${CI_REGISTRY} -u gitlab-ci-token -p ${CI_BUILD_TOKEN}
```

Then we log in to our Gitlab registry. The environment variables `${CI_REGISTRY}` and `${CI_BUILD_TOKEN}` are predefined Gitlab variables that are injected into our environment. You can read more about them [here](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html). Since we are pushing to our Gitlab registry, we can just use the credentials defined within the environment, i.e. the username `gitlab-ci-token` and, as the password, a throwaway token.

> Note: You can only do this on protected branches/tags.

```yaml:title=.gitlab-ci.yml
- docker build -t ${CI_REGISTRY_IMAGE}:latest -t ${CI_REGISTRY_IMAGE}:${VERSION_TAG} .
- docker push ${CI_REGISTRY_IMAGE}:latest
- docker push ${CI_REGISTRY_IMAGE}:${VERSION_TAG}
```

Finally, we run our normal commands to build and push our images. The place where you can find your images will depend on the project name and your username, but it should follow this format:

```bash
registry.gitlab.com/<username>/<project_name>
```

### (Optional) Push to DockerHub

```yaml:title=.gitlab-ci.yml
- docker login -u hmajid2301 -p ${DOCKER_PASSWORD}
- export IMAGE_NAME="hmajid2301/example_project"
- docker build -t ${IMAGE_NAME}:latest -t ${IMAGE_NAME}:${VERSION_TAG} .
- docker push ${IMAGE_NAME}:latest
- docker push ${IMAGE_NAME}:${VERSION_TAG}
```

We can also push our images to DockerHub, with the code shown above. We first need to log in to DockerHub, then change the name of our image to `<username>/<image_name>`.

## Appendix

- A good [Stackoverflow Post](https://stackoverflow.com/questions/47280922/role-of-docker-in-docker-dind-service-in-gitlab-ci)
- [Gitlab CI Docs](https://docs.gitlab.com/ee/ci/docker/using_docker_build.html)
- [Gitlab Example](https://gitlab.com/gitlab-examples/docker/blob/master/.gitlab-ci.yml)

diff --git a/24. Gitlab CI DinD/images/cover.jpg b/24. Gitlab CI DinD/images/cover.jpg deleted file mode 100644 index 94fe396..0000000 Binary files a/24. Gitlab CI DinD/images/cover.jpg and /dev/null differ

diff --git a/24. Gitlab CI DinD/images/cover.png b/24. Gitlab CI DinD/images/cover.png deleted file mode 100644 index 07cc9ee..0000000 Binary files a/24. Gitlab CI DinD/images/cover.png and /dev/null differ

diff --git a/25. AJAX with PrismJS/README.md b/25. AJAX with PrismJS/README.md deleted file mode 100644 index 20dddc3..0000000 --- a/25.
AJAX with PrismJS/README.md +++ /dev/null @@ -1,111 +0,0 @@

---
title: "Make PrismJS editable"
tags: ["javascript", "ajax", "prismjs"]
license: "public-domain"
slug: "make-prismjs-editable"
canonical_url: "https://haseebmajid.dev/blog/make-prismjs-editable"
date: "2020-05-10"
published: true
cover_image: "images/cover.jpg"
---

![demo](images/main.gif)

In this article, we will go over how you can make PrismJS (syntax highlighted) code blocks editable.

## Introduction

[PrismJS](https://prismjs.com/) can be used to add syntax highlighting to code blocks on our website. For a personal project of mine, [composerisation](composerisation.haseebmajid.dev/) #ShamelessPlug :plug:, I needed to allow the user to paste in their own (docker-compose) yaml files. So let's take a look at how we can let a user first edit a code block and then re-run PrismJS to add syntax highlighting.

So our HTML will look something like this.

> Note: When I refer to "code block" I am referring to the entire thing, including the `pre` and the `code` tags.

```html:title=index.html
<!DOCTYPE html>
<html>
  <head>
    <link href="prism.css" rel="stylesheet" />
  </head>
  <body>
    ...
    <pre id="editable" contenteditable onPaste="setTimeout(function() {onPaste();}, 0)">
      <code id="yaml" class="language-yaml"></code>
    </pre>
    <script src="prism.js"></script>
  </body>
</html>
```

In this file we import the `prism.css` stylesheet; there are many themes you can choose from, but in this example we will use the default theme. We will also import `prism.js`. These are the two files required to use PrismJS.

```html:title=index.html
<pre id="editable" contenteditable onPaste="setTimeout(function() {onPaste();}, 0)">
  <code id="yaml" class="language-yaml"></code>
</pre>
```

Next we create the code block on the web page. Note the class on the `code` tag is `language-yaml`. To use PrismJS we need to give the `code` tag a class of `language-x`, where x is the language we want syntax highlighting for. You can find a full list of [supported languages here](https://prismjs.com/#supported-languages).

To allow users to paste and edit the code block we add `contenteditable` to the `pre` tag. The reason we add it to the `pre` tag and not the `code` tag is that, once PrismJS has run, it will edit the `code` block to include `span`s and other HTML elements to do the syntax highlighting. This makes it a lot harder for the user to copy and paste if you make the `code` tag editable, as opposed to the `pre` tag. The `pre` tag also has `onPaste="setTimeout(function() {onPaste();}, 0)"`, which means that after the user has pasted into the `pre` tag this function will be called; in this case we call a function called `onPaste()`. However, we use a `setTimeout` so that the browser has enough time to update the `pre` tag, else the `pre`/`code` tags will still contain the previous text from before the paste.

## JavaScript

Now the user can paste directly into the code block. How do we force a re-render? Let's take a look at the `onPaste` function, which is called every time the user pastes into our code block.

```js:title=index.html
function onPaste() {
  const editable = document.getElementById("editable");
  const dockerCompose = editable.innerText;
  editable.innerHTML = '<code id="yaml" class="language-yaml"></code>';
  const yaml = document.getElementById("yaml");
  yaml.innerHTML = Prism.highlight(dockerCompose, Prism.languages.yaml, "yaml");
}
```

So first we get the `editable` element (our `pre` tag). Next we get the `innerText` of said element. This should be the new content the user wants to paste into the `pre` tag. Sometimes when you copy/paste into the code block the old `code` tag gets deleted, so just in case we add the `code` tag back in, as this is where PrismJS will render our "new" yaml "code". This is done like so: `editable.innerHTML = '<code id="yaml" class="language-yaml"></code>';`, which replaces all the "children" of the `pre` tag with this new code block. Next we get the `code` tag with id `yaml`.

```js:title=index.html
yaml.innerHTML = Prism.highlight(dockerCompose, Prism.languages.yaml, "yaml");
```

Finally, the main part of our code, which actually highlights our code. We pass in the newly pasted yaml, stored in the `dockerCompose` variable. Next we tell Prism what language to use, `Prism.languages.yaml` (this is the language grammar), and finally we pass the language name, in this case yaml. Then we set this as the `innerHTML` of the `code` tag.

That's it! Now when the user pastes in new yaml code, it'll be automatically syntax highlighted by PrismJS. This process can, of course, also be used for AJAX content as well, if you make an API request and the API responds with code that needs to be syntax highlighted.

> Note: The code in this project isn't particularly clean, it's mostly all in one file. This is just to make the example a bit easier to follow; in reality you would likely split this into multiple files.

## Appendix

- [Source Code](https://gitlab.com/hmajid2301/articles/-/blob/master/25.%20AJAX%20with%20PrismJS/source_code)
- [Example Project](https://composerisation.haseebmajid.dev/#yaml)

diff --git a/25. AJAX with PrismJS/images/cover.jpg b/25. AJAX with PrismJS/images/cover.jpg deleted file mode 100644 index b35a43e..0000000 Binary files a/25.
AJAX with PrismJS/images/cover.jpg and /dev/null differ

diff --git a/25. AJAX with PrismJS/images/main.gif b/25. AJAX with PrismJS/images/main.gif deleted file mode 100644 index 6ade9f1..0000000 Binary files a/25. AJAX with PrismJS/images/main.gif and /dev/null differ

diff --git a/26. Example React Native Structure/README.md b/26. Example React Native Structure/README.md deleted file mode 100644 index 3efc65c..0000000 --- a/26. Example React Native Structure/README.md +++ /dev/null @@ -1,257 +0,0 @@

---
title: "An example React Native Project Structure"
tags: ["javascript", "react-native", "project"]
license: "public-domain"
slug: "react-native-project-structure"
canonical_url: "https://haseebmajid.dev/blog/react-native-project-structure"
date: "2020-05-31"
published: true
cover_image: "images/cover.jpg"
---

In this article, I will go over an example project structure you can use for your React Native projects. This is of course my opinion, so feel free to tweak the structure to your needs/preferences.

- [Link to project](https://gitlab.com/hmajid2301/stegappasaurus/)
- [Link to Docz Website](stegappasaurus.haseebmajid.dev/)

## Project Structure

```bash
.
├── android
├── app.json
├── App.tsx
├── babel.config.js
├── .buckconfig
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── docs
├── doczrc.js
├── .eslintrc.js
├── gatsby-node.js
├── .gitignore
├── .gitlab
├── .gitlab-ci.yml
├── .history
├── images
├── index.d.ts
├── index.js
├── ios
├── jest.config.js
├── LICENSE
├── metro.config.js
├── __mocks__
├── node_modules
├── package.json
├── prettier.config.js
├── public
├── react-native.config.js
├── README.md
├── src
├── __tests__
├── tsconfig.json
├── util
├── .watchmanconfig
└── yarn.lock
```

## Configs

Let's briefly go over the various config files used in this project.

> **Note:** Not all of these will be relevant to your project; use only the ones that are.

- `app.json`: Used by React Native; contains the name of your app.
- `.buckconfig`: Used to speed up builds, plus more.
- `babel.config.js`: The config used by Babel, which transpiles our code into compliant ES5, so we can use all the newest and greatest features from JavaScript. I think one of the best Babel plugins you can use is babel-module-resolver, so we can have cleaner imports; more info [here](https://dev.to/hmajid2301/better-imports-with-typescript-aliases-babel-and-tspath-40ne).
- `doczrc.js`: The config used by Docz, which creates a website from Markdown files; it is used to set the theme and the order of the sidebar.
- `.eslintrc.js`: I use eslint as my linter of choice. This is the config used to set up all the various options, including relevant config to use with Typescript and Prettier.
- `gatsby-node.js`: Docz uses Gatsby "behind the scenes"; you only need this file if you intend to use Docz.
- `jest.config.js`: Since this is a React Native project I also use Jest, a test runner created by Facebook. This file is used to set up various bits of config, such as allowing me to use the same module import resolution and using it with Typescript (babel-jest).
- `metro.config.js`: Metro is a React Native JavaScript bundler.
- `package.json`: The file used to manage dependencies and build scripts.
- `prettier.config.js`: The config for the Prettier code formatter.
- `react-native.config.js`: As of React Native 0.60, you use this file to allow you to import custom fonts and assets into your React Native project.
- `tsconfig.json`: Since I am using Typescript, this is the required config for Typescript.
- `.watchmanconfig`: The config for Watchman, a file watcher used for hot reloading.
- `yarn.lock`: Not quite config, but used by package.json.

The following config files, `app.json`, `.buckconfig`, `metro.config.js` and `.watchmanconfig`, were unchanged after creating the project using the following command:

```bash
npx react-native init AwesomeTSProject --template react-native-template-typescript
```

## Testing

For testing, I have the following two folders:

### Mocks

The `__mocks__` folder is used to mock out various third party modules and functions. Here is an example:

```bash
.
├── bugsnag-react-native.js
├── @react-native-community
│   └── cameraroll.js
├── react-native-image-picker.js
├── react-native-navigation-bar-color.js
├── react-native-permissions.js
├── react-native-share-extension.js
├── react-native-share.js
├── react-native-snackbar.js
└── rn-fetch-blob.js
```

Where `bugsnag-react-native.js` looks something like the following:

```js
module.exports = {
  Configuration: jest.fn(),
  Client: jest.fn(() => ({ notify: jest.fn() })),
};
```

### Tests

The `__tests__` folder contains all of my tests. The structure matches the structure of the `src` folder, so it's easier to find tests. Some people prefer to keep their tests in the same folder as their components. They will also keep their storybook config in the component folder, so everything related to that component exists in that folder. However, I prefer to keep my tests separate from my source code.

```bash
.
├── setupTests.ts
└── src
    ├── actions
    │   ├── Snackbar.test.ts
    │   └── Steganography
    ├── components
    │   ├── AboutList.test.tsx
    │   ├── AppHeader.test.tsx
    │   ├── ImageMessage.test.tsx
    │   ├── ImageProgress.test.tsx
    │   ├── MainHeader.test.tsx
    │   ├── MarkdownModal.test.tsx
    │   └── Modal.test.tsx
    └── views
        ├── Home
        └── Settings
```

## Documentation

The following files/folders are used to document the project.

- `docs`: Contains the markdown files used by the Docz website.
- `public`: Used to contain some static files used by Docz, such as favicons.
- `README.md`: The first page the user will see when visiting the repo.
- `CHANGELOG.md`: The changes to the project in the [Keepachangelog](https://keepachangelog.com/en/1.0.0/) format.
- `CODE_OF_CONDUCT.md`: How to "behave within" the project.
- `CONTRIBUTING.md`: How to contribute to the project, helping users get started with this project.
- `images`: Used to store the original SVG images converted to PNGs.

## Gitlab / Git

This project is available on Gitlab, so here are the specific files related to git/Gitlab:

- `.gitlab`: Contains templates for merge requests and issues.
- `.gitlab-ci.yml`: The CI file, which defines what jobs are run on Gitlab CI.
- `.gitignore`: Used by git to determine what files to ignore when committing changes. Generated from [gitignore.io](https://www.gitignore.io/)

### .gitlab

Taking a closer look at the `.gitlab` folder you can see the different templates I have:

```bash
.
├── issue_templates
│   ├── bug.md
│   ├── feature.md
│   └── question.md
└── merge_request_templates
    ├── merge_request.md
    └── release.md
```

If someone creates a new issue using the `bug` template, they will get the following template to edit when raising their issue, making it easier to give the relevant information required to resolve the issue.

```markdown
---
name: "🐛 Bug"
---

# Bug Report

## Current Behaviour

# ...
```

## Source Code

Now onto the more interesting part of this project.

- `android`: All the specific native code for Android. You will only need to edit this if you need to write Android-specific code in Java/Kotlin or edit the way your application is built.
- `ios`: Same as above, except for iOS.

### src

Now most of the code related to this project exists within the `src/` folder.

```bash
.
├── actions
│   ├── Bugsnag
│   ├── Share
│   ├── Snackbar
│   └── Steganography
├── assets
│   ├── fonts
│   └── images
├── components
│   ├── AboutList
│   │   ├── AboutList.tsx
│   │   └── index.ts
│   ├── ImageMessage
│   ├── ImageProgress
│   ├── IntroSlider
│   ├── Loader
│   ├── Logo
│   ├── MarkdownModal
│   ├── Modal
│   └── PhotoAlbumList
├── constants
│   ├── colors.ts
│   ├── fonts.ts
│   ├── themes.ts
│   └── types.ts
├── data
├── providers
└── views
    ├── Home
    ├── MainApp.tsx
    ├── Settings
    └── Settings.tsx
```

- `actions`: Contains actions, such as a snack bar which can be shown.
- `assets`: Static assets such as images and fonts.
- `components`: Components which will typically be used by multiple views. Each component has its own folder.
- `constants`: Used to store colours, common types and fonts.
- `data`: (JSON) data used by the components.
- `providers`: React contexts, which will be consumed by other components to store state.
- `views`: The different pages the users will see. Since Settings and Home have sub-pages, those exist within their own folders.

That's it, that's the "basic" structure I've used for a React Native project.

## Appendix

- [Example React Native Project](https://gitlab.com/hmajid2301/stegappasaurus/)

diff --git a/26. Example React Native Structure/images/cover.jpg b/26. Example React Native Structure/images/cover.jpg deleted file mode 100644 index ccafb4c..0000000 Binary files a/26. Example React Native Structure/images/cover.jpg and /dev/null differ

diff --git a/27. Mocking in Flask with Pytest/README.md b/27. Mocking in Flask with Pytest/README.md deleted file mode 100644 index 7c874a3..0000000 --- a/27. Mocking in Flask with Pytest/README.md +++ /dev/null @@ -1,315 +0,0 @@

---
title: "Testing & Mocking a Connexion/Flask Application with Pytest"
tags: ["testing", "python", "flask", "pytest"]
license: "public-domain"
slug: "testing-mocking-connexion-flask-app-pytest"
canonical_url: "https://haseebmajid.dev/blog/testing-mocking-connexion-flask-app-pytest"
date: "2020-06-09"
published: true
cover_image: "images/cover.jpg"
---

In this article, I will show you how you can test a Python web service that was built using [Connexion](https://github.com/zalando/connexion/) (a wrapper library around Flask). We will go over how you can mock functions and how you can test your endpoints. There are two related articles I have written in the past, listed below. In the first one we go over how to create a web service using Connexion, the same web service we will use in this article. In the second article I introduce how you can use `pytest-mock` and `pytest-flask` to test a Flask web service.
- [Implementing a Simple REST API using OpenAPI, Flask & Connexions](/blog/rest-api-openapi-flask-connexion/)
- [Testing with pytest-mock and pytest-flask](/blog/testing-with-pytest-mock-and-pytest-flask/)

The example app we will be writing tests for is a very simple CRUD API managing a pet store. It allows us to add pets, remove pets, update pets and query the pets we have in the store.

## Structure

You can find the source code here. Our project structure looks like this:

```text
.
├── openapi
│   └── specification.yml
├── requirements.txt
├── test_api
│   ├── core
│   │   ├── __init__.py
│   │   ├── pets.json
│   │   └── pets.py
│   ├── __init__.py
│   ├── run.py
│   └── web
│       ├── controllers
│       │   ├── __init__.py
│       │   └── pets_controller.py
│       ├── encoder.py
│       ├── __init__.py
│       ├── models
│       │   ├── base_model_.py
│       │   ├── __init__.py
│       │   ├── pet.py
│       │   └── pets.py
│       └── util.py
└── tests
    ├── conftest.py
    ├── __init__.py
    └── test_pets_controller.py
```

## API

Here is our controller module, called `web/controllers/pets_controller.py`. This is where Connexion routes our requests to:

```python:title=test_api/web/controllers/pets_controller.py
import connexion
import six

from ..models.pet import Pet  # noqa: E501
from ..models.pets import Pets  # noqa: E501
from .. import util

from test_api.core import pets


def get_pet(pet_id):  # noqa: E501
    """Get a pet in the store

     # noqa: E501

    :param pet_id: The id of the pet to retrieve
    :type pet_id: str

    :rtype: Pet
    """
    try:
        pet = pets.get_pet(pet_id)
        response = Pet(id=pet.id, breed=pet.breed, name=pet.name, price=pet.price), 200
    except KeyError:
        response = {}, 404

    return response
```

Connexion uses the OpenAPI specification `openapi/specification.yml` to work out which function to route requests for the path `/pet/{pet_id}` to. It uses the `operationId` alongside the `x-swagger-router-controller` to determine the function to call in the `pets_controller.py` module.

```yaml:title=openapi/specification.yml
/pet/{pet_id}:
  get:
    tags:
      - "pet"
    summary: "Get a pet in the store"
    operationId: "get_pet"
    parameters:
      - name: "pet_id"
        in: "path"
        description: "The id of the pet to retrieve"
        required: true
        type: "string"
    responses:
      200:
        description: "Successfully retrieved pet"
        schema:
          $ref: "#/definitions/Pet"
      404:
        description: "Pet doesn't exist"
    x-swagger-router-controller: "test_api.web.controllers.pets_controller"
```

## Tests

Now onto our tests!

### Libraries

_pytest-flask_ allows us to specify an app fixture and then send API requests with this app. Usage is similar to the `requests` library when sending HTTP requests to our app.

_pytest-mock_ is a simple wrapper around the unittest mock library, so anything you can do using `unittest.mock` you can do with `pytest-mock`. The main difference in usage is that you access it using the `mocker` fixture, and the mock ends at the end of the test. Whereas with the normal mock library, if you mock, say, the `open()` function, it will be mocked for the remaining duration of that test module, i.e. it will affect other tests.

### conftest.py

The `conftest.py` file is automatically run by pytest and allows our test modules to access fixtures defined in this file. One of the best features of Pytest is fixtures. Fixtures are functions that contain re-usable bits of code we can run in our unit tests, such as static data used by tests.
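For example, a minimal fixture (hypothetical, not part of this project) that provides static data to any test requesting it might look like:

```python
import pytest


@pytest.fixture
def default_pet():
    # re-usable static data; any test can request it by name
    return {"name": "ginger", "breed": "bengal", "price": 100}


def test_pet_has_price(default_pet):
    assert default_pet["price"] == 100
```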
```python:title=tests/conftest.py file=./source_code/tests/conftest.py

```

#### app()

In this file, we have two functions. The `app` fixture allows us to pass the `client` argument to our tests, and with it we can test our web application. You can get more information [here](https://flask.palletsprojects.com/en/1.1.x/testing/#the-testing-skeleton) about how Flask apps can be tested; essentially we don't need to start/stop a server before/after our tests.

By giving it the `scope=session`, the fixture will be created once before all of our tests run. Our `run.py` file looks like this:

```python:title=test_api/run.py file=./source_code/test_api/run.py

```

The `create_app` function creates our web application and returns a Flask object. Remember, the Connexion library is just a wrapper around Flask; Connexion just reduces the boilerplate code we have to write. Again, you can have a read of the article above to get more details about how it works.

#### clean_up()

```python:title=tests/conftest.py
@pytest.fixture(scope="session", autouse=True)
def clean_up():
    yield
    default_pets = {
        "1": {"name": "ginger", "breed": "bengal", "price": 100},
        "2": {"name": "sam", "breed": "husky", "price": 10},
        "3": {"name": "guido", "breed": "python", "price": 518},
    }

    abs_file_path = os.path.abspath(os.path.dirname(__file__))
    json_path = os.path.join(abs_file_path, "../", "test_api", "core", "pets.json")
    with open(json_path, "w") as pet_store:
        json.dump(default_pets, pet_store, indent=4)
```

The second fixture we define is called `clean_up`; because of the `yield` line, this function will run after all of our tests have completed. The `yield` command is related to generators, you can read [more here](https://stackoverflow.com/questions/231767/what-does-the-yield-keyword-do). In our case, it's used in Pytest fixtures so that we can run some cleanup jobs after our tests are completed. In this example, I am simply restoring the contents of the JSON file, which acts as a data store (like a database), to the default values it had before the tests were run.

> Since pytest-3.0, fixtures using the normal fixture decorator can use a yield statement to provide fixture values and execute teardown code - Pytest Docs

### test_pets_controller.py

Now we have gone over the setup required for our tests, let's take a look at how we can test our code. Our first test looks like:

```python:title=tests/test_pets_controller.py
def test_get_all_pets(client):
    url = "/api/v1/pet"
    expected_json = [
        {"id": "1", "name": "ginger", "breed": "bengal", "price": 100},
        {"id": "2", "name": "sam", "breed": "husky", "price": 10},
        {"id": "3", "name": "guido", "breed": "python", "price": 518},
    ]
    response = client.get(url)
    assert response.json == expected_json
```

It's a very simple test; here we use the `app` fixture we defined above. The `client` fixture can be used because we are using the `pytest-flask` library. As you can see, it looks very similar to `requests`, where we give it a path `/api/v1/pet` and then tell it what kind of request to make, `client.get`. Whilst the syntax between the `requests` library and the `client` fixture is almost identical, one big difference that always seems to trip me up is: in `requests`, to get the JSON data from the `response` object you would call `response.json()`, i.e. it is a function. However with the `client` (`pytest-flask`) fixture, to get the JSON data we use `response.json`, which is just an attribute of the object, not a function.
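To illustrate the difference side by side (a small sketch, assuming a server running locally for the `requests` example):

```python
import requests

# requests: json() is a method you call
data = requests.get("http://localhost:5000/api/v1/pet").json()

# pytest-flask client fixture: json is an attribute
data = client.get("/api/v1/pet").json
```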
The test itself is very simple: it's making a request to get all pets in the pet store. We then compare that with what we expect to be in the pet store, `assert response.json == expected_json`.

The next test we have looks like this:

```python:title=tests/test_pets_controller.py
@pytest.mark.parametrize(
    "pet_data, expected_status, expected_data",
    [
        ({"name": "Yolo", "breed": "shorthair", "price": 100}, 201, {"id": 4}),
        ({}, 400, {}),
        ({"a": "b"}, 400, {}),
    ]
)
def test_add_a_pet(client, pet_data, expected_status, expected_data):
    url = "/api/v1/pet"
    response = client.post(url, json=pet_data)
    assert response.status_code == expected_status
    # only check the body when a pet was actually created
    if response.status_code == 201:
        assert response.json == expected_data
```

This test is attempting to add a new pet to the store. It's similar to the other test; we still use the `client` fixture to make the request. This time we also give it some JSON data, hence we provide the `json` argument (`json=pet_data`); this automatically sets the headers correctly so the server knows it's receiving JSON data.

We also use a decorator called `@pytest.mark.parametrize`. This allows us to run our tests against a list of data, so we don't have to write the same test x number of times. We just pass the test different arguments. Pytest will run this test x number of times, once for each item in the list. So, for example, the first time the test runs:

```python
pet_data = {"name": "Yolo", "breed": "shorthair", "price": 100}
expected_status = 201
expected_data = {"id": 4}
```

The second time like this:

```python
pet_data = {}
expected_status = 400
expected_data = {}
```

And so on and so on. This helps keep our test file smaller and keeps it DRY (don't repeat yourself). A very nice feature of Pytest and one I use heavily.

The final test we have in this file looks like:

```python:title=tests/test_pets_controller.py
def test_add_pet_fail_json(client, mocker):
    pet_data = {"name": "Yolo", "breed": "shorthair", "price": 100}
    url = "/api/v1/pet"
    mock = mocker.patch("connexion.request")
    mock.is_json = False
    response = client.post(url, json=pet_data)
    assert response.status_code == 400
```

At last, we see `pytest-mock` being used, via the `mocker` fixture we automatically get access to. The `mocker` is just a simple wrapper around the `unittest.mock` module, the main difference being the mock only exists for the duration of that test. Mocking is often used in unit testing, when we cannot rely on external dependencies such as database connections or another web service.

```python:title=test_api/web/controllers/pets_controller.py
def add_pet(body):  # noqa: E501
    # ...
    if connexion.request.is_json:
        body = Pet.from_dict(connexion.request.get_json())  # noqa: E501
    # ...
```

In this example, we want to mock the part of Connexion that checks if the data being sent is valid JSON. We want `connexion.request.is_json` to return `False`; we can do this like so:

```python
mock = mocker.patch("connexion.request")
mock.is_json = False
```

Since `is_json` is an attribute of the `connexion.request` module and not a function, we need to set it to `False` on another line. If `is_json` was a function that we wanted to return `False`, we could've done `mocker.patch("connexion.request.is_json")` instead.
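As a small sketch contrasting the two approaches (`return_value` is standard `unittest.mock` behaviour):

```python
# is_json is an attribute, so we patch the parent object
# and set the attribute on the mock it returns
mock = mocker.patch("connexion.request")
mock.is_json = False

# if is_json were a function, we could patch it directly
# and give it a return value
mocker.patch("connexion.request.is_json", return_value=False)
```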
You can run the tests locally by running the `pytest` command, or if you want to run the code in this article, you can do so as follows:

```bash
git clone https://gitlab.com/hmajid2301/articles.git
cd articles/27.\ Mocking\ in\ Flask\ with\ Pytest/source_code
virtualenv .venv
source .venv/bin/activate
pip install -r requirements.txt
pytest
```

That's it, the examples above cover most of the things you'll need to mock and test your Connexion web service.

> INFO: `pytest-flask` provides a whole bunch of other features that may be useful, you can find the full list [here](https://pytest-flask.readthedocs.io/en/latest/features.html)

## Appendix

- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/27.%20Mocking%20in%20Flask%20with%20Pytest/source_code)

diff --git a/27. Mocking in Flask with Pytest/images/cover.jpg b/27. Mocking in Flask with Pytest/images/cover.jpg deleted file mode 100644 index 8d585f3..0000000 Binary files a/27. Mocking in Flask with Pytest/images/cover.jpg and /dev/null differ

diff --git a/28. DinD Docker Testing/README.md b/28. DinD Docker Testing/README.md deleted file mode 100644 index fca7843..0000000 --- a/28. DinD Docker Testing/README.md +++ /dev/null @@ -1,223 +0,0 @@

---
title: "How to use Gitlab CI, Pytest and docker-compose together"
tags: ["docker", "python", "pytest", "docker-compose"]
license: "public-domain"
slug: "gitlab-ci-pytest-and-docker-compose"
canonical_url: "https://haseebmajid.dev/blog/gitlab-ci-pytest-and-docker-compose"
date: "2020-06-22"
published: true
cover_image: "images/cover.jpg"
---

On a recent project I was working on, I wanted to test my web service using docker-compose, where I could run and kill the Docker containers used by the application and see how my web application reacted to that. In this article, we will go over how you can start Docker containers using docker-compose from within Gitlab CI.

![main](images/main.png)

The diagram above is a visualisation of what we are trying to achieve. We want to spawn Docker containers using docker-compose from within our job. The spawning and destruction of these Docker containers will be done via our Python code. We can achieve this by using dind (Docker in Docker). I have written a previous article on this topic which you can read more about [here](/blog/dind-and-gitlab-ci/). This article assumes you are already somewhat familiar with Docker, docker-compose and Pytest.

This compose file will be used to start our Docker containers.

```yaml:title=docker-compose.yml
version: "3"

services:
  service1:
    container_name: container1
    image: docker
    command: ["tail", "-f", "/dev/null"]

  service2:
    container_name: container2
    image: docker
    command: ["tail", "-f", "/dev/null"]
```

## Gitlab CI

Next, we have our `.gitlab-ci.yml` file; this file is used to tell Gitlab CI what our CI jobs should do. In this example, we have one job called `test:integration` which will run our integration tests. But before we do that, we need a way to access the Docker daemon from within Gitlab CI, and this can be done by using the `docker:dind` service.

The docker:dind image automatically starts a Docker daemon via its entrypoint. We need to use this daemon to start/stop our Docker images within CI. The docker:dind (dind = Docker in Docker) image is almost identical to the docker image, the difference being the dind image starts a Docker daemon.
In this example, the job will use the docker image as the client and connect to the daemon running in this container.

```yaml:title=.gitlab-ci.yml file=./source_code/.gitlab-ci.yml

```

The job itself is very simple: it uses a container which already comes with `docker` and `docker-compose`, then we install the dependencies we need for our tests, and then it runs our tests.

## Tests

Now onto our actual tests file. It looks more complicated than it is:

```python:title=tests/test_integration.py
import docker as docker_py
import pytest

docker_client = docker_py.from_env()
docker_compose = None


@pytest.fixture(scope="session", autouse=True)
def docker(docker_services):
    global docker_compose
    docker_compose = docker_services


@pytest.fixture(scope="session", autouse=True)
def setup():
    docker_compose.start()
    yield
    docker_compose.shutdown()


def kill_container(container_name):
    container = get_container(container_name)
    container.kill()
    container.remove()


def get_container(container_name):
    containers = docker_client.containers.list()
    for container in containers:
        if container.name == container_name:
            return container


def start_container(service_name):
    docker_compose.start(service_name)


def test_two_containers():
    containers = docker_client.containers.list()
    assert len(containers) == 2


def test_kill_container1():
    kill_container("container1")
    containers = docker_client.containers.list()
    container1 = get_container("container1")
    assert len(containers) == 1
    assert not container1


def test_start_container1():
    start_container("service1")
    containers = docker_client.containers.list()
    container1 = get_container("container1")
    assert len(containers) == 2
    assert container1
```

The first part is the setup. We will use the Python Docker library, `docker`, which allows us to use Python code to control our Docker daemon. The first `@pytest.fixture`, called `docker`, allows us to use the `lovely-pytest-docker` library, to give us a `docker_compose` object which will allow us to again use Python code to control our `docker-compose.yml` file (start up/stop containers). The library also has some very nice features, such as waiting for containers or executing commands within containers. You can find the available functions [here](https://github.com/lovelysystems/lovely-pytest-docker/blob/master/src/lovely/pytest/docker/compose.py). Now we can access this object by using our `docker_compose` global variable.

The reason we have both a library for Docker and for docker-compose is that at the moment there is no way (as far as I'm aware) to use `lovely-pytest-docker` to stop a single container, so we need to use the standard `docker` library to do that. We also use the standard `docker` library to find out if a container is running.

Next, we have the `setup()` fixture, which we auto-use; this means the fixture is run before our tests (normally a fixture would only be called once it has been referred to within another function). In this fixture, we start both of the containers in our `docker-compose` file. This is the same as running `docker-compose up --build -d`. Next we `yield`; how exactly the `yield` command works I won't go over in this article, all you have to know is that everything after the yield will only be run after all of our tests. In this case we teardown our containers (stop them). This is the same as running `docker-compose down`.
```python:title=tests/test_integration.py
import docker as docker_py
import pytest

docker_client = docker_py.from_env()
docker_compose = None


@pytest.fixture(scope="session", autouse=True)
def docker(docker_services):
    global docker_compose
    docker_compose = docker_services


@pytest.fixture(scope="session", autouse=True)
def setup():
    docker_compose.start()
    yield
    docker_compose.shutdown()
```

The next part of our file contains some helper functions I've written. These functions can be used by multiple tests, which helps our test file stay DRY (don't repeat yourself). You may well want to make these part of a (helper) class that you expose as a fixture, if you wanted to structure them properly so they can be accessed by more than one file. Also, whilst we are on this topic, we may want to move our fixtures to `conftest.py`, again to allow other files to use the same fixtures we have defined here. But to keep this example simpler we will leave them here.

```python:title=tests/test_integration.py
def kill_container(container_name):
    container = get_container(container_name)
    container.kill()
    container.remove()


def get_container(container_name):
    containers = docker_client.containers.list()
    for container in containers:
        if container.name == container_name:
            return container


def start_container(service_name):
    docker_compose.start(service_name)
```

Finally, onto our actual tests. In reality, these tests are very boring and not super useful, but they should give you an idea of what you can do, such as killing the database container and then checking how your Python application responds. Then you can start the database container and again check how your Python application responds. You can read more about how you can test Python Flask applications with pytest [here](https://medium.com/@hmajid2301/testing-mocking-a-connexion-flask-application-with-pytest-bacfd07099eb).

So what do our tests do? Well, the first one checks the number of containers running is equal to 2. The next one kills `container1` and checks that only one container is running and that it's not `container1`. Our final test starts `container1` (via `service1`) and checks that it is running and that the number of containers running is back to 2. After this final test has completed, the `setup` fixture will run its `docker_compose.shutdown()` command.

```python:title=tests/test_integration.py
def test_two_containers():
    containers = docker_client.containers.list()
    assert len(containers) == 2


def test_kill_container1():
    kill_container("container1")
    containers = docker_client.containers.list()
    container1 = get_container("container1")
    assert len(containers) == 1
    assert not container1


def test_start_container1():
    start_container("service1")
    containers = docker_client.containers.list()
    container1 = get_container("container1")
    assert len(containers) == 2
    assert container1
```

That's it, we've managed to start/stop Docker containers from within Gitlab CI, using DinD. In a future article I will explain how you could run your tests within a Docker container you've started. Say you had three containers, `nginx`, `flask` and `postgres`, and you wanted to run your tests within the `flask` container. But for now that's it, thanks for reading!

## Appendix

- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/28.%20DinD%20Docker%20Testing/source_code)

diff --git a/28. DinD Docker Testing/images/cover.jpg b/28.
DinD Docker Testing/images/cover.jpg deleted file mode 100644 index 17d4dbf..0000000 Binary files a/28. DinD Docker Testing/images/cover.jpg and /dev/null differ diff --git a/28. DinD Docker Testing/images/main.png b/28. DinD Docker Testing/images/main.png deleted file mode 100644 index 752cd32..0000000 Binary files a/28. DinD Docker Testing/images/main.png and /dev/null differ diff --git a/28. DinD Docker Testing/images/main.xml b/28. DinD Docker Testing/images/main.xml deleted file mode 100644 index 80d37ee..0000000 --- a/28. DinD Docker Testing/images/main.xml +++ /dev/null @@ -1,2 +0,0 @@ - -7VlLc5s8FP01XsbDwzy8dOykXbSdzGTxtd10BLpgTTDyCCV2+uu/KxAYkNK0U5xkOskiSAeBpHPuS2bmr3fHD4Lst585hWLmOfQ48zczz/Oc0MOLQh4bxHWiZYPkglGNnYBb9hPagRq9ZxSqwUDJeSHZfgimvCwhlQOMCMEPw2EZL4az7kkOBnCbksJE/2NUbhs09qIT/hFYvm1ndkO9vx1pB+udVFtC+aEH+Vczfy04l01rd1xDodhreWmeu37ibrcwAaX8nQdW7Eu2urp1Pv3Mv39l/keSFcmFfssDKe71hvVi5WPLwGHLJNzuSar6B5R55l9u5a7AnotNUu0b3jN2BJzqMmNFseYFF/XjfpZlXpoiXknB76B3h4ZJGIR4R68BhITjk5tzO8rQ2IDvQIpHHNJamma9NbNQk344aRZFGtv29OpAou0k7159ohIbms0/YNZ/nlm0iL1qsl1thH1WFRkMrXBVsLxETPJ9D/1EEihueMUk4+puwqXkOxxQqBuXJL3LBb8v6VAG/MMh9WSrVjTHpqBez2YrpfKylSLCu05p6c8Z+lnGSgpinuKM3jUlkuBF4RVeC57z6oKU9CIR+B+blCeANwJX8ekuFj8+MInLvHC9eL4v8wm0952h9pGpPU5mat+Bk2u/eAGvgvAJr4qWieNM41WLIbO+xasWnsWrOnByZgODWQkVzrpipYRckNohxlwjAXJI75C1kpcwolhDRLtfioSBsPjljlGqprm06Vn7oFJvM5EeXVbUgkQWQVzHIsjZ9Ajfo5wZ5ZbRjw1P70BMGOSCkSsGrx7kIkP6vI7sSGXK/l0f9MPFQIml+9o+GBtCrHkpCStBuIYOQz6eyUKjpEMJxJk16YRpDEk2DcEjSw+CwOTXVsrFwZn4bQW2EWyGu7dPsBe/OYa959PI3/D6MtVSED1fLnUZfFAu+ecKDa55CqlAPDBF20odyzHJ/buhOvSCYblkOxm8aKh2f+Nk8F4vTVIvjYKc//qnQtc8vLyLfxbxw1GCswRi3+L25wvD78ekF1LeW7416c1jEq03Pa/wMm2VE6dgr3KSOFgEUyVVd5hUg6UtsFooDs9GsXkAMniFkq7U1wBVnBSkqljDExHShHsMw5HJr732NyXMPHSXur85aqXqzmOvcwOC4fZUFdQrZ4AaXxtGvOOq+b1I4Rf7Xdj16fEfWOhvMQEFkexhuAybJnqGG85wgae8Gg7lD8c/oDfL10+dlDVeNLajcFx0oTg5SONFtYl027ZZDXZPX1Wa4aePU/7V/w== \ No newline at end of file diff --git a/29. Storybooks, Gatsby and MDX/README.md b/29. Storybooks, Gatsby and MDX/README.md deleted file mode 100644 index 3461157..0000000 --- a/29. Storybooks, Gatsby and MDX/README.md +++ /dev/null @@ -1,280 +0,0 @@ ---- -title: "How to use Storybooks, Gatsby, Babel, Tailwind, Typescript together" -tags: ["gatsby", "documentation", "mdx", "storybook"] -license: "public-domain" -slug: "storybook-gatsby-babel-tailwind-typescript" -canonical_url: "https://haseebmajid.dev/blog/storybook-gatsby-babel-tailwind-typescript" -date: "2020-06-29" -published: true -cover_image: "images/cover.jpg" ---- - -Recently I started to re-design my website, I decided to use this as an opportunity to learn some new technologies -such as Gatsby, Tailwind. I also decided to try using Storybook. For this said project I used MDX to create my -Storybook stories. In this article, I will show you how you can create Storybooks stories, for a Gatsby project -with TailwindCSS, Typescript using MDX. - -You can find an example project using this [here](https://gitlab.com/hmajid2301/personal-site/-/tree/e415420744b2a8f49eddaf2d3058b23c70f46638/.storybook). -You can also find a [demo site](https://storybook.haseebmajid.dev/) for said project. - -> This article assumes you already familiar with Typescript, TailwindCSS and Gatsby. - -## Storybook - -> Storybook is an open source tool for developing UI components in isolation for React, Vue, and Angular. It makes building stunning UIs organized and efficient. - Storybook Website - -Storybook allows us to create and test (visually) components in isolation. 
It can be a great way both to document all of your components and to speed up development, as all you need to focus on is one component at a time. Storybook also has a ton of extra plugins/addons which can help you customise Storybook to your liking, one such example being checking for any accessibility issues your components may have.

### MDX

MDX is a combination of markdown mixed with JSX. It allows us to "execute" and "render" JSX code from within an MDX document. When used with Storybook, it means we get all of the flexibility of markdown, so we can use normal markdown syntax to document our component. We also get access to the MDX-flavored Component Story Format (CSF), which includes a collection of components called "Doc Blocks" that allow Storybook to translate MDX files into Storybook stories.

## Setup

OK, let's go over what we need to do. First, let's create our Gatsby site by using the `gatsby-cli` tool.

```bash
gatsby new gatsby-site
cd gatsby-site
```

### TailwindCSS

Now let's see how we add TailwindCSS to this site:

```bash
yarn add gatsby-plugin-typescript gatsby-plugin-postcss tailwindcss twin.macro postcss-preset-env
vim gatsby-config.js
vim postcss.config.js
vim tailwind.config.js
mkdir -p src/styles/
vim src/styles/globals.css
vim gatsby-browser.js
```

We need to update the `gatsby-config.js` file to add support for both Typescript and PostCSS. Tailwind is a PostCSS plugin, so we need to include PostCSS support in our Gatsby config. You can either replace the default `gatsby-config.js` or update the plugins.

```js:title=gatsby-config.js file=./source_code/gatsby-config.js

```

Next we add a `postcss.config.js` file, as per the Tailwind instructions found [here](https://tailwindcss.com/docs/installation#webpack-encore).

```js:title=postcss.config.js file=./source_code/postcss.config.js

```

Finally, we create a `tailwind.config.js` file. Here we can add new colours, overwrite existing colours and extend the configuration, such as adding new fonts (`Inter`). This file will get merged with the default config by Tailwind.

```js:title=tailwind.config.js file=./source_code/tailwind.config.js

```

Next, to add the Tailwind styles to our app we need to create a CSS file. You can call this file whatever you want; you just need to make sure it gets imported in such a place that it can be used by any of your components.

```css:title=src/styles/globals.css file=./source_code/src/styles/globals.css

```

One place we can import this is in the `gatsby-browser.js` file. It should be empty; add the import shown below. We will add Babel later on in the app, which will allow us to use imports in the style we've just described. In this example, we will use the `~` to mean `src`.

```js:title=gatsby-browser.js file=./source_code/gatsby-browser.js

```

### Typescript

Now let's add Typescript to our project:

```bash
yarn add --dev react-docgen-typescript react-docgen-typescript-loader ts-loader typescript
vim tsconfig.json
```

We will add some extra libraries that will be used by Storybook to parse our Typescript components. Like all Typescript projects, we need to include a `tsconfig.json` file. Note we add the `"paths"` so we can have cleaner imports; this will be used alongside Babel.
```json:title=tsconfig.json file=./source_code/tsconfig.json
{
  "compileOnSave": false,
  "compilerOptions": {
    "target": "es5",
    "module": "es6",
    "types": ["node"],
    "moduleResolution": "node",
    "esModuleInterop": true,
    "lib": ["dom", "es2015", "es2017"],
    "jsx": "react",
    "sourceMap": true,
    "strict": true,
    "resolveJsonModule": true,
    "noUnusedLocals": true,
    "noImplicitAny": true,
    "noUnusedParameters": true,
    "noFallthroughCasesInSwitch": true,
    "allowSyntheticDefaultImports": true,
    "downlevelIteration": true,
    "baseUrl": "./",
    "paths": {
      "~/*": ["src/*"]
    }
  },
  "include": ["./src/**/*"],
  "exclude": ["node_modules", "plugins"]
}
```

### Babel

```bash
yarn add --dev babel-plugin-module-resolver babel-preset-gatsby babel-preset-react-app @babel/compat-data \
@babel/core @babel/preset-env babel-loader

vim .babelrc
```

Gatsby automatically uses Babel; however, to customise Babel we need to create our own `.babelrc` file. You can read more about it [here](https://www.gatsbyjs.org/docs/babel/). The main reason we want to use it is to allow us to have cleaner imports, so we can use `~` instead of `src` in imports. That is, we can do `import "~/styles/globals.css";` instead of `import "../../../styles/globals.css";`.

> [You can read more about it here, I wrote a previous article on this topic.](/blog/better-imports-with-babel-tspath/)

```json:title=.babelrc file=./source_code/.babelrc
{
  "env": {},
  "plugins": [
    [
      "module-resolver",
      {
        "root": ["./src"],
        "alias": {
          "~": "./src"
        }
      }
    ]
  ],
  "presets": [
    [
      "babel-preset-gatsby",
      {
        "targets": {
          "browsers": [">0.25%", "not dead"]
        }
      }
    ]
  ]
}
```

### Storybook

We will use the latest versions of Storybook (v6) so we can access the latest features. We will go over how we can use these features in the next article.

First remove any lines in your `package.json` that start with `@storybook`. In my case, I removed `@storybook/addon-actions`, `@storybook/addon-links`, `@storybook/addons` and `@storybook/react`.

```bash
yarn add --dev @storybook/addon-docs@6.0.0-beta.20 @storybook/addon-essentials@6.0.0-beta.20 \
@storybook/addon-storysource@6.0.0-beta.20 @storybook/preset-typescript@1.2.0 \
@storybook/react@6.0.0-beta.20 core-js@2.6.5

npx -p @storybook/cli sb init -f
vim .storybook/main.js
vim .storybook/preview.js
vim preview-head.html
vim webpack.config.js
```

Next, we will update the `main.js` file. This will tell Storybook where to look for the stories, in this case in the `src` folder, any file called `x.stories.mdx` or `x.stories.tsx`.

```js:title=.storybook/main.js file=./source_code/.storybook/main.js

```

Next, let's update the preview file. This is typically where you can define global parameters and decorators. Again, we will see more of this in the next article.

```js:title=.storybook/preview.js file=./source_code/.storybook/preview.js

```

If we want to use any custom fonts, such as Google fonts, or other styles within our Tailwind config, we need to define them here.

```html:title=.storybook/preview-head.html file=./source_code/.storybook/preview-head.html

```

Storybook uses webpack, so if we want to add extra webpack options, we do that here. This allows us to use things like the Babel and PostCSS loaders.

```js:title=.storybook/webpack.config.js file=./source_code/.storybook/webpack.config.js

```

### Component

Finally, let's create a component that we will create a story for.
First, create a new folder at `src/components/Logo`. In that folder let's create the following files:

> Note: the comments in the Props will be the comments shown in our story later, if you use the correct addons for Storybook. We will go over this in the next article.

```tsx:title=src/components/Logo/Logo.tsx file=./source_code/src/components/Logo/Logo.tsx

```

This index file makes it easier to import the component from other files, as we don't have to do `import { Logo } from "src/components/Logo/Logo"`; we can use `import { Logo } from "src/components/Logo"`.

```tsx:title=src/components/Logo/index.ts file=./source_code/src/components/Logo/index.ts

```

#### Storybook

Now we have set everything up, how do we create a story for our component? First, create a new file at `src/components/Logo/Logo.stories.mdx`. You could keep this in another folder like storybooks/ or keep it in the same folder as your component, it's all personal preference. Some people will also have all unit tests in the same folder, `src/components/Logo/`.

```md:title=src/components/Logo/Logo.stories.mdx file=./source_code/src/components/Logo/Logo.stories.mdx

```

Add the following to the "scripts" section of your `package.json`. We need to pass it the `NODE_ENV=test` environment variable, else the Gatsby Babel plugin will complain.

```json:title=package.json
"storybook": "NODE_ENV=test start-storybook -p 6006",
"build-storybook": "NODE_ENV=test build-storybook"
```

Now we can run our Storybook by running the following command:

```bash
yarn storybook
```

That's it! We managed to get Storybook to work with Gatsby, where Gatsby is using Tailwind, Babel and Typescript.

## Appendix

- [Source Code](https://gitlab.com/hmajid2301/medium/tree/master/29.%20Storybooks,%20Gatsby%20and%20MDX/source_code)
- [Example Project](https://gitlab.com/hmajid2301/personal-site/-/tree/e415420744b2a8f49eddaf2d3058b23c70f46638/.storybook)
- [Example Storybook](https://storybook.haseebmajid.dev/)
- Cover image from, [World Vector Logo](https://worldvectorlogo.com/downloaded/storybook-1)

diff --git a/29. Storybooks, Gatsby and MDX/images/cover.jpg b/29. Storybooks, Gatsby and MDX/images/cover.jpg deleted file mode 100644 index f6e55f5..0000000 Binary files a/29. Storybooks, Gatsby and MDX/images/cover.jpg and /dev/null differ

diff --git a/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/README.md b/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/README.md deleted file mode 100644 index dab732b..0000000 --- a/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/README.md +++ /dev/null @@ -1,46 +0,0 @@

---
title: "Inheritance in SQLAlchemy (with Flask)"
tags: ["python", "orm", "sqlalchemy", "database"]
license: "public-domain"
slug: "inheritance-in-sqlalchemy"
canonical_url: "https://haseebmajid.dev/blog/inheritance-in-sqlalchemy/"
date: "2018-10-18"
published: true
cover_image: "images/cover.jpg"
---

SQLAlchemy is an object-relational mapper (ORM) made for the Python programming language. ORMs, in theory, allow programmers to abstract away SQL. In simple terms, they allow us to interact with a database using purely Python (objects/functions). I will be using the flask-SQLAlchemy extension for my examples.

Each table is referred to as a model; each model is simply a Python class, and each attribute of that class becomes a column in an SQL table. The database is made up of multiple models.
Just like normal Python classes, models can inherit from other models and share attributes with the parent model. This is very useful if you're going to have models that will store similar types of data.

```python:title=example/models.py file=./source_code/example/models.py

```

Taking a look at the _models.py_ module, we define an abstract class called Pets, which means SQLAlchemy will not create a table for that model. Our next two models, Cats and Dogs, inherit all the attributes from Pets. So the Cats and Dogs tables will each have columns called name, price and breed. The main advantage of this is that if you ever need to change the models, you only have to change it in one place; the advantage grows with the number of models that inherit from the base model.

```python:title=example/__init__.py file=./source_code/example/__init__.py

```

Above is an example `__init__.py` file to initialise the database and create all the database tables from the models. That's it folks, thanks for reading.

**Please** note there are other ways to implement inheritance with SQLAlchemy; I personally found this way to be the cleanest in terms of code readability.

---

## Appendix

- [Example source code]()
- [SQLAlchemy](https://www.sqlalchemy.org/)
- [flask-sqlalchemy](http://flask-sqlalchemy.pocoo.org/2.3/)

diff --git a/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/images/cover.jpg b/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/images/cover.jpg deleted file mode 100644 index a449956..0000000 Binary files a/3. Implementing Model Class Inheritance in SQLAlchemy (with Flask)/images/cover.jpg and /dev/null differ

diff --git a/30. Storybooks, Gatsby and MDX II/README.md b/30. Storybooks, Gatsby and MDX II/README.md deleted file mode 100644 index 333c504..0000000 --- a/30. Storybooks, Gatsby and MDX II/README.md +++ /dev/null @@ -1,306 +0,0 @@

---
title: "How to use Storybooks with MDX"
tags: ["gatsby", "documentation", "mdx", "storybook"]
license: "public-domain"
slug: "storybooks-with-mdx"
canonical_url: "https://haseebmajid.dev/blog/storybooks-with-mdx"
date: "2020-07-20"
published: true
cover_image: "images/controls.png"
---

This article (sort of) continues on from my previous article [How to use Storybooks, Gatsby, Babel, Tailwind, Typescript together](/blog/storybook-gatsby-babel-tailwind-typescript/). In this article, we will document our React components using Storybook with MDX.

You can find an example project using this [here](https://gitlab.com/hmajid2301/personal-site/-/tree/e415420744b2a8f49eddaf2d3058b23c70f46638/.storybook), and you can also find a [demo site](https://storybook.haseebmajid.dev/) for said project.

## Prerequisite

Just to make sure everyone's on the same page, let's follow the same steps to set up Storybook as we had in the last article. We will use the latest versions of Storybook (v6) so we can access the latest features, and we will go over how we can use these features in this article.

First, remove any lines in your `package.json` that start with `@storybook`. In my case, I removed `@storybook/addon-actions`, `@storybook/addon-links`, `@storybook/addons` and `@storybook/react`. The Typescript docgen modules will be used to parse our components and retrieve the props information for Storybook. We will see this a bit later with the controls addon.
-
-```bash
-yarn add --dev @storybook/addon-docs@6.0.0-beta.20 @storybook/addon-essentials@6.0.0-beta.20 \
-@storybook/addon-storysource@6.0.0-beta.20 @storybook/preset-typescript@1.2.0 \
-@storybook/react@6.0.0-beta.20 core-js@2.6.5 react-docgen-typescript@1.16.5 \
-react-docgen-typescript-loader@3.6.0
-
-npx -p @storybook/cli sb init -f
-vim .storybook/main.js
-vim .storybook/preview.js
-vim preview-head.html
-vim webpack.config.js
-```
-
-Next, we will update the `main.js` file. This tells Storybook where to look for the stories, in this case any file
-in the `src` folder called `x.stories.mdx` or `x.stories.tsx`.
-
-```js:title=.storybook/main.js file=./source_code/.storybook/main.js
-
-```
-
-Next, let's update the preview file. Here you can define global parameters and decorators. Again,
-we will see more of this in the next article.
-
-```js:title=.storybook/preview.js file=./source_code/.storybook/preview.js
-
-```
-
-If we want to use any custom fonts, such as Google Fonts, or other styles within our Tailwind setup, we need to
-define them here.
-
-```html:title=.storybook/preview-head.html file=./source_code/.storybook/preview-head.html
-
-```
-
-Storybook uses webpack, so if we want to add extra webpack options, we do that here. This allows us to use
-things like the Babel and PostCSS loaders.
-
-```js:title=.storybook/webpack.config.js file=./source_code/.storybook/webpack.config.js
-
-```
-
-## Add-ons
-
-Let's install the extra add-ons we will use with Storybook:
-
-- [a11y](https://github.com/storybookjs/storybook/tree/next/addons/a11y): Will list any accessibility issues with your component
-- [controls](https://github.com/storybookjs/storybook/tree/next/addons/controls): Is the new version of addon-knobs and will let you control the props you pass in
-- [viewport](https://github.com/storybookjs/storybook/tree/next/addons/viewport): Allows you to test how your component looks with different viewports, like an iPhone or a 13" laptop
-
-```bash
-yarn add --dev @storybook/addon-a11y@6.0.0-beta.20 @storybook/addon-controls@6.0.0-beta.15 \
- @storybook/addon-viewport@6.0.0-beta.20
-```
-
-Let's update `.storybook/main.js` so that Storybook uses the new addons we've just installed.
-
-```js:title=.storybook/main.js
-module.exports = {
-  stories: ["../src/**/*.stories.@(tsx|mdx)"],
-  addons: [
-    "@storybook/addon-a11y",
-    "@storybook/addon-controls",
-    "@storybook/addon-essentials",
-    "@storybook/preset-typescript",
-    "@storybook/addon-viewport",
-  ],
-};
-```
-
-Next, let's update `.storybook/preview.js` so it looks like:
-
-```js:title=.storybook/preview.js file=./source_code/.storybook/preview.js
-
-```
-
-The following snippet allows Storybook to show the default list of viewports. You can customise this list by following
-[this README here](https://github.com/storybookjs/storybook/tree/next/addons/viewport).
-
-```js:title=.storybook/preview.js
-viewport: {
-  viewports: INITIAL_VIEWPORTS,
-  defaultViewport: "responsive",
-}
-```
-
-This part of the `addParameters` sorts all of our Storybook stories into alphabetical order (in the left panel).
-
-```js:title=.storybook/preview.js
-options: {
-  panelPosition: "right",
-  storySort: (a, b) =>
-    a[1].kind === b[1].kind
-      ? 0
-      : a[1].id.localeCompare(b[1].id, undefined, { numeric: true }),
-},
-```
-
-The final part will add a panel which lists accessibility (a11y) issues with our components, such as a missing
-`alt` attribute on an `<img>` tag.
-
-```js
-addDecorator(withA11y);
-```
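-
-Putting these snippets together, the final `preview.js` might look roughly like this. This is only a sketch, assuming
-the imports shown below; the real file is in the linked source code.
-
-```js:title=.storybook/preview.js
-import { addDecorator, addParameters } from "@storybook/react";
-import { withA11y } from "@storybook/addon-a11y";
-import { INITIAL_VIEWPORTS } from "@storybook/addon-viewport";
-
-addParameters({
-  // Show the default list of viewports in the toolbar.
-  viewport: {
-    viewports: INITIAL_VIEWPORTS,
-    defaultViewport: "responsive",
-  },
-  // Sort the stories alphabetically in the left panel.
-  options: {
-    panelPosition: "right",
-    storySort: (a, b) =>
-      a[1].kind === b[1].kind
-        ? 0
-        : a[1].id.localeCompare(b[1].id, undefined, { numeric: true }),
-  },
-});
-
-// List a11y issues for every story.
-addDecorator(withA11y);
-```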
-
-## MDX
-
-MDX is markdown mixed with JSX; it lets us render "React" code within markdown files, whilst providing all the features
-of markdown as well, such as headers and hyperlinks. So I feel it is a perfect way to document your Storybooks.
-
-### Introduction MDX
-
-This file is an example of a story which will not render any of our components. It's simply there for documentation. This
-file will list the colour palette of the app. It will get the colours from our `tailwind.config.js` (mixed with the default one).
-So if you change the values in the tailwind config file, it'll also change them in this story.
-
-```md:title=src/introduction.stories.mdx file=./source_code/src/introduction.stories.mdx
-
-```
-
-The first part contains our imports so we can render the colours correctly. It also includes a meta tag where we list
-the title of the page; each of our stories should have one of these meta tags.
-
-```md:title=src/introduction.stories.mdx
-import {
-  Meta,
-  ColorPalette,
-  ColorItem,
-  Typeset,
-} from "@storybook/addon-docs/blocks";
-
-import { Themes } from "~/styles";
-import { theme } from "~/utils/tailwindConfig";
-
-<Meta title="Introduction" />
-```
-
-#### Utils
-
-We use this simple script to resolve the values of the tailwind config, so we can use them like so:
-`theme.colors.orange[500]`. We pass the script our current config; it then combines it with the default config and
-generates a final tailwind config. From this, we take the theme part, as this is all we need to retrieve our colours.
-So with a class name of, say, `text-blue-500`, to get the colour we would do `theme.colors.blue[500]`.
-
-```js:title=src/utils/tailwindConfig.js file=./source_code/src/utils/tailwindConfig.js
-
-```
-
-## Component
-
-Now let's take a look at how to create a Storybook story for one of our components.
-
-### Logo
-
-Say we have a component that looks like:
-
-```tsx:title=src/components/Logo/Logo.tsx file=./source_code/src/components/Logo/Logo.tsx
-
-```
-
-Since we are using TypeScript, we define our props as an interface. The comments above each item in the interface
-will be parsed by docgen and shown in our story (we will see this a bit later). They let the user know what
-each prop is, so they know how to adjust it.
-
-```tsx:title=src/components/Logo/Logo.tsx
-
-```
-
-The rest of the file is a normal React component, nothing special here. So how do we document this with the Storybook UI?
-
-### Logo.stories.mdx
-
-Now onto the real meat and potatoes of this article: let's document a React component. I think one of the coolest
-parts of the new Storybook is the controls add-on we installed earlier. To use it, we provide the props as an
-`args` object, then pass them onto the component in a story named `Basic` below.
-
-```md:title=src/components/Logo/Logo.stories.mdx file=./source_code/src/components/Logo/Logo.stories.mdx
-
-```
-
-The diagram below shows the `docs` tab in Storybook (the default is the `canvas` tab). As you can see in the diagrams
-below, we have a table which is generated from the props of our Logo component. We also have the comments shown as
-descriptions, any default values we assigned and the current value, which we set above in the stories file
-`Logo.stories.mdx`. We can then edit the values within the controls table and it will automatically re-render our
-component on the fly. You can see this in the second image.
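-
-Before looking at the screenshots, here is a rough sketch of what such a `Basic` story with `args` could look like.
-The prop names and values here are assumptions for illustration, not the exact contents of the story file:
-
-```md
-export const Template = (args) => <Logo {...args} />;
-
-<Story
-  name="Basic"
-  args={{
-    accent: "orange-500",
-    color: "blue-500",
-  }}
->
-  {Template.bind({})}
-</Story>
-```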
-
-![Storybook Controls](images/controls.png)
-
-![Storybook Controls2](images/controls2.png)
-
-Then we can define our regular stories like so:
-
-```md:title=src/components/Logo/Logo.stories.mdx
-## Accent
-
-You can adjust the accent (tags) color by passing the `accent` prop.
-
-<Canvas>
-  <Story name="Accent">
-    <Logo accent="orange-500" />
-  </Story>
-</Canvas>
-
-## Colour
-
-You can change the default colour of the logo by passing the `color` prop.
-
-<Canvas>
-  <Story name="Colour">
-    <Logo color="blue-500" />
-  </Story>
-</Canvas>
-```
-
-The example above will be rendered into something like the image below. You can see from this example that we are now
-mixing our React components with our normal markdown syntax. This provides a very flexible way to document our components.
-It allows others to interact with a component and look at an example of its use.
-
-![Storybook Component](images/components.png)
-
-That's about it documentation-wise! Taking a look at the other features we added, you can see the a11y issues in a panel
-(either on the bottom or on the right when you are in the `canvas` tab). You can see this in the diagram below.
-
-![A11y Panel](images/a11y.png)
-
-Then at the top of the screen, we can adjust the viewport and see how our component would look at different resolutions.
-The default list we have includes resolutions for devices like various iPhones, Google Pixel and Galaxy Sx. You can
-see this in the diagram below.
-
-![Viewport](images/viewport.png)
-
-## Run
-
-We can run the Storybook UI from our source code like so:
-
-```bash
-git clone https://gitlab.com/hmajid2301/articles.git
-cd articles/30.\ Storybooks,\ Gatsby\ and\ MDX\ II/source_code/
-yarn
-yarn storybook
-```
-
-With all of this set up, we can now focus on a component-first approach. I like to use atomic design alongside the
-Storybook UI to create all of the components of my website before I start work on the pages themselves. This allows
-me to work out exactly what each page will need, break each page down into its core parts and work on them one at a
-time. Anyway, that's it, thanks for reading!
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/medium/tree/master/30.%20Storybooks,%20Gatsby%20and%20MDX%20II/source_code)
-- [Example Project](https://gitlab.com/hmajid2301/personal-site/-/tree/e415420744b2a8f49eddaf2d3058b23c70f46638/.storybook)
-- [Example Storybook](https://storybook.haseebmajid.dev/)
diff --git a/30. Storybooks, Gatsby and MDX II/images/a11y.png b/30. Storybooks, Gatsby and MDX II/images/a11y.png
deleted file mode 100644
index 9857c71..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/a11y.png and /dev/null differ
diff --git a/30. Storybooks, Gatsby and MDX II/images/components.png b/30. Storybooks, Gatsby and MDX II/images/components.png
deleted file mode 100644
index ec0a793..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/components.png and /dev/null differ
diff --git a/30. Storybooks, Gatsby and MDX II/images/controls.png b/30. Storybooks, Gatsby and MDX II/images/controls.png
deleted file mode 100644
index 2ee0b63..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/controls.png and /dev/null differ
diff --git a/30. Storybooks, Gatsby and MDX II/images/controls2.png b/30. Storybooks, Gatsby and MDX II/images/controls2.png
deleted file mode 100644
index 6688390..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/controls2.png and /dev/null differ
diff --git a/30. Storybooks, Gatsby and MDX II/images/cover.jpg b/30. Storybooks, Gatsby and MDX II/images/cover.jpg
deleted file mode 100644
index 4817b12..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/cover.jpg and /dev/null differ
diff --git a/30. Storybooks, Gatsby and MDX II/images/viewport.png b/30. Storybooks, Gatsby and MDX II/images/viewport.png
deleted file mode 100644
index 8aca56a..0000000
Binary files a/30. Storybooks, Gatsby and MDX II/images/viewport.png and /dev/null differ
diff --git a/31. TailwindCSS and Variables/README.md b/31. TailwindCSS and Variables/README.md
deleted file mode 100644
index d599890..0000000
--- a/31. TailwindCSS and Variables/README.md
+++ /dev/null
@@ -1,162 +0,0 @@
----
-title: "TailwindCSS with CSS variables"
-tags: ["gatsby", "documentation", "tailwindcss", "css"]
-license: "public-domain"
-slug: "tailwindcss-with-css-variables"
-canonical_url: "https://haseebmajid.dev/blog/tailwindcss-with-css-variables"
-date: "2020-08-05"
-published: true
-cover_image: "images/cover.jpg"
----
-
-TailwindCSS allows us to use pre-defined classes instead of defining our own CSS styles. In this article, we will go over
-how we can use custom properties (sometimes referred to as CSS variables or cascading variables) with TailwindCSS.
-
-## Setup
-
-First, follow the installation guide found [here](https://tailwindcss.com/docs/installation/#2-add-tailwind-to-your-css).
-This will show you how you can add TailwindCSS to your current project. For step 2, I will assume you called your CSS
-file `global.css`. This is the file that contains `@tailwind base;` etc.
-
-## Global CSS
-
-First, we need to edit our TailwindCSS file so it looks something like this:
-
-```css:title=global.css
-@tailwind base;
-@tailwind components;
-@tailwind utilities;
-
-.root,
-#root,
-#docs-root {
-  --primary: #367ee9;
-  --secondary: #a0aec0;
-  --accent: #718096;
-  --background: #fff;
-  --main: #0d0106;
-  --header: #2d3748;
-}
-```
-
-I wrap my entire body in an element with class `root` or id `root`, so that any of my elements can access these
-variables later.
-
-### Gatsby
-
-If you're using Gatsby, you can add the following to your `gatsby-browser.js` file:
-
-```js:title=gatsby-browser.js
-export const wrapRootElement = ({ element }) => (
-  <div className="root overflow-hidden">{element}</div>
-);
-```
-
-This will wrap all of our pages in the `root` class and the `overflow-hidden` CSS class from TailwindCSS.
-
-## tailwind.config.js
-
-Now that we've defined some CSS variables, how can we use them with TailwindCSS? Simple: we update our Tailwind config
-file to reference the new CSS variables. Here we simply extend the config to add new colour values.
-
-```js:title=tailwind.config.js
-module.exports = {
-  theme: {
-    extend: {
-      colors: {
-        primary: "var(--primary)",
-        secondary: "var(--secondary)",
-        main: "var(--main)",
-        background: "var(--background)",
-        header: "var(--header)",
-        accent: "var(--accent)",
-      },
-    },
-  },
-};
-```
-
-The syntax is very similar to how we would use the variables in normal CSS, where it would look like:
-
-```css
-element {
-  background-color: var(--primary);
-}
-```
-
-## Logo
-
-Now how do we use our variables? Again, pretty straightforward, just like our normal Tailwind classes. Let's imagine
-we have a React component called `Logo.tsx`, defined like so:
-
-```tsx:title=Logo.tsx
-import React from "react";
-import tw from "twin.macro";
-
-export interface Props {
-  /** The size of the main text */
-  size?: string;
-}
-
-const Logo = ({ size = "2xl" }: Props) => (
-  <LogoContainer>
-    <Tag>{"<"}</Tag>
-    Haseeb
-    <Tag>{"/>"}</Tag>
-  </LogoContainer>
-);
-
-const LogoContainer = tw.div`cursor-pointer font-header tracking-wide text-2xl font-bold hover:text-primary`;
-
-const Tag = tw.span`text-accent`;
-
-export default Logo;
-```
-
-> INFO: I'm using the `twin.macro` library so we can use Tailwind with CSS-in-JS.
-
-To use our variables, we just use them like any other Tailwind class: `text-primary`. This will use the value we
-defined above, `#367ee9`. Now if we change the value in the `global.css` file, it will automatically change here as well.
-
-## Dark/Light Mode (Optional)
-
-This can easily be extended to add a dark/light mode. Add the following to the `global.css` file like so:
-
-```css:title=global.css
-.theme-light {
-  --background: #fff;
-  --main: #0d0106;
-  --header: #2d3748;
-}
-
-.theme-dark {
-  --background: #0e141b;
-  --main: #ffffff;
-  --header: #eaeaea;
-}
-```
-
-We can use a theme context, which I've written about
-[here](https://dev.to/hmajid2301/react-hooks-context-local-storage-3job), to get the current theme. We get the current
-theme, then use that to determine which class to set. This will then change the values of the variables. If the theme
-changes, the variable values will change dark -> light or light -> dark etc.
-
-```jsx
-const { theme } = useContext(ThemeContext);
-// ...
-return (
-  <div className={`theme-${theme}`}>
-    {/* ... */}
-  </div>
-);
-```
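-
-For reference, a minimal sketch of what such a theme context might look like is below. The names here are
-assumptions; see the linked article for a fuller version that also persists the theme to local storage.
-
-```jsx
-import React, { createContext, useState } from "react";
-
-export const ThemeContext = createContext({
-  theme: "light",
-  toggleTheme: () => {},
-});
-
-export const ThemeProvider = ({ children }) => {
-  const [theme, setTheme] = useState("light");
-  // Flip between the .theme-light and .theme-dark variable sets above.
-  const toggleTheme = () =>
-    setTheme((current) => (current === "light" ? "dark" : "light"));
-
-  return (
-    <ThemeContext.Provider value={{ theme, toggleTheme }}>
-      {children}
-    </ThemeContext.Provider>
-  );
-};
-```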
-
-That's it! We've learnt how to use CSS variables with TailwindCSS.
-
-## Appendix
-
-- [Example Project](https://gitlab.com/hmajid2301/personal-site/-/tree/fa01433eecec728427763e1e2b2cdd9710a9c197)
-- [Icons from FlatIcon](https://flaticon.com)
diff --git a/31. TailwindCSS and Variables/images/cover.jpg b/31. TailwindCSS and Variables/images/cover.jpg
deleted file mode 100644
index 1029451..0000000
Binary files a/31. TailwindCSS and Variables/images/cover.jpg and /dev/null differ
diff --git a/32. Gatsby and search/README.md b/32. Gatsby and search/README.md
deleted file mode 100644
index 3c28187..0000000
--- a/32. Gatsby and search/README.md
+++ /dev/null
@@ -1,390 +0,0 @@
----
-title: "How to add offline search to a Gatsby blog"
-tags: ["gatsby", "react", "javascript", "react-hooks"]
-license: "public-domain"
-slug: "offline-search-with-gatsby"
-canonical_url: "https://haseebmajid.dev/blog/offline-search-with-gatsby/"
-date: "2020-08-20"
-published: true
-cover_image: "images/cover.jpg"
----
-
-![Search Gif](images/main.gif)
-
-Let's take a look at how we can add offline local search 🔍 to a Gatsby blog. There are two main types of search we can
-use: offline search, like `elasticlunr`, and external API search engines, like `ElasticSearch`. The latter are typically
-more scalable but also more expensive.
-
-> You can find more info [here](https://www.gatsbyjs.com/docs/adding-search/#reach-skip-nav).
-
-In this article, I will show you how to add offline search to your Gatsby blog using `elasticlunr`. This means your
-website is indexed locally, which will increase the bundle size as this index needs to be loaded by the client, but
-with the scale and size of personal blogs (100s, not 1000s of blog posts) this shouldn't make a massive difference.
-We will also look at how we can add highlighting to our search results.
-
-> Note that you need to be careful with offline search because the entire search index has to be brought into the client, which can affect the bundle size significantly - GatsbyJS
-
-## Setup
-
-Before we add search to our Gatsby blog, let's set up a simple Gatsby site using the `Gatsby blog starter`. You can, of
-course, skip this step and add search to an existing site.
-
-```bash
-npm -g install gatsby-cli
-gatsby new my-blog-starter https://github.com/gatsbyjs/gatsby-starter-blog
-```
-
-## Markdown
-
-The search component will use the data within our markdown files and index it, so that the client can search with this
-data later. In this example I will assume your markdown files look something like the example below:
-
-```md:title=content/blog/hello-world/index.md
----
-title: Hello World
-date: "2015-05-01"
-tags: ["food", "duck"]
----
-
-This is my first post on my new fake blog! How exciting!
-
-I'm sure I'll write a lot more interesting things in the future.
-
-...
-```
-
-The top part of a markdown file between the `---` is known as the front matter; we can often access this data as
-key/value pairs (like a Python dictionary).
-
-> Note: In this example, we will be using the [`MarkdownRemark`](https://www.gatsbyjs.com/plugins/gatsby-transformer-remark/?=markdown) plugin, but you can use search for anything, just adjust the examples below as required.
-
-## Search
-
-Now onto adding search to our site.
-
-### Elasticlunr
-
-We will use `elasticlunr` for our offline/local search. Luckily there is a Gatsby plugin we can use, which makes
-integrating it into our site very easy.
-First, install the plugin and the library:
-`yarn add @gatsby-contrib/gatsby-plugin-elasticlunr-search elasticlunr`.
-
-Then open your `gatsby-config.js` and add the following:
-
-```js:title=gatsby-config.js
-{
-  resolve: `@gatsby-contrib/gatsby-plugin-elasticlunr-search`,
-  options: {
-    fields: [`title`, `tags`],
-    resolvers: {
-      MarkdownRemark: {
-        title: (node) => node.frontmatter.title,
-        tags: (node) => node.frontmatter.tags,
-        path: (node) => node.frontmatter.slug,
-      },
-    },
-  },
-},
-```
-
-Here we are telling the search plugin which (GraphQL) fields to index. In this example, we want to index
-the title and tags. We could also index the content if we wanted, by adding the line
-`html: (node) => node.internal.content,` after path and adding `html` to the `fields` array. You can index
-any field available in GraphQL, provided by the `MarkdownRemark` plugin (or whichever plugin you are using).
-
-#### GraphQL (Optional)
-
-A slight aside here: if you wish to explore and take a look at the data available/provided by the `MarkdownRemark` plugin, you can start your
-Gatsby site, typically using `yarn develop`, and once the command has finished doing its magic 🎉, visit this page:
-`http://localhost:8000/___graphql`. This provides us with our GraphQL playground (an IDE) and is a great way to
-understand what is going on with our GraphQL queries.
-
-For example, type the following into the main field and press the play button at the top:
-
-```graphql
-query MyQuery {
-  allMarkdownRemark(
-    sort: { order: DESC, fields: [frontmatter___date] }
-    filter: { frontmatter: { title: { ne: "Uses" } } }
-  ) {
-    edges {
-      node {
-        id
-        excerpt(pruneLength: 100)
-        frontmatter {
-          date(formatString: "YYYY-MM-DD")
-          title
-          tags
-        }
-      }
-    }
-  }
-}
-```
-
-You should see something like this (in this example):
-
-```json
-{
-  "data": {
-    "allMarkdownRemark": {
-      "edges": [
-        {
-          "node": {
-            "id": "1a7e02d4-620a-5268-8149-2d8cbf26a20a",
-            "excerpt": "Far far away, behind the word mountains, far from the countries Vokalia and\nConsonantia, there live…",
-            "frontmatter": {
-              "date": "2015-05-28",
-              "title": "New Beginnings",
-              "tags": ["deer", "horse"]
-            }
-          }
-        },
-        {
-          "node": {
-            "id": "fe83f167-8f86-51fe-a981-c5189625e270",
-            "excerpt": "Wow! I love blogging so much already. Did you know that “despite its name, salted duck eggs can also…",
-            "frontmatter": {
-              "date": "2015-05-06",
-              "title": "My Second Post!",
-              "tags": ["food", "blog"]
-            }
-          }
-        },
-        {
-          "node": {
-            "id": "4e865c18-e797-5da8-a46d-902949a00c7f",
-            "excerpt": "This is my first post on my new fake blog! How exciting! I’m sure I’ll write a lot more interesting…",
-            "frontmatter": {
-              "date": "2015-05-01",
-              "title": "Hello World",
-              "tags": ["food", "duck"]
-            }
-          }
-        }
-      ]
-    }
-  },
-  "extensions": {}
-}
-```
-
-As you can see, this is a very similar structure to the one we described in our search config above. If you play
-around with the fields on the left-hand side of the IDE, you should be able to get a better understanding of all
-the fields you can index.
-
-## Logic
-
-Now we will add the relevant JSX components we need for search to our site.
-
-### TailwindCSS (Optional)
-
-You can follow this [tutorial](https://www.gatsbyjs.com/docs/tailwind-css/) to add TailwindCSS.
-We will add TailwindCSS to this Gatsby project and we will use it to style our components.
-First, install the following dependencies:
-
-```bash
-yarn add tailwindcss gatsby-plugin-postcss @emotion/core @emotion/styled gatsby-plugin-emotion
-yarn add -D twin.macro # twin.macro allows us to use css-in-js a bit like emotion/styled-components except for tailwind
-npx tailwindcss init
-```
-
-Then add the following to your `gatsby-config.js`:
-
-```js:title=gatsby-config.js
-plugins: [`gatsby-plugin-postcss`, `gatsby-plugin-emotion`],
-```
-
-Then create a new file:
-
-```bash
-vim main.css
-#...
-
-# Contents of the file
-@tailwind base;
-@tailwind components;
-@tailwind utilities;
-
-# ...
-```
-
-Then add the following line to `gatsby-browser.js`:
-
-```js:title=gatsby-browser.js
-import "./src/main.css";
-```
-
-Finally, create a new file `postcss.config.js` and add the following:
-
-```js:title=postcss.config.js
-module.exports = () => ({
-  plugins: [require("tailwindcss")],
-});
-```
-
-### Components
-
-We will create all of the components in the `src/components` folder.
-First, let's create the `Input.jsx` component for the text input, which looks something like this:
-
-```jsx:title=src/components/Input.jsx file=./source_code/src/components/Input.jsx
-
-```
-
-Since we are using `twin.macro` we can use syntax like ` const TextInput = tw.input`` `. Hence we can use the name
-`TextInput` in our component, where `TextInput` is just an input with some TailwindCSS styles we've defined.
-
-Note we added a React forward ref so that we can autofocus on this input later on; when the input is shown to the
-client, we are already focused on it.
-
-Next, let's create the `SearchItem.jsx` component. This is a single search result.
-In this case, we will only show the title and a read more button. Note we are using the
-`react-highlight-words` library to highlight words from the search query.
-
-The prop `query` is the search query the user typed in. In the `Highlighter` component the `searchWords` prop
-is given a list of words to highlight, hence we need to split the string into an array. For example, if we
-had the search query `"A blog post"`, it would become `["A", "blog", "post"]`, and will highlight any of
-those words in the title (A, blog or post).
-
-> Note: Again you can extend this to include perhaps a description of the blog post (first 160 characters) etc. We are just keeping it simple for this example.
-
-```jsx:title=src/components/SearchItem.jsx file=./source_code/src/components/SearchItem.jsx
-
-```
-
-Next, we have a component we will call `SearchItems.jsx`, which will be a list of the search results and looks
-something like:
-
-```jsx:title=src/components/SearchItems.jsx file=./source_code/src/components/SearchItems.jsx
-
-```
-
-Now onto the main component, the component that will actually work out the results to show
-to the client. We will call this component `Search.jsx`:
-
-```jsx:title=src/components/Search.jsx file=./source_code/src/components/Search.jsx
-
-```
-
-Let's break this down:
-
-```jsx:title=src/components/Search.jsx
-const index = Index.load(searchIndex);
-const [query, setQuery] = useState("");
-const [results, setResults] = useState([]);
-const searchInput = React.createRef();
-```
-
-The first part stores some variables we need later on, like the current query the client has
-typed into the search, the current search results and a reference to the search input so we can focus on it.
-
-```jsx:title=src/components/Search.jsx
-useEffect(() => {
-  searchResults("blog");
-  searchInput.current.focus();
-}, []);
-```
-
-Next, the `useEffect` hook is called as soon as the component mounts. As soon as that happens, we focus on the
-`searchInput` component (`searchInput.current.focus()`) and pre-fill the search with any blog post that has
-`"blog"` in its title/tags (`searchResults("blog")`).
-
-```jsx:title=src/components/Search.jsx
-function searchResults(searchQuery) {
-  const res = index.search(searchQuery, { expand: true }).map(({ ref }) => {
-    return index.documentStore.getDoc(ref);
-  });
-  setResults(res);
-}
-```
-
-This is the actual function which gets our search results. It makes the query with `elasticlunr` and
-stores the results in our state hook variable `results` using the set function `setResults(res)`. The first part
-of the function does most of the heavy lifting, returning a list of possible results to show to the client.
-
-```jsx:title=src/components/Search.jsx
-<TextInput
-  ref={searchInput}
-  onChange={(event) => {
-    const searchQuery = event.target.value;
-    setQuery(searchQuery);
-    searchResults(searchQuery);
-  }}
-  placeholder="Search"
-  value={query}
-/>
-```
-
-> Note: `event.target.value` is the current value in the text input.
-
-Finally, taking a look at the input, you can see the `ref={searchInput}` we defined above
-being assigned here, so we can focus on this component. Next, on any change, i.e. a keypress, we call the `onChange`
-function, where we update the query with the new search query (`setQuery(searchQuery)`), again using a state hook.
-Then we call the `searchResults(searchQuery)` function, which will update the results.
-
-This is then shown to the client using the SearchItems component defined above, like so:
-`<SearchItems query={query} results={results} />`.
-
-Finally, we have `SearchBar.jsx`; this is the component we will use to tie everything together.
-
-```jsx:title=src/components/SearchBar.jsx file=./source_code/src/components/SearchBar.jsx
-
-```
-
-Normally I would use a search icon which, when pressed, would show the search overlay. However, to keep things simple, we
-will just use the text "Search", which when clicked on will show our search overlay to the client.
-
-```jsx:title=src/components/SearchBar.jsx
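-{/* Clicking the text toggles the search overlay on/off */}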

setShowSearch(!showSearch)} -> - Search -

-```
-
-The main job of this component is to toggle the search on/off. To do this we use a state hook like so:
-
-```jsx:title=src/components/SearchBar.jsx
-const [showSearch, setShowSearch] = useState(false);
-
-function hideSearch(event) {
-  if (event.target.placeholder !== "Search") {
-    setShowSearch(false);
-  }
-}
-```
-
-We also have a function to hide the search if the user clicks on anything outside of the search, hence the if
-statement checking `event.target.placeholder`.
-
-```jsx:title=src/components/SearchBar.jsx
-<StaticQuery
-  query={graphql`
-    query SearchIndexQuery {
-      siteSearchIndex {
-        index
-      }
-    }
-  `}
-  render={(data) => (
-    <div onClick={hideSearch}>
-      {showSearch && <Search searchIndex={data.siteSearchIndex.index} />}
-    </div>
-  )}
-/>
-```
-
-The next interesting part is the GraphQL query to get the search index from `elasticlunr`. We then pass this as the
-`searchIndex` prop to the `Search` component we created above. This is the same search index we search against with the
-current user query. We also use conditional rendering: we only show the `Search` component when `showSearch` is true.
-
-And that's it! We successfully added search to our `Gatsby` blog alongside search highlighting. Thanks for reading.
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/32.%20Gatsby,%20and%20search/source_code)
-- [Example Project](https://gitlab.com/hmajid2301/personal-site/-/blob/d5f413310d4404fc6a1761a592f5e10840fc30df/src/components/organisms/SearchBar/SearchBar.tsx)
-- [Cover Photo by Markus Winkler](https://unsplash.com/@markuswinkler?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
diff --git a/32. Gatsby and search/images/cover.jpg b/32. Gatsby and search/images/cover.jpg
deleted file mode 100644
index c7c9ae5..0000000
Binary files a/32. Gatsby and search/images/cover.jpg and /dev/null differ
diff --git a/32. Gatsby and search/images/main.gif b/32. Gatsby and search/images/main.gif
deleted file mode 100644
index ca35e82..0000000
Binary files a/32. Gatsby and search/images/main.gif and /dev/null differ
diff --git a/33. Gitlab Auto MR/README.md b/33. Gitlab Auto MR/README.md
deleted file mode 100644
index 0b914f7..0000000
--- a/33. Gitlab Auto MR/README.md
+++ /dev/null
@@ -1,133 +0,0 @@
----
-title: "How to auto create MRs in Gitlab"
-tags: ["gitlab", "python", "ci", "showdev"]
-license: "public-domain"
-slug: "gitlab-auto-mr-with-gitlab"
-canonical_url: "https://haseebmajid.dev/blog/gitlab-auto-mr-with-gitlab/"
-date: "2020-08-31"
-published: true
-cover_image: "images/cover.png"
-series: gitlab-workflow
---- 
-
-In this article, we will go over how we can use the `gitlab-auto-mr` CLI script I wrote to help automate your Gitlab
-workflow. This is a very simple script you can use with Gitlab which will auto-create merge requests (MRs) every time you
-create a new branch on a project in Gitlab.
-
-## (Optional) Git Feature Branches
-
-> Feel free to skip this section if you are already familiar with feature branches; skip to the `Gitlab Auto MR` section
-
-Before I introduce what the script does and how we use it, let's go over why you might need to use it.
-Say you're working on a project with multiple other people and you want to make sure you keep your master/production
-branch clean. One way to do that is for everyone to use "feature" branches. So for every feature being added to the project
-you create a new short-lived branch off of the master branch. Then typically one developer will work on a feature
-and make a merge request when ready to get the branch merged into the main master branch, to integrate their work
-with everyone else's.
-
-![Gitlab Workflow](https://docs.gitlab.com/ee/ci/introduction/img/gitlab_workflow_example_11_9.png)
-
-This means your changes can get reviewed before they are merged into the master branch. This helps keep the master branch
-"cleaner"; it should have fewer bugs etc. One good way to visualise this is in the diagram above. Let's say we want
-to add reaction buttons to a blog. We would create a new branch called something like `feature/add-reaction-buttons`.
-Then we would commit our changes on the branch.
-
-Usually, this is coupled with a CI pipeline which will auto-run against our code. It may run jobs like unit tests, linting
-and static code analysis. This acts as a kind of first-step review; we need to make sure it's passing (and is green) before
-people even start to review our code, as seen in the above diagram. Once the CI pipeline is passing, the merge request can
-be reviewed. After the merge request has been approved it can be merged into the master branch, and we will have added the new
-feature to our codebase, which will eventually get deployed to production. We can also then delete our old branch.
-You can read more about [feature branches here](https://www.atlassian.com/git/tutorials/comparing-workflows/feature-branch-workflow).
-
-> Note there are many other git flows. This project can be used in conjunction with any of them which use some form of feature branching.
-
-### CI/CD
-
-> Feel free to skip this section if you are already familiar with CI/CD, Git and Gitlab CI.
-
-Continuous Integration (CI) is typically defined as making sure all code being integrated into a codebase works.
-It usually involves running a set of jobs referred to as a CI pipeline. Some jobs we may run include linting our
-code and running unit tests. This is usually done automatically using a tool such as Travis, Circle or even Gitlab.
-
-One use case for this is when others are adding new features to our codebase and we want to check it
-still works. We can create a CI pipeline that will run unit tests against the new code automatically when a pull request
-(GitHub) or merge request (Gitlab) is opened. This saves us a lot of time, rather than having to copy the new
-features/code and then run the tests ourselves on our machine.
-
-Continuous Delivery (CD) is typically an extension of CI, making sure that you can release new changes quickly.
-This means automating your release process, such that you can deploy your application at any point in time just
-by clicking a button.
-
-Continuous Deployment takes CD one step further by requiring no human intervention in deploying our application.
-You can read more about [this here](https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment)
-
-## Gitlab Auto MR
-
-I created a simple CLI script, which I run on Gitlab CI, that will auto-create merge requests every time you create a new branch.
-So you could have something like this:
-
-```yml:title=.gitlab-ci.yml
-stages:
-  - pre
-
-create:merge-request:
-  image: registry.gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr
-  stage: pre
-  except:
-    - master
-    - tags
-  script:
-    - gitlab_auto_mr -t master -c WIP -d .gitlab/merge_request_templates/merge_request.md -r -s --use-issue-name
-```
-
-To use this tool you will need to create a personal access token so that the tool can access the GitLab API on your behalf.
-You can find out how to do that [here](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).
-In the example above I have set the private token in my CI/CD variables as `GITLAB_PRIVATE_TOKEN`, but you can also pass the
-`--private-token` argument instead.
-
-![CI/CD Variables in Gitlab CI](images/ci-vars.gif)
-
-If you use the `registry.gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr` Docker image, the CLI tool already
-comes preinstalled with all of its dependencies. Otherwise, you can install it manually using
-`pip install gitlab-auto-mr`.
-
-This particular job will only run on branches not called master, because of the `except` clause
-we defined above. Hence it should only run when we create a new feature branch. It will create a new merge request
-if one does not already exist between this new branch and the master branch. The target branch of the merge request
-is set by the `-t` option, in this example `-t master` (i.e. source branch -> target branch).
-
-Next, we prefix all of our new merge requests with WIP, set by the `-c WIP` argument, where WIP typically means
-`Work in progress`, so other devs know not to review our MR as it's not ready. Next, the tool also allows you to
-specify a merge request template to use, by passing `-d` with the path to a file. In this example
-`.gitlab/merge_request_templates/merge_request.md` looks like this:
-
-```md:title=.gitlab/merge_request_templates/merge_request.md
-# Description
-
-
-## Type
-
-- [ ] Bug Fix
-- [ ] Improvement
-- [ ] New Feature
-
-Fixes #
-```
-
-The other arguments do the following:
-
-- `-r`: Will remove the source branch (our feature branch) after the MR has been merged
-- `-s`: Will squash our commits into a single commit, so each feature branch will appear as a single commit on the master branch
-- `--use-issue-name`: If set, and you name your branch something like `feature/#6`, it will search for the issue with id `6` and pull information from there, like labels and milestones etc. It will then assign those to this MR; an example of this can be seen with [this MR here](https://gitlab.com/hmajid2301/stegappasaurus/-/merge_requests/196) where the issue [`#211` is here](https://gitlab.com/hmajid2301/stegappasaurus/-/issues/211)
-
-You can get a full list of the options available with this tool
-[here at its project page](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr).
-
-## Appendix
-
-- This project was originally inspired by [this other project](https://gitlab.com/tmaier/gitlab-auto-merge-request) and this [post](https://rpadovani.com/open-mr-gitlab-ci)
-- [Project Page](https://gitlab.com/gitlab-automation-toolkit/gitlab-auto-mr)
-- [An example project](https://gitlab.com/hmajid2301/stegappasaurus/-/blob/84d48e80d77a04870b748d2ac62e2cb698f17db8/.gitlab-ci.yml)
-- [Example MRs created with this tool](https://gitlab.com/hmajid2301/stegappasaurus/-/merge_requests/176)
-- [Example pipeline running this tool](https://gitlab.com/hmajid2301/stegappasaurus/-/pipelines/120105476)
diff --git a/33. Gitlab Auto MR/images/ci-vars.gif b/33. Gitlab Auto MR/images/ci-vars.gif
deleted file mode 100644
index b214918..0000000
Binary files a/33. Gitlab Auto MR/images/ci-vars.gif and /dev/null differ
diff --git a/33. Gitlab Auto MR/images/cover.png b/33. Gitlab Auto MR/images/cover.png
deleted file mode 100644
index ffd0de3..0000000
Binary files a/33. Gitlab Auto MR/images/cover.png and /dev/null differ
diff --git a/34. Gatsby edit button/README.md b/34. Gatsby edit button/README.md
deleted file mode 100644
index c95e00f..0000000
--- a/34. Gatsby edit button/README.md
+++ /dev/null
@@ -1,257 +0,0 @@
----
-title: "Add an 'edit post' button to your Gatsby blog"
-tags: ["gatsby", "react", "git", "javascript"]
-license: "public-domain"
-slug: "gatsby-edit-button"
-canonical_url: "https://haseebmajid.dev/blog/gatsby-edit-button/"
-date: "2020-09-07"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this article, we will look at how we can add an "edit post" button to your Gatsby blog. When this button is clicked, it will take the user to the markdown file on GitHub/GitLab that was used to generate the blog post they are currently viewing.
-
-`youtube: https://www.youtube.com/watch?v=rALo_BzGKs8`
-
-## Setup
-
-Before we add the edit button to a Gatsby blog, let's set up a simple Gatsby site using the `Gatsby blog starter`.
-You can skip this step and add the button to an existing site.
-
-```bash
-npm -g install gatsby-cli
-gatsby new my-blog-starter https://github.com/gatsbyjs/gatsby-starter-blog
-```
-
-If you don't use the starter above, you will need to make sure you have the `gatsby-source-filesystem` plugin installed
-to import our markdown files. Your `gatsby-config.js` should look like this:
-
-```js:title=gatsby-config.js
-  {
-    resolve: `gatsby-source-filesystem`,
-    options: {
-      path: `${__dirname}/content/blog`,
-      name: `blog`,
-    },
-  },
-```
-
-Then make sure you also have the `gatsby-transformer-remark` plugin installed;
-it should be in your `gatsby-config.js` like so:
-
-```js:title=gatsby-config.js
-  {
-    resolve: `gatsby-transformer-remark`,
-    options: {
-      // ...
-    },
-  },
-```
-
-## (Optional) Blog Post
-
-Let's assume our `gatsby-node.js` file looks like this:
-
-```js:title=gatsby-node.js
-exports.createPages = async ({ graphql, actions }) => {
-  const { createPage } = actions;
-
-  const blogPost = path.resolve(`./src/templates/blog-post.js`);
-  const result = await graphql(
-    `
-      {
-        allMarkdownRemark(
-          sort: { fields: [frontmatter___date], order: DESC }
-          limit: 1000
-        ) {
-          edges {
-            node {
-              fields {
-                slug
-              }
-              frontmatter {
-                title
-              }
-            }
-          }
-        }
-      }
-    `
-  );
-
-  if (result.errors) {
-    throw result.errors;
-  }
-
-  // Create blog posts pages.
-  const posts = result.data.allMarkdownRemark.edges;
-
-  posts.forEach((post, index) => {
-    const previous = index === posts.length - 1 ? null : posts[index + 1].node;
-    const next = index === 0 ? null : posts[index - 1].node;
-
-    createPage({
-      path: post.node.fields.slug,
-      component: blogPost,
-      context: {
-        slug: post.node.fields.slug,
-        previous,
-        next,
-      },
-    });
-  });
-};
-```
-
-This is how we create a new blog post for each of our markdown files. You can read more about how
-markdown works with [Gatsby here](https://www.gatsbyjs.com/docs/adding-markdown-pages/).
-
-Also, let's use a simple template file for our blog posts. So our `blog-post.js` looks like this:
-
-```jsx:title=src/templates/blog-post.js
-import React from "react";
-import { Link, graphql } from "gatsby";
-
-// ...
-
-const BlogPostTemplate = ({ data, pageContext, location }) => {
-  const post = data.markdownRemark;
-  const siteTitle = data.site.siteMetadata.title;
-  const { previous, next } = pageContext;
-
-  return (
-    <Layout location={location} title={siteTitle}>
-      {/* ... */}
-    </Layout>
-  );
-};
-
-export default BlogPostTemplate;
-
-export const pageQuery = graphql`
-  query BlogPostBySlug($slug: String!)
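-  # $slug comes from the context we passed to createPage in gatsby-node.js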
-  {
-    site {
-      siteMetadata {
-        title
-      }
-    }
-    markdownRemark(fields: { slug: { eq: $slug } }) {
-      id
-      excerpt(pruneLength: 160)
-      html
-      frontmatter {
-        title
-        date(formatString: "MMMM DD, YYYY")
-        description
-      }
-    }
-  }
-`;
-```
-
-## Edit Button
-
-OK, now we need two pieces of information: the location of our project on git where our
-markdown files are stored, in this example `https://gitlab.com/hmajid2301/articles`, and the path to the markdown
-file in the git repo. We can then combine these two pieces of information to get a URL to the markdown file on git.
-
-First, we need a way to get the file path of the markdown file; we can do this using our GraphQL query.
-The same query we use to get other information such as title and contents. All we need to add is `fileAbsolutePath`
-to the `markdownRemark` part of our query. This will return, as the name suggests, the absolute path to the file,
-i.e. `/home/haseeb/projects/personal/articles/34. Gatsby edit button/source_code/content/blog/hello-world/index.md`.
-
-```js{11}:title=src/templates/blog-post.js
-export const pageQuery = graphql`
-  query BlogPostBySlug($slug: String!) {
-    site {
-      siteMetadata {
-        title
-      }
-    }
-    markdownRemark(fields: { slug: { eq: $slug } }) {
-      id
-      excerpt(pruneLength: 160)
-      html
-      fileAbsolutePath
-      frontmatter {
-        title
-        date(formatString: "MMMM DD, YYYY")
-        description
-      }
-    }
-  }
-`;
-```
-
-Now we need a way to use this file path to link to this page on Gitlab. Since I know that
-`articles/` is a git repo, we want to remove `/home/haseeb/projects/personal/articles`
-from `/home/haseeb/projects/personal/articles/34. Gatsby edit button/source_code/content/blog/hello-world/index.md`.
-
-Then, assuming the git URL of our repo where the markdown files exist is `https://gitlab.com/hmajid2301/articles`, the path to our markdown file on git could be something like
-`https://gitlab.com/hmajid2301/articles/-/blob/master/34. Gatsby edit button/source_code/content/blog/hello-world/index.md`.
-
-So let's add logic to our `blog-post.js` file to generate this git URL. After we have
-updated our GraphQL query, we can add some logic to our code to work out the git URL path.
-Let's create a new function called `getGitMarkdownUrl()`.
-
-```jsx:title=src/templates/blog-post.js
-const BlogPostTemplate = ({ data, pageContext, location }) => {
-  const post = data.markdownRemark;
-  const siteTitle = data.site.siteMetadata.title;
-  const { previous, next } = pageContext;
-
-  function getGitMarkdownUrl() {
-    const pathConst = "/articles/";
-    const gitURL = "https://gitlab.com/hmajid2301/articles";
-    const sliceIndex =
-      post.fileAbsolutePath.indexOf(pathConst) + pathConst.length;
-    const markdownFileGitPath = post.fileAbsolutePath.slice(sliceIndex);
-    const blogPostOnGit = `${gitURL}/-/blob/master/${markdownFileGitPath}`;
-    return blogPostOnGit;
-  }
-
-  const gitMarkdownUrl = getGitMarkdownUrl();
-
-  // ....
-};
-```
-
-> Warn: Don't forget to change the `gitURL` variable in your project!
-
-The following two lines remove everything before `/articles/`, so we get
-`34. Gatsby edit button/source_code/content/blog/hello-world/index.md`.
-
-```js
-const sliceIndex = post.fileAbsolutePath.indexOf(pathConst) + pathConst.length;
-const markdownFileGitPath = post.fileAbsolutePath.slice(sliceIndex);
-```
-
-Then we combine this with our git URL to end up with the path to the markdown file,
-`https://gitlab.com/hmajid2301/articles/-/blob/master/34. Gatsby edit button/source_code/content/blog/hello-world/index.md`.
-
-```js
-const blogPostOnGit = `${gitURL}/-/blob/master/${markdownFileGitPath}`;
-```
-
-Finally, all we need to do is add the edit button and have it link to this `gitMarkdownUrl`. You can do something like
-this below:
-
-```jsx
-<a href={gitMarkdownUrl}>
-  EDIT THIS POST
-</a>
-```
-
-If you want to make it look fancier, you can use `react-icons` to get a proper edit icon (as shown in the gif above).
-
-That's it! Now when the user clicks on the edit button, it'll take them to the git repo where
-the markdown files exist. They can then fork the project, make their edit and open a new merge or pull request
-(GitLab vs GitHub) to add in the changes they want (if approved by you).
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/34.%20Gatsby%20edit%20button/source_code)
-- [Site in video](https://haseebmajid.dev/)
-- [Source code](https://gitlab.com/hmajid2301/portfolio-site) for site in video
diff --git a/34. Gatsby edit button/images/cover.jpg b/34. Gatsby edit button/images/cover.jpg
deleted file mode 100644
index 095e350..0000000
Binary files a/34. Gatsby edit button/images/cover.jpg and /dev/null differ
diff --git a/35. Gatsby source git/README.md b/35. Gatsby source git/README.md
deleted file mode 100644
index c6bcb29..0000000
--- a/35. Gatsby source git/README.md
+++ /dev/null
@@ -1,285 +0,0 @@
----
-title: "How to manage your Gatsby blog posts from another repo with Gitlab CI"
-tags: ["gatsby", "gitlab", "git", "ci", "netlify"]
-license: "public-domain"
-slug: "gatsby-articles-git-gitlab-ci"
-canonical_url: "https://haseebmajid.dev/blog/gatsby-articles-git-gitlab-ci/"
-date: "2020-09-18"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this article, we will go over how you can manage your markdown blog posts from another git repository (repo),
-separate from the git repository for your Gatsby site.
-This is the same process that I use to manage [this repo](https://gitlab.com/hmajid2301/articles).
-
-What this entails: the source code for my Gatsby site is in a repo called `portfolio-site` on Gitlab,
-and I have another repo for all of my blog posts (in markdown) called `articles`. During the build of the
-Gatsby blog, we will import the markdown files from our `articles` git repo and use them as a source of data for
-our Gatsby blog.
-
-## Git Plugin
-
-First, install the [Gatsby git plugin](https://www.gatsbyjs.com/plugins/gatsby-source-git/?=git), so that we can source our data from git.
-
-```bash
-yarn add gatsby-source-git
-```
-
-Then add the plugin to your `gatsby-config.js` to tell it where to source its data from.
-
-:::warning Gatsby Filesystem
-You need to use the `gatsby-source-filesystem` before the `gatsby-source-git`.
-You can read more about it [here at this Github issue](https://github.com/stevetweeddale/gatsby-source-git/issues/22).
-:::
-
-```js:title=gatsby-config.js
-{
-  resolve: `gatsby-source-git`,
-  options: {
-    name: `Articles`,
-    remote: "https://gitlab.com/hmajid2301/articles.git",
-    branch: `master`,
-    patterns: ["**/*", "!**/*/index.md"],
-  },
-},
-```
-
-In our example, I will use [this repo](https://gitlab.com/hmajid2301/articles), the same repo this blog post originates from.
-You can specify the branch to use if you want. The most interesting bit is the `patterns` section. This is where you can
-specify which files to include and which to ignore: `["**/*", "!**/*/index.md"]`.
-In this example, I want to ignore
-all files called `index.md`, because these are the ones that I use for the example Gatsby blogs in this repo.
-You can read more about the [patterns here](https://github.com/mrmlnc/fast-glob).
-
-## GraphQL
-
-We can now check if the articles are being imported correctly by using the GraphQL IDE that comes with Gatsby.
-
-```bash
-yarn develop
-
-# Go to localhost:8000/__graphql
-```
-
-Then run the following query.
-
-```graphql
-query MyQuery {
-  allMarkdownRemark {
-    edges {
-      node {
-        fileAbsolutePath
-      }
-    }
-  }
-}
-```
-
-You should see output like this, where it lists all of your blog posts/markdown files. Here you can verify your git
-repo is being sourced correctly.
-
-```json
-{
-  "data": {
-    "allMarkdownRemark": {
-      "edges": [
-        {
-          "node": {
-            "fileAbsolutePath": "/home/haseeb/projects/personal/articles/35. Gatsby source git/source_code/.cache/gatsby-source-git/Articles/1. Expo with VirtualBox and Genymotion/README.md"
-          }
-        },
-        {
-          "node": {
-            "fileAbsolutePath": "/home/haseeb/projects/personal/articles/35. Gatsby source git/source_code/.cache/gatsby-source-git/Articles/11. React Navigation with React Native/README.md"
-          }
-        },
-        {
-          "node": {
-            "fileAbsolutePath": "/home/haseeb/projects/personal/articles/35. Gatsby source git/source_code/.cache/gatsby-source-git/Articles/13. REST API using OpenAPI, Flask & Connexions/README.md"
-          }
-        }
-        // ...
-      ]
-    }
-  }
-}
-```
-
-## Gatsby Node
-
-Now make sure you have logic in your `gatsby-node.js` file to create a blog post page for every
-markdown file that we source, i.e. one blog post for every item in the list above.
-
-```js:title=gatsby-node.js
-exports.createPages = async ({ graphql, actions }) => {
-  const { createPage } = actions;
-
-  const blogPost = path.resolve(`./src/templates/blog-post.js`);
-  const result = await graphql(
-    `
-      {
-        allMarkdownRemark(
-          sort: { fields: [frontmatter___date], order: DESC }
-          limit: 1000
-        ) {
-          edges {
-            node {
-              frontmatter {
-                title
-                slug
-              }
-            }
-          }
-        }
-      }
-    `
-  );
-
-  if (result.errors) {
-    throw result.errors;
-  }
-
-  // Create blog posts pages.
-  const posts = result.data.allMarkdownRemark.edges;
-
-  posts.forEach((post, index) => {
-    const previous = index === posts.length - 1 ? null : posts[index + 1].node;
-    const next = index === 0 ? null : posts[index - 1].node;
-
-    createPage({
-      path: post.node.frontmatter.slug,
-      component: blogPost,
-      context: {
-        slug: post.node.frontmatter.slug,
-        previous,
-        next,
-      },
-    });
-  });
-};
-```
-
-## Gitlab CI
-
-Every time we make a change in our articles repo, we want to trigger a rebuild of our site. Since I use Gitlab, I will
-show you how you can do this with Gitlab CI. Every commit on the master branch of the repo that contains our
-articles will trigger a rebuild of the Gatsby repo.
-
-:::caution Assumption
-This next section assumes that you use Gitlab to host your repos.
-It also assumes that for your Gatsby blog you use Gitlab CI to build/publish it.
-:::
-
-For example, in my use case [the article repo](https://gitlab.com/hmajid2301/articles) will trigger a rebuild for
-[the Gatsby repo](https://gitlab.com/hmajid2301/portfolio-site/-/tree/7258fe7ca1366024f17da5952077cdc00f00a3a8).
-
-First, go to your Gatsby repo, then go to `Settings > CI/CD > Pipeline triggers`. Then create a new pipeline trigger and
-save the newly created token to your CI/CD variables.
-
-`youtube: https://www.youtube.com/watch?v=JbAk6xpBRxc`
-`youtube: https://www.youtube.com/watch?v=X9m8UxmZgy8`
-
-Then copy the `cURL` command shown and add it to your `.gitlab-ci.yml`, like so:
-
-```yml{10}:title=.gitlab-ci.yml
-stages:
-  - build
-
-rebuild:portfolio-site:
-  stage: build
-  image: curlimages/curl
-  only:
-    - master
-  script:
-    - "curl -X POST -F token=${TRIGGER_TOKEN} -F ref=master https://gitlab.com/api/v4/projects/19260161/trigger/pipeline"
-```
-
-Make sure you replace `19260161` with the project ID of your Gatsby blog, as this is the repo we want to trigger a
-rebuild of. This means every time we push a new commit (i.e. an article) to the master branch of the articles repo,
-it will trigger the pipeline to run on our Gatsby blog.
-
-This means that when the pipeline runs `yarn build` or `gatsby build`, it'll source the markdown data from the latest
-commit on our articles git repo and will have the new article or whatever changes were made. The `.gitlab-ci.yml` for our
-Gatsby blog may look something like this:
-
-```yml{29-30}:title=.gitlab-ci.yml
-image: node:12.13.0
-cache:
-  key: ${CI_COMMIT_REF_SLUG}
-  paths:
-    - node_modules
-
-stages:
-  - build
-  - deploy
-
-before_script:
-  - yarn install
-
-build:site:
-  stage: build
-  only:
-    - master
-  script:
-    - yarn run build
-  artifacts:
-    paths:
-      - public
-
-deploy:site:
-  stage: deploy
-  only:
-    - master
-  script:
-    - npm i -g netlify-cli
-    - yarn deploy --site $NETLIFY_SITE_ID --auth $NETLIFY_PERSONAL_TOKEN --message "$CI_COMMIT_TITLE"
-  dependencies:
-    - build:site
-```
-
-:::tip Artifacts
-The `deploy:site` job uses the build artifacts from the previous `build:site` job, which has the site data stored in the `public`
-folder. Due to the site's default settings on Netlify, this is what is uploaded when we use `netlify-cli`.
-:::
-
-I build and deploy the site from Gitlab CI to save the build minutes on Netlify. All you need for this is your
-`NETLIFY_SITE_ID` and a `NETLIFY_PERSONAL_TOKEN` that can make the API request to publish the site on your behalf.
-
-![Gatsby Blog CI](images/gatsby-blog-ci.png)
-
-:::important Gitlab CI
-You can, of course, change the `deploy:site` job to suit how you want to deploy your site, i.e. Gitlab Pages, Github Pages, Vercel etc.
-:::
-
-## Netlify
-
-If you don't want to use Gitlab CI to build and publish your Gatsby blog, you can force a rebuild on Netlify instead.
-Every time we push a new commit to the master branch on the articles repo, we can
-use a webhook to trigger a rebuild of our site on Netlify. To do this, select your website in the Netlify GUI,
-then `Settings` > `Build & deploy` > `Build hooks`. Add a new build hook. Then copy the `cURL` command,
-so your articles repo `.gitlab-ci.yml` now looks something like:
-
-```yml{10}:title=.gitlab-ci.yml
-stages:
-  - build
-
-rebuild:portfolio-site:
-  stage: build
-  image: curlimages/curl
-  only:
-    - master
-  script:
-    - curl -X POST -d {} https://api.netlify.com/build_hooks/5f5e9c4f495aebe573c39aef
-```
-
-You will want to turn `5f5e9c4f495aebe573c39aef` into a CI/CD variable, else anyone can force a rebuild of your site.
-
-`youtube: https://www.youtube.com/watch?v=7KRihyulbTQ`
-
-That's it! We learnt how to manage our markdown articles in a separate repo to our Gatsby blog. We also went over how
-to automate the rebuild of our site using Gitlab CI and Netlify.
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/35.%20Gatsby%20source%20git/source_code)
-- Example [Gatsby blog](https://gitlab.com/hmajid2301/portfolio-site/-/tree/7258fe7ca1366024f17da5952077cdc00f00a3a8) repo
-- Example [Articles](https://gitlab.com/hmajid2301/articles) repo
diff --git a/35. Gatsby source git/images/cover.jpg b/35. Gatsby source git/images/cover.jpg
deleted file mode 100644
index b845306..0000000
Binary files a/35. Gatsby source git/images/cover.jpg and /dev/null differ
diff --git a/35. Gatsby source git/images/gatsby-blog-ci.png b/35. Gatsby source git/images/gatsby-blog-ci.png
deleted file mode 100644
index a08062a..0000000
Binary files a/35. Gatsby source git/images/gatsby-blog-ci.png and /dev/null differ
diff --git a/36. How to use proxychains/README.md b/36. How to use proxychains/README.md
deleted file mode 100644
index 6e9ecb9..0000000
--- a/36. How to use proxychains/README.md
+++ /dev/null
@@ -1,213 +0,0 @@
----
-title: "How do proxychains and SOCKS proxies work?"
-tags: ["proxychains", "networking", "proxy", "socks"]
-license: "public-domain"
-slug: "proxychains-and-socks-explained"
-canonical_url: "https://haseebmajid.dev/blog/proxychains-and-socks-explained/"
-date: "2020-10-10"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this article, we will go over how you can use `proxychains` to proxy your traffic through a SOCKS proxy.
-
-## Background
-
-Recently, like everyone else, I've been working from home a lot more often. This means that to access resources at work
-I need to use a VPN. However, to access some resources, such as production servers, from my local machine, I need to
-use a SOCKS5 proxy. Without using a SOCKS proxy, I would need to do something like what's shown in the diagram below.
-
-### Example Setup
-
-```mermaid
-graph LR
-    A[Local Machine] -->|ssh| B[Server A]
-    B -->|ssh| C[Server B]
-    subgraph Firewall
-    C
-    end
-```
-
-First, I would need to SSH onto an intermediate server (`Server A`), which I have connectivity to from my local machine.
-Then on that intermediate server, I would need to SSH onto the production server. So this intermediate server needs
-to have connectivity to `Server B` as well. As you can see, `Server B` is behind a firewall; in this example, the
-firewall will only allow traffic from `Server A` to ingress to `Server B`. So we cannot connect to it directly from
-our local machine.
-
-Another reason this setup is sub-optimal is that I lose all the development tools on my local machine. Say I wanted
-to use terraform to deploy/upgrade a service running on `Server B`. I would need to make sure terraform exists
-on the intermediate server. Now, this is fine for something simple like terraform, which is a single binary file,
-but it may get more complicated for other pieces of software, especially if you cannot install extra packages on
-the intermediate server. Also, there are other advantages to using your local development environment: you have
-all your shortcuts saved, and perhaps you use a different shell (zsh or fish vs bash on the server itself). For whatever
-reason, it may be more convenient to access `Server B` directly from our local machine.
-
-:::warning 🔐 Production Access
-Now depending on where you work and how your policies work, it may not be possible, or a good idea, to access
-your production servers from your local machine. This is just a simple example of one reason you may
-want to use a SOCKS proxy. There may be many others, such as accessing your test environment instead of
-production.
-::: - -## SOCKS Proxy - -In this section, I will show you how to solve the problem we described above. To solve this problem we will need to use, -a SOCKS (🧦 not this kinda socks) proxy. SOCKS is a layer 5 (on the OSI model, shown below) protocol. The protocol will allow -us to proxy to `Server A` and this server will then act as almost a middleman between the `Local Machine` and `Server B`. -The SOCKS proxy doesn't interpret any network traffic between the client (`Local Machine`) and the -server (`Server B`), it merely passes it onto between the two. - -![OSI Model](https://upload.wikimedia.org/wikipedia/commons/2/2b/Osi-model.png) - -:::tip SOCKS Proxy -You can learn more about -[SOCKS proxies here](https://securityintelligence.com/posts/socks-proxy-primer-what-is-socks5-and-why-should-you-use-it/). -This article goes into much more detail than I do! -::: - -### SSH Command - -So finally let's get onto how we can create a SOCKS proxy. To do this we will create an SSH tunnel. - -```bash{promptUser: haseeb} -ssh -D 8123 -f -C -q -N haseeb@10.10.10.10 -``` - -- `-D 8123`: Opens a SOCKS5 proxy on local port `8123` -- `-f`: Requests SSH to go to the background itself before executing the command -- `-C`: Compresses data before sending it -- `-q`: Quiet mode doesn't show any output -- `-N`: Doesn't execute remote commands, useful for just port forward (protocol 2+) - -:::tip Multiple Proxies -You can create multiple SOCKS proxies by running the SSH command binding to different local ports. -::: - -If the command worked, you now have a SOCKS proxy. One common use case of a SOCKS proxy is for internet -browsing using very much the same logic described above. Maybe you can access a website at work which is -behind a firewall, such as an authentication server's GUI etc. You can read more about using a SOCKS -proxy, in your browser [here](https://ma.ttias.be/socks-proxy-linux-ssh-bypass-content-filters/). -The diagram gives us a visual of what we've just done. - -```mermaid -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - end - - subgraph Remote Server - C[Server A] - end - - B -->|Opens Port| A - B -->|SSH Tunnel| C -``` - -## Proxychains - -Now that we have SOCKS proxy running on our local machine, how can we use it to connect to `Server B` and say -use terraform to deploy a new service? Well, that's where `proxychains` comes in, or rather more specifically -`proxychains-ng`. The latter being a version which still gets relatively frequent updates. -To install `proxychains` on an Ubuntu/Debian based distro you can do something like this: - -### Install - -```bash{promptUser: haseeb}{outputLines:4-8} -sudo apt install proxychains-ng - -vim /etc/proxychains4.conf -[ProxyList] -# add proxy here ... -# meanwile -# defaults set to "tor" -socks5 127.0.0.1 8123 -``` - -Edit the configuration file as shown above, `socks5 127.0.0.1 8123`. Adjust the port `8123` to whatever port you set above. -Now that `proxychains` is setup. This is what our setup now looks like: - -![ProxyChains](images/proxychains.png) - -```mermaid -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - D[Proxychains] - end - - subgraph Remote Server - C[Server A] - end - - D -->|Connects to| A - B -->|Opens Port| A - B -->|SSH Tunnel| C -``` - -### Examples - -If `Server B` had an IP address of `10.10.10.11` we could do: - -```bash{promptUser: haseeb} -proxychains ssh haseeb@10.10.10.11 -``` - -This would allow us to connect directly using SSH. 
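Before moving on, it can be worth checking that the SOCKS proxy itself is up before debugging `proxychains`. This is a small sketch, not from the original article, assuming the proxy is listening on local port `8123` as set up earlier:

```bash
# Confirm something is listening on the port we opened with ssh -D.
ss -ltn | grep 8123

# Send a request through the SOCKS5 proxy directly; the socks5h://
# scheme makes curl resolve DNS through the proxy as well.
curl -sw '%{http_code}\n' -o /dev/null -x socks5h://127.0.0.1:8123 https://example.com
```

If the `curl` command prints `200`, traffic is flowing through the tunnel, and any remaining failures are more likely a `proxychains` configuration issue.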
Or perhaps if you had a web service running on `Server B` and wanted to -check a `healthcheck` endpoint to see if your API was running correctly you might do: - -```bash{promptUser: haseeb} -proxychains curl https://10.10.10.11/api/v1/healthcheck -``` - -Or if you wanted to use terraform to deploy something on `Server B`, you could do something like: - -:::tip Terraform -To get terraform to use our SOCKS proxy we need to export the `HTTP_PROXY` and `HTTPS_PROXY` variables. -::: - -```bash{promptUser: haseeb} -export HTTP_PROXY=socks5://127.0.0.1:8123 -export HTTPS_PROXY=socks5://127.0.0.1:8123 -proxychains terraform plan -proxychains terraform apply -``` - -:::caution ProxyChains TCP -proxychains will only proxy TCP connections from your `Local Machine`. -However, it can resolve DNS through the proxy as well. -::: - -What is essentially going on here is that traffic is being sent from our `Local Machine` to `Server A` which can -connect to `Server B` and pass traffic to the server. This in effect makes it seem our `Local Machine` can connect -directly to `Server B`. - -```mermaid -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - D[Proxychains] - end - - subgraph Remote Server - C[Server A] - E[Server B] - end - - subgraph Firewall - E - end - - D -->|Connects to| A - B -->|Opens Port| A - B -->|SSH Tunnel| C - C -->|TCP Connection| E -``` - -So overall we have something as described in the diagram above! - -## Appendix - -- Read more about [SOCKS Proxies here](https://securityintelligence.com/posts/socks-proxy-primer-what-is-socks5-and-why-should-you-use-it/) -- Read more about how to setup [a SOCKS here](https://ma.ttias.be/socks-proxy-linux-ssh-bypass-content-filters/) diff --git a/36. How to use proxychains/images/cover.jpg b/36. How to use proxychains/images/cover.jpg deleted file mode 100644 index 0ad723b..0000000 Binary files a/36. How to use proxychains/images/cover.jpg and /dev/null differ diff --git a/36. How to use proxychains/images/local-server-server.png b/36. How to use proxychains/images/local-server-server.png deleted file mode 100644 index 53fce40..0000000 Binary files a/36. How to use proxychains/images/local-server-server.png and /dev/null differ diff --git a/36. How to use proxychains/images/overall.png b/36. How to use proxychains/images/overall.png deleted file mode 100644 index 9340b29..0000000 Binary files a/36. How to use proxychains/images/overall.png and /dev/null differ diff --git a/36. How to use proxychains/images/proxychains.png b/36. How to use proxychains/images/proxychains.png deleted file mode 100644 index 34b1488..0000000 Binary files a/36. How to use proxychains/images/proxychains.png and /dev/null differ diff --git a/36. How to use proxychains/images/socks-tunnel.png b/36. How to use proxychains/images/socks-tunnel.png deleted file mode 100644 index d9e4cad..0000000 Binary files a/36. How to use proxychains/images/socks-tunnel.png and /dev/null differ diff --git a/36. How to use proxychains/mermaid-snippets/local-server-server.mmd b/36. How to use proxychains/mermaid-snippets/local-server-server.mmd deleted file mode 100644 index 0e7cf8f..0000000 --- a/36. How to use proxychains/mermaid-snippets/local-server-server.mmd +++ /dev/null @@ -1,6 +0,0 @@ -graph LR - A[Local Machine] -->|ssh| B[Server A] - B -->|ssh| C[Server B] - subgraph Firewall - C - end diff --git a/36. How to use proxychains/mermaid-snippets/overall.mmd b/36. 
How to use proxychains/mermaid-snippets/overall.mmd deleted file mode 100644 index 48e8d0f..0000000 --- a/36. How to use proxychains/mermaid-snippets/overall.mmd +++ /dev/null @@ -1,20 +0,0 @@ -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - D[Proxychains] - end - - subgraph Remote Server - C[Server A] - E[Server B] - end - - subgraph Firewall - E - end - - D -->|Connects to| A - B -->|Opens Port| A - B -->|SSH Tunnel| C - C -->|TCP Connection| E \ No newline at end of file diff --git a/36. How to use proxychains/mermaid-snippets/proxychains.mmd b/36. How to use proxychains/mermaid-snippets/proxychains.mmd deleted file mode 100644 index e9fd83c..0000000 --- a/36. How to use proxychains/mermaid-snippets/proxychains.mmd +++ /dev/null @@ -1,14 +0,0 @@ -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - D[Proxychains] - end - - subgraph Remote Server - C[Server A] - end - - D -->|Connects to| A - B -->|Opens Port| A - B -->|SSH Tunnel| C \ No newline at end of file diff --git a/36. How to use proxychains/mermaid-snippets/socks-tunnel.mmd b/36. How to use proxychains/mermaid-snippets/socks-tunnel.mmd deleted file mode 100644 index 75e6290..0000000 --- a/36. How to use proxychains/mermaid-snippets/socks-tunnel.mmd +++ /dev/null @@ -1,12 +0,0 @@ -graph LR - subgraph Local Machine - A[TCP Port :8123] - B[SSH Client] - end - - subgraph Remote Server - C[Server A] - end - - B -->|Opens Port| A - B -->|SSH Tunnel| C \ No newline at end of file diff --git a/37. DNS with Docker/README.md b/37. DNS with Docker/README.md deleted file mode 100644 index bda2afd..0000000 --- a/37. DNS with Docker/README.md +++ /dev/null @@ -1,235 +0,0 @@ ---- -title: "How DNS works with Docker?" -tags: ["docker", "networking", "dns"] -license: "public-domain" -slug: "dns-docker-explained" -canonical_url: "https://haseebmajid.dev/blog/dns-docker-explained/" -date: "2020-10-27" -published: true -cover_image: "images/cover.jpg" ---- - -In this article, we will briefly go over what DNS (domain name system) is and explain how it is used in conjunction -with Docker 🐳. - -## DNS - -You can think of DNS like a phonebook, except instead of people's name and phone numbers, it stores domains names and -IP addresses (this can be either IPv4 or IPv6). Where a domain name is used to identify resources i.e. `google.com` is a -domain name. This is how DNS works: - -```text -google.com: 8.8.8.8 -cloudflare.com: 1.1.1.1 -``` - -### Example - -You can manually send a DNS request (and get a response) using the `dig` command. So for example, we can do something -like this. - -```bash{promptUser: haseeb}{outputLines:2} -dig +short google.com -172.217.169.78 -``` - -### Records - -Each DNS entry can be of varying types, some of the most common DNS types (referred to as records) are: - -A: Points a domain name to an IPv4 address i.e. `8.8.8.8` -AAAA: Same as an A record except points to an IPv6 address i.e. `2001:db8:0:1` -CNAME: Canonical Name points one domain to another domain name, one common use case is to point `www.example.com` -> `example.com`. This way we only need to update the A record of `example.com`, not both domains. - -#### AAAA Example - -To specify a AAAA (quad A) record we can do something like: - -```bash{promptUser: haseeb}{outputLines:2} -dig +short google.com AAAA -2a00:1450:4009:810::200e -``` - -:::important More Details -In a future article, I will do a deeper dive into the mechanics of how DNS works and the actual process of converting a domain name to an IP address. 
-::: - -So that's DNS in a nutshell! On to how it relates to Docker. - -:::note tl:dr -DNS is a system used to convert domain names into IP addresses because it's much easier for humans to remember names as compared with numbers. -::: - -## Docker - -For the sake of this article, we will be using the following docker-compose file: - -```yaml:title=docker-compose.yml -version: "3.5" - -services: - web_server: - container_name: nginx - build: - context: . - dockerfile: docker/nginx/Dockerfile - ports: - - 80:80 - depends_on: - - app - - app: - container_name: flask - build: - context: . - dockerfile: docker/flask/Dockerfile - env_file: docker/database.conf - expose: - - 8080 - depends_on: - - database - - database: - container_name: postgres - image: postgres:latest - env_file: docker/database.conf - ports: - - 5432:5432 - volumes: - - db_volume:/var/lib/postgresql - -volumes: - db_volume: -``` - -It will create three containers, Nginx, a flask app and a Postgres database, when we run `docker-compose up --build`, in particular take **note** of the `container_name`(s): `postgres`, `nginx`, `flask`. - -:::tip Source Code -The source code for those Docker containers can be found -[here](https://gitlab.com/hmajid2301/articles/-/tree/master/7.%20Multi%20Docker%20Container%20with%20Nginx%2C%20Flask%20and%C2%A0MySQL/source_code) -::: - -### Nginx - -Our `nginx` config file looks something like: - -```nginx:title=example.conf{10} -server { - listen 80; - server_name _; - - location / { - try_files $uri @app; - } - - location @app { - include /etc/nginx/uwsgi_params; - uwsgi_pass flask:8080; - } -} -``` - -This Nginx configuration file tells Nginx to pass any requests sent on `/` path to the -uwsgi server running in the `flask` docker container. -Now taking a look at the `location @app` section you'll notice for `uwsgi_pass` we don't specify an IP address to send the requests -to. Instead, we use the container name, this is because within Docker containers we don't have to specify the other Docker -container's IP address to connect to it we can specify the container name. Docker's DNS will resolve the name into an IP address for us. - -### Nginx Example - -So if I open a shell on the `nginx` container: - -```bash{promptUser: haseeb} -docker exec -it nginx bash -``` - -Then we can do something like: - -```bash{promptUser: root}{outputLines:3} -apt update && apt install dnsutils -dig flask +short -172.23.0.3 -``` - -This is particularly useful because Docker containers get assigned an IP if you don't specify one -(in the `docker-compose.yml`) file. Taking a look at the IP assigned to the `flask` -the container matches the IP address returned by the dig command. - -```bash{promptUser: haseeb}{outputLines:2} -docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' flask -172.23.0.3 -``` - -### Flask Example - -Similarly in the `flask` container if we want to connect to the `postgres` database, we can just specify the host -using the container name `postgres` rather than an IP in our connection URI. As shown in the example below: - -```python -DATABASE_CONNECTION_URI = f'postgresql+psycopg2://{user}:{password}@postgres:5432/{database}' -``` - -:::note Example -The example above is a URI used by the SQLAlchemy library to connect to the Postgres database. -::: - -## Deep Diver - -Let's take a slightly closer look into Docker's architecture to understand what is going on here. 
- -### Docker Engine 🏭 Explained - -> Docker Engine is an open-source containerization technology for building and containerizing your applications. - https://docs.docker.com/engine/ - -It contains the following components: - A server with a long-running daemon process dockerd. - APIs which specify interfaces that programs can use to talk to and instruct the Docker daemon. - A command-line interface (CLI) client docker. - -When we install Docker we are also installing the Docker Engine. - -:::important More Details -In a future article, I will do a deeper dive into the Docker Engine as well 🐳. -::: - -Briefly, how it works is we use the CLI i.e. `docker run`/`docker-compose`, which makes -API requests (on our behalf) to the Docker daemon. The Docker daemon then interacts with containerd, which is responsible for the creation/deletion of our containers. Essentially containerd is a container supervisor. - -### Docker Engine and DNS - -Now how does Docker Engine relate to DNS? As long as the two containers are on the same -network we can use the container name and resolve it using DNS. Each Docker container has a DNS resolver that forwards -DNS queries to Docker Engine, which acts as a DNS server. Docker -Engine then checks if the DNS query belongs to a container on the network that the requested container belongs to. -If it does, then Docker Engine looks up the IP address that matches a container name in its key-value store and -returns that IP back to the requesting container. - -![https://success.mirantis.com/api/images/.%2Frefarch%2Fnetworking%2Fimages%2FDNS.png](https://success.mirantis.com/api/images/.%2Frefarch%2Fnetworking%2Fimages%2FDNS.png) - -:::note Normal Queries -For all other DNS queries the Docker Engine will use the host machine's DNS settings, -unless overwritten (explained below in the `Misc` section). -::: - -:::important Daemon Vs Engine - -> Docker Daemon checks the client request and communicates with the Docker components to perform a service whereas, Docker Engine or Docker is the base engine installed on your host machine to build and run containers using Docker components and services - Anjali Nair, [Quora](https://www.quora.com/What-is-the-difference-between-the-Docker-Engine-and-Docker-Daemon) - -::: - -## Misc - -:::note Docker DNS Settings -We can customise Docker's default DNS settings by using the `--dns` flag, for example, to use Google's DNS you could -go `--dns 8.8.8.8`. You can also provide your DNS records for the container to use by using the `--extra_hosts` flag. -For example `--extra_hosts somehost:162.242.195.82`. -::: - -:::warning Docker DNS Settings -Custom hosts defined in the `/etc/hosts` file are ignored. They must be passed in using the `extra_hosts` flag. -::: - -## Appendix - -- [What is DNS?](https://www.cloudflare.com/en-gb/learning/dns/what-is-dns/) by CloudFlare -- [DNS Records](https://www.bluehost.com/help/article/dns-records-explained) Explained -- [Docker Engine](https://www.serverwatch.com/server-news/how-docker-engine-works-to-enable-containers/) -- [Docker in detail](https://stackoverflow.com/questions/41645665/how-containerd-compares-to-runc) SO Post -- [Docker Swarm Architecture](https://success.mirantis.com/article/networking) (relevant to normal Docker) diff --git a/37. DNS with Docker/images/cover.jpg b/37. DNS with Docker/images/cover.jpg deleted file mode 100644 index cb4e985..0000000 Binary files a/37. DNS with Docker/images/cover.jpg and /dev/null differ diff --git a/38. ToC in Gatsby/README.md b/38. 
ToC in Gatsby/README.md deleted file mode 100644 index ba7b929..0000000 --- a/38. ToC in Gatsby/README.md +++ /dev/null @@ -1,381 +0,0 @@ ---- -title: "How to add a ToC in Gatsby" -tags: ["gatsby", "react", "javascript"] -license: "public-domain" -slug: "toc-in-gatsby" -canonical_url: "https://haseebmajid.dev/blog/toc-in-gatsby/" -date: "2020-11-11" -published: true -cover_image: "images/cover.jpg" ---- - -A lot of people, I included, are using Gatsby to build their own blogs. One of the things I wanted to add to my blog -was a table of contents (ToC) 📝. A ToC will show you all the headings of an article and when you click on a heading it'll -take you directly to that heading. -It's a nice little feature to have on your blog, which makes it easier for users to navigate and find the information -they are looking for. - -`youtube: https://www.youtube.com/watch?v=YrUeiD4YO5E` - -## Prerequisite - -So before we get started you can find the [source code here](https://gitlab.com/hmajid2301/). -In this article, I will be using the -[gatsby-starter-blog](https://www.gatsbyjs.com/starters/gatsbyjs/gatsby-starter-blog/). - -```bash -# If you don't have the CLI installed, run this command. -npm -g install gatsby-cli - -gatsby new my-gatsby-project https://github.com/gatsbyjs/gatsby-starter-blog -``` - -If you already have an existing Gatsby site, you can make the changes directly there instead of -using this starter. - -## Plugins - -We need to get some extra plugins installed for the ToC to work properly. - -```bash -yarn add gatsby-remark-autolink-headers gatsby-plugin-emotion -``` - -The plugin `gatsby-remark-autolink-headers` turns all of the headers into anchor links. This means we can link to the -headers. - -:::note Emotion -You only need to add the emotion plugin if you want to use emotionjs, which is a css-in-js solution. -You will see this later when we look at the `toc.js` component. -::: - -```js{1-2,5}:title=gatsby-config.js -`gatsby-plugin-emotion`, -`gatsby-plugin-smoothscroll`, -{ - resolve: `gatsby-transformer-remark`, - options: { - plugins: [ - // ... - `gatsby-remark-autolink-headers`, - ], - }, -}, -// ... -``` - -Our header elements with the `autolinks` plugin will now look something like: - -```html -

<h1 id="header-1">
  <a href="#header-1" aria-label="header 1 permalink" class="anchor before">...</a>
  Header 1
</h1>
```

## ToC

First, let's design the ToC element. This component is just a presentational component; it doesn't contain any state logic.

```jsx:title=src/components/toc.js file=./source_code/src/components/toc.js

```

Let's break this component down. It receives a `headings` prop, which it expects to be a list of the `headings` from the markdown documents. A heading is an element starting with `#`; the more `#`s, the lower the heading, for example:

```md
# Heading 1

## Heading 2

### Heading 3
```

We use `headings.map`, which will create an element for each of the `headings` in the list. If it's a "heading 5" or lower (`heading.depth > 4`), we simply return an empty div. This is so that the ToC doesn't become too "big", which would make it harder to use/navigate.

```js:title=src/components/toc.js
const ToC = ({ headings }) => (
  <Toc>
    <Title>Table of contents</Title>
    <InnerScroll>
      {headings.map((heading) => {
        if (heading.depth > 4) {
          return <div />;
        }

        return (
          <ToCElement key={heading.value}>
            <ToCLink
              href={`#${heading.value.replace(/\s+/g, "-").toLowerCase()}`}
            >
              {heading.value}
            </ToCLink>
          </ToCElement>
        );
      })}
    </InnerScroll>
  </Toc>
);
```

If it's a heading 1-4, we create a list element (`<li>`) with a link (`<a>`) inside of it. This will be a single heading within our ToC. Below is an example ToC:

![Example ToC](images/example_toc.png)

The heading data for the ToC above will look something like this:

```js
const headings = [
  {
    value: "Header 1",
    depth: 1,
  },
  {
    value: "Header 2",
    depth: 2,
  },
];
```

As discussed earlier, we are using the `autolink` headers plugin. This plugin auto-generates anchor links for all of our headers. We will use the `href` attribute to link to these headers in our ToC.

:::note href
For the `href` link, we replace all the whitespace with `-`, so `"Heading 1"` becomes the anchor link `#heading-1`.

```jsx:title=src/components/toc.js
<ToCLink href={`#${heading.value.replace(/\s+/g, "-").toLowerCase()}`}>
  {heading.value}
</ToCLink>
```

:::

### Twin Macro & EmotionJS

Now in the above `ToC` component, you see elements like `<Toc>`, `<Title>` and `<InnerScroll>`. Where are these components coming from? Well, this is why I said we need to use the Gatsby emotion plugin. These are css-in-js components: the components above are twin.macro or emotionjs components. To use them within our code, run the following commands:

```bash
yarn add twin.macro @emotion/core @emotion/styled
npx tailwind init
vim package.json
```

```json:title=package.json
"babelMacros": {
  "twin": {
    "config": "tailwind.config.js",
    "preset": "emotion",
    "dataTwProp": true,
    "debugPlugins": false,
    "debug": false
  }
}
```

:::info TailwindCSS with Gatsby
Gatsby has a good tutorial [here](https://www.gatsbyjs.com/docs/tailwind-css/) on how to integrate
TailwindCSS with a Gatsby site.
:::

The `twin.macro` library allows us to use [`TailwindCSS`](https://tailwindcss.com/). Tailwind provides us with
many pre-generated classes that we can then leverage within our code. Here I am assuming you are somewhat familiar with how it works.

```jsx:title=src/components/toc.js
const Toc = styled.ul`
  ${tw`bg-white fixed hidden lg:flex flex-col rounded p-3 my-3`};
  width: 20rem;
  left: calc(50% + 400px);
  top: 80px;
  max-height: 30vh;
`;

const Title = tw.h2`text-2xl mb-2`;

const ToCElement = tw.li`p-1 leading-5 ml-4 mb-4 mr-4 leading-3 list-none`;

const ToCLink = tw.a`hover:text-black transition duration-300 no-underline`;

const InnerScroll = styled.div`
  scrollbar-width: thin;
  scrollbar-color: #367ee9 rgba(48, 113, 209, 0.3);
  overflow: hidden auto;
`;
```

This is how we can style the scrollbar. The first colour is the colour of the scrollbar itself and the second colour is the background colour of the scrollbar.

```jsx:title=src/components/toc.js
const InnerScroll = styled.div`
  scrollbar-width: thin;
  scrollbar-color: #367ee9 rgba(48, 113, 209, 0.3);
  overflow: hidden auto;
`;
```

:::note div
`styled.div` means that `InnerScroll`, when translated to HTML, will be a `<div>`.

```html
<div>
  <li>
    <a href="#header-1">Header 1</a>
  </li>
</div>
    -``` - -::: - -Another interesting component to look at is the `ToC`. This combined twin.macro and emotionjs -so `width` CSS is using emotionjs and we are using twin.macro with `{tw`...`}`. Where we fill in -the `tw` with the tailwind styles we want to apply. In the example below, `fixed` will make -the position of the element fixed. - -```jsx:title=src/components/toc.js -const Toc = styled.ul` - ${tw`bg-white fixed hidden lg:flex flex-col rounded p-3 my-3`}; - width: 20rem; - left: calc(50% + 400px); - top: 80px; - max-height: 30vh; -`; -``` - -## Global Style - -One small change we need to make to allow our scrolling to be smoother is in our global styles, -whether that should be a CSS file or a css-in-js etc. In this example, it'll be the `style.css` file that comes with -the start. We need to add the following properties: - -```css:title=src/style.css -html { - scroll-behavior: smooth; - // ... -} -``` - -This CSS property will stop the scrolling from feeling jerky and instead will be far smoother. So instead of jumping to the header, we click on a header in the `ToC` and it'll scroll smoothly to that header. - -## Blog Template - -Finally, we need to add the ToC element to our blog template. - -### Gatsby Node - -Remember that with Gatsby in the `gatsby-node.js` file, we can create a new page for each markdown file found. Here -is the logic that creates a page for each markdown file found by the `markdown-remark` plugin. As you can see, we use -`blog-post.js` file as the template for each of our blog posts. - -```js:title=gatsby-node.js -exports.createPages = async ({ graphql, actions, reporter }) => { - const { createPage } = actions; - - // Define a template for blog post - const blogPost = path.resolve(`./src/templates/blog-post.js`); - - // Get all markdown blog posts sorted by date - const result = await graphql( - ` - { - allMarkdownRemark( - sort: { fields: [frontmatter___date], order: ASC } - limit: 1000 - ) { - nodes { - id - fields { - slug - } - } - } - } - ` - ); - - const posts = result.data.allMarkdownRemark.nodes; - - // Create blog posts pages - // But only if there's at least one markdown file found at "content/blog" (defined in gatsby-config.js) - // `context` is available in the template as a prop and as a variable in GraphQL - - if (posts.length > 0) { - posts.forEach((post, index) => { - createPage({ - path: post.fields.slug, - component: blogPost, - }); - }); - } -}; -``` - -### Gatsby Config - -To allow the remark plugin to "see" the markdown files, we need to source them. There are a few ways to do this, -I normally use [git to source my plugins](/blog/gatsby-articles-git-gitlab-ci). In this case, we will add all -the markdown files in the `content/blog` folder. - -```js:title=gatsby-config.js -{ - resolve: `gatsby-source-filesystem`, - options: { - path: `${__dirname}/content/blog`, - name: `blog`, - }, -} -``` - -### Blog Post - -So now back to our `blog-post.js`, let us add our `ToC` component to the blog post template. - -```jsx:title=src/templates/blog-post.js -import ToC from "../components/toc"; -// ... - -const post = data.markdownRemark; -return ( - -
  <Layout>
    <ToC headings={post.headings} />
    <section dangerouslySetInnerHTML={{ __html: post.html }} />
  </Layout>
);
```

Let's also adjust the GraphQL query so we can get the heading data that the ToC component requires.
Add the `headings` field to get the value and the depth fields.

```graphql:title=src/templates/blog-post.js{5-8}
markdownRemark(id: { eq: $id }) {
  id
  excerpt(pruneLength: 160)
  html
  headings {
    value
    depth
  }

  frontmatter {
    title
    date(formatString: "MMMM DD, YYYY")
    description
  }
}
```

That's it, we've added a ToC to our Gatsby site. We could do a bunch of other things to improve it, such as
styling it to make it look better. I also hide my ToC when the width decreases, so you only see a ToC when
browsing the site on a laptop/desktop.

## Appendix

- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/38.%20ToC%20in%20Gatsby/source_code) diff --git a/38. ToC in Gatsby/images/cover.jpg b/38. ToC in Gatsby/images/cover.jpg deleted file mode 100644 index cf06231..0000000 Binary files a/38. ToC in Gatsby/images/cover.jpg and /dev/null differ diff --git a/38. ToC in Gatsby/images/example_toc.png b/38. ToC in Gatsby/images/example_toc.png deleted file mode 100644 index fe62376..0000000 Binary files a/38. ToC in Gatsby/images/example_toc.png and /dev/null differ diff --git a/39. Python yield explained/README.md deleted file mode 100644 index 3af9b94..0000000 --- a/39. Python yield explained/README.md +++ /dev/null @@ -1,187 +0,0 @@
---
title: "What does yield do in Python?"
tags: ["python", "pytest"]
license: "public-domain"
slug: "python-yield-explained"
canonical_url: "https://haseebmajid.dev/blog/python-yield-explained/"
date: "2020-11-30"
published: true
cover_image: "images/cover.jpg"
---

In this article, we will go over what the `yield` keyword is used for. We will also cover how you can use `yield`
with a pytest fixture to "tear down" tests after all of our tests have run. A common job is removing
test data from the database, so that the next time you run the tests they won't fail due to the database being
in a different (unexpected) state.

## Background

### Iterables & Iterators

Before we can look at the `yield` keyword, we need to cover iterables and generators in Python. An "iterable" is
any Python object that can return its members one at a time, for example in a for-loop.

In Python we have what are known as magic methods: methods like `__enter__` and `__exit__` defined within
objects. These are called "magic" methods because they are never directly called by the user. For an object to be
iterable, it needs to implement the `__iter__` magic method. If an object is iterable, it can be passed to the `iter()`
function, which returns an iterator.

```python
❯ ipython
Python 3.8.5 (default, Jul 28 2020, 12:59:40)
Type 'copyright', 'credits' or 'license' for more information
IPython 7.14.0 -- An enhanced Interactive Python. Type '?' for help.

In [1]: iter([1, 2, 3])
Out[1]: <list_iterator at 0x...>

In [2]: iter("hello")
Out[2]: <str_iterator at 0x...>

In [3]: iter(42)
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-3-...> in <module>
----> 1 iter(42)

TypeError: 'int' object is not iterable

In [4]:
```

An iterator is any object which has the `__next__` magic method defined. Whenever we use a for-loop
(or list comprehension), the `next()` method is called automatically for us, to get the next item from
the iterable.
```python
In [5]: hello_list = ["h", "e", "l", "l", "o"]

In [6]: iterator = iter(hello_list)

In [7]: next(iterator)
Out[7]: 'h'

In [8]: next(iterator)
Out[8]: 'e'

In [9]: next(iterator)
Out[9]: 'l'

In [10]: next(iterator)
Out[10]: 'l'

In [11]: next(iterator)
Out[11]: 'o'

In [12]: next(iterator)
---------------------------------------------------------------------------
StopIteration                             Traceback (most recent call last)
<ipython-input-12-...> in <module>
----> 1 next(iterator)

StopIteration:
```

In summary, an iterable is an object that can be "looped" over, and an iterator is an object which does
the "looping" for us: it keeps track of the current state/index and moves on to the next item.
In the example above, `hello_list` is the iterable and the `iterator` variable is the iterator.

### Generators

Generators are a special type of iterable; they differ from normal lists in two main ways:

- You can only iterate over them once
- They don't store all of their values in memory

So generators can be great when lists would get very large.

```python
In [14]: g = (x**2 for x in range(10))

In [15]: for i in g:
    ...:     print(i)
    ...:
0
1
4
9
16
25
36
49
64
81

In [16]: for i in g:
    ...:     print(i)
    ...:
```

Note that the second loop prints nothing: the generator was exhausted by the first one.

## Yield

Now that we finally understand iterables and generators, let's see how they relate to the `yield` keyword. `yield` can be
used like `return`, except it will return a generator.

```python
In [17]: def example():
    ...:     yield "A"
    ...:     yield "B"
    ...:     yield "C"
    ...:

In [18]: for i in example():
    ...:     print(i)
    ...:
A
B
C

In [22]: example()
Out[22]: <generator object example at 0x...>
```

A good example of `yield` can be seen above. It differs from a `return` because the function is smart enough to retain
its "state" and resume where it left off. We can see the same example with `return` below; there only
a single item is returned, so only "A" is looped over.

```python
In [19]: def example():
    ...:     return "A"
    ...:     return "B"
    ...:     return "C"
    ...:

In [20]: for i in example():
    ...:     print(i)
    ...:
A
```

## Pytest Example

One interesting use case of the `yield` keyword is using it to run clean-up tasks after running tests with
pytest. Pytest is a very popular testing framework in Python; it allows us to create a file called `conftest.py`,
where we store common functions and fixtures shared between our tests.

In the example below, the `clean_up` fixture is created before any tests run, because we have given
it the `autouse=True` parameter. The fixture runs up to the `yield` and then pauses; once all of our tests have
finished running, execution resumes after the `yield`, and the print and teardown tasks are run. This is useful,
for example, when you want to clean up your database after running tests that add "test" data to it, or for
any other type of teardown task you need to run after all of your tests have finished running.
- -```python:title=conftest.py -@pytest.fixture(scope="session", autouse=True) -def clean_up(): - yield - print("teardown after yield") - delete_database_collection() -``` - -## Appendix - -- [Cover Photo](https://unsplash.com/photos/ieic5Tq8YMk) from Chris Ried on Usplash -- [Real Python](https://realpython.com/python-for-loop/) "for" loop -- [Real Python](https://realpython.com/introduction-to-python-generators/) generators -- [SO Post](https://stackoverflow.com/questions/231767/what-does-the-yield-keyword-do) -- [DZone Yield vs Return](https://dzone.com/articles/when-to-use-yield-instead-of-return-in-python) diff --git a/39. Python yield explained/images/cover.jpg b/39. Python yield explained/images/cover.jpg deleted file mode 100644 index 6cd7a94..0000000 Binary files a/39. Python yield explained/images/cover.jpg and /dev/null differ diff --git a/4. Debug Expo Native WebView/README.md b/4. Debug Expo Native WebView/README.md deleted file mode 100644 index 41de816..0000000 --- a/4. Debug Expo Native WebView/README.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -title: "Debug React Native WebView" -tags: ["react-native", "expo", "chrome"] -license: "public-domain" -slug: "debug-react-native-webview" -canonical_url: "https://haseebmajid.dev/blog/debug-react-native-webview" -date: "2018-10-27" -published: true -cover_image: "images/cover.jpg" ---- - -The _core_ logic of my React Native app involves using WebViews because I need to access the HTML5 canvas. Whilst -developing this code there are bound to be errors and issues with the WebView code. Figuring out how to debug -code within the WebView isn’t so obvious. - -![Figure 1: Chrome Inspect for the WebView](images/webview-dev-tools.png) - -## Option 1: Chrome Inspect - -- Start your Expo/React Native app\*. -- Open and chrome and then go to the following URL, [chrome://inspect](chrome://inspect). -- Then click on the _inspect_ button, click the top link to open the latest WebView, you should see something similar to Figure 2. -- You should see something similar to Figure 1. Now you explore the WebView like a normal web page. - -![Figure 2: List of WebView](images/chrome-inspect.png) - -## Option 2: React Native Debugger - -Alternatively you can access this page using the React Native Debugger page in chrome. - -- Start your Expo/React Native app\* -- Go to your React Native debugger on chrome, in my case since I’m using Expo my URL is http://192.168.27.128:19001/debugger-ui/ -- Go to the three dots button (next to close button) > More tools > Remote devices -- At the bottom you’ll see another panel open up -- You should be able to see your Android device in the list -- Hit the Inspect button the first item (this will be the latest) -- Now you explore the WebView like a normal web page - -## Debugging - -So to the closest way to debug the WebView I have found is to use console.log statements that will appear in the -DevTools console. In the normal React Native debugger console you won’t be able to see any console.log statements -from the WebView. You can only view them by exploring DevTools for Remote devices. As far as I can tell you cannot -use breakpoints in WebViews but still this is better than nothing. - -![Figure 3: Open DevTools for Remote devices](images/devtools.gif) - -## Appendix - -Make sure to do this on the same machine/host that is running the emulator. For example, a Genymotion VM is -running on my Windows machine but I can still see it’s WebViews on the chrome inspect URL. This can be seen in Figure 3. 
- -![Figure 4: List of WebView](images/chrome-inspect-emulator.png) - -### Links - -- [With help from this StackOverflow post](https://stackoverflow.com/questions/47711418/debugging-webview-in-react-native-apps?rq=1) -- [Genymotion](https://www.genymotion.com/) -- GIFs created with [screentogif](https://www.screentogif.com/) diff --git a/4. Debug Expo Native WebView/images/chrome-inspect-emulator.png b/4. Debug Expo Native WebView/images/chrome-inspect-emulator.png deleted file mode 100755 index 93966c2..0000000 Binary files a/4. Debug Expo Native WebView/images/chrome-inspect-emulator.png and /dev/null differ diff --git a/4. Debug Expo Native WebView/images/chrome-inspect.png b/4. Debug Expo Native WebView/images/chrome-inspect.png deleted file mode 100755 index 73eeea6..0000000 Binary files a/4. Debug Expo Native WebView/images/chrome-inspect.png and /dev/null differ diff --git a/4. Debug Expo Native WebView/images/cover.jpg b/4. Debug Expo Native WebView/images/cover.jpg deleted file mode 100644 index 642ae1c..0000000 Binary files a/4. Debug Expo Native WebView/images/cover.jpg and /dev/null differ diff --git a/4. Debug Expo Native WebView/images/devtools.gif b/4. Debug Expo Native WebView/images/devtools.gif deleted file mode 100755 index c4f34d8..0000000 Binary files a/4. Debug Expo Native WebView/images/devtools.gif and /dev/null differ diff --git a/4. Debug Expo Native WebView/images/webview-dev-tools.png b/4. Debug Expo Native WebView/images/webview-dev-tools.png deleted file mode 100755 index e8f7541..0000000 Binary files a/4. Debug Expo Native WebView/images/webview-dev-tools.png and /dev/null differ diff --git a/40. Golang, MongoDB Polymorphism/README.md b/40. Golang, MongoDB Polymorphism/README.md deleted file mode 100644 index 32d404b..0000000 --- a/40. Golang, MongoDB Polymorphism/README.md +++ /dev/null @@ -1,250 +0,0 @@ ---- -title: "Golang & MongoDB with Polymorphism and BSON Unmarshal" -tags: ["golang", "mongodb", "polymorphism", "bson"] -license: "public-domain" -slug: "golang-mongodb-polymorphism" -canonical_url: "https://haseebmajid.dev/blog/golang-mongodb-polymorphism/" -date: "2020-12-20" -published: true -cover_image: "images/cover.png" ---- - -Recently I've been working on a new personal project called Banter Bus, a browser-based multiplayer game. -I've been working on a REST API to add new questions to the game. The API is built in Golang and uses -MongoDB as the database. Since Golang is a strongly typed language, we will need to specify the structure of -the data we expect from the database. This can get tricky if the data varies, such as one field changing. - -One issue I encountered was each game type has to have its questions. These questions will be asked to the -users playing the game and are stored differently in the database. This is because each game type has different -rules and therefore needs a different structure. This means when we unmarshal the data in Golang, -we need to specify the structure of these questions. In this article, I will explain how you can create -your own unmarshal function. This will allow you to customise the struct that will hold this data (in Golang) -returned from MongoDB. - -## Collection - -Imagine the data stored in MongoDB looks something like so: - -```json -[ - { - "game_name": "fibbing_it", - "questions": { - "opinion": { - "horse_group": { - "questions": [ - "What do you think about horses?", - "What do you think about camels?" 
- ], - "answers": ["lame", "tasty"] - } - }, - "free_form": { - "bike_group": ["Favourite bike colour?", "A funny question?"] - }, - "likely": ["to eat ice-cream from the tub", "to get arrested"] - } - }, - { - "game_name": "quibly", - "questions": { - "pair": [ - "What do you think about horses?", - "What do you think about camels?" - ], - "answers": ["Favourite bike colour?", "A funny question?"] - } - }, - { - "game_name": "drawlosseum", - "questions": { "drawings": ["horses", "camels"] } - } -] -``` - -Here you can see each game type has a different structure, due to the different rules each game type -will have. - -## Unmarshal - -:::note BSON -Binary JSON the format used by MongoDB readme more about it [here](https://www.mongodb.com/json-and-bson) -::: - -To do this we need to create a custom BSON unmarshal function. This will work very similarly to JSON unmarshaling. -When we try to get data from MongoDB, doing something like: - -```go -collection := _database.Collection("games") -err := collection.FindOne(_ctx, bson.M{"game_name": "quibly"}).Decode(interface{}{}) -``` - -When decoding the object into a struct, MongoDB checks that the (struct) type implements the `Umarshaler` interface. -It implements this interface if it implements the `UnmarshalBSONValue(t bsontype.Type, data []byte) error` function. -If the struct type does implement this function, it will use this function instead of the default `UnmarshalBSONValue()` -function. - -### Example - -Let's take a look at an example, define the following struct. - -```go -type QuestionSet struct { - GameName string `bson:"game_name"` - Questions interface{} `bson:"questions"` -} -``` - -Where the `Questions` field is the one that can vary between the different game types. Now let's define -the structure of the different game type. As you can see each of the game types will have different -rounds and ask different types of questions. 
- -```go -type DrawlosseumQuestionsPool struct { - Drawings []string `bson:"drawings,omitempty"` -} - -type QuiblyQuestionsPool struct { - Pair []string `bson:"pair,omitempty"` - Answers []string `bson:"answers,omitempty"` - Group []string `bson:"group,omitempty"` -} - -type FibbingItQuestionsPool struct { - Opinion map[string]map[string][]string `bson:"opinion,omitempty"` - FreeForm map[string][]string `bson:"free_form,omitempty"` - Likely []string `bson:"likely,omitempty"` -} -``` - -To get the `QuestionSet` struct to implement the `Unmarshaler` interface we need to do something like: - -```go -func (questionSet *QuestionSet) UnmarshalBSONValue(t bsontype.Type, data []byte) error { - var rawData bson.Raw - err := bson.Unmarshal(data, &rawData) - if err != nil { - return err - } - - err = rawData.Unmarshal(&questionSet) - if err != nil { - return err - } - - var questions struct { - Questions bson.Raw - } - - err = rawData.Unmarshal(&questions) - if err != nil { - return err - } - - switch questionPool.GameName { - case "drawlosseum": - questionStructure := DrawlosseumQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - case "quibly": - questionStructure := QuiblyQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - case "fibbing_it": - questionStructure := FibbingItQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - default: - return errors.Errorf("Unknown game name %s", questionPool.GameName) - } - - return err -} -``` - -This function looks very complicated so let's break it down and explain what's going on. - -```go - var rawData bson.Raw - err := bson.Unmarshal(data, &rawData) - if err != nil { - return err - } -``` - -First, we need to unmarshal the data into BSON raw data. We need the BSON raw data because it allows -us to partially unmarshal values. You can read more about it [here](https://godoc.org/gopkg.in/mgo.v2/bson#Raw). - -```go - err = rawData.Unmarshal(&questionSet) - if err != nil { - return err - } -``` - -Next, we need to unmarshal the data into the `QuestionSet` struct, this is mainly to fill all the other fields (`GameName`) -besides `Questions`. - -:::note Names -The struct tags we've defined `bson:"x"` should match the name of that field in the database, else the unmarshaling will not -work correctly i.e. the struct fields will be `nil`. -::: - -```go - var questions struct { - Questions bson.Raw - } - - err = rawData.Unmarshal(&questions) - if err != nil { - return err - } -``` - -Now onto the part that deals with the `Questions` field. Here we get the raw BSON data only related to the `Questions` field. So it won't have anything -related to `GameName`. We create a "temporary" struct to hold this BSON data, with the same field name. - -:::caution BSON Struct Tags -If your field has an `_` or something else a bit different, you should use the `bson` struct tags -to specify the name of the field in the database. 
-::: - -```go - switch questionPool.GameName { - case "drawlosseum": - questionStructure := DrawlosseumQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - case "quibly": - questionStructure := QuiblyQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - case "fibbing_it": - questionStructure := FibbingItQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure - default: - return errors.Errorf("Unknown game name %s", questionPool.GameName) - } -``` - -Finally, let's take a look at the code that unmarshal our questions into the correct structs. -We will use a switch type statement. In this example, the `GameName` will determine how the questions -are stored. Each case looks something like: - -```go - questionStructure := DrawlosseumQuestionsPool{} - err = questions.Questions.Unmarshal(&questionStructure) - questionPool.Questions = questionStructure -``` - -We define the correct struct to use. Then we unmarshal the raw BSON data into this struct. We then assign this struct -to the `questionPool` variable. This is what will be "returned" when we use `FindOne` function shown above. - -That's it! We've now created our custom unmarshal function for dealing with polymorphic data stored in MongoDB in -Golang. - -## Appendix - -- [Cover Photo](https://github.com/mongodb/mongo-go-driver/) -- [Example Project](https://gitlab.com/banter-bus/banter-bus-server/-/blob/39c05ef7e3097697e25343b47f4846d11f9e7ae5/src/core/models/user_models.go#L86-125) diff --git a/40. Golang, MongoDB Polymorphism/images/cover.png b/40. Golang, MongoDB Polymorphism/images/cover.png deleted file mode 100644 index 7a387fc..0000000 Binary files a/40. Golang, MongoDB Polymorphism/images/cover.png and /dev/null differ diff --git a/41. Create a webapp with fizz/README.md b/41. Create a webapp with fizz/README.md deleted file mode 100644 index 9949078..0000000 --- a/41. Create a webapp with fizz/README.md +++ /dev/null @@ -1,406 +0,0 @@ ---- -title: "How to create a Golang Web Application using Fizz" -tags: ["golang", "web-app", "fizz", "gin", "openapi"] -license: "public-domain" -slug: "golang-fizz-web-app" -canonical_url: "https://haseebmajid.dev/blog/golang-fizz-web-app/" -date: "2021-01-19" -published: true -cover_image: "images/cover.png" ---- - -# Background - -A bit of background before we start the article. When I develop a Python web service I use the -[Connexion library created by Zalando](https://github.com/zalando/connexion). It's a great library which is built on top of -Flask. It uses an OpenAPI Specification (OAS) file to handle input validation and routing for you. Therefore reducing the boilerplate code you need to write. - -The main advantage of this is that we have a design-first approach to developing our API. We fully define the -OAS then develop the code/web service. This also keeps the OAS up to date, helping to mitigate the issue of the -code/documentation getting out of date. Especially when you share the OAS with other people (clients) to use. The last -thing you want to do is give them an out-of-date file. - -Anyways short story aside, recently I started learning Golang and developing a simple CRUD web service using Gin. -However, I discovered (at least at the time of writing) there was no equivalent library to Connexion. The closest -library I could find was Fizz. - -## What is Fizz? 
- -Fizz almost works the opposite way Connexion does. It generates an OAS -file from our code. Now again I prefer the Connexion approach because we just use -the OAS file we created at the beginning of the project. However, this is the next best thing. - -What I ended up doing was creating an OAS by hand. Then implementing that OAS using Golang and letting Fizz -auto-generate the "new" OAS. This "new" OAS is the one that gets shared with clients and is kept up to date. -In theory the OAS I defined manually can now be deleted as it's not required anymore. - -This solves the problem of our code getting out-of-date with the specification. -Fizz also uses other libraries behind the scenes to help us reduce the boilerplate code similiar to -how Connexion works. - -# Web Service - -Now onto the real meat and potatoes of this article. We will create three different endpoints: - -- GET /healthcheck: Checks if the application is healthy or not -- GET /pet/{name}: Get information about a single pet -- PUT /pet/{name}: Update information about a single pet - -## Structure - -```bash -├── cmd -│ └── example-fizz-project -│ └── main.go -├── go.mod -├── go.sum -└── internal - └── server - ├── controllers - │ ├── maintenance_controllers.go - │ └── pets_controller.go - ├── models - │ ├── error.go - │ ├── healthcheck.go - │ ├── input.go - │ ├── params.go - │ └── pets.go - └── routes.go -``` - -Our project will follow the structure shown above. We will go (no pun intended 🤷) over what each of the folder "do". - -:::note Core Code -Since this example application is so simple we don't have a `core` folder but for more complicated -applications you should probably add another folder inline with the `server` folder. For example, this could include code that interacts with the database. - -This helps to de-couple the application's various layers. You could, for example, remove the web service part in the `server` folder and -turn into a CLI application at a later date. Using the core code you already have. -::: - -## Dependencies - -The main dependency for this project is [Fizz](https://github.com/wI2L/fizz). Simply run `go get github.com/wI2L/fizz` to -install it. - -## internal - -The main logic of our web service will be stored within the `internal` folder. - -### server - -This folder contains all the logic related to the web service itself. This will include -models (data structure returned to the client) and the controllers, which are functions -that will handle the various requests sent by clients. They act as an "interface" to our application. - -:::note Fizz Routing -The Fizz library abstracts away routing partially for us, more on this later. -::: - -#### models - -This folder contains all the data structure and data types that will be received by the application -from the client or sent back to the client from the application. For example: - -```go:title=internal/server/models/pets.go file=./source_code/internal/server/models/pets.go - -``` - -This will be the object sent back to the client when they request to get a pet. Note the use of struct tags -`json:"name"`. When the data is unmarshaled from JSON to this struct (again we will see how this done later) the `Name` -field will look for the `name` field in the JSON file. Later on we will see why we need to specify struct tags and not -just us being explicit. 
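The included `pets.go` file is pulled in from the source code repo and isn't shown inline here. As a rough sketch (the exact field set is an assumption, based on the `GetPet` handler we will see later, which returns a name, price and breed), the model might look like:

```go
package models

// Pet is the object sent back to the client when they request a pet.
// Field set assumed from the GetPet handler shown later in this article.
type Pet struct {
	Name  string `json:"name"`
	Price int    `json:"price"`
	Breed string `json:"breed"`
}
```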
- -```go:title=internal/server/models/params.go file=./source_code/internal/server/models/params.go - -``` - -Note the struct tag in this example is `query` and not `json` because it's used as a query parameter. -We also have one final type of model to take a look at: - -```go:title=internal/server/models/input.go file=./source_code/internal/server/models/input.go - -``` - -This model is used when we need to pass both a Pet struct in the body of a request and also a query parameter. Again -we will see exactly how we use this model a bit later. - -#### controllers - -The controllers folder contains the main web service logic for the application. It contains the one function for every -route/endpoint you have in your application. Let's take a look at the maintenance controller first - -```go:title=internal/server/controllers/maintenance_controllers.go file=./source_code/internal/server/controllers/maintenance_controllers.go - -``` - -So we have defined a new function, which receives a single argument the gin context (which we don't use, hence the `_`). -This function returns the health check model. It simply checks if we can connect to -`example.com:80` (on port 80). - -Fizz uses the [Tonic library](https://github.com/loopfz/gadgeto/tree/master/tonic) to assign function handlers to our -route. - -> Package tonic handles path/query/body parameter binding in a single consolidated input object which allows you to remove all the boilerplate code that retrieves and tests the presence of various parameters. - Tonic README - -We need to specify two return types in the function definition because this function is a handler set using Tonic -Again we will see how we do this in the `routes.go` file. The first return type is a struct, which will be -returned to the client (marshalled into JSON). In the example above this is the `*models.Healthcheck`. -The second is an `error`, again we will see how errors are handled a bit later. - -Let's now take a look at the pets controller. - -```go:title=internal/server/controllers/pets_controller.go file=./source_code/internal/server/controllers/pets_controller.go - -``` - -The first function: - -```go:title=internal/server/controllers/pets_controller.go -func GetPet(_ *gin.Context, params *models.PetParams) (models.Pet, error) { - if params.Name != "bob" { - return models.Pet{}, errors.NotFoundf("Pet %s", params.Name) - } - - return models.Pet{ - Name: "bob", - Price: 100, - Breed: "bengal", - }, nil - -} -``` - -:::note JuJu Errors -For throwing errors in this application we used [juju's error library](https://github.com/juju/errors). -::: - -The main difference in this function is we pass in an extra parameter which is the query parameter `{name}`. The logic of this function is not very smart because -it expects the name of the pet to be `bob` in order to send a successful respone back to the client. Of course in -reality you would look in your data store for information about the pet. - -The second function looks like: - -```go:title=internal/server/controllers/pets_controller.go -func UpdatePet(_ *gin.Context, input *models.PetInput) (models.Pet, error) { - if input.PetParams.Name != "bob" { - return models.Pet{}, errors.NotFoundf("Pet %s", input.PetParams.Name) - } - - return input.Pet, nil -} -``` - -Again this is slightly different because the client sends both a HTTP body and a path query parameter. 
So the -`input` argument is a combination of two structs: - -```go -type PetInput struct { - PetParams - Pet -} -``` - -We can access the query parameter like so `input.PetParams.Name` and the pet's data like `input.Pet`. Note how we -use the name of the struct after `input`. This is how we can combine the body, query parameters and also the query -string into a single struct. The struct tags are really important as they let Tonic know what type of data that field is -i.e. `json` or `query` etc. - -Again we can ignore the logic of the function itself. It's not supposed to be very complicated. Just more of an -example of how we can use Fizz, with more complicated HTTP requests. - -#### routes.go - -This file is where we link the routes to their specific handler functions (using Tonic). This is also where we -provide most of the data that will be used to populate the OAS file. - -```go:title=internal/server/routes.go -func NewRouter() (*fizz.Fizz, error) { - engine := gin.New() - - engine.Use(cors.Default()) - - fizzApp := fizz.NewFromEngine(engine) - - infos := &openapi.Info{ - Title: "Example API", - Description: "The API definition for the Example API.", - Version: "1.0.0", - } - - fizzApp.GET("/openapi.json", nil, fizzApp.OpenAPI(infos, "json")) - - group := fizzApp.Group("", "maintenance", "Related to managing the maintenance of the API.") - group.GET("/healthcheck", []fizz.OperationOption{ - fizz.Summary("Checks API is healthy."), - fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil), - }, tonic.Handler(controllers.Healthcheck, http.StatusOK)) - - group.GET("/pets:name", []fizz.OperationOption{ - fizz.Summary("Get a pet by name."), - fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil), - fizz.Response(fmt.Sprint(http.StatusNotFound), "Pet Not Found", models.APIError{}, nil, nil), - }, tonic.Handler(controllers.GetPet, http.StatusOK)) - - group.PUT("/pets:name", []fizz.OperationOption{ - fizz.Summary("Update a pet."), - fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil), - }, tonic.Handler(controllers.UpdatePet, http.StatusOK)) - - if len(fizzApp.Errors()) != 0 { - return nil, fmt.Errorf("fizz errors: %v", fizzApp.Errors()) - } - tonic.SetErrorHook(errHook) - return fizzApp, nil -} -``` - -Let's break this function down: - -```go:title=internal/server/routes.go - engine := gin.New() - - engine.Use(cors.Default()) - - fizzApp := fizz.NewFromEngine(engine) - - infos := &openapi.Info{ - Title: "Example API", - Description: "The API definition for the Example API.", - Version: "1.0.0", - } - - fizzApp.GET("/openapi.json", nil, fizzApp.OpenAPI(infos, "json") -``` - -First, we create the Gin engine and share this with a new Fizz engine. Fizz just uses Gin behind the scenes. -Then we create an info struct, which stores the metadata for the generated OAS file. Then we add a new route -`/openapi.json`, which will serve the OAS file. - -Note we could change the path if we wanted and serve -a YAML file as well `fizzApp.GET("/openapi", nil, fizzApp.OpenAPI(infos, "yaml"))`. Here we removed the -extension and changed the generated file so that we will serve the client a YAML file. 
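Once the service is running, a quick way to sanity-check the generated specification is to request it from the running app. A small sketch, assuming Gin is listening on the default local port `8080` (the port depends on how you start the engine in `main.go`):

```bash
# Fetch the OpenAPI spec that Fizz generates from our route definitions.
curl http://localhost:8080/openapi.json
```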
-
-```go:title=internal/server/routes.go
-    group := fizzApp.Group("", "maintenance", "Related to managing the maintenance of the API.")
-    group.GET("/healthcheck", []fizz.OperationOption{
-        fizz.Summary("Checks API is healthy."),
-        fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil),
-    }, tonic.Handler(controllers.Healthcheck, http.StatusOK))
-
-    group.GET("/pets:name", []fizz.OperationOption{
-        fizz.Summary("Get a pet by name."),
-        fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil),
-        fizz.Response(fmt.Sprint(http.StatusNotFound), "Pet Not Found", models.APIError{}, nil, nil),
-    }, tonic.Handler(controllers.GetPet, http.StatusOK))
-
-    group.PUT("/pets:name", []fizz.OperationOption{
-        fizz.Summary("Update a pet."),
-        fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil),
-    }, tonic.Handler(controllers.UpdatePet, http.StatusOK))
-```
-
-Next, let's get to the part of the function where we define our routes. First, we create a group; this groups
-related routes within the OAS (under a shared tag).
-
-```go:title=internal/server/routes.go
-    group.GET("/pets:name", []fizz.OperationOption{
-        fizz.Summary("Get a pet by name."),
-        fizz.Response(fmt.Sprint(http.StatusInternalServerError), "Server Error", models.APIError{}, nil, nil),
-        fizz.Response(fmt.Sprint(http.StatusNotFound), "Pet Not Found", models.APIError{}, nil, nil),
-    }, tonic.Handler(controllers.GetPet, http.StatusOK))
-```
-
-Next, let's take a look at how we define a new route. We add it to our existing group, then we give it some
-information to add to the OAS, such as a summary and the responses it can return; here I have defined the possible
-errors. Note that because I prefer not to use magic numbers, I have used the `http` package constants instead
-of plain numbers, i.e. 404 -> `http.StatusNotFound`. And of course the most important bit, the Tonic handler,
-is where we tell this route what function to call when a client sends a request to it. In this case, we
-choose the `GetPet` function we mentioned earlier, and on a successful response we return a `200` status code, i.e.
-`http.StatusOK`.
-
-You can define whichever status code you want here, such as `http.StatusCreated` or `http.StatusNoContent`.
-
-```go:title=internal/server/routes.go
-    if len(fizzApp.Errors()) != 0 {
-        return nil, fmt.Errorf("fizz errors: %v", fizzApp.Errors())
-    }
-    tonic.SetErrorHook(errHook)
-    return fizzApp, nil
-```
-
-The final part of the function checks if Fizz returned any errors and sets up the Tonic error hook, which decides
-what to do if any of the Tonic function handlers return an error, as we saw some of the functions do earlier.
-
-```go:title=internal/server/routes.go
-func errHook(_ *gin.Context, e error) (int, interface{}) {
-    code, msg := http.StatusInternalServerError, http.StatusText(http.StatusInternalServerError)
-
-    if _, ok := e.(tonic.BindError); ok {
-        code, msg = http.StatusBadRequest, e.Error()
-    } else {
-        switch {
-        case errors.IsBadRequest(e), errors.IsNotValid(e), errors.IsNotSupported(e), errors.IsNotProvisioned(e):
-            code, msg = http.StatusBadRequest, e.Error()
-        case errors.IsForbidden(e):
-            code, msg = http.StatusForbidden, e.Error()
-        case errors.IsMethodNotAllowed(e):
-            code, msg = http.StatusMethodNotAllowed, e.Error()
-        case errors.IsNotFound(e), errors.IsUserNotFound(e):
-            code, msg = http.StatusNotFound, e.Error()
-        case errors.IsUnauthorized(e):
-            code, msg = http.StatusUnauthorized, e.Error()
-        case errors.IsAlreadyExists(e):
-            code, msg = http.StatusConflict, e.Error()
-        case errors.IsNotImplemented(e):
-            code, msg = http.StatusNotImplemented, e.Error()
-        }
-    }
-    err := models.APIError{
-        Message: msg,
-    }
-    return code, err
-}
-```
-
-This function receives an error, since we are using juju errors in our controller functions. We can then use the `IsX`
-functions provided by the library to check what kind of error we received. Using a switch statement, we then determine
-what HTTP status code to return to the client depending on the error thrown by the function. For example, a `NotFoundError` means we return `http.StatusNotFound` (404).
-
-Ok, that's the main part of our application, so how do we start our web service?
-
-## cmd
-
-In our cmd folder, we have the `main.go` file.
-
-### main.go
-
-The `main.go` file, as is good practice in Golang, is used to start our application.
-
-```go:title=cmd/example-fizz-project/main.go file=./source_code/cmd/example-fizz-project/main.go
-
-```
-
-In the root folder of our application run this command: `go run cmd/example-fizz-project/main.go`. Then you should see something like:
-
-```bash
-go run cmd/example-fizz-project/main.go
-
-[GIN-debug] [WARNING] Running in "debug" mode. Switch to "release" mode in production.
- - using env: export GIN_MODE=release
- - using code: gin.SetMode(gin.ReleaseMode)
-
-[GIN-debug] GET /openapi.json --> github.com/wI2L/fizz.(*Fizz).OpenAPI.func1 (2 handlers)
-[GIN-debug] GET /healthcheck --> github.com/wI2L/fizz.(*RouterGroup).Handle.func1 (2 handlers)
-[GIN-debug] GET /pets:name --> github.com/wI2L/fizz.(*RouterGroup).Handle.func1 (2 handlers)
-[GIN-debug] PUT /pets:name --> github.com/wI2L/fizz.(*RouterGroup).Handle.func1 (2 handlers)
-```
-
-And voilà, you now have a working web service created using Fizz. That's it; we have now built a web application with Fizz and Golang.
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/41.%20Create%20a%20webapp%20with%20fizz/source_code)
-- [Fizz](https://github.com/wI2L/fizz/)
-- [Tonic](https://github.com/loopfz/gadgeto/tree/master/tonic)
-- [Juju's Errors](https://github.com/juju/errors)
diff --git a/41. Create a webapp with fizz/images/cover.png b/41. Create a webapp with fizz/images/cover.png
deleted file mode 100644
index 51f6066..0000000
Binary files a/41. Create a webapp with fizz/images/cover.png and /dev/null differ
diff --git a/42. Cypress with Gitlab CI/README.md b/42. Cypress with Gitlab CI/README.md
deleted file mode 100644
index 65e854f..0000000
--- a/42. Cypress with Gitlab CI/README.md
+++ /dev/null
@@ -1,230 +0,0 @@
----
-title: "Testing a Gatsby application with Cypress on Gitlab CI"
-tags: ["testing", "cypress", "react", "gitlab"]
-license: "public-domain"
-slug: "cypress-with-gitlab-ci"
-canonical_url: "https://haseebmajid.dev/blog/cypress-with-gitlab-ci"
-date: "2021-03-22"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this blog post, we will go over how we can automatically test a Gatsby site end-to-end (e2e), using Cypress on Gitlab CI.
-
-# Introduction
-
-## Gatsby
-
-[Gatsby](https://www.gatsbyjs.com) is a static site generator (SSG) built upon React. It allows us to create "blazing" fast websites.
-In this example, we will use a simple blog starter template and add a Cypress test to it.
-
-## Cypress
-
-> Fast, easy and reliable testing for anything that runs in a browser. - Cypress README
-
-[Cypress](http://cypress.io/) allows us to test a web application the way a real user would use it.
-Cypress will be used to test our Gatsby application here, though you can use it to test any website.
-
-## Gitlab CI
-
-[Gitlab CI](https://docs.gitlab.com/ee/ci/) is a continuous integration pipeline that will allow us to run our
-tests automatically, such as when we merge code into the master branch.
-
-# Getting Started
-
-## Gatsby
-
-Create a new Gatsby site, using this default Gatsby starter:
-
-```bash{promptUser: haseeb}
-gatsby new gatsby-starter-blog https://github.com/gatsbyjs/gatsby-starter-blog
-cd gatsby-starter-blog
-```
-
-### (Optional) Typescript
-
-To add Typescript to a Gatsby web application:
-
-```bash{promptUser: haseeb}
-yarn add typescript @types/react @types/react-dom @types/node -D
-yarn add gatsby-plugin-typescript
-```
-
-Add the following to your `gatsby-config.js`.
-
-```js:title=gatsby-config.js
-module.exports = {
-  plugins: [
-    {
-      resolve: `gatsby-plugin-typescript`,
-      options: {
-        isTSX: true, // defaults to false
-        jsxPragma: `jsx`, // defaults to "React"
-        allExtensions: true, // defaults to false
-      },
-    },
-  ],
-}
-```
-
-Then create a new file `tsconfig.json` (in the project root, where the `gatsby-config.js` is).
-
-```json:title=tsconfig.json file=./source_code/tsconfig.json
-
-```
-
-## Cypress
-
-Now to finally add Cypress to our application so we can test it. First, install the dependencies.
-
-```bash
-yarn add -D cypress cypress-axe axe-core start-server-and-test
-# Add types
-yarn add -D @types/cypress-axe
-```
-
-Next, let's create a `cypress.json` file in the project root.
-
-```json:title=cypress.json
-{
-  "baseUrl": "http://localhost:8000/",
-  "integrationFolder": "cypress/e2e"
-}
-```
-
-Next, let's add some new "scripts" to the `package.json` file.
-
-```json:title=package.json{1-2,10}
-  "scripts": {
-    "cy:open": "cypress open",
-    "cy:run": "cypress run",
-    "build": "gatsby build",
-    "develop": "gatsby develop",
-    "format": "prettier --write \"**/*.{js,jsx,ts,tsx,json,md}\"",
-    "start": "npm run develop",
-    "serve": "gatsby serve",
-    "clean": "gatsby clean",
-    "test": "echo \"Write tests! -> https://gatsby.dev/unit-testing\" && exit 1",
-    "test:e2e": "start-server-and-test 'yarn develop' http://localhost:8000 'yarn cy:open'",
-    "test:e2e:ci": "start-server-and-test 'yarn develop' http://localhost:8000 'yarn cy:run'"
-  }
-```
-
-These scripts allow us to start Cypress: `cy:open` opens a GUI to visualise our tests, whereas `cy:run` does it all
-in the terminal (the browser runs in headless mode).
-We will run `test:e2e:ci` in our CI pipeline; it uses
-the `start-server-and-test` command to start our Gatsby server via `yarn develop`, then runs `cy:run` to
-start our tests.
-
-### Structure
-
-Create a new folder called `cypress`, which will end up looking something like this.
-
-```bash
-.
-├── e2e
-│   └── accessibility.test.ts
-├── fixtures
-│   └── graphql.json
-├── plugins
-│   └── index.js
-├── support
-│   ├── commands.js
-│   ├── index.d.ts
-│   └── index.js
-└── tsconfig.json
-```
-
-```bash
-mkdir -p cypress/support
-```
-
-Create a file at `cypress/support/commands.js`.
-
-```js:title=cypress/support/commands.js
-Cypress.Commands.add(`assertRoute`, (route) => {
-  cy.url().should(`equal`, `${window.location.origin}${route}`);
-});
-```
-
-Add some custom types for Cypress in `index.d.ts`, if you are using Typescript.
-
-```ts:title=cypress/support/index.d.ts file=./source_code/cypress/support/index.d.ts
-
-```
-
-Next, create the `index.js` file, which should look something like this.
-
-```js:title=cypress/support/index.js file=./source_code/cypress/support/index.js
-
-```
-
-Next, let's create a plugins folder: `mkdir -p cypress/plugins`.
-
-```js:title=cypress/plugins/index.js file=./source_code/cypress/plugins/index.js
-
-```
-
-Now finally, let's create our tests folder: `mkdir -p cypress/e2e`.
-
-### cypress-axe
-
-In this blog post we won't go over any complicated Cypress tests; we will simply use `cypress-axe` to test
-the accessibility (a11y) of our website.
-
-```ts:title=cypress/e2e/accessibility.test.ts file=./source_code/cypress/e2e/accessibility.test.ts
-
-```
-
-Note the `///` comments at the top, used to add types for Cypress. The test above will go to our
-home page, test if it has any a11y violations and, if so, fail the test.
-
-We can now run our tests locally by running this command:
-
-```bash
-yarn run test:e2e
-```
-
-## Gitlab CI
-
-Now, how can we automate this so the tests run, say, every time we make changes on the master branch, to make
-sure we haven't broken any a11y rules? Create a new `.gitlab-ci.yml` or add the following job to an existing CI file.
-
-```yml:title=.gitlab-ci.yml
-image: node:12.14.1
-
-variables:
-  CYPRESS_CACHE_FOLDER: "$CI_PROJECT_DIR/cache/Cypress"
-
-cache:
-  key: ${CI_COMMIT_REF_SLUG}
-  paths:
-    - cache/Cypress
-    - node_modules
-
-stages:
-  - test
-
-before_script:
-  - yarn install
-
-tests:
-  image: cypress/browsers:node12.14.1-chrome83-ff77
-  stage: test
-  script:
-    - yarn test:e2e:ci
-```
-
-I won't go into the details of what makes up a Gitlab CI file. At the top of the file, we cache the `node_modules`
-folder and the Cypress cache so they can be shared between jobs.
-The `tests` job itself is very simple: it uses the `cypress/browsers:node12.14.1-chrome83-ff77` Docker
-image, which provides a headless Chrome browser that Cypress can leverage to run the tests,
-as we won't have access to a GUI in the Gitlab CI runner. The job then runs `yarn test:e2e:ci` to
-run our Cypress tests.
-
-That's it; it is quite simple to add Cypress tests that run in our CI pipeline.
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/42.%20Cypress%20with%20Gitlab%20CI/source_code)
-- [Cypress](http://cypress.io/)
-- [Example Job](https://gitlab.com/hmajid2301/portfolio-site/-/jobs/1080367107)
diff --git a/42. Cypress with Gitlab CI/images/cover.jpg b/42. Cypress with Gitlab CI/images/cover.jpg
deleted file mode 100644
index e030920..0000000
Binary files a/42. Cypress with Gitlab CI/images/cover.jpg and /dev/null differ
diff --git a/43. Test python socketio & uvicorn/README.md b/43. Test python socketio & uvicorn/README.md
deleted file mode 100644
index 3cad675..0000000
--- a/43. Test python socketio & uvicorn/README.md
+++ /dev/null
@@ -1,306 +0,0 @@
----
-title: "Testing socketio in Python & uvicorn"
-tags: ["testing", "python", "socketio", "pytest"]
-license: "public-domain"
-slug: "testing-python-socketio"
-canonical_url: "https://haseebmajid.dev/blog/testing-python-socketio"
-date: "2021-12-23"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this article I will show you how you can test an async Socketio application in Python, where the ASGI server we are running is uvicorn.
-I will be referring to these tests as integration tests, though depending on who you ask they could be called E2E tests, system tests, slow tests etc.
-What I am referring to is simply testing the entire "flow" of a socketio event, i.e. emitting an event from a client, then receiving it on the web service
-and, in my actual projects, interacting with a real database.
-
-We will be using `pytest` as our testing framework.
-
-::: note
-ASGI (Asynchronous Server Gateway Interface) is a spiritual successor to WSGI, intended to provide a standard interface between async-capable Python web servers, frameworks, and applications. - https://asgi.readthedocs.io/en/latest/
-:::
-
-## main.py
-
-```python:title=app/main.py file=./source_code/app/main.py
-
-```
-
-Let's take a look at our socketio app, which is a very simple web app that listens for one event, `FOO`, and
-responds with a `BAR` event. It is just this single file.
-
-## conftest.py
-
-The `conftest.py` file is automatically loaded by pytest and allows our test modules to access fixtures defined
-in this file. One of the best features of pytest is fixtures. Fixtures are functions containing reusable bits of code we
-can run in our tests, such as static data used by tests.
-
-```python:title=tests/conftest.py file=./source_code/tests/conftest.py
-
-```
-
-### Quick Aside FastAPI Testing
-
-:::important
-tl;dr: We need to start and stop the Uvicorn server within our tests.
-:::
-
-Now, when testing, say, a FastAPI application, there is a built-in test client we can use. This means we don't actually have
-to spin up a Uvicorn server to test our application. We can simply pretend to send requests to the FastAPI web service
-and it will handle the routing behind the scenes.
-
-We can do something like this, where `httpx` is an async HTTP client (think of it like the `requests` library).
-
-```python
-from typing import AsyncIterator
-
-import pytest
-from asgi_lifespan import LifespanManager
-from httpx import AsyncClient
-
-from app.main import app
-
-
-@pytest.fixture()
-async def client() -> AsyncIterator[AsyncClient]:
-    async with LifespanManager(app):
-        async with AsyncClient(app=app, base_url="http://localhost") as client:
-            yield client
-```
-
-Then we can use it like so in our tests:
-
-```python
-import pytest
-from fastapi import status
-from httpx import AsyncClient
-
-
-@pytest.mark.asyncio
-async def test_add_game(client: AsyncClient):
-    request_data = {"name": "example"}  # example request body
-    response = await client.post("/game", json=request_data)
-    assert response.status_code == status.HTTP_201_CREATED
-```
-
-However, socketio at the moment does not provide us with a test client we can use. So we will start and stop a Uvicorn
-server and send actual Socketio requests from a Socketio client; there is a Socketio client library available in Python
-we can use to do this.
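-
-As a rough illustration (not from the source code), using that client library on its own looks something like the
-sketch below, assuming a server is already listening on `http://localhost:8080` (an assumed URL):
-
-```python
-import asyncio
-
-import socketio
-
-
-async def main() -> None:
-    # Create an async Socketio client and connect to the running server.
-    sio = socketio.AsyncClient()
-    await sio.connect("http://localhost:8080")
-
-    # Emit an event, exactly like our tests will do against the test server.
-    await sio.emit("FOO", {"hello": "world"})
-
-    await sio.disconnect()
-
-
-asyncio.run(main())
-```
-
-The fixtures below wrap this same client (and the server it talks to), so each test doesn't have to manage
-connections itself.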
-
-```python:title=tests/conftest.py
-class UvicornTestServer(uvicorn.Server):
-    def __init__(self, app: ASGIApp = main.app, host: str = LISTENING_IF, port: int = PORT):
-        self._startup_done = asyncio.Event()
-        self._serve_task: Optional[Awaitable[Any]] = None
-        super().__init__(config=uvicorn.Config(app, host=host, port=port))
-
-    async def startup(self) -> None:
-        """Override uvicorn startup"""
-        await super().startup()
-        self.config.setup_event_loop()
-        self._startup_done.set()
-
-    async def start_up(self) -> None:
-        """Start up server asynchronously"""
-        self._serve_task = asyncio.create_task(self.serve())
-        await self._startup_done.wait()
-
-    async def tear_down(self) -> None:
-        """Shut down server asynchronously"""
-        self.should_exit = True
-        if self._serve_task:
-            await self._serve_task
-```
-
-This is a test class which we can use to start and stop the Uvicorn server. Note that the class inherits
-from `uvicorn.Server`; we need to override the `startup()` method, as we want to change the startup behaviour a bit.
-
-Before explaining the code above, let's take a look at how we may use it:
-
-```python:title=conftest.py
-@pytest.fixture(scope="session")
-def event_loop():
-    loop = asyncio.get_event_loop()
-    yield loop
-    loop.close()
-
-
-@pytest.fixture(autouse=True, scope="session")
-async def startup_and_shutdown_server():
-    server = UvicornTestServer()
-    await server.start_up()
-    yield
-    await server.tear_down()
-
-
-@pytest.fixture(scope="session")
-async def client() -> AsyncIterator[AsyncClient]:
-    sio = socketio.AsyncClient()
-    await sio.connect(BASE_URL)
-    yield sio
-    await sio.disconnect()
-```
-
-What we have done is create three pytest fixtures; the first simply provides an event loop so we can test async code.
-
-### Tangent on asyncio
-
-To test async code with pytest we need to install the `pytest-asyncio` library.
-By default this gives us an `event_loop` fixture with `function` scope, so it starts and stops for
-each test function. However, if we want to use fixtures that aren't of scope `function`, i.e. `session` or `module`,
-then we need to redefine the `event_loop` fixture, as we have done in the example above.
-
-Okay, back to our code above. The main bit we are interested in is the `startup_and_shutdown_server` function: here we
-start the server before all of our tests and, due to how `yield` works (you can read more about
-[yield here](/blog/python-yield-explained/)), we stop our server after all of our tests have run.
-
-This happens automatically, without calling the function, because of the decorator we have provided:
-`@pytest.fixture(autouse=True, scope="session")`.
-Again, we are using scope `session` so that this fixture isn't run for
-every test function (which would slow down our tests). We could've set it to `module`, but again,
-if we have multiple test files we don't want to run this fixture for every file (module).
-
-### Deeper dive into UvicornTestServer
-
-Let's take a look at the first two methods.
-
-```python:title=conftest.py
-    def __init__(self, app: ASGIApp = main.app, host: str = LISTENING_IF, port: int = PORT):
-        self._startup_done = asyncio.Event()
-        self._serve_task: Optional[Awaitable[Any]] = None
-        super().__init__(config=uvicorn.Config(app, host=host, port=port))
-
-    async def startup(self) -> None:
-        """Override uvicorn startup"""
-        await super().startup()
-        self.config.setup_event_loop()
-        self._startup_done.set()
-```
-
-The `__init__` magic dunder method creates an asyncio event, `asyncio.Event()`.
-These events are often used to notify tasks that something has happened:
-
-> An asyncio event can be used to notify multiple asyncio tasks that some event has happened. - https://docs.python.org/3/library/asyncio-sync.html#asyncio.Event
-
-Then we create a variable, `self._serve_task: Optional[Awaitable[Any]] = None`; we will see how this is used later.
-Finally, we call the parent class's `__init__` method (`super().__init__()`). This calls the `__init__` function
-of the `uvicorn.Server` class. We do this to set the `uvicorn.Config`, which includes our app and the host and port
-on which to start the server.
-
-Onto the second method, `startup`; this also overrides a method in the parent class. In fact, the first thing we do is call
-the parent class's `startup` method (`await super().startup()`). Then we start the event loop ourselves with
-`self.config.setup_event_loop()`; this is where our web app will run.
-
-::: note
-This is a different event loop from the one our tests run in.
-:::
-
-Finally, we do `self._startup_done.set()`; we are setting this event as true, i.e. complete. So any coroutines waiting
-until this event is set can carry on with their execution.
-
-::: note
-An Event object manages an internal flag that can be set to true with the set() method and reset to false with the clear() method. The wait() method blocks until the flag is set to true. The flag is set to false initially. - https://docs.python.org/3/library/asyncio-sync.html#asyncio.Event
-:::
-
-#### Yet another tangent on run() method
-
-Now, the parent class does have a `run` method we could use, which would start the event loop for us. This however won't work;
-let's pretend we change the `startup_and_shutdown_server` function to look like this (`server.run()`).
-
-```python{4}
-@pytest.fixture(autouse=True, scope="session")
-async def startup_and_shutdown_server():
-    server = UvicornTestServer()
-    await server.run()
-    yield
-    await server.tear_down()
-```
-
-We would get the following error: `RuntimeError: asyncio.run() cannot be called from a running event loop`. This is because,
-if we take a look at the `run` method in the parent class, it contains something like this line:
-`return asyncio.run(self.serve(...))`.
-
-This is why we need to write our own code to handle starting the Uvicorn server.
-
-#### `start_up` and `tear_down`
-
-Okay, let's move on and take a look at the `start_up` and `tear_down` methods.
-
-```python:title=conftest.py
-async def start_up(self) -> None:
-    self._serve_task = asyncio.create_task(self.serve())
-    await self._startup_done.wait()
-
-async def tear_down(self) -> None:
-    self.should_exit = True
-    if self._serve_task:
-        await self._serve_task
-```
-
-Remember, these are the two methods we will call in our "startup and shutdown" fixture. The `start_up` method creates a task and assigns it
-to our empty variable from the `__init__` method: `self._serve_task = asyncio.create_task(self.serve())`. It calls the `serve` method to start
-the Uvicorn server.
-
-:::note What does `create_task` do?
-
-It submits the coroutine to run "in the background", i.e. concurrently with the current task and all other tasks, switching between them at await points. It returns an awaitable handle called a "task" which you can also use to cancel the execution of the coroutine. - https://stackoverflow.com/questions/62528272/what-does-asyncio-create-task-do
-:::
-
-:::note What is a task?
-It's an asyncio construct that tracks execution of a coroutine in a concrete event loop. When you call create_task, you submit a coroutine for execution and receive back a handle. You can await this handle when you actually need the result, or you can never await it, if you don't care about the result. This handle is the task, and it inherits from Future, which makes it awaitable and also provides the lower-level callback-based interface, such as add_done_callback. - https://stackoverflow.com/questions/62528272/what-does-asyncio-create-task-do
-:::
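-
-To make these two ideas concrete, here is a tiny self-contained illustration (not from the source code) of a
-background task signalling readiness via an event, the same pattern `UvicornTestServer` uses:
-
-```python
-import asyncio
-
-
-async def serve(started: asyncio.Event) -> None:
-    # Pretend to be a server: signal that startup is done, then run "forever".
-    started.set()
-    await asyncio.sleep(3600)
-
-
-async def main() -> None:
-    started = asyncio.Event()
-    # Run serve() in the background, concurrently with this coroutine.
-    task = asyncio.create_task(serve(started))
-    # Block until serve() calls started.set().
-    await started.wait()
-    print("server is up")
-    # Tear down: cancel the background task and wait for it to finish.
-    task.cancel()
-    try:
-        await task
-    except asyncio.CancelledError:
-        pass
-
-
-asyncio.run(main())
-```
-
-The real `start_up`/`tear_down` methods follow exactly this shape, just with `self.serve()` as the background
-coroutine and `should_exit` instead of cancellation.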
-
-Then we `await self._startup_done.wait()`; this is the event we created earlier. It will wait until the `set()` function
-has been called in the `startup` method above.
-
-Now onto the `tear_down` method, where we set `should_exit` to true. There is a `main_loop` method called by our
-`serve` method in the parent class. This `main_loop` calls an `on_tick` function, which returns whether `self.should_exit` is true.
-So the call chain looks like: `serve` -> `main_loop` -> `on_tick`. When `on_tick` returns `should_exit` as true, it exits the main loop:
-
-:::important This is code from Uvicorn
-:::
-
-```python{3,8}:title=.venv/.../uvicorn/server.py
-async def main_loop(self) -> None:
-    counter = 0
-    should_exit = await self.on_tick(counter)
-    while not should_exit:
-        counter += 1
-        counter = counter % 864000
-        await asyncio.sleep(0.1)
-        should_exit = await self.on_tick(counter)
-```
-
-### Client Fixture
-
-Finally, let's take a look at our final fixture; here we create a client that can be used to make requests with socketio.
-We use a similar technique with `yield`, so we return a socketio client. We will see how this is used in one of our tests.
-
-```python:title=conftest.py
-@pytest.fixture(scope="session")
-async def client() -> AsyncIterator[AsyncClient]:
-    sio = socketio.AsyncClient()
-    await sio.connect(BASE_URL)
-    yield sio
-    await sio.disconnect()
-```
-
-## test_room.py
-
-```python:title=tests/test_room.py file=./source_code/tests/test_room.py
-
-```
-
-Since we need to wait for the `FOO` event to return a `BAR` event, we use a future:
-we await until we get a response, then set the returned data on the future.
-
-```python:title=tests/test_room.py
-@client.on("BAR")
-def _(data):
-    future.set_result(data)
-```
-
-We `await asyncio.wait_for(future, timeout=5.0)` to wait for the future to have data set on it.
-
-That's it; the code itself is fairly simple once everything is set up in `conftest.py` to actually do the test.
-
-## Appendix
-
-- [Source Code](https://gitlab.com/hmajid2301/articles/tree/master/43.%20Test%20python%20socketio%20&%20uvicorn/source_code)
-- [Github Issue: UvicornTestServer](https://github.com/miguelgrinberg/python-socketio/issues/332#issuecomment-712928157)
-- [Async Create Task SO](https://stackoverflow.com/questions/62528272/what-does-asyncio-create-task-do)
-- [Real application using this testing pattern](https://gitlab.com/banter-bus/banter-bus-core-api)
diff --git a/43. Test python socketio & uvicorn/images/cover.jpg b/43. Test python socketio & uvicorn/images/cover.jpg
deleted file mode 100644
index 503744c..0000000
Binary files a/43. Test python socketio & uvicorn/images/cover.jpg and /dev/null differ
diff --git a/44. Gitlab CI testing with services/README.md b/44. Gitlab CI testing with services/README.md
deleted file mode 100644
index beb7576..0000000
--- a/44. Gitlab CI testing with services/README.md
+++ /dev/null
@@ -1,191 +0,0 @@
----
-title: "E2E tests with Gitlab CI services"
-tags: ["testing", "ci", "gitlab"]
-license: "public-domain"
-slug: "gitlab-ci-and-services"
-canonical_url: "https://haseebmajid.dev/blog/gitlab-ci-and-services"
-date: "2021-12-25"
-published: true
-cover_image: "images/cover.jpg"
----
-
-## Background
-
-This will be a slightly shorter article, in which I will show you how I've managed to do some
-end-to-end testing with Gitlab CI services.
-
-I'm building a browser-based multiplayer game called Banter Bus. Banter Bus consists of three main components:
-
-- gui: A SvelteKit based frontend the user will interact with to play the game
-- core-api: A Socketio API written in Python
-- management-api: A simple RESTful API written in Python (FastAPI)
-
-Now say I want to write some e2e Cypress tests that test all of these components interacting with each other,
-which will mainly look something like `gui -> core-api -> management-api`.
-
-Each of these projects publishes its own Docker container, which we can then use for testing. So how can we do this with Gitlab CI?
-
-## Gitlab Services
-
-What is a Gitlab CI service?
-
-> The services keyword defines a Docker image that runs during a job linked to the Docker image that the image keyword defines. This allows you to access the service image during build time. - https://docs.gitlab.com/ee/ci/services/
-
-Essentially, they are Docker containers we can use in our CI jobs.
-
-## package.json
-
-For the examples below, assume the scripts section of our `package.json` looks something like:
-
-```json:title=package.json
-{
-  "dev": "svelte-kit dev",
-  "e2e": "cypress run --browser chrome",
-  "e2e:ci": "start-server-and-test dev http://localhost:3000 e2e"
-}
-```
-
-## Gitlab CI
-
-Let's take a look at an example `.gitlab-ci.yml` file:
-
-```yml:title=.gitlab-ci.yml
-stages:
-  - test
-
-cypress-e2e-chrome:
-  image: cypress/browsers:node14.17.0-chrome88-ff89
-  stage: test
-  variables:
-    BANTER_BUS_CORE_API_MANAGEMENT_API_URL: http://banter-bus-management-api
-    BANTER_BUS_CORE_API_DB_HOST: banter-bus-database
-    FF_NETWORK_PER_BUILD: 1
-    # Hidden the rest of the variables as not to clutter the file
-  services:
-    - name: mongo:4.4.4
-      alias: banter-bus-database
-    - name: registry.gitlab.com/banter-bus/banter-bus-core-api:test
-      alias: banter-bus-core-api
-    - name: registry.gitlab.com/banter-bus/banter-bus-management-api:test
-      alias: banter-bus-management-api
-    - name: registry.gitlab.com/banter-bus/banter-bus-management-api/database-seed:latest
-      alias: banter-bus-database-seed
-  script:
-    - npm ci
-    - export VITE_BANTER_BUS_CORE_API_URL=http://banter-bus-core-api:8080
-    - echo fs.inotify.max_user_watches=524288 | tee -a /etc/sysctl.conf && sysctl -p
-    - npm run e2e:ci
-  artifacts:
-    expire_in: 1 week
-    when: always
-    paths:
-      - cypress/screenshots
-      - cypress/videos
-    reports:
-      junit:
-        - results/TEST-*.xml
-```
-
-### Services
-
-Let's break this file down a bit. The services are essentially all the dependencies of our `gui` application; we need all of
-these containers running.
-
-In this case we need four containers (the exact number doesn't really matter):
-
-- banter-bus-database: A database for the core-api and management-api
-- banter-bus-core-api: The main API the gui will interact with
-- banter-bus-management-api: Used to help manage our available games, questions etc
-- banter-bus-database-seed: A short-lived container which pre-fills the database with some values
-
-```yml
-services:
-  - name: mongo:4.4.4
-    alias: banter-bus-database
-  - name: registry.gitlab.com/banter-bus/banter-bus-core-api:test
-    alias: banter-bus-core-api
-  - name: registry.gitlab.com/banter-bus/banter-bus-management-api:test
-    alias: banter-bus-management-api
-  - name: registry.gitlab.com/banter-bus/banter-bus-management-api/database-seed:latest
-    alias: banter-bus-database-seed
-```
-
-In our example the `name` field is the image name; this is the same name you'd use with the `docker pull`
-command. The next field is `alias`; this is the name we'll use to reference that container, i.e. the container name.
-
-To see how the `alias` is used, look at the environment variables we have provided for the job, such as
-`BANTER_BUS_CORE_API_DB_HOST: banter-bus-database`. So the core-api will try to connect to the database using
-this host. You can read more about how Docker is able to resolve this to an [IP address here](/blog/dns-docker-explained/). Another example is the URL the core-api will use to connect to the management-api:
-`BANTER_BUS_CORE_API_MANAGEMENT_API_URL: http://banter-bus-management-api`.
-
-:::important ENV Variable
-One environment variable we must provide is `FF_NETWORK_PER_BUILD`, set to `1` (or true). Docker then
-creates a bridge network so all the services can communicate amongst themselves. You can read more about
-[it here](https://docs.gitlab.com/runner/executors/docker.html#create-a-network-for-each-job)
-:::
-
-We've discussed the most important part of the CI file, but let's quickly discuss the rest for completeness.
-
-:::note optional
-I've discussed the main point of this article: how to use services and how to get them to work together.
-:::
-
-### Variables
-
-We've already spoken about this above, but let's take a quick look at the `variables` section.
-
-```yml
-variables:
-  BANTER_BUS_CORE_API_MANAGEMENT_API_URL: http://banter-bus-management-api
-  BANTER_BUS_CORE_API_DB_HOST: banter-bus-database
-  FF_NETWORK_PER_BUILD: 1
-  # Hidden the rest of the variables as not to clutter the file
-```
-
-These are environment variables that are shared both with the job and the services. Some of these are
-config passed to the applications, such as `BANTER_BUS_CORE_API_MANAGEMENT_API_URL` and `BANTER_BUS_CORE_API_DB_HOST`.
-
-### Script
-
-Since we are using the `cypress/browsers:node14.17.0-chrome88-ff89` image, we have access to a headless
-Chrome browser we can use with Cypress.
-
-So we can do something like so:
-
-```yml
-script:
-  - npm ci
-  - export VITE_BANTER_BUS_CORE_API_URL=http://banter-bus-core-api:8080
-  - npm run e2e:ci
-```
-
-- `npm ci`: Installs our npm dependencies for the gui app
-- `export VITE_BANTER_BUS_CORE_API_URL=http://banter-bus-core-api:8080`: exports an environment variable which will be used by the gui app so it knows the URL of the core-api.
-  Note the use of the alias name here (and port `:8080`, the default port for the core-api)
-- `npm run e2e:ci`: Starts the dev server and then runs the Cypress tests; see `start-server-and-test dev http://localhost:3000 e2e`, where `e2e` is `cypress run --browser chrome`
-
-### Artifacts
-
-Finally, the artifacts are "things" that are left over after the build. In this case we use them in two ways:
-
-- One to generate a test report with `junit`
-- Two to save our Cypress screenshots and videos
-
-The downloadable artifacts will expire after 1 week. The Cypress files can be useful when debugging a problem
-with your tests; you get a video showing why the tests may have failed.
-
-```yml
-artifacts:
-  expire_in: 1 week
-  when: always
-  paths:
-    - cypress/screenshots
-    - cypress/videos
-  reports:
-    junit:
-      - results/TEST-*.xml
-```
-
-## Appendix
-
-- [Example Project](https://gitlab.com/banter-bus/banter-bus-gui/-/tree/350f1f986b077ac86da924b830fed88ffcd3cde0)
-- [Example Job](https://gitlab.com/banter-bus/banter-bus-gui/-/jobs/1920396599)
diff --git a/44. Gitlab CI testing with services/images/cover.jpg b/44. Gitlab CI testing with services/images/cover.jpg
deleted file mode 100644
index e030920..0000000
Binary files a/44. Gitlab CI testing with services/images/cover.jpg and /dev/null differ
diff --git a/45. Python socketio function handlers/README.md b/45. Python socketio function handlers/README.md
deleted file mode 100644
index a55da06..0000000
--- a/45. Python socketio function handlers/README.md
+++ /dev/null
@@ -1,92 +0,0 @@
----
-title: "Separate function handler modules when using Python Socketio"
-tags: ["python", "socketio", "clean-code", "fastapi"]
-license: "public-domain"
-slug: "python-socketio-handlers"
-canonical_url: "https://haseebmajid.dev/blog/python-socketio-handlers"
-date: "2021-12-31"
-published: true
-cover_image: "images/cover.jpg"
----
-
-In this article I will show you how you can have separate modules for your Socketio event handlers,
-rather than keeping them all in the same file.
-
-Hopefully this should be a relatively short article; let's get into it.
-
-## Main
-
-In this example I will be using Socketio alongside FastAPI, but you can easily change this code to be Socketio
-only. I will also be using uvicorn to run the server.
-
-For example:
-
-```python:title=app/main.py file=./source_code/app/main.py
-
-```
-
-Here is where we set up our FastAPI application, create a Socketio server as a sub-application and mount it.
-
-:::note fastapi-socketio
-
-Here I am using the `fastapi-socketio` library to handle mounting the application into the FastAPI app.
-But this again can be done without the library; see this [Github issue](https://github.com/tiangolo/fastapi/issues/129#issuecomment-547806432) for an example.
-:::
-
-Anyhow, we could simply do something like the following, which creates a Socketio-only server without FastAPI.
-
-```python:title=app/main.py
-import socketio
-
-sio = socketio.AsyncServer()
-application = socketio.ASGIApp(sio)
-```
-
-## Handler Module
-
-Next, let's take a look at the module which will handle the various events sent to us by the client (a sketch of it
-follows below).
-
-```python:title=app/foo/foo_handlers.py file=./source_code/app/foo/foo_handlers.py
-
-```
-
-As you can see, this handler imports the socket manager object (in the second example above this would be the object
-called `sio`) and uses it to decorate a function. This function will then be called every time a client sends a `FOO`
-event to our web server. In this example it emits a `BAR` event back with `hello world`.
-What this function does specifically doesn't really matter.
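-
-As a rough sketch (this is not the actual file from the source code, and the import path is assumed), the handler
-module might look something like this:
-
-```python
-# app/foo/foo_handlers.py (sketch)
-from app.main import sio  # assumed import path for the socket manager object
-
-
-@sio.on("FOO")
-async def foo(sid, data):
-    # Reply to the sender with a BAR event, as described above.
-    await sio.emit("BAR", "hello world", room=sid)
-```
-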
-## `__init__.py`
-
-Finally, let's put all this in our `app/__init__.py` module:
-
-```python:title=app/__init__.py file=./source_code/app/__init__.py
-
-```
-
-This may look a bit confusing; essentially, this is the module that uvicorn will call directly to start the server. We
-then import our function handlers with `import app.foo.foo_handlers`; here you will need to import
-all of your function handler modules, even though they aren't used directly here. This is so that your application knows
-they exist.
-
-Without this import your application will have no way to "attach" them to the app. Now, every time the `FOO`
-event is emitted from a client, your server knows which function to send it to.
-
-Finally, we create a simple alias variable, `app = application`, where `application` is the FastAPI/ASGIApp we
-created in the `main.py` module. You could leave this as `application`, but usually when using uvicorn
-(i.e. looking at examples) it will use `app`, hence I've renamed it here.
-
-The final block is not really needed:
-
-```python
-if __name__ == "__main__":
-    uvicorn.run(app, host="0.0.0.0", port=8080)
-```
-
-It exists more for when this module is run as a main file, and will start the uvicorn server for us. However, typically I will start the uvicorn server myself, usually in my Docker images or launch.json (VSCode debugger) config etc.
-
-```bash
-uvicorn app:app --host "0.0.0.0" --port 8080
-```
-
-## Appendix
-
-- [Example project using this pattern](https://gitlab.com/banter-bus/banter-bus-core-api)
diff --git a/45. Python socketio function handlers/images/cover.jpg b/45. Python socketio function handlers/images/cover.jpg
deleted file mode 100644
index 030122d..0000000
Binary files a/45. Python socketio function handlers/images/cover.jpg and /dev/null differ
diff --git a/46. Gitlab CI and Docker Compose/README.md b/46. Gitlab CI and Docker Compose/README.md
deleted file mode 100644
index b14202c..0000000
--- a/46. Gitlab CI and Docker Compose/README.md
+++ /dev/null
@@ -1,226 +0,0 @@
----
-title: "Gitlab CI with docker compose"
-tags: ["ci", "gitlab", "docker-compose", "docker"]
-license: "public-domain"
-slug: "gitlab-ci-with-docker-compose"
-canonical_url: "https://haseebmajid.dev/blog/gitlab-ci-with-docker-compose"
-date: "2022-08-08"
-published: true
-cover_image: "images/cover.png"
----
-
-Shameless plug: This is related to a EuroPython 2022 talk I am giving, [My Journey Using Docker as a Development Tool](https://gitlab.com/haseeb-slides/docker-as-a-dev-tool).
-
-For most of my common dev tasks, I've started to rely on `docker`/`docker compose` to run commands locally. I have also
-started using vscode's `.devcontainers` to provide a consistent environment for all developers using a project.
-
-The main reason for this is to avoid needing to install dependencies on my host machine. In theory, all I
-should need is a Docker daemon and a CLI (the docker CLI) to interact with that daemon. This also makes it
-far easier for any new developer to start working on my project and get set up.
-
-What inspired me to make this change now (in my Banter Bus project) was that I wanted to upgrade to
-Python 3.10 to use some of the new typing features released. However, when I tried to upgrade, my CI pipeline
-started failing; after hours of trying to debug it, I ended up using Docker and everything ran smoothly.
-
-Now I have a more consistent environment between my local machine and CI.
-So in theory, there is
-less chance of something passing locally but failing in CI.
-
-Now we know why we want to do it; let's look at how we do it.
-
-## Before
-
-Let's take a look at what a typical CI pipeline may look like for a Python project (using Banter Bus).
-In this example, we will be using a FastAPI web service which uses Poetry to manage its dependencies.
-
-```yml:title=.gitlab-ci.yml file=./source_code/.gitlab-ci.before.yml
-
-```
-
-The above looks quite complicated, but very simply, we install our dependencies for each job; the `before_script` section is used in all jobs.
-All jobs also use the `python:3.9.8` image; this is the container our code is cloned into in the CI pipeline.
-
-Where our `.pre-commit-config.yaml` looks something like this:
-
-```yml:title=.pre-commit-config.yaml file=./source_code/.pre-commit-config.yaml
-```
-
-`pre-commit` is a library we can use to add pre-commit hooks that run before we commit our code to git, adding some checks that
-the code is consistent with the rules we defined. We can also just use it as a lint job: multiple linting tools combined together, simplified. Hence,
-here we are checking code formatting, linting, import sorting etc. The details don't matter, but at the moment we need to have
-a virtualenv locally to run this.
-
-### Integration Tests
-
-A slightly more interesting job is the integration tests job; it requires other Docker containers, as our tests need MongoDB and Redis to run.
-We can define these as services and then reference them in our job like so:
-
-```yml:title=.gitlab-ci.yml
-test:integration-tests:
-  stage: test
-  only:
-    - merge_request
-  extends:
-    - .test
-  script:
-    - poetry run pytest -v tests/integration
-```
-
-Note the `extends` clause, which essentially merges the `.test` section with our job, so it will look something like:
-
-```yml:title=.gitlab-ci.yml
-test:integration-tests:
-  stage: test
-  only:
-    - merge_request
-  services:
-    - name: mongo:4.4.4
-      alias: banter-bus-database
-    - name: redis:6.2.4
-      alias: banter-bus-message-queue
-    - name: registry.gitlab.com/banter-bus/banter-bus-management-api:test
-      alias: banter-bus-management-api
-    - name: registry.gitlab.com/banter-bus/banter-bus-management-api/database-seed:latest
-      alias: banter-bus-database-seed
-  variables:
-    MONGO_INITDB_ROOT_USERNAME: banterbus
-    MONGO_INITDB_ROOT_PASSWORD: banterbus
-    MONGO_INITDB_DATABASE: test
-    BANTER_BUS_MANAGEMENT_API_DB_USERNAME: banterbus
-    BANTER_BUS_MANAGEMENT_API_DB_PASSWORD: banterbus
-    BANTER_BUS_MANAGEMENT_API_DB_HOST: banter-bus-database
-    BANTER_BUS_MANAGEMENT_API_DB_PORT: 27017
-    BANTER_BUS_MANAGEMENT_API_DB_NAME: test
-    BANTER_BUS_MANAGEMENT_API_WEB_PORT: 8090
-    BANTER_BUS_MANAGEMENT_API_CLIENT_ID: client_id
-    BANTER_BUS_MANAGEMENT_API_USE_AUTH: "False"
-    MONGO_HOSTNAME: banter-bus-database:27017
-    BANTER_BUS_CORE_API_DB_USERNAME: banterbus
-    BANTER_BUS_CORE_API_DB_PASSWORD: banterbus
-    BANTER_BUS_CORE_API_DB_HOST: banter-bus-database
-    BANTER_BUS_CORE_API_DB_PORT: 27017
-    BANTER_BUS_CORE_API_DB_NAME: test
-    BANTER_BUS_CORE_API_MANAGEMENT_API_URL: http://banter-bus-management-api
-    BANTER_BUS_CORE_API_MANAGEMENT_API_PORT: 8090
-    BANTER_BUS_CORE_API_CLIENT_ID: client_id
-    BANTER_BUS_CORE_API_USE_AUTH: "False"
-    BANTER_BUS_CORE_API_MESSAGE_QUEUE_HOST: banter-bus-message-queue
-    BANTER_BUS_CORE_API_MESSAGE_QUEUE_PORT: 6379
-  script:
-    - poetry run pytest -v tests/integration
-```
-
-We also need to define a bunch of environment variables in this case, so our containers can communicate
-with each other.
-Now, these are of course specific to my apps, but you can imagine a real-life project also
-needing a bunch of environment variables. As you can see, this can get a bit messy, and what is
-running locally may differ slightly from what is running in CI.
-
-I have been caught out by these env variables in the past. Note variables like
-`BANTER_BUS_CORE_API_MANAGEMENT_API_URL: http://banter-bus-management-api`: the name (alias) of the
-container must match the URL we have provided.
-
-```yml
-  - name: registry.gitlab.com/banter-bus/banter-bus-management-api:test
-    alias: banter-bus-management-api
-```
-
-Docker DNS is clever enough to work out the IP address.
-This is also different now from how we are running it locally.
-
-## After
-
-Now we run all our dev tasks in Docker. We will use docker-compose to manage all of the containers;
-docker-compose makes managing multiple containers a lot easier. We define all of them in our
-`docker-compose.yml` file.
-
-```yml:title=docker-compose.yml file=./source_code/docker-compose.yml
-```
-
-Note: this file was already defined, just not used in CI; I had written it to provide an easy way to start up my
-"tech stack" locally, so the file had gone unused in the pipeline.
-
-How do we run our dev tasks?
-
-- lint: `docker compose run app poetry run pre-commit run --all-files`
-- integration tests: `docker compose run app poetry run pytest -v tests/integration`
-
-Then our CI pipeline could look as simple as this:
-
-```yml:title=.gitlab-ci.yml
-image: docker
-
-services:
-  - docker:dind
-
-variables:
-  DOCKER_DRIVER: overlay2
-  DOCKER_HOST: tcp://docker:2375
-
-before_script:
-  - docker compose build
-
-stages:
-  - test
-
-test:lint:
-  stage: test
-  only:
-    - merge_request
-  script:
-    - docker compose run app poetry run pre-commit run --all-files
-
-test:unit-tests:
-  stage: test
-  only:
-    - merge_request
-  script:
-    - docker compose run app poetry run pytest -v tests/unit
-
-test:integration:
-  stage: test
-  only:
-    - merge_request
-  script:
-    - docker compose run app poetry run pytest -v tests/integration
-```
-
-Now before each job we build our Docker images with `docker compose build`.
-Then to run a dev task we do something like:
-
-```
-docker compose run app
-```
-
-So to run the unit tests we could do:
-
-```bash
-docker compose run app poetry run pytest -v tests/unit
-```
-
-### Aside
-
-We could simplify this further if we use a `Makefile` and make the target run `poetry run pytest -v tests/unit`.
-
-```makefile:title=Makefile
-.PHONY: unit_tests
-unit_tests: ## Run all the unit tests
-	@poetry run pytest -v tests/unit
-```
-
-Then our CI job would look something like:
-
-```yml:title=.gitlab-ci.yml
-test:unit-tests:
-  stage: test
-  only:
-    - merge_request
-  script:
-    - make unit_tests
-```
-
-Which I think is a lot more readable and a lot easier to type. We can also
-leverage auto-complete in the terminal and add help targets, so a user can see all the targets
-they can run.
\ No newline at end of file
diff --git a/46. Gitlab CI and Docker Compose/images/cover.png b/46. Gitlab CI and Docker Compose/images/cover.png
deleted file mode 100644
index 3c40d18..0000000
Binary files a/46. Gitlab CI and Docker Compose/images/cover.png and /dev/null differ
diff --git a/5. Running Expo in Docker/README.md b/5. Running Expo in Docker/README.md
deleted file mode 100644
index b06c443..0000000
--- a/5. Running Expo in Docker/README.md
+++ /dev/null
@@ -1,153 +0,0 @@
----
-title: "Running Expo/React Native in Docker"
-tags: ["docker", "react-native", "expo", "docker-compose"]
-license: "public-domain"
-slug: "running-react-native-in-docker"
-canonical_url: "https://haseebmajid.dev/blog/running-react-native-in-docker/"
-date: "2018-10-31"
-published: true
-cover_image: "images/cover.jpg"
----
-
-Running Expo/React Native in a Docker container can sometimes cause issues. In this example, I will be running
-Docker 🐳 within a guest VM (Ubuntu), which will run on my host machine (Windows). My host machine will also
-be running another VM as the Android emulator (Genymotion) for Expo to connect to. You can find a more
-detailed post about how to connect two VMs together
-[here](https://medium.com/@hmajid2301/react-native-expo-with-virtualbox-and-genymotion-2b58f622d92b),
-#Plug 🔌🔌🔌. Since I've set up networking on those two VMs already, as far as Expo is concerned
-it might as well be running on the host machine (Windows). Also, in this example I will be testing
-this on an Android device.
-
-![Original Image: https://maraaverick.rbind.io/2017/11/docker-izing-your-work-in-r/ and https://tutuappapkdownload.com/expo-apk/](images/docker-nyan.gif)
-
-## Prerequisites
-
-- [Install Docker](https://docs.docker.com/install/)
-- (optional) [Install docker-compose](https://docs.docker.com/compose/install/)
-- Android device/emulator to test on
-
-## Docker
-
-```json:title=package.json file=./source_code/package.json
-
-```
-
-The `package.json` file I will be using in the following example is a very barebones file, including just the minimum
-packages required to run Expo.
-
-```docker:title=Dockerfile file=./source_code/Dockerfile
-
-```
-
-`FROM node:latest`
-
-Tells us which Docker image we are using as a base, in this case the official node.js image. This is because it
-already has a lot of the dependencies we need installed, such as yarn and npm.
-
-```text
-ENV ADB_IP="192.168.1.1"
-ENV REACT_NATIVE_PACKAGER_HOSTNAME="192.255.255.255"
-```
-
-Sets environment variables which can be accessed during the runtime of the Docker container. Strictly speaking, these
-don't need to be here, because we can always inject them into the Docker container at runtime, but I like to have the
-environment variables documented.
-
-The **ADB_IP** is the IP of the Android device 📱 to connect to. The **REACT_NATIVE_PACKAGER_HOSTNAME** environment variable is
-very important because it sets which IP address Expo (cli) is running on; this is the IP address your phone will try to
-connect to. If this is not set correctly, you'll get an error similar to Figure 1. You can work out the correct IP
-address on Linux by using the following command; the first one should be the host IP (192.168.27.128 on my machine).
-
-```bash
-hostname -I
-192.168.27.128 192.168.130.128 172.17.0.1 172.19.0.1 172.18.0.1
-```
-
-The reason this environment variable needs to be set is that, by default, the React Native packager
-(which Expo relies on) picks the first IP it sees on the machine. Hence you can run Expo on your host machine
-fine, but when you run it in a Docker container you cannot connect to it, because it's trying to use the Docker
-IP address (one of the ones starting with 172.xxx.xxx.xxx).
-
-```text
-EXPOSE 19000
-EXPOSE 19001
-```
-
-This is essentially metadata letting the user of the Docker container know that they can access data on those ports.
-
-```bash
-RUN apt-get update && \
-    apt-get install android-tools-adb
-```
-
-Install the Android Debug Bridge (ADB), which is used to connect to an Android device and debug the application.
-
-```text
-COPY package.json yarn.lock app.json ./
-RUN yarn --network-timeout 100000
-```
-
-Copy some important files from the host to the Docker container. The `package.json` and `yarn.lock` are used to install
-the dependencies, and `app.json` is required by Expo as a bare minimum.
-
-```text
-CMD adb connect $ADB_IP && \
-    yarn run android
-    # runs expo-cli start --android
-```
-
-![Figure 1: Could not connect error 😢](images/error-emulator.png)
-
-## Running Docker
-
-This command runs when the Docker image first runs; every other command is used to build the image itself. It
-uses an environment variable passed into the Docker container to connect to the Android device at `$ADB_IP`, then runs
-the **android** command from `package.json`. You can then simply run the following commands to build and start your Docker container.
-
-```bash
-docker build -t expo-android .
-docker run -e ADB_IP=192.168.112.101 \
-    -e REACT_NATIVE_PACKAGER_HOSTNAME=192.168.1.1 \
-    -p 19000:19000 \
-    -p 19001:19001 \
-    expo-android
-```
-
-- -t is used to name the image (expo-android)
-- . tells Docker where the Dockerfile is (in the current directory)
-- -e (--env) sets environment variables used by the Docker container when it starts running (`REACT_NATIVE_PACKAGER_HOSTNAME` and `ADB_IP` are overwritten using these new values)
-- -p publishes ports; in this example, it maps port 19000 on the host to port 19000 on the Docker container (and also 19001), as Expo (expo-cli) needs ports 19000 and 19001 to connect to our Android device
-
-```yaml:title=docker-compose.yml file=./source_code/docker-compose.yml
-
-```
-
-Since Expo is being used to build mobile phone applications, Docker isn't going to be used in production. I prefer to use
-docker-compose to do the building and running; it means I can run one simple command and do the building and running in
-one step. A quick aside: docker-compose is great for development, especially when you need to run multiple Docker containers,
-but it is not really built to be used in production; look at using a container orchestration tool such as Kubernetes instead.
-
-I also mount my current directory on the host machine to the /app/ directory in the Docker container. This is so that any
-files that change on the host machine will also change in the Docker container, rather than me having to build the
-Docker container again.
-
-```bash
-docker-compose up --build -d
-```
-
-### Environment Variables
-
-```text:title=.env file=./source_code/.env
-
-```
-
-An example `.env` file used to pass environment variables (using docker-compose) to the Docker container.
-
-## Appendix
-
-- [Example source code](https://gitlab.com/hmajid2301/articles/-/tree/master/5.%20Running%20Expo%20in%20Docker/source_code)
-- [Docker explained](https://medium.freecodecamp.org/a-beginner-friendly-introduction-to-containers-vms-and-docker-79a9e3e119b)
-- [GitHub issue around could not connect errors](https://github.com/react-community/create-react-native-app/issues/81)
-- [Genymotion emulator](https://www.genymotion.com/)
-- [GIF overlay creator (Nyan Docker)](https://ezgif.com/overlay)
-- React logo from [here](https://seeklogo.com/vector-logo/273845/react)
diff --git a/5. Running Expo in Docker/images/.env.png b/5. Running Expo in Docker/images/.env.png
deleted file mode 100755
index 023fb15..0000000
Binary files a/5. Running Expo in Docker/images/.env.png and /dev/null differ
diff --git a/5. Running Expo in Docker/images/cover.jpg b/5. Running Expo in Docker/images/cover.jpg
deleted file mode 100644
index c7d6778..0000000
Binary files a/5. Running Expo in Docker/images/cover.jpg and /dev/null differ
diff --git a/5. Running Expo in Docker/images/docker-nyan.gif b/5. Running Expo in Docker/images/docker-nyan.gif
deleted file mode 100755
index a33ebd6..0000000
Binary files a/5. Running Expo in Docker/images/docker-nyan.gif and /dev/null differ
diff --git a/5. Running Expo in Docker/images/error-emulator.png b/5. Running Expo in Docker/images/error-emulator.png
deleted file mode 100755
index 05fd46c..0000000
Binary files a/5. Running Expo in Docker/images/error-emulator.png and /dev/null differ
diff --git a/6. Pyest with Background Threads/README.md b/6. Pyest with Background Threads/README.md
deleted file mode 100644
index 0234007..0000000
--- a/6. Pyest with Background Threads/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
----
-title: "Pytest with Background Thread Fixtures"
-tags: ["pytest", "python", "testing", "threads"]
-license: "public-domain"
-slug: "pytest-with-background-thread-in-fixtures"
-canonical_url: "https://haseebmajid.dev/blog/pytest-with-background-thread-in-fixtures"
-date: "2018-11-05"
-published: true
-cover_image: "images/cover.jpg"
----
-
-Recently I had to test some of my Python :snake: :snake: :snake: code which required an external dependency and
-communicated with it using TCP sockets :electric_plug:. You can think of this dependency as essentially a database, because it stores information. However, when testing my Python code, I couldn't rely on there always being a TCP server to send messages to.
-
-So I ended up creating a simplified mocked version in Python. This way I could run automated tests on my code without needing to install the server or rely on it already existing. Essentially, I ended up creating a TCP server in Python which would receive some input and respond how I would expect the real TCP server to.
-
-I write my tests using Pytest for several reasons; I prefer it to unittest because it requires less boilerplate code. It also has lots of useful features, such as fixtures. However, when creating tests for my API I realised that I had to first start the fake TCP server. But since it was always listening :ear: for traffic :vertical_traffic_light: it would block the rest of my tests from running. So in this example, I will show you how you can create a Pytest fixture that runs (a TCP server) on a background thread and still run your tests as normal.
-
-## Code
-
-```python:title=tcp_server.py file=./source_code/tcp_server.py
-
-```
-
-This is a very basic TCP server: it receives some input but always replies with "Received". Also, I created the class to be used with a context manager so that the socket can be closed, hence the `__enter__()` and `__exit__()` dunder (magic) methods. This TCP server will always listen on localhost on port 9500; this is the same port the client will have to connect to, to send messages to this TCP server.
-
-The main function of interest is `listen_for_traffic()`, which loops forever whilst listening for traffic. If it receives any data, it always replies with "Received". Of course, with a real server we might want to do something more interesting, but to keep this example simple that's all this server will do.
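-
-As a rough sketch of what `tcp_server.py` might contain (the real file is included from the source code; the class and
-method names here simply follow the description above):
-
-```python
-import socket
-
-
-class TCPServer:
-    def __init__(self, host="127.0.0.1", port=9500):
-        self.host = host
-        self.port = port
-        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-
-    def __enter__(self):
-        # Bind the socket and start listening when we enter the context manager.
-        self.socket.bind((self.host, self.port))
-        self.socket.listen()
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        # Always close the socket when we leave the context manager.
-        self.socket.close()
-
-    def listen_for_traffic(self):
-        # Loop forever, always replying with "Received".
-        while True:
-            connection, _ = self.socket.accept()
-            with connection:
-                data = connection.recv(1024)
-                if data:
-                    connection.sendall(b"Received")
-```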
```python:title=test_example.py file=./source_code/test_example.py

```

When you run the Pytest command it will automatically run tests on all modules and methods that start with "test\_". I have created a fixture (using the fixture decorator); fixtures allow for code reuse within a Pytest module. Fixtures are typically used to connect to databases, and since they run before any tests we can also use them to set up our code.

So taking a look at the `dummy_tcp_server()` function, first we use a context manager (`with`) to set up our TCP server.

```python
with tcp_server as example_server:
```

This calls the `__enter__()` dunder method, which starts up our TCP server, so it's now listening for any traffic on port 9500.

```python
thread = threading.Thread(target=example_server.listen_for_traffic)
thread.daemon = True
thread.start()
```

This is the bit of code that actually runs our TCP server on another (background) thread. In the first line we tell the thread which function to call: `listen_for_traffic` (without brackets), which has an infinite while loop just waiting to listen for traffic arriving on port 9500. We set the thread as a daemon so that when we close the main Python program the thread will automatically kill itself; this means we don't have to manage it ourselves.

```python
yield example_server
```

The yield is very similar to a return statement. The main reason I chose to include it is that if the tests raise an exception for some reason, the yield will still allow the context manager to exit. If we had any teardown code after the yield, this code would still run after all the tests have finished, even with an exception. Also, yields (and return statements) allow individual tests to access functions from the TCPServer class. For example, if you had a function (in the TCPServer class) that stored every message sent to the server, and you passed the fixture as a parameter to a test, then you could access this function.
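Putting the snippets above together, the fixture in `test_example.py` looks something like the sketch below (the real file is in the source code; the `TCPServer` import path is an assumption):

```python
import threading

import pytest

from tcp_server import TCPServer


@pytest.fixture
def dummy_tcp_server():
    tcp_server = TCPServer()
    with tcp_server as example_server:
        # Serve traffic on a daemon thread so the tests aren't blocked.
        thread = threading.Thread(target=example_server.listen_for_traffic)
        thread.daemon = True
        thread.start()
        yield example_server
```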
```python
def test_example():
    HOST = '127.0.0.1'
    PORT = 9500

    data = ""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect((HOST, PORT))
        s.sendall(b'Hello World')
        data = s.recv(1024)

    assert data.decode() == "Received"
```

This part of our code is the actual test. It sends "Hello World" to the TCP server; remember, if you send it as a string (without `b''`) then you'll have to encode that string first (`"Hello World".encode()`). Then we capture the response from the server, decode it (because it's in binary, this converts it into a string) and assert, or check, that the data is the string "Received" as we expect.

## Appendix

- [Example source code](https://gitlab.com/hmajid2301/articles/-/tree/master/6.%20Pyest%20with%20Background%20Threads/source_code)
- [Pytest](https://docs.pytest.org/en/latest/)

diff --git a/6. Pyest with Background Threads/images/cover.jpg b/6. Pyest with Background Threads/images/cover.jpg deleted file mode 100644 index 8330568..0000000 Binary files a/6. Pyest with Background Threads/images/cover.jpg and /dev/null differ
diff --git "a/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/images/cover.jpg" "b/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/images/cover.jpg" deleted file mode 100644 index a4aa3bd..0000000 Binary files "a/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/images/cover.jpg" and /dev/null differ
diff --git "a/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/source_code/dist/example-0.1.0.tar.gz" "b/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/source_code/dist/example-0.1.0.tar.gz" deleted file mode 100644 index c2ca689..0000000 Binary files "a/7. Multi Docker Container with Nginx, Flask and\302\240MySQL/source_code/dist/example-0.1.0.tar.gz" and /dev/null differ
diff --git a/8. Docker with SQLAlchemy/README.md b/8. Docker with SQLAlchemy/README.md deleted file mode 100644 index 136823e..0000000 --- a/8. Docker with SQLAlchemy/README.md +++ /dev/null @@ -1,189 +0,0 @@

---
title: "Simple App with Flask, SQLalchemy and Docker"
tags: ["docker", "python", "docker-compose", "sqlalchemy"]
license: "public-domain"
slug: "simple-app-flask-sqlalchemy-and-docker"
canonical_url: "https://haseebmajid.dev/blog/simple-app-flask-sqlalchemy-and-docker/"
date: "2018-11-24"
published: true
cover_image: "images/cover.jpg"
---

SQLAlchemy is an object-relational mapper (ORM); it allows us to interact with a database using Python functions and objects. For example, if we have a table called `Cats`, we could retrieve every row with a command like `Cats.query.all()`. The main advantage of this is that it allows us to abstract away the SQL.

Docker :whale: allows us to quickly bring up a database within a Docker container; this means we don't have to set up and configure a database on our local machine. We can simply kill the Docker container when we are done with the database. In this article, I will show you how you can create a very simple RESTful API using Flask and SQLAlchemy, which will connect to a database running in a Docker container.

**NOTE:** The Flask server will be running locally, not in a Docker container.

In this example, I will be using Postgres, but it should be easy enough to use any other relational database, such as MySQL. I will also be using `flask-sqlalchemy`, which is a wrapper around `SQLAlchemy`; it simplifies our code and means we can use less boilerplate code.

## Prerequisites

- [Install Docker](https://docs.docker.com/install/)
- (optional) [Install docker-compose](https://docs.docker.com/compose/install/)
- Install Python 3.6
- Install the following dependencies, using `pip install -r requirements.txt` (or pip3 instead of pip)

Where requirements.txt is:

```text
flask==1.0.2
flask-sqlalchemy==2.3.0
psycopg2==2.7.6.1
```

```python:title=src/example/__init__.py file=./source_code/src/example/__init__.py

```

The init file has one function, `create_app()`, which funnily enough creates our Flask app with this line: `Flask(__name__)`. It then assigns a URI, from the `config.py` file, to the Flask app's configuration. This URI is used to connect to the Postgres database.

One important thing about this function is that we have to use Flask contexts. Since Flask can have multiple apps, we have to specify which app we are using with SQLAlchemy, hence we push the context with our newly created app. Otherwise we would see the following error, [more information here](http://flask-sqlalchemy.pocoo.org/2.3/contexts/).

```text
No application found. Either work inside a view function or push an application context.
```

After pushing our context, we link our `db` to the Flask app with the following line: `db.init_app(flask_app)`. We then create all of our tables (in the database) if they don't already exist, using `db.create_all()`. The tables are created using the classes defined in `models.py`.
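The actual `__init__.py` is pulled in above from the source code; based on the description, a sketch of `create_app()` might look something like this (the `SQLALCHEMY_TRACK_MODIFICATIONS` line is my own addition to silence a flask-sqlalchemy warning):

```python
from flask import Flask

from example import config
from example.models import db


def create_app():
    flask_app = Flask(__name__)
    flask_app.config["SQLALCHEMY_DATABASE_URI"] = config.DATABASE_CONNECTION_URI
    flask_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

    # Push an app context so SQLAlchemy knows which Flask app we are using.
    flask_app.app_context().push()
    db.init_app(flask_app)
    db.create_all()
    return flask_app
```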
```python:title=src/example/config.py file=./source_code/src/example/config.py

```

This module's only job at the moment is to generate this URI, but it could easily be extended to add extra configuration variables if required.

```python
DATABASE_CONNECTION_URI = f'postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}'
```

**NOTE:** F-strings, used for formatting strings (as shown above), can only be used with Python 3.6+.

These are examples of the variables that need to be passed as environment variables to the Flask app.

```text:title=database.conf file=./source_code/database.conf

```

**NOTE:** If you're running the Flask app in a Docker container, you will need to change the variable to `POSTGRES_HOST=postgres` (from localhost), where `postgres` is the name of the Docker container we are connecting to.

**WARNING:** Make sure these are the same values passed to the Flask app and the Postgres database.

```python:title=src/example/models.py file=./source_code/src/example/models.py

```

This module defines our classes, which then become tables within our database. For example, the class `Cats` (cats) is the table name and each attribute becomes a column in that table. So the `cats` table will have four columns: id, name, price and breed.

The `db` variable is imported from here by the `__init__.py` file; that's how the `db.create_all()` function knows which classes/tables to create in the database.
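For illustration, `models.py` might look something like the sketch below; the column types here are a guess, and the real definitions are in the embedded file above:

```python
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()


class Cats(db.Model):
    # flask-sqlalchemy derives the table name "cats" from the class name.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text)
    price = db.Column(db.Integer)
    breed = db.Column(db.Text)
```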
```python:title=src/example/app.py file=./source_code/src/example/app.py

```

This is a simple Flask file, which creates our app by calling the `create_app()` function from the `__init__.py` module. Then it defines four functions for our four routes for the "RESTful" API:

- GET: Get information about all the cats
- POST: Add a new cat
- DELETE: Remove a cat
- PATCH: Edit a cat's price

## database.py

```python:title=src/example/database.py file=./source_code/src/example/database.py

```

This module is created so we can abstract away how we interact with the database; we simply use the functions in this module to interact with it. This means it's easier to change the library we use to interact with the database. It also means that if for some reason we need to change how we interact with the database, we only have to change it in a single module (this one).

The `app.py` module calls functions in this file to interact with the database:

- GET - `get_all()`
- POST - `add_instance()`
- DELETE - `delete_instance()`
- PATCH - `edit_instance()`

Some functions use this special keyword called `**kwargs`; kwargs (keyword arguments) could be called anything, but it's best practice to call it kwargs. This allows the caller of the function to pass in an arbitrary number of keyword arguments.

Let's take a look at the `add_instance()` function as an example. The function is called in `app.py` like so: `database.add_instance(Cats, name=name, price=price, breed=breed)`. Here `model=Cats`, and `kwargs` captures the rest of the arguments, which are passed on to the Cats model so we can add our cat object to the database.

**NOTE:** The `kwargs` just stores the arguments as a dictionary; the `**` operator unpacks our dictionary and passes them as keyword arguments.
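To make this concrete, here is a sketch of two of these functions; the real `database.py` is embedded above, so treat these bodies as illustrative:

```python
from example.models import db


def get_all(model):
    # e.g. get_all(Cats) returns every row in the cats table.
    return model.query.all()


def add_instance(model, **kwargs):
    # e.g. add_instance(Cats, name=name, price=price, breed=breed):
    # kwargs packs the keyword arguments into a dict, and model(**kwargs)
    # unpacks them again onto the Cats constructor.
    instance = model(**kwargs)
    db.session.add(instance)
    db.session.commit()
```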
## Docker Compose

```yaml:title=docker-compose.yml file=./source_code/docker-compose.yml

```

For development, I like to use docker-compose. In docker-compose we can specify Docker containers using YAML. It can help to simplify the commands we need to type when trying to build/run multiple Docker containers. In this example, we only define a single Docker container.

Taking a look at the file:

First, we define our version number, `version: '3.5'`; it is recommended by Docker that you use at least version 3. You can find [more information here](https://docs.docker.com/compose/compose-file/compose-versioning/).

Then we give our service a name, in this case `database`. I like to give my services generic names describing what they are used for, such as `web server`, `database` or `message broker`. This means I can change the underlying technology without changing the service name.

After this we name our container `postgres`; this is the name of the Docker container. It can be used to interact with the container (to kill it or exec onto it) without using an ID.

We use the official Postgres image on [Docker Hub](https://hub.docker.com/_/postgres/); we pull the image that is tagged with `latest`.

This image requires us to use some variables to set it up, such as username, password and database. We pass these in the form of a file to make things a bit simpler (the same `database.conf` as defined above).

We then map the host port 5432 to the guest Docker container port 5432; this is the port that Postgres listens on. You could change the host port to something else, say `9000`; this would mean all traffic on the host on port 9000 is sent to the Postgres container on port 5432. We would also need to update the environment variable the Flask app is using.

Finally, we mount a volume so that our data is persistent; without this, when the database Docker container is killed you would lose all of your data. By mounting `db_volume`, even when you kill the container, such as when you want to update the Postgres image, your data will persist.

## Running our application

To build and run our Docker container with docker-compose:

```bash
docker-compose up --build -d
```

The equivalent commands using just normal Docker would be:

```bash
docker volume create --name db_volume
docker run -d --name postgres -p 5432:5432 \
    --env-file docker/database.conf \
    -v db_volume:/var/lib/postgresql postgres:latest
```

To start our Flask app:

```bash
docker-compose up --build

# In a new terminal
virtualenv .venv
source .venv/bin/activate
pip install -r requirements.txt
# To load env variables
export $(xargs < database.conf)
export FLASK_APP=src/example/app.py
flask run
# Running on http://127.0.0.1:5000
```

You can send HTTP requests to your Flask server on `127.0.0.1:5000`; you can either use a REST client like Postman or Insomnia, or use cURL on the CLI.

```bash
curl -XPOST -H "Content-type: application/json" -d \
'{"name": "catty mcCatFace", "price": 5000, "breed": "bengal"}' \
'127.0.0.1:5000/add'
```

## Appendix

- [Example source code](https://gitlab.com/hmajid2301/articles/-/tree/master/8.%20Docker%20with%20SQLAlchemy/source_code)
- [Postman](https://www.getpostman.com/)
- [Insomnia](https://insomnia.rest/)

diff --git a/8. Docker with SQLAlchemy/images/cover.jpg b/8. Docker with SQLAlchemy/images/cover.jpg deleted file mode 100644 index 0dd8c17..0000000 Binary files a/8. Docker with SQLAlchemy/images/cover.jpg and /dev/null differ
diff --git a/9. Testing with pytest-mock and pytest-flask/README.md b/9. Testing with pytest-mock and pytest-flask/README.md deleted file mode 100644 index 8a9ce2e..0000000 --- a/9. Testing with pytest-mock and pytest-flask/README.md +++ /dev/null @@ -1,93 +0,0 @@

---
title: "Testing with pytest-mock and pytest-flask"
tags: ["testing", "python", "pytest", "flask"]
license: "public-domain"
slug: "testing-with-pytest-mock-and-pytest-flask"
canonical_url: "https://haseebmajid.dev/blog/testing-with-pytest-mock-and-pytest-flask"
date: "2018-12-15"
published: true
cover_image: "images/cover.jpg"
---

Pytest is a popular Python library used for testing. It is my preferred testing library because it requires less boilerplate code than the alternatives, such as the built-in unittest. In this article, I will show you how you can use `pytest-flask` and `pytest-mock` to test your Flask app. These two libraries are plugins for Pytest, which build upon some of the features that Pytest provides us.

In this example, we will be testing a very simple Flask app I created ([source code here](https://gitlab.com/hmajid2301/articles/-/tree/master/9.%20Testing%20with%20pytest-mock%20and%20pytest-flask/source_code)). If you want to learn more about the Flask app, you can read my previous article [here](/blog/simple-app-flask-sqlalchemy-and-docker/) :plug: :plug:. All you really need to know for this article is that it involves a very simple RESTful API for an imaginary cat store. Essentially, all the API does is interact with a database: getting the current cats, adding new cats, editing existing cats and removing cats from the store.

## Prerequisites

- Your own Flask app
- Install the following dependencies, using `pip install -r requirements.txt` (or pip3 instead of pip)

Where requirements.txt is:

```text:title=requirements.txt file=./source_code/requirements.txt

```

## Libraries

`pytest-flask`: Allows us to specify an app fixture and then send API requests with this app. Usage is similar to the `requests` library when sending HTTP requests to our Flask app.

`pytest-mock`: Is a simple wrapper around the unittest mock library, so anything you can do using `unittest.mock` you can do with `pytest-mock`. The main difference in usage is that you access it using a fixture called `mocker`, and the mock ends at the end of the test. Whereas with the normal mock library, if you mock, say, the `open()` function, it will be mocked for the remaining duration of that test module, i.e. it will affect other tests.

## pytest-flask example

```python
import pytest

from example_app import create_app


@pytest.fixture
def app():
    app = create_app()
    return app


def test_example(client):
    response = client.get("/")
    assert response.status_code == 200
```

To use pytest-flask we need to create a fixture called `app()`, which creates our Flask server. We can then use this fixture by passing `client` as an argument to any test. Then we can send various HTTP requests using `client`.

Above is a very simple example using pytest-flask: we send a GET request to our app, which should return all cats in the database. We then check that the status code returned from the server was 200 (OK). This is great, except how can we mock out certain features within our code?

## pytest-mock

We can mock out certain parts of our code using the `pytest-mock` library, but we have to do the mocking inside the `app()` fixture, since the rest of our tests will just be making HTTP requests to our Flask server. Say, in this example, we don't want to make a real connection to the database; we can use the following lines of code.

```python
mocker.patch("flask_sqlalchemy.SQLAlchemy.init_app", return_value=True)
mocker.patch("flask_sqlalchemy.SQLAlchemy.create_all", return_value=True)
mocker.patch("example.database.get_all", return_value={})
```

What this bit of code does is: any time any of the mocked functions is called, say `init_app`, we mock it so it will always return true. We have to give it the "full path" to the function; it's the same as if you had to import the function itself.
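Combining this with the `app()` fixture from the pytest-flask example above, the fixture might end up looking something like this sketch (the real version lives in the example source code):

```python
import pytest

from example_app import create_app


@pytest.fixture
def app(mocker):
    # Mock out the database before the app is created, so the tests
    # never attempt a real database connection.
    mocker.patch("flask_sqlalchemy.SQLAlchemy.init_app", return_value=True)
    mocker.patch("flask_sqlalchemy.SQLAlchemy.create_all", return_value=True)
    mocker.patch("example.database.get_all", return_value={})
    return create_app()
```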
```python:title=tests/test_example.py file=./source_code/tests/test_example.py

```

In this example it's very boring: when we send an HTTP GET request to the app, it will not interact with the database, since we've mocked this out, but will instead just return an empty dict ({}). In reality this is not a very good test; you would want to make it a bit more interesting.

To run the tests:

```bash
virtualenv .venv
source .venv/bin/activate
pip install -e .
pip install -r requirements.txt
export $(xargs < database.conf)
pytest
```

## Appendix

- [Example source code](https://gitlab.com/hmajid2301/articles/-/tree/master/9.%20Testing%20with%20pytest-mock%20and%20pytest-flask/source_code)

diff --git a/9. Testing with pytest-mock and pytest-flask/images/cover.jpg b/9. Testing with pytest-mock and pytest-flask/images/cover.jpg deleted file mode 100644 index 2587d98..0000000 Binary files a/9. Testing with pytest-mock and pytest-flask/images/cover.jpg and /dev/null differ
diff --git a/README.md b/README.md new file mode 100644 index 0000000..dfe6b97 --- /dev/null +++ b/README.md @@ -0,0 +1,11 @@ +Articles +-------- + +This repo contains all source code for my articles. + +Contents of this repo will be moved to https://gitlab.com/hmajid2301/blog + +Appendix +-------- + +- Icon From https://www.flaticon.com \ No newline at end of file
diff --git a/README.rst b/README.rst deleted file mode 100644 index 51828c8..0000000 --- a/README.rst +++ /dev/null @@ -1,14 +0,0 @@ -Articles --------- - -This repo contains all my articles that I publish in Markdown and also all the source code examples related to those articles. -You can currently find them on: - -- https://haseebmajid.dev/ -- https://dev.to/hmajid2301 -- https://medium.com/@hmajid2301 - -Appendix --------- - -- Icon From https://www.flaticon.com \ No newline at end of file
diff --git a/package.json b/package.json deleted file mode 100644 index aff126d..0000000 --- a/package.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scripts": { - "publish-article": "markdown-to-medium" - }, - "dependencies": { - "@mermaid-js/mermaid-cli": "^8.8.2-beta.8", - "gatsby-source-git": "^1.1.0", - "markdown-to-medium": "1.4.4" - } -}
diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index 6d7bdb3..0000000 --- a/yarn.lock +++ /dev/null @@ -1,1927 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1 - - -"@babel/runtime@^7.10.3": - version "7.11.2" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736" - integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw== - dependencies: - regenerator-runtime "^0.13.4" - -"@mermaid-js/mermaid-cli@^8.8.2-beta.8": - version "8.8.2-beta.8" - resolved "https://registry.yarnpkg.com/@mermaid-js/mermaid-cli/-/mermaid-cli-8.8.2-beta.8.tgz#9fa47cbfeaa01fa811a6962396005c7a795acdc2" - integrity sha512-X9I7gwvqKVdqVvqi9AVfUWXnHQQYjssWJ2asKfhBDAkQn0vPagKkx6EjzKcRgyIroWGXN6ZpwkQ/gMvTPFlx7g== - dependencies: - chalk "^4.1.0" - commander "^6.0.0" - puppeteer "^5.0.0" - -"@mrmlnc/readdir-enhanced@^2.2.1": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz#524af240d1a360527b730475ecfa1344aa540dde" - integrity sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g== - dependencies: - call-me-maybe "^1.0.1" - glob-to-regexp "^0.3.0" - -"@nodelib/fs.stat@^1.1.2": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b" - integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== - -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== - -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - -"@types/node@*": - version "14.14.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.5.tgz#e92d3b8f76583efa26c1a63a21c9d3c1143daa29" - integrity sha512-H5Wn24s/ZOukBmDn03nnGTp18A60ny9AmCwnEcgJiTgSGsCO7k+NWP7zjCCbhlcnVCoI+co52dUAt9GMhOSULw== - -"@types/yauzl@^2.9.1": - version "2.9.1" - resolved "https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.9.1.tgz#d10f69f9f522eef3cf98e30afb684a1e1ec923af" - integrity sha512-A1b8SU4D10uoPjwb0lnHmmu8wZhR9d+9o2PKBQT2jU5YPTKsxac6M2qGAdY7VcL+dHHhARVUDmeg0rOrcd9EjA== - dependencies: - "@types/node" "*" - -agent-base@5: - version "5.1.1" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c" - integrity sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g== - -ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -anymatch@~3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" - integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -argparse@^1.0.7: - version "1.0.10" - resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= - -base64-js@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" - integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - -better-queue-memory@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/better-queue-memory/-/better-queue-memory-1.0.4.tgz#f390d6b30bb3b36aaf2ce52b37a483e8a7a81a22" - integrity sha512-SWg5wFIShYffEmJpI6LgbL8/3Dqhku7xI1oEiy6FroP9DbcZlG0ZDjxvPdP9t7hTGW40IpIcC6zVoGT1oxjOuA== - -better-queue@^3.8.10: - version "3.8.10" - resolved "https://registry.yarnpkg.com/better-queue/-/better-queue-3.8.10.tgz#1c93b9ec4cb3d1b72eb91d0efcb84fc80e8c6835" - integrity sha512-e3gwNZgDCnNWl0An0Tz6sUjKDV9m6aB+K9Xg//vYeo8+KiH8pWhLFxkawcXhm6FpM//GfD9IQv/kmvWCAVVpKA== - dependencies: - better-queue-memory "^1.0.1" - node-eta "^0.9.0" - uuid "^3.0.0" - -binary-extensions@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9" - integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ== - -bl@^4.0.3: - version "4.0.3" - resolved 
"https://registry.yarnpkg.com/bl/-/bl-4.0.3.tgz#12d6287adc29080e22a705e5764b2a9522cdc489" - integrity sha512-fs4G6/Hu4/EE+F75J8DuN/0IpQqNjAdC7aEQv7Qt8MHGUH7Ckv2MwTEEeN9QehD0pfIDkMI1bkHYkKy7xHyKIg== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - -bluebird@^3.7.2: - version "3.7.2" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - -braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -buffer-alloc-unsafe@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" - integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== - -buffer-alloc@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" - integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== - dependencies: - buffer-alloc-unsafe "^1.1.0" - buffer-fill "^1.0.0" - -buffer-crc32@~0.2.3: - version "0.2.13" - resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" - integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI= - -buffer-fill@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" - integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= - -buffer@^5.2.1, buffer@^5.5.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.0.tgz#88afbd29fc89fa7b58e82b39206f31f2cf34feed" - integrity sha512-cd+5r1VLBwUqTrmnzW+D7ABkJUM6mr7uv1dv+6jRw4Rcl7tFIFHDqHPL98LhpGFn3dbAt3gtLxtrWp4m1kFrqg== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - 
-cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - -call-me-maybe@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" - integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= - -chalk@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" - integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chokidar@^3.4.2: - version "3.4.2" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.2.tgz#38dc8e658dec3809741eb3ef7bb0a47fe424232d" - integrity sha512-IZHaDeBeI+sZJRX7lGcXsdzgvZqKv6sECqsbErJA4mHWfpRrD8B97kSFN4cQz6nGBGiuFia1MKR4d6c1o8Cv7A== - dependencies: - anymatch "~3.1.1" - braces "~3.0.2" - glob-parent "~5.1.0" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.4.0" - optionalDependencies: - fsevents "~2.1.2" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -ci-info@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - -cliclopts@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/cliclopts/-/cliclopts-1.1.1.tgz#69431c7cb5af723774b0d3911b4c37512431910f" - integrity sha1-aUMcfLWvcjd0sNORG0w3USQxkQ8= - -clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@~1.1.4: - version "1.1.4" - resolved 
"https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colors@^1.1.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.3.2.tgz#2df8ff573dfbf255af562f8ce7181d6b971a359b" - integrity sha512-rhP0JSBGYvpcNQj4s5AdShMeE5ahMop96cTeDl/v9qQQm2fYClE2QXZRi8wLzc+GmXSxdIqqbOIAhyObEXDbfQ== - -commander@^6.0.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.0.tgz#b990bfb8ac030aedc6d11bc04d1488ffef56db75" - integrity sha512-zP4jEKbe8SHzKJYQmq8Y9gYjtO/POJLgIdKgV7B9qNmABVFVc+ctqSX6iXh4mCpJfRBOabiZ2YKPg8ciDw6C+Q== - -commonmark-helpers@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/commonmark-helpers/-/commonmark-helpers-0.4.1.tgz#8035d28dd64c9bae519220948bea9d69b504593b" - integrity sha1-gDXSjdZMm65RkiCUi+qdabUEWTs= - dependencies: - commonmark "^0.21.0" - ramda "^0.17.1" - -commonmark@^0.21.0: - version "0.21.0" - resolved "https://registry.yarnpkg.com/commonmark/-/commonmark-0.21.0.tgz#ad646b335cd5f5f9892b6980129ed996326f0f6d" - integrity sha1-rWRrM1zV9fmJK2mAEp7ZljJvD20= - dependencies: - entities "~ 1.1.1" - mdurl "~ 1.0.0" - string.prototype.repeat "^0.2.0" - -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -conf@^0.11.2: - version "0.11.2" - resolved "https://registry.yarnpkg.com/conf/-/conf-0.11.2.tgz#879f479267600483e502583462ca4063fc9779b2" - integrity sha1-h59HkmdgBIPlAlg0YspAY/yXebI= - dependencies: - dot-prop "^3.0.0" - env-paths "^0.3.0" - mkdirp "^0.5.1" - pkg-up "^1.0.0" - -configstore@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" - integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== - dependencies: - dot-prop "^5.2.0" - graceful-fs "^4.1.2" - make-dir "^3.0.0" - unique-string "^2.0.0" - write-file-atomic "^3.0.0" - xdg-basedir "^4.0.0" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - -debug@4, debug@^4.1.0, debug@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.2.0.tgz#7f150f93920e94c58f5574c2fd01a3110effe7f1" - integrity sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg== - dependencies: - ms "2.1.2" - -debug@^2.2.0, debug@^2.3.3: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity 
sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" - -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== - -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - -devtools-protocol@0.0.809251: - version "0.0.809251" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.809251.tgz#300b3366be107d5c46114ecb85274173e3999518" - integrity sha512-pf+2OY6ghMDPjKkzSWxHMq+McD+9Ojmq5XVRYpv/kPd9sTMQxzEt21592a31API8qRjro0iYYOc3ag46qF/1FA== - -dot-prop@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-3.0.0.tgz#1b708af094a49c9a0e7dbcad790aba539dac1177" - integrity sha1-G3CK8JSknJoOfbyteQq6U52sEXc= - dependencies: - is-obj "^1.0.0" - -dot-prop@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" - integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== - dependencies: - is-obj "^2.0.0" - -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - -end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -"entities@~ 1.1.1": - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" - integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== - -env-paths@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-0.3.1.tgz#c30ccfcbc30c890943dc08a85582517ef00da463" - integrity sha1-wwzPy8MMiQlD3AioVYJRfvANpGM= - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -extract-zip@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" - integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== - dependencies: - debug "^4.1.1" - get-stream "^5.1.0" - yauzl "^2.10.0" - optionalDependencies: - "@types/yauzl" "^2.9.1" - -fast-glob@^2.2.3: - version "2.2.7" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-2.2.7.tgz#6953857c3afa475fff92ee6015d52da70a4cd39d" - integrity sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw== - dependencies: - "@mrmlnc/readdir-enhanced" "^2.2.1" - "@nodelib/fs.stat" "^1.1.2" - glob-parent "^3.1.0" - is-glob "^4.0.0" - merge2 "^1.2.3" - micromatch "^3.1.10" - -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" - integrity sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4= - dependencies: - pend "~1.2.0" - -file-type@^12.4.2: - version "12.4.2" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-12.4.2.tgz#a344ea5664a1d01447ee7fb1b635f72feb6169d9" - integrity sha512-UssQP5ZgIOKelfsaB5CuGAL+Y+q7EmONuiwF3N5HAH0t27rvrttgi6Ra9k/+DVaY9UF6+ybxu5pOXLUdA8N7Vg== - -fill-range@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - -front-matter@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/front-matter/-/front-matter-2.3.0.tgz#7203af896ce357ee04e2aa45169ea91ed7f67504" - integrity sha1-cgOviWzjV+4E4qpFFp6pHtf2dQQ= - dependencies: - js-yaml "^3.10.0" - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-extra@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-5.0.0.tgz#414d0110cdd06705734d055652c5411260c31abd" - integrity sha512-66Pm4RYbjzdyeuqudYqhFiNBbCIuI9kgRqLPSHIlXHidW8NIQtVdkM1yeZ4lXwuhbTETv3EUGMNHAAw6hiundQ== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs-extra@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" - integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -fsevents@~2.1.2: - version "2.1.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e" - integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ== - -gatsby-core-utils@^1.3.19: - version "1.3.19" - resolved "https://registry.yarnpkg.com/gatsby-core-utils/-/gatsby-core-utils-1.3.19.tgz#d7f9981fbc85affbbc09c96ac84c76c37615fc89" - integrity sha512-nwiU17m2lihvN7IOkBTj4n2WKc7Eb569Vv7G8dmuwthmjIB6ml98Quno5fRBCz+2DBF6R60boDfc84Qdct1LPw== - 
dependencies: - ci-info "2.0.0" - configstore "^5.0.1" - fs-extra "^8.1.0" - node-object-hash "^2.0.0" - proper-lockfile "^4.1.1" - tmp "^0.2.1" - xdg-basedir "^4.0.0" - -gatsby-source-filesystem@^2.1.19: - version "2.3.29" - resolved "https://registry.yarnpkg.com/gatsby-source-filesystem/-/gatsby-source-filesystem-2.3.29.tgz#5eaf1434593f38523655145323e426f21c30687a" - integrity sha512-PWHtkj+inhfPm8aMVhtxS1PAW+r4Kk5M1LDP5l0OO2HBaHiiSi0DulgmYJ3usYpctFqOQ92im/EQLRJbOaaQsA== - dependencies: - "@babel/runtime" "^7.10.3" - better-queue "^3.8.10" - bluebird "^3.7.2" - chokidar "^3.4.2" - file-type "^12.4.2" - fs-extra "^8.1.0" - gatsby-core-utils "^1.3.19" - got "^9.6.0" - md5-file "^3.2.3" - mime "^2.4.6" - pretty-bytes "^5.3.0" - progress "^2.0.3" - read-chunk "^3.2.0" - valid-url "^1.0.9" - xstate "^4.11.0" - -gatsby-source-git@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/gatsby-source-git/-/gatsby-source-git-1.1.0.tgz#99faab2b8053a78afec6f7a67c2e74b68b1575d3" - integrity sha512-f5HllxwS+ivVn6SitSJPEQe8tf/apjwq5TOZRiEIRJtlrm9eSBqM2hO6ZIOK5na6UuvI+BH8xxbgj0qrwNTznA== - dependencies: - fast-glob "^2.2.3" - fs-extra "^5.0.0" - gatsby-source-filesystem "^2.1.19" - git-url-parse "^11.1.1" - rimraf "^2.6.2" - simple-git "^1.105.0" - -get-md-title@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/get-md-title/-/get-md-title-1.0.4.tgz#daea37a95a2f7b136c2bdddfa917d9a7dfd70c15" - integrity sha1-2uo3qVovexNsK93fqRfZp9/XDBU= - dependencies: - commonmark-helpers "^0.4.0" - trim-html-tag "^1.0.3" - -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -git-up@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/git-up/-/git-up-4.0.2.tgz#10c3d731051b366dc19d3df454bfca3f77913a7c" - integrity sha512-kbuvus1dWQB2sSW4cbfTeGpCMd8ge9jx9RKnhXhuJ7tnvT+NIrTVfYZxjtflZddQYcmdOTlkAcjmx7bor+15AQ== - dependencies: - is-ssh "^1.3.0" - parse-url "^5.0.0" - -git-url-parse@^11.1.1: - version "11.2.0" - resolved "https://registry.yarnpkg.com/git-url-parse/-/git-url-parse-11.2.0.tgz#2955fd51befd6d96ea1389bbe2ef57e8e6042b04" - integrity sha512-KPoHZg8v+plarZvto4ruIzzJLFQoRx+sUs5DQSr07By9IBKguVd+e6jwrFR6/TP6xrCJlNV1tPqLO1aREc7O2g== - dependencies: - git-up "^4.0.0" - -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob-parent@~5.1.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" - integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== - dependencies: - is-glob "^4.0.1" - 
-glob-to-regexp@^0.3.0:
-  version "0.3.0"
-  resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab"
-  integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs=
-
-glob@^7.1.3:
-  version "7.1.6"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
-  integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
-  dependencies:
-    fs.realpath "^1.0.0"
-    inflight "^1.0.4"
-    inherits "2"
-    minimatch "^3.0.4"
-    once "^1.3.0"
-    path-is-absolute "^1.0.0"
-
-got@^9.6.0:
-  version "9.6.0"
-  resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
-  integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==
-  dependencies:
-    "@sindresorhus/is" "^0.14.0"
-    "@szmarczak/http-timer" "^1.1.2"
-    cacheable-request "^6.0.0"
-    decompress-response "^3.3.0"
-    duplexer3 "^0.1.4"
-    get-stream "^4.1.0"
-    lowercase-keys "^1.0.1"
-    mimic-response "^1.0.1"
-    p-cancelable "^1.0.0"
-    to-readable-stream "^1.0.0"
-    url-parse-lax "^3.0.0"
-
-graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0:
-  version "4.2.4"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb"
-  integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==
-
-has-flag@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
-  integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
-
-has-value@^0.3.1:
-  version "0.3.1"
-  resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
-  integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=
-  dependencies:
-    get-value "^2.0.3"
-    has-values "^0.1.4"
-    isobject "^2.0.0"
-
-has-value@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177"
-  integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=
-  dependencies:
-    get-value "^2.0.6"
-    has-values "^1.0.0"
-    isobject "^3.0.0"
-
-has-values@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771"
-  integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E=
-
-has-values@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f"
-  integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=
-  dependencies:
-    is-number "^3.0.0"
-    kind-of "^4.0.0"
-
-http-cache-semantics@^4.0.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
-  integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
-
-https-proxy-agent@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b"
-  integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==
-  dependencies:
-    agent-base "5"
-    debug "4"
-
-ieee754@^1.1.13:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
-  integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
-
-imurmurhash@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
-  integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
-
-inflight@^1.0.4:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
-  integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
-  dependencies:
-    once "^1.3.0"
-    wrappy "1"
-
-inherits@2, inherits@^2.0.3, inherits@^2.0.4:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
-  integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
-
-is-accessor-descriptor@^0.1.6:
-  version "0.1.6"
-  resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6"
-  integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=
-  dependencies:
-    kind-of "^3.0.2"
-
-is-accessor-descriptor@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656"
-  integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==
-  dependencies:
-    kind-of "^6.0.0"
-
-is-binary-path@~2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
-  integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
-  dependencies:
-    binary-extensions "^2.0.0"
-
-is-buffer@^1.1.5:
-  version "1.1.6"
-  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
-  integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
-
-is-data-descriptor@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
-  integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=
-  dependencies:
-    kind-of "^3.0.2"
-
-is-data-descriptor@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7"
-  integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==
-  dependencies:
-    kind-of "^6.0.0"
-
-is-descriptor@^0.1.0:
-  version "0.1.6"
-  resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca"
-  integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==
-  dependencies:
-    is-accessor-descriptor "^0.1.6"
-    is-data-descriptor "^0.1.4"
-    kind-of "^5.0.0"
-
-is-descriptor@^1.0.0, is-descriptor@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec"
-  integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==
-  dependencies:
-    is-accessor-descriptor "^1.0.0"
-    is-data-descriptor "^1.0.0"
-    kind-of "^6.0.2"
-
-is-extendable@^0.1.0, is-extendable@^0.1.1:
-  version "0.1.1"
-  resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
-  integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
-
-is-extendable@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
-  integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
-  dependencies:
-    is-plain-object "^2.0.4"
-
-is-extglob@^2.1.0, is-extglob@^2.1.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
-  integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
-
-is-glob@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a"
-  integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=
-  dependencies:
-    is-extglob "^2.1.0"
-
-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
-  integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
-  dependencies:
-    is-extglob "^2.1.1"
-
-is-number@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
-  integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=
-  dependencies:
-    kind-of "^3.0.2"
-
-is-number@^7.0.0:
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
-  integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
-
-is-obj@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f"
-  integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8=
-
-is-obj@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
-  integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
-
-is-plain-object@^2.0.3, is-plain-object@^2.0.4:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
-  integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
-  dependencies:
-    isobject "^3.0.1"
-
-is-ssh@^1.3.0:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/is-ssh/-/is-ssh-1.3.2.tgz#a4b82ab63d73976fd8263cceee27f99a88bdae2b"
-  integrity sha512-elEw0/0c2UscLrNG+OAorbP539E3rhliKPg+hDMWN9VwrDXfYK+4PBEykDPfxlYYtQvl84TascnQyobfQLHEhQ==
-  dependencies:
-    protocols "^1.1.0"
-
-is-typedarray@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
-  integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
-
-is-windows@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
-  integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
-
-isarray@1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
-  integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
-
-isobject@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
-  integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=
-  dependencies:
-    isarray "1.0.0"
-
-isobject@^3.0.0, isobject@^3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
-  integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=
-
-js-yaml@^3.10.0:
-  version "3.12.0"
-  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1"
-  integrity sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==
-  dependencies:
-    argparse "^1.0.7"
-    esprima "^4.0.0"
-
-json-buffer@3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
-  integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
-
-jsonfile@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb"
-  integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=
-  optionalDependencies:
-    graceful-fs "^4.1.6"
-
-keyv@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
-  integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
-  dependencies:
-    json-buffer "3.0.0"
-
-kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
-  version "3.2.2"
-  resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
-  integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=
-  dependencies:
-    is-buffer "^1.1.5"
-
-kind-of@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57"
-  integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc=
-  dependencies:
-    is-buffer "^1.1.5"
-
-kind-of@^5.0.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d"
-  integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==
-
-kind-of@^6.0.0, kind-of@^6.0.2:
-  version "6.0.3"
-  resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
-  integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
-
-locate-path@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
-  integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
-  dependencies:
-    p-locate "^4.1.0"
-
-lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
-  integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==
-
-lowercase-keys@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
-  integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
-
-make-dir@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
-  integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
-  dependencies:
-    semver "^6.0.0"
-
-map-cache@^0.2.2:
-  version "0.2.2"
-  resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
-  integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=
-
-map-visit@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f"
-  integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=
-  dependencies:
-    object-visit "^1.0.0"
-
-markdown-to-medium@1.4.4:
-  version "1.4.4"
-  resolved "https://registry.yarnpkg.com/markdown-to-medium/-/markdown-to-medium-1.4.4.tgz#056b763105b8e9e0454abb7bd711cc194f9b555b"
-  integrity sha512-F93jUxFHPfUe1go5ERO0d0hc0ptR4N1YtMzEJzWSRio83is98YIvlJk2BLWl3LNKQkYf1RFGXS9GwLEcZiGGwA==
-  dependencies:
-    cliclopts "^1.1.1"
-    colors "^1.1.2"
-    conf "^0.11.2"
-    front-matter "^2.1.0"
-    get-md-title "^1.0.4"
-    medium-sdk "0.0.3"
-    minimist "^1.2.0"
-    open "0.0.5"
-    xtend "^4.0.1"
-
-md5-file@^3.2.3:
-  version "3.2.3"
-  resolved "https://registry.yarnpkg.com/md5-file/-/md5-file-3.2.3.tgz#f9bceb941eca2214a4c0727f5e700314e770f06f"
-  integrity sha512-3Tkp1piAHaworfcCgH0jKbTvj1jWWFgbvh2cXaNCgHwyTCBxxvD1Y04rmfpvdPm1P4oXMOpm6+2H7sr7v9v8Fw==
-  dependencies:
-    buffer-alloc "^1.1.0"
-
-"mdurl@~ 1.0.0":
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
-  integrity sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=
-
-medium-sdk@0.0.3:
-  version "0.0.3"
-  resolved "https://registry.yarnpkg.com/medium-sdk/-/medium-sdk-0.0.3.tgz#13dea6fda76f46dbb6c39de01ca1c0dbd7b32745"
-  integrity sha1-E96m/advRtu2w53gHKHA29ezJ0U=
-
-merge2@^1.2.3:
-  version "1.4.1"
-  resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
-  integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
-
-micromatch@^3.1.10:
-  version "3.1.10"
-  resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
-  integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==
-  dependencies:
-    arr-diff "^4.0.0"
-    array-unique "^0.3.2"
-    braces "^2.3.1"
-    define-property "^2.0.2"
-    extend-shallow "^3.0.2"
-    extglob "^2.0.4"
-    fragment-cache "^0.2.1"
-    kind-of "^6.0.2"
-    nanomatch "^1.2.9"
-    object.pick "^1.3.0"
-    regex-not "^1.0.0"
-    snapdragon "^0.8.1"
-    to-regex "^3.0.2"
-
-mime@^2.4.6:
-  version "2.4.6"
-  resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.6.tgz#e5b407c90db442f2beb5b162373d07b69affa4d1"
-  integrity sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==
-
-mimic-response@^1.0.0, mimic-response@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
-  integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
-
-minimatch@^3.0.4:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
-  integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
-  dependencies:
-    brace-expansion "^1.1.7"
-
-minimist@0.0.8:
-  version "0.0.8"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
-  integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
-
-minimist@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
-  integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=
-
-mixin-deep@^1.2.0:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566"
-  integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==
-  dependencies:
-    for-in "^1.0.2"
-    is-extendable "^1.0.1"
-
-mkdirp-classic@^0.5.2:
-  version "0.5.3"
-  resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
-  integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
-
-mkdirp@^0.5.1:
-  version "0.5.1"
-  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
-  integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
-  dependencies:
-    minimist "0.0.8"
-
-ms@2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
-  integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
-
-ms@2.1.2, ms@^2.1.1:
-  version "2.1.2"
-  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
-  integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
-
-nanomatch@^1.2.9:
-  version "1.2.13"
-  resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
-  integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==
-  dependencies:
-    arr-diff "^4.0.0"
-    array-unique "^0.3.2"
-    define-property "^2.0.2"
-    extend-shallow "^3.0.2"
-    fragment-cache "^0.2.1"
-    is-windows "^1.0.2"
-    kind-of "^6.0.2"
-    object.pick "^1.3.0"
-    regex-not "^1.0.0"
-    snapdragon "^0.8.1"
-    to-regex "^3.0.1"
-
-node-eta@^0.9.0:
-  version "0.9.0"
-  resolved "https://registry.yarnpkg.com/node-eta/-/node-eta-0.9.0.tgz#9fb0b099bcd2a021940e603c64254dc003d9a7a8"
-  integrity sha1-n7CwmbzSoCGUDmA8ZCVNwAPZp6g=
-
-node-fetch@^2.6.1:
-  version "2.6.1"
-  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
-  integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
-
-node-object-hash@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/node-object-hash/-/node-object-hash-2.0.0.tgz#9971fcdb7d254f05016bd9ccf508352bee11116b"
-  integrity sha512-VZR0zroAusy1ETZMZiGeLkdu50LGjG5U1KHZqTruqtTyQ2wfWhHG2Ow4nsUbfTFGlaREgNHcCWoM/OzEm6p+NQ==
-
-normalize-path@^3.0.0, normalize-path@~3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
-  integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
-
-normalize-url@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559"
-  integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==
-
-normalize-url@^4.1.0:
-  version "4.5.0"
-  resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.0.tgz#453354087e6ca96957bd8f5baf753f5982142129"
-  integrity sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==
-
-object-copy@^0.1.0:
-  version "0.1.0"
-  resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c"
-  integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw=
-  dependencies:
-    copy-descriptor "^0.1.0"
-    define-property "^0.2.5"
-    kind-of "^3.0.3"
-
-object-visit@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
-  integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=
-  dependencies:
-    isobject "^3.0.0"
-
-object.pick@^1.3.0:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
-  integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=
-  dependencies:
-    isobject "^3.0.1"
-
-once@^1.3.0, once@^1.3.1, once@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
-  integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
-  dependencies:
-    wrappy "1"
-
-open@0.0.5:
-  version "0.0.5"
-  resolved "https://registry.yarnpkg.com/open/-/open-0.0.5.tgz#42c3e18ec95466b6bf0dc42f3a2945c3f0cad8fc"
-  integrity sha1-QsPhjslUZra/DcQvOilFw/DK2Pw=
-
-p-cancelable@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
-  integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==
-
-p-finally@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
-  integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
-
-p-limit@^2.2.0:
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
-  integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
-  dependencies:
-    p-try "^2.0.0"
-
-p-locate@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
-  integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
-  dependencies:
-    p-limit "^2.2.0"
-
-p-try@^2.0.0, p-try@^2.1.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
-  integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
-
-parse-path@^4.0.0:
-  version "4.0.2"
-  resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-4.0.2.tgz#ef14f0d3d77bae8dd4bc66563a4c151aac9e65aa"
-  integrity sha512-HSqVz6iuXSiL8C1ku5Gl1Z5cwDd9Wo0q8CoffdAghP6bz8pJa1tcMC+m4N+z6VAS8QdksnIGq1TB6EgR4vPR6w==
-  dependencies:
-    is-ssh "^1.3.0"
-    protocols "^1.4.0"
-
-parse-url@^5.0.0:
-  version "5.0.2"
-  resolved "https://registry.yarnpkg.com/parse-url/-/parse-url-5.0.2.tgz#856a3be1fcdf78dc93fc8b3791f169072d898b59"
-  integrity sha512-Czj+GIit4cdWtxo3ISZCvLiUjErSo0iI3wJ+q9Oi3QuMYTI6OZu+7cewMWZ+C1YAnKhYTk6/TLuhIgCypLthPA==
-  dependencies:
-    is-ssh "^1.3.0"
-    normalize-url "^3.3.0"
-    parse-path "^4.0.0"
-    protocols "^1.4.0"
-
-pascalcase@^0.1.1:
-  version "0.1.1"
-  resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14"
-  integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=
-
-path-dirname@^1.0.0:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0"
-  integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=
-
-path-exists@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b"
-  integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=
-  dependencies:
-    pinkie-promise "^2.0.0"
-
-path-exists@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
-  integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
-
-path-is-absolute@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
-  integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
-
-pend@~1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
-  integrity sha1-elfrVQpng/kRUzH89GY9XI4AelA=
-
-picomatch@^2.0.4, picomatch@^2.2.1:
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
-  integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
-
-pify@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231"
-  integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==
-
-pinkie-promise@^2.0.0:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
-  integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o=
-  dependencies:
-    pinkie "^2.0.0"
-
-pinkie@^2.0.0:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
-  integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA=
-
-pkg-dir@^4.2.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
-  integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
-  dependencies:
-    find-up "^4.0.0"
-
-pkg-up@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-1.0.0.tgz#3e08fb461525c4421624a33b9f7e6d0af5b05a26"
-  integrity sha1-Pgj7RhUlxEIWJKM7n35tCvWwWiY=
-  dependencies:
-    find-up "^1.0.0"
-
-posix-character-classes@^0.1.0:
-  version "0.1.1"
-  resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
-  integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
-
-prepend-http@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
-  integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=
-
-pretty-bytes@^5.3.0:
-  version "5.4.1"
-  resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.4.1.tgz#cd89f79bbcef21e3d21eb0da68ffe93f803e884b"
-  integrity sha512-s1Iam6Gwz3JI5Hweaz4GoCD1WUNUIyzePFy5+Js2hjwGVt2Z79wNN+ZKOZ2vB6C+Xs6njyB84Z1IthQg8d9LxA==
-
-progress@^2.0.1, progress@^2.0.3:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
-  integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
-
-proper-lockfile@^4.1.1:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/proper-lockfile/-/proper-lockfile-4.1.1.tgz#284cf9db9e30a90e647afad69deb7cb06881262c"
-  integrity sha512-1w6rxXodisVpn7QYvLk706mzprPTAPCYAqxMvctmPN3ekuRk/kuGkGc82pangZiAt4R3lwSuUzheTTn0/Yb7Zg==
-  dependencies:
-    graceful-fs "^4.1.11"
-    retry "^0.12.0"
-    signal-exit "^3.0.2"
-
-protocols@^1.1.0, protocols@^1.4.0:
-  version "1.4.8"
-  resolved "https://registry.yarnpkg.com/protocols/-/protocols-1.4.8.tgz#48eea2d8f58d9644a4a32caae5d5db290a075ce8"
-  integrity sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg==
-
-proxy-from-env@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
-  integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==
-
-pump@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
-  integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
-  dependencies:
-    end-of-stream "^1.1.0"
-    once "^1.3.1"
-
-puppeteer@^5.0.0:
-  version "5.4.1"
-  resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-5.4.1.tgz#f2038eb23a0f593ed2cce0d6e7cd5c43aecd6756"
-  integrity sha512-8u6r9tFm3gtMylU4uCry1W/CeAA8uczKMONvGvivkTsGqKA7iB7DWO2CBFYlB9GY6/IEoq9vkI5slJWzUBkwNw==
-  dependencies:
-    debug "^4.1.0"
-    devtools-protocol "0.0.809251"
-    extract-zip "^2.0.0"
-    https-proxy-agent "^4.0.0"
-    node-fetch "^2.6.1"
-    pkg-dir "^4.2.0"
-    progress "^2.0.1"
-    proxy-from-env "^1.0.0"
-    rimraf "^3.0.2"
-    tar-fs "^2.0.0"
-    unbzip2-stream "^1.3.3"
-    ws "^7.2.3"
-
-ramda@^0.17.1:
-  version "0.17.1"
-  resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.17.1.tgz#4c198147d3ab54e8c15255f11730e2116f6e6073"
-  integrity sha1-TBmBR9OrVOjBUlXxFzDiEW9uYHM=
-
-read-chunk@^3.2.0:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/read-chunk/-/read-chunk-3.2.0.tgz#2984afe78ca9bfbbdb74b19387bf9e86289c16ca"
-  integrity sha512-CEjy9LCzhmD7nUpJ1oVOE6s/hBkejlcJEgLQHVnQznOSilOPb+kpKktlLfFDK3/WP43+F80xkUTM2VOkYoSYvQ==
-  dependencies:
-    pify "^4.0.1"
-    with-open-file "^0.1.6"
-
-readable-stream@^3.1.1, readable-stream@^3.4.0:
-  version "3.6.0"
-  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
-  integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
-  dependencies:
-    inherits "^2.0.3"
-    string_decoder "^1.1.1"
-    util-deprecate "^1.0.1"
-
-readdirp@~3.4.0:
-  version "3.4.0"
-  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.4.0.tgz#9fdccdf9e9155805449221ac645e8303ab5b9ada"
-  integrity sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==
-  dependencies:
-    picomatch "^2.2.1"
-
-regenerator-runtime@^0.13.4:
-  version "0.13.7"
-  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55"
-  integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==
-
-regex-not@^1.0.0, regex-not@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
-  integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==
-  dependencies:
-    extend-shallow "^3.0.2"
-    safe-regex "^1.1.0"
-
-repeat-element@^1.1.2:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce"
-  integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==
-
-repeat-string@^1.6.1:
-  version "1.6.1"
-  resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
-  integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc=
-
-resolve-url@^0.2.1:
-  version "0.2.1"
-  resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
-  integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
-
-responselike@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
-  integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=
-  dependencies:
-    lowercase-keys "^1.0.0"
-
-ret@~0.1.10:
-  version "0.1.15"
-  resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
-  integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
-
-retry@^0.12.0:
-  version "0.12.0"
-  resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b"
-  integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=
-
-rimraf@^2.6.2:
-  version "2.7.1"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
-  integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
-  dependencies:
-    glob "^7.1.3"
-
-rimraf@^3.0.0, rimraf@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
-  integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
-  dependencies:
-    glob "^7.1.3"
-
-safe-buffer@~5.2.0:
-  version "5.2.1"
-  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
-  integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
-
-safe-regex@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e"
-  integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4=
-  dependencies:
-    ret "~0.1.10"
-
-semver@^6.0.0:
-  version "6.3.0"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
-  integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
-
-set-value@^2.0.0, set-value@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
-  integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==
-  dependencies:
-    extend-shallow "^2.0.1"
-    is-extendable "^0.1.1"
-    is-plain-object "^2.0.3"
-    split-string "^3.0.1"
-
-signal-exit@^3.0.2:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
-  integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
-
-simple-git@^1.105.0:
-  version "1.132.0"
-  resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-1.132.0.tgz#53ac4c5ec9e74e37c2fd461e23309f22fcdf09b1"
-  integrity sha512-xauHm1YqCTom1sC9eOjfq3/9RKiUA9iPnxBbrY2DdL8l4ADMu0jjM5l5lphQP5YWNqAL2aXC/OeuQ76vHtW5fg==
-  dependencies:
-    debug "^4.0.1"
-
-snapdragon-node@^2.0.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
-  integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==
-  dependencies:
-    define-property "^1.0.0"
-    isobject "^3.0.0"
-    snapdragon-util "^3.0.1"
-
-snapdragon-util@^3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2"
-  integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==
-  dependencies:
-    kind-of "^3.2.0"
-
-snapdragon@^0.8.1:
-  version "0.8.2"
-  resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d"
-  integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==
-  dependencies:
-    base "^0.11.1"
-    debug "^2.2.0"
-    define-property "^0.2.5"
-    extend-shallow "^2.0.1"
-    map-cache "^0.2.2"
-    source-map "^0.5.6"
-    source-map-resolve "^0.5.0"
-    use "^3.1.0"
-
-source-map-resolve@^0.5.0:
-  version "0.5.3"
-  resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a"
-  integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==
-  dependencies:
-    atob "^2.1.2"
-    decode-uri-component "^0.2.0"
-    resolve-url "^0.2.1"
-    source-map-url "^0.4.0"
-    urix "^0.1.0"
-
-source-map-url@^0.4.0:
-  version "0.4.0"
-  resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
-  integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=
-
-source-map@^0.5.6:
-  version "0.5.7"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
-  integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
-
-split-string@^3.0.1, split-string@^3.0.2:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2"
-  integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==
-  dependencies:
-    extend-shallow "^3.0.0"
-
-sprintf-js@~1.0.2:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
-  integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
-
-static-extend@^0.1.1:
-  version "0.1.2"
-  resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6"
-  integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=
-  dependencies:
-    define-property "^0.2.5"
-    object-copy "^0.1.0"
-
-string.prototype.repeat@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/string.prototype.repeat/-/string.prototype.repeat-0.2.0.tgz#aba36de08dcee6a5a337d49b2ea1da1b28fc0ecf"
-  integrity sha1-q6Nt4I3O5qWjN9SbLqHaGyj8Ds8=
-
-string_decoder@^1.1.1:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
-  integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
-  dependencies:
-    safe-buffer "~5.2.0"
-
-supports-color@^7.1.0:
-  version "7.2.0"
-  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
-  integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
-  dependencies:
-    has-flag "^4.0.0"
-
-tar-fs@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.0.tgz#d1cdd121ab465ee0eb9ccde2d35049d3f3daf0d5"
-  integrity sha512-9uW5iDvrIMCVpvasdFHW0wJPez0K4JnMZtsuIeDI7HyMGJNxmDZDOCQROr7lXyS+iL/QMpj07qcjGYTSdRFXUg==
-  dependencies:
-    chownr "^1.1.1"
-    mkdirp-classic "^0.5.2"
-    pump "^3.0.0"
-    tar-stream "^2.0.0"
-
-tar-stream@^2.0.0:
-  version "2.1.4"
-  resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.1.4.tgz#c4fb1a11eb0da29b893a5b25476397ba2d053bfa"
-  integrity sha512-o3pS2zlG4gxr67GmFYBLlq+dM8gyRGUOvsrHclSkvtVtQbjV0s/+ZE8OpICbaj8clrX3tjeHngYGP7rweaBnuw==
-  dependencies:
-    bl "^4.0.3"
-    end-of-stream "^1.4.1"
-    fs-constants "^1.0.0"
-    inherits "^2.0.3"
-    readable-stream "^3.1.1"
-
-through@^2.3.8:
-  version "2.3.8"
-  resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
-  integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
-
-tmp@^0.2.1:
-  version "0.2.1"
-  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14"
-  integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==
-  dependencies:
-    rimraf "^3.0.0"
-
-to-object-path@^0.3.0:
-  version "0.3.0"
-  resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af"
-  integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=
-  dependencies:
-    kind-of "^3.0.2"
-
-to-readable-stream@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771"
-  integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==
-
-to-regex-range@^2.1.0:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38"
-  integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=
-  dependencies:
-    is-number "^3.0.0"
-    repeat-string "^1.6.1"
-
-to-regex-range@^5.0.1:
-  version "5.0.1"
-  resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
-  integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
-  dependencies:
-    is-number "^7.0.0"
-
-to-regex@^3.0.1, to-regex@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce"
-  integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==
-  dependencies:
-    define-property "^2.0.2"
-    extend-shallow "^3.0.2"
-    regex-not "^1.0.2"
-    safe-regex "^1.1.0"
-
-trim-html-tag@^1.0.3:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/trim-html-tag/-/trim-html-tag-1.0.5.tgz#bbeacd7fc3fcc042ce702f91aad1ccd9d05a77ae"
-  integrity sha1-u+rNf8P8wELOcC+RqtHM2dBad64=
-  dependencies:
-    ramda "^0.17.1"
-
-typedarray-to-buffer@^3.1.5:
-  version "3.1.5"
-  resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
-  integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
-  dependencies:
-    is-typedarray "^1.0.0"
-
-unbzip2-stream@^1.3.3:
-  version "1.4.3"
-  resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7"
-  integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==
-  dependencies:
-    buffer "^5.2.1"
-    through "^2.3.8"
-
-union-value@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"
-  integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==
-  dependencies:
-    arr-union "^3.1.0"
-    get-value "^2.0.6"
-    is-extendable "^0.1.1"
-    set-value "^2.0.1"
-
-unique-string@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
-  integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
-  dependencies:
-    crypto-random-string "^2.0.0"
-
-universalify@^0.1.0:
-  version "0.1.2"
-  resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
-  integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
-
-unset-value@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"
-  integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=
-  dependencies:
-    has-value "^0.3.1"
-    isobject "^3.0.0"
-
-urix@^0.1.0:
-  version "0.1.0"
-  resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
-  integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=
-
-url-parse-lax@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c"
-  integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=
-  dependencies:
-    prepend-http "^2.0.0"
-
-use@^3.1.0:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
-  integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
-
-util-deprecate@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
-  integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
-
-uuid@^3.0.0:
-  version "3.4.0"
-  resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
-  integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
-
-valid-url@^1.0.9:
-  version "1.0.9"
-  resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200"
-  integrity sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA=
-
-with-open-file@^0.1.6:
-  version "0.1.7"
-  resolved "https://registry.yarnpkg.com/with-open-file/-/with-open-file-0.1.7.tgz#e2de8d974e8a8ae6e58886be4fe8e7465b58a729"
-  integrity sha512-ecJS2/oHtESJ1t3ZfMI3B7KIDKyfN0O16miWxdn30zdh66Yd3LsRFebXZXq6GU4xfxLf6nVxp9kIqElb5fqczA==
-  dependencies:
-    p-finally "^1.0.0"
-    p-try "^2.1.0"
-    pify "^4.0.1"
-
-wrappy@1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
-  integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
-
-write-file-atomic@^3.0.0:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
-  integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
-  dependencies:
-    imurmurhash "^0.1.4"
-    is-typedarray "^1.0.0"
-    signal-exit "^3.0.2"
-    typedarray-to-buffer "^3.1.5"
-
-ws@^7.2.3:
-  version "7.3.1"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.1.tgz#d0547bf67f7ce4f12a72dfe31262c68d7dc551c8"
-  integrity sha512-D3RuNkynyHmEJIpD2qrgVkc9DQ23OrN/moAwZX4L8DfvszsJxpjQuUq3LMx6HoYji9fbIOBY18XWBsAux1ZZUA==
-
-xdg-basedir@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
-  integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
-
-xstate@^4.11.0:
-  version "4.13.0"
-  resolved "https://registry.yarnpkg.com/xstate/-/xstate-4.13.0.tgz#0be22ceb8bae2bc6a025fab330fe44204d76771c"
-  integrity sha512-UnUJJzP2KTPqnmxIoD/ymXtpy/hehZnUlO6EXqWC/72XkPb15p9Oz/X4WhS3QE+by7NP+6b5bCi/GTGFzm5D+A==
-
-xtend@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
-  integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68=
-
-yauzl@^2.10.0:
-  version "2.10.0"
-  resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9"
-  integrity sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=
-  dependencies:
-    buffer-crc32 "~0.2.3"
-    fd-slicer "~1.1.0"