Llama 3 running on Apple Silicon with MLX
# --- Backend: create an isolated conda env, install deps, run the API ---
# NOTE: the env must be ACTIVATED before installing; otherwise every
# `conda install` below lands in the currently-active (base) environment
# instead of `backend`.
conda create --yes --name backend
conda activate backend
conda install --yes --file backend/requirements.txt
# PyTorch from the official `pytorch` channel (Apple Silicon builds).
conda install --yes pytorch::pytorch torchvision torchaudio -c pytorch
# MLX LLM runtime (Apple Silicon) from conda-forge.
conda install --yes -c conda-forge mlx-lm
# Serve the FastAPI/ASGI app at backend/app/main.py on all interfaces, port 8000.
uvicorn backend.app.main:app --host 0.0.0.0 --port 8000
# --- Frontend: scaffold the React app, then run dev server / prod build ---
# One-time scaffold (creates the app/ directory).
npx create-react-app app

# Session 1 — development server (blocks this terminal until stopped):
cd app
npm start

# Session 2 — production build; start a fresh shell at the repo root:
cd app
npm run build