Commit

Merge pull request #1 from sctg-development/dev
Dev
aeltorio authored Dec 15, 2024
2 parents 253a864 + 194b7bb commit 96d59ac
Showing 7 changed files with 1,860 additions and 2,790 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -2,3 +2,4 @@ node_modules
dist
api.key
.DS_Store
+ config.json
4,421 changes: 1,658 additions & 2,763 deletions package-lock.json

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions package.json
@@ -13,11 +13,12 @@
"dev_server_port": 3000
},
"scripts": {
"build": "npm run getGroqModels && npx webpack --mode production",
"build": "npm run getPrompts && npm run getGroqModels && npx webpack --mode production",
"build:github-pages": "npm run getGroqModels && npx webpack --env website=GITHUB_PAGES --mode production",
"build:dev": "webpack --mode development",
"dev-server": "webpack serve --mode development",
"getGroqModels": "tsx utils/initModels.ts",
"getPrompts": "tsx utils/getPrompts.ts",
"_getGroqModels": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' ./utils/initModels.ts",
"lint": "office-addin-lint check",
"lint:fix": "office-addin-lint fix",
@@ -31,26 +31,25 @@
"watch": "webpack --mode development --watch"
},
"dependencies": {
"@fluentui/react-components": "^9.55.1",
"@fluentui/react-icons": "^2.0.264",
"@fluentui/react-components": "^9.56.2",
"@fluentui/react-icons": "^2.0.266",
"@sctg/ai-sdk": "0.0.4",
"@sctg/sentencepiece-js": "^1.3.3",
"core-js": "^3.39.0",
"dompurify": "^3.1.7",
"dompurify": "^3.2.1",
"es6-promise": "^4.2.8",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-markdown": "^9.0.1",
"react-router-dom": "^6.27.0",
"react-router-dom": "^7.0.1",
"regenerator-runtime": "^0.14.1",
"rehype-highlight": "^7.0.1"
},
"devDependencies": {
"@babel/core": "^7.26.0",
"@babel/plugin-syntax-import-attributes": "^7.26.0",
"@babel/preset-typescript": "^7.26.0",
"@types/dompurify": "^3.0.5",
"@types/office-js": "^1.0.441",
"@types/office-js": "^1.0.449",
"@types/office-runtime": "^1.0.35",
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
@@ -83,7 +83,7 @@
"ts-loader": "^9.5.1",
"ts-node": "^10.9.2",
"tsx": "^4.19.2",
"typescript": "^5.6.3",
"typescript": "^5.7.2",
"webpack": "^5.96.1",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "5.1.0",
166 changes: 166 additions & 0 deletions prompt_dataset.ipynb
@@ -0,0 +1,166 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ['HF_TOKEN'] = 'hf_…………'\n",
"dataset_name = 'eltorio/ai-prompts'\n",
"prompts_src = 'config.json'"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Login to Hugging Face"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Hugging Face token found in environment variable\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Note: Environment variable`HF_TOKEN` is set and is the current active token independently from the token you've just configured.\n"
]
}
],
"source": [
"### Login to Hugging Face\n",
"from huggingface_hub import login\n",
"import os\n",
"\n",
"HF_TOKEN = \"\"\n",
"\n",
"if os.environ.get('HF_TOKEN') is not None:\n",
" HF_TOKEN = os.environ.get('HF_TOKEN')\n",
" print(f\"Hugging Face token found in environment variable\")\n",
"try:\n",
" import google.colab\n",
" from google.colab import userdata\n",
" if (userdata.get('HF_TOKEN') is not None) and (HF_TOKEN == \"\"):\n",
" HF_TOKEN = userdata.get('HF_TOKEN')\n",
" else:\n",
" raise ValueError(\"Please set your Hugging Face token in the user data panel, or pass it as an environment variable\")\n",
"except ModuleNotFoundError:\n",
" if HF_TOKEN is None:\n",
" raise ValueError(\"Please set your Hugging Face token in the user data panel, or pass it as an environment variable\")\n",
"\n",
"login(\n",
" token=HF_TOKEN,\n",
" add_to_git_credential=True\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"prompts = json.loads(open(prompts_src, 'r').read())\n",
"# remove standalone column\n",
"for prompt in prompts:\n",
" if 'standalone' in prompt:\n",
" del prompt['standalone']\n"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"from datasets import Dataset\n",
"pd = pd.DataFrame(prompts)\n",
"dataset = Dataset.from_pandas(pd)\n"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "5db8b3622de1491e8d35e92616524e96",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Uploading the dataset shards: 0%| | 0/1 [00:00<?, ?it/s]"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "ce0bb9646d6f4a5b94fcbae5febf977f",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Creating parquet from Arrow format: 0%| | 0/1 [00:00<?, ?ba/s]"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"CommitInfo(commit_url='https://huggingface.co/datasets/eltorio/ai-prompts/commit/bdebad7b860a36dbd4e07f957f36d7da9846ea9a', commit_message='Upload dataset', commit_description='', oid='bdebad7b860a36dbd4e07f957f36d7da9846ea9a', pr_url=None, repo_url=RepoUrl('https://huggingface.co/datasets/eltorio/ai-prompts', endpoint='https://huggingface.co', repo_type='dataset', repo_id='eltorio/ai-prompts'), pr_revision=None, pr_num=None)"
]
},
"execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dataset.push_to_hub(dataset_name)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
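The notebook above exports the add-in prompt list to the eltorio/ai-prompts dataset on the Hugging Face Hub. As a quick check that the upload worked, the dataset can be pulled back down with the datasets library. The sketch below is illustrative only; it assumes the default "train" split that Dataset.push_to_hub() creates, which is not shown in this diff.

# Minimal sketch: reload the prompts published by the notebook above.
# Assumes the default "train" split created by Dataset.push_to_hub().
from datasets import load_dataset

prompts_ds = load_dataset("eltorio/ai-prompts", split="train")
print(prompts_ds)     # column names and number of prompt records
print(prompts_ds[0])  # first prompt entry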
34 changes: 17 additions & 17 deletions src/aipane/config.ts
@@ -13,16 +13,16 @@ export const config: AIConfig = {
aiproxied: true,
models: [
{
id: "llama-3.2-90b-text-preview",
name: "Llama 3.2 (90b Text Preview)",
id: "llama-3.3-70b-specdec",
name: "Llama 3.3 (70b Specdec)",
default: true,
max_tokens: 8192,
},
{
id: "llama-3.2-11b-text-preview",
name: "Llama 3.2 (11b Text Preview)",
id: "llama-3.3-70b-versatile",
name: "Llama 3.3 (70b Versatile)",
default: false,
- max_tokens: 8192,
+ max_tokens: 32768,
},
{
id: "llama-3.2-90b-vision-preview",
@@ -48,18 +48,18 @@
default: false,
max_tokens: 8192,
},
- {
- id: "llama-3.1-70b-versatile",
- name: "Llama 3.1 (70b Versatile)",
- default: false,
- max_tokens: 32768,
- },
{
id: "llama3-70b-8192",
name: "Llama3 70b (8192)",
default: false,
max_tokens: 8192,
},
+ {
+ id: "llama-3.1-70b-versatile",
+ name: "Llama 3.1 (70b Versatile)",
+ default: false,
+ max_tokens: 32768,
+ },
{
id: "llama-3.1-8b-instant",
name: "Llama 3.1 (8b Instant)",
@@ -72,12 +72,6 @@
default: false,
max_tokens: 8192,
},
- {
- id: "llama3-groq-70b-8192-tool-use-preview",
- name: "Llama3 Groq (70b 8192 Tool Use)",
- default: false,
- max_tokens: 8192,
- },
{
id: "llama-guard-3-8b",
name: "Llama Guard (3 8b)",
@@ -90,6 +84,12 @@
default: false,
max_tokens: 8192,
},
+ {
+ id: "llama3-groq-70b-8192-tool-use-preview",
+ name: "Llama3 Groq (70b 8192 Tool Use)",
+ default: false,
+ max_tokens: 8192,
+ },
],
},
{
4 changes: 2 additions & 2 deletions src/version.ts
@@ -1,4 +1,4 @@
export const versionInfo = {
commit: "a859828ce077447711338df714a4ea6a0b0236f2",
date: "2024-11-06 17:40:45 +0100",
commit: "463ef47853239e4b21154323382e091f18f124cd",
date: "2024-11-25 09:51:49 +0100",
};
8 changes: 8 additions & 0 deletions utils/getPrompts.ts
@@ -0,0 +1,8 @@
import { config } from "../src/aipane/config";
import { writeFileSync } from "fs";

export const getPrompts = () => {
return config.prompts;
};

writeFileSync("./config.json", JSON.stringify(getPrompts(), null, 2));
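For reference, getPrompts.ts writes config.prompts to config.json, which is now git-ignored (see the .gitignore change above) and is read by prompt_dataset.ipynb as prompts_src. A quick sanity check of that file before pushing to the Hub could look like the sketch below; it is hypothetical, since the exact fields of each prompt entry are not shown in this diff and only the optional standalone flag (stripped by the notebook) is assumed.

# Hypothetical check of the config.json written by utils/getPrompts.ts.
# Only the optional "standalone" field is known from this diff.
import json

with open("config.json", "r", encoding="utf-8") as fh:
    prompts = json.load(fh)

assert isinstance(prompts, list) and len(prompts) > 0, "expected a non-empty JSON array of prompts"
standalone_count = sum(1 for p in prompts if "standalone" in p)
print(f"{len(prompts)} prompt entries, {standalone_count} marked standalone")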
