Skip to content
This repository has been archived by the owner on Sep 12, 2024. It is now read-only.

Commit

Permalink
update: sync latest llama.cpp branch
Browse files Browse the repository at this point in the history
  • Loading branch information
hlhr202 committed May 29, 2023
1 parent 96724a6 commit 7a8229f
Show file tree
Hide file tree
Showing 9 changed files with 247 additions and 157 deletions.
10 changes: 5 additions & 5 deletions example/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@llama-node/examples",
"version": "0.1.5",
"version": "0.1.6",
"description": "",
"main": "index.js",
"type": "module",
Expand All @@ -19,9 +19,9 @@
"langchain": "^0.0.56"
},
"dependencies": {
"@llama-node/core": "0.1.5",
"@llama-node/llama-cpp": "0.1.5",
"@llama-node/rwkv-cpp": "0.1.5",
"llama-node": "0.1.5"
"@llama-node/core": "0.1.6",
"@llama-node/llama-cpp": "0.1.6",
"@llama-node/rwkv-cpp": "0.1.6",
"llama-node": "0.1.6"
}
}
26 changes: 13 additions & 13 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "llama-node",
"version": "0.1.5",
"version": "0.1.6",
"description": "Node.js Library for Large Language Model LLaMA/RWKV",
"keywords": [
"llama",
Expand Down Expand Up @@ -58,10 +58,10 @@
"packageManager": "[email protected]",
"homepage": "https://llama-node.vercel.app/",
"devDependencies": {
"@llama-node/cli": "0.1.5",
"@llama-node/core": "0.1.5",
"@llama-node/llama-cpp": "0.1.5",
"@llama-node/rwkv-cpp": "0.1.5",
"@llama-node/cli": "0.1.6",
"@llama-node/core": "0.1.6",
"@llama-node/llama-cpp": "0.1.6",
"@llama-node/rwkv-cpp": "0.1.6",
"@napi-rs/cli": "^2.15.2",
"@tensorflow/tfjs-node": "^4.2.0",
"@types/node": "^18.15.5",
Expand All @@ -80,17 +80,17 @@
"vitest": "^0.30.1"
},
"dependencies": {
"@llama-node/cli": "0.1.5"
"@llama-node/cli": "0.1.6"
},
"optionalDependencies": {
"@llama-node/core": "0.1.5",
"@llama-node/llama-cpp": "0.1.5",
"@llama-node/rwkv-cpp": "0.1.5"
"@llama-node/core": "0.1.6",
"@llama-node/llama-cpp": "0.1.6",
"@llama-node/rwkv-cpp": "0.1.6"
},
"peerDependencies": {
"@llama-node/cli": "0.1.5",
"@llama-node/core": "0.1.5",
"@llama-node/llama-cpp": "0.1.5",
"@llama-node/rwkv-cpp": "0.1.5"
"@llama-node/cli": "0.1.6",
"@llama-node/core": "0.1.6",
"@llama-node/llama-cpp": "0.1.6",
"@llama-node/rwkv-cpp": "0.1.6"
}
}
4 changes: 2 additions & 2 deletions packages/cli/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@llama-node/cli",
"version": "0.1.5",
"version": "0.1.6",
"description": "",
"main": "index.js",
"type": "commonjs",
Expand All @@ -12,7 +12,7 @@
"test": "tsx src/index.ts"
},
"dependencies": {
"@llama-node/core": "0.1.5",
"@llama-node/core": "0.1.6",
"yargs": "^17.7.1"
},
"author": "",
Expand Down
45 changes: 0 additions & 45 deletions packages/cli/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,54 +93,9 @@ class InferenceCommand implements yargs.CommandModule {
}
}

/**
 * `llama convert` subcommand: converts a llama pth model directory into a
 * ggml model of the requested quantization type via the native `convert`
 * binding. Marked "not ready yet" by its own description.
 */
class ConvertCommand implements yargs.CommandModule<any, any> {
    command = "convert";
    describe = "Convert llama pth to ggml, not ready yet";

    builder(args: yargs.Argv) {
        return args
            .help("help")
            .example(
                "llama convert --dir ./model --type q4_0",
                "Convert pth to q4_0 ggml model"
            )
            .options({
                dir: {
                    describe: "The directory of model and tokenizer directory",
                    type: "string",
                    demandOption: true,
                },
            })
            .options({
                type: {
                    describe: "The type of model",
                    type: "string",
                    choices: convertType,
                    demandOption: true,
                },
            });
    }

    async handler(args: yargs.ArgumentsCamelCase) {
        const modelDir = args.dir as string;
        const targetType = args.type as ConvertType;

        // Resolve a relative --dir against the current working directory.
        const resolvedDir = path.isAbsolute(modelDir)
            ? modelDir
            : path.join(process.cwd(), modelDir);

        if (!existsSync(resolvedDir)) {
            console.error(`Directory ${resolvedDir} does not exist`);
            return;
        }

        // The native binding takes the model type as its index within
        // the convertType list rather than the string itself.
        const elementType = convertType.findIndex((t) => t === targetType);
        await convert(resolvedDir, elementType);
        console.log("Convert successfully");
    }
}

(yargs as yargs.Argv<any | CLIInferenceArguments>)
.scriptName("llama")
.usage("$0 <cmd> [args]")
.command(new ConvertCommand())
.command(new InferenceCommand())
.demandCommand(1, "You need at least one command before moving on")
.strict()
Expand Down
2 changes: 1 addition & 1 deletion packages/core/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@llama-node/core",
"version": "0.1.5",
"version": "0.1.6",
"main": "index.js",
"types": "index.d.ts",
"napi": {
Expand Down
1 change: 1 addition & 0 deletions packages/docs/docs/start.md
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ const config = {
useMlock: false,
embedding: false,
useMmap: true,
nGpuLayers: 0
};
const template = `How are you?`;
Expand Down
2 changes: 1 addition & 1 deletion packages/llama-cpp/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@llama-node/llama-cpp",
"version": "0.1.5",
"version": "0.1.6",
"main": "index.js",
"types": "index.d.ts",
"napi": {
Expand Down
2 changes: 1 addition & 1 deletion packages/rwkv-cpp/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@llama-node/rwkv-cpp",
"version": "0.1.5",
"version": "0.1.6",
"main": "index.js",
"types": "index.d.ts",
"napi": {
Expand Down
Loading

0 comments on commit 7a8229f

Please sign in to comment.