From fab51a419655c21e084064b3e69e5ba2f38fa5c4 Mon Sep 17 00:00:00 2001 From: Yoann Couble Date: Tue, 2 Jul 2024 11:26:05 +0200 Subject: [PATCH 1/3] First tentative of implementation of the mistralai tracer for issue #5 --- package.json | 9 +++- src/tracers/mistralai_tracer.ts | 73 ++++++++++++++++++++++++++++++++- src/tracers/openai_tracer.ts | 5 +-- src/tracers/utils.ts | 7 +++- 4 files changed, 86 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index eb379f0..3778afc 100644 --- a/package.json +++ b/package.json @@ -45,15 +45,20 @@ "require-in-the-middle": "^7.3.0" }, "devDependencies": { + "@mistralai/mistralai": "^0.5.0", "@types/node": "^20.14.2", + "openai": "^4.49.0", "tsc-watch": "^5.0.3", - "typescript": "^5.4.5", - "openai": "^4.49.0" + "typescript": "^5.4.5" }, "peerDependencies": { + "@mistralai/mistralai": "^0.5.0", "openai": "^4.49.0" }, "peerDependenciesMeta": { + "@mistralai/mistralai": { + "optional": true + }, "openai": { "optional": true } diff --git a/src/tracers/mistralai_tracer.ts b/src/tracers/mistralai_tracer.ts index be28dc0..0de67b0 100644 --- a/src/tracers/mistralai_tracer.ts +++ b/src/tracers/mistralai_tracer.ts @@ -1,3 +1,72 @@ -// import OpenAI from "openai"; +import { OnRequireFn } from "require-in-the-middle"; +import MistralClient, { + ChatCompletionResponse, + ChatCompletionResponseChunk, + ChatRequest, + ChatRequestOptions, +} from "@mistralai/mistralai"; -// import hooks from "promised-hooks"; +import EcoLogitsData from "../tracers/utils"; +import { BaseInstrumentor } from "./baseInstrumentor"; + +const PROVIDER = "mistralai"; + +/** + * Wrapper around the chat method to add impacts to the response + * + */ + +/** + * Wrap the two chat methods to add impacts to the response + */ +class MistralAiWrapper extends MistralClient { + chat: MistralClient["chat"] = async (request, options?) => { + const timerStart = new Date().getTime(); + return super.chat(request, options).then(async (response) => { + const requestLatency = new Date().getTime() - timerStart; + const ecologitsData = await EcoLogitsData.build(); + const tokens = response.usage?.completion_tokens || 0; + const impacts = ecologitsData.computeLlmImpacts( + PROVIDER, + request.model, + tokens, + requestLatency + ); + return { ...response, impacts }; + }); + }; + + // chatStream: MistralClient["chatStream"] = async (request, options?) 
=> { + // const timerStart = new Date().getTime(); + // for await (const item of super.chatStream(request, options)) { + // TODO: implement token counting + // }; + // }; +} + +/** + * Wraps the chat method to add impacts to the response + * + */ +const chatCompletionsCreateHook: OnRequireFn = ( + exported: any, + name: string +) => { + if (name === PROVIDER) { + console.debug(`Hooking ${name}`); + exported = MistralAiWrapper; + } else { + console.debug(`Skipping ${name}`); + } + return exported; +}; + +/** + * Instrument mistralai chat completions to add impacts to the response + * + */ +export class MistralAiInstrumentor extends BaseInstrumentor { + constructor() { + super(PROVIDER, chatCompletionsCreateHook); + } +} diff --git a/src/tracers/openai_tracer.ts b/src/tracers/openai_tracer.ts index 433af8b..a00268b 100644 --- a/src/tracers/openai_tracer.ts +++ b/src/tracers/openai_tracer.ts @@ -1,4 +1,4 @@ -import { Hook, OnRequireFn } from "require-in-the-middle"; +import { OnRequireFn } from "require-in-the-middle"; import OpenAi from "openai"; import type { APIPromise, RequestOptions } from "openai/core"; import { Stream } from "openai/streaming"; @@ -124,8 +124,7 @@ export class OpenAIInstrumentor extends BaseInstrumentor { const chatCompletionsCreateHook: OnRequireFn = ( exported: any, - name: string, - baseDir + name: string ) => { if (name === "openai") { console.debug(`Hooking ${name}`); diff --git a/src/tracers/utils.ts b/src/tracers/utils.ts index c228d08..00c23b7 100644 --- a/src/tracers/utils.ts +++ b/src/tracers/utils.ts @@ -11,6 +11,8 @@ type ModelData = { sources: string; }; +let cachedData: EcoLogitsData | undefined; + class EcoLogitsData { data: ModelData[] = []; @@ -22,9 +24,10 @@ class EcoLogitsData { } static async build() { + if (cachedData) return cachedData; const url = "https://raw.githubusercontent.com/genai-impact/ecologits/main/ecologits/data/models.csv"; - return fetch(url).then((res) => { + const data = await fetch(url).then((res) => { return res.text().then( (text) => new EcoLogitsData( @@ -49,6 +52,8 @@ class EcoLogitsData { ) ); }); + cachedData = data; + return data; } findModel(provider: string, name: string): ModelData | undefined { From 4bceb0c30ee8904b0f2edd0da2289082e7b804f1 Mon Sep 17 00:00:00 2001 From: Yoann Couble Date: Tue, 16 Jul 2024 13:04:08 +0200 Subject: [PATCH 2/3] before refacto --- package.json | 2 +- src/test.ts | 4 ++-- src/tracers/mistralai_tracer.ts | 24 ++++++++++++++++++++---- src/tracers/openai_tracer.ts | 1 - 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 3778afc..6ae3b4b 100644 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ }, "peerDependencies": { "@mistralai/mistralai": "^0.5.0", - "openai": "^4.49.0" + "openai": "^4.1.0" }, "peerDependenciesMeta": { "@mistralai/mistralai": { diff --git a/src/test.ts b/src/test.ts index 94f42e4..0bede24 100644 --- a/src/test.ts +++ b/src/test.ts @@ -1,10 +1,10 @@ import type { Impacts } from "./index"; import { Ecologits } from "./index"; -Ecologits.init(); import OpenAI from "openai"; - import type OpenAITypes from "openai"; // TODO : remove dependency +Ecologits.init(); + const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY, }); diff --git a/src/tracers/mistralai_tracer.ts b/src/tracers/mistralai_tracer.ts index 0de67b0..e5f73e6 100644 --- a/src/tracers/mistralai_tracer.ts +++ b/src/tracers/mistralai_tracer.ts @@ -36,11 +36,27 @@ class MistralAiWrapper extends MistralClient { }); }; - // chatStream: MistralClient["chatStream"] 
= async (request, options?) => { + // chatStream: MistralClient["chatStream"] = async function* (request, options?) { // const timerStart = new Date().getTime(); - // for await (const item of super.chatStream(request, options)) { - // TODO: implement token counting - // }; + // const ecologitsData = await EcoLogitsData.build(); + // // let tokens = 0; + // const stream = super.chatStream(request, options); + + // async function* iterator() { + // for await (const item of stream) { + // // tokens += 1; + // const tokens = item.usage?.completion_tokens || 0; + // const requestLatency = new Date().getTime() - timerStart; + // const impacts = ecologitsData.computeLlmImpacts( + // PROVIDER, + // request.model, + // tokens, + // requestLatency + // ); + // yield ({ ...item, impacts }); + // }; + // } + // return iterator(); // }; } diff --git a/src/tracers/openai_tracer.ts b/src/tracers/openai_tracer.ts index a00268b..c84bce4 100644 --- a/src/tracers/openai_tracer.ts +++ b/src/tracers/openai_tracer.ts @@ -110,7 +110,6 @@ class ChatWraper extends OpenAi.Chat { class OpenAiWrapper extends OpenAi { chat: OpenAi.Chat = new ChatWraper(this); - toto: string = "toto"; } /** * Instrument openai chat completions to add impacts to the response From bb978c265e164c1bb31fd2fb07007f1efe6f6cf0 Mon Sep 17 00:00:00 2001 From: Yoann Couble Date: Thu, 18 Jul 2024 09:57:42 +0200 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=9A=A7=20Refactorisation=20propositio?= =?UTF-8?q?n?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 5 +- README.md | 84 +++++++++--------- libs/core/package.json | 15 ++++ .../utils.ts => libs/core/src/ecologits.ts | 64 ++++++-------- {src => libs/core/src}/impacts/dag.ts | 39 +++++--- {src => libs/core/src}/impacts/default.ts | 12 +-- {src => libs/core/src}/impacts/llm.ts | 0 libs/core/src/index.ts | 3 + libs/core/src/types.ts | 24 +++++ libs/core/tsconfig.json | 19 ++++ package.json | 41 ++++----- providers/mistral/README.md | 45 ++++++++++ providers/mistral/package.json | 54 ++++++++++++ providers/mistral/src/index.ts | 4 + providers/mistral/src/tracer.ts | 62 +++++++++++++ providers/mistral/tsconfig.json | 17 ++++ providers/openai/README.md | 44 ++++++++++ providers/openai/package.json | 54 ++++++++++++ providers/openai/src/index.ts | 1 + .../openai/src/tracer.ts | 47 +++------- providers/openai/tsconfig.json | 17 ++++ src/@types/index.d.ts | 24 ----- src/ecologits.ts | 43 --------- src/exceptions.ts | 0 src/index.ts | 3 - src/model_repository.ts | 0 src/tracers/anthropic_tracer.ts | 0 src/tracers/baseInstrumentor.ts | 31 ------- src/tracers/cohere_tracer.ts | 0 src/tracers/huggingface_tracer.ts | 0 src/tracers/mistralai_tracer.ts | 88 ------------------- test/mistral/package.json | 56 ++++++++++++ test/mistral/src/index.ts | 27 ++++++ test/mistral/tsconfig.json | 17 ++++ test/openai/package.json | 56 ++++++++++++ src/test.ts => test/openai/src/index.ts | 13 ++- test/openai/tsconfig.json | 17 ++++ turbo.json | 20 +++++ 38 files changed, 693 insertions(+), 353 deletions(-) create mode 100644 libs/core/package.json rename src/tracers/utils.ts => libs/core/src/ecologits.ts (50%) rename {src => libs/core/src}/impacts/dag.ts (88%) rename {src => libs/core/src}/impacts/default.ts (67%) rename {src => libs/core/src}/impacts/llm.ts (100%) create mode 100644 libs/core/src/index.ts create mode 100644 libs/core/src/types.ts create mode 100644 libs/core/tsconfig.json create mode 100644 providers/mistral/README.md create mode 100644 
providers/mistral/package.json
 create mode 100644 providers/mistral/src/index.ts
 create mode 100644 providers/mistral/src/tracer.ts
 create mode 100644 providers/mistral/tsconfig.json
 create mode 100644 providers/openai/README.md
 create mode 100644 providers/openai/package.json
 create mode 100644 providers/openai/src/index.ts
 rename src/tracers/openai_tracer.ts => providers/openai/src/tracer.ts (70%)
 create mode 100644 providers/openai/tsconfig.json
 delete mode 100644 src/@types/index.d.ts
 delete mode 100644 src/ecologits.ts
 delete mode 100644 src/exceptions.ts
 delete mode 100644 src/index.ts
 delete mode 100644 src/model_repository.ts
 delete mode 100644 src/tracers/anthropic_tracer.ts
 delete mode 100644 src/tracers/baseInstrumentor.ts
 delete mode 100644 src/tracers/cohere_tracer.ts
 delete mode 100644 src/tracers/huggingface_tracer.ts
 delete mode 100644 src/tracers/mistralai_tracer.ts
 create mode 100644 test/mistral/package.json
 create mode 100644 test/mistral/src/index.ts
 create mode 100644 test/mistral/tsconfig.json
 create mode 100644 test/openai/package.json
 rename src/test.ts => test/openai/src/index.ts (64%)
 create mode 100644 test/openai/tsconfig.json
 create mode 100644 turbo.json

diff --git a/.gitignore b/.gitignore
index 30a9897..791b361 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,7 @@
 node_modules
 yarn.lock
 es
-.DS_Store
\ No newline at end of file
+.DS_Store
+.turbo
+yarn-error.log
+.env
\ No newline at end of file
diff --git a/README.md b/README.md
index 982dc1f..3bc4eb2 100644
--- a/README.md
+++ b/README.md
@@ -6,68 +6,68 @@ This repository is a tentative port of the [genai-impact/ecologits python lib
 ## Installation
 
-### Using npm
+Installation depends on the model providers that you use in your code.
+
+### OpenAI
 
 ```bash
-npm install @genai-impact/ecologits.js
+npm install @genai-impact/ecologits-openai
 ```
 
-### Using yarn
+### Mistral AI
 
 ```bash
-yarn add @genai-impact/ecologits.js
+npm install @genai-impact/ecologits-mistral
 ```
 
 ## Usage
 
-**Warning**: as usual, you'll need to provide your credentials to your API provider in the environment variables as instructed by them, or pass them directly to the client as you would normally.
+Usage depends on the model provider used in your code.
+But the principle is simple:
+
+- Import the provider wrapper from `@genai-impact/ecologits-<provider>`.
+- Use it (e.g. `MistralClient` / `OpenAI`) as you usually would.
+- The wrapper adds an `impacts` attribute to the response containing the EcoLogits metrics.
+
+> [!WARNING]
+> As usual, you'll need to provide your credentials to your API provider in the environment variables as instructed by them, or pass them directly to the client as you would normally.
 
 ```ts
-import { Ecologits, type Impacts } from "@genai-impact/ecologits.js";
-import OpenAI from "openai";
+import MistralClient from "@genai-impact/ecologits-mistral";
+import { ChatCompletionResponse } from "@mistralai/mistralai";
+import { Impacts } from "core";
+
+const apiKey = process.env.MISTRAL_API_KEY;
 
-Ecologits.init(); // Call ecologits **before** any other relevant AI package import
+const client = new MistralClient(apiKey);
 
-const client = new OpenAI();
 const main = async () => {
-  const response = (await client.chat.completions.create({
-    messages: [{ role: "user", content: "Tell me a funny joke!" }],
-    model: "gpt-3.5-turbo",
-  })) as OpenAI.Chat.Completions.ChatCompletion & { impacts: Impacts };
-
-  console.log(
-    `Joke: ${response.choices[0].message.content}`
-  );
-  console.log(
-    `Token generated: ${response.usage.completion_tokens} tokens`
-  );
-
-  // Get estimated environmental impacts of the inference
-  console.log(
-    `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}`
-  );
-  console.log(
-    `GHG emissions: ${response.impacts.gwp.value} ${response.impacts.gwp.unit}`
-  );
+  try {
+    const response = (await client.chat({
+      model: "mistral-tiny",
+      messages: [{ role: "user", content: "What is the best French cheese?" }],
+    })) as ChatCompletionResponse & { impacts: Impacts };
+    // Get estimated environmental impacts of the inference
+    console.log(
+      `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}`
+    );
+    console.log(
+      `GHG emissions: ${response.impacts.gwp.value} ${response.impacts.gwp.unit}`
+    );
+  } catch (e) {
+    console.error(e);
+    throw e;
+  }
 };
 main();
 ```
 
-## Porting Status
+## Contributing
 
-- [x] `openAI` tracer
-- [ ] `mistral` tracer (branch `feat/mistral_tracer`)
-- [ ] `anthropic` tracer
-- [ ] `huggingface` tracer
-- [ ] `cohere` tracer
+Look through the open issues and feel free to contribute to this project by opening an issue or a pull request.
+We're always open to covering more model providers.
 
-## Current challenges
+### Current challenges
 
-- [ ] 🔥 Patching the providers responses with the `impacts` object like in the python library (with seamless types exposition)
-- [ ] publishing the package to npm
+- [ ] Set up CI with automatic publication of each package to npm
 - [ ] port tests from python to js
-- [ ] reduce work to keep the library up-to-date with the python library (csv files, etc)
-
-## Contributing
-
-Feel free to contribute to this project by opening an issue or a pull request.
+- [ ] reduce work to keep the library up-to-date with the python library (etc) diff --git a/libs/core/package.json b/libs/core/package.json new file mode 100644 index 0000000..54dd1ef --- /dev/null +++ b/libs/core/package.json @@ -0,0 +1,15 @@ +{ + "name": "core", + "version": "0.0.1", + "description": "Core library for the Ecologits project, port of the ecologits methodology to TypeScript", + "type": "module", + "main": "es/index.js", + "types": "es/index.d.ts", + "source": "src/index.ts", + "scripts": { + "build": "tsc", + "prepublishOnly": "yarn build", + "watch": "tsc-watch --noClear", + "clean": "rm -rf es" + } +} diff --git a/src/tracers/utils.ts b/libs/core/src/ecologits.ts similarity index 50% rename from src/tracers/utils.ts rename to libs/core/src/ecologits.ts index 00c23b7..b6fc1b2 100644 --- a/src/tracers/utils.ts +++ b/libs/core/src/ecologits.ts @@ -1,6 +1,6 @@ import fetch from "node-fetch"; -import computeLlmImpacts from "../impacts/dag"; -import { DEFAULT_IMPACT } from "../impacts/default"; +import computeLlmImpacts from "./impacts/dag.js"; +import { DEFAULT_IMPACT } from "./impacts/default.js"; type ModelData = { provider: string; @@ -11,8 +11,6 @@ type ModelData = { sources: string; }; -let cachedData: EcoLogitsData | undefined; - class EcoLogitsData { data: ModelData[] = []; @@ -23,39 +21,6 @@ class EcoLogitsData { this.data = data; } - static async build() { - if (cachedData) return cachedData; - const url = - "https://raw.githubusercontent.com/genai-impact/ecologits/main/ecologits/data/models.csv"; - const data = await fetch(url).then((res) => { - return res.text().then( - (text) => - new EcoLogitsData( - text - .split("\n") - .slice(1, text.length) - .map((line) => { - const infos = line.split(","); - return { - provider: infos[0], - name: infos[1], - totalParameters: infos[2] - .split(";") - .map((x) => parseFloat(x)), - activeParameters: infos[3] - .split(";") - .map((x) => parseFloat(x)), - warnings: infos[4], - sources: infos[5], - } as ModelData; - }) - ) - ); - }); - cachedData = data; - return data; - } - findModel(provider: string, name: string): ModelData | undefined { return this.data.find( (model) => model.provider === provider && model.name === name @@ -86,5 +51,28 @@ class EcoLogitsData { ); } } +const url = + "https://raw.githubusercontent.com/genai-impact/ecologits/main/ecologits/data/models.csv"; +const ecoLogitsData: EcoLogitsData = await fetch(url).then((res) => { + return res.text().then( + (text) => + new EcoLogitsData( + text + .split("\n") + .slice(1, text.length) + .map((line) => { + const infos = line.split(","); + return { + provider: infos[0], + name: infos[1], + totalParameters: infos[2].split(";").map((x) => parseFloat(x)), + activeParameters: infos[3].split(";").map((x) => parseFloat(x)), + warnings: infos[4], + sources: infos[5], + } as ModelData; + }) + ) + ); +}); -export default EcoLogitsData; +export default ecoLogitsData; diff --git a/src/impacts/dag.ts b/libs/core/src/impacts/dag.ts similarity index 88% rename from src/impacts/dag.ts rename to libs/core/src/impacts/dag.ts index a0b422c..7ae07b8 100644 --- a/src/impacts/dag.ts +++ b/libs/core/src/impacts/dag.ts @@ -1,4 +1,5 @@ -import * as Llm from "./llm"; +import { ImpactMetric, Impacts } from "../types.js"; +import * as Llm from "./llm.js"; const MODEL_QUANTIZATION_BITS = 4; @@ -85,14 +86,30 @@ const DEFAULT_CALCULATED_PROPS: CalculatedProps = { requestEmbodiedAdpe: 0, requestEmbodiedPe: 0, }; +const orderedFuncs = [ + "gpuEnergy", + "generationLatency", + "modelRequiredMemory", 
+ "gpuRequiredCount", + "serverEnergy", + "requestEnergy", + "requestUsageGwp", + "requestUsageAdpe", + "requestUsagePe", + "serverGpuEmbodiedGwp", + "serverGpuEmbodiedAdpe", + "serverGpuEmbodiedPe", + "requestEmbodiedGwp", + "requestEmbodiedAdpe", + "requestEmbodiedPe", +]; function dagExecute(props: ComputeLLmImpactsProps) { const allProps: ComputeLLmImpactsProps & CalculatedProps = { ...DEFAULT_CALCULATED_PROPS, // add default values to const that are going to be calculated ...props, }; - const funcs = Object.keys(Llm); // get all calcul functions - return funcs.reduce((acc, fn) => { + return orderedFuncs.reduce((acc, fn) => { const res = Llm[fn as keyof typeof Llm](allProps); allProps[fn as keyof typeof DEFAULT_CALCULATED_PROPS] = res; return { ...acc, [fn]: res }; @@ -123,7 +140,7 @@ export default function computeLlmImpacts( ifElectricityMixGwp: number = IF_ELECTRICITY_MIX_GWP, ifElectricityMixAdpe: number = IF_ELECTRICITY_MIX_ADPE, ifElectricityMixPe: number = IF_ELECTRICITY_MIX_PE -) { +): Impacts { const results = dagExecute({ modelActiveParameterCount, modelTotalParameterCount, @@ -149,43 +166,43 @@ export default function computeLlmImpacts( ifElectricityMixAdpe, ifElectricityMixPe, }); - const energy = { + const energy: ImpactMetric = { type: "energy", name: "Energy", unit: "kWh", value: results["requestEnergy"], }; - const gwpUsage = { + const gwpUsage: ImpactMetric = { type: "GWP", name: "Global Warming Potential", unit: "kgCO2eq", value: results["requestUsageGwp"], }; - const adpeUsage = { + const adpeUsage: ImpactMetric = { type: "ADPe", name: "Abiotic Depletion Potential (elements)", unit: "kgSbeq", value: results["requestUsageAdpe"], }; - const peUsage = { + const peUsage: ImpactMetric = { type: "PE", name: "Primary Energy", unit: "MJ", value: results["requestUsagePe"], }; - const gwpEmbodied = { + const gwpEmbodied: ImpactMetric = { type: "GWP", name: "Global Warming Potential", unit: "kgCO2eq", value: results["requestEmbodiedGwp"], }; - const adpeEmbodied = { + const adpeEmbodied: ImpactMetric = { type: "ADPe", name: "Abiotic Depletion Potential (elements)", unit: "kgSbeq", value: results["requestEmbodiedAdpe"], }; - const peEmbodied = { + const peEmbodied: ImpactMetric = { type: "PE", name: "Primary Energy", unit: "MJ", diff --git a/src/impacts/default.ts b/libs/core/src/impacts/default.ts similarity index 67% rename from src/impacts/default.ts rename to libs/core/src/impacts/default.ts index bec22ba..f2b4395 100644 --- a/src/impacts/default.ts +++ b/libs/core/src/impacts/default.ts @@ -1,29 +1,31 @@ -const energy = { +import { Impacts, ImpactMetric } from "../types.js"; + +const energy: ImpactMetric = { type: "energy", name: "Energy", unit: "kWh", value: 0, }; -const gwp = { +const gwp: ImpactMetric = { type: "GWP", name: "Global Warming Potential", unit: "kgCO2eq", value: 0, }; -const adpe = { +const adpe: ImpactMetric = { type: "ADPe", name: "Abiotic Depletion Potential (elements)", unit: "kgSbeq", value: 0, }; -const pe = { +const pe: ImpactMetric = { type: "PE", name: "Primary Energy", unit: "MJ", value: 0, }; -export const DEFAULT_IMPACT = { +export const DEFAULT_IMPACT: Impacts = { energy, gwp: gwp, adpe: adpe, diff --git a/src/impacts/llm.ts b/libs/core/src/impacts/llm.ts similarity index 100% rename from src/impacts/llm.ts rename to libs/core/src/impacts/llm.ts diff --git a/libs/core/src/index.ts b/libs/core/src/index.ts new file mode 100644 index 0000000..75c549a --- /dev/null +++ b/libs/core/src/index.ts @@ -0,0 +1,3 @@ +export { default as computeLlmImpacts 
} from "./impacts/dag.js"; +export { default, default as ecoLogitsData } from "./ecologits.js"; +export type * from "./types.js"; diff --git a/libs/core/src/types.ts b/libs/core/src/types.ts new file mode 100644 index 0000000..d022cc3 --- /dev/null +++ b/libs/core/src/types.ts @@ -0,0 +1,24 @@ +export type ImpactMetric = { + type: "energy" | "GWP" | "ADPe" | "PE"; + name: string; + unit: string; + value: number; +}; + +export type Impacts = { + energy: ImpactMetric; + gwp: ImpactMetric; + adpe: ImpactMetric; + pe: ImpactMetric; + usage: { + energy: ImpactMetric; + gwp: ImpactMetric; + adpe: ImpactMetric; + pe: ImpactMetric; + }; + embodied: { + gwp: ImpactMetric; + adpe: ImpactMetric; + pe: ImpactMetric; + }; +}; diff --git a/libs/core/tsconfig.json b/libs/core/tsconfig.json new file mode 100644 index 0000000..8404b10 --- /dev/null +++ b/libs/core/tsconfig.json @@ -0,0 +1,19 @@ +{ + "includes": ["src/@types/*.d.ts, @types/*.d.ts"], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./es", + "target": "ESNext", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "jsx": "react-jsx" + } +} diff --git a/package.json b/package.json index 6ae3b4b..c4de37b 100644 --- a/package.json +++ b/package.json @@ -5,12 +5,18 @@ "main": "es/index.js", "types": "es/index.d.ts", "source": "src/index.ts", + "private": true, + "workspaces": [ + "libs/*", + "providers/*", + "test/*" + ], "scripts": { - "build": "tsc", + "build": "turbo run build", + "dev": "turbo run dev", "prepublishOnly": "yarn build", - "watch": "tsc-watch --noClear", - "test": "node es/test.js", - "clean": "rm -rf es" + "test": "turbo run test", + "clean": "turbo run clean" }, "keywords": [ "ecologits", @@ -26,43 +32,30 @@ "footprint", "llm" ], - "packageManager": "yarn@1.22.18", + "packageManager": "yarn@1.22.19", "files": [ "es" ], "author": { - "name": "Noé GRANGE", - "email": "noe.grange@epitech.eu" + "name": "GenAI-Impact", + "email": "yoann.couble@valraiso.fr" }, "homepage": "ecologits.ai", "repository": { "type": "git", - "url": "git://github.com/valraiso-innov/ecologits.js.git" + "url": "git://github.com/genai-impact/ecologits.js.git" }, "license": "MPL-2.0", "dependencies": { - "node-fetch": "<3.0.0", - "require-in-the-middle": "^7.3.0" + "node-fetch": "<3.0.0" }, "devDependencies": { - "@mistralai/mistralai": "^0.5.0", "@types/node": "^20.14.2", - "openai": "^4.49.0", "tsc-watch": "^5.0.3", + "turbo": "^2.0.7", "typescript": "^5.4.5" }, - "peerDependencies": { - "@mistralai/mistralai": "^0.5.0", - "openai": "^4.1.0" - }, - "peerDependenciesMeta": { - "@mistralai/mistralai": { - "optional": true - }, - "openai": { - "optional": true - } - }, + "peerDependencies": {}, "engines": { "node": ">=14.0.0" } diff --git a/providers/mistral/README.md b/providers/mistral/README.md new file mode 100644 index 0000000..d64526b --- /dev/null +++ b/providers/mistral/README.md @@ -0,0 +1,45 @@ +# Ecologits.js - Mistral provider + +## Install + +### `npm` + +``` +npm install @genai-impact/ecologits-mistral +``` + +### `yarn` + +``` +yarn add @genai-impact/ecologits-mistral +``` + +## Usage + +```ts +import MistralClient from "@genai-impact/ecologits-mistral"; + +const apiKey = process.env.MISTRAL_API_KEY || "your_api_key"; + +const client = new MistralClient(apiKey); + +const main = async () => { + try { + 
const response = await client.chat({ + model: "mistral-tiny", + messages: [{ role: "user", content: "What is the best French cheese?" }], + }); + // Get estimated environmental impacts of the inference + console.log( + `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}` + ); + console.log( + `GHG emissions: ${response.impacts.gwp.value} ${response.impacts.gwp.unit}` + ); + } catch (e) { + console.error(e); + throw e; + } +}; +main(); +``` diff --git a/providers/mistral/package.json b/providers/mistral/package.json new file mode 100644 index 0000000..e86f7e1 --- /dev/null +++ b/providers/mistral/package.json @@ -0,0 +1,54 @@ +{ + "name": "@genai-impact/ecologits-mistral", + "version": "0.0.1", + "description": "Ecologits.js Wrapper for the MistralAI API", + "type": "module", + "main": "es/index.js", + "types": "es/index.d.ts", + "source": "src/index.ts", + "scripts": { + "build": "tsc", + "prepublishOnly": "yarn build", + "watch": "tsc-watch --noClear", + "clean": "rm -rf es" + }, + "keywords": [ + "ecologits", + "openai", + "chatGPT", + "gpt4o", + "gpt3.5", + "climate", + "co2", + "measure", + "carbon", + "footprint", + "llm" + ], + "packageManager": "yarn@1.22.18", + "files": [ + "es" + ], + "author": { + "name": "Yoann Couble for GenAI Impact", + "email": "yoann.couble@valraiso.fr" + }, + "homepage": "ecologits.ai", + "repository": { + "type": "git", + "url": "git://github.com/genai-impact/ecologits.js.git" + }, + "license": "MPL-2.0", + "dependencies": { + "core": "*" + }, + "devDependencies": { + "@mistralai/mistralai": "^0.5.0" + }, + "peerDependencies": { + "@mistralai/mistralai": "^0.5.0" + }, + "engines": { + "node": ">=18.0.0" + } +} diff --git a/providers/mistral/src/index.ts b/providers/mistral/src/index.ts new file mode 100644 index 0000000..e24365f --- /dev/null +++ b/providers/mistral/src/index.ts @@ -0,0 +1,4 @@ +export { + MistralAiWrapper as default, + MistralAiWrapper as MistralClient, +} from "./tracer.js"; diff --git a/providers/mistral/src/tracer.ts b/providers/mistral/src/tracer.ts new file mode 100644 index 0000000..9905825 --- /dev/null +++ b/providers/mistral/src/tracer.ts @@ -0,0 +1,62 @@ +import MistralClient from "@mistralai/mistralai"; +import { ecoLogitsData } from "core"; + +const PROVIDER = "mistralai"; + +/** + * Wrapper around the chat method to add impacts to the response + * + */ + +/** + * Wrap the two chat methods to add impacts to the response + */ +export class MistralAiWrapper extends MistralClient { + oldChat: MistralClient["chat"]; + constructor( + apiKey?: string, + endpoint?: string, + maxRetries?: number, + timeout?: number + ) { + super(apiKey, endpoint, maxRetries, timeout); + this.oldChat = this.chat; + this.chat = async (request, options?) => { + const timerStart = new Date().getTime(); + return this.oldChat(request, options).then(async (response) => { + const requestLatency = new Date().getTime() - timerStart; + const tokens = response.usage?.completion_tokens || 0; + const impacts = ecoLogitsData.computeLlmImpacts( + PROVIDER, + request.model, + tokens, + requestLatency + ); + return { ...response, impacts }; + }); + }; + } + + // chatStream: MistralClient["chatStream"] = async function* (request, options?) 
{
+  //     const timerStart = new Date().getTime();
+  //     // let tokens = 0;
+  //     const stream = super.chatStream(request, options);
+
+  //     async function* iterator() {
+  //       for await (const item of stream) {
+  //         // tokens += 1;
+  //         const tokens = item.usage?.completion_tokens || 0;
+  //         const requestLatency = new Date().getTime() - timerStart;
+  //         const impacts = ecoLogitsData.computeLlmImpacts(
+  //           PROVIDER,
+  //           request.model,
+  //           tokens,
+  //           requestLatency
+  //         );
+  //         yield { ...item, impacts };
+  //       }
+  //     }
+  //     return iterator();
+  //   }
+}
diff --git a/providers/mistral/tsconfig.json b/providers/mistral/tsconfig.json
new file mode 100644
index 0000000..f615675
--- /dev/null
+++ b/providers/mistral/tsconfig.json
@@ -0,0 +1,17 @@
+{
+  "compilerOptions": {
+    "rootDir": "./src",
+    "outDir": "./es",
+    "target": "ESNext",
+    "module": "NodeNext",
+    "resolveJsonModule": true,
+    "strict": true,
+    "forceConsistentCasingInFileNames": true,
+    "declaration": true,
+    "declarationMap": true,
+    "sourceMap": true,
+    "jsx": "react-jsx",
+    "preserveWatchOutput": true,
+    "removeComments": true
+  }
+}
diff --git a/providers/openai/README.md b/providers/openai/README.md
new file mode 100644
index 0000000..99c89e5
--- /dev/null
+++ b/providers/openai/README.md
@@ -0,0 +1,44 @@
+# Ecologits.js - OpenAI provider
+
+## Install
+
+### `npm`
+
+```
+npm install @genai-impact/ecologits-openai
+```
+
+### `yarn`
+
+```
+yarn add @genai-impact/ecologits-openai
+```
+
+## Usage
+
+```ts
+import OpenAI from "@genai-impact/ecologits-openai";
+
+const client = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY,
+});
+const main = async () => {
+  try {
+    const response = await client.chat.completions.create({
+      messages: [{ role: "user", content: "Tell me a funny joke!"
}], + model: "gpt-3.5-turbo", + }); + // Get estimated environmental impacts of the inference + console.log( + `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}` + ); + console.log( + `GHG emissions: ${response.impacts.gwp.value} ${response.impacts.gwp.unit}` + ); + } catch (e) { + console.error(e); + throw e; + } +}; +main(); +``` diff --git a/providers/openai/package.json b/providers/openai/package.json new file mode 100644 index 0000000..c20be6e --- /dev/null +++ b/providers/openai/package.json @@ -0,0 +1,54 @@ +{ + "name": "@genai-impact/ecologits-openai", + "version": "0.0.1", + "description": "Ecologits.js Wrapper for the OpenAI API", + "type": "module", + "main": "es/index.js", + "types": "es/index.d.ts", + "source": "src/index.ts", + "scripts": { + "build": "tsc", + "prepublishOnly": "yarn build", + "watch": "tsc-watch --noClear", + "clean": "rm -rf es" + }, + "keywords": [ + "ecologits", + "openai", + "chatGPT", + "gpt4o", + "gpt3.5", + "climate", + "co2", + "measure", + "carbon", + "footprint", + "llm" + ], + "packageManager": "yarn@1.22.18", + "files": [ + "es" + ], + "author": { + "name": "Yoann Couble for GenAI Impact", + "email": "yoann.couble@valraiso.fr" + }, + "homepage": "ecologits.ai", + "repository": { + "type": "git", + "url": "git://github.com/genai-impact/ecologits.js.git" + }, + "license": "MPL-2.0", + "dependencies": { + "core": "*" + }, + "devDependencies": { + "openai": "^4.49.0" + }, + "peerDependencies": { + "openai": "^4.49.0" + }, + "engines": { + "node": ">=18.0.0" + } +} diff --git a/providers/openai/src/index.ts b/providers/openai/src/index.ts new file mode 100644 index 0000000..b4fa226 --- /dev/null +++ b/providers/openai/src/index.ts @@ -0,0 +1 @@ +export { default, default as OpenAI } from "./tracer.js"; diff --git a/src/tracers/openai_tracer.ts b/providers/openai/src/tracer.ts similarity index 70% rename from src/tracers/openai_tracer.ts rename to providers/openai/src/tracer.ts index c84bce4..a438761 100644 --- a/src/tracers/openai_tracer.ts +++ b/providers/openai/src/tracer.ts @@ -1,5 +1,4 @@ -import { OnRequireFn } from "require-in-the-middle"; -import OpenAi from "openai"; +import OriginOpenAI from "openai"; import type { APIPromise, RequestOptions } from "openai/core"; import { Stream } from "openai/streaming"; import type { @@ -11,8 +10,7 @@ import type { ChatCompletionCreateParams, } from "openai/resources/chat/completions"; -import EcoLogitsData from "../tracers/utils"; -import { BaseInstrumentor } from "./baseInstrumentor"; +import ecoLogitsData, { type Impacts } from "core"; const PROVIDER = "openai"; @@ -22,13 +20,12 @@ async function mapStream( stream: Stream ) { let tokens = 0; - const ecologitsData = await EcoLogitsData.build(); async function* iterator() { for await (const item of stream) { tokens += 1; const requestLatency = new Date().getTime() - timerStart.getTime(); - const impacts = ecologitsData.computeLlmImpacts( + const impacts = ecoLogitsData.computeLlmImpacts( PROVIDER, model, tokens, @@ -49,7 +46,7 @@ async function createStream( return mapStream(timerStart, model, res); } -class CompletionsWraper extends OpenAi.Chat.Completions { +class CompletionsWraper extends OriginOpenAI.Chat.Completions { create( body: ChatCompletionCreateParamsNonStreaming, options?: RequestOptions @@ -92,8 +89,7 @@ class CompletionsWraper extends OpenAi.Chat.Completions { return res.then(async (resp) => { const requestLatency = new Date().getTime() - timerStart.getTime(); const tokens = resp.usage?.completion_tokens 
|| 0; - const ecologitsData = await EcoLogitsData.build(); - const impacts = ecologitsData.computeLlmImpacts( + const impacts = ecoLogitsData.computeLlmImpacts( PROVIDER, body.model, tokens, @@ -104,32 +100,13 @@ class CompletionsWraper extends OpenAi.Chat.Completions { } } -class ChatWraper extends OpenAi.Chat { - completions: OpenAi.Chat.Completions = new CompletionsWraper(this._client); +class Chat extends OriginOpenAI.Chat { + completions: OriginOpenAI.Chat.Completions = new CompletionsWraper( + this._client + ); } -class OpenAiWrapper extends OpenAi { - chat: OpenAi.Chat = new ChatWraper(this); +export default class OpenAI extends OriginOpenAI { + //@ts-ignore : _options is considered "private" in openai types + chat: OriginOpenAI.Chat = new Chat(this); } -/** - * Instrument openai chat completions to add impacts to the response - * - */ -export class OpenAIInstrumentor extends BaseInstrumentor { - constructor() { - super("openai", chatCompletionsCreateHook); - } -} - -const chatCompletionsCreateHook: OnRequireFn = ( - exported: any, - name: string -) => { - if (name === "openai") { - console.debug(`Hooking ${name}`); - exported = OpenAiWrapper; - } else { - console.debug(`Skipping ${name}`); - } - return exported; -}; diff --git a/providers/openai/tsconfig.json b/providers/openai/tsconfig.json new file mode 100644 index 0000000..f615675 --- /dev/null +++ b/providers/openai/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "rootDir": "./src", + "outDir": "./es", + "target": "ESNext", + "module": "NodeNext", + "resolveJsonModule": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "jsx": "react-jsx", + "preserveWatchOutput": true, + "removeComments": true + } +} diff --git a/src/@types/index.d.ts b/src/@types/index.d.ts deleted file mode 100644 index 9d8ded3..0000000 --- a/src/@types/index.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -type ImpactPart = { - type: "energy" | "GWP" | "ADPe" | "PE"; - name: string; - unit: string; - value: number; -}; - -type Impacts = { - energy: ImpactPart; - gwp: ImpactPart; - adpe: ImpactPart; - pe: ImpactPart; - usage: { - energy: ImpactPart; - gwp: ImpactPart; - adpe: ImpactPart; - pe: ImpactPart; - }; - embodied: { - gwp: ImpactPart; - adpe: ImpactPart; - pe: ImpactPart; - }; -}; diff --git a/src/ecologits.ts b/src/ecologits.ts deleted file mode 100644 index 700a0c6..0000000 --- a/src/ecologits.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { OpenAIInstrumentor } from "./tracers/openai_tracer"; - -export class Ecologits { - static initialized: boolean = false; - constructor() { - console.log("Ecologits constructor"); - } - static async init() { - console.log("Ecologits init"); - if (!this.initialized) { - initInstruments(); - this.initialized = true; - } - } -} -const initInstruments = () => { - initOpenAIInstrumentor(); -}; -const initOpenAIInstrumentor = async () => { - const isOpenAIModuleAvailable: boolean = isModuleAvailable("openai"); - console.log(`Is OpenAI module available? 
${isOpenAIModuleAvailable}`); - if (isOpenAIModuleAvailable) { - const instrumentor = new OpenAIInstrumentor(); - instrumentor.instrument(); - } -}; - -/** - * True if the module is available - * @param moduleName - * @returns - */ -const isModuleAvailable = (moduleName: string): boolean => { - try { - require.resolve(moduleName); - return true; - } catch (error) { - if (error.code === "MODULE_NOT_FOUND") { - return false; - } - throw error; - } -}; diff --git a/src/exceptions.ts b/src/exceptions.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/index.ts b/src/index.ts deleted file mode 100644 index b7d6d04..0000000 --- a/src/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -type _Impacts = Impacts; -export type { _Impacts as Impacts }; -export { Ecologits } from "./ecologits"; diff --git a/src/model_repository.ts b/src/model_repository.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/tracers/anthropic_tracer.ts b/src/tracers/anthropic_tracer.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/tracers/baseInstrumentor.ts b/src/tracers/baseInstrumentor.ts deleted file mode 100644 index 606bdbe..0000000 --- a/src/tracers/baseInstrumentor.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Hook, OnRequireFn } from "require-in-the-middle"; -/** - * Base class to instrument a module - * - */ -export class BaseInstrumentor { - // TODO: This only works for Commonjs modules. For ESM modules, we need to use the import-in-the-middle package - private _hook: Hook; - moduleName: string; - onRequireFn: OnRequireFn; - /** - * - * @param moduleName name of the module to instrument - * @param onRequireFn what to do when the module is required - */ - constructor(moduleName: string, onRequireFn: OnRequireFn) { - this.moduleName = moduleName; - this.onRequireFn = onRequireFn; - } - /** - * It will apply the changes mmentioned in onRequireFn to the module mentioned in moduleName - */ - instrument(): void { - console.log(`Instrumenting ${this.moduleName}`); - this._hook = new Hook( - [this.moduleName], - { internals: true }, - this.onRequireFn - ); - } -} diff --git a/src/tracers/cohere_tracer.ts b/src/tracers/cohere_tracer.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/tracers/huggingface_tracer.ts b/src/tracers/huggingface_tracer.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/tracers/mistralai_tracer.ts b/src/tracers/mistralai_tracer.ts deleted file mode 100644 index e5f73e6..0000000 --- a/src/tracers/mistralai_tracer.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { OnRequireFn } from "require-in-the-middle"; -import MistralClient, { - ChatCompletionResponse, - ChatCompletionResponseChunk, - ChatRequest, - ChatRequestOptions, -} from "@mistralai/mistralai"; - -import EcoLogitsData from "../tracers/utils"; -import { BaseInstrumentor } from "./baseInstrumentor"; - -const PROVIDER = "mistralai"; - -/** - * Wrapper around the chat method to add impacts to the response - * - */ - -/** - * Wrap the two chat methods to add impacts to the response - */ -class MistralAiWrapper extends MistralClient { - chat: MistralClient["chat"] = async (request, options?) 
=> { - const timerStart = new Date().getTime(); - return super.chat(request, options).then(async (response) => { - const requestLatency = new Date().getTime() - timerStart; - const ecologitsData = await EcoLogitsData.build(); - const tokens = response.usage?.completion_tokens || 0; - const impacts = ecologitsData.computeLlmImpacts( - PROVIDER, - request.model, - tokens, - requestLatency - ); - return { ...response, impacts }; - }); - }; - - // chatStream: MistralClient["chatStream"] = async function* (request, options?) { - // const timerStart = new Date().getTime(); - // const ecologitsData = await EcoLogitsData.build(); - // // let tokens = 0; - // const stream = super.chatStream(request, options); - - // async function* iterator() { - // for await (const item of stream) { - // // tokens += 1; - // const tokens = item.usage?.completion_tokens || 0; - // const requestLatency = new Date().getTime() - timerStart; - // const impacts = ecologitsData.computeLlmImpacts( - // PROVIDER, - // request.model, - // tokens, - // requestLatency - // ); - // yield ({ ...item, impacts }); - // }; - // } - // return iterator(); - // }; -} - -/** - * Wraps the chat method to add impacts to the response - * - */ -const chatCompletionsCreateHook: OnRequireFn = ( - exported: any, - name: string -) => { - if (name === PROVIDER) { - console.debug(`Hooking ${name}`); - exported = MistralAiWrapper; - } else { - console.debug(`Skipping ${name}`); - } - return exported; -}; - -/** - * Instrument mistralai chat completions to add impacts to the response - * - */ -export class MistralAiInstrumentor extends BaseInstrumentor { - constructor() { - super(PROVIDER, chatCompletionsCreateHook); - } -} diff --git a/test/mistral/package.json b/test/mistral/package.json new file mode 100644 index 0000000..b3ecea4 --- /dev/null +++ b/test/mistral/package.json @@ -0,0 +1,56 @@ +{ + "name": "test-mistralai", + "version": "0.0.1", + "description": "Tests for the mistralai provider", + "type": "module", + "main": "es/index.js", + "types": "es/index.d.ts", + "source": "src/index.ts", + "scripts": { + "build": "tsc", + "prepublishOnly": "yarn build", + "watch": "tsc-watch --noClear", + "test": "node es/index.js", + "clean": "rm -rf es" + }, + "keywords": [ + "ecologits", + "openai", + "chatGPT", + "gpt4o", + "gpt3.5", + "climate", + "co2", + "measure", + "carbon", + "footprint", + "llm" + ], + "packageManager": "yarn@1.22.18", + "files": [ + "es" + ], + "author": { + "name": "Yoann Couble for GenAI Impact", + "email": "yoann.couble@valraiso.fr" + }, + "homepage": "ecologits.ai", + "repository": { + "type": "git", + "url": "git://github.com/genai-impact/ecologits.js.git" + }, + "license": "MPL-2.0", + "dependencies": { + "@genai-impact/ecologits-mistral": "*", + "core": "*" + }, + "devDependencies": { + "@mistralai/mistralai": "^0.5.0" + }, + "peerDependencies": { + "@mistralai/mistralai": "^0.5.0" + }, + "engines": { + "node": ">=18.0.0" + } +} diff --git a/test/mistral/src/index.ts b/test/mistral/src/index.ts new file mode 100644 index 0000000..bdc97e2 --- /dev/null +++ b/test/mistral/src/index.ts @@ -0,0 +1,27 @@ +import MistralClient from "@genai-impact/ecologits-mistral"; +import { ChatCompletionResponse } from "@mistralai/mistralai"; +import { Impacts } from "core"; + +const apiKey = process.env.MISTRAL_API_KEY; + +const client = new MistralClient(apiKey); + +const main = async () => { + try { + const response = (await client.chat({ + model: "mistral-tiny", + messages: [{ role: "user", content: "What is the best French cheese?" 
}], + })) as ChatCompletionResponse & { impacts: Impacts }; + // Get estimated environmental impacts of the inference + console.log( + `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}` + ); + console.log( + `GHG emissions: ${response.impacts.gwp.value} ${response.impacts.gwp.unit}` + ); + } catch (e) { + console.error(e); + throw e; + } +}; +main(); diff --git a/test/mistral/tsconfig.json b/test/mistral/tsconfig.json new file mode 100644 index 0000000..f615675 --- /dev/null +++ b/test/mistral/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "rootDir": "./src", + "outDir": "./es", + "target": "ESNext", + "module": "NodeNext", + "resolveJsonModule": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "jsx": "react-jsx", + "preserveWatchOutput": true, + "removeComments": true + } +} diff --git a/test/openai/package.json b/test/openai/package.json new file mode 100644 index 0000000..a654f75 --- /dev/null +++ b/test/openai/package.json @@ -0,0 +1,56 @@ +{ + "name": "test-openai", + "version": "0.0.1", + "description": "Tests for the openai provider", + "type": "module", + "main": "es/index.js", + "types": "es/index.d.ts", + "source": "src/index.ts", + "scripts": { + "build": "tsc", + "prepublishOnly": "yarn build", + "watch": "tsc-watch --noClear", + "test": "node es/index.js", + "clean": "rm -rf es" + }, + "keywords": [ + "ecologits", + "openai", + "chatGPT", + "gpt4o", + "gpt3.5", + "climate", + "co2", + "measure", + "carbon", + "footprint", + "llm" + ], + "packageManager": "yarn@1.22.18", + "files": [ + "es" + ], + "author": { + "name": "Yoann Couble for GenAI Impact", + "email": "yoann.couble@valraiso.fr" + }, + "homepage": "ecologits.ai", + "repository": { + "type": "git", + "url": "git://github.com/genai-impact/ecologits.js.git" + }, + "license": "MPL-2.0", + "dependencies": { + "core": "*", + "@genai-impact/ecologits-openai": "*" + }, + "devDependencies": { + "openai": "^4.49.0" + }, + "peerDependencies": { + "openai": "^4.49.0" + }, + "engines": { + "node": ">=18.0.0" + } +} diff --git a/src/test.ts b/test/openai/src/index.ts similarity index 64% rename from src/test.ts rename to test/openai/src/index.ts index 0bede24..b096f3e 100644 --- a/src/test.ts +++ b/test/openai/src/index.ts @@ -1,9 +1,5 @@ -import type { Impacts } from "./index"; -import { Ecologits } from "./index"; -import OpenAI from "openai"; -import type OpenAITypes from "openai"; // TODO : remove dependency - -Ecologits.init(); +import OpenAI from "@genai-impact/ecologits-openai"; +import { Impacts } from "core"; const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY, @@ -13,9 +9,10 @@ const main = async () => { const response = (await client.chat.completions.create({ messages: [{ role: "user", content: "Tell me a funny joke!" 
}], model: "gpt-3.5-turbo", - })) as OpenAITypes.Chat.Completions.ChatCompletion & { impacts: Impacts }; + })) as Awaited> & { + impacts: Impacts; + }; // Get estimated environmental impacts of the inference - console.log(`Response: ${JSON.stringify(response, undefined, 2)}`); console.log( `Energy consumption: ${response.impacts.energy.value} ${response.impacts.energy.unit}` ); diff --git a/test/openai/tsconfig.json b/test/openai/tsconfig.json new file mode 100644 index 0000000..f615675 --- /dev/null +++ b/test/openai/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "rootDir": "./src", + "outDir": "./es", + "target": "ESNext", + "module": "NodeNext", + "resolveJsonModule": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "jsx": "react-jsx", + "preserveWatchOutput": true, + "removeComments": true + } +} diff --git a/turbo.json b/turbo.json new file mode 100644 index 0000000..a57b5a3 --- /dev/null +++ b/turbo.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://turbo.build/schema.json", + "tasks": { + "build": { + "dependsOn": ["^build"], + "outputs": ["es/**"] + }, + "dev": { + "dependsOn": ["libs#build"], + "cache": false, + "persistent": true + }, + "test": { + "dependsOn": ["build"] + }, + "clean": { + "dependsOn": ["^clean"] + } + } +}
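
All three patches leave the Mistral `chatStream` wrapper commented out with the token counting unresolved. Below is a minimal sketch of how it could look on top of the patch 3 layout, assuming `chatStream` keeps the `(request, options?)` async-generator signature the commented code already calls through `super`, and that chunks may expose `usage?.completion_tokens` as that code expects; the one-token-per-chunk count is a fallback approximation, not a provider-reported number.

```ts
import MistralClient, {
  ChatCompletionResponseChunk,
  ChatRequest,
  ChatRequestOptions,
} from "@mistralai/mistralai";
import { ecoLogitsData, type Impacts } from "core";

const PROVIDER = "mistralai";

export class MistralAiStreamWrapper extends MistralClient {
  // Re-yield every chunk from the upstream stream with a running
  // `impacts` estimate attached.
  async *chatStream(
    request: ChatRequest,
    options?: ChatRequestOptions
  ): AsyncGenerator<ChatCompletionResponseChunk & { impacts: Impacts }> {
    const timerStart = Date.now();
    let tokens = 0;
    for await (const chunk of super.chatStream(request, options)) {
      // Approximate one completion token per chunk, but prefer the
      // provider-reported count when a chunk carries usage data.
      tokens = chunk.usage?.completion_tokens ?? tokens + 1;
      const requestLatency = Date.now() - timerStart;
      const impacts = ecoLogitsData.computeLlmImpacts(
        PROVIDER,
        request.model,
        tokens,
        requestLatency
      );
      yield { ...chunk, impacts };
    }
  }
}
```

A consumer would iterate it exactly like the upstream client and read `chunk.impacts` as the estimate so far; the latency and token count grow as the stream progresses, so only the final chunk reflects the full request.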
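
On the "seamless types exposition" challenge from the README: the `as ... & { impacts: Impacts }` casts repeated in the READMEs and both test packages could be centralized. A sketch with a hypothetical `WithImpacts` helper (not in the patches), usable from any module that can import `core`:

```ts
import type { Impacts } from "core";

/** A provider response augmented with the EcoLogits impact metrics. */
export type WithImpacts<T> = T & { impacts: Impacts };

// Example: the cast in test/mistral/src/index.ts would then read
//   (await client.chat(request)) as WithImpacts<ChatCompletionResponse>;
```

Longer term, the wrappers could declare `WithImpacts<...>` as their actual return type, removing the casts entirely.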
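
One robustness note on `libs/core/src/ecologits.ts`: patch 3 fetches `models.csv` with a top-level `await` and no error handling, so any network failure makes every dependent package fail at import time. A hedged sketch of a fallback, assuming the inline CSV parsing is extracted into a `parseModelsCsv` helper (hypothetical name) and that `computeLlmImpacts` already degrades to `DEFAULT_IMPACT` for unknown models, as its import in that file suggests:

```ts
import fetch from "node-fetch";
// `EcoLogitsData` is the class defined in ecologits.ts; `parseModelsCsv`
// is a hypothetical extraction of the inline CSV-to-ModelData[] parsing.

const url =
  "https://raw.githubusercontent.com/genai-impact/ecologits/main/ecologits/data/models.csv";

let ecoLogitsData: EcoLogitsData;
try {
  const res = await fetch(url);
  ecoLogitsData = new EcoLogitsData(parseModelsCsv(await res.text()));
} catch (e) {
  // Degrade to DEFAULT_IMPACT estimates instead of crashing at import time.
  console.warn(`ecologits: could not load models.csv (${e}); using defaults`);
  ecoLogitsData = new EcoLogitsData([]);
}

export default ecoLogitsData;
```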