diff --git a/.github/ISSUE_TEMPLATE/NEW-LANGUAGE-REQUEST.yml b/.github/ISSUE_TEMPLATE/NEW-LANGUAGE-REQUEST.yml
index d924019b9a9..5fddced9f87 100644
--- a/.github/ISSUE_TEMPLATE/NEW-LANGUAGE-REQUEST.yml
+++ b/.github/ISSUE_TEMPLATE/NEW-LANGUAGE-REQUEST.yml
@@ -1,7 +1,7 @@
name: New Language Request
description: Request to add a new language for LibreChat translations.
title: "New Language Request: "
-labels: ["enhancement", "i18n"]
+labels: ["✨ enhancement", "🌍 i18n"]
body:
- type: markdown
attributes:
@@ -30,4 +30,4 @@ body:
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/.github/CODE_OF_CONDUCT.md).
options:
- label: I agree to follow this project's Code of Conduct
- required: true
\ No newline at end of file
+ required: true
diff --git a/.github/TRANSLATION.md b/.github/TRANSLATION.md
deleted file mode 100644
index 080505048fe..00000000000
--- a/.github/TRANSLATION.md
+++ /dev/null
@@ -1,70 +0,0 @@
-# LibreChat Translation Guide
-
-Thank you for your interest in translating LibreChat! We rely on community contributions to make our application accessible to users around the globe. We manage all translations using [Locize](https://locize.com), a powerful translation management system that integrates seamlessly with our project.
-
-## How Translations Work
-
-- **Centralized Management:** All translation strings for LibreChat are managed in a single location on Locize. This allows us to keep translations consistent across all parts of the application.
-- **Automatic Updates:** Changes made in Locize are automatically synchronized with our project. You can see the current translation progress for each language via the dynamic badges in our GitHub repository.
-- **Community Driven:** We welcome contributions in all languages. Your help ensures that more users can enjoy LibreChat in their native language.
-
-## Getting Started
-
-### 1. Create a Locize Account
-
-If you don't already have an account, please register using our invite link:
-
-[Register at Locize](https://www.locize.app/register?invitation=t1VDfqoRvj8eUkd1JasxxrBCCI4SAqeeofa2YumAgmVDRxkr4vO1jKqNmpaNCv7H)
-
-This invitation will give you access to our translation project once you’ve created your account.
-
-
-## Adding a New Language
-
-If you do not see your language listed in our current translation table, please help us expand our language support by following these steps:
-
-1. **Create a New Issue:** Open a new issue in the GitHub repository.
-2. **Use the Template:** When creating your issue, please select the **New Language Request** template. This template will guide you through providing all the necessary details, including:
- - The full name of your language (e.g., Spanish, Mandarin).
- - The [ISO 639-1](https://www.w3schools.com/tags/ref_language_codes.asp) code for your language (e.g., es for Spanish).
-3. **Collaborate with Maintainers:** Our maintainers will review your issue and work with you to integrate the new language. Once approved, your language will appear in the translation progress table, and you can start contributing translations.
-
-
-## Translation Progress
-
-Below is our current translation progress for some of the supported languages. Feel free to check these badges and help us improve the translations further:
-
-| Language | Translation Progress Badge |
-|---------------------------------------|----------------------------|
-| **English (en)** |  |
-| **Arabic (ar)** |  |
-| **German (de)** |  |
-| **Spanish (es)** |  |
-| **Finnish (fi)** |  |
-| **French (fr)** |  |
-| **Hebrew (he)** |  |
-| **Indonesian (id)** |  |
-| **Italian (it)** |  |
-| **Japanese (ja)** |  |
-| **Korean (ko)** |  |
-| **Dutch (nl)** |  |
-| **Polish (pl)** |  |
-| **Portuguese (pt)** |  |
-| **Russian (ru)** |  |
-| **Swedish (sv)** |  |
-| **Turkish (tr)** |  |
-| **Vietnamese (vi)** |  |
-| **Chinese (Simplified) (zh)** |  |
-| **Chinese (Traditional) (zh-Hant)** |  |
-
----
-
-## Need Help?
-
-If you have any questions about the translation process or need assistance getting started, please feel free to:
-
-- Open an issue in this repository.
-- Join our [Discord community](https://discord.librechat.ai) to chat with fellow translators and contributors.
-- Contact one of the project maintainers directly.
-
-Your contributions help make LibreChat better for users worldwide. Happy translating!
diff --git a/.github/workflows/eslint-ci.yml b/.github/workflows/eslint-ci.yml
index f8baf09dcf6..ea1a5f24161 100644
--- a/.github/workflows/eslint-ci.yml
+++ b/.github/workflows/eslint-ci.yml
@@ -1,10 +1,14 @@
name: ESLint Code Quality Checks
+
on:
pull_request:
branches:
- main
- dev
- release/*
+ paths:
+ - 'api/**'
+ - 'client/**'
jobs:
eslint_checks:
@@ -29,67 +33,40 @@ jobs:
- name: Install dependencies
run: npm ci
- # Use a paths filter (v3) to detect changes in JavaScript/TypeScript files,
- # but only consider files that are added or modified (ignoring deleted files).
- - name: Filter changed files for ESLint
- id: file_filter
- uses: dorny/paths-filter@v3
- with:
- filters: |
- eslint:
- - added|modified: '**/*.js'
- - added|modified: '**/*.jsx'
- - added|modified: '**/*.ts'
- - added|modified: '**/*.tsx'
-
- # Run ESLint only if relevant files have been added or modified.
+ # Run ESLint on changed files within the api/ and client/ directories.
- name: Run ESLint on changed files
- if: steps.file_filter.outputs.eslint == 'true'
env:
SARIF_ESLINT_IGNORE_SUPPRESSED: "true"
run: |
# Extract the base commit SHA from the pull_request event payload.
BASE_SHA=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
echo "Base commit SHA: $BASE_SHA"
-
- # List files changed between the base commit and current HEAD,
- # but only include files that are not deleted (ACMRTUXB: A, C, M, R, T, U, X, B).
- CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$BASE_SHA" HEAD | grep -E '\.(js|jsx|ts|tsx)$')
- echo "Files to lint:"
+
+ # Get changed files (only JS/TS files in api/ or client/)
+ CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$BASE_SHA" HEAD | grep -E '^(api|client)/.*\.(js|jsx|ts|tsx)$' || true)
+
+ # Debug output
+ echo "Changed files:"
echo "$CHANGED_FILES"
-
- # Run ESLint on the changed files.
+
+ # Ensure there are files to lint before running ESLint
+ if [[ -z "$CHANGED_FILES" ]]; then
+ echo "No matching files changed. Skipping ESLint."
+ echo "UPLOAD_SARIF=false" >> $GITHUB_ENV
+ exit 0
+ fi
+
+ # Set variable to allow SARIF upload
+ echo "UPLOAD_SARIF=true" >> $GITHUB_ENV
+
+ # Run ESLint
npx eslint --no-error-on-unmatched-pattern \
--config eslint.config.mjs \
--format @microsoft/eslint-formatter-sarif \
--output-file eslint-results.sarif $CHANGED_FILES || true
- # If no JavaScript/TypeScript files were added or modified,
- # create a valid (non-empty) SARIF file containing one run.
- - name: Create empty SARIF results file
- if: steps.file_filter.outputs.eslint != 'true'
- run: |
- cat << 'EOF' > eslint-results.sarif
- {
- "version": "2.1.0",
- "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
- "runs": [
- {
- "tool": {
- "driver": {
- "name": "ESLint",
- "informationUri": "https://eslint.org",
- "version": "0.0.0",
- "rules": []
- }
- },
- "results": []
- }
- ]
- }
- EOF
-
- name: Upload analysis results to GitHub
+ if: env.UPLOAD_SARIF == 'true'
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: eslint-results.sarif
diff --git a/.github/workflows/i18n-unused-keys.yml b/.github/workflows/i18n-unused-keys.yml
new file mode 100644
index 00000000000..79f95d3b27f
--- /dev/null
+++ b/.github/workflows/i18n-unused-keys.yml
@@ -0,0 +1,84 @@
+name: Detect Unused i18next Strings
+
+on:
+ pull_request:
+ paths:
+ - "client/src/**"
+
+jobs:
+ detect-unused-i18n-keys:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write # Required for posting PR comments
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ - name: Find unused i18next keys
+ id: find-unused
+ run: |
+ echo "🔍 Scanning for unused i18next keys..."
+
+ # Define paths
+ I18N_FILE="client/src/locales/en/translation.json"
+ SOURCE_DIR="client/src"
+
+ # Check if translation file exists
+ if [[ ! -f "$I18N_FILE" ]]; then
+ echo "::error title=Missing i18n File::Translation file not found: $I18N_FILE"
+ exit 1
+ fi
+
+ # Extract all keys from the JSON file
+ KEYS=$(jq -r 'keys[]' "$I18N_FILE")
+
+ # Track unused keys
+ UNUSED_KEYS=()
+
+ # Check if each key is used in the source code
+ for KEY in $KEYS; do
+ if ! grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$SOURCE_DIR"; then
+ UNUSED_KEYS+=("$KEY")
+ fi
+ done
+
+ # Output results
+ if [[ ${#UNUSED_KEYS[@]} -gt 0 ]]; then
+ echo "🛑 Found ${#UNUSED_KEYS[@]} unused i18n keys:"
+ echo "unused_keys=$(echo "${UNUSED_KEYS[@]}" | jq -R -s -c 'split(" ")')" >> $GITHUB_ENV
+ for KEY in "${UNUSED_KEYS[@]}"; do
+ echo "::warning title=Unused i18n Key::'$KEY' is defined but not used in the codebase."
+ done
+ else
+ echo "✅ No unused i18n keys detected!"
+ echo "unused_keys=[]" >> $GITHUB_ENV
+ fi
+
+ - name: Post verified comment on PR
+ if: env.unused_keys != '[]'
+ run: |
+ PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
+
+ # Format the unused keys list correctly, filtering out empty entries
+ FILTERED_KEYS=$(echo "$unused_keys" | jq -r '.[]' | grep -v '^\s*$' | sed 's/^/- `/;s/$/`/' )
+
+ COMMENT_BODY=$(cat < used_scripts.txt
+ else
+ touch used_scripts.txt
+ fi
+ }
+
+ extract_deps_from_scripts "package.json"
+ mv used_scripts.txt root_used_deps.txt
+
+ extract_deps_from_scripts "client/package.json"
+ mv used_scripts.txt client_used_deps.txt
+
+ extract_deps_from_scripts "api/package.json"
+ mv used_scripts.txt api_used_deps.txt
+
+ - name: Extract Dependencies Used in Source Code
+ id: extract-used-code
+ run: |
+ extract_deps_from_code() {
+ local folder=$1
+ local output_file=$2
+ if [[ -d "$folder" ]]; then
+ grep -rEho "require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)" "$folder" --include=\*.{js,ts,mjs,cjs} | \
+ sed -E "s/require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)/\1/" > "$output_file"
+
+ grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,mjs,cjs} | \
+ sed -E "s/import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"
+
+ sort -u "$output_file" -o "$output_file"
+ else
+ touch "$output_file"
+ fi
+ }
+
+ extract_deps_from_code "." root_used_code.txt
+ extract_deps_from_code "client" client_used_code.txt
+ extract_deps_from_code "api" api_used_code.txt
+
+ - name: Run depcheck for root package.json
+ id: check-root
+ run: |
+ if [[ -f "package.json" ]]; then
+ UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
+ UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt | sort) || echo "")
+ echo "ROOT_UNUSED<> $GITHUB_ENV
+ echo "$UNUSED" >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+ fi
+
+ - name: Run depcheck for client/package.json
+ id: check-client
+ run: |
+ if [[ -f "client/package.json" ]]; then
+ chmod -R 755 client
+ cd client
+ UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
+ UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
+ echo "CLIENT_UNUSED<> $GITHUB_ENV
+ echo "$UNUSED" >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+ cd ..
+ fi
+
+ - name: Run depcheck for api/package.json
+ id: check-api
+ run: |
+ if [[ -f "api/package.json" ]]; then
+ chmod -R 755 api
+ cd api
+ UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
+ UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt | sort) || echo "")
+ echo "API_UNUSED<> $GITHUB_ENV
+ echo "$UNUSED" >> $GITHUB_ENV
+ echo "EOF" >> $GITHUB_ENV
+ cd ..
+ fi
+
+ - name: Post comment on PR if unused dependencies are found
+ if: env.ROOT_UNUSED != '' || env.CLIENT_UNUSED != '' || env.API_UNUSED != ''
+ run: |
+ PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
+
+ ROOT_LIST=$(echo "$ROOT_UNUSED" | awk '{print "- `" $0 "`"}')
+ CLIENT_LIST=$(echo "$CLIENT_UNUSED" | awk '{print "- `" $0 "`"}')
+ API_LIST=$(echo "$API_UNUSED" | awk '{print "- `" $0 "`"}')
+
+ COMMENT_BODY=$(cat <
-
+
@@ -179,7 +179,7 @@ Contributions, suggestions, bug reports and fixes are welcome!
For new features, components, or extensions, please open an issue and discuss before sending a PR.
-If you'd like to help translate LibreChat into your language, we'd love your contribution! Improving our translations not only makes LibreChat more accessible to users around the world but also enhances the overall user experience. Please check out our [Translation Guide](.github/TRANSLATION.md).
+If you'd like to help translate LibreChat into your language, we'd love your contribution! Improving our translations not only makes LibreChat more accessible to users around the world but also enhances the overall user experience. Please check out our [Translation Guide](https://www.librechat.ai/docs/translation).
---
@@ -199,4 +199,4 @@ We thank [Locize](https://locize.com) for their translation management tools tha
-
\ No newline at end of file
+
diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js
index 9334f1c28b1..368e7d6e84b 100644
--- a/api/app/clients/OpenAIClient.js
+++ b/api/app/clients/OpenAIClient.js
@@ -506,9 +506,8 @@ class OpenAIClient extends BaseClient {
if (promptPrefix && this.isOmni === true) {
const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
if (lastUserMessageIndex !== -1) {
- payload[
- lastUserMessageIndex
- ].content = `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
+ payload[lastUserMessageIndex].content =
+ `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
}
}
@@ -1067,14 +1066,36 @@ ${convo}
});
}
- getStreamText() {
+ /**
+ *
+ * @param {string[]} [intermediateReply]
+ * @returns {string}
+ */
+ getStreamText(intermediateReply) {
if (!this.streamHandler) {
- return '';
+ return intermediateReply?.join('') ?? '';
+ }
+
+ let thinkMatch;
+ let remainingText;
+ let reasoningText = '';
+
+ if (this.streamHandler.reasoningTokens.length > 0) {
+ reasoningText = this.streamHandler.reasoningTokens.join('');
+ thinkMatch = reasoningText.match(/<think>([\s\S]*?)<\/think>/)?.[1]?.trim();
+ if (thinkMatch != null && thinkMatch) {
+ const reasoningTokens = `:::thinking\n${thinkMatch}\n:::\n`;
+ remainingText = reasoningText.split(/<\/think>/)?.[1]?.trim() || '';
+ return `${reasoningTokens}${remainingText}${this.streamHandler.tokens.join('')}`;
+ } else if (thinkMatch === '') {
+ remainingText = reasoningText.split(/<\/think>/)?.[1]?.trim() || '';
+ return `${remainingText}${this.streamHandler.tokens.join('')}`;
+ }
}
const reasoningTokens =
- this.streamHandler.reasoningTokens.length > 0
- ? `:::thinking\n${this.streamHandler.reasoningTokens.join('')}\n:::\n`
+ reasoningText.length > 0
+ ? `:::thinking\n${reasoningText.replace('<think>', '').replace('</think>', '').trim()}\n:::\n`
: '';
return `${reasoningTokens}${this.streamHandler.tokens.join('')}`;
@@ -1314,11 +1335,19 @@ ${convo}
streamPromise = new Promise((resolve) => {
streamResolve = resolve;
});
+ /** @type {OpenAI.OpenAI.CompletionCreateParamsStreaming} */
+ const params = {
+ ...modelOptions,
+ stream: true,
+ };
+ if (
+ this.options.endpoint === EModelEndpoint.openAI ||
+ this.options.endpoint === EModelEndpoint.azureOpenAI
+ ) {
+ params.stream_options = { include_usage: true };
+ }
const stream = await openai.beta.chat.completions
- .stream({
- ...modelOptions,
- stream: true,
- })
+ .stream(params)
.on('abort', () => {
/* Do nothing here */
})
@@ -1449,7 +1478,7 @@ ${convo}
this.options.context !== 'title' &&
message.content.startsWith('<think>')
) {
- return message.content.replace('<think>', ':::thinking').replace('</think>', ':::');
+ return this.getStreamText();
}
return message.content;
@@ -1458,7 +1487,7 @@ ${convo}
err?.message?.includes('abort') ||
(err instanceof OpenAI.APIError && err?.message?.includes('abort'))
) {
- return intermediateReply.join('');
+ return this.getStreamText(intermediateReply);
}
if (
err?.message?.includes(
@@ -1473,14 +1502,18 @@ ${convo}
(err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason'))
) {
logger.error('[OpenAIClient] Known OpenAI error:', err);
- if (intermediateReply.length > 0) {
- return intermediateReply.join('');
+ if (this.streamHandler && this.streamHandler.reasoningTokens.length) {
+ return this.getStreamText();
+ } else if (intermediateReply.length > 0) {
+ return this.getStreamText(intermediateReply);
} else {
throw err;
}
} else if (err instanceof OpenAI.APIError) {
- if (intermediateReply.length > 0) {
- return intermediateReply.join('');
+ if (this.streamHandler && this.streamHandler.reasoningTokens.length) {
+ return this.getStreamText();
+ } else if (intermediateReply.length > 0) {
+ return this.getStreamText(intermediateReply);
} else {
throw err;
}
diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js
index b7ff50150e2..6592371f027 100644
--- a/api/cache/getLogStores.js
+++ b/api/cache/getLogStores.js
@@ -37,6 +37,10 @@ const messages = isRedisEnabled
? new Keyv({ store: keyvRedis, ttl: Time.ONE_MINUTE })
: new Keyv({ namespace: CacheKeys.MESSAGES, ttl: Time.ONE_MINUTE });
+const flows = isRedisEnabled
+ ? new Keyv({ store: keyvRedis, ttl: Time.TWO_MINUTES })
+ : new Keyv({ namespace: CacheKeys.FLOWS, ttl: Time.ONE_MINUTE * 3 });
+
const tokenConfig = isRedisEnabled
? new Keyv({ store: keyvRedis, ttl: Time.THIRTY_MINUTES })
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: Time.THIRTY_MINUTES });
@@ -88,6 +92,7 @@ const namespaces = {
[CacheKeys.MODEL_QUERIES]: modelQueries,
[CacheKeys.AUDIO_RUNS]: audioRuns,
[CacheKeys.MESSAGES]: messages,
+ [CacheKeys.FLOWS]: flows,
};
/**
diff --git a/api/cache/keyvRedis.js b/api/cache/keyvRedis.js
index 9501045e4e1..d544b50a11e 100644
--- a/api/cache/keyvRedis.js
+++ b/api/cache/keyvRedis.js
@@ -1,6 +1,6 @@
const KeyvRedis = require('@keyv/redis');
-const { logger } = require('~/config');
const { isEnabled } = require('~/server/utils');
+const logger = require('~/config/winston');
const { REDIS_URI, USE_REDIS } = process.env;
diff --git a/api/config/index.js b/api/config/index.js
index c2b21cfc079..aaf8bb27644 100644
--- a/api/config/index.js
+++ b/api/config/index.js
@@ -1,9 +1,11 @@
const { EventSource } = require('eventsource');
+const { Time, CacheKeys } = require('librechat-data-provider');
const logger = require('./winston');
global.EventSource = EventSource;
let mcpManager = null;
+let flowManager = null;
/**
* @returns {Promise<MCPManager>}
@@ -16,6 +18,21 @@ async function getMCPManager() {
return mcpManager;
}
+/**
+ * @param {(key: string) => Keyv} getLogStores
+ * @returns {Promise<FlowStateManager>}
+ */
+async function getFlowStateManager(getLogStores) {
+ if (!flowManager) {
+ const { FlowStateManager } = await import('librechat-mcp');
+ flowManager = new FlowStateManager(getLogStores(CacheKeys.FLOWS), {
+ ttl: Time.ONE_MINUTE * 3,
+ logger,
+ });
+ }
+ return flowManager;
+}
+
/**
* Sends message data in Server Sent Events format.
* @param {ServerResponse} res - The server response.
@@ -34,4 +51,5 @@ module.exports = {
logger,
sendEvent,
getMCPManager,
+ getFlowStateManager,
};
diff --git a/api/models/Token.js b/api/models/Token.js
index cdd156b6b45..210666ddd78 100644
--- a/api/models/Token.js
+++ b/api/models/Token.js
@@ -1,5 +1,6 @@
-const tokenSchema = require('./schema/tokenSchema');
const mongoose = require('mongoose');
+const { encryptV2 } = require('~/server/utils/crypto');
+const tokenSchema = require('./schema/tokenSchema');
const { logger } = require('~/config');
/**
@@ -7,6 +8,32 @@ const { logger } = require('~/config');
* @type {mongoose.Model}
*/
const Token = mongoose.model('Token', tokenSchema);
+/**
+ * Fixes the indexes for the Token collection from legacy TTL indexes to the new expiresAt index.
+ */
+async function fixIndexes() {
+ try {
+ const indexes = await Token.collection.indexes();
+ logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
+ const unwantedTTLIndexes = indexes.filter(
+ (index) => index.key.createdAt === 1 && index.expireAfterSeconds !== undefined,
+ );
+ if (unwantedTTLIndexes.length === 0) {
+ logger.debug('No unwanted Token indexes found.');
+ return;
+ }
+ for (const index of unwantedTTLIndexes) {
+ logger.debug(`Dropping unwanted Token index: ${index.name}`);
+ await Token.collection.dropIndex(index.name);
+ logger.debug(`Dropped Token index: ${index.name}`);
+ }
+ logger.debug('Token index cleanup completed successfully.');
+ } catch (error) {
+ logger.error('An error occurred while fixing Token indexes:', error);
+ }
+}
+
+fixIndexes();
/**
* Creates a new Token instance.
@@ -29,8 +56,7 @@ async function createToken(tokenData) {
expiresAt,
};
- const newToken = new Token(newTokenData);
- return await newToken.save();
+ return await Token.create(newTokenData);
} catch (error) {
logger.debug('An error occurred while creating token:', error);
throw error;
@@ -42,7 +68,8 @@ async function createToken(tokenData) {
* @param {Object} query - The query to match against.
* @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
* @param {String} query.token - The token value.
- * @param {String} query.email - The email of the user.
+ * @param {String} [query.email] - The email of the user.
+ * @param {String} [query.identifier] - Unique, alternative identifier for the token.
* @returns {Promise