Commit

Update dependencies
josevalim committed Aug 18, 2024
1 parent d44c3ca commit 0e4f8c4
Showing 14 changed files with 52 additions and 191 deletions.
2 changes: 1 addition & 1 deletion mix.exs
@@ -2,7 +2,7 @@ defmodule Tokenizers.MixProject do
   use Mix.Project

   @source_url "https://github.com/elixir-nx/tokenizers"
-  @version "0.5.0"
+  @version "0.5.0-dev"

   def project do
     [
26 changes: 17 additions & 9 deletions mix.lock
@@ -1,13 +1,21 @@
 %{
-  "castore": {:hex, :castore, "1.0.3", "7130ba6d24c8424014194676d608cb989f62ef8039efd50ff4b3f33286d06db8", [:mix], [], "hexpm", "680ab01ef5d15b161ed6a95449fac5c6b8f60055677a8e79acf01b27baa4390b"},
-  "earmark_parser": {:hex, :earmark_parser, "1.4.33", "3c3fd9673bb5dcc9edc28dd90f50c87ce506d1f71b70e3de69aa8154bc695d44", [:mix], [], "hexpm", "2d526833729b59b9fdb85785078697c72ac5e5066350663e5be6a1182da61b8f"},
-  "ex_doc": {:hex, :ex_doc, "0.30.4", "e8395c8e3c007321abb30a334f9f7c0858d80949af298302daf77553468c0c39", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "9a19f0c50ffaa02435668f5242f2b2a61d46b541ebf326884505dfd3dd7af5e4"},
-  "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
-  "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
-  "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
-  "makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
-  "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
-  "rustler": {:hex, :rustler, "0.29.1", "880f20ae3027bd7945def6cea767f5257bc926f33ff50c0d5d5a5315883c084d", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "109497d701861bfcd26eb8f5801fe327a8eef304f56a5b63ef61151ff44ac9b6"},
+  "castore": {:hex, :castore, "1.0.8", "dedcf20ea746694647f883590b82d9e96014057aff1d44d03ec90f36a5c0dc6e", [:mix], [], "hexpm", "0b2b66d2ee742cb1d9cb8c8be3b43c3a70ee8651f37b75a8b982e036752983f1"},
+  "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"},
+  "ex_doc": {:hex, :ex_doc, "0.34.2", "13eedf3844ccdce25cfd837b99bea9ad92c4e511233199440488d217c92571e8", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "5ce5f16b41208a50106afed3de6a2ed34f4acfd65715b82a0b84b49d995f95c1"},
+  "finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"},
+  "hpax": {:hex, :hpax, "1.0.0", "28dcf54509fe2152a3d040e4e3df5b265dcb6cb532029ecbacf4ce52caea3fd2", [:mix], [], "hexpm", "7f1314731d711e2ca5fdc7fd361296593fc2542570b3105595bb0bc6d0fad601"},
+  "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
+  "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"},
+  "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
+  "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"},
+  "mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"},
+  "mint": {:hex, :mint, "1.6.2", "af6d97a4051eee4f05b5500671d47c3a67dac7386045d87a904126fd4bbcea2e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "5ee441dffc1892f1ae59127f74afe8fd82fda6587794278d924e4d90ea3d63f9"},
+  "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
+  "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
+  "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
+  "req": {:hex, :req, "0.5.6", "8fe1eead4a085510fe3d51ad854ca8f20a622aae46e97b302f499dfb84f726ac", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "cfaa8e720945d46654853de39d368f40362c2641c4b2153c886418914b372185"},
+  "rustler": {:hex, :rustler, "0.34.0", "e9a73ee419fc296a10e49b415a2eb87a88c9217aa0275ec9f383d37eed290c1c", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "1d0c7449482b459513003230c0e2422b0252245776fe6fd6e41cb2b11bd8e628"},
   "rustler_precompiled": {:hex, :rustler_precompiled, "0.6.2", "d2218ba08a43fa331957f30481d00b666664d7e3861431b02bd3f4f30eec8e5b", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:rustler, "~> 0.23", [hex: :rustler, repo: "hexpm", optional: true]}], "hexpm", "b9048eaed8d7d14a53f758c91865cc616608a438d2595f621f6a4b32a5511709"},
+  "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
   "toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},
 }
25 changes: 16 additions & 9 deletions native/ex_tokenizers/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion native/ex_tokenizers/Cargo.toml
@@ -11,7 +11,7 @@ crate-type = ["cdylib"]

 [dependencies]
 anyhow = "1"
-rustler = "0.29.1"
+rustler = "0.34.0"
 thiserror = "1"
 tokenizers = { version = "0.19.1", default-features = false, features = ["onig", "esaxx_fast"]}
 serde = { version = "1.0", features = [ "rc", "derive" ] }
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/added_token.rs
@@ -8,7 +8,7 @@ pub struct ExTokenizersAddedTokenRef(pub AddedToken);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.AddedToken"]
 pub struct ExTokenizersAddedToken {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersAddedTokenRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersAddedTokenRef>,
 }

 impl Serialize for ExTokenizersAddedToken {
@@ -46,7 +46,7 @@ impl ExTokenizersAddedToken {
         T: Into<AddedToken>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersAddedTokenRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersAddedTokenRef::new(data)),
         }
     }
 }
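
The change above, `rustler::resource::ResourceArc` becoming `rustler::ResourceArc`, is the same rename that the rustler 0.34 upgrade applies in every resource module below (decoders, models, normalizers, post-processors, pre-tokenizers). As a rough, self-contained sketch of the pattern, with hypothetical `CounterRef` and `new_counter` names that are not part of this commit, and assuming the `rustler::resource!` registration macro is still used in the `load` callback the way this project does in its lib.rs:

```rust
use rustler::{Env, ResourceArc, Term};
use std::sync::Mutex;

// Hypothetical resource payload, analogous to ExTokenizersAddedTokenRef.
pub struct CounterRef(pub Mutex<i64>);

fn load(env: Env, _info: Term) -> bool {
    // Registering the resource type is what makes ResourceArc<CounterRef>
    // usable across the NIF boundary.
    rustler::resource!(CounterRef, env);
    true
}

// With rustler 0.34 the type is re-exported at the crate root, so the plain
// `ResourceArc` imported above replaces `rustler::resource::ResourceArc`.
#[rustler::nif]
fn new_counter() -> ResourceArc<CounterRef> {
    ResourceArc::new(CounterRef(Mutex::new(0)))
}
```

Wiring this into `rustler::init!` is unchanged apart from the shorter call shown in the lib.rs diff further down.
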
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/decoders.rs
@@ -9,7 +9,7 @@ pub struct ExTokenizersDecoderRef(pub DecoderWrapper);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.Decoder"]
 pub struct ExTokenizersDecoder {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersDecoderRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersDecoderRef>,
 }

 impl Serialize for ExTokenizersDecoder {
@@ -55,7 +55,7 @@ impl ExTokenizersDecoder {
         T: Into<DecoderWrapper>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersDecoderRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersDecoderRef::new(data)),
         }
     }
 }
2 changes: 1 addition & 1 deletion native/ex_tokenizers/src/encoding.rs
@@ -1,4 +1,4 @@
-use rustler::{resource::ResourceArc, Binary, Env, NifTaggedEnum};
+use rustler::{Binary, Env, NifTaggedEnum, ResourceArc};
 use tokenizers::Encoding;

 use crate::util::Direction;
156 changes: 1 addition & 155 deletions native/ex_tokenizers/src/lib.rs
@@ -36,158 +36,4 @@ fn on_load(env: Env, _info: Term) -> bool {
     true
 }

-rustler::init!(
-    "Elixir.Tokenizers.Native",
-    [
-        // AddedToken
-        added_token_new,
-        //
-        added_token_info,
-        // Decoders
-        decoders_decode,
-        //
-        decoders_info,
-        //
-        decoders_byte_level,
-        decoders_replace,
-        decoders_wordpiece,
-        decoders_byte_fallback,
-        decoders_fuse,
-        decoders_strip,
-        decoders_metaspace,
-        decoders_bpe,
-        decoders_ctc,
-        decoders_sequence,
-        // Encoding
-        encoding_get_length,
-        encoding_get_n_sequences,
-        encoding_set_sequence_id,
-        encoding_get_ids,
-        encoding_get_u32_ids,
-        encoding_get_type_ids,
-        encoding_get_u32_type_ids,
-        encoding_get_attention_mask,
-        encoding_get_u32_attention_mask,
-        encoding_get_special_tokens_mask,
-        encoding_get_u32_special_tokens_mask,
-        encoding_get_tokens,
-        encoding_get_word_ids,
-        encoding_get_sequence_ids,
-        encoding_get_offsets,
-        encoding_get_overflowing,
-        encoding_word_to_tokens,
-        encoding_word_to_chars,
-        encoding_token_to_sequence,
-        encoding_token_to_chars,
-        encoding_token_to_word,
-        encoding_char_to_token,
-        encoding_char_to_word,
-        encoding_pad,
-        encoding_truncate,
-        //
-        encoding_transform,
-        // Models
-        models_save,
-        //
-        models_info,
-        //
-        models_bpe_init,
-        models_bpe_empty,
-        models_bpe_from_file,
-        //
-        models_wordpiece_init,
-        models_wordpiece_empty,
-        models_wordpiece_from_file,
-        //
-        models_wordlevel_init,
-        models_wordlevel_empty,
-        models_wordlevel_from_file,
-        //
-        models_unigram_init,
-        models_unigram_empty,
-        // Normalizers
-        normalizers_normalize,
-        //
-        normalizers_info,
-        //
-        normalizers_bert_normalizer,
-        normalizers_nfd,
-        normalizers_nfkd,
-        normalizers_nfc,
-        normalizers_nfkc,
-        normalizers_strip,
-        normalizers_prepend,
-        normalizers_strip_accents,
-        normalizers_sequence,
-        normalizers_lowercase,
-        normalizers_replace,
-        normalizers_nmt,
-        normalizers_precompiled,
-        // PreTokenizers
-        pre_tokenizers_pre_tokenize,
-        //
-        pre_tokenizers_info,
-        //
-        pre_tokenizers_byte_level,
-        pre_tokenizers_byte_level_alphabet,
-        pre_tokenizers_whitespace,
-        pre_tokenizers_whitespace_split,
-        pre_tokenizers_bert,
-        pre_tokenizers_metaspace,
-        pre_tokenizers_char_delimiter_split,
-        pre_tokenizers_split,
-        pre_tokenizers_punctuation,
-        pre_tokenizers_sequence,
-        pre_tokenizers_digits,
-        // PostProcessors
-        post_processors_info,
-        //
-        post_processors_bert,
-        post_processors_roberta,
-        post_processors_byte_level,
-        post_processors_template,
-        post_processors_sequence,
-        // Trainers
-        trainers_info,
-        //
-        trainers_bpe_trainer,
-        trainers_wordpiece_trainer,
-        trainers_wordlevel_trainer,
-        trainers_unigram_trainer,
-        // Tokenizer
-        tokenizer_init,
-        tokenizer_from_file,
-        tokenizer_from_buffer,
-        tokenizer_save,
-        //
-        tokenizer_get_model,
-        tokenizer_set_model,
-        tokenizer_get_normalizer,
-        tokenizer_set_normalizer,
-        tokenizer_get_pre_tokenizer,
-        tokenizer_set_pre_tokenizer,
-        tokenizer_get_post_processor,
-        tokenizer_set_post_processor,
-        tokenizer_get_decoder,
-        tokenizer_set_decoder,
-        tokenizer_get_vocab,
-        tokenizer_get_vocab_size,
-        tokenizer_add_tokens,
-        tokenizer_add_special_tokens,
-        tokenizer_set_truncation,
-        tokenizer_disable_truncation,
-        tokenizer_set_padding,
-        tokenizer_disable_padding,
-        //
-        tokenizer_encode,
-        tokenizer_encode_batch,
-        tokenizer_decode,
-        tokenizer_decode_batch,
-        tokenizer_token_to_id,
-        tokenizer_id_to_token,
-        tokenizer_post_processing,
-        //
-        tokenizer_train_from_files,
-    ],
-    load = on_load
-);
+rustler::init!("Elixir.Tokenizers.Native", load = on_load);
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/models.rs
@@ -19,7 +19,7 @@ pub struct ExTokenizersModelRef(pub RwLock<ModelWrapper>);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.Model"]
 pub struct ExTokenizersModel {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersModelRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersModelRef>,
 }

 impl Serialize for ExTokenizersModel {
@@ -97,7 +97,7 @@ impl ExTokenizersModel {
         T: Into<ModelWrapper>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersModelRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersModelRef::new(data)),
         }
     }
 }
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/normalizers.rs
@@ -10,7 +10,7 @@ pub struct ExTokenizersNormalizerRef(pub NormalizerWrapper);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.Normalizer"]
 pub struct ExTokenizersNormalizer {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersNormalizerRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersNormalizerRef>,
 }

 impl Serialize for ExTokenizersNormalizer {
@@ -56,7 +56,7 @@ impl ExTokenizersNormalizer {
         T: Into<NormalizerWrapper>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersNormalizerRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersNormalizerRef::new(data)),
         }
     }
 }
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/post_processors.rs
@@ -9,7 +9,7 @@ pub struct ExTokenizersPostProcessorRef(pub PostProcessorWrapper);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.PostProcessor"]
 pub struct ExTokenizersPostProcessor {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersPostProcessorRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersPostProcessorRef>,
 }

 impl ExTokenizersPostProcessorRef {
@@ -27,7 +27,7 @@ impl ExTokenizersPostProcessor {
         T: Into<PostProcessorWrapper>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersPostProcessorRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersPostProcessorRef::new(data)),
         }
     }
 }
4 changes: 2 additions & 2 deletions native/ex_tokenizers/src/pre_tokenizers.rs
@@ -11,7 +11,7 @@ pub struct ExTokenizersPreTokenizerRef(pub PreTokenizerWrapper);
 #[derive(rustler::NifStruct)]
 #[module = "Tokenizers.PreTokenizer"]
 pub struct ExTokenizersPreTokenizer {
-    pub resource: rustler::resource::ResourceArc<ExTokenizersPreTokenizerRef>,
+    pub resource: rustler::ResourceArc<ExTokenizersPreTokenizerRef>,
 }

 impl Serialize for ExTokenizersPreTokenizer {
@@ -63,7 +63,7 @@ impl ExTokenizersPreTokenizer {
         T: Into<PreTokenizerWrapper>,
     {
         Self {
-            resource: rustler::resource::ResourceArc::new(ExTokenizersPreTokenizerRef::new(data)),
+            resource: rustler::ResourceArc::new(ExTokenizersPreTokenizerRef::new(data)),
         }
     }
 }
(Diffs for the remaining changed files were not loaded.)
