Skip to content

Commit

Permalink
Remove blanket unused imports from the common crate (#292)
Browse files Browse the repository at this point in the history
* Remove blanket unused imports from the common crate

Signed-off-by: José Ulises Niño Rivera <[email protected]>

* Update

Signed-off-by: José Ulises Niño Rivera <[email protected]>

---------

Signed-off-by: José Ulises Niño Rivera <[email protected]>
  • Loading branch information
junr03 authored Nov 26, 2024
1 parent 9c6fcdb commit be8c3c9
Show file tree
Hide file tree
Showing 10 changed files with 11 additions and 31 deletions.
12 changes: 2 additions & 10 deletions crates/common/src/common_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -445,7 +445,7 @@ pub struct PromptGuardResponse {
#[cfg(test)]
mod test {
use crate::common_types::open_ai::{ChatCompletionStreamResponseServerEvents, Message};
use pretty_assertions::{assert_eq, assert_ne};
use pretty_assertions::assert_eq;
use std::collections::HashMap;

const TOOL_SERIALIZED: &str = r#"{
Expand Down Expand Up @@ -534,9 +534,7 @@ mod test {

#[test]
fn test_parameter_types() {
use super::open_ai::{
ChatCompletionsRequest, FunctionDefinition, FunctionParameter, ParameterType, ToolType,
};
use super::open_ai::{FunctionParameter, ParameterType};

const PARAMETER_SERIALZIED: &str = r#"{
"city": {
Expand Down Expand Up @@ -582,8 +580,6 @@ mod test {

#[test]
fn stream_chunk_parse() {
use super::open_ai::{ChatCompletionStreamResponse, ChunkChoice, Delta};

const CHUNK_RESPONSE: &str = r#"data: {"id":"chatcmpl-ALmdmtKulBMEq3fRLbrnxJwcKOqvS","object":"chat.completion.chunk","created":1729755226,"model":"gpt-3.5-turbo-0125","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-ALmdmtKulBMEq3fRLbrnxJwcKOqvS","object":"chat.completion.chunk","created":1729755226,"model":"gpt-3.5-turbo-0125","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Hello"},"logprobs":null,"finish_reason":null}]}
Expand Down Expand Up @@ -645,8 +641,6 @@ data: {"id":"chatcmpl-ALmdmtKulBMEq3fRLbrnxJwcKOqvS","object":"chat.completion.c

#[test]
fn stream_chunk_parse_done() {
use super::open_ai::{ChatCompletionStreamResponse, ChunkChoice, Delta};

const CHUNK_RESPONSE: &str = r#"data: {"id":"chatcmpl-ALn2KTfmrIpYd9N3Un4Kyg08WIIP6","object":"chat.completion.chunk","created":1729756748,"model":"gpt-3.5-turbo-0125","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-ALn2KTfmrIpYd9N3Un4Kyg08WIIP6","object":"chat.completion.chunk","created":1729756748,"model":"gpt-3.5-turbo-0125","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}]}
Expand Down Expand Up @@ -712,8 +706,6 @@ data: [DONE]

#[test]
fn stream_chunk_parse_mistral() {
use super::open_ai::{ChatCompletionStreamResponse, ChunkChoice, Delta};

const CHUNK_RESPONSE: &str = r#"data: {"id":"e1ebce16de5443b79613512c2d757936","object":"chat.completion.chunk","created":1729805261,"model":"ministral-8b-latest","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}
data: {"id":"e1ebce16de5443b79613512c2d757936","object":"chat.completion.chunk","created":1729805261,"model":"ministral-8b-latest","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]}
Expand Down
6 changes: 2 additions & 4 deletions crates/common/src/configuration.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
use duration_string::DurationString;
use serde::{Deserialize, Deserializer, Serialize};
use std::default;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt::Display;
use std::{collections::HashMap, time::Duration};

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Overrides {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
* Generated by: https://openapi-generator.tech
*/

use crate::embeddings;
use serde::{Deserialize, Serialize};

/// CreateEmbeddingRequestInput : Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. The input must not exceed the max input tokens for the model (8192 tokens for `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 dimensions or less. for counting tokens.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
* Generated by: https://openapi-generator.tech
*/

use crate::embeddings;
use serde::{Deserialize, Serialize};

/// CreateEmbeddingResponseUsage : The usage information for the request.
Expand Down
1 change: 0 additions & 1 deletion crates/common/src/embeddings/embedding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
* Generated by: https://openapi-generator.tech
*/

use crate::embeddings;
use serde::{Deserialize, Serialize};

/// Embedding : Represents an embedding vector returned by embedding endpoint.
Expand Down
1 change: 0 additions & 1 deletion crates/common/src/errors.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use proxy_wasm::types::Status;
use serde_json::error;

use crate::{common_types::open_ai::ChatCompletionChunkResponseError, ratelimit};

Expand Down
4 changes: 2 additions & 2 deletions crates/common/src/http.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ use crate::{
stats::{Gauge, IncrementingMetric},
};
use derivative::Derivative;
use log::{debug, trace};
use proxy_wasm::{traits::Context, types::Status};
use log::trace;
use proxy_wasm::traits::Context;
use serde::Serialize;
use std::{cell::RefCell, collections::HashMap, fmt::Debug, time::Duration};

Expand Down
2 changes: 0 additions & 2 deletions crates/common/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
#![allow(unused_imports)]

pub mod common_types;
pub mod configuration;
pub mod consts;
Expand Down
2 changes: 0 additions & 2 deletions crates/common/src/tracing.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use std::path::Display;

use rand::RngCore;
use serde::{Deserialize, Serialize};

Expand Down
12 changes: 5 additions & 7 deletions crates/prompt_gateway/src/http_context.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
use std::{
collections::HashMap,
time::{Duration, SystemTime, UNIX_EPOCH},
};

use crate::stream_context::{ResponseHandlerType, StreamCallContext, StreamContext};
use common::{
common_types::{
open_ai::{
Expand All @@ -23,8 +19,10 @@ use http::StatusCode;
use log::{debug, trace, warn};
use proxy_wasm::{traits::HttpContext, types::Action};
use serde_json::Value;

use crate::stream_context::{ResponseHandlerType, StreamCallContext, StreamContext};
use std::{
collections::HashMap,
time::{Duration, SystemTime, UNIX_EPOCH},
};

// HttpContext is the trait that allows the Rust code to interact with HTTP objects.
impl HttpContext for StreamContext {
Expand Down

0 comments on commit be8c3c9

Please sign in to comment.