Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(test): adding test #3171

Closed
wants to merge 9 commits into from
2 changes: 1 addition & 1 deletion ee/tabby-webserver/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
mod axum;
mod hub;
mod jwt;
mod mock;
Sma1lboy marked this conversation as resolved.
Show resolved Hide resolved
mod oauth;
mod path;
mod routes;
Expand All @@ -12,7 +13,6 @@ mod webserver;
pub use service::*;

pub mod public {

pub use super::{
/* used by tabby workers (consumer of /hub api) */
hub::{create_worker_client, WorkerClient},
Expand Down
74 changes: 74 additions & 0 deletions ee/tabby-webserver/src/mock/helper.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
pub mod helpers {
    //! Factory functions that build the fixtures shared by the mock tests.

    use std::path::PathBuf;

    use juniper::ID;
    use tabby_common::{api::code::CodeSearchParams, config::AnswerConfig};
    use tabby_schema::{
        context::{ContextInfo, ContextInfoHelper, ContextSourceValue},
        repository::{Repository, RepositoryKind},
        thread::CodeQueryInput,
        AsID,
    };

    const TEST_SOURCE_ID: &str = "source-1";
    const TEST_GIT_URL: &str = "TabbyML/tabby";
    const TEST_FILEPATH: &str = "test.rs";
    const TEST_LANGUAGE: &str = "rust";
    const TEST_CONTENT: &str = "fn main() {}";

    /// Builds an `AnswerConfig` backed by the fixed test search parameters.
    pub fn make_answer_config() -> AnswerConfig {
        let code_search_params = make_code_search_params();
        AnswerConfig { code_search_params }
    }

    /// Fixed `CodeSearchParams` thresholds used across the mock tests.
    pub fn make_code_search_params() -> CodeSearchParams {
        CodeSearchParams {
            num_to_return: 5,
            num_to_score: 10,
            min_bm25_score: 0.5,
            min_embedding_score: 0.7,
            min_rrf_score: 0.3,
        }
    }

    /// A fully-populated `CodeQueryInput` pointing at the test repository.
    pub fn make_code_query_input() -> CodeQueryInput {
        CodeQueryInput {
            git_url: Some(TEST_GIT_URL.to_owned()),
            source_id: Some(TEST_SOURCE_ID.to_owned()),
            filepath: Some(TEST_FILEPATH.to_owned()),
            language: Some(TEST_LANGUAGE.to_owned()),
            content: TEST_CONTENT.to_owned(),
        }
    }

    /// Wraps a single GitHub repository source in a `ContextInfoHelper`.
    pub fn make_context_info_helper() -> ContextInfoHelper {
        let repository = Repository {
            id: ID::from(TEST_SOURCE_ID.to_owned()),
            source_id: TEST_SOURCE_ID.to_owned(),
            name: "tabby".to_owned(),
            kind: RepositoryKind::Github,
            dir: PathBuf::from("tabby"),
            git_url: TEST_GIT_URL.to_owned(),
            refs: vec![],
        };
        ContextInfoHelper::new(&ContextInfo {
            sources: vec![ContextSourceValue::Repository(repository)],
        })
    }

    /// Builds a thread `Message` with the given id, content, role and
    /// optional attachment. Both timestamps are taken at call time; a
    /// missing attachment falls back to its `Default`.
    pub fn make_message(
        id: i32,
        content: &str,
        role: tabby_schema::thread::Role,
        attachment: Option<tabby_schema::thread::MessageAttachment>,
    ) -> tabby_schema::thread::Message {
        tabby_schema::thread::Message {
            id: id.as_id(),
            thread_id: ID::new("0"),
            content: content.to_owned(),
            role,
            attachment: attachment.unwrap_or_default(),
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
        }
    }
}
180 changes: 180 additions & 0 deletions ee/tabby-webserver/src/mock/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
pub mod helper;

use async_openai::{
error::OpenAIError,
types::{
ChatChoice, ChatChoiceStream, ChatCompletionResponseMessage, ChatCompletionResponseStream,
ChatCompletionStreamResponseDelta, CompletionUsage, CreateChatCompletionRequest,
CreateChatCompletionResponse, CreateChatCompletionStreamResponse, FinishReason, Role,
},
};
use axum::async_trait;
use tabby_common::api::{
code::{CodeSearch, CodeSearchError, CodeSearchParams, CodeSearchQuery, CodeSearchResponse},
doc::{DocSearch, DocSearchDocument, DocSearchError, DocSearchHit, DocSearchResponse},
};
use tabby_inference::ChatCompletionStream;
use tabby_schema::{
context::{ContextInfo, ContextService},
policy::AccessPolicy,
Result,
};

/// Mock [`ChatCompletionStream`] that returns canned completions for tests.
pub struct MockChatCompletionStream;
Sma1lboy marked this conversation as resolved.
Show resolved Hide resolved
#[async_trait]
impl ChatCompletionStream for MockChatCompletionStream {
async fn chat(
&self,
_request: CreateChatCompletionRequest,
) -> Result<CreateChatCompletionResponse, OpenAIError> {
Ok(CreateChatCompletionResponse {
id: "test-response".to_owned(),
created: 0,
model: "ChatTabby".to_owned(),
object: "chat.completion".to_owned(),
choices: vec![ChatChoice {
index: 0,
message: ChatCompletionResponseMessage {
role: Role::Assistant,
content: Some(
"1. What is the main functionality of the provided code?\n\
2. How does the code snippet implement a web server?\n\
3. Can you explain how the Flask app works in this context?"
.to_string(),
),
tool_calls: None,
function_call: None,
},
finish_reason: Some(FinishReason::Stop),
logprobs: None,
}],
system_fingerprint: Some("seed".to_owned()),
usage: Some(CompletionUsage {
prompt_tokens: 1,
completion_tokens: 2,
total_tokens: 3,
}),
})
}

async fn chat_stream(
&self,
_request: CreateChatCompletionRequest,
) -> Result<ChatCompletionResponseStream, OpenAIError> {
let stream = futures::stream::iter(vec![
Ok(CreateChatCompletionStreamResponse {
id: "test-stream-response".to_owned(),
created: 0,
model: "ChatTabby".to_owned(),
object: "chat.completion.chunk".to_owned(),
choices: vec![ChatChoiceStream {
index: 0,
delta: ChatCompletionStreamResponseDelta {
role: Some(Role::Assistant),
content: Some("This is the first part of the response. ".to_string()),
function_call: None,
tool_calls: None,
},
finish_reason: None,
logprobs: None,
}],
system_fingerprint: Some("seed".to_owned()),
}),
Ok(CreateChatCompletionStreamResponse {
id: "test-stream-response".to_owned(),
created: 0,
model: "ChatTabby".to_owned(),
object: "chat.completion.chunk".to_owned(),
choices: vec![ChatChoiceStream {
index: 0,
delta: ChatCompletionStreamResponseDelta {
role: None,
content: Some("This is the second part of the response.".to_string()),
function_call: None,
tool_calls: None,
},
finish_reason: Some(FinishReason::Stop),
logprobs: None,
}],
system_fingerprint: Some("seed".to_owned()),
}),
]);

Ok(Box::pin(stream) as ChatCompletionResponseStream)
}
}
/// Mock [`CodeSearch`] whose searches always succeed with no hits.
pub struct MockCodeSearch;

#[async_trait]
impl CodeSearch for MockCodeSearch {
    /// Ignores the query and parameters and returns an empty hit list.
    async fn search_in_language(
        &self,
        _query: CodeSearchQuery,
        _params: CodeSearchParams,
    ) -> Result<CodeSearchResponse, CodeSearchError> {
        let hits = Vec::new();
        Ok(CodeSearchResponse { hits })
    }
}

/// Mock [`DocSearch`] that returns five canned documents.
pub struct MockDocSearch;

#[async_trait]
impl DocSearch for MockDocSearch {
    /// Ignores the source ids, query and limit, and returns documents
    /// 1 through 5 with scores descending from 1.0 to 0.6.
    async fn search(
        &self,
        _source_ids: &[String],
        _q: &str,
        _limit: usize,
    ) -> Result<DocSearchResponse, DocSearchError> {
        let scores = [1.0, 0.9, 0.8, 0.7, 0.6];
        let hits = scores
            .iter()
            .enumerate()
            .map(|(idx, &score)| {
                // Documents are numbered starting at 1.
                let i = idx + 1;
                DocSearchHit {
                    score,
                    doc: DocSearchDocument {
                        title: format!("Document {i}"),
                        link: format!("https://example.com/doc{i}"),
                        snippet: format!("Snippet for Document {i}"),
                    },
                }
            })
            .collect();
        Ok(DocSearchResponse { hits })
    }
}

/// Mock [`ContextService`] that reports no context sources.
pub struct MockContextService;

#[async_trait]
impl ContextService for MockContextService {
    /// Ignores the access policy and returns an empty source list.
    async fn read(&self, _policy: Option<&AccessPolicy>) -> Result<ContextInfo> {
        let sources = Vec::new();
        Ok(ContextInfo { sources })
    }
}
Loading
Loading