
Commit ccfe9da

Merge pull request #106 from graniet/feat/workspace
test: Handle OpenRouter no endpoints in backend tests accurately
2 parents 9724ed6 + 72f2ba5 commit ccfe9da


1 file changed: +140 −38 lines changed

tests/test_backends.rs

Lines changed: 140 additions & 38 deletions
@@ -2,6 +2,7 @@ use futures::StreamExt;
 use llm::{
     builder::{FunctionBuilder, LLMBackend, LLMBuilder, ParamBuilder},
     chat::{ChatMessage, StructuredOutputFormat},
+    error::LLMError,
     models::ModelListRequest,
 };
 use rstest::rstest;
@@ -40,6 +41,32 @@ fn clean_response_text_for_backend(response_text: &str, backend_name: &str) -> S
     }
 }
 
+const OPENROUTER_404_MARKER: &str = "OpenRouter API returned error status: 404";
+const OPENROUTER_NO_ENDPOINTS_MARKER: &str = "No endpoints found";
+
+fn skip_openrouter_no_endpoints(backend_name: &str, err: &LLMError, test_name: &str) -> bool {
+    if backend_name != "openrouter" {
+        return false;
+    }
+    match err {
+        LLMError::ResponseFormatError {
+            message,
+            raw_response,
+        } => {
+            if message.contains(OPENROUTER_404_MARKER)
+                && raw_response.contains(OPENROUTER_NO_ENDPOINTS_MARKER)
+            {
+                eprintln!(
+                    "test {test_name} ... ignored, OpenRouter returned no eligible endpoints"
+                );
+                return true;
+            }
+        }
+        _ => {}
+    }
+    false
+}
+
 #[derive(Debug, Clone)]
 struct BackendTestConfig {
     backend: LLMBackend,
@@ -171,7 +198,12 @@ async fn test_chat(#[case] config: &BackendTestConfig) {
                 usage.total_tokens
             );
         }
-        Err(e) => panic!("Chat error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat") {
+                return;
+            }
+            panic!("Chat error for {}: {e}", config.backend_name);
+        }
     }
 }
 
@@ -324,7 +356,12 @@ async fn test_chat_with_tools(#[case] config: &BackendTestConfig) {
                 usage.total_tokens
             );
         }
-        Err(e) => panic!("Chat with tools error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_with_tools") {
+                return;
+            }
+            panic!("Chat with tools error for {}: {e}", config.backend_name);
+        }
     }
 }
 
@@ -466,10 +503,16 @@ async fn test_chat_structured_output(#[case] config: &BackendTestConfig) {
                 usage.total_tokens
             );
         }
-        Err(e) => panic!(
-            "Chat with structured output error for {}: {e}",
-            config.backend_name
-        ),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_structured_output")
+            {
+                return;
+            }
+            panic!(
+                "Chat with structured output error for {}: {e}",
+                config.backend_name
+            );
+        }
     }
 }
 
@@ -508,7 +551,7 @@ async fn test_chat_stream_struct(#[case] config: &BackendTestConfig) {
     match llm.chat_stream_struct(&messages).await {
         Ok(mut stream) => {
             let mut complete_text = String::new();
-            // NOTE: groq and cohere do not return usage in stream responses
+            // NOTE: groq and cohere may omit usage in stream responses
             let mut usage_data = None;
             while let Some(chunk_result) = stream.next().await {
                 match chunk_result {
@@ -522,20 +565,23 @@ async fn test_chat_stream_struct(#[case] config: &BackendTestConfig) {
                             usage_data = Some(usage);
                         }
                     }
-                    Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+                    Err(e) => {
+                        if skip_openrouter_no_endpoints(
+                            config.backend_name,
+                            &e,
+                            "test_chat_stream_struct",
+                        ) {
+                            return;
+                        }
+                        panic!("Stream error for {}: {e}", config.backend_name);
+                    }
                 }
             }
             assert!(
                 !complete_text.is_empty(),
                 "Expected response message, got empty text"
             );
-            if config.backend_name == "groq" || config.backend_name == "cohere" {
-                // Groq and Cohere do not return usage in streamed chat responses
-                assert!(
-                    usage_data.is_none(),
-                    "Expected no usage data for Groq/Cohere"
-                );
-            } else if let Some(usage) = usage_data {
+            if let Some(usage) = usage_data {
                 assert!(
                     usage.prompt_tokens > 0,
                     "Expected prompt tokens > 0, got {}",
@@ -546,11 +592,18 @@ async fn test_chat_stream_struct(#[case] config: &BackendTestConfig) {
                     "Expected total tokens > 0, got {}",
                     usage.total_tokens
                 );
+            } else if config.backend_name == "groq" || config.backend_name == "cohere" {
+                // Groq and Cohere may omit usage in streamed chat responses
             } else {
                 panic!("Expected usage data in response");
             }
         }
-        Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_stream_struct") {
+                return;
+            }
+            panic!("Stream error for {}: {e}", config.backend_name);
+        }
     }
 }
 
@@ -614,16 +667,19 @@ async fn test_chat_stream_tools(#[case] config: &BackendTestConfig) {
                             usage_data = Some(usage);
                         }
                     }
-                    Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+                    Err(e) => {
+                        if skip_openrouter_no_endpoints(
+                            config.backend_name,
+                            &e,
+                            "test_chat_stream_tools",
+                        ) {
+                            return;
+                        }
+                        panic!("Stream error for {}: {e}", config.backend_name);
+                    }
                 }
             }
-            if config.backend_name == "groq" || config.backend_name == "cohere" {
-                // Groq and Cohere do not return usage in streamed chat responses
-                assert!(
-                    usage_data.is_none(),
-                    "Expected no usage data for Groq/Cohere"
-                );
-            } else if let Some(usage) = usage_data {
+            if let Some(usage) = usage_data {
                 assert!(
                     usage.prompt_tokens > 0,
                     "Expected prompt tokens > 0, got {}",
@@ -634,11 +690,18 @@ async fn test_chat_stream_tools(#[case] config: &BackendTestConfig) {
                     "Expected total tokens > 0, got {}",
                     usage.total_tokens
                 );
+            } else if config.backend_name == "groq" || config.backend_name == "cohere" {
+                // Groq and Cohere may omit usage in streamed chat responses
             } else {
                 panic!("Expected usage data in response");
             }
         }
-        Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_stream_tools") {
+                return;
+            }
+            panic!("Stream error for {}: {e}", config.backend_name);
+        }
     }
     assert!(
         tool_call_chunks > 0,
@@ -660,15 +723,29 @@ async fn test_chat_stream_tools(#[case] config: &BackendTestConfig) {
                             }
                         }
                     }
-                    Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+                    Err(e) => {
+                        if skip_openrouter_no_endpoints(
+                            config.backend_name,
+                            &e,
+                            "test_chat_stream_tools",
+                        ) {
+                            return;
+                        }
+                        panic!("Stream error for {}: {e}", config.backend_name);
+                    }
                 }
             }
             assert!(
                 !complete_text.is_empty(),
                 "Expected response message, got empty text"
             );
         }
-        Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_stream_tools") {
+                return;
+            }
+            panic!("Stream error for {}: {e}", config.backend_name);
+        }
     }
 }
 
@@ -732,16 +809,19 @@ async fn test_chat_stream_tools_normalized(#[case] config: &BackendTestConfig) {
                             usage_data = Some(usage);
                         }
                     }
-                    Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+                    Err(e) => {
+                        if skip_openrouter_no_endpoints(
+                            config.backend_name,
+                            &e,
+                            "test_chat_stream_tools_normalized",
+                        ) {
+                            return;
+                        }
+                        panic!("Stream error for {}: {e}", config.backend_name);
+                    }
                 }
             }
-            if config.backend_name == "groq" || config.backend_name == "cohere" {
-                // Groq and Cohere do not return usage in streamed chat responses
-                assert!(
-                    usage_data.is_none(),
-                    "Expected no usage data for Groq/Cohere"
-                );
-            } else if let Some(usage) = usage_data {
+            if let Some(usage) = usage_data {
                 assert!(
                     usage.prompt_tokens > 0,
                     "Expected prompt tokens > 0, got {}",
@@ -752,11 +832,22 @@ async fn test_chat_stream_tools_normalized(#[case] config: &BackendTestConfig) {
                     "Expected total tokens > 0, got {}",
                     usage.total_tokens
                 );
+            } else if config.backend_name == "groq" || config.backend_name == "cohere" {
+                // Groq and Cohere may omit usage in streamed chat responses
             } else {
                 panic!("Expected usage data in response");
             }
         }
-        Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(
+                config.backend_name,
+                &e,
+                "test_chat_stream_tools_normalized",
+            ) {
+                return;
+            }
+            panic!("Stream error for {}: {e}", config.backend_name);
+        }
     }
     assert_eq!(
         tool_call_chunks, 1,
@@ -801,7 +892,13 @@ async fn test_chat_stream(#[case] config: &BackendTestConfig) {
             while let Some(chunk_result) = stream.next().await {
                 match chunk_result {
                     Ok(content) => complete_text.push_str(&content),
-                    Err(e) => panic!("Stream error: {e}"),
+                    Err(e) => {
+                        if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_stream")
+                        {
+                            return;
+                        }
+                        panic!("Stream error: {e}");
+                    }
                 }
             }
             assert!(
@@ -810,7 +907,12 @@ async fn test_chat_stream(#[case] config: &BackendTestConfig) {
                 config.backend_name
             );
         }
-        Err(e) => panic!("Stream error for {}: {e}", config.backend_name),
+        Err(e) => {
+            if skip_openrouter_no_endpoints(config.backend_name, &e, "test_chat_stream") {
+                return;
+            }
+            panic!("Stream error for {}: {e}", config.backend_name);
+        }
     }
 }
 