Skip to content

Commit 9e6ec06

Browse files
update error handling in integration tests (#1178)
<!-- Thank you for opening a pull request! Please add a brief description of the proposed change here. Also, please tick the appropriate points in the checklist below. --> ## Motivation and Context <!-- Why is this change needed? What problem does it solve? --> ## Breaking Changes <!-- Will users need to update their code or configurations? --> --- #### Type of the changes - [ ] New feature (non-breaking change which adds functionality) - [x] Bug fix (non-breaking change which fixes an issue) - [ ] Breaking change (fix or feature that would cause existing functionality to change) - [ ] Documentation update - [ ] Tests improvement - [ ] Refactoring #### Checklist - [ ] The pull request has a description of the proposed change - [ ] I read the [Contributing Guidelines](https://github.com/JetBrains/koog/blob/main/CONTRIBUTING.md) before opening the pull request - [ ] The pull request uses **`develop`** as the base branch - [ ] Tests for the changes have been added - [x] All new and existing tests passed ##### Additional steps for pull requests adding a new feature - [ ] An issue describing the proposed change exists - [ ] The pull request includes a link to the issue - [ ] The change was discussed and approved in the issue - [ ] Docs have been added / updated
1 parent 4a4302b commit 9e6ec06

File tree

1 file changed

+24
-15
lines changed

1 file changed

+24
-15
lines changed

integration-tests/src/jvmTest/kotlin/ai/koog/integration/tests/executor/ExecutorIntegrationTestBase.kt

Lines changed: 24 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -36,6 +36,7 @@ import ai.koog.prompt.dsl.ModerationCategory
3636
import ai.koog.prompt.dsl.Prompt
3737
import ai.koog.prompt.dsl.prompt
3838
import ai.koog.prompt.executor.clients.LLMClient
39+
import ai.koog.prompt.executor.clients.LLMClientException
3940
import ai.koog.prompt.executor.clients.LLMEmbeddingProvider
4041
import ai.koog.prompt.executor.clients.anthropic.AnthropicParams
4142
import ai.koog.prompt.executor.clients.anthropic.models.AnthropicThinking
@@ -473,20 +474,24 @@ abstract class ExecutorIntegrationTestBase {
473474
withRetry {
474475
try {
475476
checkExecutorMediaResponse(getExecutor(model).execute(prompt, model).single())
476-
} catch (e: Exception) {
477+
} catch (e: LLMClientException) {
477478
// For some edge cases, exceptions are expected
478479
when (scenario) {
479480
ImageTestScenario.LARGE_IMAGE_ANTHROPIC, ImageTestScenario.LARGE_IMAGE -> {
480-
(e.message?.shouldContain("400 Bad Request"))
481-
(e.message?.shouldContain("image exceeds"))
481+
val message = e.message.shouldNotBeNull()
482+
483+
message.shouldContain("Status code: 400")
484+
message.shouldContain("image exceeds")
482485
}
483486

484487
ImageTestScenario.CORRUPTED_IMAGE, ImageTestScenario.EMPTY_IMAGE -> {
485-
(e.message?.shouldContain("400 Bad Request"))
488+
val message = e.message.shouldNotBeNull()
489+
490+
message.shouldContain("Status code: 400")
486491
if (model.provider == LLMProvider.Anthropic) {
487-
(e.message?.shouldContain("Could not process image"))
492+
message.shouldContain("Could not process image")
488493
} else if (model.provider == LLMProvider.OpenAI) {
489-
(e.message?.shouldContain("You uploaded an unsupported image. Please make sure your image is valid."))
494+
message.shouldContain("You uploaded an unsupported image. Please make sure your image is valid.")
490495
}
491496
}
492497

@@ -534,19 +539,21 @@ abstract class ExecutorIntegrationTestBase {
534539
withRetry {
535540
try {
536541
checkExecutorMediaResponse(getExecutor(model).execute(prompt, model).single())
537-
} catch (e: Exception) {
542+
} catch (e: LLMClientException) {
538543
when (scenario) {
539544
TextTestScenario.EMPTY_TEXT -> {
540545
if (model.provider == LLMProvider.Google) {
541-
(e.message?.shouldContain("400 Bad Request"))
542-
(e.message?.shouldContain("Unable to submit request because it has an empty inlineData parameter. Add a value to the parameter and try again."))
546+
val message = e.message.shouldNotBeNull()
547+
message.shouldContain("Status code: 400")
548+
message.shouldContain("Unable to submit request because it has an empty inlineData parameter. Add a value to the parameter and try again.")
543549
}
544550
}
545551

546552
TextTestScenario.LONG_TEXT_5_MB -> {
547553
if (model.provider == LLMProvider.Anthropic) {
548-
(e.message?.shouldContain("400 Bad Request"))
549-
(e.message?.shouldContain("prompt is too long"))
554+
val message = e.message.shouldNotBeNull()
555+
message.shouldContain("Status code: 400")
556+
message.shouldContain("prompt is too long")
550557
} else if (model.provider == LLMProvider.Google) {
551558
throw e
552559
}
@@ -582,13 +589,15 @@ abstract class ExecutorIntegrationTestBase {
582589
withRetry(times = 3, testName = "integration_testAudioProcessingBasic[${model.id}]") {
583590
try {
584591
checkExecutorMediaResponse(getExecutor(model).execute(prompt, model).single())
585-
} catch (e: Exception) {
592+
} catch (e: LLMClientException) {
586593
if (scenario == AudioTestScenario.CORRUPTED_AUDIO) {
587-
(e.message?.shouldContain("400 Bad Request"))
594+
val message = e.message.shouldNotBeNull()
595+
596+
message.shouldContain("Status code: 400")
588597
if (model.provider == LLMProvider.OpenAI) {
589-
(e.message?.shouldContain("This model does not support the format you provided."))
598+
message.shouldContain("This model does not support the format you provided.")
590599
} else if (model.provider == LLMProvider.Google) {
591-
(e.message?.shouldContain("Request contains an invalid argument."))
600+
message.shouldContain("Request contains an invalid argument.")
592601
}
593602
} else {
594603
throw e

0 commit comments

Comments
 (0)