Add prompt, raw_output and error message to BamlValidationError in TS and Python (#1005)

- Add the prompt and raw output metadata to the errors. When printing
them out, the error message now comes first.
- Also make baml-init less confusing -- the default client names are
now prefixed with "Custom".
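A sketch of how the enriched error could be caught from generated TypeScript (ExtractResume is the sample function from the init template; the prompt and raw_output property names follow the PR title, and the ./baml_client import path assumes the generated client re-exports the error, as the index.ts change below does):

import { b, BamlValidationError } from "./baml_client"

async function main() {
  try {
    const resume = await b.ExtractResume("Vaibhav Gupta, software engineer, ...")
    console.log(resume)
  } catch (err) {
    if (err instanceof BamlValidationError) {
      // New in this commit: the error carries the prompt and raw LLM output.
      console.error(err.message)    // printed first, per the change above
      console.error(err.prompt)     // assumed property name, per the PR title
      console.error(err.raw_output) // assumed property name, per the PR title
    } else {
      throw err
    }
  }
}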
aaronvg authored Oct 2, 2024
1 parent c063116 commit 447dbf4
Showing 27 changed files with 8,938 additions and 7,699 deletions.
@@ -1,22 +1,22 @@
// Learn more about clients at https://docs.boundaryml.com/docs/snippets/clients/overview

-client<llm> GPT4o {
+client<llm> CustomGPT4o {
provider openai
options {
model "gpt-4o"
api_key env.OPENAI_API_KEY
}
}

-client<llm> GPT4oMini {
+client<llm> CustomGPT4oMini {
provider openai
options {
model "gpt-4o-mini"
api_key env.OPENAI_API_KEY
}
}

-client<llm> Sonnet {
+client<llm> CustomSonnet {
provider anthropic
options {
model "claude-3-5-sonnet-20240620"
@@ -25,23 +25,23 @@ client<llm> Sonnet {
}


-client<llm> Haiku {
+client<llm> CustomHaiku {
provider anthropic
options {
model "claude-3-haiku-20240307"
api_key env.ANTHROPIC_API_KEY
}
}

-client<llm> Fast {
+client<llm> CustomFast {
provider round-robin
options {
// This will alternate between the two clients
strategy [GPT4oMini, Haiku]
}
}

-client<llm> Openai {
+client<llm> OpenaiFallback {
provider fallback
options {
// This will try the clients in order until one succeeds
@@ -6,8 +6,10 @@ class Resume {
skills string[]
}

-// Creating a function to extract the resume from a string.
+// Create a function to extract the resume from a string.
function ExtractResume(resume: string) -> Resume {
+// Specify a client as provider/model-name
+// you can use custom LLM params with a custom client name from clients.baml like "client CustomHaiku"
client "openai/gpt-4o" // Set OPENAI_API_KEY to use this client.
prompt #"
Extract from this content:
@@ -17,7 +19,7 @@ function ExtractResume(resume: string) -> Resume {
"#
}

-// Testing the function with a sample resume.
+// Test the function with a sample resume. Open the VSCode playground to run this.
test vaibhav_resume {
functions [ExtractResume]
args {
6 changes: 5 additions & 1 deletion engine/baml-runtime/src/cli/serve/error.rs
@@ -29,7 +29,11 @@ impl BamlError {
pub(crate) fn from_anyhow(err: anyhow::Error) -> Self {
if let Some(er) = err.downcast_ref::<ExposedError>() {
match er {
-ExposedError::ValidationError(_) => Self::ValidationFailure(format!("{:?}", err)),
+ExposedError::ValidationError {
+prompt,
+raw_response,
+message,
+} => Self::ValidationFailure(format!("{:?}", err)),
}
} else if let Some(er) = err.downcast_ref::<ScopeStack>() {
Self::InvalidArgument(format!("{:?}", er))
18 changes: 15 additions & 3 deletions engine/baml-runtime/src/errors.rs
@@ -1,15 +1,27 @@
pub enum ExposedError {
/// Error in parsing post calling the LLM
-ValidationError(String),
+ValidationError {
+prompt: String,
+raw_response: String,
+message: String,
+},
}

impl std::error::Error for ExposedError {}

impl std::fmt::Display for ExposedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
-ExposedError::ValidationError(err) => {
-write!(f, "Parsing error: {}", err)
+ExposedError::ValidationError {
+prompt,
+raw_response,
+message,
+} => {
+write!(
+f,
+"Parsing error: {}\nPrompt: {}\nRaw Response: {}",
+message, prompt, raw_response
+)
}
}
}
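For illustration, a parse failure would now render along these lines (values invented for the example; only the layout follows the format string above):

Parsing error: Failed to parse LLM response: missing field `skills`
Prompt: Extract from this content: ...
Raw Response: { "name": "Vaibhav Gupta", "education": [] }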
33 changes: 30 additions & 3 deletions engine/baml-runtime/src/types/response.rs
@@ -102,9 +102,36 @@ impl FunctionResult {
if let Ok(val) = res {
Ok(val)
} else {
-Err(anyhow::anyhow!(
-crate::errors::ExposedError::ValidationError(format!("{}", self))
-))
+// Capture the actual error to preserve its details
+let actual_error = res.as_ref().err().unwrap().to_string();
+Err(anyhow::anyhow!(ExposedError::ValidationError {
+prompt: match self.llm_response() {
+LLMResponse::Success(resp) => resp.prompt.to_string(),
+LLMResponse::LLMFailure(err) => err.prompt.to_string(),
+_ => "N/A".to_string(),
+},
+raw_response: self
+.llm_response()
+.content()
+.unwrap_or_default()
+.to_string(),
+// The only branch that should be hit is LLMResponse::Success(_) since we
+// only call this function when we have a successful response.
+message: match self.llm_response() {
+LLMResponse::Success(_) =>
+format!("Failed to parse LLM response: {}", actual_error),
+LLMResponse::LLMFailure(err) => format!(
+"LLM Failure: {} ({}) - {}",
+err.message,
+err.code.to_string(),
+actual_error
+),
+LLMResponse::UserFailure(err) =>
+format!("User Failure: {} - {}", err, actual_error),
+LLMResponse::InternalFailure(err) =>
+format!("Internal Failure: {} - {}", err, actual_error),
+},
+}))
}
})
.unwrap_or_else(|| Err(anyhow::anyhow!(self.llm_response().clone())))
@@ -1,4 +1,4 @@
-import { BamlRuntime, FunctionResult, BamlCtxManager, BamlStream, Image, ClientRegistry } from "@boundaryml/baml"
+import { BamlRuntime, FunctionResult, BamlCtxManager, BamlStream, Image, ClientRegistry, BamlValidationError, createBamlValidationError } from "@boundaryml/baml"
import {
{%- for t in types %}{{ t }}{% if !loop.last %}, {% endif %}{% endfor -%}
} from "./types"
@@ -33,18 +33,27 @@ export class BamlAsyncClient {
{%- endfor %}
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry }
): Promise<{{fn.return_type}}> {
-const raw = await this.runtime.callFunction(
-"{{fn.name}}",
-{
-{% for (name, optional, type) in fn.args -%}
-"{{name}}": {{name}}{% if optional %}?? null{% endif %}{% if !loop.last %},{% endif %}
-{%- endfor %}
-},
-this.ctx_manager.cloneContext(),
-__baml_options__?.tb?.__tb(),
-__baml_options__?.clientRegistry,
-)
-return raw.parsed() as {{fn.return_type}}
+try {
+const raw = await this.runtime.callFunction(
+"{{fn.name}}",
+{
+{% for (name, optional, type) in fn.args -%}
+"{{name}}": {{name}}{% if optional %}?? null{% endif %}{% if !loop.last %},{% endif %}
+{%- endfor %}
+},
+this.ctx_manager.cloneContext(),
+__baml_options__?.tb?.__tb(),
+__baml_options__?.clientRegistry,
+)
+return raw.parsed() as {{fn.return_type}}
+} catch (error: any) {
+const bamlError = createBamlValidationError(error);
+if (bamlError instanceof BamlValidationError) {
+throw bamlError;
+} else {
+throw error;
+}
+}
}
{% endfor %}
}
@@ -59,25 +68,35 @@ class BamlStreamClient {
{%- endfor %}
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry }
): BamlStream<RecursivePartialNull<{{ fn.return_type }}>, {{ fn.return_type }}> {
-const raw = this.runtime.streamFunction(
-"{{fn.name}}",
-{
-{% for (name, optional, type) in fn.args -%}
-"{{name}}": {{name}}{% if optional %} ?? null{% endif %}{% if !loop.last %},{% endif %}
-{%- endfor %}
-},
-undefined,
-this.ctx_manager.cloneContext(),
-__baml_options__?.tb?.__tb(),
-__baml_options__?.clientRegistry,
-)
-return new BamlStream<RecursivePartialNull<{{ fn.return_type }}>, {{ fn.return_type }}>(
-raw,
-(a): a is RecursivePartialNull<{{ fn.return_type }}> => a,
-(a): a is {{ fn.return_type }} => a,
-this.ctx_manager.cloneContext(),
-__baml_options__?.tb?.__tb(),
-)
+try {
+const raw = this.runtime.streamFunction(
+"{{fn.name}}",
+{
+{% for (name, optional, type) in fn.args -%}
+"{{name}}": {{name}}{% if optional %} ?? null{% endif %}{% if !loop.last %},{% endif %}
+{%- endfor %}
+},
+undefined,
+this.ctx_manager.cloneContext(),
+__baml_options__?.tb?.__tb(),
+__baml_options__?.clientRegistry,
+)
+return new BamlStream<RecursivePartialNull<{{ fn.return_type }}>, {{ fn.return_type }}>(
+raw,
+(a): a is RecursivePartialNull<{{ fn.return_type }}> => a,
+(a): a is {{ fn.return_type }} => a,
+this.ctx_manager.cloneContext(),
+__baml_options__?.tb?.__tb(),
+)
+} catch (error) {
+if (error instanceof Error) {
+const bamlError = createBamlValidationError(error);
+if (bamlError instanceof BamlValidationError) {
+throw bamlError;
+}
+}
+throw error;
+}
}
{% endfor %}
}
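A sketch of handling errors from the generated stream client (the async-iteration and getFinalResponse pattern follows BAML's documented streaming usage; ExtractResume and the ./baml_client path are assumptions from the init template):

import { b, BamlValidationError } from "./baml_client"

async function streamResume(text: string) {
  const stream = b.stream.ExtractResume(text)
  try {
    // Partials arrive as the LLM responds; the final parse happens at the end.
    for await (const partial of stream) {
      console.log("partial:", partial)
    }
    return await stream.getFinalResponse()
  } catch (err) {
    if (err instanceof BamlValidationError) {
      // Enriched with prompt and raw output by this commit.
      console.error(err.message)
      return null
    }
    throw err
  }
}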
@@ -6,3 +6,4 @@ export { b } from "./sync_client"
export * from "./types"
export * from "./tracing"
export { resetBamlEnvVars } from "./globals"
+export { BamlValidationError } from "@boundaryml/baml"
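With this re-export in place, user code can import the error type from the generated client rather than the package root; a one-line sketch (the ./baml_client path is the conventional output directory, an assumption here):

import { BamlValidationError } from "./baml_client"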
@@ -33,6 +33,7 @@ export class BamlSyncClient {
{%- endfor %}
__baml_options__?: { tb?: TypeBuilder, clientRegistry?: ClientRegistry }
): {{fn.return_type}} {
+try {
const raw = this.runtime.callFunctionSync(
"{{fn.name}}",
{
@@ -45,6 +46,14 @@ export class BamlSyncClient {
__baml_options__?.clientRegistry,
)
return raw.parsed() as {{fn.return_type}}
+} catch (error: any) {
+const bamlError = createBamlValidationError(error);
+if (bamlError instanceof BamlValidationError) {
+throw bamlError;
+} else {
+throw error;
+}
+}
}
{% endfor %}
}
11 changes: 10 additions & 1 deletion engine/language_client_python/python_src/baml_py/errors.py
@@ -1,4 +1,13 @@
-from .baml_py import BamlError, BamlClientError, BamlClientHttpError, BamlInvalidArgumentError, BamlValidationError
+from .baml_py import (
+BamlError,
+BamlClientError,
+BamlClientHttpError,
+BamlInvalidArgumentError,
+)
+
+# hack to get the BamlValidationError class which is a custom error
+from .baml_py.errors import BamlValidationError


__all__ = [
"BamlError",
