fix: proper error handling in /hpo/terms (#156) (#175)
holtgrewe authored Jul 11, 2024
1 parent 5c40e30 commit c0436de
Showing 7 changed files with 77 additions and 39 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/rust.yml
@@ -115,4 +115,6 @@ jobs:
run: |
cargo run -- server schema --output-file /tmp/openapi.yaml
set -e
diff openapi.yaml /tmp/openapi.yaml
diff \
<(sed 's/^ version:.*$/ version: x.y.z/g' openapi.yaml) \
<(sed 's/^ version:.*$/ version: x.y.z/g' /tmp/openapi.yaml)
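Note on the workflow change above: the schema check regenerates the OpenAPI document and diffs it against the committed openapi.yaml, but both sides now have their version: line rewritten to the placeholder x.y.z first, so a version bump alone no longer fails CI. The Rust sketch below re-expresses that normalization purely for illustration; the normalize_version helper and the use of the regex crate are assumptions, not project code (the workflow itself uses sed and process substitution).

// Illustrative only: mask the version line in both documents before comparing,
// so only real schema changes produce a mismatch. Assumes the `regex` crate.
fn normalize_version(yaml: &str) -> String {
    let re = regex::Regex::new(r"(?m)^( *)version:.*$").expect("static pattern is valid");
    re.replace_all(yaml, "${1}version: x.y.z").into_owned()
}

fn main() {
    let committed = "info:\n  title: example\n  version: 0.2.0\n";
    let generated = "info:\n  title: example\n  version: 0.3.1\n";
    assert_eq!(normalize_version(committed), normalize_version(generated));
    println!("schemas match after masking the version line");
}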
18 changes: 18 additions & 0 deletions openapi.yaml
@@ -416,6 +416,24 @@ components:
name:
type: string
description: The term name.
Result_:
type: object
title: HpoTermsResult
description: Container for the result.
required:
- version
- query
- result
properties:
version:
$ref: '#/components/schemas/Version'
query:
$ref: '#/components/schemas/Query'
result:
type: array
items:
$ref: '#/components/schemas/ResultEntry'
description: The resulting records for the scored genes.
ScoreCombiner:
type: string
description: |-
80 changes: 49 additions & 31 deletions src/server/run/hpo_terms.rs
@@ -94,6 +94,10 @@ impl Ord for ResultEntry {

impl ResultEntry {
/// Create a `ResultEntry` from an `HpoTerm`.
///
/// # Errors
///
/// In the case that there is an error parsing the term.
#[allow(clippy::missing_panics_doc)]
pub fn from_term_with_ontology(
term: &HpoTerm,
@@ -102,26 +106,26 @@ impl ResultEntry {
ncbi_to_hgnc: &HashMap<u32, String>,
index: &crate::index::Index,
doc: Option<&tantivy::Document>,
) -> Self {
) -> Result<Self, anyhow::Error> {
let field_term_id = index
.schema()
.get_field("term_id")
.expect("field must exist");
.map_err(|e| anyhow::anyhow!("field term_id must exist: {}", e))?;
let field_def = index
.index()
.schema()
.get_field("def")
.expect("field must exist");
.map_err(|e| anyhow::anyhow!("field def must exist: {}", e))?;
let field_synonym = index
.index()
.schema()
.get_field("synonym")
.expect("field must exist");
.map_err(|e| anyhow::anyhow!("field synonym must exist: {}", e))?;
let field_xref = index
.index()
.schema()
.get_field("xref")
.expect("field must exist");
.map_err(|e| anyhow::anyhow!("field xref must exist: {}", e))?;

let searcher = index.reader().searcher();
let doc = if let Some(doc) = doc {
@@ -131,14 +135,14 @@ impl ResultEntry {
tantivy::query::QueryParser::for_index(index.index(), vec![field_term_id]);
let query = query_parser
.parse_query(&format!("\"{}\"", term.id()))
.expect("bad term ID query");
.map_err(|e| anyhow::anyhow!("problem with term ID query: {}", e))?;
let top_docs = searcher
.search(&query, &tantivy::collector::TopDocs::with_limit(1))
.expect("problemw ith term ID search");
.map_err(|e| anyhow::anyhow!("problem searching for query: {}", e))?;

searcher
.doc(top_docs[0].1)
.expect("problem with term ID query")
.map_err(|e| anyhow::anyhow!("problem retrieving document: {}", e))?
};

let definition = doc
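The two hunks above show the core of the fix: each .expect(...) that could panic on a missing schema field, a bad query, or a failed search is replaced with map_err(...) plus ?, so the failure is reported as an anyhow::Error instead of crashing the handler. A minimal, self-contained sketch of that pattern follows; the FakeSchema type and lookup_field helper are invented for illustration and are not part of the project.

use std::collections::HashMap;

// Stand-in for a schema whose field lookup can fail.
struct FakeSchema {
    fields: HashMap<&'static str, u32>,
}

impl FakeSchema {
    fn get_field(&self, name: &str) -> Result<u32, String> {
        self.fields
            .get(name)
            .copied()
            .ok_or_else(|| format!("unknown field {name}"))
    }
}

// Before: schema.get_field(name).expect("field must exist") would panic.
// After: the failure becomes a recoverable error the caller can report.
fn lookup_field(schema: &FakeSchema, name: &str) -> Result<u32, anyhow::Error> {
    schema
        .get_field(name)
        .map_err(|e| anyhow::anyhow!("field {} must exist: {}", name, e))
}

fn main() -> Result<(), anyhow::Error> {
    let schema = FakeSchema {
        fields: HashMap::from([("term_id", 0u32)]),
    };
    println!("resolved field index: {}", lookup_field(&schema, "term_id")?);
    assert!(lookup_field(&schema, "missing").is_err());
    Ok(())
}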
@@ -177,21 +181,21 @@ impl ResultEntry {
} else {
None
};
ResultEntry {
Ok(ResultEntry {
term_id: term.id().to_string(),
name: term.name().to_string(),
genes,
definition,
synonyms,
xrefs,
}
})
}
}

/// Container for the result.
#[derive(Debug, serde::Serialize, serde::Deserialize, utoipa::ToSchema)]
#[schema(title = "HpoTermsResult")]
pub struct Result {
pub struct Result_ {
/// Version information.
pub version: Version,
/// The original query records.
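The rename from Result to Result_ above goes together with the new fallible constructor: once functions in this module return Result<Self, anyhow::Error>, a local struct named Result would shadow the prelude's std::result::Result and break those signatures. The snippet below is a toy illustration of that clash, not code from the project.

mod shadowed {
    // A local `Result` struct shadows the prelude's `std::result::Result`
    // inside this module, so the signature below no longer refers to the enum.
    pub struct Result {
        pub value: u32,
    }

    // Does not compile: this `Result` takes no type parameters.
    // pub fn try_get() -> Result<u32, String> { Ok(42) }
}

mod renamed {
    // With the trailing underscore there is no clash with the prelude.
    pub struct Result_ {
        pub value: u32,
    }

    pub fn try_get() -> Result<Result_, String> {
        Ok(Result_ { value: 42 })
    }
}

fn main() {
    let r = renamed::try_get().expect("toy example always succeeds");
    println!("{}", r.value);
}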
@@ -260,18 +264,29 @@ async fn handle(
.expect("field must exist");

if let Some(term_id) = &query.term_id {
let re = regex::Regex::new(r"^hp:\d+$").unwrap();
if !re.is_match(&term_id.to_lowercase()) {
return Err(CustomError::new(anyhow::anyhow!(
"Invalid term ID: {}",
term_id
)));
}

let term_id = HpoTermId::from(term_id.clone());
let term = ontology.hpo(term_id).ok_or_else(|| {
CustomError::new(anyhow::anyhow!("Term ID {} not found in HPO", term_id))
})?;
result.push(ResultEntry::from_term_with_ontology(
&term,
ontology,
query.genes,
&data.ncbi_to_hgnc,
&data.full_text_index,
None,
));
result.push(
ResultEntry::from_term_with_ontology(
&term,
ontology,
query.genes,
&data.ncbi_to_hgnc,
&data.full_text_index,
None,
)
.map_err(|e| CustomError::new(anyhow::anyhow!("Problem parsing term: {}", e)))?,
);
} else if let Some(name) = &query.name {
let searcher = data.full_text_index.reader().searcher();
let query_parser = {
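The hunk above also adds up-front validation of the term_id query parameter: anything that does not match ^hp:\d+$ (checked case-insensitively by lowercasing the input) is rejected with a CustomError instead of being handed straight to HpoTermId::from. Below is a minimal standalone sketch of that check; the validate_term_id helper is hypothetical, while the pattern and the anyhow and regex crates match what the diff uses.

// A minimal sketch of the added validation, assuming the `regex` and `anyhow`
// crates; the `validate_term_id` helper is illustrative, not project code.
fn validate_term_id(term_id: &str) -> Result<(), anyhow::Error> {
    let re = regex::Regex::new(r"^hp:\d+$")?;
    if !re.is_match(&term_id.to_lowercase()) {
        anyhow::bail!("Invalid term ID: {}", term_id);
    }
    Ok(())
}

fn main() -> Result<(), anyhow::Error> {
    assert!(validate_term_id("HP:0000118").is_ok());
    assert!(validate_term_id("not-a-term").is_err());
    Ok(())
}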
@@ -295,7 +310,7 @@ async fn handle(
query_parser.set_field_fuzzy(field_synonym, true, 1, true);
query_parser
};
let name = if name.contains(":") {
let name = if name.contains(':') {
format!("\"{name}\"")
} else {
name.to_string()
@@ -326,18 +341,21 @@ async fn handle(
CustomError::new(anyhow::anyhow!("Term ID {} not found in HPO", term_id))
})?;

result.push(ResultEntry::from_term_with_ontology(
&term,
ontology,
query.genes,
&data.ncbi_to_hgnc,
&data.full_text_index,
Some(&retrieved_doc),
));
result.push(
ResultEntry::from_term_with_ontology(
&term,
ontology,
query.genes,
&data.ncbi_to_hgnc,
&data.full_text_index,
Some(&retrieved_doc),
)
.map_err(|e| CustomError::new(anyhow::anyhow!("Problem parsing term: {}", e)))?,
);
}
};

let result = Result {
let result = Result_ {
version: Version::new(&data.ontology.hpo_version()),
query: query.into_inner(),
result,
@@ -357,15 +375,15 @@ mod test {
pub async fn run_query(
web_server_data: Arc<crate::server::run::WebServerData>,
uri: &str,
) -> Result<super::Result, anyhow::Error> {
) -> Result<super::Result_, anyhow::Error> {
let app = actix_web::test::init_service(
actix_web::App::new()
.app_data(actix_web::web::Data::new(web_server_data))
.service(super::handle),
)
.await;
let req = actix_web::test::TestRequest::get().uri(uri).to_request();
let resp: super::Result = actix_web::test::call_and_read_body_json(&app, req).await;
let resp: super::Result_ = actix_web::test::call_and_read_body_json(&app, req).await;

Ok(resp)
}
2 changes: 1 addition & 1 deletion src/server/run/mod.rs
@@ -170,7 +170,7 @@ where
hpo_omims::Result,
hpo_omims::ResultEntry,
hpo_terms::Query,
hpo_terms::Result,
hpo_terms::Result_,
hpo_terms::ResultEntry,
hpo_sim::term_gene::Query,
crate::query::query_result::Result,
4 changes: 2 additions & 2 deletions tests/data/hpo/bootstraph.sh
Git LFS file not shown
4 changes: 2 additions & 2 deletions tests/data/hpo/hp.obo
Git LFS file not shown
4 changes: 2 additions & 2 deletions tests/data/hpo/hpo.bin
Git LFS file not shown
