
Commit e25a256

chore: review feedback / cleanup
Signed-off-by: Greg Clark <[email protected]>
1 parent fc8cd4e

3 files changed (+8, -12 lines)


lib/llm/src/backend.rs

Lines changed: 0 additions & 1 deletion
@@ -225,7 +225,6 @@ impl
         //let mdcsum = self.mdcsum.clone();
         let stream = processed_stream.map(move |output| {
             output.map_data(|data| {
-                log::info!("data: {:?}", data);
                 Ok(BackendOutput {
                     token_ids: data.token_ids,
                     tokens: data.tokens.unwrap_or_default(),
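For reference, the surviving closure simply maps each stream item into a BackendOutput. Below is a minimal standalone sketch of that pattern using the futures crate; the tuple-shaped stream items and the simplified BackendOutput struct are stand-ins, since the real types and the map_data helper live elsewhere in this crate.

// Sketch only: maps each item of a stream into an output struct, mirroring the
// closure above minus the removed debug log. Types are simplified stand-ins.
use futures::stream::{self, StreamExt};

#[derive(Debug)]
struct BackendOutput {
    token_ids: Vec<u32>,
    tokens: Vec<String>,
}

#[tokio::main]
async fn main() {
    // Pretend each stream item carries token ids plus optional token strings.
    let processed_stream = stream::iter(vec![
        (vec![1u32, 2, 3], Some(vec!["a".to_string(), "b".to_string()])),
        (vec![4u32], None),
    ]);

    // `map` converts every item; `unwrap_or_default()` covers the None case,
    // as in the diff above.
    let outputs: Vec<BackendOutput> = processed_stream
        .map(|(token_ids, tokens)| BackendOutput {
            token_ids,
            tokens: tokens.unwrap_or_default(),
        })
        .collect()
        .await;

    println!("{outputs:?}");
}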

lib/llm/src/protocols/openai/chat_completions.rs

Lines changed: 6 additions & 9 deletions
@@ -236,16 +236,13 @@ impl OpenAIStopConditionsProvider for NvCreateChatCompletionRequest {
 impl OpenAIOutputOptionsProvider for NvCreateChatCompletionRequest {
     fn get_logprobs(&self) -> Option<u32> {
         match self.inner.logprobs {
-            Some(logprobs) => {
-                if logprobs {
-                    match self.inner.top_logprobs {
-                        Some(top_logprobs) => Some(top_logprobs as u32),
-                        None => Some(1_u32),
-                    }
-                } else {
-                    None
+            Some(true) => {
+                match self.inner.top_logprobs {
+                    Some(top_logprobs) => Some(top_logprobs as u32),
+                    None => Some(1_u32),
                 }
-            }
+            },
+            Some(false) => None,
             None => None,
         }
     }
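The refactor replaces the nested if on the unwrapped bool with a direct match on the Option<bool> flag. Here is a self-contained sketch of the same control flow with the provider trait and self.inner stripped away; the free function and the u8 type for top_logprobs are assumptions for illustration.

// Sketch of the refactored logic as a free function; field access on
// `self.inner` is replaced by plain parameters for illustration.
fn get_logprobs(logprobs: Option<bool>, top_logprobs: Option<u8>) -> Option<u32> {
    match logprobs {
        // logprobs requested: honor top_logprobs, defaulting to 1.
        Some(true) => match top_logprobs {
            Some(top_logprobs) => Some(top_logprobs as u32),
            None => Some(1_u32),
        },
        // logprobs explicitly disabled, or unset: return None.
        Some(false) => None,
        None => None,
    }
}

fn main() {
    assert_eq!(get_logprobs(Some(true), Some(5)), Some(5));
    assert_eq!(get_logprobs(Some(true), None), Some(1));
    assert_eq!(get_logprobs(Some(false), Some(5)), None);
    assert_eq!(get_logprobs(None, None), None);
}

The last two arms could also be collapsed into a single `Some(false) | None => None`, which is purely a stylistic choice.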

lib/llm/src/protocols/openai/completions/delta.rs

Lines changed: 2 additions & 2 deletions
@@ -104,7 +104,7 @@ impl DeltaGenerator {
             .map(|(_, lp)| lp as f32)
             .collect::<Vec<f32>>();

-        let top_lps = top_logprobs.map_or(Vec::new(), |top_logprobs| {
+        let top_lps = top_logprobs.map_or(vec![], |top_logprobs| {
             toks.iter()
                 .zip(tok_lps.iter())
                 .zip(top_logprobs.iter())
@@ -139,7 +139,7 @@ impl DeltaGenerator {
         Some(async_openai::types::Logprobs {
             tokens: toks.iter().map(|(t, _)| t.clone()).collect(),
             token_logprobs: tok_lps.into_iter().map(Some).collect(),
-            text_offset: Vec::new(),
+            text_offset: vec![],
             top_logprobs: top_lps,
         })
     }
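Both hunks only swap Vec::new() for the vec![] macro as the map_or default, which is behaviorally identical. A tiny illustration of the pattern with made-up numbers:

// Illustration only: `map_or(vec![], ..)` yields an empty Vec when the Option
// is None and runs the closure otherwise. The data here is made up.
fn main() {
    let missing: Option<Vec<f32>> = None;
    let top_lps: Vec<f32> = missing.map_or(vec![], |lps| {
        lps.iter().map(|lp| lp * 2.0).collect()
    });
    assert!(top_lps.is_empty());

    let present = Some(vec![-0.1_f32, -0.5]);
    let top_lps: Vec<f32> = present.map_or(vec![], |lps| {
        lps.iter().map(|lp| lp * 2.0).collect()
    });
    assert_eq!(top_lps.len(), 2);
}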
