@@ -32,10 +32,20 @@ import {
   Datasets,
   ListDatasetsResponse,
 } from './resources/datasets';
+import { EmbeddingCreateParams, Embeddings, EmbeddingsResponse } from './resources/embeddings';
+import {
+  DeleteFileResponse,
+  File,
+  FileContentResponse,
+  FileCreateParams,
+  FileListParams,
+  Files,
+  ListFilesResponse,
+} from './resources/files';
 import {
   ChatCompletionResponseStreamChunk,
   CompletionResponse,
-  EmbeddingsResponse,
+  EmbeddingsResponse as InferenceAPIEmbeddingsResponse,
   Inference,
   InferenceBatchChatCompletionParams,
   InferenceBatchChatCompletionResponse,
@@ -174,6 +184,17 @@ import {
   ToolRuntimeListToolsParams,
   ToolRuntimeListToolsResponse,
 } from './resources/tool-runtime/tool-runtime';
+import {
+  ListVectorStoresResponse,
+  VectorStore,
+  VectorStoreCreateParams,
+  VectorStoreDeleteResponse,
+  VectorStoreListParams,
+  VectorStoreSearchParams,
+  VectorStoreSearchResponse,
+  VectorStoreUpdateParams,
+  VectorStores,
+} from './resources/vector-stores/vector-stores';
 
 export interface ClientOptions {
   /**
@@ -291,10 +312,12 @@ export class LlamaStackClient extends Core.APIClient {
   eval: API.Eval = new API.Eval(this);
   inspect: API.Inspect = new API.Inspect(this);
   inference: API.Inference = new API.Inference(this);
+  embeddings: API.Embeddings = new API.Embeddings(this);
   chat: API.Chat = new API.Chat(this);
   completions: API.Completions = new API.Completions(this);
   vectorIo: API.VectorIo = new API.VectorIo(this);
   vectorDBs: API.VectorDBs = new API.VectorDBs(this);
+  vectorStores: API.VectorStores = new API.VectorStores(this);
   models: API.Models = new API.Models(this);
   postTraining: API.PostTraining = new API.PostTraining(this);
   providers: API.Providers = new API.Providers(this);
@@ -306,6 +329,7 @@ export class LlamaStackClient extends Core.APIClient {
   scoring: API.Scoring = new API.Scoring(this);
   scoringFunctions: API.ScoringFunctions = new API.ScoringFunctions(this);
   benchmarks: API.Benchmarks = new API.Benchmarks(this);
+  files: API.Files = new API.Files(this);
 
   protected override defaultQuery(): Core.DefaultQuery | undefined {
     return this._options.defaultQuery;
@@ -359,10 +383,12 @@ LlamaStackClient.Datasets = Datasets;
 LlamaStackClient.Eval = Eval;
 LlamaStackClient.Inspect = Inspect;
 LlamaStackClient.Inference = Inference;
+LlamaStackClient.Embeddings = Embeddings;
 LlamaStackClient.Chat = Chat;
 LlamaStackClient.Completions = Completions;
 LlamaStackClient.VectorIo = VectorIo;
 LlamaStackClient.VectorDBs = VectorDBs;
+LlamaStackClient.VectorStores = VectorStores;
 LlamaStackClient.Models = Models;
 LlamaStackClient.PostTraining = PostTraining;
 LlamaStackClient.Providers = Providers;
@@ -374,6 +400,7 @@ LlamaStackClient.Telemetry = Telemetry;
 LlamaStackClient.Scoring = Scoring;
 LlamaStackClient.ScoringFunctions = ScoringFunctions;
 LlamaStackClient.Benchmarks = Benchmarks;
+LlamaStackClient.Files = Files;
 export declare namespace LlamaStackClient {
   export type RequestOptions = Core.RequestOptions;
 
@@ -465,7 +492,7 @@ export declare namespace LlamaStackClient {
     Inference as Inference,
     type ChatCompletionResponseStreamChunk as ChatCompletionResponseStreamChunk,
     type CompletionResponse as CompletionResponse,
-    type EmbeddingsResponse as EmbeddingsResponse,
+    type InferenceAPIEmbeddingsResponse as EmbeddingsResponse,
     type TokenLogProbs as TokenLogProbs,
     type InferenceBatchChatCompletionResponse as InferenceBatchChatCompletionResponse,
     type InferenceBatchChatCompletionParams as InferenceBatchChatCompletionParams,
@@ -479,6 +506,12 @@ export declare namespace LlamaStackClient {
     type InferenceEmbeddingsParams as InferenceEmbeddingsParams,
   };
 
+  export {
+    Embeddings as Embeddings,
+    type EmbeddingsResponse as EmbeddingsResponse,
+    type EmbeddingCreateParams as EmbeddingCreateParams,
+  };
+
   export { Chat as Chat, type ChatCompletionChunk as ChatCompletionChunk };
 
   export {
@@ -505,6 +538,18 @@ export declare namespace LlamaStackClient {
     type VectorDBRegisterParams as VectorDBRegisterParams,
   };
 
+  export {
+    VectorStores as VectorStores,
+    type ListVectorStoresResponse as ListVectorStoresResponse,
+    type VectorStore as VectorStore,
+    type VectorStoreDeleteResponse as VectorStoreDeleteResponse,
+    type VectorStoreSearchResponse as VectorStoreSearchResponse,
+    type VectorStoreCreateParams as VectorStoreCreateParams,
+    type VectorStoreUpdateParams as VectorStoreUpdateParams,
+    type VectorStoreListParams as VectorStoreListParams,
+    type VectorStoreSearchParams as VectorStoreSearchParams,
+  };
+
   export {
     Models as Models,
     type ListModelsResponse as ListModelsResponse,
@@ -597,6 +642,16 @@ export declare namespace LlamaStackClient {
     type BenchmarkRegisterParams as BenchmarkRegisterParams,
   };
 
+  export {
+    Files as Files,
+    type DeleteFileResponse as DeleteFileResponse,
+    type File as File,
+    type ListFilesResponse as ListFilesResponse,
+    type FileContentResponse as FileContentResponse,
+    type FileCreateParams as FileCreateParams,
+    type FileListParams as FileListParams,
+  };
+
   export type AgentConfig = API.AgentConfig;
   export type BatchCompletion = API.BatchCompletion;
   export type ChatCompletionResponse = API.ChatCompletionResponse;
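Usage sketch for the resources this diff wires onto the client (embeddings, vectorStores, files). Note that the inference module's EmbeddingsResponse is re-imported as InferenceAPIEmbeddingsResponse so the new embeddings resource can own the exported EmbeddingsResponse name without a collision. The method names, parameter shapes, model id, and base URL below are assumptions inferred from the exported *CreateParams / *ListParams type names, not confirmed by this diff.

// Hedged sketch: create()/list() and their argument shapes are inferred, not
// taken from this diff, and may differ from the actual SDK surface.
import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // placeholder URL

async function main() {
  // embeddings: EmbeddingCreateParams suggests an OpenAI-style create() call.
  const embedding = await client.embeddings.create({
    model: 'all-MiniLM-L6-v2', // placeholder model id
    input: 'hello world',
  });

  // vectorStores: VectorStoreListParams suggests a list() call.
  const stores = await client.vectorStores.list();

  // files: FileListParams suggests a list() call.
  const files = await client.files.list();

  console.log(embedding, stores, files);
}

main();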