
Commit

feat(ai): add xAI grok-beta
KernelDeimos committed Nov 7, 2024
1 parent d613c5f commit 28adcf5
Showing 3 changed files with 124 additions and 0 deletions.
5 changes: 5 additions & 0 deletions src/backend/src/modules/puterai/PuterAIModule.js
@@ -48,6 +48,11 @@ class PuterAIModule extends AdvancedBase {
const { GroqAIService } = require('./GroqAIService');
services.registerService('groq', GroqAIService);
}

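// Register the xAI service only when an 'xai' entry exists in the
// services config, mirroring the Groq registration above.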
if ( !! config?.services?.['xai'] ) {
const { XAIService } = require('./XAIService');
services.registerService('xai', XAIService);
}
}
}

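For context, the service registered above reads its settings from the 'xai' entry in the services config; a minimal sketch of such an entry follows (only the 'services.xai' key and 'apiKey' field appear in this diff, anything beyond that would be an assumption):

"services": {
    "xai": { "apiKey": "<your xAI API key>" }
}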
117 changes: 117 additions & 0 deletions src/backend/src/modules/puterai/XAIService.js
@@ -0,0 +1,117 @@
const { default: Anthropic } = require("@anthropic-ai/sdk");
const BaseService = require("../../services/BaseService");
const { whatis } = require("../../util/langutil");
const { PassThrough } = require("stream");
const { TypedValue } = require("../../services/drivers/meta/Runtime");

const PUTER_PROMPT = `
You are running on an open-source platform called Puter,
as the xAI implementation for a driver interface
called puter-chat-completion.
The following JSON contains system messages from the
user of the driver interface (typically an app on Puter):
`.replace(/\n/g, ' ').trim();

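/**
 * XAIService exposes xAI's grok-beta model through the puter-chat-completion
 * driver interface. It reuses the Anthropic SDK client, pointed at xAI's
 * Anthropic-compatible endpoint via the baseURL option.
 */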
class XAIService extends BaseService {
static MODULES = {
Anthropic: require('@anthropic-ai/sdk'),
}

async _init () {
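// The Anthropic client is reused here; baseURL redirects all requests
// to xAI's API instead of Anthropic's.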
this.anthropic = new Anthropic({
apiKey: this.config.apiKey,
baseURL: 'https://api.x.ai'
});
}

static IMPLEMENTS = {
['puter-chat-completion']: {
async list () {
return [
'grok-beta',
];
},
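// Adapts incoming messages to the Anthropic message format, then either
// streams NDJSON chunks or returns a single completion object.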
async complete ({ messages, stream, model }) {
const adapted_messages = [];

const system_prompts = [];
let previous_was_user = false;
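// Normalize messages: coerce string content into text blocks, merge
// consecutive user messages into one, and collect system messages
// separately since they are passed as a top-level 'system' field.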
for ( const message of messages ) {
if ( typeof message.content === 'string' ) {
message.content = {
type: 'text',
text: message.content,
};
}
if ( whatis(message.content) !== 'array' ) {
message.content = [message.content];
}
if ( ! message.role ) message.role = 'user';
if ( message.role === 'user' && previous_was_user ) {
const last_msg = adapted_messages[adapted_messages.length-1];
last_msg.content.push(
...(Array.isArray(message.content) ? message.content : [message.content])
);
continue;
}
if ( message.role === 'system' ) {
system_prompts.push(...message.content);
continue;
}
adapted_messages.push(message);
if ( message.role === 'user' ) {
previous_was_user = true;
}
}

if ( stream ) {
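// Streaming path: return a TypedValue wrapping a PassThrough stream
// and write one NDJSON line per text delta received from the API.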
const stream = new PassThrough();
const retval = new TypedValue({
$: 'stream',
content_type: 'application/x-ndjson',
chunked: true,
}, stream);
(async () => {
const completion = await this.anthropic.messages.stream({
model: model ?? 'grok-beta',
max_tokens: 1000,
temperature: 0,
system: PUTER_PROMPT + JSON.stringify(system_prompts),
messages: adapted_messages,
});
for await ( const event of completion ) {
if (
event.type !== 'content_block_delta' ||
event.delta.type !== 'text_delta'
) continue;
const str = JSON.stringify({
text: event.delta.text,
});
stream.write(str + '\n');
}
stream.end();
})();

return retval;
}
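// Non-streaming path: request the full completion in a single call.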

const msg = await this.anthropic.messages.create({
model: model ?? 'grok-beta',
max_tokens: 1000,
temperature: 0,
system: PUTER_PROMPT + JSON.stringify(system_prompts),
messages: adapted_messages,
});
return {
message: msg,
finish_reason: 'stop'
};
}
}
}
}

module.exports = {
XAIService,
};
2 changes: 2 additions & 0 deletions src/puter-js/src/modules/AI.js
@@ -267,6 +267,8 @@ class AI{
"whisper-large-v3"
].includes(options.model)) {
driver = 'groq';
}else if(options.model === 'grok-beta') {
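// grok-beta is served by the xAI driver registered in PuterAIModule.js.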
driver = 'xai';
}

// stream flag from settings
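With this routing in place, an app can target the new model by name. A minimal usage sketch (the prompt text and variable name are illustrative, not part of the commit):

// Selecting 'grok-beta' routes the request to the new 'xai' driver.
const reply = await puter.ai.chat('Hello from Puter!', { model: 'grok-beta' });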
