Merge pull request #192 from kreneskyp/ollama
LLM: Ollama
kreneskyp authored Aug 30, 2023
2 parents f17a5ec + 828e833 commit 6822a22
Showing 2 changed files with 272 additions and 2 deletions.
69 changes: 67 additions & 2 deletions ix/chains/fixture_src/llm.py
@@ -1,6 +1,8 @@
from langchain import LlamaCpp
from langchain.llms import Ollama

from ix.api.chains.types import NodeTypeField
from ix.chains.fixture_src.common import VERBOSE

OPENAI_LLM_CLASS_PATH = "langchain.chat_models.openai.ChatOpenAI"
OPENAI_LLM = {
@@ -182,7 +184,8 @@
"type": "llm",
"name": "Llama Cpp",
"description": "Llama Cpp wrapper for llama models",
"fields": NodeTypeField.get_fields(
"fields": [VERBOSE]
+ NodeTypeField.get_fields(
LlamaCpp,
include=[
"model_path",
@@ -216,5 +219,67 @@
),
}

OLLAMA_LLM_CLASS_PATH = "langchain.llms.ollama.Ollama"
OLLAMA_LLM = {
"class_path": OLLAMA_LLM_CLASS_PATH,
"type": "llm",
"name": "Ollama",
"description": "Ollama server for llama models",
"fields": [VERBOSE]
+ NodeTypeField.get_fields(
Ollama,
include=[
"base_url",
"model",
"mirostat",
"mirostat_eta",
"mirostat_tau",
"num_ctx",
"num_gpu",
"num_thread",
"repeat_last_n",
"repeat_penalty",
"temperature",
"stop",
"tfs_z",
"top_p",
],
field_options={
"temperature": {
"default": 0.8,
"input_type": "slider",
"min": 0,
"max": 1,
"step": 0.05,
},
"num_gpu": {
"default": 1,
"input_type": "slider",
"min": 0,
"max": 10,
"step": 1,
},
"top_k": {
"default": 40,
"description": "Top K",
"input_type": "slider",
"min": 1,
"max": 100,
"step": 1,
},
"top_p": {
"default": 0.9,
"input_type": "slider",
"min": 0,
"max": 1,
"step": 0.05,
},
"stop": {
"style": {"width": "100%"},
},
},
),
}


-LLMS = [ANTHROPIC_LLM, GOOGLE_PALM, LLAMA_CPP_LLM, OPENAI_LLM]
+LLMS = [ANTHROPIC_LLM, GOOGLE_PALM, LLAMA_CPP_LLM, OLLAMA_LLM, OPENAI_LLM]
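For reference, here is a minimal sketch (not part of this commit) of how the fields exposed by OLLAMA_LLM map onto constructing langchain's Ollama class directly. The base_url is the usual local Ollama default, and the prompt is purely illustrative:

    from langchain.llms import Ollama

    # Illustrative only: instantiate the class this node type points at
    # (langchain.llms.ollama.Ollama) with a few of the fields listed above.
    llm = Ollama(
        base_url="http://localhost:11434",  # assumed local Ollama server
        model="llama2",                     # fixture default
        temperature=0.8,                    # slider default from field_options
        num_gpu=1,
        top_p=0.9,
        verbose=False,
    )

    print(llm("Why is the sky blue?"))
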
205 changes: 205 additions & 0 deletions ix/chains/fixtures/node_types.json
@@ -1401,6 +1401,11 @@
"display_type": "node",
"connectors": null,
"fields": [
{
"name": "verbose",
"type": "boolean",
"default": false
},
{
"name": "model_path",
"type": "str",
@@ -1588,6 +1593,7 @@
"config_schema": {
"type": "object",
"required": [
"verbose",
"model_path"
],
"properties": {
@@ -3453,6 +3459,205 @@
}
}
},
{
"model": "chains.nodetype",
"pk": "a4b78af5-415f-45e5-ad6f-079384d826a0",
"fields": {
"name": "Ollama",
"description": "Ollama server for llama models",
"class_path": "langchain.llms.ollama.Ollama",
"type": "llm",
"display_type": "node",
"connectors": null,
"fields": [
{
"name": "verbose",
"type": "boolean",
"default": false
},
{
"name": "model",
"type": "str",
"label": "Model",
"default": "llama2",
"required": false
},
{
"name": "mirostat",
"type": "int",
"label": "Mirostat",
"default": null,
"required": false
},
{
"name": "mirostat_eta",
"type": "float",
"label": "Mirostat_eta",
"default": null,
"required": false
},
{
"name": "mirostat_tau",
"type": "float",
"label": "Mirostat_tau",
"default": null,
"required": false
},
{
"name": "num_ctx",
"type": "int",
"label": "Num_ctx",
"default": null,
"required": false
},
{
"max": 10,
"min": 0,
"name": "num_gpu",
"step": 1,
"type": "int",
"label": "Num_gpu",
"default": 1,
"required": false,
"input_type": "slider"
},
{
"name": "num_thread",
"type": "int",
"label": "Num_thread",
"default": null,
"required": false
},
{
"name": "repeat_last_n",
"type": "int",
"label": "Repeat_last_n",
"default": null,
"required": false
},
{
"name": "repeat_penalty",
"type": "float",
"label": "Repeat_penalty",
"default": null,
"required": false
},
{
"max": 1,
"min": 0,
"name": "temperature",
"step": 0.05,
"type": "float",
"label": "Temperature",
"default": 0.8,
"required": false,
"input_type": "slider"
},
{
"name": "stop",
"type": "List",
"label": "Stop",
"style": {
"width": "100%"
},
"default": null,
"required": false
},
{
"name": "tfs_z",
"type": "float",
"label": "Tfs_z",
"default": null,
"required": false
},
{
"max": 1,
"min": 0,
"name": "top_p",
"step": 0.05,
"type": "int",
"label": "Top_p",
"default": 0.9,
"required": false,
"input_type": "slider"
}
],
"child_field": null,
"config_schema": {
"type": "object",
"required": [
"verbose"
],
"properties": {
"stop": {
"type": "object",
"default": null
},
"model": {
"type": "string",
"default": "llama2"
},
"tfs_z": {
"type": "number",
"default": null
},
"top_p": {
"type": "number",
"default": 0.9,
"maximum": 1.0,
"minimum": 0.0,
"multipleOf": 0.05
},
"num_ctx": {
"type": "number",
"default": null
},
"num_gpu": {
"type": "number",
"default": 1,
"maximum": 10.0,
"minimum": 0.0,
"multipleOf": 1.0
},
"verbose": {
"type": "boolean",
"default": false
},
"mirostat": {
"type": "number",
"default": null
},
"num_thread": {
"type": "number",
"default": null
},
"temperature": {
"type": "number",
"default": 0.8,
"maximum": 1.0,
"minimum": 0.0,
"multipleOf": 0.05
},
"mirostat_eta": {
"type": "number",
"default": null
},
"mirostat_tau": {
"type": "number",
"default": null
},
"repeat_last_n": {
"type": "number",
"default": null
},
"repeat_penalty": {
"type": "number",
"default": null
}
}
}
}
},
{
"model": "chains.nodetype",
"pk": "a8ffd3b0-dfb4-4e1b-b5bf-90a2ec959ff8",
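
As a sanity check on the fixture above, one way (again, not part of this commit) to validate a node config against the Ollama entry's config_schema is the jsonschema package; the schema below is a trimmed, hand-copied subset of the properties shown above:

    from jsonschema import ValidationError, validate

    # Trimmed subset of the Ollama node's config_schema from the fixture above.
    ollama_config_schema = {
        "type": "object",
        "required": ["verbose"],
        "properties": {
            "model": {"type": "string", "default": "llama2"},
            "temperature": {"type": "number", "minimum": 0.0, "maximum": 1.0},
            "num_gpu": {"type": "number", "minimum": 0.0, "maximum": 10.0},
            "verbose": {"type": "boolean", "default": False},
        },
    }

    config = {"verbose": False, "model": "llama2", "temperature": 0.7}
    try:
        validate(instance=config, schema=ollama_config_schema)
        print("config is valid")
    except ValidationError as err:
        print(f"invalid Ollama config: {err.message}")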
