Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Python: planner love #5477

Merged
merged 4 commits into from
Mar 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
143 changes: 68 additions & 75 deletions python/notebooks/05-using-the-planner.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
"metadata": {},
"outputs": [],
"source": [
"!python -m pip install semantic-kernel==0.9.2b1"
"!python -m pip install -U semantic-kernel "
]
},
{
Expand All @@ -36,7 +36,7 @@
"from services import Service\n",
"\n",
"# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
"selectedService = Service.OpenAI"
"selectedService = Service.AzureOpenAI"
]
},
{
Expand All @@ -46,9 +46,9 @@
"metadata": {},
"outputs": [],
"source": [
"from semantic_kernel.prompt_template.input_variable import InputVariable\n",
"from semantic_kernel.contents.chat_history import ChatHistory\n",
"from semantic_kernel.functions.kernel_arguments import KernelArguments"
"from semantic_kernel.contents.chat_history import ChatHistory # noqa: F401\n",
"from semantic_kernel.functions.kernel_arguments import KernelArguments # noqa: F401\n",
"from semantic_kernel.prompt_template.input_variable import InputVariable # noqa: F401"
]
},
{
Expand All @@ -59,26 +59,26 @@
"outputs": [],
"source": [
"import semantic_kernel as sk\n",
"import semantic_kernel.connectors.ai.open_ai as sk_oai\n",
"import semantic_kernel.connectors.ai.open_ai as sk_oai # noqa: F401\n",
"\n",
"kernel = sk.Kernel()\n",
"\n",
"service_id = None\n",
"if selectedService == Service.OpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion\n",
"\n",
" api_key, org_id = sk.openai_settings_from_dot_env()\n",
" service_id = \"default\"\n",
" kernel.add_service(\n",
" OpenAIChatCompletion(service_id=service_id, ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id),\n",
" sk_oai.OpenAIChatCompletion(\n",
" service_id=service_id, ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id\n",
" ),\n",
" )\n",
"elif selectedService == Service.AzureOpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n",
"\n",
" deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n",
" service_id = \"default\"\n",
" kernel.add_service(\n",
" AzureChatCompletion(service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n",
" sk_oai.AzureChatCompletion(\n",
" service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key\n",
" ),\n",
" )"
]
},
Expand Down Expand Up @@ -203,44 +203,6 @@
"Let's also define an inline plugin and have it be available to the Planner. Be sure to give it a function name and plugin name.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "54422ba6",
"metadata": {},
"outputs": [],
"source": [
"prompt = \"\"\"\n",
"{{$input}}\n",
"\n",
"Rewrite the above in the style of Shakespeare.\n",
"\"\"\"\n",
"\n",
"exec_settings = sk_oai.OpenAIChatPromptExecutionSettings(\n",
" service_id=service_id,\n",
" max_tokens=2000,\n",
" temperature=0.8,\n",
")\n",
"\n",
"prompt_template_config = sk.PromptTemplateConfig(\n",
" template=prompt,\n",
" function_name=\"shakespeare\",\n",
" plugin_name=\"ShakespearePlugin\",\n",
" name=\"planner\",\n",
" template_format=\"semantic-kernel\",\n",
" input_variables=[\n",
" InputVariable(name=\"input\", description=\"The user input\", is_required=True),\n",
" ],\n",
" execution_settings=exec_settings,\n",
")\n",
"\n",
"shakespeare_plugin = kernel.create_function_from_prompt(\n",
" function_name=\"shakespeare\",\n",
" plugin_name=\"ShakespearePlugin\",\n",
" prompt_template_config=prompt_template_config,\n",
")"
]
},
{
"cell_type": "markdown",
"id": "5057cf9b",
Expand All @@ -256,38 +218,66 @@
"metadata": {},
"outputs": [],
"source": [
"ask = \"\"\"\n",
"Tomorrow is Valentine's day. I need to come up with a few date ideas.\n",
"She likes Shakespeare so write using his style. She speaks French so write it in French.\n",
"Convert the text to uppercase.\"\"\"\n",
"from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt\n",
"\n",
"# TODO: we cannot add an updated ask to a current plan because the underlying plugins already exist\n",
"kernel = sk.Kernel()\n",
"service_id = None\n",
"service_id = \"default\"\n",
"if selectedService == Service.OpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion\n",
"\n",
" api_key, org_id = sk.openai_settings_from_dot_env()\n",
" service_id = \"default\"\n",
" kernel.add_service(\n",
" OpenAIChatCompletion(service_id=service_id, ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id),\n",
" sk_oai.OpenAIChatCompletion(\n",
" service_id=service_id, ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id\n",
" ),\n",
" )\n",
"elif selectedService == Service.AzureOpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n",
"\n",
" deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n",
" service_id = \"default\"\n",
" kernel.add_service(\n",
" AzureChatCompletion(service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n",
" sk_oai.AzureChatCompletion(\n",
" service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key\n",
" ),\n",
" )\n",
"\n",
"plugins_directory = \"../../samples/plugins/\"\n",
"summarize_plugin = kernel.import_plugin_from_prompt_directory(plugins_directory, \"SummarizePlugin\")\n",
"writer_plugin = kernel.import_plugin_from_prompt_directory(plugins_directory, \"WriterPlugin\")\n",
"text_plugin = kernel.import_plugin_from_object(TextPlugin(), \"TextPlugin\")\n",
"\n",
"shakespeare_func = KernelFunctionFromPrompt(\n",
" function_name=\"Shakespeare\",\n",
" plugin_name=\"WriterPlugin\",\n",
" prompt=\"\"\"\n",
"{{$input}}\n",
"\n",
"Rewrite the above in the style of Shakespeare.\n",
"\"\"\",\n",
" prompt_execution_settings=sk_oai.OpenAIChatPromptExecutionSettings(\n",
" service_id=service_id,\n",
" max_tokens=2000,\n",
" temperature=0.8,\n",
" ),\n",
")\n",
"kernel.plugins.add_functions_to_plugin([shakespeare_func], \"WriterPlugin\")\n",
"\n",
"for plugin in kernel.plugins:\n",
" for function in plugin.functions.values():\n",
" print(f\"Plugin: {plugin.name}, Function: {function.name}\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "25abac0d",
"metadata": {},
"outputs": [],
"source": [
"planner = BasicPlanner(service_id)\n",
"new_plan = await planner.create_plan(ask, kernel, prompt)"
"\n",
"ask = \"\"\"\n",
"Tomorrow is Valentine's day. I need to come up with a few short poems.\n",
"She likes Shakespeare so write using his style. She speaks French so write it in French.\n",
"Convert the text to uppercase.\"\"\"\n",
"\n",
"new_plan = await planner.create_plan(goal=ask, kernel=kernel)"
]
},
{
Expand Down Expand Up @@ -482,11 +472,7 @@
"metadata": {},
"outputs": [],
"source": [
"from semantic_kernel.core_plugins import (\n",
" MathPlugin,\n",
" TextPlugin,\n",
" TimePlugin,\n",
")\n",
"from semantic_kernel.core_plugins import MathPlugin, TextPlugin, TimePlugin\n",
"\n",
"kernel.import_plugin_from_object(MathPlugin(), \"math\")\n",
"kernel.import_plugin_from_object(TimePlugin(), \"time\")\n",
Expand Down Expand Up @@ -627,7 +613,7 @@
"metadata": {},
"outputs": [],
"source": [
"ask = \"\"\"How many total championships combined do the top 5 teams in the NBA have?\"\"\"\n",
"ask = \"\"\"How many total championships combined do the top 5 teams in the NBA have? And which teams are they?\"\"\"\n",
"\n",
"plan = planner.create_plan(goal=ask)"
]
Expand All @@ -639,7 +625,7 @@
"metadata": {},
"outputs": [],
"source": [
"result = await plan.invoke()"
"result = await plan.invoke(kernel)"
]
},
{
Expand Down Expand Up @@ -671,9 +657,16 @@
" print(\"Step:\", index)\n",
" print(\"Description:\", step.description)\n",
" print(\"Function:\", step.plugin_name + \".\" + step._function.name)\n",
" if len(step._outputs) > 0:\n",
" print(\" Output:\\n\", str.replace(result[step._outputs[0]], \"\\n\", \"\\n \"))"
" print(f\" Output: {','.join(str(res) for res in result.metadata['results'])}\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82a52451",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand All @@ -692,7 +685,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.8"
"version": "3.11.6"
}
},
"nbformat": 4,
Expand Down
8 changes: 8 additions & 0 deletions python/semantic_kernel/kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -619,6 +619,14 @@ def func(self, plugin_name: str, function_name: str) -> KernelFunction:
raise KernelFunctionNotFoundError(f"Function '{function_name}' not found in plugin '{plugin_name}'")
return self.plugins[plugin_name][function_name]

def func_from_fully_qualified_function_name(self, fully_qualified_function_name: str) -> KernelFunction:
    """Look up a kernel function by its fully qualified "plugin-function" name.

    Args:
        fully_qualified_function_name (str): Name in the form
            "<plugin_name>-<function_name>". Split on the FIRST "-" only
            (maxsplit=1), so the function part may itself contain hyphens;
            the plugin part must not.

    Returns:
        KernelFunction: The function registered under that plugin and name.

    Raises:
        KernelPluginNotFoundError: If no plugin with that name is registered.
        KernelFunctionNotFoundError: If the plugin exists but has no such function.
    """
    plugin_name, function_name = fully_qualified_function_name.split("-", maxsplit=1)
    if plugin_name not in self.plugins:
        raise KernelPluginNotFoundError(f"Plugin '{plugin_name}' not found")
    if function_name not in self.plugins[plugin_name]:
        raise KernelFunctionNotFoundError(f"Function '{function_name}' not found in plugin '{plugin_name}'")
    return self.plugins[plugin_name][function_name]

def create_function_from_prompt(
self,
function_name: str,
Expand Down
5 changes: 2 additions & 3 deletions python/semantic_kernel/planners/basic_planner.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def _create_available_functions_string(self, kernel: Kernel) -> str:
for name in list(all_functions_descriptions_dict.keys()):
available_functions_string += name + "\n"
description = all_functions_descriptions_dict[name] or ""
available_functions_string += "description: " + description + "\n"
available_functions_string += "description: " + description + "\n" if description else ""
available_functions_string += "args:\n"

# Add the parameters for each function
Expand Down Expand Up @@ -225,8 +225,7 @@ async def execute_plan(self, plan: Plan, kernel: Kernel) -> str:

for subtask in subtasks:
plugin_name, function_name = subtask["function"].split(".")
kernel_function = kernel.plugins[plugin_name][function_name]

kernel_function = kernel.func(plugin_name, function_name)
# Get the arguments dictionary for the function
args = subtask.get("args", None)
if args:
Expand Down
2 changes: 1 addition & 1 deletion python/semantic_kernel/planners/plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ async def invoke(

Args:
input (str, optional): The input to the plan. Defaults to None.
context (KernelContext, optional): The context to use. Defaults to None.
arguments (KernelArguments, optional): The arguments to pass to the plan. Defaults to None.
settings (PromptExecutionSettings, optional): The AI request settings to use. Defaults to None.
memory (SemanticTextMemoryBase, optional): The memory to use. Defaults to None.
**kwargs: Additional keyword arguments.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,20 +105,17 @@ async def create_plan(self, goal: str) -> Plan:
if isinstance(plan_result, FunctionResult) and "exception" in plan_result.metadata:
raise PlannerCreatePlanError(
f"Error creating plan for goal: {plan_result.metadata['exception']}",
plan_result.metadata["exception"],
)
) from plan_result.metadata["exception"]

plan_result_string = str(plan_result).strip()

try:
get_plugin_function = self.config.get_plugin_function or SequentialPlanParser.get_plugin_function(
self._kernel
)
plan = SequentialPlanParser.to_plan_from_xml(
plan_result_string,
goal,
get_plugin_function,
self.config.allow_missing_functions,
xml_string=plan_result_string,
goal=goal,
kernel=self._kernel,
get_plugin_function=self.config.get_plugin_function,
allow_missing_functions=self.config.allow_missing_functions,
)

if len(plan._steps) == 0:
Expand Down
Loading
Loading