
Commit c0ce8a7

thinkall and Hk669 authored and committed
Update Microsoft Fabric notebook (#3243)
Co-authored-by: HRUSHIKESH DOKALA <[email protected]>
1 parent 320e552 commit c0ce8a7

File tree

1 file changed: +27 -21 lines changed

notebook/agentchat_microsoft_fabric.ipynb (+27 -21)
@@ -113,32 +113,36 @@
     }
    ],
    "source": [
-    "import types\n",
+    "from synapse.ml.mlflow import get_mlflow_env_config\n",
     "\n",
-    "import httpx\n",
-    "from synapse.ml.fabric.credentials import get_openai_httpx_sync_client\n",
     "\n",
-    "import autogen\n",
+    "def get_config_list():\n",
+    "    mlflow_env_configs = get_mlflow_env_config()\n",
+    "    access_token = mlflow_env_configs.driver_aad_token\n",
+    "    prebuilt_AI_base_url = mlflow_env_configs.workload_endpoint + \"cognitive/openai/\"\n",
     "\n",
-    "http_client = get_openai_httpx_sync_client()\n",
-    "http_client.__deepcopy__ = types.MethodType(\n",
-    "    lambda self, memo: self, http_client\n",
-    ")  # https://microsoft.github.io/autogen/docs/topics/llm_configuration#adding-http-client-in-llm_config-for-proxy\n",
+    "    config_list = [\n",
+    "        {\n",
+    "            \"model\": \"gpt-4o\",\n",
+    "            \"api_key\": access_token,\n",
+    "            \"base_url\": prebuilt_AI_base_url,\n",
+    "            \"api_type\": \"azure\",\n",
+    "            \"api_version\": \"2024-02-01\",\n",
+    "        },\n",
+    "    ]\n",
     "\n",
-    "config_list = [\n",
-    "    {\n",
-    "        \"model\": \"gpt-4o\",\n",
-    "        \"http_client\": http_client,\n",
-    "        \"api_type\": \"azure\",\n",
-    "        \"api_version\": \"2024-02-01\",\n",
-    "    },\n",
-    "]\n",
+    "    # Set temperature, timeout and other LLM configurations\n",
+    "    llm_config = {\n",
+    "        \"config_list\": config_list,\n",
+    "        \"temperature\": 0,\n",
+    "        \"timeout\": 600,\n",
+    "    }\n",
+    "    return config_list, llm_config\n",
+    "\n",
+    "config_list, llm_config = get_config_list()\n",
     "\n",
-    "# Set temperature, timeout and other LLM configurations\n",
-    "llm_config = {\n",
-    "    \"config_list\": config_list,\n",
-    "    \"temperature\": 0,\n",
-    "}"
+    "assert len(config_list) > 0\n",
+    "print(\"models to use: \", [config_list[i][\"model\"] for i in range(len(config_list))])"
    ]
   },
   {
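For readability, the updated configuration cell from this hunk reads as follows when assembled into plain Python. This is a sketch reconstructed from the notebook source strings above; the comments are added here for orientation and are not part of the notebook.

from synapse.ml.mlflow import get_mlflow_env_config


def get_config_list():
    # Read the Fabric/Synapse MLflow environment config to obtain an AAD token
    # and the workload endpoint of the prebuilt Azure OpenAI service.
    mlflow_env_configs = get_mlflow_env_config()
    access_token = mlflow_env_configs.driver_aad_token
    prebuilt_AI_base_url = mlflow_env_configs.workload_endpoint + "cognitive/openai/"

    # Use the token as the API key and the workload endpoint as the base URL.
    config_list = [
        {
            "model": "gpt-4o",
            "api_key": access_token,
            "base_url": prebuilt_AI_base_url,
            "api_type": "azure",
            "api_version": "2024-02-01",
        },
    ]

    # Set temperature, timeout and other LLM configurations
    llm_config = {
        "config_list": config_list,
        "temperature": 0,
        "timeout": 600,
    }
    return config_list, llm_config


config_list, llm_config = get_config_list()

assert len(config_list) > 0
print("models to use: ", [config_list[i]["model"] for i in range(len(config_list))])

Compared with the removed code, this drops the httpx client workaround (get_openai_httpx_sync_client plus the __deepcopy__ patch) and instead authenticates with the driver AAD token and the prebuilt Azure OpenAI base URL.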
@@ -300,6 +304,8 @@
     }
    ],
    "source": [
+    "import autogen\n",
+    "\n",
     "# create an AssistantAgent instance named \"assistant\"\n",
     "assistant = autogen.AssistantAgent(\n",
     "    name=\"assistant\",\n",
