diff --git a/onnxruntime/core/providers/migraphx/migraphx_execution_provider_utils.h b/onnxruntime/core/providers/migraphx/migraphx_execution_provider_utils.h
index cce90f3ef82be..e20cc9140916a 100644
--- a/onnxruntime/core/providers/migraphx/migraphx_execution_provider_utils.h
+++ b/onnxruntime/core/providers/migraphx/migraphx_execution_provider_utils.h
@@ -268,7 +268,7 @@ inline std::string GenerateGraphId(const GraphViewer& graph_viewer) {
 const fs::path path{main_graph.ModelPath()}; if (path.has_filename()) {
-const auto model_name = path.filename().string();
+const auto model_name = PathToUTF8String(path.filename().native());
 LOGS_DEFAULT(INFO) << "Model name is '" << model_name << "'"; // Ensure enough characters are hashed in case model names are too short
diff --git a/onnxruntime/core/providers/nv_tensorrt_rtx/onnx_ctx_model_helper.cc b/onnxruntime/core/providers/nv_tensorrt_rtx/onnx_ctx_model_helper.cc
index 7baac6aa1f6d0..0b137c4674b00 100644
--- a/onnxruntime/core/providers/nv_tensorrt_rtx/onnx_ctx_model_helper.cc
+++ b/onnxruntime/core/providers/nv_tensorrt_rtx/onnx_ctx_model_helper.cc
@@ -166,7 +166,7 @@ Status CreateCtxNode(const GraphViewer& graph_viewer,
 } attr_ep_cache_context->set_s(engine_data_str); } else {
-std::string engine_cache_filename = std::filesystem::path(engine_cache_path).filename().string();
+std::string engine_cache_filename = PathToUTF8String(std::filesystem::path(engine_cache_path).filename().native());
 attr_ep_cache_context->set_s(engine_cache_filename); std::fstream engine_cache_file(engine_cache_path, std::ios::binary | std::ios::out); if (engine_cache_file.is_open()) {
@@ -188,7 +188,7 @@ Status CreateCtxNode(const GraphViewer& graph_viewer,
 attr_onnx_filename->set_name(ONNX_MODEL_FILENAME); attr_onnx_filename->set_type(onnx::AttributeProto_AttributeType_STRING);
-attr_onnx_filename->set_s(std::filesystem::path(onnx_model_path).filename().string());
+attr_onnx_filename->set_s(PathToUTF8String(std::filesystem::path(onnx_model_path).filename().native()));
 attr_sdk_version->set_name(SDK_VERSION); attr_sdk_version->set_type(onnx::AttributeProto_AttributeType_STRING);
diff --git a/onnxruntime/core/providers/openvino/backend_manager.cc b/onnxruntime/core/providers/openvino/backend_manager.cc
index 892bdec7abe83..c033b0b10a786 100644
--- a/onnxruntime/core/providers/openvino/backend_manager.cc
+++ b/onnxruntime/core/providers/openvino/backend_manager.cc
@@ -185,7 +185,7 @@ void BackendManager::TryExportCompiledBlobAsEPCtxNode(const onnxruntime::GraphVi
 model_blob_str = std::move(ss).str(); } } else { // External blob
-model_blob_str = shared_context_.GetBinPath().filename().string();
+model_blob_str = PathToUTF8String(shared_context_.GetBinPath().filename().native());
 } auto status = ep_ctx_handle_.AddOVEPCtxNodeToGraph(graph_body_viewer,
diff --git a/onnxruntime/core/providers/qnn/builder/onnx_ctx_model_helper.cc b/onnxruntime/core/providers/qnn/builder/onnx_ctx_model_helper.cc
index 0e49c0f897bea..68aa9a157f4a2 100644
--- a/onnxruntime/core/providers/qnn/builder/onnx_ctx_model_helper.cc
+++ b/onnxruntime/core/providers/qnn/builder/onnx_ctx_model_helper.cc
@@ -269,7 +269,7 @@ Status CreateEPContextNodes(Model* model,
 } context_bin_path = context_bin_path + ToPathString("_qnn.bin");
-context_cache_name = std::filesystem::path(context_bin_path).filename().string();
+context_cache_name = PathToUTF8String(std::filesystem::path(context_bin_path).filename().native());
 // If generate ctx.onnx with share_ep_context enabled, all ctx.onnx should point to the same ctx.bin if (share_ep_contexts) {
diff --git a/onnxruntime/core/providers/qnn/builder/qnn_profile_serializer.cc b/onnxruntime/core/providers/qnn/builder/qnn_profile_serializer.cc
index fb76f2110cbc8..87340e5b3ebeb 100644
--- a/onnxruntime/core/providers/qnn/builder/qnn_profile_serializer.cc
+++ b/onnxruntime/core/providers/qnn/builder/qnn_profile_serializer.cc
@@ -237,7 +237,7 @@ Serializer::Serializer(const ProfilingInfo& profiling_info,
 tracelogging_provider_ep_enabled_(tracelogging_provider_ep_enabled) { #ifdef QNN_SYSTEM_PROFILE_API_ENABLED std::filesystem::path output_fs_filepath(profiling_info.csv_output_filepath);
-qnn_log_filename_ = output_fs_filepath.filename().string();
+qnn_log_filename_ = PathToUTF8String(output_fs_filepath.filename().native());
 // Remove extension (assumed to be ".csv") then add "_qnn.log" size_t extension_start_idx = qnn_log_filename_.rfind("."); qnn_log_filename_ = qnn_log_filename_.substr(0, extension_start_idx);
diff --git a/onnxruntime/core/providers/tensorrt/onnx_ctx_model_helper.cc b/onnxruntime/core/providers/tensorrt/onnx_ctx_model_helper.cc
index 2a54bfea86e91..091b110d8c746 100644
--- a/onnxruntime/core/providers/tensorrt/onnx_ctx_model_helper.cc
+++ b/onnxruntime/core/providers/tensorrt/onnx_ctx_model_helper.cc
@@ -120,7 +120,7 @@ ONNX_NAMESPACE::ModelProto* CreateCtxModel(const GraphViewer& graph_viewer,
 attr_2->set_s(compute_capability); attr_3->set_name(ONNX_MODEL_FILENAME); attr_3->set_type(onnx::AttributeProto_AttributeType_STRING);
-attr_3->set_s(std::filesystem::path(onnx_model_path).filename().string());
+attr_3->set_s(PathToUTF8String(std::filesystem::path(onnx_model_path).filename().native()));
 attr_4->set_name(SOURCE); attr_4->set_type(onnx::AttributeProto_AttributeType_STRING); attr_4->set_s(kTensorrtExecutionProvider);
diff --git a/onnxruntime/core/session/environment.cc b/onnxruntime/core/session/environment.cc
index 7cd02e5413407..f37c685cf2f28 100644
--- a/onnxruntime/core/session/environment.cc
+++ b/onnxruntime/core/session/environment.cc
@@ -618,7 +618,7 @@ Status Environment::CreateAndRegisterInternalEps() {
 Status Environment::RegisterExecutionProviderLibrary(const std::string& registration_name, const ORTCHAR_T* lib_path) { std::lock_guard lock{mutex_};
-std::string lib_file_name = std::filesystem::path(lib_path).filename().string();
+std::string lib_file_name = PathToUTF8String(std::filesystem::path(lib_path).filename().native());
 Env::Default().GetTelemetryProvider().LogRegisterEpLibraryWithLibPath(registration_name, lib_file_name); std::vector internal_factories = {};
diff --git a/onnxruntime/core/session/inference_session.cc b/onnxruntime/core/session/inference_session.cc
index 08b58f3de1a11..b873c95b496bb 100644
--- a/onnxruntime/core/session/inference_session.cc
+++ b/onnxruntime/core/session/inference_session.cc
@@ -2612,7 +2612,7 @@ common::Status InferenceSession::Initialize() {
 // and log telemetry std::filesystem::path model_path = graph.ModelPath();
-std::string model_file_name = model_path.filename().string();
+std::string model_file_name = PathToUTF8String(model_path.filename().native());
 bool model_has_fp16_inputs = ModelHasFP16Inputs(graph); env.GetTelemetryProvider().LogSessionCreation( session_id_, model_->IrVersion(), model_->ProducerName(), model_->ProducerVersion(), model_->Domain(),
@@ -4096,7 +4096,7 @@ void InferenceSession::LogAllSessions() {
 if (nullptr != model) { onnxruntime::Graph& graph = model->MainGraph(); std::filesystem::path model_path = graph.ModelPath();
-std::string model_file_name = model_path.filename().string();
+std::string model_file_name = PathToUTF8String(model_path.filename().native());
 bool model_has_fp16_inputs = ModelHasFP16Inputs(graph); std::string model_weight_type = session->GetWeightDataType(); std::string model_graph_hash = session->GetGraphHash();