Skip to content

Commit 36540b3

Browse files
committed
MINIFICPP-2185 Upgrade to Visual Studio 2022
1 parent 329fe5e commit 36540b3

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

47 files changed

+263
-199
lines changed

.github/workflows/ci.yml

+60-9
Original file line numberDiff line numberDiff line change
@@ -69,10 +69,10 @@ jobs:
6969
with:
7070
name: macos-binaries
7171
path: build/bin
72-
windows_VS2019:
73-
name: "windows-2019"
74-
runs-on: windows-2019
75-
timeout-minutes: 180
72+
windows_VS2022:
73+
name: "windows-2022"
74+
runs-on: windows-2022
75+
timeout-minutes: 240
7676
steps:
7777
- name: Support longpaths
7878
run: git config --system core.longpaths true
@@ -82,11 +82,11 @@ jobs:
8282
uses: actions/cache/restore@v3
8383
with:
8484
path: ~/AppData/Local/Mozilla/sccache/cache
85-
key: ${{ runner.os }}-sccache-${{ github.ref }}-${{ github.sha }}
85+
key: ${{ runner.os }}-2022-sccache-${{ github.ref }}-${{ github.sha }}
8686
restore-keys: |
87-
${{ runner.os }}-sccache-${{ github.ref }}
88-
${{ runner.os }}-sccache-refs/heads/main
89-
${{ runner.os }}-sccache
87+
${{ runner.os }}-2022-sccache-${{ github.ref }}
88+
${{ runner.os }}-2022-sccache-refs/heads/main
89+
${{ runner.os }}-2022-sccache
9090
- name: Run sccache-cache
9191
uses: mozilla-actions/[email protected]
9292
- name: Install ninja-build tool
@@ -116,13 +116,64 @@ jobs:
116116
if: always()
117117
with:
118118
path: ~/AppData/Local/Mozilla/sccache/cache
119-
key: ${{ runner.os }}-sccache-${{ github.ref }}-${{ github.sha }}
119+
key: ${{ runner.os }}-2022-sccache-${{ github.ref }}-${{ github.sha }}
120120
- name: test
121121
run: cd ..\b && ctest --timeout 300 --parallel %NUMBER_OF_PROCESSORS% -C Release --output-on-failure
122122
shell: cmd
123123
- name: linter
124124
run: cd ..\b && cmake --build . --target linter --config Release -j 8
125125
shell: cmd
126+
windows_VS2019:
127+
name: "windows-2019"
128+
runs-on: windows-2019
129+
timeout-minutes: 240
130+
steps:
131+
- name: Support longpaths
132+
run: git config --system core.longpaths true
133+
- id: checkout
134+
uses: actions/checkout@v3
135+
- name: sccache cache restore
136+
uses: actions/cache/restore@v3
137+
with:
138+
path: ~/AppData/Local/Mozilla/sccache/cache
139+
key: ${{ runner.os }}-2019-sccache-${{ github.ref }}-${{ github.sha }}
140+
restore-keys: |
141+
${{ runner.os }}-2019-sccache-${{ github.ref }}
142+
${{ runner.os }}-2019-sccache-refs/heads/main
143+
${{ runner.os }}-2019-sccache
144+
- name: Run sccache-cache
145+
uses: mozilla-actions/[email protected]
146+
- name: Install ninja-build tool
147+
uses: seanmiddleditch/gha-setup-ninja@v3
148+
- name: Set up Python
149+
uses: actions/setup-python@v4
150+
with:
151+
python-version: '3.11'
152+
- name: Set up Lua
153+
uses: xpol/[email protected]
154+
- name: Set up NASM for OpenSSL
155+
uses: ilammy/setup-nasm@v1
156+
- id: install-sqliteodbc-driver
157+
run: |
158+
Invoke-WebRequest -Uri "http://www.ch-werner.de/sqliteodbc/sqliteodbc_w64.exe" -OutFile "sqliteodbc_w64.exe"
159+
if ((Get-FileHash 'sqliteodbc_w64.exe').Hash -ne "0df79be4a4412542839ebf405b20d95a7dfc803da0b0b6b0dc653d30dc82ee84") {Write "Hash mismatch"; Exit 1}
160+
./sqliteodbc_w64.exe /S
161+
shell: powershell
162+
- name: build
163+
run: |
164+
for /f "usebackq delims=" %%i in (`vswhere.exe -latest -property installationPath`) do if exist "%%i\Common7\Tools\vsdevcmd.bat" call "%%i\Common7\Tools\vsdevcmd.bat" -arch=x64 -host_arch=x64
165+
win_build_vs.bat ..\b /64 /CI /S /A /PDH /SPLUNK /GCP /ELASTIC /K /L /R /Z /N /RO /PR /PYTHON_SCRIPTING /LUA_SCRIPTING /MQTT /SCCACHE /NINJA
166+
sccache --show-stats
167+
shell: cmd
168+
- name: sccache cache save
169+
uses: actions/cache/save@v3
170+
if: always()
171+
with:
172+
path: ~/AppData/Local/Mozilla/sccache/cache
173+
key: ${{ runner.os }}-2019-sccache-${{ github.ref }}-${{ github.sha }}
174+
- name: test
175+
run: cd ..\b && ctest --timeout 300 --parallel %NUMBER_OF_PROCESSORS% -C Release --output-on-failure
176+
shell: cmd
126177
ubuntu_20_04:
127178
name: "ubuntu-20.04"
128179
runs-on: ubuntu-20.04

CMakeLists.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ endif()
106106
# Enable usage of the VERSION specifier
107107
if (WIN32)
108108
add_compile_definitions(WIN32_LEAN_AND_MEAN _CRT_SECURE_NO_WARNINGS NOMINMAX)
109-
add_compile_options(/W3 /utf-8 /bigobj)
109+
add_compile_options(/W3 /utf-8 /bigobj /MP)
110110
endif()
111111

112112
if (NOT PORTABLE)

cmake/BundledOSSPUUID.cmake

+7-1
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,13 @@ function(use_bundled_osspuuid SOURCE_DIR BINARY_DIR)
3939
ENDFOREACH(BYPRODUCT)
4040

4141
# Build project
42-
set(CONFIGURE_COMMAND ./configure "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" "CFLAGS=${PASSTHROUGH_CMAKE_C_FLAGS} -fPIC" "CXXFLAGS=${PASSTHROUGH_CMAKE_CXX_FLAGS} -fPIC" --enable-shared=no --with-cxx --without-perl --without-php --without-pgsql "--prefix=${BINARY_DIR}/thirdparty/ossp-uuid-install")
42+
if(WIN32)
43+
set(ADDITIONAL_COMPILER_FLAGS "")
44+
else()
45+
set(ADDITIONAL_COMPILER_FLAGS "-fPIC")
46+
endif()
47+
set(CONFIGURE_COMMAND ./configure "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" "CFLAGS=${PASSTHROUGH_CMAKE_C_FLAGS} ${ADDITIONAL_COMPILER_FLAGS}" "CXXFLAGS=${PASSTHROUGH_CMAKE_CXX_FLAGS} ${ADDITIONAL_COMPILER_FLAGS}" --enable-shared=no --with-cxx --without-perl --without-php --without-pgsql "--prefix=${BINARY_DIR}/thirdparty/ossp-uuid-install")
48+
4349
string(TOLOWER "${CMAKE_BUILD_TYPE}" build_type)
4450
if(NOT build_type MATCHES debug)
4551
list(APPEND CONFIGURE_COMMAND --enable-debug=yes)

cmake/BundledOpenSSL.cmake

+1-1
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ function(use_openssl SOURCE_DIR BINARY_DIR)
7575
URL_HASH "SHA256=b3aa61334233b852b63ddb048df181177c2c659eb9d4376008118f9c08d07674"
7676
SOURCE_DIR "${BINARY_DIR}/thirdparty/openssl-src"
7777
BUILD_IN_SOURCE true
78-
CONFIGURE_COMMAND perl Configure "CFLAGS=${PASSTHROUGH_CMAKE_C_FLAGS} -fPIC" "CXXFLAGS=${PASSTHROUGH_CMAKE_CXX_FLAGS} -fPIC" ${OPENSSL_SHARED_FLAG} no-tests "--prefix=${OPENSSL_BIN_DIR}" "--openssldir=${OPENSSL_BIN_DIR}"
78+
CONFIGURE_COMMAND perl Configure "CFLAGS=${PASSTHROUGH_CMAKE_C_FLAGS}" "CXXFLAGS=${PASSTHROUGH_CMAKE_CXX_FLAGS}" ${OPENSSL_SHARED_FLAG} no-tests "--prefix=${OPENSSL_BIN_DIR}" "--openssldir=${OPENSSL_BIN_DIR}"
7979
BUILD_BYPRODUCTS ${OPENSSL_LIBRARIES_LIST}
8080
PATCH_COMMAND ${PC}
8181
EXCLUDE_FROM_ALL TRUE

extensions/aws/processors/S3Processor.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ void S3Processor::onSchedule(const std::shared_ptr<core::ProcessContext>& contex
121121

122122
if (auto communications_timeout = context->getProperty<core::TimePeriodValue>(CommunicationsTimeout)) {
123123
logger_->log_debug("S3Processor: Communications Timeout %" PRId64 " ms", communications_timeout->getMilliseconds().count());
124-
client_config_->connectTimeoutMs = gsl::narrow<int64_t>(communications_timeout->getMilliseconds().count());
124+
client_config_->connectTimeoutMs = gsl::narrow<long>(communications_timeout->getMilliseconds().count()); // NOLINT(runtime/int)
125125
} else {
126126
throw Exception(PROCESS_SCHEDULE_EXCEPTION, "Communications Timeout missing or invalid");
127127
}

extensions/aws/processors/S3Processor.h

+1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
#include <optional>
2626
#include <set>
2727
#include <string>
28+
#include <string_view>
2829
#include <utility>
2930

3031
#include "aws/core/auth/AWSCredentialsProvider.h"

extensions/aws/s3/S3Wrapper.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ std::optional<S3Wrapper::UploadPartsResult> S3Wrapper::uploadParts(const PutObje
129129
auto upload_part_request = Aws::S3::Model::UploadPartRequest{}
130130
.WithBucket(put_object_params.bucket)
131131
.WithKey(put_object_params.object_key)
132-
.WithPartNumber(part_number)
132+
.WithPartNumber(gsl::narrow<int>(part_number))
133133
.WithUploadId(upload_state.upload_id);
134134
upload_part_request.SetBody(stream_ptr);
135135

@@ -164,7 +164,7 @@ std::optional<Aws::S3::Model::CompleteMultipartUploadResult> S3Wrapper::complete
164164
for (size_t i = 0; i < upload_parts_result.part_etags.size(); ++i) {
165165
auto part = Aws::S3::Model::CompletedPart{}
166166
.WithETag(upload_parts_result.part_etags[i])
167-
.WithPartNumber(i + 1);
167+
.WithPartNumber(gsl::narrow<int>(i + 1));
168168
completed_multipart_upload.AddParts(part);
169169
}
170170

extensions/azure/storage/AzureDataLakeStorageClient.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ std::unique_ptr<Azure::Storage::Files::DataLake::DataLakeFileSystemClient> Azure
3939
const AzureStorageCredentials& credentials, const std::string& file_system_name, std::optional<uint64_t> number_of_retries) {
4040
Azure::Storage::Files::DataLake::DataLakeClientOptions options;
4141
if (number_of_retries) {
42-
options.Retry.MaxRetries = *number_of_retries;
42+
options.Retry.MaxRetries = gsl::narrow<int32_t>(*number_of_retries);
4343
}
4444

4545
if (credentials.getUseManagedIdentityCredentials()) {

extensions/elasticsearch/PostElasticsearch.cpp

+4-4
Original file line numberDiff line numberDiff line change
@@ -142,15 +142,15 @@ class ElasticPayload {
142142
[[nodiscard]] std::string headerString() const {
143143
rapidjson::Document first_line = rapidjson::Document(rapidjson::kObjectType);
144144

145-
auto operation_index_key = rapidjson::Value(operation_.data(), operation_.size());
145+
auto operation_index_key = rapidjson::Value(operation_.data(), gsl::narrow<rapidjson::SizeType>(operation_.size()));
146146
first_line.AddMember(operation_index_key, rapidjson::Value{rapidjson::kObjectType}, first_line.GetAllocator());
147147
auto& operation_request = first_line[operation_.c_str()];
148148

149-
auto index_json = rapidjson::Value(index_.data(), index_.size());
149+
auto index_json = rapidjson::Value(index_.data(), gsl::narrow<rapidjson::SizeType>(index_.size()));
150150
operation_request.AddMember("_index", index_json, first_line.GetAllocator());
151151

152152
if (id_) {
153-
auto id_json = rapidjson::Value(id_->data(), id_->size());
153+
auto id_json = rapidjson::Value(id_->data(), gsl::narrow<rapidjson::SizeType>(id_->size()));
154154
operation_request.AddMember("_id", id_json, first_line.GetAllocator());
155155
}
156156

@@ -211,7 +211,7 @@ void processResponseFromElastic(const rapidjson::Document& response, core::Proce
211211
auto& items = response["items"];
212212
gsl_Expects(items.IsArray());
213213
gsl_Expects(items.Size() == flowfiles_sent.size());
214-
for (size_t i = 0; i < items.Size(); ++i) {
214+
for (rapidjson::SizeType i = 0; i < items.Size(); ++i) {
215215
gsl_Expects(items[i].IsObject());
216216
for (auto it = items[i].MemberBegin(); it != items[i].MemberEnd(); ++it) {
217217
addAttributesFromResponse("elasticsearch", it, *flowfiles_sent[i]);

extensions/expression-language/common/Value.h

+2-2
Original file line numberDiff line numberDiff line change
@@ -142,9 +142,9 @@ class Value {
142142
if (value.empty()) return default_value;
143143
try {
144144
return std::invoke(conversion_function, value);
145-
} catch (const std::invalid_argument& ex) {
145+
} catch (const std::invalid_argument&) {
146146
throw std::invalid_argument{utils::StringUtils::join_pack(context, " failed to parse \"", value, "\": invalid argument")};
147-
} catch (const std::out_of_range& ex) {
147+
} catch (const std::out_of_range&) {
148148
throw std::out_of_range{utils::StringUtils::join_pack(context, " failed to parse \"", value, "\": out of range")};
149149
}
150150
}

extensions/libarchive/WriteArchiveStream.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ size_t WriteArchiveStreamImpl::write(const uint8_t* data, size_t len) {
115115
}
116116
gsl_Expects(data);
117117

118-
int result = archive_write_data(arch_.get(), data, len);
118+
int result = gsl::narrow<int>(archive_write_data(arch_.get(), data, len));
119119
if (result < 0) {
120120
logger_->log_error("Archive write data error %s", archive_error_string(arch_.get()));
121121
arch_entry_.reset();

extensions/librdkafka/ConsumeKafka.cpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ void rebalance_cb(rd_kafka_t* rk, rd_kafka_resp_err_t trigger, rd_kafka_topic_pa
113113
} // namespace
114114

115115
void ConsumeKafka::create_topic_partition_list() {
116-
kf_topic_partition_list_ = { rd_kafka_topic_partition_list_new(topic_names_.size()), utils::rd_kafka_topic_partition_list_deleter() };
116+
kf_topic_partition_list_ = { rd_kafka_topic_partition_list_new(gsl::narrow<int>(topic_names_.size())), utils::rd_kafka_topic_partition_list_deleter() };
117117

118118
// On subscriptions any topics prefixed with ^ will be regex matched
119119
if (utils::StringUtils::equalsIgnoreCase(TOPIC_FORMAT_PATTERNS, topic_name_format_)) {
@@ -229,8 +229,8 @@ std::vector<std::unique_ptr<rd_kafka_message_t, utils::rd_kafka_message_deleter>
229229
auto elapsed = std::chrono::steady_clock::now() - start;
230230
while (messages.size() < max_poll_records_ && elapsed < max_poll_time_milliseconds_) {
231231
logger_->log_debug("Polling for new messages for %d milliseconds...", max_poll_time_milliseconds_.count());
232-
std::unique_ptr<rd_kafka_message_t, utils::rd_kafka_message_deleter>
233-
message { rd_kafka_consumer_poll(consumer_.get(), std::chrono::duration_cast<std::chrono::milliseconds>(max_poll_time_milliseconds_ - elapsed).count()), utils::rd_kafka_message_deleter() };
232+
const auto timeout_ms = gsl::narrow<int>(std::chrono::duration_cast<std::chrono::milliseconds>(max_poll_time_milliseconds_ - elapsed).count());
233+
std::unique_ptr<rd_kafka_message_t, utils::rd_kafka_message_deleter> message{rd_kafka_consumer_poll(consumer_.get(), timeout_ms)};
234234
if (!message) {
235235
break;
236236
}

extensions/librdkafka/PublishKafka.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@ class ReadCallback {
306306
error_ = rd_kafka_err2str(err);
307307
return read_size_;
308308
}
309-
read_size_ += readRet;
309+
read_size_ += gsl::narrow<uint32_t>(readRet);
310310
}
311311
return read_size_;
312312
}

extensions/mqtt/processors/PublishMQTT.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,7 @@ void PublishMQTT::setMqtt5Properties(MQTTAsync_message& message, const std::stri
216216
if (message_expiry_interval_.has_value()) {
217217
MQTTProperty property;
218218
property.identifier = MQTTPROPERTY_CODE_MESSAGE_EXPIRY_INTERVAL;
219-
property.value.integer4 = message_expiry_interval_->count(); // NOLINT(cppcoreguidelines-pro-type-union-access)
219+
property.value.integer4 = gsl::narrow<int>(message_expiry_interval_->count()); // NOLINT(cppcoreguidelines-pro-type-union-access)
220220
MQTTProperties_add(&message.properties, &property);
221221
}
222222

extensions/pdh/PDHCounters.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ std::string PDHCounter::getCounterName() const {
5555
}
5656

5757
void SinglePDHCounter::addToJson(rapidjson::Value& body, rapidjson::Document::AllocatorType& alloc) const {
58-
rapidjson::Value key(getCounterName().c_str(), getCounterName().length(), alloc);
58+
rapidjson::Value key(getCounterName().c_str(), gsl::narrow<rapidjson::SizeType>(getCounterName().length()), alloc);
5959
rapidjson::Value& group_node = acquireNode(getObjectName(), body, alloc);
6060
group_node.AddMember(key, getValue(), alloc);
6161
}
@@ -91,7 +91,7 @@ void PDHCounterArray::addToJson(rapidjson::Value& body, rapidjson::Document::All
9191
rapidjson::Value& counter_node = acquireNode(node_name, group_node, alloc);
9292
rapidjson::Value value = getValue(i);
9393
rapidjson::Value key;
94-
key.SetString(getCounterName().c_str(), getCounterName().length(), alloc);
94+
key.SetString(getCounterName().c_str(), gsl::narrow<rapidjson::SizeType>(getCounterName().length()), alloc);
9595
counter_node.AddMember(key, value, alloc);
9696
}
9797
}

extensions/python/tests/PythonManifestTests.cpp

+2-1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
#include "flow-tests/TestControllerWithFlow.h"
2626
#include "EmptyFlow.h"
2727
#include "c2/C2MetricsPublisher.h"
28+
#include "utils/gsl.h"
2829

2930
using minifi::state::response::SerializedResponseNode;
3031

@@ -40,7 +41,7 @@ const SerializedResponseNode& getNode(const std::vector<SerializedResponseNode>&
4041
for (auto& node : nodes) {
4142
if (node.name == name) return node;
4243
}
43-
assert(false);
44+
gsl_FailFast();
4445
}
4546

4647
TEST_CASE("Python processor's description is part of the manifest") {

extensions/smb/tests/SmbConnectionControllerServiceTests.cpp

+2-1
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
#include "Catch.h"
2121
#include "SmbConnectionControllerService.h"
2222
#include "utils/TempSmbShare.h"
23+
#include "utils/UnicodeConversion.h"
2324

2425
namespace org::apache::nifi::minifi::extensions::smb::test {
2526

@@ -49,7 +50,7 @@ TEST_CASE_METHOD(SmbConnectionControllerServiceFixture, "SmbConnectionController
4950

5051
SECTION("Valid share") {
5152
plan_->setProperty(smb_connection_node_, SmbConnectionControllerService::Hostname, "localhost");
52-
plan_->setProperty(smb_connection_node_, SmbConnectionControllerService::Share, minifi::utils::OsUtils::wideStringToString(share_local_name));
53+
plan_->setProperty(smb_connection_node_, SmbConnectionControllerService::Share, minifi::utils::to_string(share_local_name));
5354

5455
REQUIRE_NOTHROW(plan_->finalize());
5556

extensions/sql/data/JSONSQLWriter.cpp

+3-11
Original file line numberDiff line numberDiff line change
@@ -22,11 +22,7 @@
2222
#include "rapidjson/prettywriter.h"
2323
#include "Exception.h"
2424

25-
namespace org {
26-
namespace apache {
27-
namespace nifi {
28-
namespace minifi {
29-
namespace sql {
25+
namespace org::apache::nifi::minifi::sql {
3026

3127
JSONSQLWriter::JSONSQLWriter(bool pretty, ColumnFilter column_filter)
3228
: pretty_(pretty), current_batch_(rapidjson::kArrayType), column_filter_(std::move(column_filter)) {
@@ -83,7 +79,7 @@ void JSONSQLWriter::addToJSONRow(const std::string& column_name, rapidjson::Valu
8379

8480
rapidjson::Value JSONSQLWriter::toJSONString(const std::string& s) {
8581
rapidjson::Value jsonValue;
86-
jsonValue.SetString(s.c_str(), s.size(), current_batch_.GetAllocator());
82+
jsonValue.SetString(s.c_str(), gsl::narrow<rapidjson::SizeType>(s.size()), current_batch_.GetAllocator());
8783

8884
return jsonValue;
8985
}
@@ -102,8 +98,4 @@ std::string JSONSQLWriter::toString() {
10298
return {buffer.GetString(), buffer.GetSize()};
10399
}
104100

105-
} // namespace sql
106-
} // namespace minifi
107-
} // namespace nifi
108-
} // namespace apache
109-
} // namespace org
101+
} // namespace org::apache::nifi::minifi::sql

extensions/sql/data/JSONSQLWriter.h

+2-12
Original file line numberDiff line numberDiff line change
@@ -24,11 +24,7 @@
2424

2525
#include "SQLWriter.h"
2626

27-
namespace org {
28-
namespace apache {
29-
namespace nifi {
30-
namespace minifi {
31-
namespace sql {
27+
namespace org::apache::nifi::minifi::sql {
3228

3329
class JSONSQLWriter: public SQLWriter {
3430
public:
@@ -63,10 +59,4 @@ class JSONSQLWriter: public SQLWriter {
6359
ColumnFilter column_filter_;
6460
};
6561

66-
} /* namespace sql */
67-
} /* namespace minifi */
68-
} /* namespace nifi */
69-
} /* namespace apache */
70-
} /* namespace org */
71-
72-
62+
} // namespace org::apache::nifi::minifi::sql

extensions/standard-processors/processors/PutTCP.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ asio::awaitable<std::error_code> ConnectionHandler<SocketType>::establishNewConn
215215
continue;
216216
}
217217
if (max_size_of_socket_send_buffer_)
218-
socket.lowest_layer().set_option(TcpSocket::send_buffer_size(*max_size_of_socket_send_buffer_));
218+
socket.lowest_layer().set_option(TcpSocket::send_buffer_size(gsl::narrow<int>(*max_size_of_socket_send_buffer_)));
219219
socket_.emplace(std::move(socket));
220220
co_return std::error_code();
221221
}

0 commit comments

Comments (0)