Description
I'm using gtest to do some performance testing. On Linux the request completes in under 1 ms, but the same code on Windows takes ~30 ms.
That seems like a significant difference, but I don't understand why.
Client:

```cpp
// Create client
httplib::Client cli(ADDR, PORT);
ASSERT_TRUE(cli.is_valid());

// Measure the GET request/response round trip
auto start_time = std::chrono::high_resolution_clock::now();
auto res = cli.Get(ROUTE_GET_TEST_TEXT);
auto end_time = std::chrono::high_resolution_clock::now();
auto elapsed_time = std::chrono::duration_cast<std::chrono::microseconds>(end_time - start_time).count();

// Verify response (Result's operator bool already checks for a null response)
ASSERT_TRUE(res);
EXPECT_EQ(httplib::StatusCode::OK_200, res->status);
EXPECT_EQ(httplib::Error::Success, res.error());
EXPECT_EQ(TXT_DEFAULT_RESPONSE, res->body);

// Verify elapsed time
ASSERT_LE(elapsed_time, TIME_GET_US);
```
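Note that the first request on a fresh `httplib::Client` also pays the one-time TCP connection setup cost, so a single timed call mixes connection overhead with per-request latency. Below is a minimal sketch of the same measurement that warms up the connection, enables keep-alive, and averages over repeated requests; `NUM_SAMPLES` is a hypothetical constant, and `std::chrono::steady_clock` is used because it is guaranteed monotonic, which `high_resolution_clock` is not on every platform:

```cpp
#include <chrono>
#include <gtest/gtest.h>
#include "httplib.h"

TEST(PerfTest, AveragedGetLatency) {
    constexpr int NUM_SAMPLES = 100;  // hypothetical sample count

    httplib::Client cli(ADDR, PORT);
    cli.set_keep_alive(true);  // reuse one TCP connection across requests
    ASSERT_TRUE(cli.is_valid());

    // Warm-up request: pays the one-time connection setup cost up front
    ASSERT_TRUE(cli.Get(ROUTE_GET_TEST_TEXT));

    auto start_time = std::chrono::steady_clock::now();
    for (int i = 0; i < NUM_SAMPLES; ++i) {
        auto res = cli.Get(ROUTE_GET_TEST_TEXT);
        ASSERT_TRUE(res);
    }
    auto end_time = std::chrono::steady_clock::now();

    auto avg_us = std::chrono::duration_cast<std::chrono::microseconds>(
                      end_time - start_time).count() / NUM_SAMPLES;
    EXPECT_LE(avg_us, TIME_GET_US);
}
```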
Server handler:

```cpp
// Nothing is captured, so the lambda needs no capture list
_svr.Get(ROUTE_GET_TEST_TEXT, [](const httplib::Request& req, httplib::Response& res) {
    res.set_content(TXT_DEFAULT_RESPONSE, "text/plain");
});
```
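The snippets don't show how `_svr` is started. A minimal sketch of a gtest fixture that runs it on a background thread (the fixture shape is an assumption; `wait_until_ready()` requires a recent cpp-httplib, and older versions can poll `is_running()` instead):

```cpp
#include <thread>
#include <gtest/gtest.h>
#include "httplib.h"

class PerfTest : public ::testing::Test {
protected:
    void SetUp() override {
        _svr.Get(ROUTE_GET_TEST_TEXT, [](const httplib::Request&, httplib::Response& res) {
            res.set_content(TXT_DEFAULT_RESPONSE, "text/plain");
        });
        // Run the blocking listen() call on its own thread
        _listener = std::thread([this] { _svr.listen(ADDR, PORT); });
        _svr.wait_until_ready();  // block until the server is accepting connections
    }
    void TearDown() override {
        _svr.stop();
        _listener.join();
    }
    httplib::Server _svr;
    std::thread _listener;
};
```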
Common:

```cpp
#define ADDR "127.0.0.1"
#define PORT 8181
#define TXT_DEFAULT_RESPONSE "Default Response to txt request"
```
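The snippets also reference `ROUTE_GET_TEST_TEXT` and `TIME_GET_US`, which aren't shown above; hypothetical definitions for completeness (the real route and threshold may differ):

```cpp
// Assumed values, not taken from the issue
#define ROUTE_GET_TEST_TEXT "/test/text"
#define TIME_GET_US 1000  // upper bound in microseconds (assumption)
```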