Open
Description
Given the following file:
# Injects a thin HTTP client (built on `HTTPoison.Base`) into the using module.
# Adds `get/3`, `delete/3`, `post/4`, `patch/4` wrappers that all funnel through
# `response/5`, which prefixes a base URL and logs start/finish/error events.
defmodule Http do
# All definitions below are emitted into the caller via `use Http`.
defmacro __using__(_) do
quote do
use HTTPoison.Base
# GET request; delegates to response/5 with an empty body.
def get(url, headers \\ [], options \\ []) do
response(:get, url, "", headers, options)
end
# DELETE request; delegates to response/5 with an empty body.
def delete(url, headers \\ [], options \\ []) do
response(:delete, url, "", headers, options)
end
# POST request with a body; delegates to response/5.
def post(url, body, headers \\ [], options \\ []) do
response(:post, url, body, headers, options)
end
# PATCH request with a body; delegates to response/5.
def patch(url, body, headers \\ [], options \\ []) do
response(:patch, url, body, headers, options)
end
# Performs the request, logging before and after, and returns the raw
# `{:ok, %HTTPoison.Response{}}` / `{:error, %HTTPoison.Error{}}` tuple
# from `request/5` unchanged (the case below is used only for logging).
def response(method, url, body, headers \\ [], options \\ []) do
# NOTE(review): dot access requires `options` to be a map/struct with a
# `:base_url` key; the `\\ []` default (a keyword list) would raise here.
# Presumably callers always pass a map — TODO confirm at call sites.
url = options.base_url <> url
log(method, :start, url, options)
start_time = now()
response = request(method, url, body, headers, options)
case response do
{:ok, response} ->
log(method, :finish, url, options, response, start_time)
{:error, response} ->
log(method, :error, url, options, response, start_time)
end
response
end
# HTTPoison.Base callback: a struct is flattened to a keyword list and
# re-dispatched to the clause below so defaults get merged in.
def process_request_options(%_{} = options) do
options
|> Map.from_struct()
|> Enum.into([])
|> process_request_options()
end
# HTTPoison.Base callback: caller-supplied options win over these defaults.
def process_request_options(options) do
[
timeout: 10_000,
recv_timeout: 15_000,
hackney: [pool: :http_default_pool]
]
|> Keyword.merge(options)
end
# Maps are JSON-encoded; binaries pass through untouched.
def process_request_body(body) when is_map(body), do: Poison.encode!(body)
def process_request_body(body) when is_binary(body), do: body
# Best-effort JSON decode of the response body; non-JSON bodies are
# returned verbatim rather than raising.
def process_response_body(body) do
case Poison.Parser.parse(body) do
{:ok, parsed_body} -> parsed_body
{:error, _} -> body
end
end
# Pass-through callback; kept as an override point for using modules.
def process_request_headers(headers) do
headers
end
# Current UTC time in milliseconds since the epoch (for elapsed-time math).
defp now(), do: DateTime.utc_now() |> DateTime.to_unix(:millisecond)
# Emits one structured log line per request event. `elapsed` is only set
# when `started_at` was supplied (i.e. on :finish/:error, not :start).
# NOTE(review): also assumes `options` is a map with :base_url,
# :request_id and :generic_path — same concern as in response/5.
defp log(request_method, at, path, options, response \\ nil, started_at \\ nil) do
data = %{
api_request: true,
at: at,
host: options.base_url,
request_id: options.request_id,
url: path,
method: request_method,
path: options.generic_path,
elapsed: started_at && now() - started_at
# status: response && response.status_code,
# reason: response && response.reason
}
# remove keys with falsy values
data = for {key, value} <- data, value, into: %{}, do: {key, value}
# Error structs get logged at :error level; everything else at :debug.
# NOTE(review): the `error` binding below is never used (compiler
# warning); `%HTTPoison.Error{} ->` alone would suffice.
case response do
nil -> Plexy.Logger.debug(data)
%HTTPoison.Response{} -> Plexy.Logger.debug(data)
%HTTPoison.Error{} = error -> Plexy.Logger.error(data)
end
end
end
end
end
We get the following profiler output:
FUNCTIONS SORTED ON SELF TIME
count total (s) self (s) function
9466 10.457848 <SNR>77_is_string_or_comment()
312 11.036247 1.915420 elixir#indent#handle_inside_nested_construct()
8828 9.363995 0.120360 elixir#indent#searchpair_back_skip()
1522 1.123686 0.106535 <SNR>77_ends_with()
384 13.163921 0.071142 elixir#indent#indent()
52 0.205268 0.054884 elixir#indent#handle_inside_generic_block()
382 0.210430 0.053353 elixir#indent#handle_starts_with_end()
2542 0.096197 0.048705 <SNR>77_starts_with()
398 61.177775 0.024607 <SNR>77_get_base_indent()
326 0.164678 0.021031 elixir#indent#handle_starts_with_close_bracket()
4644 0.017567 <SNR>77_debug()
312 0.018112 0.006850 elixir#indent#handle_starts_with_binary_operator()
334 0.020190 0.006785 elixir#indent#handle_starts_with_pipe()
358 0.013070 0.005970 elixir#indent#handle_starts_with_mid_or_end_block_keyword()
358 0.087979 0.005809 elixir#indent#handle_following_trailing_do()
334 0.037895 0.003777 elixir#indent#handle_following_trailing_binary_operator()
230 0.003508 elixir#indent#handle_starts_with_comment()
8 0.003288 <SNR>77_find_last_pos()
384 13.166979 0.003058 elixir#indent()
1274 0.002925 <SNR>77_keyword()
The clear culprit is delegating to the syntax engine over and over. One thought: could we filter out all of the strings in a preprocessing step? That would surely be cheaper than re-checking them on every call.