diff --git a/lib/tesla/adapter/gun.ex b/lib/tesla/adapter/gun.ex
index 63744bb..6293bfe 100644
--- a/lib/tesla/adapter/gun.ex
+++ b/lib/tesla/adapter/gun.ex
@@ -1,471 +1,474 @@
if Code.ensure_loaded?(:gun) do
defmodule Tesla.Adapter.Gun do
@moduledoc """
Adapter for [gun](https://github.com/ninenines/gun).
Remember to add `{:gun, "~> 1.3"}` to dependencies.
In version 1.3 gun sends the `host` header with the port; this is fixed in the master branch.
Also, you need to recompile tesla after adding the `:gun` dependency:
```
mix deps.clean tesla
mix deps.compile tesla
```
## Example usage
```
# set globally in config/config.exs
config :tesla, :adapter, Tesla.Adapter.Gun
# set per module
defmodule MyClient do
use Tesla
adapter Tesla.Adapter.Gun
end
```
## Adapter specific options
- `:timeout` - Time in milliseconds the process will wait for gun messages.
- `:body_as` - What will be returned in the `%Tesla.Env{}` body key. Possible values - `:plain`, `:stream`, `:chunks`. Defaults to `:plain`.
- `:plain` - as binary.
- `:stream` - as stream. If you don't want to close the connection (because you want to reuse it later), pass `close_conn: false` in the adapter opts.
- `:chunks` - as chunks. You can read the response body in chunks using the `Tesla.Adapter.Gun.read_chunk/3` function (see the sketch after this list).
Processing the chunks and checking the body size must be done by yourself. An example of a processing function is in `test/tesla/adapter/gun_test.exs` - `Tesla.Adapter.GunTest.read_body/4`. If you don't need the connection afterwards, don't forget to close it with `Tesla.Adapter.Gun.close/1`.
- `:max_body` - Maximum response body size in bytes. Works only with `body_as: :plain`; with other settings you need to check the response body size yourself.
- `:conn` - An already opened gun connection pid, used for reusing gun connections.
- `:close_conn` - Whether to close the connection after receiving the full response body. Used for reusing gun connections. Defaults to `true`.
- `:certificates_verification` - Enable SSL certificate verification. See [erlang-certifi](https://github.com/certifi/erlang-certifi) and [ssl_verify_fun.erl](https://github.com/deadtrickster/ssl_verify_fun.erl).
- `:proxy` - Proxy for requests. **SOCKS proxies are supported only on the gun master branch**. Examples: `{'localhost', 1234}`, `{{127, 0, 0, 1}, 1234}`, `{:socks5, 'localhost', 1234}`.
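With `body_as: :chunks`, the response body is a map containing the connection `pid`, the `stream` reference and the adapter `opts`, which can be passed to `read_chunk/3`. A minimal sketch (the client module is hypothetical; `close_conn: false` is assumed so the connection is closed explicitly at the end):
```
# assumes a client whose adapter is configured with
# body_as: :chunks and close_conn: false (hypothetical MyChunksClient)
{:ok, env} = MyChunksClient.get("/stream")
%{pid: pid, stream: stream, opts: opts} = env.body

# read chunks recursively until the :fin frame arrives
read_all = fn read_all, acc ->
  case Tesla.Adapter.Gun.read_chunk(pid, stream, opts) do
    {:nofin, part} -> read_all.(read_all, acc <> part)
    {:fin, part} -> acc <> part
  end
end

body = read_all.(read_all, "")
:ok = Tesla.Adapter.Gun.close(pid)
```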
## [Gun options](https://ninenines.eu/docs/en/gun/1.3/manual/gun/)
- `:connect_timeout` - Connection timeout.
- `:http_opts` - Options specific to the HTTP protocol.
- `:http2_opts` - Options specific to the HTTP/2 protocol.
- `:protocols` - Ordered list of preferred protocols. Defaults to `[:http2, :http]` for `:tls` and `[:http]` for `:tcp`.
- `:trace` - Whether to enable dbg tracing of the connection process. Should only be used during debugging. Defaults to `false`.
- `:transport` - Whether to use TLS or plain TCP. The default varies depending on the port used: port 443 defaults to `:tls`, all other ports default to `:tcp`.
- `:transport_opts` - Transport options. They are TCP options or TLS options depending on the selected transport. Default: `[]`. Gun version 1.3.
- `:tls_opts` - TLS transport options. Default: `[]`. Gun master branch only.
- `:tcp_opts` - TCP transport options. Default: `[]`. Gun master branch only.
- `:socks_opts` - SOCKS options. Default: `[]`. Gun master branch only.
- `:ws_opts` - Options specific to the Websocket protocol. Default: `%{}`.
- `:compress` - Whether to enable permessage-deflate compression. This does not guarantee that compression will be used as it is the server that ultimately decides. Defaults to false.
- `:protocols` - A non-empty list enables Websocket protocol negotiation. The list of protocols will be sent in the sec-websocket-protocol request header. The handler module interface is currently undocumented and must be set to `gun_ws_h`.
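Adapter specific options and gun options are passed together as adapter opts. A minimal sketch (the module name and option values are only illustrative):
```
defmodule MyStreamingClient do
  use Tesla

  # per-module adapter options
  adapter Tesla.Adapter.Gun,
    body_as: :stream,
    certificates_verification: true,
    timeout: 5_000,
    connect_timeout: 5_000
end

# or per client, built at runtime
client = Tesla.client([], {Tesla.Adapter.Gun, body_as: :stream})
```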
"""
@behaviour Tesla.Adapter
alias Tesla.Multipart
# TODO: update list after update to gun 2.0
@gun_keys [
:connect_timeout,
:http_opts,
:http2_opts,
:protocols,
:retry,
:retry_timeout,
:trace,
:transport,
:socks_opts,
:ws_opts
]
@default_timeout 1_000
@impl Tesla.Adapter
def call(env, opts) do
with {:ok, status, headers, body} <- request(env, opts) do
{:ok, %{env | status: status, headers: format_headers(headers), body: body}}
end
end
@doc """
Reads a chunk of the response body.
Returns `{:fin, binary()}` once the whole body has been received, otherwise returns `{:nofin, binary()}`.
"""
@spec read_chunk(pid(), reference(), keyword() | map()) ::
{:fin, binary()} | {:nofin, binary()} | {:error, atom()}
def read_chunk(pid, stream, opts) do
with {status, _} = chunk when status in [:fin, :error] <- do_read_chunk(pid, stream, opts) do
if opts[:close_conn], do: close(pid)
chunk
end
end
defp do_read_chunk(pid, stream, opts) do
receive do
{:gun_data, ^pid, ^stream, :fin, body} ->
{:fin, body}
{:gun_data, ^pid, ^stream, :nofin, part} ->
{:nofin, part}
{:DOWN, _, _, _, reason} ->
{:error, reason}
after
opts[:timeout] || @default_timeout ->
{:error, :recv_chunk_timeout}
end
end
@doc """
Brutally close the `gun` connection.
"""
@spec close(pid()) :: :ok
defdelegate close(pid), to: :gun
defp format_headers(headers) do
for {key, value} <- headers do
{String.downcase(to_string(key)), to_string(value)}
end
end
defp request(env, opts) do
request(
Tesla.Adapter.Shared.format_method(env.method),
Tesla.build_url(env.url, env.query),
format_headers(env.headers),
env.body || "",
Tesla.Adapter.opts(
[close_conn: true, body_as: :plain, send_body: :at_once, receive: true],
env,
opts
)
|> Enum.into(%{})
)
end
defp request(method, url, headers, %Stream{} = body, opts),
do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))
defp request(method, url, headers, body, opts) when is_function(body),
do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))
defp request(method, url, headers, %Multipart{} = mp, opts) do
headers = headers ++ Multipart.headers(mp)
body = Multipart.body(mp)
do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))
end
defp request(method, url, headers, body, opts),
do: do_request(method, url, headers, body, opts)
defp do_request(method, url, headers, body, opts) do
uri = URI.parse(url)
path = Tesla.Adapter.Shared.prepare_path(uri.path, uri.query)
with {:ok, pid, opts} <- open_conn(uri, opts) do
stream = open_stream(pid, method, path, headers, body, opts)
response = read_response(pid, stream, opts)
if opts[:close_conn] and opts[:body_as] not in [:stream, :chunks] do
close(pid)
end
response
end
end
+ @dialyzer [{:nowarn_function, open_conn: 2}, :no_match]
defp open_conn(%{scheme: scheme, host: host, port: port}, %{conn: conn} = opts)
when is_pid(conn) do
info = :gun.info(conn)
conn_scheme =
case info do
# gun master branch support, which has `origin_scheme` in connection info
%{origin_scheme: scheme} -> scheme
%{transport: :tls} -> "https"
_ -> "http"
end
conn_host =
case :inet.ntoa(info.origin_host) do
{:error, :einval} -> info.origin_host
ip -> ip
end
if conn_scheme == scheme and to_string(conn_host) == host and info.origin_port == port do
{:ok, conn, Map.put(opts, :receive, false)}
else
{:error, :invalid_conn}
end
end
defp open_conn(uri, opts) do
opts = maybe_add_transport(uri, opts)
tls_opts =
if uri.scheme == "https" do
opts
|> fetch_tls_opts()
|> maybe_add_verify_options(opts, uri)
else
[]
end
gun_opts = Map.take(opts, @gun_keys)
with {:ok, conn} <- do_open_conn(uri, opts, gun_opts, tls_opts) do
{:ok, conn, opts}
end
end
defp maybe_add_transport(%URI{scheme: "https"}, opts), do: Map.put(opts, :transport, :tls)
defp maybe_add_transport(_, opts), do: opts
# Support for gun master branch, where transport_opts was split into tls_opts and tcp_opts
# https://github.com/ninenines/gun/blob/491ddf58c0e14824a741852fdc522b390b306ae2/doc/src/manual/gun.asciidoc#changelog
# TODO: remove after update to gun 2.0
defp fetch_tls_opts(%{tls_opts: tls_opts}) when is_list(tls_opts), do: tls_opts
defp fetch_tls_opts(%{transport_opts: tls_opts}) when is_list(tls_opts), do: tls_opts
defp fetch_tls_opts(_), do: []
defp maybe_add_verify_options(tls_opts, %{certificates_verification: true}, %{host: host}) do
charlist =
host
|> to_charlist()
|> :idna.encode()
security_opts = [
verify: :verify_peer,
cacertfile: CAStore.file_path(),
depth: 20,
reuse_sessions: false,
verify_fun: {&:ssl_verify_hostname.verify_fun/3, [check_hostname: charlist]}
]
Keyword.merge(security_opts, tls_opts)
end
defp maybe_add_verify_options(tls_opts, _, _), do: tls_opts
defp do_open_conn(uri, %{proxy: {proxy_host, proxy_port}}, gun_opts, tls_opts) do
connect_opts =
uri
|> tunnel_opts()
|> tunnel_tls_opts(uri.scheme, tls_opts)
with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
{:ok, _} <- :gun.await_up(pid),
stream <- :gun.connect(pid, connect_opts),
{:response, :fin, 200, _} <- :gun.await(pid, stream) do
{:ok, pid}
end
end
defp do_open_conn(uri, %{proxy: {proxy_type, proxy_host, proxy_port}}, gun_opts, tls_opts) do
version =
proxy_type
|> to_string()
|> String.last()
|> case do
"4" -> 4
_ -> 5
end
socks_opts =
uri
|> tunnel_opts()
|> tunnel_tls_opts(uri.scheme, tls_opts)
|> Map.put(:version, version)
gun_opts =
gun_opts
|> Map.put(:protocols, [:socks])
|> Map.update(:socks_opts, socks_opts, &Map.merge(socks_opts, &1))
with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
{:ok, _} <- :gun.await_up(pid) do
{:ok, pid}
else
{:error, {:options, {:protocols, [:socks]}}} ->
{:error, "socks protocol is not supported"}
error ->
error
end
end
+ @dialyzer [{:nowarn_function, do_open_conn: 4}, :no_match]
defp do_open_conn(uri, opts, gun_opts, tls_opts) do
tcp_opts = Map.get(opts, :tcp_opts, [])
# tls_opts and tcp_opts are only available when gun is used from the master branch
opts_with_master_keys =
gun_opts
|> Map.put(:tls_opts, tls_opts)
|> Map.put(:tcp_opts, tcp_opts)
host = domain_or_ip(uri.host)
with {:ok, pid} <- gun_open(host, uri.port, opts_with_master_keys, opts) do
{:ok, pid}
else
{:error, {:options, {key, _}}} when key in [:tcp_opts, :tls_opts] ->
gun_open(host, uri.port, Map.put(gun_opts, :transport_opts, tls_opts), opts)
error ->
error
end
end
+ @dialyzer [{:nowarn_function, gun_open: 4}, :no_match]
defp gun_open(host, port, gun_opts, opts) do
with {:ok, pid} <- :gun.open(host, port, gun_opts),
{_, true, _} <- {:receive, opts[:receive], pid},
{_, {:ok, _}, _} <- {:up, :gun.await_up(pid), pid} do
{:ok, pid}
else
{:receive, false, pid} ->
{:ok, pid}
{:up, error, pid} ->
close(pid)
error
error ->
error
end
end
defp tunnel_opts(uri) do
host = domain_or_ip(uri.host)
%{host: host, port: uri.port}
end
defp tunnel_tls_opts(opts, "https", tls_opts) do
http2_opts = %{protocols: [:http2], transport: :tls, tls_opts: tls_opts}
Map.merge(opts, http2_opts)
end
defp tunnel_tls_opts(opts, _, _), do: opts
defp open_stream(pid, method, path, headers, body, opts) do
req_opts = %{reply_to: opts[:reply_to] || self()}
open_stream(pid, method, path, headers, body, req_opts, opts[:send_body])
end
defp open_stream(pid, method, path, headers, body, req_opts, :stream) do
stream = :gun.request(pid, method, path, headers, "", req_opts)
for data <- body, do: :ok = :gun.data(pid, stream, :nofin, data)
:gun.data(pid, stream, :fin, "")
stream
end
defp open_stream(pid, method, path, headers, body, req_opts, :at_once),
do: :gun.request(pid, method, path, headers, body, req_opts)
defp read_response(pid, stream, opts) do
receive? = opts[:receive]
receive do
{:gun_response, ^pid, ^stream, :fin, status, headers} ->
{:ok, status, headers, ""}
{:gun_response, ^pid, ^stream, :nofin, status, headers} ->
format_response(pid, stream, opts, status, headers, opts[:body_as])
{:gun_up, ^pid, _protocol} when receive? ->
read_response(pid, stream, opts)
{:gun_error, ^pid, reason} ->
{:error, reason}
{:gun_down, ^pid, _, _, _, _} when receive? ->
read_response(pid, stream, opts)
{:DOWN, _, _, _, reason} ->
{:error, reason}
after
opts[:timeout] || @default_timeout ->
{:error, :recv_response_timeout}
end
end
defp format_response(pid, stream, opts, status, headers, :plain) do
case read_body(pid, stream, opts) do
{:ok, body} ->
{:ok, status, headers, body}
{:error, error} ->
# prevent gun from sending messages to the owner process if the body is too large and the connection is not closed
:ok = :gun.flush(stream)
{:error, error}
end
end
defp format_response(pid, stream, opts, status, headers, :stream) do
stream_body =
Stream.resource(
fn -> %{pid: pid, stream: stream} end,
fn
%{pid: pid, stream: stream} ->
case read_chunk(pid, stream, opts) do
{:nofin, part} -> {[part], %{pid: pid, stream: stream}}
{:fin, body} -> {[body], %{pid: pid, final: :fin}}
end
%{pid: pid, final: :fin} ->
{:halt, %{pid: pid}}
end,
fn %{pid: pid} ->
if opts[:close_conn], do: close(pid)
end
)
{:ok, status, headers, stream_body}
end
defp format_response(pid, stream, opts, status, headers, :chunks) do
{:ok, status, headers, %{pid: pid, stream: stream, opts: Enum.into(opts, [])}}
end
defp read_body(pid, stream, opts, acc \\ "") do
limit = opts[:max_body]
receive do
{:gun_data, ^pid, ^stream, :fin, body} ->
check_body_size(acc, body, limit)
{:gun_data, ^pid, ^stream, :nofin, part} ->
with {:ok, acc} <- check_body_size(acc, part, limit) do
read_body(pid, stream, opts, acc)
end
{:DOWN, _, _, _, reason} ->
{:error, reason}
after
opts[:timeout] || @default_timeout ->
{:error, :recv_body_timeout}
end
end
defp check_body_size(acc, part, nil), do: {:ok, acc <> part}
defp check_body_size(acc, part, limit) do
body = acc <> part
if limit - byte_size(body) >= 0 do
{:ok, body}
else
{:error, :body_too_large}
end
end
defp domain_or_ip(host) do
charlist = to_charlist(host)
case :inet.parse_address(charlist) do
{:error, :einval} ->
:idna.encode(charlist)
{:ok, ip} ->
ip
end
end
end
end
diff --git a/lib/tesla/middleware/telemetry.ex b/lib/tesla/middleware/telemetry.ex
index 02e446d..cf8e834 100644
--- a/lib/tesla/middleware/telemetry.ex
+++ b/lib/tesla/middleware/telemetry.ex
@@ -1,120 +1,124 @@
if Code.ensure_loaded?(:telemetry) do
defmodule Tesla.Middleware.Telemetry do
@moduledoc """
Emits events using the `:telemetry` library to expose instrumentation.
## Example usage
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.Telemetry
end
:telemetry.attach("my-tesla-telemetry", [:tesla, :request, :stop], fn event, measurements, meta, config ->
# Do something with the event
end, nil)
```
## Telemetry Events
* `[:tesla, :request, :start]` - emitted at the beginning of the request.
* Measurement: `%{system_time: System.system_time()}`
* Metadata: `%{env: Tesla.Env.t()}`
* `[:tesla, :request, :stop]` - emitted at the end of the request.
* Measurement: `%{duration: native_time}`
* Metadata: `%{env: Tesla.Env.t()} | %{env: Tesla.Env.t(), error: term()}`
* `[:tesla, :request, :exception]` - emitted when an exception has been raised.
* Measurement: `%{duration: native_time}`
* Metadata: `%{kind: Exception.kind(), reason: term(), stacktrace: Exception.stacktrace()}`
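For example, a `:stop` handler might convert the native-time duration to milliseconds (a minimal sketch; the handler id is arbitrary):
```
:telemetry.attach("tesla-request-timing", [:tesla, :request, :stop], fn _event, %{duration: duration}, %{env: env}, _config ->
  ms = System.convert_time_unit(duration, :native, :millisecond)
  IO.puts("#{env.method} #{env.url} -> #{env.status} (#{ms} ms)")
end, nil)
```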
## Legacy Telemetry Events
* `[:tesla, :request]` - This event is emitted for backwards compatibility only and should be considered deprecated.
This event can be disabled by setting `config :tesla, Tesla.Middleware.Telemetry, disable_legacy_event: true` in your config. Be sure to run `mix deps.compile --force tesla` after changing this setting to ensure the change is picked up.
Please check the [telemetry](https://hexdocs.pm/telemetry/) documentation for further usage.
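A minimal config sketch for disabling the legacy event (remember to force-recompile tesla afterwards):
```
# config/config.exs
config :tesla, Tesla.Middleware.Telemetry, disable_legacy_event: true
```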
"""
@disable_legacy_event Application.get_env(:tesla, Tesla.Middleware.Telemetry,
disable_legacy_event: false
)[:disable_legacy_event]
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, _opts) do
start_time = System.monotonic_time()
emit_start(%{env: env})
try do
Tesla.run(env, next)
catch
kind, reason ->
stacktrace = System.stacktrace()
duration = System.monotonic_time() - start_time
emit_exception(duration, %{kind: kind, reason: reason, stacktrace: stacktrace})
:erlang.raise(kind, reason, stacktrace)
else
{:ok, env} = result ->
duration = System.monotonic_time() - start_time
emit_stop(duration, %{env: env})
emit_legacy_event(duration, result)
result
{:error, reason} = result ->
duration = System.monotonic_time() - start_time
emit_stop(duration, %{env: env, error: reason})
emit_legacy_event(duration, result)
result
end
end
defp emit_start(metadata) do
:telemetry.execute(
[:tesla, :request, :start],
%{system_time: System.system_time()},
metadata
)
end
defp emit_stop(duration, metadata) do
:telemetry.execute(
[:tesla, :request, :stop],
%{duration: duration},
metadata
)
end
- defp emit_legacy_event(duration, result) do
- if !@disable_legacy_event do
+ if @disable_legacy_event do
+ defp emit_legacy_event(_duration, _result) do
+ :ok
+ end
+ else
+ defp emit_legacy_event(duration, result) do
duration_µs = System.convert_time_unit(duration, :native, :microsecond)
:telemetry.execute(
[:tesla, :request],
%{request_time: duration_µs},
%{result: result}
)
end
end
defp emit_exception(duration, metadata) do
:telemetry.execute(
[:tesla, :request, :exception],
%{duration: duration},
metadata
)
end
end
end
diff --git a/lib/tesla/mock.ex b/lib/tesla/mock.ex
index 61b38d7..cf8d797 100644
--- a/lib/tesla/mock.ex
+++ b/lib/tesla/mock.ex
@@ -1,239 +1,239 @@
defmodule Tesla.Mock do
@moduledoc """
Mock adapter for better testing.
## Setup
```
# config/test.exs
config :tesla, adapter: Tesla.Mock
# in case MyClient defines a specific adapter with `adapter SpecificAdapter`
config :tesla, MyClient, adapter: Tesla.Mock
```
## Example test
```
defmodule MyAppTest do
use ExUnit.Case
setup do
Tesla.Mock.mock(fn
%{method: :get} ->
%Tesla.Env{status: 200, body: "hello"}
end)
:ok
end
test "list things" do
assert {:ok, env} = MyApp.get("...")
assert env.status == 200
assert env.body == "hello"
end
end
```
## Setting up mocks
```
# Match on method & url and return whole Tesla.Env
Tesla.Mock.mock(fn
%{method: :get, url: "http://example.com/list"} ->
%Tesla.Env{status: 200, body: "hello"}
end)
# You can use any logic required
Tesla.Mock.mock(fn env ->
case env.url do
"http://example.com/list" ->
%Tesla.Env{status: 200, body: "ok!"}
_ ->
%Tesla.Env{status: 404, body: "NotFound"}
end
end)
# mock will also accept a short version of the response
# in the form of {status, headers, body}
Tesla.Mock.mock(fn
%{method: :post} -> {201, %{}, %{id: 42}}
end)
```
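The mock function may also return an `{:error, reason}` tuple to simulate transport-level failures; a minimal sketch:
```
Tesla.Mock.mock(fn
  %{url: "http://example.com/unreachable"} -> {:error, :econnrefused}
end)
```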
## Global mocks
By default, mocks are bound to the current process,
i.e. the process running a single test case.
This design allows proper isolation between test cases
and makes testing in parallel (`async: true`) possible.
While this style is recommended, there is one drawback:
if the Tesla client is called from a different process,
it will not use the mock that was set up.
To solve this issue it is possible to set up a global mock
using the `mock_global/1` function.
```
defmodule MyTest do
use ExUnit.Case, async: false # must be false!
setup_all do
Tesla.Mock.mock_global fn
env -> # ...
end
:ok
end
# ...
end
```
**WARNING**: Using global mocks may affect tests that use local mocks
(because of the fallback to the global mock when a local one is not found).
"""
defmodule Error do
defexception env: nil, ex: nil, stacktrace: []
def message(%__MODULE__{ex: nil}) do
"""
There is no mock set for process #{inspect(self())}.
Use Tesla.Mock.mock/1 to mock HTTP requests.
See https://github.com/teamon/tesla#testing
"""
end
def message(%__MODULE__{env: env, ex: %FunctionClauseError{} = ex, stacktrace: stacktrace}) do
"""
Request not mocked
The following request was not mocked:
#{inspect(env, pretty: true)}
#{Exception.format(:error, ex, stacktrace)}
"""
end
end
## PUBLIC API
@doc """
Sets up mocks for the current test.
This mock will only be available to the current process.
"""
@spec mock((Tesla.Env.t() -> Tesla.Env.t() | {integer, map, any})) :: no_return
def mock(fun) when is_function(fun), do: pdict_set(fun)
@doc """
Sets up global mocks.
**WARNING**: This mock will be available to ALL processes.
It might cause conflicts when running tests in parallel!
"""
@spec mock_global((Tesla.Env.t() -> Tesla.Env.t() | {integer, map, any})) :: no_return
def mock_global(fun) when is_function(fun), do: agent_set(fun)
## HELPERS
@type response_opt :: :headers | :status
@type response_opts :: [{response_opt, any}]
@doc """
Returns a JSON response.
Example
import Tesla.Mock
mock fn
%{url: "/ok"} -> json(%{"some" => "data"})
%{url: "/404"} -> json(%{"some" => "data"}, status: 404)
end
"""
- @spec json(body :: term, opts :: [response_opts]) :: Tesla.Env.t()
+ @spec json(body :: term, opts :: response_opts) :: Tesla.Env.t()
def json(body, opts \\ []), do: response(json_encode(body), "application/json", opts)
defp json_encode(body) do
engine = Keyword.get(Application.get_env(:tesla, Tesla.Mock, []), :json_engine, Jason)
engine.encode!(body)
end
@doc """
Returns a text response.
Example
import Tesla.Mock
mock fn
%{url: "/ok"} -> text(%{"some" => "data"})
%{url: "/404"} -> text(%{"some" => "data"}, status: 404)
end
"""
- @spec text(body :: term, opts :: [response_opts]) :: Tesla.Env.t()
+ @spec text(body :: term, opts :: response_opts) :: Tesla.Env.t()
def text(body, opts \\ []), do: response(body, "text/plain", opts)
defp response(body, content_type, opts) do
defaults = [status: 200, headers: [{"content-type", content_type}]]
struct(Tesla.Env, Keyword.merge(defaults, [{:body, body} | opts]))
end
## ADAPTER IMPLEMENTATION
def call(env, _opts) do
case pdict_get() || agent_get() do
nil ->
raise Tesla.Mock.Error, env: env
fun ->
case rescue_call(fun, env) do
{status, headers, body} ->
{:ok, %{env | status: status, headers: headers, body: body}}
%Tesla.Env{} = env ->
{:ok, env}
{:ok, %Tesla.Env{} = env} ->
{:ok, env}
{:error, reason} ->
{:error, reason}
error ->
{:error, error}
end
end
end
defp pdict_set(fun), do: Process.put(__MODULE__, fun)
defp pdict_get, do: Process.get(__MODULE__)
defp agent_set(fun) do
case Process.whereis(__MODULE__) do
nil -> Agent.start_link(fn -> fun end, name: __MODULE__)
pid -> Agent.update(pid, fn _ -> fun end)
end
end
defp agent_get do
case Process.whereis(__MODULE__) do
nil -> nil
pid -> Agent.get(pid, fn f -> f end)
end
end
defp rescue_call(fun, env) do
fun.(env)
rescue
ex in FunctionClauseError ->
raise Tesla.Mock.Error, env: env, ex: ex, stacktrace: System.stacktrace()
end
end
diff --git a/mix.exs b/mix.exs
index 14a823c..d388b8b 100644
--- a/mix.exs
+++ b/mix.exs
@@ -1,134 +1,134 @@
defmodule Tesla.Mixfile do
use Mix.Project
@version "1.3.3"
def project do
[
app: :tesla,
version: @version,
description: description(),
package: package(),
source_url: "https://github.com/teamon/tesla",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
lockfile: lockfile(System.get_env("LOCKFILE")),
test_coverage: [tool: ExCoveralls],
dialyzer: [
- plt_add_apps: [:inets],
+ plt_add_apps: [:inets, :idna, :ssl_verify_fun],
plt_add_deps: :project
],
docs: docs()
]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: applications(Mix.env())]
end
def applications(:test), do: applications(:dev) ++ [:httparrot, :hackney, :ibrowse, :gun]
def applications(_), do: [:logger, :ssl, :inets]
defp description do
"HTTP client library, with support for middleware and multiple adapters."
end
defp package do
[
maintainers: ["Tymon Tobolski"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/teamon/tesla"}
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp lockfile(nil), do: "mix.lock"
defp lockfile(lockfile), do: "test/lockfiles/#{lockfile}.lock"
defp deps do
[
{:mime, "~> 1.0"},
# http clients
{:ibrowse, "~> 4.4.0", optional: true},
{:hackney, "~> 1.6", optional: true},
{:gun, "~> 1.3", optional: true},
{:castore, "~> 0.1", optional: true},
{:mint, "~> 1.0", optional: true},
# json parsers
{:jason, ">= 1.0.0", optional: true},
{:poison, ">= 1.0.0", optional: true},
{:exjsx, ">= 3.0.0", optional: true},
# other
{:fuse, "~> 2.4", optional: true},
{:telemetry, "~> 0.4", optional: true},
# testing & docs
{:excoveralls, "~> 0.8", only: :test},
{:httparrot, "~> 1.2", only: :test},
{:ex_doc, "~> 0.21", only: :dev},
{:mix_test_watch, "~> 1.0", only: :dev},
{:dialyxir, "~> 1.0.0-rc.3", only: [:dev, :test]},
{:inch_ex, "~> 0.5.6", only: :docs}
]
end
defp docs do
[
main: "readme",
source_ref: "v#{@version}",
extras: ["README.md"],
groups_for_modules: [
Behaviours: [
Tesla.Adapter,
Tesla.Middleware
],
Adapters: [
Tesla.Adapter.Gun,
Tesla.Adapter.Hackney,
Tesla.Adapter.Httpc,
Tesla.Adapter.Ibrowse,
Tesla.Adapter.Mint
],
Middlewares: [
Tesla.Middleware.BaseUrl,
Tesla.Middleware.BasicAuth,
Tesla.Middleware.Compression,
Tesla.Middleware.CompressRequest,
Tesla.Middleware.DecodeJson,
Tesla.Middleware.DecodeRels,
Tesla.Middleware.DecompressResponse,
Tesla.Middleware.DigestAuth,
Tesla.Middleware.EncodeJson,
Tesla.Middleware.FollowRedirects,
Tesla.Middleware.FormUrlencoded,
Tesla.Middleware.Fuse,
Tesla.Middleware.Headers,
Tesla.Middleware.JSON,
Tesla.Middleware.KeepRequest,
Tesla.Middleware.Logger,
Tesla.Middleware.MethodOverride,
Tesla.Middleware.Opts,
Tesla.Middleware.PathParams,
Tesla.Middleware.Query,
Tesla.Middleware.Retry,
Tesla.Middleware.Telemetry,
Tesla.Middleware.Timeout
]
],
nest_modules_by_prefix: [
Tesla.Adapter,
Tesla.Middleware
]
]
end
end
