gen_server: concurrent vs. serial calls.
-module(ping_server).
-behaviour(gen_server).

-export([start/0, serial_burst/2, concurrent_burst/2]).

%% gen_server callbacks.
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, code_change/3,
         terminate/2]).

-define(TIMEOUT, 5000).

start() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

%% Sends Message to the server on Node and prints the reply; returns pang
%% if the call times out.
send_message(Node, Message, N) ->
    try
        Result = gen_server:call({?MODULE, Node}, Message, ?TIMEOUT),
        io:format("~p message ~p response: ~p~n", [Message, N, Result]),
        Result
    catch
        _:_ ->
            io:format("~p message ~p timeout~n", [Message, N]),
            pang
    end.

%% Sends a burst of Size requests with the same Message to Node, each one
%% from its own spawned client process.
ping_burst(Node, Message, Size) ->
    lists:map(fun(X) ->
                      spawn(fun() ->
                                    send_message(Node, Message, X)
                            end)
              end,
              lists:seq(0, Size - 1)).

%% Burst against the serial service.
serial_burst(Node, Size) ->
    ping_burst(Node, serial_ping, Size),
    ok.

%% Burst against the concurrent service.
concurrent_burst(Node, Size) ->
    ping_burst(Node, concurrent_ping, Size),
    ok.

%% Heavy function simulation: sleeps up to MaxDelay milliseconds
%% (rand replaces the deprecated random module).
heavy(MaxDelay) -> timer:sleep(rand:uniform(MaxDelay)).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% gen_server callbacks implementation.

init([]) -> {ok, {max_delay, 2000}}.

%% Serial service: the reply is produced inside handle_call, so the server
%% is blocked until heavy/1 returns.
handle_call(serial_ping, _From, State = {max_delay, MaxDelay}) ->
    heavy(MaxDelay),
    {reply, pong, State};
%% Concurrent service: defer the work by messaging ourselves; the reply is
%% sent later from handle_info/2.
handle_call(concurrent_ping, From, State) ->
    ?MODULE ! {concurrent_ping, From},
    {noreply, State};
handle_call(stop, _From, State) ->
    {stop, normal, ok, State};
handle_call(_, _, State) ->
    {reply, not_implemented, State}.

%% Spawns a worker that does the heavy job and answers the waiting client
%% with gen_server:reply/2.
handle_info({concurrent_ping, From}, State = {max_delay, MaxDelay}) ->
    spawn(fun() ->
                  heavy(MaxDelay),
                  gen_server:reply(From, pong)
          end),
    {noreply, State};
handle_info(_Message, State) ->
    {noreply, State}.

handle_cast(_, State) -> {noreply, State}.

code_change(_OldVsn, State, _Extra) -> {ok, State}.

terminate(_Reason, _State) -> ok.
This code example shows both serial and concurrent services in a gen_server.

By default a gen_server is a serial server. Responses are usually generated in the handle_call callback and sent back to the client. If the server takes too much time to prepare a response to a client message, the clients in the queue must wait in line, and if those clients have timeouts on their calls, the calls fail on the client side.

Things to note: both serial_ping and concurrent_ping execute a function called heavy/1 that takes anywhere from 1 millisecond up to the maximum delay (2000 ms by default) to return. This simulates a function that takes too long to execute.
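To make the client-side timeout concrete, here is a minimal sketch, not part of the gist, of what a plain call looks like when the server cannot reply in time; the node name 'server@host' is only an assumption:

%% Sketch: a plain gen_server:call/3 from the client side. If the busy
%% server cannot reply within 5000 ms, the calling process exits with a
%% timeout, which is why send_message/3 above wraps the call in try/catch.
catch gen_server:call({ping_server, 'server@host'}, serial_ping, 5000).
%% => {'EXIT', {timeout, {gen_server, call, [...]}}}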
Serial service

The serial service is implemented as the serial_ping clause of the gen_server callback handle_call. When the function serial_burst is used to send N messages to the serial_ping service on Node, from 10 requests only 3 got a response; the rest timed out on the client side, because the server handles the calls one at a time and each one can take up to 2 seconds. One way to fix this is to hand the time-consuming job off to another process.
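A sketch of how the serial experiment could be run from two connected nodes; the node and host names below are assumptions, not taken from the gist:

%% On the server node (started e.g. with: erl -sname server):
ping_server:start().                          %% => {ok, Pid}

%% On a client node (erl -sname client) with the module also loaded:
net_adm:ping('server@host').                  %% => pong
ping_server:serial_burst('server@host', 10).  %% => ok
%% Each spawned client then prints either
%%   serial_ping message N response: pong
%% or, once the queue builds up past the 5000 ms timeout,
%%   serial_ping message N timeout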
Concurrent service

The concurrent service is implemented as the concurrent_ping clause of the gen_server callback handle_call, in conjunction with a clause of the handle_info callback where the server spawns a process to do the job and answer the client with gen_server:reply/2.

This time all 10 requests got a response from the server.
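The concurrent run looks the same from the client side, only against the other service (again, the node name is an assumption):

ping_server:concurrent_burst('server@host', 10).  %% => ok
%% All ten spawned clients eventually print
%%   concurrent_ping message N response: pong
%% because handle_call returns {noreply, State} immediately and a separate
%% worker process replies with gen_server:reply/2 once heavy/1 is done.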
Though this works, implementing concurrent servers this way in Erlang can get a
bit messy.
A Robust Solution

The server still has several flaws. I implemented tentacles_server, a gen_server wrapper that takes care of all of these flaws; the same server can be accomplished with it.

Advantages: the probability of being down is very low.