Elixir - Check if string is empty - websocket

I am playing with Elixir and the Phoenix Framework for the first time after following this tutorial.
I have a simple client/server app.
chat/lib/chat_web/room_channel.ex:
defmodule ChatWeb.RoomChannel do
  use Phoenix.Channel

  def join("room:lobby", _message, socket) do
    {:ok, socket}
  end

  def join("room:" <> _private_room_id, _params, _socket) do
    {:error, %{reason: "unauthorized"}}
  end

  def handle_in("new_msg", %{"body" => body}, socket) do
    broadcast! socket, "new_msg", %{body: body}
    {:noreply, socket}
  end
end
I want to block empty incoming messages (where body is an empty string)
def handle_in("new_msg", %{"body" => body}, socket) do
  # I guess the code should be here..
  broadcast! socket, "new_msg", %{body: body}
  {:noreply, socket}
end
How can I do that?

I want to block empty incoming messages (where body is an empty string)
You can add a guard clause for this: either when body != "" or when byte_size(body) > 0.
def handle_in("new_msg", %{"body" => body}, socket) when body != "" do
  ...
end
Now this function will only match if body is not "".
If you also want to handle the empty body case, you can add two clauses like this (there's no need for the guard clause anymore, since the first clause catches an empty body before the second one is tried):
def handle_in("new_msg", %{"body" => ""}, socket) do
  # broadcast error here
end

def handle_in("new_msg", %{"body" => body}, socket) do
  # broadcast normal here
end
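A hedged sketch of one way to fill in those placeholders, replying with an error to the sender instead of broadcasting anything (the reason text and the use of a reply rather than a broadcast are illustrative, not part of the original answer):

def handle_in("new_msg", %{"body" => ""}, socket) do
  # Tell only the sender that the message was rejected.
  {:reply, {:error, %{reason: "body is empty"}}, socket}
end

def handle_in("new_msg", %{"body" => body}, socket) do
  broadcast! socket, "new_msg", %{body: body}
  {:noreply, socket}
end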

You can use the answer proposed by @Dogbert, but to be sure the string is not just whitespace you can wrap the broadcast! in a private helper function (a sketch of that variant follows the examples below), or simply wrap it in an if or unless (negated if) expression.
unless String.trim(body) == "" do
  broadcast! socket, "new_msg", %{body: body}
end
If you want to return an error message, you can use something a bit more complex, e.g.:
if String.trim(body) != "" do
  broadcast! socket, "new_msg", %{body: body}
else
  broadcast! socket, "error_msg", %{body: "Body is empty"}
end
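And a minimal sketch of the private-helper variant mentioned above (the helper name maybe_broadcast_msg is made up for illustration):

def handle_in("new_msg", %{"body" => body}, socket) do
  maybe_broadcast_msg(socket, body)
  {:noreply, socket}
end

# Hypothetical helper: broadcasts only when the trimmed body is non-empty.
defp maybe_broadcast_msg(socket, body) do
  if String.trim(body) != "" do
    broadcast! socket, "new_msg", %{body: body}
  end
end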

Related

Absinthe - How to put_session in resolver function?

I'm using Absinthe and have a sign in mutation. When users send over valid credentials, I'd like to set a session cookie in the response via put_session.
The problem I'm facing is that I'm not able to access the conn from within a resolver function. That tells me that I'm not supposed to update the connection's properties from within a resolver.
Is it possible to do this with Absinthe? What are some alternative solutions?
It looks like one solution is:
1. In the resolver, resolve either an {:ok, _} or an {:error, _} as normal.
2. Add middleware after the resolver to pattern match on the resolution.value returned from step 1 and update the GraphQL context.
3. Use the before_send feature of Absinthe (which has access to both the GraphQL context and the connection) to put_session before sending a response.
Code Example
Mutation:
mutation do
  @desc "Authenticate a user."
  field :login, :user do
    arg(:email, non_null(:string))
    arg(:password, non_null(:string))
    resolve(&Resolvers.Accounts.signin/3)

    middleware(fn resolution, _ ->
      case resolution.value do
        %{user: user, auth_token: auth_token} ->
          Map.update!(
            resolution,
            :context,
            &Map.merge(&1, %{auth_token: auth_token, user: user})
          )

        _ ->
          resolution
      end
    end)
  end
end
Resolver:
defmodule AppWeb.Resolvers.Accounts do
  alias App.Accounts

  def signin(_, %{email: email, password: password}, _) do
    if user = Accounts.get_user_by_email_and_password(email, password) do
      auth_token = Accounts.generate_user_session_token(user)
      {:ok, %{user: user, auth_token: auth_token}}
    else
      {:error, "Invalid credentials."}
    end
  end
end
Router:
defmodule AppWeb.Router do
  use AppWeb, :router

  pipeline :api do
    plug(:accepts, ["json"])
    plug(:fetch_session)
  end

  scope "/" do
    pipe_through(:api)

    forward("/api", Absinthe.Plug,
      schema: AppWeb.Schema,
      before_send: {__MODULE__, :absinthe_before_send}
    )

    forward("/graphiql", Absinthe.Plug.GraphiQL,
      schema: AppWeb.Schema,
      before_send: {__MODULE__, :absinthe_before_send}
    )
  end

  def absinthe_before_send(conn, %Absinthe.Blueprint{} = blueprint) do
    if auth_token = blueprint.execution.context[:auth_token] do
      put_session(conn, :auth_token, auth_token)
    else
      conn
    end
  end

  def absinthe_before_send(conn, _) do
    conn
  end
end
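For completeness, here is a hedged sketch (not part of the original answer) of how a later request could load that session value back into the Absinthe context; the AppWeb.Context plug and Accounts.get_user_by_session_token/1 are assumptions:

defmodule AppWeb.Context do
  @behaviour Plug
  import Plug.Conn
  alias App.Accounts

  def init(opts), do: opts

  def call(conn, _opts) do
    # Read the token stored by absinthe_before_send/2 and, if it resolves to a
    # user, expose that user to resolvers through the GraphQL context.
    context =
      with token when not is_nil(token) <- get_session(conn, :auth_token),
           user when not is_nil(user) <- Accounts.get_user_by_session_token(token) do
        %{current_user: user}
      else
        _ -> %{}
      end

    Absinthe.Plug.put_options(conn, context: context)
  end
end

The plug would go in the :api pipeline so it runs before Absinthe.Plug handles the request.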
Not sure why you want to use a session; can't this be solved using a bearer token?
Please disregard the interfaces. :-)
Mutation.
object :user_token_payload do
  field(:user, :user)
  field(:token, :string)
end

object :login_user_mutation_response, is_type_of: :login_user do
  interface(:straw_hat_mutation_response)
  field(:errors, list_of(:straw_hat_error))
  field(:successful, non_null(:boolean))
  field(:payload, :user_token_payload)
end
Resolver.
def authenticate_user(args, _) do
  case Accounts.authenticate_user(args) do
    {:ok, user, token} -> MutationResponse.succeeded(%{user: user, token: token})
    {:error, message} -> MutationResponse.failed(StrawHat.Error.new(message))
  end
end
Now the client can pass along that token with the Authorization header, and pick it up with a plug.
defmodule MyAppWeb.Plugs.Context do
  import Plug.Conn
  alias MyApp.Admission

  def init(opts), do: opts

  def call(conn, _) do
    case build_context(conn) do
      {:ok, context} -> put_private(conn, :absinthe, %{context: context})
      _ -> put_private(conn, :absinthe, %{context: %{}})
    end
  end

  @doc """
  Return the current user context based on the authorization header.
  """
  def build_context(conn) do
    auth_header =
      get_req_header(conn, "authorization")
      |> List.first()

    if auth_header do
      "Bearer " <> token = auth_header

      case Admission.get_token_by_hash(token) do
        nil -> :error
        token -> {:ok, %{current_user: token.user}}
      end
    else
      :error
    end
  end
end
Then add the plug to your pipeline:
plug(MyAppWeb.Plugs.Context)
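For example, a rough sketch of where it could sit relative to the Absinthe routes (the :api pipeline, the route path, and MyAppWeb.Schema are assumptions, not taken from the answer):

pipeline :api do
  plug(:accepts, ["json"])
  plug(MyAppWeb.Plugs.Context)
end

scope "/api" do
  pipe_through(:api)

  forward("/", Absinthe.Plug, schema: MyAppWeb.Schema)
end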
Then you can pick up the current user in your resolvers like so.
def create_note(%{input: input}, %{context: %{current_user: user}}) do
  # ...
end
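A slightly fuller sketch of such a resolver, with a second clause for unauthenticated requests (the Notes context and the error message are made up for illustration):

def create_note(%{input: input}, %{context: %{current_user: user}}) do
  # Assumes a hypothetical Notes.create_note/2 returning {:ok, note} or {:error, reason}.
  Notes.create_note(user, input)
end

def create_note(_args, _resolution) do
  # No current_user in the context: the request was not authenticated.
  {:error, "unauthenticated"}
end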

Multiplayer game - Elixir channels

I'm a beginner in Elixir.
I have an Elixir application for a multiplayer game that simply replicates each received command to all players connected to the channel. This works, but there is some latency when the received command is replicated. What is the best option to remove this latency?
To replicate the command to all players connected to the channel, I use the broadcast function. Is that the best function for this?
See the following code:
defmodule GameWeb.GameChannel do
  use GameWeb, :channel

  alias Game.GameState
  alias Game.Error

  # join to topic game:*
  def join("game:" <> code, %{"email" => email}, socket) do
    case Map.has_key?(GameState.games(), code) do
      true ->
        socket = assign(socket, :player, 2)

        game =
          code
          |> GameState.get_game()
          |> Map.put(:player2, %{:email => email, :score => 0})
          |> GameState.update_game()

        socket = assign(socket, :game, game)
        {:ok, game, socket}

      false ->
        socket = assign(socket, :player, 1)

        game =
          GameState.create_game(code)
          |> Map.put(:player1, %{:email => email, :score => 0})
          |> GameState.update_game()

        socket = assign(socket, :game, game)
        {:ok, game, socket}
    end
  end

  # topic not found
  def join(_topic, _payload, _socket) do
    {:error, Error.get(:resource_not_found)}
  end

  def handle_in("playerAction", payload, socket) do
    broadcast!(socket, "playerAction", Map.put(payload, :from_player, socket.assigns.player))
    {:noreply, socket}
  end
end

How to use Phoenix.Channel.reply/2 for async reply to channel push

I'm trying to extend the example of Phoenix.Channel.reply/2 from the Phoenix documentation into a fully working example of asynchronously replying to Phoenix channel/socket push events:
Taken from https://hexdocs.pm/phoenix/Phoenix.Channel.html#reply/2:
def handle_in("work", payload, socket) do
  Worker.perform(payload, socket_ref(socket))
  {:noreply, socket}
end

def handle_info({:work_complete, result, ref}, socket) do
  reply ref, {:ok, result}
  {:noreply, socket}
end
I've reworked the example as follows:
room_channels.ex
...
def handle_in("work", job, socket) do
  send worker_pid, {self(), job}
  {:noreply, socket}
end

def handle_info({:work_complete, result}, socket) do
  broadcast socket, "work_complete", %{result: result}
  {:noreply, socket}
end
...
worker.ex
...
receive do
  {pid, job} ->
    result = perform(job) # stub
    send pid, {:work_complete, result}
end
...
This solution works, but it doesn't rely on generating and passing a socket_ref with socket_ref(socket) and on Phoenix.Channel.reply/2. Instead it relies on Phoenix.Channel.broadcast/3.
The documentation implies that reply/2 is specifically used for this scenario of asynchronously replying to socket push events:
reply(arg1, arg2)
Replies asynchronously to a socket push.
Useful when you need to reply to a push that can’t otherwise be
handled using the {:reply, {status, payload}, socket} return from your
handle_in callbacks. reply/3 will be used in the rare cases you need
to perform work in another process and reply when finished by
generating a reference to the push with socket_ref/1.
When I generate and pass a socket_ref, and rely on Phoenix.Channel.reply/2 for asynchronous replies to socket push, I don't get it to work at all:
room_channels.ex
...
def handle_in("work", job, socket) do
  send worker_pid, {self(), job, socket_ref(socket)}
  {:noreply, socket}
end

def handle_info({:work_complete, result, ref}, socket) do
  reply ref, {:ok, result}
  {:noreply, socket}
end
...
worker.ex
...
receive do
  {pid, job, ref} ->
    result = perform(job) # stub
    send pid, {:work_complete, result, ref}
end
...
My room_channels.ex handle_info function is called but reply/2 doesn't seem to send a message down the socket. I see no stacktrace on stderr or any output on stdout to indicate an error either. What is more, keeping track of a socket_ref seems to just add overhead to my code.
What is the benefit of using a socket_ref and reply/2 over my solution with broadcast/3 and how can I get the solution with reply/2 to work?
I was wrong, the example with Phoenix.Channel.reply/2 will work:
room_channels.ex
...
def handle_in("work", job, socket) do
  send worker_pid, {self(), job, socket_ref(socket)}
  {:noreply, socket}
end

def handle_info({:work_complete, result, ref}, socket) do
  reply ref, {:ok, result}
  {:noreply, socket}
end
...
worker.ex
...
receive do
  {pid, job, ref} ->
    result = perform(job) # stub
    send pid, {:work_complete, result, ref}
end
...
In my implementation I made the mistake of sending a synchronous reply to the event push with the return value of {:reply, :ok, socket} instead of {:noreply, socket}.
On closer inspection of the websocket frames sent from the server to the client, I found that the browser did receive my server replies from reply ref, {:ok, result}, but that the associated callback was never called.
It seems that Phoenix's Socket.js client library accepts at most a single reply per push event.
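Restated as a sketch (this is not new code from the post, just the fix spelled out), the handle_in that hands the job off has to defer its reply:

def handle_in("work", job, socket) do
  send worker_pid, {self(), job, socket_ref(socket)}

  # Returning {:reply, :ok, socket} here was the bug: the client's callback fires
  # on that immediate reply and ignores the later reply/2 sent from handle_info/2.
  {:noreply, socket}
end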
Hope I'm not too late to this conversation.
I managed to use your code above and got the callback to work in the JavaScript.
The trick is to listen for the phx_reply event. Inside priv/static/app.js, each Channel JavaScript object has a list of events predefined in CHANNEL_EVENTS and is set up to listen for replies in the following code block:
this.on(CHANNEL_EVENTS.reply, function (payload, ref) {
  _this2.trigger(_this2.replyEventName(ref), payload);
});
What I did was listen for the phx_reply event via the channel's on callback:
channel.on("phx_reply", (data) => {
  console.log("DATA ", data);
  // Process the data
});
This has been tested on Elixir 1.3 and Phoenix 1.2
Hope that helps!

Ruby: abort OpenURI based on content length

Ruby's OpenURI provides a content_length_proc option which allows determining* content length before the actual transfer is started:
open(url, :content_length_proc => lambda { |content_length|
  puts "Content Length: #{content_length}"
}) { |data|
  # data.meta, data.read etc.
}
Is there a way for this proc to abort the actual, full retrieval?
* I'm aware this is not reliable - but it's sufficient for a simple heuristic in my case
This is the corresponding code from open-uri.rb:
if options[:content_length_proc] && Net::HTTPSuccess === resp
  if resp.key?('Content-Length')
    options[:content_length_proc].call(resp['Content-Length'].to_i)
  else
    options[:content_length_proc].call(nil)
  end
end
resp.read_body {|str|
  ...
}
So as you can see, the return value of content_length_proc is ignored. What you can do to cancel the operation, though, is raise some kind of error in the callback; this will abort further execution. If you raise a dedicated error class, you can even rescue it and react to that specific situation:
# Dedicated error class used to signal "abort this download".
class MyError < StandardError; end

begin
  open(url, :content_length_proc => lambda { |content_length|
    puts "Content Length: #{content_length}"
    # Example threshold (an assumption): abort before the body is transferred.
    raise MyError if content_length && content_length > 10 * 1024 * 1024
  }) { |data|
    # data.meta, data.read etc.
  }
rescue MyError
  # react to it
end

Recovering from a broken TCP socket in Ruby when in gets()

I'm reading lines of input on a TCP socket, similar to this:
class Bla
  def getcmd
    @sock.gets unless @sock.closed?
  end

  def start
    srv = TCPServer.new(5000)
    @sock = srv.accept
    while ! @sock.closed?
      ans = getcmd
    end
  end
end
If the endpoint terminates the connection while getcmd() is running, then gets() hangs.
How can I work around this? Is it necessary to do non-blocking or timed I/O?
You can use select to see whether you can safely call gets on the socket; see the following implementation of a TCPServer using this technique.
require 'socket'

host, port = 'localhost', 7000

TCPServer.open(host, port) do |server|
  while client = server.accept
    readfds = true
    got = nil
    begin
      readfds, writefds, exceptfds = select([client], nil, nil, 0.1)
      p :r => readfds, :w => writefds, :e => exceptfds
      if readfds
        got = client.gets
        p got
      end
    end while got
  end
end
And here is a client that tries to break the server:
require 'socket'

host, port = 'localhost', 7000

TCPSocket.open(host, port) do |socket|
  socket.puts "Hey there"
  socket.write 'he'
  socket.flush
  socket.close
end
IO#closed? returns true only when both the reader and the writer are closed.
In your case, @sock.gets returns nil, you call getcmd again, and this runs in a never-ending loop. You can either use select, or close the socket when gets returns nil.
I recommend using readpartial to read from your socket and also catching peer resets:
while true
  sockets_ready = select(@sockets, nil, nil, nil)
  if sockets_ready != nil
    sockets_ready[0].each do |socket|
      begin
        if (socket == @server_socket)
          # puts "Connection accepted!"
          @sockets << @server_socket.accept
        else
          # Received something on a client socket
          if socket.eof?
            # puts "Disconnect!"
            socket.close
            @sockets.delete(socket)
          else
            data = ""
            recv_length = 256
            while (tmp = socket.readpartial(recv_length))
              data += tmp
              break if (!socket.ready?)
            end
            listen socket, data
          end
        end
      rescue Exception => exception
        case exception
        when Errno::ECONNRESET, Errno::ECONNABORTED, Errno::ETIMEDOUT
          # puts "Socket: #{exception.class}"
          @sockets.delete(socket)
        else
          raise exception
        end
      end
    end
  end
end
This code borrows heavily from some nice IBM code by M. Tim Jones. Note that @server_socket is initialized by:
@server_socket = TCPServer.open(port)
@sockets is just an array of sockets.
I simply pgrep "ruby" to find the pid, and kill -9 the pid and restart.
If you believe the rdoc for ruby sockets, they don't implement gets. This leads me to believe gets is being provided by a higher level of abstraction (maybe the IO libraries?) and probably isn't aware of socket-specific things like 'connection closed.'
Try using recvfrom instead of gets
