Perl IO::Async parallel tcp connections

I've got a problem that I can't easily find the solution to, for some reason.
I'm trying to build multiple parallel TCP connections to a server via IO::Async.
My goal is to run the TCP connections in parallel. The connections do not need to communicate with each other, but I need to capture and save each connection's output in a hash.
The following code is an example with a single connection.
#!/usr/bin/perl
use strict;
use warnings;
use IO::Async::Loop;
use IO::Async::Stream;
my $CRLF = "\x0d\x0a"; # because \r\n is not portable
my $HOST = shift @ARGV or die "Need HOST";
my $PORT = shift @ARGV or die "Need PORT";
my $loop = IO::Async::Loop->new;
my $socket;
$loop->connect(
host => $HOST,
service => $PORT,
socktype => 'stream',
on_connected => sub { $socket = shift },
on_resolve_error => sub { die "Cannot resolve - $_[0]\n" },
on_connect_error => sub { die "Cannot connect\n" },
);
$loop->loop_once until defined $socket;
# $socket is just an IO::Socket reference
my $peeraddr = $socket->peerhost . ":" . $socket->peerport;
print STDERR "Connected to $peeraddr\n";
# We need to create a cross-connected pair of Streams. Can't do that
# easily without a temporary variable
my ( $socketstream, $stdiostream );
$socketstream = IO::Async::Stream->new(
handle => $socket,
on_read => sub {
my ( undef, $buffref, $eof ) = @_;
while( $$buffref =~ s/^(.*)$CRLF// ) {
$stdiostream->write( $1 . "\n" );
}
return 0;
},
on_closed => sub {
print STDERR "Closed connection to $peeraddr\n";
$stdiostream->close_when_empty;
},
);
$loop->add( $socketstream );
$stdiostream = IO::Async::Stream->new_for_stdio(
on_read => sub {
my ( undef, $buffref, $eof ) = @_;
while( $$buffref =~ s/^(.*)\n// ) {
$socketstream->write( $1 . $CRLF );
}
return 0;
},
on_closed => sub {
$socketstream->close_when_empty;
},
);
$loop->add( $stdiostream );
$loop->await_all( $socketstream->new_close_future, $stdiostream->new_close_future );
How could I modify this code to handle a list of IPs as asynchronous connections and store each connection's output in a dedicated hash?
Finally, it would be nice to limit the maximum number of parallel connections to 100.
Any ideas?
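For reference, one way this might be approached (an untested sketch, not a drop-in solution): Future::Utils provides fmap_void, which runs a block over a list with a concurrency cap. The sketch below reuses $loop, $CRLF and $PORT from the code above, assumes the futures form of $loop->connect yields the connected socket, and the %output hash and its per-host keys are purely illustrative:
use Future::Utils qw( fmap_void );

my @hosts  = @ARGV;   # the list of IPs to connect to
my %output;           # $output{$host} = [ received lines ]

my $all = fmap_void {
    my ( $host ) = @_;
    $loop->connect(
        host     => $host,
        service  => $PORT,
        socktype => 'stream',
    )->then( sub {
        my ( $socket ) = @_;                      # the connected socket
        my $stream = IO::Async::Stream->new(
            handle  => $socket,
            on_read => sub {
                my ( undef, $buffref, $eof ) = @_;
                while ( $$buffref =~ s/^(.*)$CRLF// ) {
                    push @{ $output{$host} }, $1; # save output per host
                }
                return 0;
            },
        );
        $loop->add( $stream );
        return $stream->new_close_future;         # done when the peer closes
    });
} foreach => \@hosts, concurrent => 100;          # at most 100 at a time

$loop->await( $all );   # drive the loop until every connection has finished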

Related

How to write a client program for Net::WebSocket::Server program?

I have a server program that listens on port 9000, but I can't find a way to write a client program that connects to that server on port 9000. Here is the main part of the server program:
use strict;
use warnings;
use Net::WebSocket::Server;
my $port = "9000";
my $msg_count = 0;
print "starting server on $port \n\n";
my $count = 2400;
Net::WebSocket::Server->new(
listen => $port,
silence_max => 5,
tick_period => 300,
on_tick => sub {
my ($serv) = @_;
print "connections >> " . $serv->connections . "\n";
print $_->ip() for( $serv->connections() ); print "\n";
print $_->port() for( $serv->connections() ); print "\n\n";
$count++;
},
on_connect => sub {
my ($serv, $conn) = @_;
$conn->on(
handshake => sub {
my ($conn, $handshake) = @_;
my $tmp = $handshake->req->origin;
print "here ... $tmp \n\n";
},
utf8 => sub {
my ($conn, $msg) = @_;
my $IP = $conn->ip();
my $PORT = $conn->port();
my $SERVER = $conn->server();
my $SOCKET = $conn->socket();
my $str = Dumper $SOCKET;
I searched the internet, and the closest thing that made sense to me is the following client program:
use strict;
use warnings;
use IO::Socket::SSL;
my $cl=IO::Socket::SSL->new("http://localhost:9000") or die "error=$!, ssl_error=$SSL_ERROR";
if($cl) {
$cl->connect_SSL or die $@;
# Something about certificates?
$cl->syswrite("Command");
close($cl);
}
But it's not working. The error the client program generates is as follows:
Expected 'PeerService' at client2.pl line 5.
I am a newbie in socket programming and am currently learning WebSocket programming in Perl.
Note: I am on windows platform.
I ran the example code suggested at https://stackoverflow.com/questions/37318581/simple-perl-websocket-client. It gives the error "Can't use an undefined value as a subroutine reference at C:/Strawberry/perl/site/lib/Protocol/WebSocket/Client.pm line 103.":
use strict;
use warnings;
use Protocol::WebSocket::Client;
my $client = Protocol::WebSocket::Client->new(url => 'ws://localhost:9000') or die "$!";
my $reply = "Free\n";
# Sends a correct handshake header
$client->connect or die "$!";
# Register on connect handler
$client->on(
connect => sub {
$client->write('hi there');
}
) or die "$!";
# Parses incoming data and on every frame calls on_read
$client->read($reply);
print "$reply\n";
# Sends correct close header
$client->disconnect;
Please investigate the following demo code snippets for a WebSocket server and client.
Note: please do not forget to alter the code to match your server origin (IP address and port).
use strict;
use warnings;
use feature 'say';
use Net::WebSocket::Server;
my $origin = 'http://192.168.1.160:8080'; # server origin
my $port = 8080;
$| = 1;
say "Starting server on $port";
Net::WebSocket::Server->new(
listen => $port,
tick_period => 60,
on_tick => sub {
my ($serv) = @_;
my $stamp = 'Server time: ' . scalar localtime;
$_->send_utf8($stamp) for $serv->connections;
},
on_connect => sub {
my ($serv, $conn) = @_;
$conn->on(
handshake => sub {
my ($conn, $handshake) = @_;
$conn->disconnect() unless $handshake->req->origin eq $origin;
},
ready => sub {
my ($conn) = @_;
say "Client: connect IP $conn->{ip} PORT $conn->{port}";
my $msg = 'Connected server time is ' . scalar localtime . "\n";
$_->send_utf8($msg) for $conn->server->connections;
},
utf8 => sub {
my ($conn, $msg) = @_;
say "Client message: $conn->{ip} $msg";
$_->send_utf8('Server reply: ' . $msg)
for $conn->server->connections;
$conn->disconnect() if $msg eq 'exit';
},
binary => sub {
my ($conn, $msg) = @_;
$_->send_binary($msg) for $conn->server->connections;
},
pong => sub {
my ($conn, $msg) = @_;
$_->send_utf8($msg) for $conn->server->connections;
},
disconnect => sub {
my ($conn, $code, $reason) = @_;
say "Client: disconnect IP $conn->{ip} PORT $conn->{port}";
},
);
},
)->start;
Client
use strict;
use warnings;
use feature 'say';
use IO::Async::Loop;
use IO::Async::Stream;
use Net::Async::WebSocket::Client;
my $HOST = '192.168.1.160';
my $PORT = 8080;
my $loop = IO::Async::Loop->new;
my $client = Net::Async::WebSocket::Client->new(
on_text_frame => sub {
my ( $self, $frame ) = @_;
say $frame;
},
);
my $input = IO::Async::Stream->new_for_stdin(
on_read => sub {
my ( $self, $buffref, $eof ) = @_;
my $msg;
$msg = $1 while $$buffref =~ s/^(.*)\n//;
$client->send_text_frame( $msg );
$loop->loop_stop if $msg eq 'exit';
return 0;
},
);
$loop->add( $client );
$loop->add( $input );
$client->connect(
url => "ws://$HOST:$PORT/"
)->then( sub {
say 'Successfully connected to server';
$client->send_text_frame( scalar localtime );
})->get;
$loop->run;
say 'Bye, until next time';
exit 0;
References:
Net::WebSocket::Server
Net::Async::WebSocket::Client
IO::Async::Loop

Understanding async in perl on specific example

I have to write a script that gets some URLs in parallel and does some work. In the past I have always used Parallel::ForkManager for such things, but now I wanted to learn something new and try asynchronous programming with AnyEvent (and AnyEvent::HTTP or AnyEvent::Curl::Multi) ... but I'm having problems understanding AnyEvent and writing a script that should:
open a file (every line is a separate URL)
(from now on in parallel, but with a limit of e.g. 10 concurrent requests)
read the file line by line (I don't want to load the whole file into memory - it might be big)
make an HTTP request for that URL
read the response
update the MySQL record accordingly
(next file line)
I have read many manuals and tutorials, but it's still hard for me to understand the differences between blocking and non-blocking code. I have found a similar script at http://perlmaven.com/fetching-several-web-pages-in-parallel-using-anyevent, where Mr. Szabo explains the basics, but I still can't understand how to implement something like:
...
open my $fh, "<", $file;
while ( my $line = <$fh> )
{
# http request, read response, update MySQL
}
close $fh
...
... and add a concurrency limit in this case.
I would be very grateful for help ;)
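Since the question is specifically about AnyEvent, here is a rough, untested sketch of that loop using AnyEvent::HTTP: keep a counter of in-flight requests and only read further lines from the file while the counter is below the limit. The MySQL update is only indicated by a comment, $file stands for the path to the URL list, and note that a DBI call inside the callback is blocking and will pause all other transfers while it runs.
use AnyEvent;
use AnyEvent::HTTP;

my $max_active = 10;
my $active     = 0;
my $done       = AnyEvent->condvar;

open my $fh, '<', $file or die "Cannot open $file: $!";

my $start_more;
$start_more = sub {
    # start new requests while we are below the concurrency limit
    while ( $active < $max_active and defined( my $url = <$fh> ) ) {
        chomp $url;
        $active++;
        http_get $url, sub {
            my ( $body, $headers ) = @_;
            # ... read response, update the MySQL record here ...
            $active--;
            $start_more->();                         # refill the pool
            $done->send if $active == 0 and eof $fh; # all work finished
        };
    }
};

$start_more->();
$done->send if $active == 0;   # the file was empty
$done->recv;                   # run the event loop until everything is done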
UPDATE
Following Ikegami's advice I gave Net::Curl::Multi a try. I'm very pleased with the results. After years of using Parallel::ForkManager just for concurrently grabbing thousands of URLs, Net::Curl::Multi seems to be awesome.
Here is my code with a while loop on the filehandle. It seems to work as it should, but considering it's my first time writing something like this, I would like to ask more experienced Perl users to take a look and tell me if there are potential bugs, something I missed, etc.
Also, if I may ask: as I don't fully understand how Net::Curl::Multi's concurrency works, please tell me whether I should expect any problems with putting a MySQL UPDATE command (via DBI) inside the RESPONSE loop (besides higher server load, obviously - I expect the final script to run with about 50 concurrent N::C::M workers, maybe more).
#!/usr/bin/perl
use Net::Curl::Easy qw( :constants );
use Net::Curl::Multi qw( );
sub make_request {
my ( $url ) = @_;
my $easy = Net::Curl::Easy->new();
$easy->{url} = $url;
$easy->setopt( CURLOPT_URL, $url );
$easy->setopt( CURLOPT_HEADERDATA, \$easy->{head} );
$easy->setopt( CURLOPT_FILE, \$easy->{body} );
return $easy;
}
my $maxWorkers = 10;
my $multi = Net::Curl::Multi->new();
my $workers = 0;
my $i = 1;
open my $fh, "<", "urls.txt";
LINE: while ( my $url = <$fh> )
{
chomp( $url );
$url .= "?$i";
print "($i) $url\n";
my $easy = make_request( $url );
$multi->add_handle( $easy );
$workers++;
my $running = 0;
do {
my ($r, $w, $e) = $multi->fdset();
my $timeout = $multi->timeout();
select $r, $w, $e, $timeout / 1000
if $timeout > 0;
$running = $multi->perform();
RESPONSE: while ( my ( $msg, $easy, $result ) = $multi->info_read() ) {
$multi->remove_handle( $easy );
$workers--;
printf( "%s getting %s\n", $easy->getinfo( CURLINFO_RESPONSE_CODE ), $easy->{url} );
}
# don't max CPU while waiting
select( undef, undef, undef, 0.01 );
} while ( $workers == $maxWorkers || ( eof && $running ) );
$i++;
}
close $fh;
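As for the DBI part of the question above: DBI calls are blocking, so no other transfers make progress while an UPDATE runs, but for a fast, indexed UPDATE that is usually acceptable at this scale. A rough sketch of how it might be wired into the RESPONSE loop (the urls table and its columns are invented for illustration):
use DBI;

# connect and prepare once, before the main loop
my $dbh = DBI->connect( 'DBI:mysql:database=mydb;host=localhost',
                        'user', 'password', { RaiseError => 1 } );
my $sth = $dbh->prepare( 'UPDATE urls SET status = ?, size = ? WHERE url = ?' );

# ... then inside the RESPONSE loop:
RESPONSE: while ( my ( $msg, $easy, $result ) = $multi->info_read() ) {
    $multi->remove_handle( $easy );
    $workers--;
    # this blocks the event loop briefly while the UPDATE runs
    $sth->execute(
        $easy->getinfo( CURLINFO_RESPONSE_CODE ),
        length( $easy->{body} // '' ),
        $easy->{url},
    );
}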
Net::Curl is a rather good library that's extremely fast. Furthermore, it can handle parallel requests too! I'd recommend using this instead of AnyEvent.
use Net::Curl::Easy qw( :constants );
use Net::Curl::Multi qw( );
sub make_request {
my ( $url ) = @_;
my $easy = Net::Curl::Easy->new();
$easy->{url} = $url;
$easy->setopt( CURLOPT_URL, $url );
$easy->setopt( CURLOPT_HEADERDATA, \$easy->{head} );
$easy->setopt( CURLOPT_FILE, \$easy->{body} );
return $easy;
}
my $max_running = 10;
my @urls = ( 'http://www.google.com/' );
my $multi = Net::Curl::Multi->new();
my $running = 0;
while (1) {
while ( @urls && $running < $max_running ) {
my $easy = make_request( shift( @urls ) );
$multi->add_handle( $easy );
++$running;
}
last if !$running;
my ( $r, $w, $e ) = $multi->fdset();
my $timeout = $multi->timeout();
select( $r, $w, $e, $timeout / 1000 )
if $timeout > 0;
$running = $multi->perform();
while ( my ( $msg, $easy, $result ) = $multi->info_read() ) {
$multi->remove_handle( $easy );
printf( "%s getting %s\n", $easy->getinfo( CURLINFO_RESPONSE_CODE ), $easy->{url} );
}
}
This does exactly what you want, in an asynchronous fashion, and it does so by wrapping Net::Curl safely:
#!/usr/bin/env perl
package MyDownloader;
use strict;
use warnings qw(all);
use Moo;
extends 'YADA::Worker';
has '+use_stats'=> (default => sub { 1 });
has '+retry' => (default => sub { 10 });
after init => sub {
my ($self) = @_;
$self->setopt(
encoding => '',
verbose => 1,
);
};
after finish => sub {
my ($self, $result) = @_;
if ($self->has_error) {
print "ERROR: $result\n";
} else {
# do the interesting stuff here
printf "Finished downloading %s: %d bytes\n", $self->final_url, length ${$self->data};
}
};
around has_error => sub {
my $orig = shift;
my $self = shift;
return 1 if $self->$orig(@_);
return 1 if $self->getinfo('response_code') =~ m{^5[0-9]{2}$}x;
};
1;
package main;
use strict;
use warnings qw(all);
use Carp;
use YADA;
my $q = YADA->new(
max => 8,
timeout => 30,
);
open(my $fh, '<', 'file_with_urls_per_line.txt')
or croak "can't open queue: $!";
while (my $url = <$fh>) {
chomp $url;
$q->append(sub {
MyDownloader->new($url)
});
}
close $fh;
$q->wait;

Perl: Using IPC::Shareable for pooling Net::Server connections

I am trying to have a pool of shared connections that can be accessed by Net::Server instances. Unfortunately, IPC::Shareable does not allow me to store the connections, as they are code references. This is a stripped-down version of the code:
use IPC::Shareable (':lock');
use parent 'Net::Server::Fork';
use MyConnectClass;
sub login {
return MyConnectClass->new();
};
my %connection;
tie %connection, 'IPC::Shareable', 'CONN', {
'create' => 1,
'exclusive' => 0,
'mode' => 0666,
'destroy' => 'yes',
}
or croak 'Can not tie connection variable';
sub add_connection {
my $id = shift(@_);
my $con = shift(@_);
$connection{$id} = $con;
};
sub get_connection {
my $id = # .. find unused connection
return $connection{$id};
}
sub process_request {
my $self = shift(@_);
eval {
my $connection = get_connection();
my $line = <STDIN>;
# .. use $connection to fetch data for user
};
};
for (my $i=0; $i<10; $i++) {
add_connection($i, &login);
};
main->run(
'host' => '*',
'port' => 7000,
'ipv' => '*',
'max_server' => 3,
);
Unfortunately, the program dies after the first login: 'Can't store CODE items at ../../lib/Storable.pm'. This happens even when hiding $connection in an anonymous array. I am looking for an alternative way to utilize the pool.
I appreciate your support.
I am unable to propose an alternative module, but I can make a suggestion which may or may not be of use. While you cannot store CODE, you can store strings which can be evaluated to run. Would it be possible to pass a reference to the string q!&login!, which you can dereference and call after it has been assigned to $connection?
#!/usr/bin/perl
use warnings;
use strict;
use Storable;
my $codestring = q'sub { q^japh^ };' ;
#my $codestring = q'sub { return MyConnectClass->new(); }';
#
# for (0..9){ add_connection($i, $codestring) }
open my $file, '>', '.\filestore.dat' or die $!;
store \ $codestring, $file;
close $file;
open $file, '<', '.\filestore.dat' or die " 2 $!";
my $stringref = retrieve $file; # my $con = get_connection()
close $file;
print &{ eval $$stringref } ; # &{eval $$con} ;
exit 0; # my $line = <STDIN>; ...

What is wrong with this IO::Socket::UNIX example?

I am trying to implement a simple echo client/server over a Unix socket. (My ultimate goal is to exchange JSON data, but this example is kept simple.) I have no idea why the client process disappears into a black hole when it tries to print to the socket the second time.
server.pl :
use IO::Socket::UNIX;
my $socket_path = '/tmp/mysocket';
unlink $socket_path if -e $socket_path;
my $socket = IO::Socket::UNIX->new(
Local => $socket_path,
Type => SOCK_STREAM,
Listen => SOMAXCONN,
);
die "Can't create socket: $!" unless $socket;
while (1) {
next unless my $connection = $socket->accept;
chomp( my $line = <$connection> );
print $connection "$line\n";
}
client.pl :
use IO::Socket::UNIX;
my $socket = IO::Socket::UNIX->new(
Type => SOCK_STREAM,
Peer => '/tmp/mysocket',
);
die "Can't create socket: $!" unless $socket;
my $line;
print $socket "one\n";
chomp( $line = <$socket> );
say $line;
print $socket "two\n";
chomp( $line = <$socket> );
say $line;
say "three";
Expected output:
> ./client.pl
> one
> two
> three
Actual output:
> ./client.pl
> one
You put the $socket->accept call inside your while loop. After your server establishes a connection and receives some input from the client, the next thing it wants to do is establish a new connection.
Move the accept call outside the while loop
my $connection = $socket->accept;
$connection->autoflush(1);
while (my $line = <$connection> ) {
chomp($line);
print $connection "$line\n";
}
or, if you do want to accept more than one connection,
while (1) {
next unless my $connection = $socket->accept;
$connection->autoflush(1);
while (my $line = <$connection>) {
chomp($line);
print $connection "$line\n";
}
}
Your current solution will also likely be "suffering from buffering", so both the server and the client should set autoflush(1) on their socket handles.
Now to handle simultaneous connections, the server would usually call fork after getting a connection, and handling that connection in a child process.
while (1) {
my $connection = $socket->accept;
if (fork() == 0) {
$connection->autoflush(1);
while (my $line = <$connection>) {
chomp($line);
print $connection "$line\n";
}
close $connection;
exit;
}
}
A complete bi-directional example for anyone looking:
Server:
#! /usr/bin/perl -w
use strict;
use IO::Socket::UNIX qw( SOCK_STREAM SOMAXCONN );
my $SOCK_PATH = '/tmp/test.sock';
unlink($SOCK_PATH) if -e $SOCK_PATH;
my $server = IO::Socket::UNIX->new(
Type => SOCK_STREAM(),
Local => $SOCK_PATH,
Listen => SOMAXCONN,
)
or die("Can't create server socket: $!\n");
while (1) {
my $connection = $server->accept;
if (fork() == 0) {
print "** New connection received **\n";
$connection->autoflush(1);
my $count = 1;
while (my $line = <$connection>) {
if ($line){
chomp($line);
$connection->print($count . ' -> ' . $line . "\n"); # Send response back to client, \n terminates
print "Received and replied to $count '$line'\n";
$count++;
}
}
close $connection;
exit;
}
}
Client:
#!/usr/bin/perl -w
use strict;
use IO::Socket::UNIX qw( SOCK_STREAM );
my $SOCK_PATH = '/tmp/test.sock';
my $client = IO::Socket::UNIX->new(
Type => SOCK_STREAM(),
Peer => $SOCK_PATH
)
or die("Can't connect to server: $!\n");
$client->autoflush(1);
## Listen for replies
if (fork() == 0) {
while (my $line = <$client>) {
if ($line){
chomp($line);
print("Recv: '" . $line . "'\n");
}
}
}
## Send something
for my $itm ('Alpha','Beta','Gamma','Delta'){
print("Send: " . $itm . "\n");
print($client $itm . "\n") or warn("Can't send: $!\n"); # send to server, \n terminates
}
print "** Client Finished **\n";

Creating A Single Threaded Server with AnyEvent (Perl)

I'm working on creating a local service to listen on localhost and provide a basic call and response type interface. What I'd like to start with is a baby server that you can connect to over telnet and echoes what it receives.
I've heard AnyEvent is great for this, but the documentation for AnyEvent::Socket does not give a very good example how to do this. I'd like to build this with AnyEvent, AnyEvent::Socket and AnyEvent::Handle.
Right now the little server code looks like this:
#!/usr/bin/env perl
use AnyEvent;
use AnyEvent::Handle;
use AnyEvent::Socket;
my $cv = AnyEvent->condvar;
my $host = '127.0.0.1';
my $port = 44244;
tcp_server($host, $port, sub {
my($fh) = @_;
my $cv = AnyEvent->condvar;
my $handle;
$handle = AnyEvent::Handle->new(
fh => $fh,
poll => "r",
on_read => sub {
my($self) = @_;
print "Received: " . $self->rbuf . "\n";
$cv->send;
}
);
$cv->recv;
});
print "Listening on $host\n";
$cv->wait;
This doesn't work, and when I telnet to localhost:44244 I get this:
EV: error in callback (ignoring): AnyEvent::CondVar:
recursive blocking wait attempted at server.pl line 29.
I think if I understand how to make a small single-threaded server that I can connect to over telnet, and that prints out whatever it's given and then waits for more input, I could take it a lot further from there. Any ideas?
You're blocking inside a callback. That's not allowed. There are a few ways to handle this. My preference is to launch a Coro thread from within the tcp_server callback (a rough sketch of that appears at the end of this answer). But without Coro, something like this might be what you're looking for:
#!/usr/bin/env perl5.16.2
use AnyEvent;
use AnyEvent::Handle;
use AnyEvent::Socket;
my $cv = AE::cv;
my $host = '127.0.0.1';
my $port = 44244;
my %connections;
tcp_server(
$host, $port, sub {
my ($fh) = @_;
print "Connected...\n";
my $handle;
$handle = AnyEvent::Handle->new(
fh => $fh,
poll => 'r',
on_read => sub {
my ($self) = @_;
print "Received: " . $self->rbuf . "\n";
},
on_eof => sub {
my ($hdl) = @_;
$hdl->destroy();
},
);
$connections{$handle} = $handle; # keep it alive.
return;
});
print "Listening on $host\n";
$cv->recv;
Note that I'm only waiting on one condvar. And I'm storing the handles to keep the AnyEvent::Handle objects alive longer. Work to clean up $self->rbuf is left as an exercise for the reader :-)
Question cross-posted, answer, too :-)
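For completeness, the Coro approach mentioned at the start of this answer might look roughly like this (an untested sketch following the pattern in the Coro documentation): each accepted connection gets its own Coro thread, and unblock wraps the raw socket so it can be read line by line inside that thread without blocking the event loop.
#!/usr/bin/env perl
use strict;
use warnings;
use Coro;                         # async {}
use Coro::AnyEvent;               # integrate Coro with the AnyEvent loop
use Coro::Handle qw( unblock );
use AnyEvent;
use AnyEvent::Socket qw( tcp_server );

my $host = '127.0.0.1';
my $port = 44244;

tcp_server $host, $port, sub {
    my ( $fh ) = @_;
    async {
        my $handle = unblock $fh;             # Coro-aware handle
        while ( my $line = $handle->readline ) {
            print "Received: $line";
            $handle->print( $line );          # echo it back
        }
        print "Connection closed\n";
    };
};

print "Listening on $host:$port\n";
AnyEvent->condvar->recv;                      # run forever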
I have heard good things about AnyEvent as well, but have not used it. I wrote a small nonblocking server in the past using IO::Select. There is an example in the documentation for that module (I've added a few lines):
use IO::Select;
use IO::Socket;
$lsn = new IO::Socket::INET(Listen => 1, LocalPort => 8080);
$sel = new IO::Select( $lsn );
while(@ready = $sel->can_read) {
foreach $fh (@ready) {
if($fh == $lsn) {
# Create a new socket
$new = $lsn->accept;
$sel->add($new);
}
else {
# Process socket
my $input = <$fh>;
print $fh "Hello there. You said: $input\n";
# Maybe we have finished with the socket
$sel->remove($fh);
$fh->close;
}
}
}
I'm not sure what your condvar is trying to trigger there. Use it to send state, like:
#!/usr/bin/env perl
use AnyEvent;
use AnyEvent::Handle;
use AnyEvent::Socket;
my $host = '127.0.0.1';
my $port = 44244;
my $exit = AnyEvent->condvar;
tcp_server($host, $port, sub {
my($fh) = @_;
my $handle; $handle = AnyEvent::Handle->new(
fh => $fh,
poll => "r",
on_read => sub {
my($self) = @_;
print "Received: " . $self->rbuf . "\n";
if ($self->rbuf eq 'exit') {
$exit->send;
}
}
);
});
print "Listening on $host\n";
$exit->recv;