Secure Socket with Poco - sockets

I am implementing a tcp server and client using secure sockets (Poco::Net::SecureServerSocket), I attach here the code I am using:
void serverClientTest()
{
// Spins up a TLS echo server on port 8085 and immediately connects to it as a
// client from the same process, sending "TEST TEST" and printing the reply.
try {
Poco::Net::initializeSSL();
// Socket server
// Context(usage, privateKeyFile, certificateFile, caLocation, verificationMode).
// NOTE(review): VERIFY_ONCE only *requests* the peer certificate, and the
// AcceptCertificateHandler(true) below accepts certificates that fail
// verification — so a client whose certificate is unknown to the server is
// NOT rejected. Use VERIFY_STRICT (as in the corrected variant further down)
// to make the handshake fail instead.
Poco::Net::Context::Ptr ptrContext =
new Poco::Net::Context(Poco::Net::Context::TLS_SERVER_USE,
"./cert4/myKey.pem",
"./cert4/myCert.pem",
"./cert4/myCert.pem",
Poco::Net::Context::VERIFY_ONCE);
Poco::SharedPtr<Poco::Net::InvalidCertificateHandler> ptrCert = new Poco::Net::AcceptCertificateHandler(true);
Poco::SharedPtr<Poco::Net::PrivateKeyPassphraseHandler> ptrPrivateKeyPassphraseHandler;
// true = server-side handler; prompts on the console for the key passphrase.
ptrPrivateKeyPassphraseHandler = new Poco::Net::KeyConsoleHandler(true);
Poco::Net::SSLManager::instance().initializeServer(ptrPrivateKeyPassphraseHandler, ptrCert, ptrContext);
Poco::Net::SocketAddress serverAddress("0.0.0.0", 8085);
Poco::Net::SecureServerSocket serverSecureSocket(serverAddress);
Poco::Net::TCPServer srv(new Poco::Net::TCPServerConnectionFactoryImpl<EchoConnection>(), serverSecureSocket);
srv.start();
// Client context: same key/cert/CA files, client-side usage.
Poco::Net::Context::Ptr ptrContext2 =
new Poco::Net::Context(Poco::Net::Context::TLS_CLIENT_USE,
"./cert4/myKey.pem",
"./cert4/myCert.pem",
"./cert4/myCert.pem",
Poco::Net::Context::VERIFY_ONCE);
Poco::SharedPtr<Poco::Net::InvalidCertificateHandler> ptrCert2 = new Poco::Net::AcceptCertificateHandler(true);
Poco::SharedPtr<Poco::Net::PrivateKeyPassphraseHandler>ptrPrivateKeyPassphraseHandler2(new Poco::Net::KeyConsoleHandler(false));
Poco::Net::SSLManager::instance().initializeClient(ptrPrivateKeyPassphraseHandler2, ptrCert2, ptrContext2);
// Connect to the server just started and do one send/receive round-trip.
Poco::Net::SocketAddress sa("127.0.0.1", 8085);
Poco::Net::SecureStreamSocket ss1(sa);
std::string data("TEST TEST");
int retSend = ss1.sendBytes(data.data(), (int) data.size());
if (retSend>0)
{
std::cout << "buffer -> : " << data.data() << std::endl;
char buffer[1024];
memset(buffer, '\0', 1024);
int retRecv = ss1.receiveBytes(buffer, sizeof(buffer));
if (retRecv > 0)
{
std::cout << "buffer <- : " << buffer << std::endl;
}
else
{
std::cout << "ERROR: recv " << retRecv << std::endl;
}
}
ss1.close();
}
catch (Poco::Exception& ex)
{
std::cout << "!! EXCEPTION "<< ex.displayText() << std::endl;
}
}
//[....]
class EchoConnection: public Poco::Net::TCPServerConnection
{
public:
EchoConnection(const Poco::Net::StreamSocket& s): Poco::Net::TCPServerConnection(s){}
void run()
{
Poco::Net::StreamSocket& ss = socket();
std::cout << "connection from client: " << ss.address() << std::endl;
try
{
// ...
}
catch (Poco::Exception& exc)
{
std::cerr << "--------------- EchoConnection: " << exc.displayText() << std::endl;
}
}
};
I would like the server to close the connection when the client certificate is not known to the server, but the connection still succeeds — even with this client context:
// NOTE(review): this is the *client* context; tightening it cannot by itself
// make the server reject unknown clients — the server's own verification
// mode (VERIFY_STRICT) controls that.
Poco::Net::Context::Ptr ptrContext2 =
new Poco::Net::Context(Poco::Net::Context::TLS_CLIENT_USE,
"",
"",
"./cert4/myCert.pem",
Poco::Net::Context::VERIFY_ONCE);
Thanks to anyone who can help me out.

void serverClientTest()
{
try {
Poco::Net::initializeSSL();
// Socket server
Poco::Net::Context::Ptr ptrContext =
new Poco::Net::Context(Poco::Net::Context::SERVER_USE,
"./server.key",
"./server.crt",
"./ca.pem",
Poco::Net::Context::VERIFY_STRICT,
9,
false,
"ALL:!ADH:!LOW:!EXP:!MD5:#STRENGTH");
Poco::SharedPtr<Poco::Net::InvalidCertificateHandler> ptrCert = new Poco::Net::AcceptCertificateHandler(true);
Poco::SharedPtr<Poco::Net::PrivateKeyPassphraseHandler> ptrPrivateKeyPassphraseHandler;
ptrPrivateKeyPassphraseHandler = new Poco::Net::KeyConsoleHandler(true);
Poco::Net::SSLManager::instance().initializeServer(ptrPrivateKeyPassphraseHandler, ptrCert, ptrContext);
Poco::Net::SocketAddress serverAddress("0.0.0.0", 8085);
Poco::Net::SecureServerSocket serverSecureSocket(serverAddress);
Poco::Net::TCPServer srv(new Poco::Net::TCPServerConnectionFactoryImpl<EchoConnection>(), serverSecureSocket);
srv.start();
Poco::Net::Context::Ptr ptrContext2 =
new Poco::Net::Context(Poco::Net::Context::CLIENT_USE,
"./client.key",
"./client.crt",
"./ca.pem",
Poco::Net::Context::VERIFY_STRICT,
9,
true,
"ALL:!ADH:!LOW:!EXP:!MD5:#STRENGTH");
Poco::SharedPtr<Poco::Net::InvalidCertificateHandler> ptrCert2 = new Poco::Net::AcceptCertificateHandler(true);
Poco::SharedPtr<Poco::Net::PrivateKeyPassphraseHandler>ptrPrivateKeyPassphraseHandler2(new Poco::Net::KeyConsoleHandler(false));
Poco::Net::SSLManager::instance().initializeClient(ptrPrivateKeyPassphraseHandler2, ptrCert2, ptrContext2);
Poco::Net::SocketAddress sa("127.0.0.1", 8085);
Poco::Net::SecureStreamSocket ss1(sa);
std::string data("TEST");
int retSend = ss1.sendBytes(data.data(), (int) data.size());
if (retSend>0)
{
char buffer[1024];
memset(buffer, '\0', 1024);
int retRecv = ss1.receiveBytes(buffer, sizeof(buffer));
if (retRecv > 0)
{
std::cout << "buffer <-: " << buffer << std::endl;
}
else
{
std::cout << "ERROR: " << retRecv << std::endl;
}
}
ss1.close();
}
catch (Poco::Exception& ex)
{
std::cout << ex.displayText() << std::endl;
}
}

Related

How can I disable the Windows microphone enhancements from C++ code?

is there a way to disable the microphone audio enhancements from C++ code?
We are doing some audio processing things on the incoming microphone signal, and the microphone enhancements cause all kinds of trouble. We know we can turn it off manually on Windows10 by going to the Control panel --> Sound --> Recording --> Microphone --> Properties --> advanced--> Enhancements, but we need a way to do this from code.
[Screenshot of the microphone Enhancements tab omitted.]
When I try to access it via C++, it comes back saying there is no AGC on the device.
// Walks the capture endpoint's device topology and disables any hardware
// loudness / AGC processing nodes found on the signal path (the walk itself
// is done by WalkTreeForwardsFromPart). Logs and returns early on any COM
// failure.
void AudioDeviceWindowsCore::DisableHardwareAGC() {
  // Get the device-topology interface for the capture endpoint.
  CComPtr<IDeviceTopology> pTopo = NULL;
  HRESULT ret = _ptrDeviceIn->Activate(__uuidof(IDeviceTopology), CLSCTX_ALL, NULL,
                                       (void**)&pTopo);
  if (FAILED(ret)) {
    RTC_LOG(LS_ERROR) << "Couldn't get device topology object: hr = 0x"
                      << rtc::ToHex(ret);
    return;
  }
  // Bug fix: the connector count was queried but its result and value were
  // never checked before asking for connector 0.
  UINT count = 0;
  ret = pTopo->GetConnectorCount(&count);
  if (FAILED(ret) || count == 0) {
    RTC_LOG(LS_ERROR) << __FUNCTION__
                      << " GetConnectorCount failed or no connectors:"
                      << rtc::ToHex(ret);
    return;
  }
  CComPtr<IConnector> pConn = NULL;
  ret = pTopo->GetConnector(0, &pConn);
  if (FAILED(ret)) {
    RTC_LOG(LS_ERROR) << __FUNCTION__ << " GetConnector:" << rtc::ToHex(ret);
    return;
  }
  // Hop across the connection into the adjacent device's topology.
  CComPtr<IConnector> pConnNext = NULL;
  ret = pConn->GetConnectedTo(&pConnNext);
  if (FAILED(ret)) {
    RTC_LOG(LS_ERROR) << __FUNCTION__
                      << " GetConnectedTo:" << rtc::ToHex(ret);
    return;
  }
  CComPtr<IPart> pPart = NULL;
  const IID IID_IPart = __uuidof(IPart);
  ret = pConnNext->QueryInterface(IID_IPart, (void**)&pPart);
  if (FAILED(ret)) {
    RTC_LOG(LS_ERROR) << __FUNCTION__
                      << " QueryInterface:" << rtc::ToHex(ret);
    return;
  }
  // All the real work is done in this function.
  ret = WalkTreeForwardsFromPart(pPart);
  if (SUCCEEDED(ret)) {
    RTC_LOG(INFO) << " now close hardware AGC ret:" << rtc::ToHex(ret);
  } else {
    RTC_LOG(INFO) << " Hardware not Supports Microphone AGC";
  }
}
// Recursively walks the device topology forwards from pPart, disabling every
// loudness (KSNODETYPE_LOUDNESS) and AGC (KSNODETYPE_AGC) node encountered.
// Returns S_OK when the subtree was walked completely (including when the end
// of the signal path is reached), otherwise the failing HRESULT.
HRESULT WalkTreeForwardsFromPart(IPart* pPart) {
  HRESULT hr = S_OK;
  LPWSTR pwszPartName = NULL;
  hr = pPart->GetName(&pwszPartName);
  if (FAILED(hr)) {
    RTC_LOG(LS_ERROR) << "Could not get part name: hr = 0x"<<rtc::ToHex(hr);
    return hr;
  }
  // Bug fix: "<<" binds tighter than "?:", so the original streamed the first
  // wide character and discarded the conditional. Parenthesize the ternary.
  RTC_LOG(INFO) << "Part name: " << (*pwszPartName ? pwszPartName : L"Unnamed");
  CoTaskMemFree(pwszPartName);
  GUID type_id;
  hr = pPart->GetSubType(&type_id);
  if (hr == S_OK){
    if (IsEqualGUID(type_id,KSNODETYPE_LOUDNESS) ) {
      const IID IID_IAudioLoudness = __uuidof(IAudioLoudness);
      IAudioLoudness* loudness = NULL;
      hr = pPart->Activate(CLSCTX_ALL, IID_IAudioLoudness, (void**)&loudness);
      if (E_NOINTERFACE == hr) {
        // This node exposes no loudness control — nothing to disable here.
        RTC_LOG(LS_ERROR) << "NO AudioLoudness CONTROL";
      } else if (FAILED(hr)) {
        RTC_LOG(LS_ERROR) << "Unexpected failure trying to activate "
                             "IID_IAudioLoudness : hr = 0x"
                          << rtc::ToHex(hr);
      } else {
        // It's a loudness node — switch it off.
        RTC_LOG(INFO) << "HAS IAudioLoudness CONTROL";
        hr = loudness->SetEnabled(0, NULL);
        if (FAILED(hr)) {
          RTC_LOG(LS_ERROR)
              << "AudioLoudness Failed: hr = 0x" << rtc::ToHex(hr);
          loudness->Release();
          return hr;
        }
        loudness->Release();
      }
    } else if (IsEqualGUID(type_id, KSNODETYPE_AGC)) {
      // Check AGC settings.
      const IID IID_IAudioAutoGainControl = __uuidof(IAudioAutoGainControl);
      IAudioAutoGainControl* aGCcontrol = NULL;
      hr = pPart->Activate(CLSCTX_ALL, IID_IAudioAutoGainControl,
                           (void**)&aGCcontrol);
      if (E_NOINTERFACE == hr) {
        RTC_LOG(LS_ERROR) << "NO AGC CONTROL";
        // Not an AGC node.
      } else if (FAILED(hr)) {
        RTC_LOG(LS_ERROR) << "Unexpected failure trying to activate "
                             "IAudioAutoGainControl : hr = 0x"
                          << rtc::ToHex(hr);
        return hr;
      } else {
        // It's an AGC node — switch it off.
        RTC_LOG(INFO) << "HAS AGC CONTROL";
        // Bug fix: the SetEnabled() result was discarded, so the FAILED(hr)
        // check below was testing the stale Activate() result.
        hr = aGCcontrol->SetEnabled(0, NULL);
        if (FAILED(hr)) {
          RTC_LOG(LS_ERROR) << "AGC Failed: hr = 0x" << rtc::ToHex(hr);
          aGCcontrol->Release();
          return hr;
        }
        aGCcontrol->Release();
      }
    }
  }
  // Enumerate the parts connected downstream (outgoing) of this one.
  IPartsList* pOutgoingParts = NULL;
  hr = pPart->EnumPartsOutgoing(&pOutgoingParts);
  if (E_NOTFOUND == hr) {
    // Not an error — we've simply reached the end of the signal path.
    // Bug fix: the original returned E_NOTFOUND here, which every recursive
    // caller treated as FAILED() and aborted the entire walk.
    RTC_LOG(INFO) << "No outgoing parts at this part";
    return S_OK;
  }
  if (FAILED(hr)) {
    RTC_LOG(LS_ERROR) << " Couldn't enum outgoing parts";
    return hr;
  }
  UINT nParts = 0;
  hr = pOutgoingParts->GetCount(&nParts);
  if (FAILED(hr)) {
    RTC_LOG(LS_ERROR) << "Couldn't get count of outgoing parts";
    pOutgoingParts->Release();
    return hr;
  }
  // Walk the tree on each outgoing part recursively.
  for (UINT n = 0; n < nParts; n++) {
    IPart* pOutgoingPart = NULL;
    hr = pOutgoingParts->GetPart(n, &pOutgoingPart);
    if (FAILED(hr)) {
      RTC_LOG(LS_ERROR) << "Couldn't get part ";
      pOutgoingParts->Release();
      return hr;
    }
    hr = WalkTreeForwardsFromPart(pOutgoingPart);
    if (FAILED(hr)) {
      RTC_LOG(LS_ERROR) << " Couldn't walk tree on part";
      pOutgoingPart->Release();
      pOutgoingParts->Release();
      return hr;
    }
    pOutgoingPart->Release();
  }
  pOutgoingParts->Release();
  return S_OK;
}

Winsock invalid socket error once include external sources

I am trying to create a server/client communication console, but once I include the external library (Crypto++) and debug, the program always terminates due to an invalid-socket error. I have no idea what is going on.
#include <iostream>
#include <WS2tcpip.h>
#include <string>
#pragma comment (lib, "ws2_32.lib")
#include "../cryptopp565/sha.h"
using CryptoPP::SHA1;
using namespace std;
int main()
{
//initialize winsock
WSADATA wsData;
WORD ver = MAKEWORD(2, 2);
int wsOk = WSAStartup(ver, &wsData);
if (wsOk != 0)
{
cerr << "Can't Initialize winsock! program terminate." << endl;
return 1;
}
//create a socket
SOCKET listening = socket(AF_INET, SOCK_STREAM, 0);
if (listening == INVALID_SOCKET)
{
cerr << "Can't create a socket! program terminate." << endl;
return 1;
}
//Promtp to get server port
string server_port_temp;
cout << "Enter port : ";
cin >> server_port_temp;
int server_port = stoi(server_port_temp);
//bind the ip address and port to a socket
sockaddr_in hint;
hint.sin_family = AF_INET;
hint.sin_port = htons(server_port);
hint.sin_addr.S_un.S_addr = INADDR_ANY;
bind(listening, (sockaddr*)&hint, sizeof(hint));
//tell winsock the socket is for listening
listen(listening, SOMAXCONN);
//wait for connection
sockaddr_in client;
int clientSize = sizeof(client);
SOCKET clientSocket = accept(listening, (sockaddr*)&client, &clientSize);
if (clientSocket == INVALID_SOCKET)
{
cerr << "Invalid socket ! program terminate." << endl;
return 1;
}
char host[NI_MAXHOST]; //client remote name
char service[NI_MAXHOST]; // service (poet client is connect on)
ZeroMemory(host, NI_MAXHOST);
ZeroMemory(service, NI_MAXHOST);
if (getnameinfo((sockaddr*)&client, sizeof(client), host, NI_MAXHOST, service, NI_MAXHOST, 0) == 0)
{
cout << host << " Connect on port " << service << endl;
}
else
{
inet_ntop(AF_INET, &client.sin_addr, host, NI_MAXHOST);
cout << host << " Connected on port " << ntohs(client.sin_port) << endl;
}
//close listening socket
closesocket(listening);
//while loop: communication
char buf[4096];
//communicate ________________
while (true)
{
ZeroMemory(buf, 4096);
//wait for client to send data
int bytesReceived = recv(clientSocket, buf, 4096, 0);
if (bytesReceived == SOCKET_ERROR)
{
cerr << "Error in recv(). Program terminate." << endl;
break;
}
if (bytesReceived == 0)
{
cout << "Client disconnected " << endl;
break;
}
cout << string(buf, 0, bytesReceived) << endl;
//echo message back to client
send(clientSocket, buf, bytesReceived + 1, 0);
}
//close the socket
closesocket(clientSocket);
//clean winsock
WSACleanup();
system("pause");
return 0;
}`

How to get OpenGL-ES working on Raspberry Pi with SDL2?

I am trying to get OpenGL-ES working on a Raspberry Pi, but so far no luck. I compiled SDL 2.0.3 from source with this, as the version in Rasbian is missing Raspberry Pi support:
./configure --prefix=/home/pi/run/SDL2-2.0.3/ \
--disable-video-x11 \
--disable-pulseaudio \
--disable-esd \
--disable-video-opengl
The code below should create a OpenGL context and clear the screen to red. When I run the code, the Raspberry Pi is switching video modes, but the screen is turning black instead of red and the calls to glGetString(GL_VERSION) and Co. return NULL which would indicate that something is wrong with the GL context creation.
#include <SDL.h>
#include <SDL_opengles2.h>
#include <iostream>
// Logs the GL string identified by `name` (e.g. GL_VERSION) to stderr.
// glGetString() returns a null pointer when the query fails (e.g. no current
// GL context), which is reported instead of being dereferenced.
void print_gl_string(GLenum name)
{
    const GLubyte* value = glGetString(name);
    if (value)
    {
        std::cerr << name << ": " << value << std::endl;
    }
    else
    {
        std::cerr << "error getting string: " << name << std::endl;
    }
}
// Thin wrapper around SDL_GL_SetAttribute() that logs failures to stderr
// instead of silently discarding them.
void set_gl_attribute(SDL_GLattr attr, int value)
{
    const int rc = SDL_GL_SetAttribute(attr, value);
    if (rc != 0)
    {
        std::cerr << "SDL_GL_SetAttribute(" << attr << ", " << value
                  << ") failed: " << SDL_GetError() << std::endl;
    }
}
// Creates an SDL window with a GLES 2.0 context, clears the screen to red,
// shows it for 5 seconds, then tears everything down.
int main()
{
    if (SDL_Init(SDL_INIT_VIDEO) != 0)
    {
        std::cerr << "SDL_Init() failed: " << SDL_GetError() << std::endl;
        exit(EXIT_FAILURE);
    }
    SDL_DisplayMode videomode;
    if (SDL_GetCurrentDisplayMode (0, &videomode) != 0)
    {
        std::cerr << "Error getting current display mode: " << SDL_GetError() << std::endl;
        exit(EXIT_FAILURE);
    }
    std::cout << "Current screen mode: " << videomode.w << "x" << videomode.h << std::endl;
    // Request a GLES 2.0 context with a 16-bit (5-6-5) color buffer.
    set_gl_attribute(SDL_GL_RED_SIZE, 5);
    set_gl_attribute(SDL_GL_GREEN_SIZE, 6);
    set_gl_attribute(SDL_GL_BLUE_SIZE, 5);
    //set_gl_attribute(SDL_GL_DEPTH_SIZE, 8);
    set_gl_attribute(SDL_GL_DOUBLEBUFFER, 1);
    set_gl_attribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
    set_gl_attribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);
    set_gl_attribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES);
    SDL_Window* window = SDL_CreateWindow("Minimal SDL2 Example",
                                          SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                          720, 576,
                                          SDL_WINDOW_OPENGL);
    if (!window)
    {
        std::cerr << "Could not create window: " << SDL_GetError() << std::endl;
        exit(EXIT_FAILURE);
    }
    SDL_GLContext gl_context = SDL_GL_CreateContext(window);
    // Bug fix: the context result was never checked. When creation fails the
    // GL calls below operate without a current context, which is exactly why
    // glGetString() was returning NULL.
    if (!gl_context)
    {
        std::cerr << "Could not create GL context: " << SDL_GetError() << std::endl;
        SDL_DestroyWindow(window);
        SDL_Quit();
        exit(EXIT_FAILURE);
    }
    print_gl_string(GL_RENDERER);
    print_gl_string(GL_SHADING_LANGUAGE_VERSION);
    print_gl_string(GL_VERSION);
    print_gl_string(GL_EXTENSIONS);
    // Clear to opaque red and present.
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow(window);
    SDL_Delay(5000);
    SDL_GL_DeleteContext(gl_context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
The problem turned out to not be in the code, but in the library path. A simple -L/opt/vc/lib/ added to the compile command line fixed it. Without that the compiler would pick:
/usr/lib/arm-linux-gnueabihf/libGLESv2.so.2
While the right one would be (use ldd to check):
/opt/vc/lib/libGLESv2.so

How to pass struct object over socket using sendto() and rcvfrom() functions?

Here is the code I am trying to implement in a client/server model, but a segmentation fault error occurs.
server:
/* this is the structure i am trying to pass*/
/* Fixed-size packet exchanged over the socket as raw bytes (contains no
   pointers, so sending sizeof(pkt) bytes is safe). */
struct pkt{
    char data[1000]; // NUL-terminated payload
    int seqNo;       // sequence number
    int checksum;
    /* Fill all packet fields from the given values.
       Bug fix: bounds-checked copy — the original strcpy() overran data[]
       when given an oversized or unterminated source. At most 999 payload
       characters are kept, always NUL-terminated. */
    void make_pkt(int seq, const char dat[1000], int check)
    {
        seqNo = seq;
        strncpy(data, dat, sizeof(data) - 1);
        data[sizeof(data) - 1] = '\0';
        checksum = check;
    }
};
the main part is
main() /* main */
{
port=client_addr.sin_port;
cout<<"port : "<<port;
pkt *rcv=new pkt;
do{
cout<<"a"<<endl;
cout<<"port : "<<port;
pkt *newpkt=new pkt;
char *buffer=new char [1000];
strcpy(buffer,"Hamza");
newpkt->make_pkt(1,buffer,2);
cout<<"aa"<<endl;
client_addr.sin_port=port;
cout<<"port 2 : "<<client_addr.sin_port<<endl;
sendto(sockfd,&newpkt,sizeof(struct pkt),0,(struct sockaddr *) &client_addr,sizeof(client_addr));
cout<<"aaa"<<endl;
recvfrom(sockfd,&rcv,sizeof(struct pkt),0,(struct sockaddr *)&client_addr,&leng);
cout<<"bb :"<<rcv->seqNo<<endl;
}while(rcv->seqNo!=1);
}
client:
/* Client-side packet. Must remain layout-compatible with the server's pkt
   (same members in the same order), since raw struct bytes are exchanged. */
struct pkt{
    char data[1000]; // NUL-terminated payload
    int seqNo;       // sequence number (0 until acknowledged)
    int checksum;
    /* Default-construct an empty packet with a single-space payload. */
    pkt()
    {
        strcpy(data," ");
        seqNo=0;
        /* Bug fix: the original also assigned "timer = 0;", but the struct
           declares no such member, so the code did not compile. */
        checksum=0;
    }
};
main()
{
pkt *newpkt=new pkt;
do{
cout<<"aa"<<endl;
pkt *rcvpkt=new pkt;
socklen_t lengt=sizeof(my_addr);
recvfrom(sockfd,&rcvpkt,sizeof(struct pkt),0,(struct sockaddr *)&my_addr,&lengt);
cout<<"aaa"<<endl;
cout<<"aaa"<<rcvpkt->data<<endl;
newpkt->seqNo=1;
sendto(sockfd,&newpkt,sizeof(struct pkt),0,(struct sockaddr *) &my_addr,sizeof(my_addr));
}while(newpkt->seqNo!=1);
}
Please tell me what the problem in this code is.
You are not passing the struct instances to sendto() and recvfrom() correctly. Your pkt variables are pointers so you need to get rid of the & operator when passing them. You also have memory leaks. Try this instead:
main()
{
port = ntohs(client_addr.sin_port);
cout << "port : " << port;
pkt *rcv = new pkt;
do{
cout << "a" << endl;
cout << "port : " << port;
pkt *newpkt = new pkt;
char buffer[1000];
strcpy(buffer,"Hamza");
newpkt->make_pkt(1,buffer,2);
cout << "aa" << endl;
client_addr.sin_port = htons(port);
cout << "port 2 : " << ntohs(client_addr.sin_port) << endl;
sendto(sockfd, newpkt, sizeof(struct pkt), 0, (struct sockaddr *) &client_addr, sizeof(client_addr));
delete newpkt;
cout << "aaa" << endl;
recvfrom(sockfd, rcv, sizeof(struct pkt), 0, (struct sockaddr *)&client_addr, &leng);
cout << "bb :" << rcv->seqNo << endl;
}
while(rcv->seqNo != 1);
delete rcv;
}
main()
{
pkt *newpkt = new pkt;
do{
cout << "aa" << endl;
pkt *rcvpkt = new pkt;
socklen_t lengt = sizeof(my_addr);
recvfrom(sockfd, rcvpkt, sizeof(struct pkt), 0, (struct sockaddr *)&my_addr, &lengt);
cout << "aaa" << endl;
cout << "aaa" << rcvpkt->data << endl;
newpkt->seqNo = 1;
sendto(sockfd, newpkt, sizeof(struct pkt), 0, (struct sockaddr *) &my_addr, sizeof(my_addr));
delete rcvpkt;
}
while(newpkt->seqNo != 1);
delete newpkt;
}

VC++ Winsock2 Error 10049. Trying to build IRC bot

I'm trying to port my IRC bot from Python to C++ and I'm running into some issues with Winsock2. I'm fairly new to sockets in C/C++ and most of this code was pieced together from various tutorials. I keep getting error 10049 and am at a loss. Any help would be appreciated greatly. Thanks!
port_ is set to 6667 and host_ is "irc.rizon.net"
// Connects a TCP socket to host_:port_ (intended target: irc.rizon.net:6667).
WSADATA wsaData;
int starterr = WSAStartup(MAKEWORD(2,2), &wsaData);
if (starterr != 0) {
std::cout << "Error: " << WSAGetLastError() << " occurred!" << std::endl;
WSACleanup();
return 1;
}
std::cout << "WSAStartup Successful!" << std::endl;
socketfd_ = socket(AF_INET,SOCK_STREAM,IPPROTO_TCP);
if (socketfd_ == INVALID_SOCKET) {
std::cout << "Error: " << WSAGetLastError() << " occurred!" << std::endl;
WSACleanup();
return 1;
}
std::cout << "Socket Creation Successful!" << std::endl;
sockaddr_in anews;
anews.sin_port = htons(port_);
// NOTE(review): inet_addr() only parses dotted-quad strings ("x.x.x.x").
// Passing a host name such as "irc.rizon.net" cannot produce a valid
// address, which is why connect() fails with error 10049 — resolve the name
// first (see the gethostbyname()/getaddrinfo() variants below).
anews.sin_addr.s_addr = inet_addr(host_.c_str());
anews.sin_family = AF_INET;
if (connect(socketfd_,(sockaddr*)&anews, sizeof(anews)) == SOCKET_ERROR) {
std::cout << "Error: " << WSAGetLastError() << " occurred!" << std::endl;
WSACleanup();
return 1;
}
std::cout << "Socket has connected successfuly!" << std::endl;
return 0;
inet_addr() takes a dotted IP address of the form "x.x.x.x", but you are passing it the host name.
You can use gethostbyname():
// Legacy name resolution: look the host up with gethostbyname(), convert the
// first address to dotted-quad text, then feed that to inet_addr().
hostent* host;
char* ip;
...
// Get the local host information
// NOTE(review): the result is not checked here — gethostbyname() returns
// NULL on failure, which would make the dereference on the next line crash.
host= gethostbyname(host_.c_str());
ip= inet_ntoa(*(struct in_addr *)*host->h_addr_list);
sockaddr_in anews;
anews.sin_port = htons(port_);
anews.sin_addr.s_addr = inet_addr(ip);
anews.sin_family = AF_INET;
...
Or an easier route would be to use getaddrinfo():
// Resolve host_ and connect. getaddrinfo() accepts both host names and
// numeric addresses, so no separate gethostbyname()/inet_addr() step is
// needed.
struct addrinfo *ai;
if (getaddrinfo(host_.c_str(), "6667", NULL, &ai) != 0)
    return 1;
socketfd_ = socket(ai->ai_family, SOCK_STREAM, 0);
if (socketfd_ == INVALID_SOCKET) {
    freeaddrinfo(ai);
    return 1; // bug fix: this statement was missing its semicolon
}
if (connect(socketfd_, ai->ai_addr, (int)ai->ai_addrlen) == SOCKET_ERROR) {
    closesocket(socketfd_);
    freeaddrinfo(ai);
    return 1;
}
freeaddrinfo(ai); // bug fix: the address list was leaked on the success path
// ...