#include "StdAfx.h"
#include "IOCPServer.h"
#include "2015Remote.h"
#include <iostream>
#include <ws2tcpip.h>

// Get the client IP address from a socket.
std::string GetPeerName(SOCKET sock)
{
    sockaddr_in ClientAddr = {};
    int ulClientAddrLen = sizeof(sockaddr_in);
    int s = getpeername(sock, (SOCKADDR*)&ClientAddr, &ulClientAddrLen);
    return s != SOCKET_ERROR ? inet_ntoa(ClientAddr.sin_addr) : "";
}

// Get the client IP address from a socket.
std::string GetRemoteIP(SOCKET sock)
{
    sockaddr_in addr;
    int addrLen = sizeof(addr);

    if (getpeername(sock, (sockaddr*)&addr, &addrLen) == 0) {
        char ipStr[INET_ADDRSTRLEN];
        inet_ntop(AF_INET, &addr.sin_addr, ipStr, sizeof(ipStr));
        TRACE(">>> Peer IP address: %s\n", ipStr);
        return ipStr;
    }
    TRACE(">>> Failed to get peer IP, error code: %d\n", WSAGetLastError());
    char buf[16];
    sprintf_s(buf, "%d", (int)sock);
    return buf;
}

IOCPServer::IOCPServer(void)
{
    WSADATA wsaData;
    if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) {
        return;
    }

    m_hCompletionPort = NULL;
    m_sListenSocket = INVALID_SOCKET;
    m_hListenEvent = WSA_INVALID_EVENT;
    m_hListenThread = NULL;

    m_ulMaxConnections = 10000;

    InitializeCriticalSection(&m_cs);

    m_ulWorkThreadCount = 0;

    m_bTimeToKill = FALSE;

    m_ulThreadPoolMin = 0;
    m_ulThreadPoolMax = 0;
    m_ulCPULowThreadsHold = 0;
    m_ulCPUHighThreadsHold = 0;
    m_ulCurrentThread = 0;
    m_ulBusyThread = 0;

    m_ulKeepLiveTime = 0;

    m_hKillEvent = NULL;

    m_NotifyProc = NULL;
    m_OfflineProc = NULL;
}

void IOCPServer::Destroy()
{
    m_bTimeToKill = TRUE;

    if (m_hKillEvent != NULL) {
        SetEvent(m_hKillEvent);
        SAFE_CLOSE_HANDLE(m_hKillEvent);
        m_hKillEvent = NULL;
    }

    if (m_sListenSocket != INVALID_SOCKET) {
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
    }

    if (m_hCompletionPort != INVALID_HANDLE_VALUE) {
        SAFE_CLOSE_HANDLE(m_hCompletionPort);
        m_hCompletionPort = INVALID_HANDLE_VALUE;
    }

    if (m_hListenEvent != WSA_INVALID_EVENT) {
        SAFE_CLOSE_HANDLE(m_hListenEvent);
        m_hListenEvent = WSA_INVALID_EVENT;
    }
}

IOCPServer::~IOCPServer(void)
{
    Destroy();
    while (m_ulWorkThreadCount || m_hListenThread)
        Sleep(10);

    while (!m_ContextConnectionList.IsEmpty()) {
        CONTEXT_OBJECT *ContextObject = m_ContextConnectionList.GetHead();
        RemoveStaleContext(ContextObject);
        SAFE_DELETE(ContextObject->olps);
    }

    while (!m_ContextFreePoolList.IsEmpty()) {
        CONTEXT_OBJECT *ContextObject = m_ContextFreePoolList.RemoveHead();
        // The statement below has a chance of crashing, 2019-01-14:
        //SAFE_DELETE(ContextObject->olps);
        delete ContextObject;
    }

    DeleteCriticalSection(&m_cs);
    m_ulWorkThreadCount = 0;

    m_ulThreadPoolMin = 0;
    m_ulThreadPoolMax = 0;
    m_ulCPULowThreadsHold = 0;
    m_ulCPUHighThreadsHold = 0;
    m_ulCurrentThread = 0;
    m_ulBusyThread = 0;
    m_ulKeepLiveTime = 0;

    WSACleanup();
}

// Returns 0 on success; any other value is an error code.
UINT IOCPServer::StartServer(pfnNotifyProc NotifyProc, pfnOfflineProc OffProc, USHORT uPort)
{
    m_nPort = uPort;
    m_NotifyProc = NotifyProc;
    m_OfflineProc = OffProc;
    m_hKillEvent = CreateEvent(NULL, FALSE, FALSE, NULL);

    if (m_hKillEvent == NULL) {
        return 1;
    }

    m_sListenSocket = WSASocket(AF_INET, SOCK_STREAM, 0, NULL, 0, WSA_FLAG_OVERLAPPED); // create the listening socket

    if (m_sListenSocket == INVALID_SOCKET) {
        return 2;
    }

    m_hListenEvent = WSACreateEvent();

    if (m_hListenEvent == WSA_INVALID_EVENT) {
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
        return 3;
    }

    int iRet = WSAEventSelect(m_sListenSocket, // associate the listening socket with the event and register for FD_ACCEPT
        m_hListenEvent,
        FD_ACCEPT);

    if (iRet == SOCKET_ERROR) {
        int a = GetLastError();
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
        WSACloseEvent(m_hListenEvent);
        m_hListenEvent = WSA_INVALID_EVENT;
        return a;
    }

    SOCKADDR_IN ServerAddr;
    ServerAddr.sin_port = htons(uPort);
    ServerAddr.sin_family = AF_INET;
    ServerAddr.sin_addr.s_addr = INADDR_ANY; // listen on all local interfaces

    // Bind the listening socket to the local address.
    iRet = bind(m_sListenSocket,
        (sockaddr*)&ServerAddr,
        sizeof(ServerAddr));

    if (iRet == SOCKET_ERROR) {
        int a = GetLastError();
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
        WSACloseEvent(m_hListenEvent);
        m_hListenEvent = WSA_INVALID_EVENT;
        return a;
    }

    iRet = listen(m_sListenSocket, SOMAXCONN);

    if (iRet == SOCKET_ERROR) {
        int a = GetLastError();
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
        WSACloseEvent(m_hListenEvent);
        m_hListenEvent = WSA_INVALID_EVENT;
        return a;
    }

    m_hListenThread =
        (HANDLE)CreateThread(NULL,
        0,
        ListenThreadProc,
        (void*)this, // pass `this` to the thread callback so it can access class members
        0,
        NULL);
    if (m_hListenThread == NULL) {
        int a = GetLastError();
        closesocket(m_sListenSocket);
        m_sListenSocket = INVALID_SOCKET;
        WSACloseEvent(m_hListenEvent);
        m_hListenEvent = WSA_INVALID_EVENT;
        return a;
    }

    // Create the completion port and start the worker threads.
    InitializeIOCP();
    return 0;
}
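
// Usage sketch (illustrative only, not part of the server): how a caller might bring
// the server up. MyNotifyProc and MyOfflineProc are assumed to be callbacks defined by
// the caller with the pfnNotifyProc / pfnOfflineProc signatures; the port is arbitrary.
//
//     IOCPServer server;
//     UINT err = server.StartServer(MyNotifyProc, MyOfflineProc, 8080);
//     if (err != 0) {
//         Mprintf("StartServer failed with code %u\n", err);
//     }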

// 1. Create the completion port.
// 2. Create the worker threads.
BOOL IOCPServer::InitializeIOCP(VOID)
{
    m_hCompletionPort = CreateIoCompletionPort(INVALID_HANDLE_VALUE, NULL, 0, 0);
    if (m_hCompletionPort == NULL) {
        return FALSE;
    }

    if (m_hCompletionPort == INVALID_HANDLE_VALUE) {
        return FALSE;
    }

    SYSTEM_INFO SystemInfo;
    GetSystemInfo(&SystemInfo); // get the number of processor cores

    m_ulThreadPoolMin = 1;
    m_ulThreadPoolMax = SystemInfo.dwNumberOfProcessors * 2;
    m_ulCPULowThreadsHold = 10;
    m_ulCPUHighThreadsHold = 75;

    ULONG ulWorkThreadCount = m_ulThreadPoolMax;

    HANDLE hWorkThread = NULL;
    for (ULONG i = 0; i < ulWorkThreadCount; ++i) {
        hWorkThread = (HANDLE)CreateThread(NULL, // worker threads process the packets posted to the completion port
            0,
            WorkThreadProc,
            (void*)this,
            0,
            NULL);
        if (hWorkThread == NULL) {
            SAFE_CLOSE_HANDLE(m_hCompletionPort);
            return FALSE;
        }

        AddWorkThread(1);

        SAFE_CLOSE_HANDLE(hWorkThread);
    }

    return TRUE;
}
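
// Pool-sizing note: the loop above creates dwNumberOfProcessors * 2 worker threads, the
// common rule of thumb for IOCP so that some threads can keep draining the completion
// port while others block inside handlers; m_ulThreadPoolMin/Max and the CPU thresholds
// initialized here bound the additional threads that WorkThreadProc may spawn later.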

DWORD IOCPServer::WorkThreadProc(LPVOID lParam)
{
    // Per-thread decompression state.
    ZSTD_DCtx* m_Dctx = ZSTD_createDCtx(); // zstd decompression context
    z_stream m_stream = {};
    inflateInit2(&m_stream, 15);
    IOCPServer* This = (IOCPServer*)(lParam);

    HANDLE hCompletionPort = This->m_hCompletionPort;
    DWORD dwTrans = 0;

    PCONTEXT_OBJECT ContextObject = NULL;
    LPOVERLAPPED Overlapped = NULL;
    OVERLAPPEDPLUS* OverlappedPlus = NULL;
    ULONG ulBusyThread = 0;
    BOOL bError = FALSE;

    InterlockedIncrement(&This->m_ulCurrentThread);
    InterlockedIncrement(&This->m_ulBusyThread);
    timeBeginPeriod(1);
    while (This->m_bTimeToKill == FALSE) {
        InterlockedDecrement(&This->m_ulBusyThread);
        // GetQueuedCompletionStatus can block for a long time, which caps the rate at which clients can send data.
        BOOL bOk = GetQueuedCompletionStatus(
            hCompletionPort,
            &dwTrans,
            (PULONG_PTR)&ContextObject,
            &Overlapped, INFINITE);
        DWORD dwIOError = GetLastError();
        OverlappedPlus = CONTAINING_RECORD(Overlapped, OVERLAPPEDPLUS, m_ol);
        ulBusyThread = InterlockedIncrement(&This->m_ulBusyThread);
        if (!bOk && dwIOError != WAIT_TIMEOUT) { // the peer socket was closed
            if (ContextObject && This->m_bTimeToKill == FALSE && dwTrans == 0) {
                ContextObject->olps = NULL;
                Mprintf("!!! RemoveStaleContext: %d \n", WSAGetLastError());
                This->RemoveStaleContext(ContextObject);
            }
            SAFE_DELETE(OverlappedPlus);
            continue;
        }
        if (!bError) {
            // Add a new thread to the thread pool when every thread is busy.
            if (ulBusyThread == This->m_ulCurrentThread) {
                if (ulBusyThread < This->m_ulThreadPoolMax) {
                    if (ContextObject != NULL) {
                        HANDLE hThread = (HANDLE)CreateThread(NULL,
                            0,
                            WorkThreadProc,
                            (void*)This,
                            0,
                            NULL);

                        This->AddWorkThread(hThread ? 1 : 0);

                        SAFE_CLOSE_HANDLE(hThread);
                    }
                }
            }

            if (!bOk && dwIOError == WAIT_TIMEOUT) {
                if (ContextObject == NULL) {
                    if (This->m_ulCurrentThread > This->m_ulThreadPoolMin) {
                        break;
                    }

                    bError = TRUE;
                }
            }
        }

        if (!bError && !This->m_bTimeToKill) {
            if (bOk && OverlappedPlus != NULL && ContextObject != NULL) {
                try {
                    This->HandleIO(OverlappedPlus->m_ioType, ContextObject, dwTrans, m_Dctx, &m_stream);

                    ContextObject = NULL;
                } catch (...) {
                    Mprintf("This->HandleIO caught an error!!!");
                }
            }
        }

        SAFE_DELETE(OverlappedPlus);
    }
    timeEndPeriod(1);
    SAFE_DELETE(OverlappedPlus);

    InterlockedDecrement(&This->m_ulCurrentThread);
    InterlockedDecrement(&This->m_ulBusyThread);
    int n = This->AddWorkThread(-1);
    if (n == 0) {
        Mprintf("======> IOCPServer All WorkThreadProc done\n");
    }

    inflateEnd(&m_stream);
    ZSTD_freeDCtx(m_Dctx);

    return 0;
}
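
// How a completion is mapped back to its connection (a description of the pattern used
// above, assuming the OVERLAPPEDPLUS definition in the header): the CONTEXT_OBJECT* is
// registered as the completion key in CreateIoCompletionPort, and every posted operation
// carries an OVERLAPPEDPLUS whose m_ol member is the OVERLAPPED handed to WSARecv/WSASend,
// so CONTAINING_RECORD(Overlapped, OVERLAPPEDPLUS, m_ol) recovers the wrapper and its
// m_ioType for dispatch in HandleIO.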

// Called from the worker threads.
BOOL IOCPServer::HandleIO(IOType PacketFlags, PCONTEXT_OBJECT ContextObject, DWORD dwTrans, ZSTD_DCtx* ctx, z_stream* z)
{
    BOOL bRet = FALSE;

    switch (PacketFlags) {
    case IOInitialize:
        bRet = OnClientInitializing(ContextObject, dwTrans);
        break;
    case IORead:
        bRet = OnClientReceiving(ContextObject, dwTrans, ctx, z);
        break;
    case IOWrite:
        bRet = OnClientPostSending(ContextObject, dwTrans);
        break;
    case IOIdle:
        Mprintf("=> HandleIO PacketFlags= IOIdle\n");
        break;
    default:
        break;
    }

    return bRet;
}
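
// Dispatch summary: IOInitialize is posted by OnAccept when a client connects, IORead is
// delivered when a receive posted by PostRecv completes, and IOWrite is posted by
// OnClientPreSending and continued by OnClientPostSending; IOIdle is only logged.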

BOOL IOCPServer::OnClientInitializing(PCONTEXT_OBJECT ContextObject, DWORD dwTrans)
{
    return TRUE;
}

// Maybe this function should be a member of `CONTEXT_OBJECT`.
BOOL ParseReceivedData(CONTEXT_OBJECT* ContextObject, DWORD dwTrans, pfnNotifyProc m_NotifyProc, ZSTD_DCtx* m_Dctx, z_stream* z)
{
    AUTO_TICK(50, ContextObject->GetPeerName());
    BOOL ret = 1;
    try {
        if (dwTrans == 0) { // the peer closed the socket
            return FALSE;
        }
        // Copy the received data (from the 8192-byte WSABUF) into our own buffer.
        ContextObject->InCompressedBuffer.WriteBuffer((PBYTE)ContextObject->szBuffer, dwTrans);
        // Check whether a complete packet has arrived.
        while (true) {
            PR pr = ContextObject->Parse(ContextObject->InCompressedBuffer);
            if (pr.IsFailed()) {
                ContextObject->InCompressedBuffer.ClearBuffer();
                break;
            } else if (pr.IsNeedMore()) {
                break;
            } else if (pr.IsWinOSLogin()) {
                ContextObject->InDeCompressedBuffer.ClearBuffer();
                ULONG ulCompressedLength = 0;
                ULONG ulOriginalLength = 0;
                PBYTE CompressedBuffer = ContextObject->ReadBuffer(ulCompressedLength, ulOriginalLength);
                ContextObject->InDeCompressedBuffer.WriteBuffer(CompressedBuffer, ulCompressedLength);
                if (m_NotifyProc(ContextObject))
                    ret = CompressedBuffer[0] == TOKEN_LOGIN ? 999 : 1;
                // CompressedBuffer is owned by CONTEXT_OBJECT; do not free it here.
                break;
            }

            ULONG ulPackTotalLength = 0;
            ContextObject->InCompressedBuffer.CopyBuffer(&ulPackTotalLength, sizeof(ULONG), pr.Result);
            // Read the total packet length: 5-byte flag + 4-byte total packet length + 4-byte original data length.
            int bufLen = ContextObject->InCompressedBuffer.GetBufferLength();
            if (ulPackTotalLength && bufLen >= ulPackTotalLength) {
                ULONG ulCompressedLength = 0;
                ULONG ulOriginalLength = 0;
                PBYTE CompressedBuffer = ContextObject->ReadBuffer(ulCompressedLength, ulOriginalLength);
                if (ContextObject->CompressMethod == COMPRESS_UNKNOWN) {
                    // CompressedBuffer is owned by CONTEXT_OBJECT; do not free it here.
                    throw "Unknown method";
                } else if (ContextObject->CompressMethod == COMPRESS_NONE) {
                    ContextObject->InDeCompressedBuffer.ClearBuffer();
                    ContextObject->Decode(CompressedBuffer, ulOriginalLength);
                    ContextObject->InDeCompressedBuffer.WriteBuffer(CompressedBuffer, ulOriginalLength);
                    if (m_NotifyProc(ContextObject))
                        ret = CompressedBuffer[0] == TOKEN_LOGIN ? 999 : 1;
                    // CompressedBuffer is owned by CONTEXT_OBJECT; do not free it here.
                    continue;
                }
                bool usingZstd = ContextObject->CompressMethod == COMPRESS_ZSTD, zlibFailed = false;
                // Use a preallocated buffer to avoid frequent allocations.
                PBYTE DeCompressedBuffer = ContextObject->GetDecompressBuffer(ulOriginalLength);
                size_t iRet = usingZstd ?
                    Muncompress(DeCompressedBuffer, &ulOriginalLength, CompressedBuffer, ulCompressedLength) :
                    z_uncompress(z, DeCompressedBuffer, &ulOriginalLength, CompressedBuffer, ulCompressedLength);
                if (usingZstd ? C_SUCCESS(iRet) : (S_OK == iRet)) {
                    ContextObject->InDeCompressedBuffer.ClearBuffer();
                    ContextObject->Decode(DeCompressedBuffer, ulOriginalLength);
                    ContextObject->InDeCompressedBuffer.WriteBuffer(DeCompressedBuffer, ulOriginalLength);
                    if (m_NotifyProc(ContextObject))
                        ret = DeCompressedBuffer[0] == TOKEN_LOGIN ? 999 : 1;
                } else if (usingZstd) {
                    // Fall back to zlib decompression.
                    if (Z_OK == z_uncompress(z, DeCompressedBuffer, &ulOriginalLength, CompressedBuffer, ulCompressedLength)) {
                        ContextObject->CompressMethod = COMPRESS_ZLIB;
                        ContextObject->InDeCompressedBuffer.ClearBuffer();
                        ContextObject->Decode(DeCompressedBuffer, ulOriginalLength);
                        ContextObject->InDeCompressedBuffer.WriteBuffer(DeCompressedBuffer, ulOriginalLength);
                        if (m_NotifyProc(ContextObject))
                            ret = DeCompressedBuffer[0] == TOKEN_LOGIN ? 999 : 1;
                    } else {
                        zlibFailed = true;
                        ContextObject->CompressMethod = COMPRESS_UNKNOWN;
                    }
                } else {
                    zlibFailed = true;
                }
                // CompressedBuffer and DeCompressedBuffer are owned by CONTEXT_OBJECT; do not free them here.
                if (zlibFailed) {
                    Mprintf("[ERROR] ZLIB uncompress failed \n");
                    throw "Bad Buffer";
                }
            } else {
                break;
            }
        }
    } catch (...) {
        Mprintf("[ERROR] OnClientReceiving catch an error \n");
        ContextObject->InCompressedBuffer.ClearBuffer();
        ContextObject->InDeCompressedBuffer.ClearBuffer();
    }
    return ret;
}
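
// Wire format assumed by the parser above, reconstructed from the length comment; the
// authoritative layout lives in CONTEXT_OBJECT::Parse/ReadBuffer, so this is only an
// illustration:
//
//     #pragma pack(push, 1)
//     struct PacketHeaderSketch {
//         BYTE  Flag[5];          // 5-byte packet identifier
//         ULONG TotalLength;      // 4-byte total packet length
//         ULONG OriginalLength;   // 4-byte uncompressed payload length
//     };                          // ...followed by the (possibly compressed) payload
//     #pragma pack(pop)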

BOOL IOCPServer::OnClientReceiving(PCONTEXT_OBJECT ContextObject, DWORD dwTrans, ZSTD_DCtx* ctx, z_stream* z)
{
    if (FALSE == ParseReceivedData(ContextObject, dwTrans, m_NotifyProc, ctx, z)) {
        RemoveStaleContext(ContextObject);
        return FALSE;
    }

    PostRecv(ContextObject); // post a new receive request

    return TRUE;
}

BOOL WriteContextData(CONTEXT_OBJECT* ContextObject, PBYTE szBuffer, size_t ulOriginalLength, ZSTD_CCtx* m_Cctx, z_stream* z)
{
    assert(ContextObject);
    // Log the command the server is sending.
    int cmd = szBuffer[0];
    if (ulOriginalLength < 100 && cmd != COMMAND_SCREEN_CONTROL && cmd != CMD_HEARTBEAT_ACK &&
        cmd != CMD_DRAW_POINT && cmd != CMD_MOVEWINDOW && cmd != CMD_SET_SIZE) {
        char buf[100] = { 0 };
        if (ulOriginalLength == 1) {
            sprintf_s(buf, "command %d", cmd);
        } else {
            memcpy(buf, szBuffer, ulOriginalLength);
        }
        Mprintf("[COMMAND] Send: %s\r\n", buf);
    }
    try {
        do {
            if (ulOriginalLength <= 0) return FALSE;
            if (ContextObject->CompressMethod == COMPRESS_UNKNOWN) {
                Mprintf("[ERROR] UNKNOWN compress method \n");
                return FALSE;
            } else if (ContextObject->CompressMethod == COMPRESS_NONE) {
                Buffer tmp(szBuffer, ulOriginalLength);
                szBuffer = tmp.Buf();
                ContextObject->WriteBuffer(szBuffer, ulOriginalLength, ulOriginalLength, cmd);
                break;
            }
            bool usingZstd = ContextObject->CompressMethod == COMPRESS_ZSTD;
            unsigned long ulCompressedLength = usingZstd ?
                ZSTD_compressBound(ulOriginalLength) : (unsigned long)((double)ulOriginalLength * 1.001 + 12);
            // Use a preallocated buffer instead of new'ing one each time.
            LPBYTE CompressedBuffer = ContextObject->GetSendCompressBuffer(ulCompressedLength);
            Buffer tmp(szBuffer, ulOriginalLength);
            szBuffer = tmp.Buf();
            ContextObject->Encode(szBuffer, ulOriginalLength);
            if (!m_Cctx) ContextObject->Encode(szBuffer, ulOriginalLength, usingZstd);
            size_t iRet = usingZstd ?
                Mcompress(CompressedBuffer, &ulCompressedLength, (LPBYTE)szBuffer, ulOriginalLength, ContextObject->GetZstdLevel()) :
                compress(CompressedBuffer, &ulCompressedLength, (LPBYTE)szBuffer, ulOriginalLength);

            if (usingZstd ? C_FAILED(iRet) : (S_OK != iRet)) {
                Mprintf("[ERROR] compress failed \n");
                // SendCompressBuffer is owned by CONTEXT_OBJECT; do not free it here.
                return FALSE;
            }

            ulCompressedLength = usingZstd ? iRet : ulCompressedLength;

            ContextObject->WriteBuffer(CompressedBuffer, ulCompressedLength, ulOriginalLength, cmd);
            // SendCompressBuffer is owned by CONTEXT_OBJECT; do not free it here.
        } while (false);

        return TRUE;
    } catch (...) {
        Mprintf("[ERROR] OnClientPreSending catch an error \n");
        return FALSE;
    }
}
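
// Sizing note for the send path above: ZSTD_compressBound() is zstd's worst-case output
// size for a given input, and `ulOriginalLength * 1.001 + 12` is the classic zlib
// worst-case bound (zlib's compressBound() is the modern equivalent), so the preallocated
// send buffer is always large enough for whichever codec is selected.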

BOOL IOCPServer::OnClientPreSending(CONTEXT_OBJECT* ContextObject, PBYTE szBuffer, size_t ulOriginalLength)
{
    if (WriteContextData(ContextObject, szBuffer, ulOriginalLength, ContextObject->Zcctx, NULL /* z_stream unused by WriteContextData */)) {
        OVERLAPPEDPLUS* OverlappedPlus = new OVERLAPPEDPLUS(IOWrite);
        BOOL bOk = PostQueuedCompletionStatus(m_hCompletionPort, 0, (ULONG_PTR)ContextObject, &OverlappedPlus->m_ol);
        if (!bOk && GetLastError() != ERROR_IO_PENDING) { // posting failed
            int a = GetLastError();
            Mprintf("!!! OnClientPreSending failed to post the message\n");
            RemoveStaleContext(ContextObject);
            SAFE_DELETE(OverlappedPlus);
            return FALSE;
        }
        return TRUE;
    }
    return FALSE;
}
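
// Usage sketch (illustrative only): pushing a small command to a connected client. The
// payload here is hypothetical; real callers pass a buffer whose first byte is one of the
// protocol's command tokens, as checked in WriteContextData.
//
//     BYTE packet[1] = { CMD_HEARTBEAT_ACK };   // example token from this protocol
//     server.OnClientPreSending(ContextObject, packet, sizeof(packet));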

BOOL IOCPServer::OnClientPostSending(CONTEXT_OBJECT* ContextObject, ULONG ulCompletedLength)
{
    CAutoCLock L(ContextObject->SendLock);
    try {
        DWORD ulFlags = MSG_PARTIAL;

        ContextObject->OutCompressedBuffer.RemoveCompletedBuffer(ulCompletedLength); // remove the completed portion from the buffer
        if (ContextObject->OutCompressedBuffer.GetBufferLength() == 0) {
            ContextObject->OutCompressedBuffer.ClearBuffer();
            return TRUE; // reaching here means the data has been fully sent
        } else {
            OVERLAPPEDPLUS* OverlappedPlus = new OVERLAPPEDPLUS(IOWrite); // not fully sent yet, post another send request

            ContextObject->wsaOutBuffer.buf = (char*)ContextObject->OutCompressedBuffer.GetBuffer(0);
            ContextObject->wsaOutBuffer.len = ContextObject->OutCompressedBuffer.GetBufferLength();
            int iOk = WSASend(ContextObject->sClientSocket, &ContextObject->wsaOutBuffer, 1,
                NULL, ulFlags, &OverlappedPlus->m_ol, NULL);
            if (iOk == SOCKET_ERROR && WSAGetLastError() != WSA_IO_PENDING) {
                if (RemoveStaleContext(ContextObject))
                    Mprintf("!!! OnClientPostSending failed to post the send: %d\n", WSAGetLastError());
                SAFE_DELETE(OverlappedPlus);
                return FALSE;
            }
            return TRUE;
        }
    } catch (...) {
        Mprintf("[ERROR] OnClientPostSending catch an error \n");
    }

    return FALSE;
}
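
// Partial-send handling: a single completion may cover only part of OutCompressedBuffer,
// so the completed byte count is trimmed off and, while anything remains, another IOWrite
// is posted for the tail until the buffer drains to zero.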

DWORD IOCPServer::ListenThreadProc(LPVOID lParam) // listener thread
{
    IOCPServer* This = (IOCPServer*)(lParam);
    WSANETWORKEVENTS NetWorkEvents;

    while (!This->m_bTimeToKill) {
        if (WaitForSingleObject(This->m_hKillEvent, 100) == WAIT_OBJECT_0)
            break;

        DWORD dwRet;
        dwRet = WSAWaitForMultipleEvents(1, &This->m_hListenEvent, FALSE, 100, FALSE);
        if (dwRet == WSA_WAIT_TIMEOUT)
            continue;

        // The listen event is signaled; enumerate the network events to see what happened.
        int iRet = WSAEnumNetworkEvents(This->m_sListenSocket,
            This->m_hListenEvent,
            &NetWorkEvents);

        if (iRet == SOCKET_ERROR)
            break;

        if (NetWorkEvents.lNetworkEvents & FD_ACCEPT) {
            if (NetWorkEvents.iErrorCode[FD_ACCEPT_BIT] == 0) {
                This->OnAccept();
            } else {
                break;
            }
        }
    }
    This->m_hListenThread = NULL;
    return 0;
}

void IOCPServer::OnAccept()
{
    SOCKADDR_IN ClientAddr = {0};
    SOCKET sClientSocket = INVALID_SOCKET;

    int iLen = sizeof(SOCKADDR_IN);
    sClientSocket = accept(m_sListenSocket,
        (sockaddr*)&ClientAddr,
        &iLen); // accept a new client socket from the listening socket
    if (sClientSocket == INVALID_SOCKET) {
        return;
    }

    // Allocate a per-connection data structure (the client's "context") for the new arrival.
    PCONTEXT_OBJECT ContextObject = AllocateContext(sClientSocket);

    if (ContextObject == NULL) {
        closesocket(sClientSocket);
        sClientSocket = INVALID_SOCKET;
        return;
    }

    ContextObject->sClientSocket = sClientSocket;

    ContextObject->wsaInBuf.buf = (char*)ContextObject->szBuffer;
    ContextObject->wsaInBuf.len = sizeof(ContextObject->szBuffer);

    HANDLE Handle = CreateIoCompletionPort((HANDLE)sClientSocket, m_hCompletionPort, (ULONG_PTR)ContextObject, 0);

    if (Handle != m_hCompletionPort) {
        delete ContextObject;
        ContextObject = NULL;

        if (sClientSocket != INVALID_SOCKET) {
            closesocket(sClientSocket);
            sClientSocket = INVALID_SOCKET;
        }

        return;
    }

    // Enable TCP keep-alive (SO_KEEPALIVE) on the socket.
    // Keep-alive detects a crashed peer: by default, if no data is exchanged in either
    // direction for 2 hours, TCP sends the peer a keep-alive probe.
    m_ulKeepLiveTime = 3 * 60 * 1000; // probe after 3 minutes of inactivity (SIO_KEEPALIVE_VALS takes milliseconds)
    const BOOL bKeepAlive = TRUE;
    setsockopt(ContextObject->sClientSocket, SOL_SOCKET, SO_KEEPALIVE, (char*)&bKeepAlive, sizeof(bKeepAlive));

    // Configure the keep-alive timing details.
    tcp_keepalive KeepAlive;
    KeepAlive.onoff = 1;                        // enable keep-alive
    KeepAlive.keepalivetime = m_ulKeepLiveTime; // no data for 3 minutes -> send a probe
    KeepAlive.keepaliveinterval = 1000 * 10;    // retry every 10 seconds if there is no reply
    DWORD dwBytesReturned = 0;
    WSAIoctl(ContextObject->sClientSocket, SIO_KEEPALIVE_VALS, &KeepAlive, sizeof(KeepAlive),
        NULL, 0, &dwBytesReturned, NULL, NULL);

    // Without SO_KEEPALIVE, a client that disappears abnormally (unplugged cable, power loss)
    // would leave the socket open indefinitely; the default 2-hour timeout is too long, so it
    // is shortened above.
    EnterCriticalSection(&m_cs);
    m_ContextConnectionList.AddTail(ContextObject); // add to the connection list
    LeaveCriticalSection(&m_cs);

    OVERLAPPEDPLUS *OverlappedPlus = new OVERLAPPEDPLUS(IOInitialize); // this overlapped request represents the client coming online

    // We received a client-online request, so post it to the completion port for a worker thread to handle.
    BOOL bOk = PostQueuedCompletionStatus(m_hCompletionPort, 0, (ULONG_PTR)ContextObject, &OverlappedPlus->m_ol);
    if (!bOk && GetLastError() != ERROR_IO_PENDING) { // posting failed
        int a = GetLastError();
        Mprintf("!!! OnAccept failed to post the message\n");
        RemoveStaleContext(ContextObject);
        SAFE_DELETE(OverlappedPlus);
        return;
    }

    PostRecv(ContextObject);
}
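
// Keep-alive sketch: SIO_KEEPALIVE_VALS expects both keepalivetime and keepaliveinterval
// in milliseconds, which is why m_ulKeepLiveTime above is expressed in milliseconds. A
// minimal standalone version of the same configuration, assuming a connected SOCKET s:
//
//     tcp_keepalive ka = { 1, 3 * 60 * 1000, 10 * 1000 };
//     DWORD bytes = 0;
//     WSAIoctl(s, SIO_KEEPALIVE_VALS, &ka, sizeof(ka), NULL, 0, &bytes, NULL, NULL);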

VOID IOCPServer::PostRecv(CONTEXT_OBJECT* ContextObject)
{
    // Post a receive request for the newly connected client. When the client's first
    // packet arrives (the controlled end's login request), a worker thread picks it up
    // and dispatches it through HandleIO.
    OVERLAPPEDPLUS* OverlappedPlus = new OVERLAPPEDPLUS(IORead);
    ContextObject->olps = OverlappedPlus;

    DWORD dwReturn;
    ULONG ulFlags = MSG_PARTIAL;
    int iOk = WSARecv(ContextObject->sClientSocket, &ContextObject->wsaInBuf,
        1, &dwReturn, &ulFlags, &OverlappedPlus->m_ol, NULL);

    if (iOk == SOCKET_ERROR && WSAGetLastError() != WSA_IO_PENDING) {
        int a = GetLastError();
        Mprintf("!!! PostRecv failed to post the receive\n");
        RemoveStaleContext(ContextObject);
        SAFE_DELETE(OverlappedPlus);
    }
}

PCONTEXT_OBJECT IOCPServer::AllocateContext(SOCKET s)
{
    PCONTEXT_OBJECT ContextObject = NULL;

    CLock cs(m_cs);

    if (m_ContextConnectionList.GetCount() >= m_ulMaxConnections) {
        return NULL;
    }

    ContextObject = !m_ContextFreePoolList.IsEmpty() ? m_ContextFreePoolList.RemoveHead() : new CONTEXT_OBJECT;

    if (ContextObject != NULL) {
        ContextObject->InitMember(s, this);
    }

    return ContextObject;
}
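
// Allocation strategy: contexts from closed connections are parked on
// m_ContextFreePoolList (see MoveContextToFreePoolList below) and reused here before
// falling back to `new CONTEXT_OBJECT`, keeping per-connection allocations off the hot
// accept path once the pool is warm.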

BOOL IOCPServer::RemoveStaleContext(CONTEXT_OBJECT* ContextObject)
{
    EnterCriticalSection(&m_cs);
    auto find = m_ContextConnectionList.Find(ContextObject);
    LeaveCriticalSection(&m_cs);
    if (find) { // the context is still in the connection list
        m_OfflineProc(ContextObject);

        CancelIo((HANDLE)ContextObject->sClientSocket); // cancel pending asynchronous I/O on this socket (see PostRecv)
        closesocket(ContextObject->sClientSocket);      // close the socket
        ContextObject->sClientSocket = INVALID_SOCKET;

        while (!HasOverlappedIoCompleted((LPOVERLAPPED)ContextObject)) { // wait until no overlapped I/O remains pending on this socket
            Sleep(0);
        }

        MoveContextToFreePoolList(ContextObject); // recycle the context into the free pool
        return TRUE;
    }
    return FALSE;
}

VOID IOCPServer::MoveContextToFreePoolList(CONTEXT_OBJECT* ContextObject)
{
    CLock cs(m_cs);

    POSITION Pos = m_ContextConnectionList.Find(ContextObject);
    if (Pos) {
        ContextObject->InCompressedBuffer.ClearBuffer();
        ContextObject->InDeCompressedBuffer.ClearBuffer();
        ContextObject->OutCompressedBuffer.ClearBuffer();

        memset(ContextObject->szBuffer, 0, sizeof(ContextObject->szBuffer));
        m_ContextFreePoolList.AddTail(ContextObject); // return to the free pool
        m_ContextConnectionList.RemoveAt(Pos);        // remove from the connection list
    }
}

void IOCPServer::UpdateMaxConnection(int maxConn)
{
    CLock cs(m_cs);
    m_ulMaxConnections = maxConn;
}