Did a little more digging, and came up with this:
common/EQStream.cpp:
Code:
1003 void EQStream::Process(const unsigned char *buffer, const uint32 length)
1004 {
1005 	static unsigned char newbuffer[2048];
1006 	uint32 newlength=0;
1007 	if (EQProtocolPacket::ValidateCRC(buffer,length,Key)) {
1008 		if (compressed) {
1009 			newlength=EQProtocolPacket::Decompress(buffer,length,newbuffer,2048);
1010 		} else {
1011 			memcpy(newbuffer,buffer,length);
1012 			newlength=length;
1013 			if (encoded)
1014 				EQProtocolPacket::ChatDecode(newbuffer,newlength-2,Key);
1015 		}
1016 		if (buffer[1]!=0x01 && buffer[1]!=0x02 && buffer[1]!=0x1d)
1017 			newlength-=2;
1018 		EQProtocolPacket *p = MakeProtocolPacket(newbuffer,newlength);
1019 		ProcessPacket(p);
1020 		delete p;
1021 		ProcessQueue();
1022 	} else {
1023 		_log(NET__DEBUG, _L "Incoming packet failed checksum" __L);
1024 		_hex(NET__NET_CREATE_HEX, buffer, length);
1025 	}
1026 }
Key is assigned in a few places throughout EQStream.cpp, and the value seems to depend somewhat on the type of packet. For the most part, I think it's set to 0. Here's an example of where it isn't:
Code:
299 #ifndef COLLECTOR
300 Key=0x11223344;
301 SendSessionResponse();
302 #endif
310 Key=ntohl(Response->Key);
802 Response->Key=htonl(Key);
I'd dig deeper, but it's getting late for me. Good luck figuring out the encoding.
