In the TCP SSL example from MSDN, a byte array is used to read data from the stream. Is there a reason the array is limited to 2048 bytes? What happens if TCP delivers a message longer than 2048 bytes? Also, how does passing buffer.Length to Read keep working as data continues to arrive? It doesn't make complete sense to me: why read buffer.Length bytes each time, rather than the number of incremental bytes actually coming in on the stream?
static string ReadMessage(SslStream sslStream)
{
    // Read the message sent by the client.
    // The client signals the end of the message using the
    // "<EOF>" marker.
    byte[] buffer = new byte[2048];
    StringBuilder messageData = new StringBuilder();
    // Use a single Decoder across reads so a UTF-8 character
    // that spans two buffers is decoded correctly.
    Decoder decoder = Encoding.UTF8.GetDecoder();
    int bytes = -1;
    do
    {
        // Read the client's test message. Read returns the number
        // of bytes actually received, up to buffer.Length.
        bytes = sslStream.Read(buffer, 0, buffer.Length);
        char[] chars = new char[decoder.GetCharCount(buffer, 0, bytes)];
        decoder.GetChars(buffer, 0, bytes, chars, 0);
        messageData.Append(chars);
        // Check for EOF or an empty message.
        if (messageData.ToString().IndexOf("<EOF>") != -1)
        {
            break;
        }
    } while (bytes != 0);
    return messageData.ToString();
}
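
To make my confusion concrete, here is how I currently picture the loop behaving, as a minimal sketch (using a MemoryStream as a stand-in for the SslStream, and a made-up 5000-byte payload, both just for illustration):

using System;
using System.IO;
using System.Text;

class ReadDemo
{
    static void Main()
    {
        // Hypothetical payload: deliberately larger than the 2048-byte buffer.
        byte[] payload = Encoding.UTF8.GetBytes(new string('x', 5000) + "<EOF>");
        using (var source = new MemoryStream(payload))
        {
            byte[] buffer = new byte[2048];
            int total = 0;
            int bytesRead;
            // Read places at most buffer.Length bytes into the buffer per call
            // and returns how many it actually read; 0 means end of stream.
            while ((bytesRead = source.Read(buffer, 0, buffer.Length)) != 0)
            {
                total += bytesRead;
                Console.WriteLine($"Read {bytesRead} bytes this pass");
            }
            Console.WriteLine($"Total: {total} bytes across multiple reads");
        }
    }
}

If Read always tried to fill the entire 2048 bytes, I'd expect the final pass to block or fail, so presumably the return value is what actually matters here. Is that the right way to think about it?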