According to MSDN, there's a limit on the number of entities a query against the Table service can return:
A query against the Table service may return a maximum of 1,000 entities at one time and may execute for a maximum of five seconds.
But when I wrote a sample to reproduce this limit, I didn't hit any cap on the number of returned entities. Here is my key code:
public class DataProvider
{
    public static string PartitionKey
    {
        get { return "PartitionKey"; }
    }

    public static IEnumerable<CustomerEntity> MoreThanThousandData()
    {
        // Reuse a single Random instance; creating one per iteration in a
        // tight loop seeds them identically and produces repeated ages.
        var random = new Random();
        var result = new List<CustomerEntity>();
        for (int i = 0; i < 1200; i++)
        {
            result.Add(new CustomerEntity(PartitionKey, Guid.NewGuid().ToString())
            {
                Name = Guid.NewGuid().ToString(),
                Age = random.Next(10, 70)
            });
        }
        return result;
    }
}
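For completeness, CustomerEntity and AzureTableService look roughly like this (trimmed to the members the sample uses; the connection string and table name here are placeholders):

// Minimal sketch of the supporting types referenced above.
public class CustomerEntity : TableEntity
{
    public CustomerEntity() { }

    public CustomerEntity(string partitionKey, string rowKey)
        : base(partitionKey, rowKey) { }

    public string Name { get; set; }
    public int Age { get; set; }
}

public static class AzureTableService
{
    // Placeholder account and table name.
    private static readonly CloudTable table = CloudStorageAccount
        .Parse("UseDevelopmentStorage=true")
        .CreateCloudTableClient()
        .GetTableReference("Customers");

    public static CloudTable Table
    {
        get
        {
            table.CreateIfNotExists();
            return table;
        }
    }
}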
Insert the 1,200 entities into the table:
public class AfterOptimize
{
    public void InsertDataToTable()
    {
        var cloudData = DataProvider.MoreThanThousandData();
        Console.WriteLine("Plan to insert {0} entities to the table.", cloudData.Count());
        InsertDataToTableInternal(AzureTableService.Table, cloudData);
    }

    private void InsertDataToTableInternal(CloudTable table, IEnumerable<ITableEntity> data)
    {
        // A single batch operation accepts at most 100 entities, and every
        // entity in a batch must share the same partition key (they all do
        // here), so split the data into chunks of 100 first.
        var chunkedData = data.Chunk(100);
        Parallel.ForEach(chunkedData, item =>
        {
            var batchInsertOperation = new TableBatchOperation();
            foreach (var tableEntity in item)
            {
                batchInsertOperation.Add(TableOperation.Insert(tableEntity));
            }
            table.ExecuteBatch(batchInsertOperation);
        });
    }
}
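The Chunk call above isn't part of the storage client; if you're not on .NET 6+ (where Enumerable.Chunk is built in), it's a small extension method along these lines:

public static class EnumerableExtensions
{
    // Splits a sequence into consecutive buckets of at most `size` items.
    public static IEnumerable<IEnumerable<T>> Chunk<T>(this IEnumerable<T> source, int size)
    {
        var bucket = new List<T>(size);
        foreach (var item in source)
        {
            bucket.Add(item);
            if (bucket.Count == size)
            {
                yield return bucket;
                bucket = new List<T>(size);
            }
        }
        if (bucket.Count > 0)
        {
            yield return bucket;
        }
    }
}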
Then read from the table; the partition keys are all the same here:
public void ReadCloudData()
{
    new AfterOptimize().InsertDataToTable(); // insert the 1,200 entities first

    var query = new TableQuery<CustomerEntity>().Where(
        TableQuery.GenerateFilterCondition("PartitionKey",
            QueryComparisons.Equal, DataProvider.PartitionKey));

    var result = AzureTableService.Table.ExecuteQuery(query);
    Console.WriteLine("Read {0} entities from table.", result.Count()); // outputs 1200
}
I'm only using the latest Azure Storage .NET client library, so why don't I see the 1,000-entity limit?
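If the documented cap applies per request rather than per query, it should be visible through the segmented API. A sketch of how that could be checked (assuming ExecuteQuerySegmented behaves as documented, returning one page of results plus a continuation token per call):

public void ReadCloudDataInSegments()
{
    var query = new TableQuery<CustomerEntity>().Where(
        TableQuery.GenerateFilterCondition("PartitionKey",
            QueryComparisons.Equal, DataProvider.PartitionKey));

    TableContinuationToken token = null;
    do
    {
        // One round trip per iteration; the service caps each response
        // at 1,000 entities and hands back a token if more remain.
        var segment = AzureTableService.Table.ExecuteQuerySegmented(query, token);
        Console.WriteLine("Segment contains {0} entities.", segment.Results.Count);
        token = segment.ContinuationToken;
    } while (token != null);
}

With 1,200 entities under one partition key, this would be expected to print two segments (1,000 and then 200) if the per-request cap holds.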