Я пытаюсь понять, есть ли польза от использования Redis для кэширования данных. Но MongoDb, кажется, пишет данные быстрее. Это с одним экземпляром MongoDb без репликации.
Я сравниваю конвейерное (pipeline) и пакетное сохранение в Redis, записывая 100 тыс. строк, с сохранением в MongoDb по 1 или по 100 строк за раз. Я также сравниваю объединение 100 строк в один документ MongoDb или в одно значение Redis для более быстрого сохранения.
Результаты:
Mongo: Saved 100000 rows, 100 rows at a time, 1564 elapsed milliseconds: 0.01564 ms per row. Num rows inserted: 100000
Redis list pipe Saved 100000 rows to, 100 row at a time, 2030 elapsed milliseconds: 0.0203 ms per row. new listLength: 100000
Redis list batch Saved 100000 rows, 100 row at a time, 2101 elapsed milliseconds: 0.02101 ms per row. new listLength: 100000
Redis list pipe complex Saved 100000 rows to, 10000 rows at a time, total elapsedMilliseconds: 187; 0.00187 ms per row. new listLength: 1000
Mongo: complex Saved 100000 rows, 10000 rows at a time, total 135 elapsed milliseconds: 0.00135 ms per row. Num rows inserted: 1000
Чтобы запустить приведённый ниже код C#, используйте MSVS 2019 и NuGet, чтобы добавить пакеты StackExchange.Redis, MongoDB.Driver и Newtonsoft.Json.
Вывод: MongoDb сохраняет быстрее. Кто-нибудь может сказать, почему этот тест несправедлив? Мне нужно понять, не ошибаюсь ли я, прежде чем мы пойдём по пути MongoDb и проигнорируем слой кэширования.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Threading.Tasks;
using MongoDB.Driver;
using Newtonsoft.Json;
using StackExchange.Redis;
namespace MongoAndRedisPerformanceTests
{
public class MGpsDatumJsonComplex
{
    /// <summary>
    /// Persistence wrapper that carries one pre-serialized JSON string
    /// (many GPS rows flattened into a single stored document/value).
    /// </summary>
    /// <param name="pJsonComplex">Already-serialized JSON payload to store.</param>
    public MGpsDatumJsonComplex( string pJsonComplex )
    {
        this.jsonComplex = pJsonComplex;
    }

    // Raw JSON payload. Casing kept as-is: the property name is part of the
    // stored document schema used by the benchmarks.
    public string jsonComplex { get; set; }
}
public class MGpsDatum
{
    /// <summary>
    /// One GPS sample row, as stored by the benchmarks.
    /// The constructor fills the core fields; the remaining properties
    /// (OdoKm, AuxFloat1, ...) are left at their defaults.
    /// </summary>
    /// <param name="pUnitID">Identifier of the reporting unit.</param>
    /// <param name="pX">X coordinate of the fix.</param>
    /// <param name="pY">Y coordinate of the fix.</param>
    /// <param name="pTimeStamp">Time of the GPS fix.</param>
    /// <param name="pSpeedKmph">Speed in km/h at the fix.</param>
    /// <param name="pDistanceKM">Distance in km, if known.</param>
    /// <param name="pTimeInsertedToProviderDb">When the provider stored the row, if known.</param>
    /// <param name="pOptionTemplateID">Option template reference.</param>
    public MGpsDatum( int pUnitID, float pX, float pY, DateTime pTimeStamp, byte pSpeedKmph, float? pDistanceKM, DateTime? pTimeInsertedToProviderDb, int pOptionTemplateID )
    {
        // Sentinel: -1 means "not yet assigned by a database".
        this.ID = -1;
        // Fixed placeholder external reference used by the benchmarks.
        this.extRef = "1000000000";
        this.UnitID = pUnitID;
        this.X = pX;
        this.Y = pY;
        this.TimeStamp = pTimeStamp;
        this.SpeedKmph = pSpeedKmph;
        this.DistanceKM = pDistanceKM;
        this.TimeInsertedToProviderDb = pTimeInsertedToProviderDb;
        this.OptionTemplateID = pOptionTemplateID;
    }

    // NOTE: property declaration order is preserved deliberately — both the
    // Mongo driver and Newtonsoft serialize members in declaration order, so
    // reordering would change the stored document layout.
    public string extRef { get; set; }
    public int ID { get; set; }
    public int UnitID { get; set; }
    public float X { get; set; }
    public float Y { get; set; }
    public DateTime TimeStamp { get; set; }
    public float? DistanceKM { get; set; }
    public byte SpeedKmph { get; set; }
    public int? OptionTemplateID { get; set; }
    public DateTime? TimeInsertedToProviderDb { get; set; }
    public float? OdoKm { get; set; }
    public float? AuxFloat1 { get; set; }
    public bool HasStoppedOrAtStopStatus { get; set; }
    public DateTime TimeSavedToOSGPSdb { get; set; }
}
class Program
{
    // Every benchmark follows the same pattern: build one sample row, time N
    // inserts against a locally running server, report milliseconds per row.
    // NOTE(review): clients/multiplexers are deliberately not disposed — this
    // is a throw-away benchmark process that ends at Console.ReadKey().

    /// <summary>Connects to the local MongoDB instance and returns the benchmark database.</summary>
    static IMongoDatabase ConnectMongo()
    {
        MongoClientSettings mongoSettings = new MongoClientSettings();
        mongoSettings.Server = new MongoServerAddress( "127.0.0.1" );
        return new MongoClient( mongoSettings ).GetDatabase( "OpsiService" );
    }

    /// <summary>Connects to local Redis, selects logical DB 1 and clears the test list key.</summary>
    static IDatabase ConnectRedisAndClearQueue()
    {
        ConnectionMultiplexer redis = ConnectionMultiplexer.Connect( "localhost:6379,connectTimeout=20000" );
        IDatabase db = redis.GetDatabase( 1 );
        db.KeyDelete( "GpsTestQueue" );
        return db;
    }

    /// <summary>Builds the single sample GPS row reused by every benchmark.</summary>
    static MGpsDatum MakeSampleDatum()
    {
        return new MGpsDatum( 1, 1, 1, DateTime.Now, 0, null, DateTime.Now, 1 );
    }

    /// <summary>Average cost per row, in milliseconds.</summary>
    static float MsPerRow( long elapsedMilliseconds, int numRows )
    {
        return (float) elapsedMilliseconds / numRows;
    }

    /// <summary>Baseline: insert 10 000 documents into Mongo one InsertOne at a time.</summary>
    static void MongoDataSaveSpeedTest()
    {
        IMongoCollection<MGpsDatum> posCollection = ConnectMongo().GetCollection<MGpsDatum>( "GpsDataSpeedTest" );
        MGpsDatum gpsDatum = MakeSampleDatum();
        int numRows = 10000;
        // Stopwatch, not DateTime.Now subtraction: monotonic and higher resolution.
        Stopwatch timer = Stopwatch.StartNew();
        for (int i = 0; i < numRows; i++)
        {
            posCollection.InsertOne( gpsDatum );
        }
        timer.Stop();
        Console.WriteLine( $"Saved {numRows} rows to Mongo collection, 1 row at a time, {timer.ElapsedMilliseconds} elapsed milliseconds: {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row." );
    }

    /// <summary>Mongo batch insert: numRows rows, InsertMany of batchSize documents at a time.</summary>
    static void MongoDataSaveSpeedTest_MultiInsert( int numRows, int batchSize )
    {
        IMongoDatabase db = ConnectMongo();
        // Start from an empty collection so the final count verifies the insert total.
        db.DropCollection( "GpsDataSpeedTest" );
        IMongoCollection<MGpsDatum> posCollection = db.GetCollection<MGpsDatum>( "GpsDataSpeedTest" );
        MGpsDatum gpsDatum = MakeSampleDatum();
        List<MGpsDatum> listGpsDatum = new List<MGpsDatum>( batchSize );
        for (int i = 0; i < batchSize; i++)
        {
            listGpsDatum.Add( gpsDatum );
        }
        Stopwatch timer = Stopwatch.StartNew();
        // Assumes numRows is a multiple of batchSize (true for the values in Main).
        for (int i = 0; i < numRows / batchSize; i++)
        {
            posCollection.InsertMany( listGpsDatum );
        }
        timer.Stop();
        long numRowsAfterInsert = posCollection.CountDocuments( FilterDefinition<MGpsDatum>.Empty );
        Console.WriteLine( $"Mongo: Saved {numRows} rows, {batchSize} rows at a time, {timer.ElapsedMilliseconds} elapsed milliseconds: {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. Num rows inserted: {numRowsAfterInsert}" );
    }

    /// <summary>
    /// Mongo "complex" variant: numRowsInRecord rows are serialized into one JSON
    /// string per document, batchSize documents per InsertMany.
    /// </summary>
    static void MongoDataSaveSpeedTest_MultiInsert_complex( int numRows, int batchSize, int numRowsInRecord )
    {
        IMongoDatabase db = ConnectMongo();
        db.DropCollection( "GpsDataSpeedTestComplex" );
        IMongoCollection<MGpsDatumJsonComplex> posCollection = db.GetCollection<MGpsDatumJsonComplex>( "GpsDataSpeedTestComplex" );
        MGpsDatum gpsDatum = MakeSampleDatum();
        List<MGpsDatum> complexGpsDatum = new List<MGpsDatum>( numRowsInRecord );
        for (int i = 0; i < numRowsInRecord; i++)
        {
            complexGpsDatum.Add( gpsDatum );
        }
        MGpsDatumJsonComplex complexJson = new MGpsDatumJsonComplex( JsonConvert.SerializeObject( complexGpsDatum ) );
        List<MGpsDatumJsonComplex> listComplexGpsDatum = new List<MGpsDatumJsonComplex>( batchSize );
        for (int i = 0; i < batchSize; i++)
        {
            listComplexGpsDatum.Add( complexJson );
        }
        Stopwatch timer = Stopwatch.StartNew();
        for (int i = 0; i < numRows / (batchSize * numRowsInRecord); i++)
        {
            posCollection.InsertMany( listComplexGpsDatum );
        }
        timer.Stop();
        long numDocsAfterInsert = posCollection.CountDocuments( FilterDefinition<MGpsDatumJsonComplex>.Empty );
        Console.WriteLine( $"Mongo: complex Saved {numRows} rows, {batchSize * numRowsInRecord} rows at a time, total {timer.ElapsedMilliseconds} elapsed milliseconds: {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. Num rows inserted: {numDocsAfterInsert}" );
    }

    // https://stackoverflow.com/questions/27796054/pipelining-vs-batching-in-stackexchange-redis
    /// <summary>Redis pipelining: fire batchSize async LPUSHes, then wait for all of them.</summary>
    static void RedisSaveSpeedTest_Pipelining( int numRows, int batchSize )
    {
        IDatabase db = ConnectRedisAndClearQueue();
        string gpsDatumJson = JsonConvert.SerializeObject( MakeSampleDatum() );
        Stopwatch timer = Stopwatch.StartNew();
        for (int b = 0; b < numRows / batchSize; b++)
        {
            List<Task> addTasks = new List<Task>( batchSize );
            for (int i = 0; i < batchSize; i++)
            {
                addTasks.Add( db.ListLeftPushAsync( "GpsTestQueue", gpsDatumJson ) );
            }
            Task.WaitAll( addTasks.ToArray() );
        }
        timer.Stop();
        long listLength = db.ListLength( "GpsTestQueue" );
        Console.WriteLine( $"Redis list pipe Saved {numRows} rows to, {batchSize} row at a time, {timer.ElapsedMilliseconds} elapsed milliseconds: {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. new listLength: {listLength}" );
    }

    // https://stackoverflow.com/questions/27796054/pipelining-vs-batching-in-stackexchange-redis
    /// <summary>
    /// Redis batching via IBatch.
    /// BUG FIX: the original called db.ListLeftPushAsync, so the IBatch was created
    /// and executed EMPTY — the "batching" benchmark was really measuring plain
    /// pipelining a second time. Commands must be issued on the batch object itself;
    /// they are buffered until Execute() sends them contiguously on the connection.
    /// </summary>
    static void RedisSaveSpeedTest_Batching( int numRows, int batchSize )
    {
        IDatabase db = ConnectRedisAndClearQueue();
        string gpsDatumJson = JsonConvert.SerializeObject( MakeSampleDatum() );
        Stopwatch timer = Stopwatch.StartNew();
        for (int b = 0; b < numRows / batchSize; b++)
        {
            IBatch batch = db.CreateBatch();
            List<Task> addTasks = new List<Task>( batchSize );
            for (int i = 0; i < batchSize; i++)
            {
                // Issue on the batch, not on db — this is the fix.
                addTasks.Add( batch.ListLeftPushAsync( "GpsTestQueue", gpsDatumJson ) );
            }
            batch.Execute();
            Task.WaitAll( addTasks.ToArray() );
        }
        timer.Stop();
        long listLength = db.ListLength( "GpsTestQueue" );
        Console.WriteLine( $"Redis list batch Saved {numRows} rows, {batchSize} row at a time, {timer.ElapsedMilliseconds} elapsed milliseconds: {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. new listLength: {listLength}" );
    }

    /// <summary>
    /// Redis pipelining of "complex" values: numRowsInRecord rows concatenated
    /// into a single string per list entry.
    /// NOTE(review): plain concatenation means the stored value is NOT valid JSON
    /// (unlike the Mongo complex test, which serializes the whole list) — kept
    /// as-is because the benchmark only measures write throughput.
    /// </summary>
    static void RedisSaveSpeedTest_Pipelining_complex( int numRows, int batchSize, int numRowsInRecord )
    {
        IDatabase db = ConnectRedisAndClearQueue();
        string gpsDatumJson = JsonConvert.SerializeObject( MakeSampleDatum() );
        // StringBuilder instead of string += in a loop (that was O(n^2) in total copying).
        StringBuilder complexBuilder = new StringBuilder( gpsDatumJson.Length * numRowsInRecord );
        for (int i = 0; i < numRowsInRecord; i++)
        {
            complexBuilder.Append( gpsDatumJson );
        }
        string gpsDatumJsonComplex = complexBuilder.ToString();
        Stopwatch timer = Stopwatch.StartNew();
        int numBatchSaves = numRows / (numRowsInRecord * batchSize);
        for (int b = 0; b < numBatchSaves; b++)
        {
            List<Task> addTasks = new List<Task>( batchSize );
            for (int i = 0; i < batchSize; i++)
            {
                addTasks.Add( db.ListLeftPushAsync( "GpsTestQueue", gpsDatumJsonComplex ) );
            }
            Task.WaitAll( addTasks.ToArray() );
        }
        timer.Stop();
        long listLength = db.ListLength( "GpsTestQueue" );
        Console.WriteLine( $"Redis list pipe complex Saved {numRows} rows to, {batchSize * numRowsInRecord} rows at a time, total elapsedMilliseconds: {timer.ElapsedMilliseconds}; {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. new listLength: {listLength}" );
    }

    /// <summary>Baseline: push 10 000 rows to Redis with one synchronous LPUSH at a time.</summary>
    static void RedisSaveSpeedTest()
    {
        IDatabase db = ConnectRedisAndClearQueue();
        string gpsDatumJson = JsonConvert.SerializeObject( MakeSampleDatum() );
        int numRows = 10000;
        Stopwatch timer = Stopwatch.StartNew();
        for (int i = 0; i < numRows; i++)
        {
            db.ListLeftPush( "GpsTestQueue", gpsDatumJson );
        }
        timer.Stop();
        long listLength = db.ListLength( "GpsTestQueue" );
        Console.WriteLine( $"batch Saved {numRows} rows to Redis collection, 1 row at a time, {timer.ElapsedMilliseconds} elapsed milliseconds. {MsPerRow( timer.ElapsedMilliseconds, numRows )} ms per row. new listLength: {listLength}" );
    }

    /// <summary>Runs the batched Mongo and Redis benchmarks and waits for a key press.</summary>
    static void Main( string[] args )
    {
        //MongoDataSaveSpeedTest();
        //RedisSaveSpeedTest();
        int batchSize = 100;
        int numRows = 100000;
        int numRowsInRecord = 100;
        MongoDataSaveSpeedTest_MultiInsert( numRows, batchSize );
        RedisSaveSpeedTest_Pipelining( numRows, batchSize );
        RedisSaveSpeedTest_Batching( numRows, batchSize );
        RedisSaveSpeedTest_Pipelining_complex( numRows, batchSize, numRowsInRecord );
        MongoDataSaveSpeedTest_MultiInsert_complex( numRows, batchSize, numRowsInRecord );
        Console.ReadKey();
    }
}
}