Inserting bulk hashes with multiple hash field/value pairs using StackExchange.Redis

new9mtju · posted 2021-06-07 in Redis

I am using redis-server 4.0.9 and StackExchange.Redis (2.2.4).
I am trying to insert my CSV data into Redis hashes. The CSV contains ~10k records and is about 2 MB; inserting those 10k records takes around 17 seconds.
Here are all my test results:

- StringSetAsync, simple key/value pairs: ~100k records -> 8 sec.
- HashSetAsync, hash keys with 2 field/value pairs each: ~100k records -> 14 sec.
- HashSetAsync, hash keys built from the CSV: ~10k records -> 17 sec.

I am pipelining the commands to process these records.
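
The DataTable passed to testpipehashCSV below is built from the CSV beforehand; that loading code is not shown here. A minimal loader sketch, assuming a plain comma-separated file with a header row and no quoted fields (CsvLoader is a hypothetical name, not part of my project), would be:

using System.Data;
using System.IO;
using System.Linq;

public static class CsvLoader
{
    // Hypothetical helper: reads a simple CSV (header row, comma-separated,
    // no quoted fields) into a DataTable with string columns.
    public static DataTable Load(string path)
    {
        DataTable table = new DataTable();
        using (StreamReader reader = new StreamReader(path))
        {
            string headerLine = reader.ReadLine();
            if (headerLine == null) return table;

            foreach (string column in headerLine.Split(','))
                table.Columns.Add(column.Trim(), typeof(string));

            string line;
            while ((line = reader.ReadLine()) != null)
            {
                // Assumes every row has exactly as many fields as the header.
                table.Rows.Add(line.Split(',').Cast<object>().ToArray());
            }
        }
        return table;
    }
}

The result is passed straight to testpipehashCSV. My actual test code is below.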

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Threading.Tasks;
using StackExchange.Redis;

public class Redis_Test
{
    // Shared connection options; set by whichever constructor runs and read
    // by the lazy multiplexer below.
    private static ConfigurationOptions _configurationOptions;

    public Redis_Test(string argStrEndPoints)
    {
        _configurationOptions = new ConfigurationOptions
        {
            EndPoints = { argStrEndPoints },
            AbortOnConnectFail = false,
            KeepAlive = 60
        };
    }

    public Redis_Test(string argStrEndPoints, string argStrPassword)
    {
        _configurationOptions = new ConfigurationOptions
        {
            EndPoints = { argStrEndPoints },
            Password = argStrPassword,
            AbortOnConnectFail = false,
            KeepAlive = 60
        };
    }

    private static IDatabase RedisCache
    {
        get
        {
            return Connection.GetDatabase();
        }
    }

    private static readonly Lazy<ConnectionMultiplexer> LazyConnection
        = new Lazy<ConnectionMultiplexer>(() => ConnectionMultiplexer.Connect(_configurationOptions));

    public static ConnectionMultiplexer Connection
    {
        get
        {
            return LazyConnection.Value;
        }
    }

    public void testpipe()
    {

        // Queue SET commands without awaiting each one; wait in batches of 10k.
        List<Task> redisTask = new List<Task>();
        int batch = 10000;
        for (int i = 0; i <= 100000; i++)
        {

            Task<bool> addAsyncTask = RedisCache.StringSetAsync("testKey:" + i.ToString(), "Value:" + i.ToString());
            redisTask.Add(addAsyncTask);

            if (i == batch)   // drain the pipeline every 10k commands
            {
                Task[] TaskArray = redisTask.ToArray();
                Task.WaitAll(TaskArray);

                redisTask.Clear();
                batch = batch + 10000;

            }
        }

    }
    public void testpipehash()
    {

        // Same pattern, but each key is a hash with two field/value pairs.
        List<Task> redisTask = new List<Task>();
        int batch = 10000;
        for (int i = 0; i <= 100000; i++)
        {
            HashEntry[] ab = new HashEntry[] { new HashEntry("hash1"+ i.ToString(), "hashvalue1"+i.ToString()),
                new HashEntry("hash2" + i.ToString(), "hashvalue2" + i.ToString()) };
            Task addAsyncTask = RedisCache.HashSetAsync("testhashKey:" + i.ToString(), ab);
            redisTask.Add(addAsyncTask);

            if (i == batch)   // drain the pipeline every 10k commands
            {
                Task[] TaskArray = redisTask.ToArray();
                Task.WaitAll(TaskArray);

                redisTask.Clear();
                batch = batch + 10000;

            }
        }

    }

    public int testpipehashCSV(DataTable dataTable)
    {

        // CSV rows: queue HSET commands and drain the pipeline every 1k.
        List<Task> redisTask = new List<Task>();
        int batch = 1000;

        var watch = System.Diagnostics.Stopwatch.StartNew();
        int i = 0;
        foreach (DataRow dr in dataTable.Rows)
        {
            // Pause the timer while converting the row, so only the Redis
            // calls are measured.
            watch.Stop();

            // Convert the row to a dictionary of column name -> value.
            Dictionary<string, string> test = dr.Table.Columns
                        .Cast<DataColumn>()
                        .ToDictionary(c => c.ColumnName, c => dr[c].ToString());

            // Convert the dictionary to a HashEntry[].
            HashEntry[] hashEntries = test.Select(
                       pair => new HashEntry(pair.Key, pair.Value)).ToArray();

            watch.Start();

            Task addAsyncTask = RedisCache.HashSetAsync("testhashKeyCSV:" + i.ToString(), hashEntries);
            redisTask.Add(addAsyncTask);

            if (i == batch)   // drain the pipeline every 1k commands
            {
                Task[] TaskArray = redisTask.ToArray();
                Task.WaitAll(TaskArray);

                redisTask.Clear();
                batch = batch + 1000;

            }
            i++;
        }

        // Wait for any commands queued after the last full batch (with ~10k
        // rows and a batch of 1000, the final rows would otherwise never be
        // awaited).
        Task.WaitAll(redisTask.ToArray());

        watch.Stop();
        TimeSpan ts = watch.Elapsed;

        // TotalSeconds, not Seconds: Seconds is only the seconds component
        // of the elapsed time and wraps past one minute.
        return (int)ts.TotalSeconds;

    }
}
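
For reference, StackExchange.Redis also exposes explicit pipelining through IBatch: commands queued on a batch are buffered and sent to the server as one contiguous block when Execute() is called. A minimal sketch of the two-field hash insert written that way, as a method that could be added to the Redis_Test class above (the key names and the 10k count are illustrative, and I have not timed this variant):

public void testpipebatch()
{
    IBatch redisBatch = RedisCache.CreateBatch();
    List<Task> redisTask = new List<Task>();

    for (int i = 0; i < 10000; i++)
    {
        HashEntry[] entries = new HashEntry[]
        {
            new HashEntry("hash1" + i, "hashvalue1" + i),
            new HashEntry("hash2" + i, "hashvalue2" + i)
        };

        // Queued but not sent yet: the batch holds commands back until Execute().
        redisTask.Add(redisBatch.HashSetAsync("testbatchKey:" + i, entries));
    }

    // Sends all queued commands in one go; only then can the tasks complete.
    redisBatch.Execute();
    Task.WaitAll(redisTask.ToArray());
}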

Any suggestions on how I can improve loading my CSV data into Redis hashes would be appreciated.
Thanks.

