Locking pattern for proper use of .NET MemoryCache

Date: 2014-01-21 21:11:00

Tags: c# .net multithreading memorycache

I assumed this code has concurrency issues:

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    string expensiveString = null;
    if (MemoryCache.Default.Contains(CacheKey))
    {
        expensiveString = MemoryCache.Default[CacheKey] as string;
    }
    else
    {
        CacheItemPolicy cip = new CacheItemPolicy()
        {
            AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
        };
        expensiveString = SomeHeavyAndExpensiveCalculation();
        MemoryCache.Default.Set(CacheKey, expensiveString, cip);
    }
    return expensiveString;
}

The reason for the concurrency issue is that multiple threads can get a null key and then try to insert data into the cache.

What would be the shortest and cleanest way to make this code concurrency-proof? I like to follow a good pattern across my cache-related code. A link to an online article would be a great help.

UPDATE:

I came up with this code based on @Scott Chamberlain's answer. Can anyone find any performance or concurrency issue with it? If this works, it would save many lines of code and errors.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.Caching;

namespace CachePoc
{
    class Program
    {
        static object everoneUseThisLockObject4CacheXYZ = new object();
        const string CacheXYZ = "CacheXYZ";
        static object everoneUseThisLockObject4CacheABC = new object();
        const string CacheABC = "CacheABC";

        static void Main(string[] args)
        {
            string xyzData = MemoryCacheHelper.GetCachedData<string>(CacheXYZ, everoneUseThisLockObject4CacheXYZ, 20, SomeHeavyAndExpensiveXYZCalculation);
            string abcData = MemoryCacheHelper.GetCachedData<string>(CacheABC, everoneUseThisLockObject4CacheABC, 20, SomeHeavyAndExpensiveABCCalculation);
        }

        private static string SomeHeavyAndExpensiveXYZCalculation() {return "Expensive";}
        private static string SomeHeavyAndExpensiveABCCalculation() {return "Expensive";}

        public static class MemoryCacheHelper
        {
            public static T GetCachedData<T>(string cacheKey, object cacheLock, int cacheTimePolicyMinutes, Func<T> GetData)
                where T : class
            {
                //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
                T cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                if (cachedData != null)
                {
                    return cachedData;
                }

                lock (cacheLock)
                {
                    //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
                    cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                    if (cachedData != null)
                    {
                        return cachedData;
                    }

                    //The value still did not exist so we now write it in to the cache.
                    CacheItemPolicy cip = new CacheItemPolicy()
                    {
                        AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(cacheTimePolicyMinutes))
                    };
                    cachedData = GetData();
                    MemoryCache.Default.Set(cacheKey, cachedData, cip);
                    return cachedData;
                }
            }
        }
    }
}

10 Answers:

Answer 0 (score: 80)

This is my second iteration of the code. Because MemoryCache is thread safe, you don't need to lock on the initial read; you can just read, and if the cache returns null, then do the lock check to see if you need to create the string. It greatly simplifies the code.

const string CacheKey = "CacheKey";
static readonly object cacheLock = new object();
private static string GetCachedData()
{

    //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
    var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

    if (cachedString != null)
    {
        return cachedString;
    }

    lock (cacheLock)
    {
        //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
        cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }

        //The value still did not exist so we now write it in to the cache.
        var expensiveString = SomeHeavyAndExpensiveCalculation();
        CacheItemPolicy cip = new CacheItemPolicy()
                              {
                                  AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
                              };
        MemoryCache.Default.Set(CacheKey, expensiveString, cip);
        return expensiveString;
    }
}

Edit: The code below is unnecessary, but I wanted to leave it in to show the original method. It may be useful to future visitors who are using a different collection that has thread-safe reads but non-thread-safe writes (almost all of the classes under the System.Collections namespace are like that).

Here is how to do it using ReaderWriterLockSlim to protect access. You need to do a kind of "Double Checked Locking" to see if anyone else created the cached item while we were waiting to take the lock.

const string CacheKey = "CacheKey";
static readonly ReaderWriterLockSlim cacheLock = new ReaderWriterLockSlim();
static string GetCachedData()
{
    //First we do a read lock to see if it already exists, this allows multiple readers at the same time.
    cacheLock.EnterReadLock();
    try
    {
        //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
        var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }
    }
    finally
    {
        cacheLock.ExitReadLock();
    }

    //Only one UpgradeableReadLock can exist at one time, but it can co-exist with many ReadLocks
    cacheLock.EnterUpgradeableReadLock();
    try
    {
        //We need to check again to see if the string was created while we were waiting to enter the EnterUpgradeableReadLock
        var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }

        //The entry still does not exist so we need to create it and enter the write lock
        var expensiveString = SomeHeavyAndExpensiveCalculation();
        cacheLock.EnterWriteLock(); //This will block till all the Readers flush.
        try
        {
            CacheItemPolicy cip = new CacheItemPolicy()
            {
                AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
            };
            MemoryCache.Default.Set(CacheKey, expensiveString, cip);
            return expensiveString;
        }
        finally 
        {
            cacheLock.ExitWriteLock();
        }
    }
    finally
    {
        cacheLock.ExitUpgradeableReadLock();
    }
}

Answer 1 (score: 33)

There is an open source library [disclaimer: I wrote it]: LazyCache that IMO covers your requirement in two lines of code:

IAppCache cache = new CachingService();
var cachedResults = cache.GetOrAdd("CacheKey", 
  () => SomeHeavyAndExpensiveCalculation());

It has locking built in by default, so the cacheable method will only execute once per cache miss, and it uses a lambda so you can do "get or add" in one go. It defaults to a 20 minute sliding expiration.
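If the default sliding policy doesn't fit, the expiration can be passed in explicitly. A minimal sketch (my own, not from the answer), assuming the LazyCache overload that takes a DateTimeOffset absolute expiration is available:

IAppCache cache = new CachingService();

// Same get-or-add call, but with an explicit absolute expiration instead of the default sliding one.
var cachedResults = cache.GetOrAdd(
    "CacheKey",
    () => SomeHeavyAndExpensiveCalculation(),
    DateTimeOffset.Now.AddMinutes(20));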

There's even a NuGet package ;)

Answer 2 (score: 31)

I have solved this issue by using the AddOrGetExisting method on MemoryCache together with Lazy initialization.

Essentially, my code looks something like this:

static string GetCachedData(string key, DateTimeOffset offset)
{
    Lazy<String> lazyObject = new Lazy<String>(() => SomeHeavyAndExpensiveCalculationThatReturnsAString());
    var returnedLazyObject = MemoryCache.Default.AddOrGetExisting(key, lazyObject, offset); 
    if (returnedLazyObject == null)
       return lazyObject.Value;
    return ((Lazy<String>) returnedLazyObject).Value;
}

The worst-case scenario here is that you create the same Lazy object twice. But that is fairly trivial. The use of AddOrGetExisting guarantees that you only ever get one instance of the Lazy object back, so you are also guaranteed to call the expensive initialization method only once.
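Wrapped into a reusable helper, the pattern could look like the sketch below (my own illustration, not part of the answer; GetOrAddLazy is a made-up name, LazyThreadSafetyMode comes from System.Threading, and note that if the factory throws, the exception stays cached inside the Lazy until the entry expires):

static T GetOrAddLazy<T>(string key, Func<T> valueFactory, DateTimeOffset offset)
{
    // ExecutionAndPublication guarantees valueFactory runs at most once per Lazy instance.
    var lazy = new Lazy<T>(valueFactory, LazyThreadSafetyMode.ExecutionAndPublication);

    // AddOrGetExisting returns null if our Lazy was inserted, otherwise the Lazy that was already cached.
    var existing = (Lazy<T>)MemoryCache.Default.AddOrGetExisting(key, lazy, offset);
    return (existing ?? lazy).Value;
}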

Answer 3 (score: 15)

  "I assumed this code has concurrency issues:"

Actually, it's quite possibly fine, though with a possible improvement.

Now, generally the pattern where multiple threads set a shared value on first use, without locking around the value being obtained and set, can be:

  1. Disastrous - other code will assume only one instance exists.
  2. Disastrous - the code that obtains the instance cannot tolerate more than one (or perhaps a certain small number of) concurrent operations.
  3. Disastrous - the means of storage is not thread-safe (e.g. have two threads adding to a dictionary and you can get all sorts of nasty errors).
  4. Sub-optimal - the overall performance is worse than if locking had ensured only one thread did the work of obtaining the value.
  5. Optimal - the cost of having multiple threads do redundant work is less than the cost of preventing it, especially since that can only happen during a relatively brief period.

However, considering that MemoryCache may evict entries here:

  1. If it is disastrous to have more than one instance, then MemoryCache is the wrong approach.
  2. If you must prevent simultaneous creation, you should do so at the point of creation.
  3. MemoryCache is thread-safe in terms of access to that object, so that is not a concern here.

Both of these possibilities have to be thought about, of course, though the only time having two instances of the same string existing can be a problem is if you are doing very particular optimisations that don't apply here*.

So, we are left with the possibilities:

  1. It is cheaper to avoid the cost of duplicate calls to SomeHeavyAndExpensiveCalculation().
  2. It is cheaper not to avoid the cost of duplicate calls to SomeHeavyAndExpensiveCalculation().

And working that out can be difficult (indeed, it is the sort of thing worth profiling rather than assuming you can reason it out). It is worth considering here, though, that the most obvious ways of locking on insert will block all additions to the cache, including those that are unrelated.

This means that if we had 50 threads trying to set 50 different values, we would have to make all 50 threads wait on each other, even though they were not even going to do the same calculation.

As such, you are probably better off with the code you have than with code that avoids the race condition, and if the race condition is a problem, you quite likely either need to handle it somewhere else, or need a different caching strategy than one that expels old entries†.

The one thing I would change is that I would replace the call to Set() with one to AddOrGetExisting() (a sketch of that change follows this answer). From the above it should be clear that it probably isn't necessary, but it would allow the newly obtained item to be collected, reducing overall memory use and allowing a higher ratio of low-generation to high-generation collections.

So yes, you could use double-checked locking to prevent the concurrency, but either the concurrency isn't actually a problem, or you are storing the values in the wrong way, or double-locking on the store would not be the best way to solve it.

*If you know only one of each of a set of strings exists, you can optimise equality comparisons, which is about the only time having two copies of a string can be incorrect rather than just sub-optimal, but you would want to be doing very different types of caching for that to make sense. E.g. the sort XmlReader does internally.

†Quite likely either one that stores indefinitely, or one that makes use of weak references so it will only expel entries if there are no existing uses.
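A minimal sketch of that Set() → AddOrGetExisting() change applied to the question's method (my own illustration, not code from the answer):

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    var cachedString = MemoryCache.Default[CacheKey] as string;
    if (cachedString != null)
        return cachedString;

    var cip = new CacheItemPolicy
    {
        AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
    };
    var expensiveString = SomeHeavyAndExpensiveCalculation();

    // AddOrGetExisting returns null when our value was inserted; otherwise it returns the value
    // another thread stored first, so our duplicate can simply be collected.
    var existing = MemoryCache.Default.AddOrGetExisting(CacheKey, expensiveString, cip) as string;
    return existing ?? expensiveString;
}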

Answer 4 (score: 1)

MemoryCache

Console example, "how to save/get a simple class object"

Output after starting and pressing any key other than Esc:

Saved to cache!
Got from cache!
Some1
Some2

     PL    date
  1:  a 2013.01
  2:  c 2013.01
  3:  a 2013.02
  4:  c 2013.02
  5:  a 2013.03
 ---           
514:  c 2017.45
515:  a 2017.46
516:  c 2017.46
517:  a 2017.47
518:  c 2017.47
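The answer's code block did not survive the copy above; a minimal sketch of the idea - caching and reading back a simple class instance with MemoryCache (my own reconstruction, class and key names are illustrative; requires System and System.Runtime.Caching):

class Some
{
    public string Name { get; set; }
}

static void SaveAndGet()
{
    var item = new Some { Name = "Some1" };

    var policy = new CacheItemPolicy
    {
        AbsoluteExpiration = DateTimeOffset.Now.AddMinutes(20)
    };

    MemoryCache.Default.Set("SomeKey", item, policy);
    Console.WriteLine("Saved to cache!");

    var fromCache = MemoryCache.Default.Get("SomeKey") as Some;
    Console.WriteLine("Got from cache!");
    Console.WriteLine(fromCache?.Name); // prints "Some1"
}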

Answer 5 (score: 1)

// Requires the LazyCache package (IAppCache, CachingService) and NLog; CloneObject() is assumed
// to be a deep-copy extension method defined elsewhere in the answerer's code base.
public interface ILazyCacheProvider : IAppCache
{
    /// <summary>
    /// Get loaded data - always returns the cached result (even when the data is older than needed), but very fast!
    /// </summary>
    /// <param name="key"></param>
    /// <param name="getData"></param>
    /// <param name="slidingExpiration"></param>
    /// <typeparam name="T"></typeparam>
    /// <returns></returns>
    T GetOrAddPermanent<T>(string key, Func<T> getData, TimeSpan slidingExpiration);
}

/// <summary>
/// Initialize LazyCache in runtime
/// </summary>
public class LazzyCacheProvider: CachingService, ILazyCacheProvider
{
    private readonly Logger _logger = LogManager.GetLogger("MemCashe");
    private readonly Hashtable _hash = new Hashtable();
    private readonly List<string>  _reloader = new List<string>();
    private readonly ConcurrentDictionary<string, DateTime> _lastLoad = new ConcurrentDictionary<string, DateTime>();  


    T ILazyCacheProvider.GetOrAddPermanent<T>(string dataKey, Func<T> getData, TimeSpan slidingExpiration)
    {
        var currentPrincipal = Thread.CurrentPrincipal;
        if (!ObjectCache.Contains(dataKey) && !_hash.Contains(dataKey))
        {
            _hash[dataKey] = null;
            _logger.Debug($"{dataKey} - first start");
            _lastLoad[dataKey] = DateTime.Now;
            _hash[dataKey] = ((object)GetOrAdd(dataKey, getData, slidingExpiration)).CloneObject();
            _lastLoad[dataKey] = DateTime.Now;
           _logger.Debug($"{dataKey} - first");
        }
        else
        {
            if ((!ObjectCache.Contains(dataKey) || _lastLoad[dataKey].AddMinutes(slidingExpiration.Minutes) < DateTime.Now) && _hash[dataKey] != null)
                Task.Run(() =>
                {
                    if (_reloader.Contains(dataKey)) return;
                    lock (_reloader)
                    {
                        if (ObjectCache.Contains(dataKey))
                        {
                            if(_lastLoad[dataKey].AddMinutes(slidingExpiration.Minutes) > DateTime.Now)
                                return;
                            _lastLoad[dataKey] = DateTime.Now;
                            Remove(dataKey);
                        }
                        _reloader.Add(dataKey);
                        Thread.CurrentPrincipal = currentPrincipal;
                        _logger.Debug($"{dataKey} - reload start");
                        _hash[dataKey] = ((object)GetOrAdd(dataKey, getData, slidingExpiration)).CloneObject();
                        _logger.Debug($"{dataKey} - reload");
                        _reloader.Remove(dataKey);
                    }
                });
        }
        if (_hash[dataKey] != null) return (T) (_hash[dataKey]);

        _logger.Debug($"{dataKey} - dummy start");
        var data = GetOrAdd(dataKey, getData, slidingExpiration);
        _logger.Debug($"{dataKey} - dummy");
        return (T)((object)data).CloneObject();
    }
}

Answer 6 (score: 1)

To avoid the global lock, you can use SingletonCache to implement one lock per key, without blowing up memory usage (the lock objects are removed when they are no longer referenced, and acquire/release is thread safe, guaranteeing via compare-and-swap that only one lock instance is ever in use per key).

Usage looks like this:

// Requires the BitFaster.Caching package (SingletonCache) and System.Runtime.Caching.
static SingletonCache<string, object> keyLocks = new SingletonCache<string, object>();

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    string expensiveString =null;
    if (MemoryCache.Default.Contains(CacheKey))
    {
        return MemoryCache.Default[CacheKey] as string;
    }

    // double checked lock
    using (var lifetime = keyLocks.Acquire(CacheKey))
    {
        lock (lifetime.Value)
        {
           if (MemoryCache.Default.Contains(CacheKey))
           {
              return MemoryCache.Default[CacheKey] as string;
           }

           CacheItemPolicy cip = new CacheItemPolicy()
           {
              AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
           };
           expensiveString = SomeHeavyAndExpensiveCalculation();
           MemoryCache.Default.Set(CacheKey, expensiveString, cip);
           return expensiveString;
        }
    }      
}

The code is on GitHub: https://github.com/bitfaster/BitFaster.Caching

Install-Package BitFaster.Caching

There is also an LRU implementation that is lighter weight than MemoryCache and has several advantages - faster concurrent reads and writes, bounded size, no background thread, internal performance counters, etc. (disclaimer, I wrote it).
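A minimal sketch of that LRU variant (my own illustration; it assumes the ConcurrentLru type and its GetOrAdd method from the BitFaster.Caching.Lru namespace):

using BitFaster.Caching.Lru;

// Bounded to 1024 entries; the least recently used items are evicted once the cache is full.
var lru = new ConcurrentLru<string, string>(1024);

// The valueFactory runs on a cache miss and the result is kept until evicted.
string value = lru.GetOrAdd("CacheKey", key => SomeHeavyAndExpensiveCalculation());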

Answer 7 (score: 0)

A bit late to the party, however... Here is a full implementation:

    [HttpGet]
    public async Task<HttpResponseMessage> GetPageFromUriOrBody(RequestQuery requestQuery)
    {
        log(nameof(GetPageFromUriOrBody), nameof(requestQuery));
        var responseResult = await _requestQueryCache.GetOrCreate(
            nameof(GetPageFromUriOrBody)
            , requestQuery
            , (x) => getPageContent(x).Result);
        return Request.CreateResponse(System.Net.HttpStatusCode.Accepted, responseResult);
    }
    static MemoryCacheWithPolicy<RequestQuery, string> _requestQueryCache = new MemoryCacheWithPolicy<RequestQuery, string>();

This is the getPageContent signature:

async Task<string> getPageContent(RequestQuery requestQuery);

And this is the MemoryCacheWithPolicy implementation:

// Requires the Microsoft.Extensions.Caching.Memory package; ILogger/LogManager come from NLog,
// and AppLogger is the answerer's own logging wrapper.
public class MemoryCacheWithPolicy<TParameter, TResult>
{
    static ILogger _nlogger = new AppLogger().Logger;
    private MemoryCache _cache = new MemoryCache(new MemoryCacheOptions() 
    {
        // SizeLimit caps the sum of all entry sizes (in the units assigned via SetSize below); it is not a byte/memory amount.
        SizeLimit = 1024 
    });

    /// <summary>
    /// Gets or creates a new memory cache record for the main data,
    /// along with the parameter data that is associated with it.
    /// </summary>
    /// <param name="key">Cache memory key of the main data.</param>
    /// <param name="param">Parameter model associated with the main model (the request result).</param>
    /// <param name="createCacheData">A delegate to create new main data to cache.</param>
    /// <returns></returns>
    public async Task<TResult> GetOrCreate(object key, TParameter param, Func<TParameter, TResult> createCacheData)
    {
        // this key is used for param cache memory.
        var paramKey = key + nameof(param);

        if (!_cache.TryGetValue(key, out TResult cacheEntry))
        {
            // key is not in the cache, create data through the delegate.
            cacheEntry = createCacheData(param);
            createMemoryCache(key, cacheEntry, paramKey, param);

            _nlogger.Warn(" cache is created.");
        }
        else
        {
            // data is cached so far..., check whether the param model is the same (or has changed)?
            if(!_cache.TryGetValue(paramKey, out TParameter cacheParam))
            {
                //exception: this case should not happen!
            }

            if (!cacheParam.Equals(param))
            {
                // request param is changed, create data through the delegate.
                cacheEntry = createCacheData(param);
                createMemoryCache(key, cacheEntry, paramKey, param);
                _nlogger.Warn(" cache is re-created (param model has been changed).");
            }
            else
            {
                _nlogger.Trace(" cache is used.");
            }

        }
        return await Task.FromResult<TResult>(cacheEntry);
    }
    MemoryCacheEntryOptions createMemoryCacheEntryOptions(TimeSpan slidingOffset, TimeSpan relativeOffset)
    {
        // Cache data within [slidingOffset] seconds, 
        // request new result after [relativeOffset] seconds.
        return new MemoryCacheEntryOptions()

            // SetSize(1): each entry counts as 1 unit toward the cache's SizeLimit;
            // it is not an actual memory size value.
            .SetSize(1)

            // Priority on removing when reaching size limit (memory pressure)
            .SetPriority(CacheItemPriority.High)

            // Keep in cache for this amount of time, reset it if accessed.
            .SetSlidingExpiration(slidingOffset)

            // Remove from cache after this time, regardless of sliding expiration
            .SetAbsoluteExpiration(relativeOffset);
        //
    }
    void createMemoryCache(object key, TResult cacheEntry, object paramKey, TParameter param)
    {
        // Cache data within 2 seconds, 
        // request new result after 5 seconds.
        var cacheEntryOptions = createMemoryCacheEntryOptions(
            TimeSpan.FromSeconds(2)
            , TimeSpan.FromSeconds(5));

        // Save data in cache.
        _cache.Set(key, cacheEntry, cacheEntryOptions);

        // Save param in cache.
        _cache.Set(paramKey, param, cacheEntryOptions);
    }
    void checkCacheEntry<T>(object key, string name)
    {
        _cache.TryGetValue(key, out T value);
        _nlogger.Fatal("Key: {0}, Name: {1}, Value: {2}", key, name, value);
    }
}

nlogger is just an NLog object used to trace MemoryCacheWithPolicy behaviour. The memory cache entry is re-created if the request object (RequestQuery requestQuery) changes through the delegate (Func<TParameter, TResult> createCacheData), or when the sliding or absolute expiration reaches its limit. Note that everything is async as well ;)

Answer 8 (score: 0)

It is hard to say which is better, lock or ReaderWriterLockSlim. You need real-world statistics of read and write counts, ratios, etc.

However, if you believe using "lock" is the right way, then here is a different solution for different needs. I also include Allan Xu's solution in the code, because both may be needed depending on the requirements.

Here are the requirements that pushed me to this solution:

  1. For some reason you don't want to, or cannot, supply the "GetData" function. Perhaps the "GetData" function is located in another class with a heavy constructor, and you don't even want to create an instance of it until you are sure it cannot be avoided.
  2. You need to access the same cached data from different locations/tiers of the application, and those different locations don't have access to the same locker object.
  3. You don't have a constant cache key. For example: you need to cache some data keyed by a sessionId.

The code:

using System;
using System.Runtime.Caching;
using System.Collections.Concurrent;
using System.Collections.Generic;

namespace CachePoc
{
    class Program
    {
        static object everoneUseThisLockObject4CacheXYZ = new object();
        const string CacheXYZ = "CacheXYZ";
        static object everoneUseThisLockObject4CacheABC = new object();
        const string CacheABC = "CacheABC";

        static void Main(string[] args)
        {
            //Allan Xu's usage
            string xyzData = MemoryCacheHelper.GetCachedDataOrAdd<string>(CacheXYZ, everoneUseThisLockObject4CacheXYZ, 20, SomeHeavyAndExpensiveXYZCalculation);
            string abcData = MemoryCacheHelper.GetCachedDataOrAdd<string>(CacheABC, everoneUseThisLockObject4CacheABC, 20, SomeHeavyAndExpensiveABCCalculation);

            //My usage
            string sessionId = System.Web.HttpContext.Current.Session["CurrentUser.SessionId"].ToString();
            string yvz = MemoryCacheHelper.GetCachedData<string>(sessionId);
            if (string.IsNullOrWhiteSpace(yvz))
            {
                object locker = MemoryCacheHelper.GetLocker(sessionId);
                lock (locker)
                {
                    yvz = MemoryCacheHelper.GetCachedData<string>(sessionId);
                    if (string.IsNullOrWhiteSpace(yvz))
                    {
                        DatabaseRepositoryWithHeavyConstructorOverHead dbRepo = new DatabaseRepositoryWithHeavyConstructorOverHead();
                        yvz = dbRepo.GetDataExpensiveDataForSession(sessionId);
                        MemoryCacheHelper.AddDataToCache(sessionId, yvz, 5);
                    }
                }
            }
        }


        private static string SomeHeavyAndExpensiveXYZCalculation() { return "Expensive"; }
        private static string SomeHeavyAndExpensiveABCCalculation() { return "Expensive"; }

        public static class MemoryCacheHelper
        {
            //Allan Xu's solution
            public static T GetCachedDataOrAdd<T>(string cacheKey, object cacheLock, int minutesToExpire, Func<T> GetData) where T : class
            {
                //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
                T cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                if (cachedData != null)
                    return cachedData;

                lock (cacheLock)
                {
                    //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
                    cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                    if (cachedData != null)
                        return cachedData;

                    cachedData = GetData();
                    MemoryCache.Default.Set(cacheKey, cachedData, DateTime.Now.AddMinutes(minutesToExpire));
                    return cachedData;
                }
            }

            #region "My Solution"

            readonly static ConcurrentDictionary<string, object> Lockers = new ConcurrentDictionary<string, object>();
            public static object GetLocker(string cacheKey)
            {
                CleanupLockers();

                return Lockers.GetOrAdd(cacheKey, item => new object());
            }

            public static T GetCachedData<T>(string cacheKey) where T : class
            {
                CleanupLockers();

                T cachedData = MemoryCache.Default.Get(cacheKey) as T;
                return cachedData;
            }

            public static void AddDataToCache(string cacheKey, object value, int cacheTimePolicyMinutes)
            {
                CleanupLockers();

                MemoryCache.Default.Add(cacheKey, value, DateTimeOffset.Now.AddMinutes(cacheTimePolicyMinutes));
            }

            static DateTimeOffset lastCleanUpTime = DateTimeOffset.MinValue;
            static void CleanupLockers()
            {
                if (DateTimeOffset.Now.Subtract(lastCleanUpTime).TotalMinutes > 1)
                {
                    lock (Lockers)//maybe a better locker is needed?
                    {
                        try//bypass exceptions
                        {
                            List<string> lockersToRemove = new List<string>();
                            foreach (var locker in Lockers)
                            {
                                if (!MemoryCache.Default.Contains(locker.Key))
                                    lockersToRemove.Add(locker.Key);
                            }

                            object dummy;
                            foreach (string lockerKey in lockersToRemove)
                                Lockers.TryRemove(lockerKey, out dummy);

                            lastCleanUpTime = DateTimeOffset.Now;
                        }
                        catch (Exception)
                        { }
                    }
                }

            }
            #endregion
        }
    }

    class DatabaseRepositoryWithHeavyConstructorOverHead
    {
        internal string GetDataExpensiveDataForSession(string sessionId)
        {
            return "Expensive data from database";
        }
    }

}

Answer 9 (score: 0)

A somewhat dated question, but maybe still useful: you can take a look at FusionCache ⚡, which I recently released.

The feature you are looking for is described here, and you can use it like this:

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    return fusionCache.GetOrSet(
        CacheKey,
        _ => SomeHeavyAndExpensiveCalculation(),
        TimeSpan.FromMinutes(20)
    );
}

You may also find some of the other features interesting, such as fail-safe, advanced timeouts with background factory completion, and support for an optional distributed 2nd level cache.
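For reference, a sketch of what enabling some of those options might look like (my own illustration based on the project's documentation; the SetDuration / SetFailSafe / SetFactoryTimeouts option names are assumptions on my part, so check the docs before relying on them):

var result = fusionCache.GetOrSet(
    CacheKey,
    _ => SomeHeavyAndExpensiveCalculation(),
    options => options
        .SetDuration(TimeSpan.FromMinutes(20))
        .SetFailSafe(true)                              // serve the stale value if the factory fails
        .SetFactoryTimeouts(TimeSpan.FromSeconds(2)));  // stop waiting after 2s and let the factory finish in the background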

If you will, let me know what you think.

/shameless plug