Skip to main content
added 4 characters in body; edited title
Source Link
Jamal
  • 35.2k
  • 13
  • 134
  • 238

Implementing cache for server. Any suggestions?

I've used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity. I'm also thinking to implement a queue with \$O(1)\$ Remove to replace the List class.

Implementing cache for server. Any suggestions?

I've used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity. I'm also thinking to implement a queue with O(1) Remove to replace the List class.

Implementing cache for server

I've used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity. I'm also thinking to implement a queue with \$O(1)\$ Remove to replace the List class.

edited tags
Link
200_success
  • 145.7k
  • 22
  • 191
  • 481
edited body; edited tags; edited title
Source Link
Jamal
  • 35.2k
  • 13
  • 134
  • 238

Implementing cache for server. Any suggestions?

I've just implemented a cache for my server, and I'd like your thoughts about performance improvements and maybe some possible bug fixes if there are:

public class Cache<CacheKey, CacheValue>
{
    // Backing store: each key maps to (value, insertion time). The timestamp is
    // written by insert() and checked against the TTL in find().
    private readonly Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>> _map;

    /// <summary>
    /// Keys in recency order: the first element is the least recently used.
    /// </summary>
    private readonly List<CacheKey> _lru;

    // Upper bound on the number of cached entries (clamped to >= 1 in the ctor).
    private readonly int _maxSize;

    // Guards _map and _lru. find() mutates the recency list, so it must take
    // the writer lock too; a reader lock is not sufficient for any operation.
    private readonly ReaderWriterLock _lock;

    // Time-to-live of an entry, in minutes.
    private readonly int _minutes;

    /// <summary>
    /// Creates a cache holding at most <paramref name="maxNumberOfRecords"/> entries,
    /// each considered fresh for <paramref name="minutes"/> minutes after insertion.
    /// </summary>
    public Cache(int maxNumberOfRecords, int minutes)
    {
        _maxSize = Math.Max(maxNumberOfRecords, 1);
        _lru = new List<CacheKey>(_maxSize + 1);
        _map = new Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>>(_maxSize + 1);
        _lock = new ReaderWriterLock();
        _minutes = minutes;
    }

    /// <summary>
    /// Returns the cached value for <paramref name="key"/>, or default(CacheValue)
    /// when the key is absent or its entry has expired. A hit refreshes recency.
    /// </summary>
    public CacheValue find(CacheKey key)
    {
        _lock.AcquireWriterLock(Timeout.Infinite);
        try
        {
            KeyValuePair<CacheValue, DateTime> entry;
            // BUG FIX: the original compared TimeSpan.Minutes (the 0-59 component),
            // so an entry older than an hour could test as fresh again; TotalMinutes
            // is the real elapsed time. UtcNow avoids DST jumps in the subtraction.
            if (_map.TryGetValue(key, out entry) &&
                (DateTime.UtcNow - entry.Value).TotalMinutes < _minutes)
            {
                // BUG FIX: the original deferred this to the ThreadPool, letting
                // concurrent hits apply their recency updates out of order and
                // corrupt LRU ordering. Updating under the lock keeps it exact.
                _lru.Remove(key);
                _lru.Add(key);
                return entry.Key;
            }
            return default(CacheValue);
        }
        finally
        {
            // try/finally guarantees release even if List/Dictionary throws.
            _lock.ReleaseWriterLock();
        }
    }

    /// <summary>
    /// Inserts or overwrites the value for <paramref name="key"/>, stamping it with
    /// the current time, and evicts the least recently used entry when over capacity.
    /// </summary>
    public void insert(CacheKey key, CacheValue value)
    {
        _lock.AcquireWriterLock(Timeout.Infinite);
        try
        {
            var entry = new KeyValuePair<CacheValue, DateTime>(value, DateTime.UtcNow);

            if (_map.ContainsKey(key))
            {
                _map[key] = entry;          // overwrite in place (single lookup)
                // BUG FIX: the original left _lru untouched here, so a re-inserted
                // key kept its stale recency position and could be evicted early.
                _lru.Remove(key);
                _lru.Add(key);
            }
            else
            {
                _lru.Add(key);
                _map.Add(key, entry);

                if (_lru.Count > _maxSize)
                {
                    _map.Remove(_lru[0]);   // evict least recently used
                    _lru.RemoveAt(0);
                }
            }
        }
        finally
        {
            _lock.ReleaseWriterLock();
        }
    }
}

I've used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity. I'm also thinking to implement a queue with O(1) Remove to replace the List class.

Implementing cache, any suggestions?

I've just implemented a cache for my server, and I'd like your thoughts about performance improvements and maybe some bug fixes if there are:

public class Cache<CacheKey, CacheValue>
{
    // Each key maps to a (value, timestamp) pair; the timestamp records when
    // the entry was inserted and is compared against the TTL on lookup.
    private Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>> _map;

    /// <summary>
    /// Recency list; index 0 holds the least recently used key.
    /// </summary>
    private List<CacheKey> _lru;

    private int _maxSize;

    private ReaderWriterLock _lock;

    private int _minutes;

    /// <summary>
    /// Builds a cache capped at maxNumberOfRecords entries (at least 1),
    /// where each entry stays fresh for the given number of minutes.
    /// </summary>
    public Cache(int maxNumberOfRecords, int minutes)
    {
        _maxSize = maxNumberOfRecords < 1 ? 1 : maxNumberOfRecords;
        _lru = new List<CacheKey>(_maxSize + 1);
        _map = new Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>>();
        _lock = new ReaderWriterLock();
        _minutes = minutes;
    }

    /// <summary>
    /// Looks up a key; returns its value on a fresh hit, otherwise
    /// default(CacheValue). A hit schedules a recency refresh.
    /// </summary>
    public CacheValue find(CacheKey key)
    {
        _lock.AcquireReaderLock(-1);

        CacheValue result = default(CacheValue);

        KeyValuePair<CacheValue, DateTime> entry;
        bool fresh = _map.TryGetValue(key, out entry)
            && (DateTime.Now - entry.Value).Minutes < _minutes;

        if (fresh)
        {
            // Refresh recency off the calling thread so the caller
            // does not pay for the List operations.
            ThreadPool.QueueUserWorkItem(state =>
            {
                _lock.AcquireWriterLock(-1);
                _lru.Remove(key);
                _lru.Add(key);
                _lock.ReleaseWriterLock();
            });

            result = entry.Key;
        }

        _lock.ReleaseReaderLock();
        return result;
    }

    /// <summary>
    /// Stores a value under a key with the current timestamp; when the cache
    /// grows past its cap, the least recently used entry is dropped.
    /// </summary>
    public void insert(CacheKey key, CacheValue value)
    {
        _lock.AcquireWriterLock(-1);

        var stamped = new KeyValuePair<CacheValue, DateTime>(value, DateTime.Now);

        if (!_map.ContainsKey(key))
        {
            // New entry: append to the recency list, then enforce the cap.
            _lru.Add(key);
            _map.Add(key, stamped);

            if (_lru.Count > _maxSize)
            {
                _map.Remove(_lru[0]);
                _lru.RemoveAt(0);
            }
        }
        else
        {
            // Existing entry: swap in the new value and timestamp.
            _map.Remove(key);
            _map.Add(key, stamped);
        }

        _lock.ReleaseWriterLock();
    }
}

I used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity and I'm thinking to implement a queue with O(1) Remove to replace the List class.

Implementing cache for server. Any suggestions?

I've just implemented a cache for my server, and I'd like your thoughts about performance improvements and maybe some possible bug fixes:

public class Cache<CacheKey, CacheValue>
{
    // Backing store: each key maps to (value, insertion time). The timestamp
    // is written by insert() and checked against the TTL in find().
    private Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>> _map;

    /// <summary>
    /// The first element is the least recently used.
    /// </summary>
    private List<CacheKey> _lru;

    // Upper bound on the number of cached entries (clamped to >= 1 in the ctor).
    private int _maxSize;

    // Guards _map and _lru; find() takes a reader lock, mutations take the writer lock.
    private ReaderWriterLock _lock;

    // Time-to-live of an entry, in minutes.
    private int _minutes;

    /// <summary>
    /// Creates a cache holding at most <paramref name="maxNumberOfRecords"/> entries,
    /// each considered fresh for <paramref name="minutes"/> minutes after insertion.
    /// </summary>
    public Cache(int maxNumberOfRecords, int minutes)
    {
        _maxSize = Math.Max(maxNumberOfRecords, 1);
        _lru = new List<CacheKey>(_maxSize + 1);
        _map = new Dictionary<CacheKey, KeyValuePair<CacheValue, DateTime>>();
        _lock = new ReaderWriterLock();
        _minutes = minutes;
    }

    /// <summary>
    /// Returns the cached value for <paramref name="key"/>, or default(CacheValue)
    /// when the key is absent or its entry has expired.
    /// </summary>
    public CacheValue find(CacheKey key)
    {
        _lock.AcquireReaderLock(-1);   // -1 = wait indefinitely

        CacheValue ans;

        // NOTE(review): TimeSpan.Minutes is the 0-59 minutes *component*, not the
        // total elapsed time, so an entry older than an hour can test as fresh
        // again; TotalMinutes looks like what was intended — confirm.
        if (_map.ContainsKey(key) && (DateTime.Now - _map[key].Value).Minutes < _minutes)
        {
            // NOTE(review): the recency update runs later on a pool thread, so
            // concurrent hits can apply their updates out of order and skew the
            // LRU ordering (the asker flags this concern themselves).
            ThreadPool.QueueUserWorkItem(o =>            //don't wait for lru to update
            {
                _lock.AcquireWriterLock(-1);
                _lru.Remove(key);
                _lru.Add(key);
                _lock.ReleaseWriterLock();
            });

            ans = _map[key].Key;
        }
        else
        {
            // Miss or expired. Expired entries are not removed here; they only
            // leave the cache via the size cap in insert().
            ans = default(CacheValue);
        }

        _lock.ReleaseReaderLock();
        return ans;
    }

    /// <summary>
    /// Inserts or overwrites the value for <paramref name="key"/>, stamping it
    /// with the current time; evicts the least recently used entry when the
    /// size cap is exceeded.
    /// </summary>
    public void insert(CacheKey key, CacheValue value)
    {
        _lock.AcquireWriterLock(-1);

        if (_map.ContainsKey(key))      //if exists
        {
            // NOTE(review): overwriting does not touch _lru, so a re-inserted
            // key keeps its old recency position and may be evicted early.
            _map.Remove(key);
            _map.Add(key, new KeyValuePair<CacheValue, DateTime>(value, DateTime.Now));       //update value
        }
        else
        {                               //if not
            _lru.Add(key);
            _map.Add(key, new KeyValuePair<CacheValue, DateTime>(value, DateTime.Now));       //add to cache

            if (_lru.Count > _maxSize)
            {
                _map.Remove(_lru[0]);   //and delete least recently used

                _lru.RemoveAt(0);
            }
        }

        _lock.ReleaseWriterLock();
    }
}

I've used ThreadPool because the List operations are expensive, but I think that this hurts the LRU integrity. I'm also thinking to implement a queue with O(1) Remove to replace the List class.

rm tag from title
Link
svick
  • 24.5k
  • 4
  • 53
  • 89
Loading
Source Link
assafmo
  • 143
  • 4
Loading