Mirror of https://github.com/lidarr/lidarr.git, synced 2025-08-19 13:10:13 -07:00
Revert Cached changes from bb02d73c4
Bring it back in line with Sonarr. I was originally worried that caching all Artists would be a resource hog, but the caching will happen on every RSS scan anyway, so I think we're better off just holding the cache and not worrying about it.
This commit is contained in:
parent ace7371b69
commit 9945c986d9

2 changed files with 0 additions and 32 deletions
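As background for the reasoning above: with a cache that is simply held for the lifetime of the process, repeated lookups during an RSS scan only pay for the expensive fetch once per key. The sketch below illustrates that usage pattern; the type and member names are illustrative, not the actual NzbDrone.Common.Cache API.

```csharp
using System;
using System.Collections.Concurrent;

// Minimal stand-in for a keyed cache that just holds values for the life
// of the process (the behaviour this commit reverts to). Illustrative only.
public class HeldCache<T>
{
    private readonly ConcurrentDictionary<string, T> _store = new ConcurrentDictionary<string, T>();

    public T Get(string key, Func<T> fetch)
    {
        // The fetch delegate only runs when the key is missing, so repeated
        // lookups for the same artist hit the cache instead of the database.
        return _store.GetOrAdd(key, _ => fetch());
    }
}

public static class RssScanExample
{
    public static void Main()
    {
        var artistCache = new HeldCache<string>();
        var fetches = 0;

        // A hypothetical RSS scan resolving the same artist repeatedly.
        for (var i = 0; i < 10; i++)
        {
            artistCache.Get("artist:abba", () =>
            {
                fetches++;          // the expensive lookup happens once
                return "ABBA";
            });
        }

        Console.WriteLine(fetches); // 1
    }
}
```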
@@ -101,33 +101,6 @@ namespace NzbDrone.Common.Test.CacheTests

             hitCount.Should().BeInRange(3, 7);
         }
-
-        [Test]
-        [Retry(3)]
-        [Platform(Exclude = "MacOsX")]
-        public void should_clear_expired_when_they_expire()
-        {
-            int hitCount = 0;
-            _cachedString = new Cached<string>();
-
-            for (int i = 0; i < 10; i++)
-            {
-                _cachedString.Get("key",
-                                  () =>
-                                  {
-                                      hitCount++;
-                                      return null;
-                                  },
-                                  TimeSpan.FromMilliseconds(300));
-
-                Thread.Sleep(100);
-            }
-
-            Thread.Sleep(1000);
-
-            hitCount.Should().BeInRange(3, 7);
-            _cachedString.Values.Should().HaveCount(0);
-        }
     }

     public class Worker
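The hunk above deletes the timing-based expiry test. It polled Get every 100 ms with a 300 ms lifetime, slept a further second, and accepted any hit count between 3 and 7, with [Retry(3)] and a macOS exclusion to absorb scheduler jitter. For reference, the usual way to make that kind of assertion deterministic is to drive expiry from an injectable clock; the sketch below is a hypothetical illustration of that approach, not Lidarr's fixture.

```csharp
using System;
using System.Collections.Generic;

// Hypothetical clock abstraction so a test can advance time explicitly
// instead of calling Thread.Sleep. Not part of NzbDrone.Common.
public interface IClock
{
    DateTime UtcNow { get; }
}

public class FakeClock : IClock
{
    public DateTime UtcNow { get; private set; } = new DateTime(2020, 1, 1);
    public void Advance(TimeSpan by) => UtcNow += by;
}

// A cache that expires entries lazily against the injected clock.
public class ClockedCache<T>
{
    private readonly IClock _clock;
    private readonly Dictionary<string, (T Value, DateTime Expires)> _store = new Dictionary<string, (T, DateTime)>();

    public ClockedCache(IClock clock) => _clock = clock;

    public T Get(string key, Func<T> fetch, TimeSpan lifetime)
    {
        if (_store.TryGetValue(key, out var item) && item.Expires > _clock.UtcNow)
        {
            return item.Value;
        }

        var value = fetch();
        _store[key] = (value, _clock.UtcNow + lifetime);
        return value;
    }
}
```

With this shape the assertion becomes exact: advance the fake clock past the lifetime and the fetch delegate must run again, with no sleeps and no retry attribute.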
@@ -39,11 +39,6 @@ namespace NzbDrone.Common.Cache
         {
             Ensure.That(key, () => key).IsNotNullOrWhiteSpace();
             _store[key] = new CacheItem(value, lifetime);
-
-            if (lifetime != null)
-            {
-                System.Threading.Tasks.Task.Delay(lifetime.Value).ContinueWith(t => _store.TryRemove(key, out var temp));
-            }
         }

         public T Find(string key)
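The five removed lines above are the proactive eviction path: when a lifetime was supplied, Set scheduled a Task.Delay continuation that removed the entry from the backing ConcurrentDictionary once the lifetime elapsed. A minimal, self-contained sketch of that pattern (assumed names, not the real Cached<T> members) for readers who want to see it in isolation:

```csharp
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

// Sketch of the time-based eviction being removed: setting a value with a
// lifetime schedules a fire-and-forget background removal via Task.Delay.
public class ExpiringStore<T>
{
    private readonly ConcurrentDictionary<string, T> _store = new ConcurrentDictionary<string, T>();

    public void Set(string key, T value, TimeSpan? lifetime = null)
    {
        _store[key] = value;

        if (lifetime != null)
        {
            // Once the delay elapses the key is dropped unconditionally,
            // even if it was refreshed in the meantime, which is one of the
            // rough edges of this pattern compared with simply holding the cache.
            Task.Delay(lifetime.Value).ContinueWith(t => _store.TryRemove(key, out _));
        }
    }

    public bool TryGet(string key, out T value)
    {
        return _store.TryGetValue(key, out value);
    }
}
```

After the revert, nothing in this diff schedules removal any more; entries stay in the store until the cache itself is cleared or the value is overwritten (any lazy expiry the class may still perform on lookup is outside the lines shown here).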