Adding lock to allow concurrent access to the cache (#10)
* Adding lock to allow concurrent access to the cache

* Update package version to 1.3.1
shengyfu authored May 12, 2023
1 parent 4c7e058 commit e37e37e
Showing 3 changed files with 29 additions and 17 deletions.
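
The three file diffs below bump the package version, refresh the README benchmark numbers, and wrap the LRU cache's lookup and replace paths in a single lock, since the Dictionary and LinkedList backing the cache are not safe for concurrent mutation. As a rough, self-contained sketch of that pattern: the TinyLruCache type, its eviction details, and the Parallel.For harness below are illustrative stand-ins, not code from this commit.

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// Hypothetical stand-in for the library's internal LruCache<TKey, TValue>;
// the lock-around-every-operation shape mirrors the diff below.
public class TinyLruCache<TKey, TValue>
{
    private readonly object _lockObject = new object();
    private readonly Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>> _cache =
        new Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>>();
    private readonly LinkedList<KeyValuePair<TKey, TValue>> _lruList =
        new LinkedList<KeyValuePair<TKey, TValue>>();
    private readonly int _cacheSize;

    public TinyLruCache(int cacheSize = 4096) => _cacheSize = cacheSize;

    public bool Lookup(TKey key, out TValue value)
    {
        lock (_lockObject)                    // serialize every read of _cache/_lruList
        {
            if (_cache.TryGetValue(key, out var cached))
            {
                _lruList.Remove(cached);      // move the hit to the front (most recently used)
                _lruList.AddFirst(cached);
                value = cached.Value.Value;
                return true;
            }
            value = default!;
            return false;
        }
    }

    public void Add(TKey key, TValue value)
    {
        lock (_lockObject)                    // and every write, using the same lock object
        {
            if (_cache.TryGetValue(key, out var existing))
            {
                _lruList.Remove(existing);    // drop the stale node before re-inserting
                _cache.Remove(key);
            }
            else if (_cache.Count >= _cacheSize)
            {
                var oldest = _lruList.Last!;  // evict the least recently used entry
                _cache.Remove(oldest.Value.Key);
                _lruList.RemoveLast();
            }
            var node = new LinkedListNode<KeyValuePair<TKey, TValue>>(
                new KeyValuePair<TKey, TValue>(key, value));
            _cache[key] = node;
            _lruList.AddFirst(node);
        }
    }
}

public static class Demo
{
    public static void Main()
    {
        var cache = new TinyLruCache<int, string>(cacheSize: 256);
        // Many threads reading and writing one shared cache instance at once —
        // the "concurrent access" the commit message refers to.
        Parallel.For(0, 100_000, i =>
        {
            int key = i % 1024;
            if (!cache.Lookup(key, out _))
            {
                cache.Add(key, "value-" + key);
            }
        });
        Console.WriteLine("Concurrent cache exercise finished.");
    }
}
```

The commit takes the same coarse-grained approach: one private lock object serializes every cache operation, rather than switching to finer-grained locking or a concurrent collection.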
12 changes: 6 additions & 6 deletions README.md
@@ -34,15 +34,15 @@ Console.WriteLine(decoded);
 
 PerfBenchmark result based on [PerfBenchmark.csproj](Tokenizer_C%23/PerfBenchmark/PerfBenchmark.csproj):
 ``` ini
-BenchmarkDotNet=v0.13.3, OS=Windows 11 (10.0.22621.1413)
+BenchmarkDotNet=v0.13.3, OS=Windows 11 (10.0.22621.1702)
 Intel Core i7-1065G7 CPU 1.30GHz, 1 CPU, 8 logical and 4 physical cores
-.NET SDK=7.0.300-preview.23165.23
-  [Host]     : .NET 6.0.15 (6.0.1523.11507), X64 RyuJIT AVX2
-  DefaultJob : .NET 6.0.15 (6.0.1523.11507), X64 RyuJIT AVX2
+.NET SDK=7.0.300-preview.23179.2
+  [Host]     : .NET 6.0.16 (6.0.1623.17311), X64 RyuJIT AVX2
+  DefaultJob : .NET 6.0.16 (6.0.1623.17311), X64 RyuJIT AVX2
 ```
 
 | Method | Mean    | Error    | StdDev   |
 |------- |--------:|---------:|---------:|
-| Encode | 2.376 s | 0.0446 s | 0.0639 s |
+| Encode | 2.414 s | 0.0303 s | 0.0253 s |
 
 # Typescript implementation
2 changes: 1 addition & 1 deletion Tokenizer_C#/TokenizerLib/TokenizerLib.csproj
@@ -8,7 +8,7 @@
     <Title>Tokenizer</Title>
     <Description>Tokenizer for OpenAI large language models.</Description>
     <LangVersion>8.0</LangVersion>
-    <AssemblyVersion>1.3.0</AssemblyVersion>
+    <AssemblyVersion>1.3.1</AssemblyVersion>
     <FileVersion>$(AssemblyVersion)</FileVersion>
     <Version>$(AssemblyVersion)</Version>
     <Authors>Microsoft</Authors>
32 changes: 22 additions & 10 deletions Tokenizer_C#/TokenizerLib/Utils/LRUCache.cs
@@ -11,6 +11,8 @@ internal class LruCache<TKey, TValue>
     /// </summary>
     public const int DefaultCacheSize = 4096;
 
+    private readonly object _lockObject = new object();
+
     private class CacheItem
     {
         public readonly TKey Key;
@@ -56,17 +58,20 @@ public LruCache(int cacheSize = DefaultCacheSize)
     /// </returns>
     public bool Lookup(TKey key, out TValue value)
     {
-        LinkedListNode<CacheItem> cached;
-        if (_cache.TryGetValue(key, out cached))
+        lock (_lockObject)
         {
-            _lruList.Remove(cached);
-            _lruList.AddFirst(cached);
-            value = cached.Value.Value;
-            return true;
-        }
+            LinkedListNode<CacheItem> cached;
+            if (_cache.TryGetValue(key, out cached))
+            {
+                _lruList.Remove(cached);
+                _lruList.AddFirst(cached);
+                value = cached.Value.Value;
+                return true;
+            }
 
-        value = default!;
-        return false;
+            value = default!;
+            return false;
+        }
     }
 
     protected virtual void OnEviction(TValue evictedValue) { }
@@ -90,6 +95,14 @@ private void EvictIfNeeded()
     public void Add(TKey key, TValue value) => Replace(key, value, out _);
 
     public bool Replace(TKey key, TValue value, out TValue oldValue)
+    {
+        lock (_lockObject)
+        {
+            return ReplaceInternal(key, value, out oldValue);
+        }
+    }
+
+    private bool ReplaceInternal(TKey key, TValue value, out TValue oldValue)
     {
         if (_cache.TryGetValue(key, out LinkedListNode<CacheItem> cached))
         {
@@ -99,7 +112,6 @@ public bool Replace(TKey key, TValue value, out TValue oldValue)
             _lruList.AddFirst(cached);
             return true;
         }
-
         EvictIfNeeded();
         var node = new LinkedListNode<CacheItem>(new CacheItem(key, value));
         _cache[key] = node;
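
Assembled from the added and context lines of the hunks above, the members this commit touches read roughly as follows afterwards; the fields, constructor, CacheItem, OnEviction, EvictIfNeeded, and the parts of ReplaceInternal that fall outside the diff context are unchanged and elided.

```csharp
public bool Lookup(TKey key, out TValue value)
{
    lock (_lockObject)
    {
        LinkedListNode<CacheItem> cached;
        if (_cache.TryGetValue(key, out cached))
        {
            _lruList.Remove(cached);
            _lruList.AddFirst(cached);
            value = cached.Value.Value;
            return true;
        }

        value = default!;
        return false;
    }
}

public void Add(TKey key, TValue value) => Replace(key, value, out _);

public bool Replace(TKey key, TValue value, out TValue oldValue)
{
    lock (_lockObject)
    {
        return ReplaceInternal(key, value, out oldValue);
    }
}

private bool ReplaceInternal(TKey key, TValue value, out TValue oldValue)
{
    if (_cache.TryGetValue(key, out LinkedListNode<CacheItem> cached))
    {
        // ... lines between the two hunks are unchanged and not shown ...
        _lruList.AddFirst(cached);
        return true;
    }
    EvictIfNeeded();
    var node = new LinkedListNode<CacheItem>(new CacheItem(key, value));
    _cache[key] = node;
    // ... remainder of the method is unchanged and not shown ...
}
```

Because Lookup and Replace now share the same _lockObject, list reordering on a cache hit can no longer interleave with another thread's insertion or eviction; the trade-off is that all cache access is serialized through that one lock.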
