
Commit a431792

Author: Loïc
The log levels defined on the llama.cpp and LLamaSharp sides were no longer aligned (issue #995) (#997)

* The log levels defined on the llama.cpp and LLamaSharp sides were no longer aligned (issue #995)
* Handle the Continue log level by reusing the latest log level used
1 parent 5bce923 · commit a431792

File tree: 2 files changed (+95, -12 lines)
Lines changed: 63 additions & 0 deletions (new test file)

@@ -0,0 +1,63 @@
using LLama.Common;
using LLama.Native;
using Microsoft.Extensions.Logging;

namespace LLama.Unittest
{
    public sealed class LLamaContextWithCustomLoggerTests
        : IDisposable
    {
        private sealed class CustomLogger : ILogger
        {
            public IDisposable? BeginScope<TState>(TState state) where TState : notnull => default;

            public void Log<TState>(
                LogLevel logLevel,
                EventId eventId,
                TState state,
                Exception? exception,
                Func<TState, Exception, string> formatter)
            {
            }

            public bool IsEnabled(LogLevel logLevel) => true;
        }

        private readonly LLamaWeights _weights;
        private readonly LLamaContext _context;

        public LLamaContextWithCustomLoggerTests()
        {
            var @params = new ModelParams(Constants.GenerativeModelPath)
            {
                ContextSize = 128,
                GpuLayerCount = Constants.CIGpuLayerCount,
            };

            // This unit test used to fail when loading the weights with such a naive logger set.
            //
            // See https://github.com/SciSharp/LLamaSharp/issues/995
            //
            // So the unit test here doesn't check that the logger is actually used
            // but at least that setting one doesn't crash the weights load.
            NativeLogConfig.llama_log_set(new CustomLogger());

            _weights = LLamaWeights.LoadFromFile(@params);
            _context = _weights.CreateContext(@params);
        }

        public void Dispose()
        {
            _weights.Dispose();
            _context.Dispose();
        }

        [Fact]
        public void CheckProperties()
        {
            Assert.Equal(128u, _context.ContextSize);
            Assert.Equal(2048, _context.EmbeddingSize);
            Assert.Equal(128256, _context.VocabCount);
        }
    }
}
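For reference, here is a minimal sketch of how an application might route native llama.cpp log output through its own Microsoft.Extensions.Logging logger, mirroring what the test constructor above does. The ConsoleLlamaLogger class and the Program entry point are illustrative names, not part of the commit; NativeLogConfig.llama_log_set is the same overload the test calls.

using System;
using LLama.Native;
using Microsoft.Extensions.Logging;

// A minimal console logger for illustration; any ILogger implementation works the same way.
sealed class ConsoleLlamaLogger : ILogger
{
    public IDisposable? BeginScope<TState>(TState state) where TState : notnull => default;

    public bool IsEnabled(LogLevel logLevel) => logLevel >= LogLevel.Information;

    public void Log<TState>(LogLevel logLevel, EventId eventId, TState state,
                            Exception? exception, Func<TState, Exception?, string> formatter)
    {
        // Messages from native llama.cpp arrive here already mapped from
        // LLamaLogLevel to Microsoft.Extensions.Logging.LogLevel.
        Console.WriteLine($"[{logLevel}] {formatter(state, exception)}");
    }
}

static class Program
{
    static void Main()
    {
        // Route native llama.cpp log output through the custom logger.
        // Register it before loading any weights, as the test constructor above does.
        NativeLogConfig.llama_log_set(new ConsoleLlamaLogger());
    }
}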

LLama/Native/LLamaLogLevel.cs

Lines changed: 32 additions & 12 deletions
@@ -4,43 +4,63 @@
 namespace LLama.Native
 {
     /// <summary>
-    /// Severity level of a log message
+    /// Severity level of a log message. This enum should always be aligned with
+    /// the one defined on llama.cpp side at
+    /// https://github.com/ggerganov/llama.cpp/blob/0eb4e12beebabae46d37b78742f4c5d4dbe52dc1/ggml/include/ggml.h#L559
     /// </summary>
     public enum LLamaLogLevel
     {
         /// <summary>
-        /// Logs that highlight when the current flow of execution is stopped due to a failure.
+        /// Logs are never written.
+        /// </summary>
+        None = 0,
+
+        /// <summary>
+        /// Logs that are used for interactive investigation during development.
         /// </summary>
-        Error = 2,
+        Debug = 1,
+
+        /// <summary>
+        /// Logs that track the general flow of the application.
+        /// </summary>
+        Info = 2,
 
         /// <summary>
         /// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
         /// </summary>
         Warning = 3,
 
         /// <summary>
-        /// Logs that track the general flow of the application.
+        /// Logs that highlight when the current flow of execution is stopped due to a failure.
         /// </summary>
-        Info = 4,
+        Error = 4,
 
         /// <summary>
-        /// Logs that are used for interactive investigation during development.
+        /// Continue log level is equivalent to None in the way it is used in llama.cpp.
         /// </summary>
-        Debug = 5,
+        Continue = 5,
     }
 
     internal static class LLamaLogLevelExtensions
     {
+        /// <summary>
+        /// Keeps track of the previous log level to be able to handle the log level <see cref="LLamaLogLevel.Continue"/>.
+        /// </summary>
+        [ThreadStatic] private static LogLevel _previous;
+
         public static LogLevel ToLogLevel(this LLamaLogLevel llama)
         {
-            return (llama) switch
+            _previous = llama switch
             {
-                LLamaLogLevel.Error => LogLevel.Error,
-                LLamaLogLevel.Warning => LogLevel.Warning,
-                LLamaLogLevel.Info => LogLevel.Information,
+                LLamaLogLevel.None => LogLevel.None,
                 LLamaLogLevel.Debug => LogLevel.Debug,
+                LLamaLogLevel.Info => LogLevel.Information,
+                LLamaLogLevel.Warning => LogLevel.Warning,
+                LLamaLogLevel.Error => LogLevel.Error,
+                LLamaLogLevel.Continue => _previous,
                 _ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
             };
+            return _previous;
         }
     }
-}
+}
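To make the Continue handling concrete, here is a small standalone sketch that re-creates the mapping above outside the library (the real LLamaLogLevelExtensions class is internal to LLamaSharp, so this is illustrative only, with the enum redeclared locally): a Continue message is reported at whatever LogLevel the previous message used, so multi-part native log lines keep a consistent severity.

using System;
using Microsoft.Extensions.Logging;

// Local copy of the enum values from the diff above, for illustration only.
enum LLamaLogLevel { None = 0, Debug = 1, Info = 2, Warning = 3, Error = 4, Continue = 5 }

static class Demo
{
    // Remembers the last mapped level so Continue can reuse it, as in the commit.
    [ThreadStatic] private static LogLevel _previous;

    static LogLevel ToLogLevel(LLamaLogLevel llama)
    {
        _previous = llama switch
        {
            LLamaLogLevel.None => LogLevel.None,
            LLamaLogLevel.Debug => LogLevel.Debug,
            LLamaLogLevel.Info => LogLevel.Information,
            LLamaLogLevel.Warning => LogLevel.Warning,
            LLamaLogLevel.Error => LogLevel.Error,
            LLamaLogLevel.Continue => _previous, // reuse whatever level came before
            _ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
        };
        return _previous;
    }

    static void Main()
    {
        Console.WriteLine(ToLogLevel(LLamaLogLevel.Error));    // Error
        Console.WriteLine(ToLogLevel(LLamaLogLevel.Continue)); // Error again: the continuation keeps the previous severity
        Console.WriteLine(ToLogLevel(LLamaLogLevel.Info));     // Information
    }
}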
