@@ -134,6 +134,13 @@ private async Task<bool> IsEnabledInternalAsync()
         );
         _logger.LogDebug("Response status: {response}", response.StatusCode);

+        if (!response.IsSuccessStatusCode)
+        {
+            var errorResponse = await response.Content.ReadAsStringAsync();
+            _logger.LogDebug("LM error: {errorResponse}", errorResponse);
+            return null;
+        }
+
         var res = await response.Content.ReadFromJsonAsync<OllamaLanguageModelCompletionResponse>();
         if (res is null)
         {
@@ -173,7 +180,7 @@ private async Task<bool> IsEnabledInternalAsync()
             return null;
         }

-        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetValue(messages, out var cachedResponse))
+        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetCacheValue(messages, out var cachedResponse))
         {
             _logger.LogDebug("Returning cached response for message: {lastMessage}", messages.Last().Content);
             return cachedResponse;
@@ -221,9 +228,17 @@ private async Task<bool> IsEnabledInternalAsync()
         );
         _logger.LogDebug("Response: {response}", response.StatusCode);

+        if (!response.IsSuccessStatusCode)
+        {
+            var errorResponse = await response.Content.ReadAsStringAsync();
+            _logger.LogDebug("LM error: {errorResponse}", errorResponse);
+            return null;
+        }
+
         var res = await response.Content.ReadFromJsonAsync<OllamaLanguageModelChatCompletionResponse>();
         if (res is null)
         {
+            _logger.LogDebug("Response: null");
             return res;
         }

@@ -240,15 +255,15 @@ private async Task<bool> IsEnabledInternalAsync()

 internal static class OllamaCacheChatCompletionExtensions
 {
-    public static OllamaLanguageModelChatCompletionMessage[]? GetKey(
-        this Dictionary<OllamaLanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
+    public static ILanguageModelChatCompletionMessage[]? GetKey(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages)
     {
         return cache.Keys.FirstOrDefault(k => k.SequenceEqual(messages));
     }

-    public static bool TryGetValue(
-        this Dictionary<OllamaLanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
+    public static bool TryGetCacheValue(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages, out OllamaLanguageModelChatCompletionResponse? value)
     {
         var key = cache.GetKey(messages);
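Note on the extension methods above: with array keys, a plain Dictionary.TryGetValue only hits when the exact same array instance is passed (arrays use reference equality by default), which is why the lookup walks the keys with SequenceEqual; the rename to TryGetCacheValue presumably also keeps the extension from being shadowed by the built-in instance method. A minimal, self-contained sketch of the same idea, using hypothetical stand-in types rather than the Dev Proxy ones:

    using System.Collections.Generic;
    using System.Linq;

    // Hypothetical stand-ins for the message/response types in the PR.
    public interface IChatMessage
    {
        string Role { get; }
        string Content { get; }
    }

    public sealed record ChatMessage(string Role, string Content) : IChatMessage;
    public sealed record ChatResponse(string Text);

    public static class ChatCompletionCacheExtensions
    {
        // Find an existing key whose message sequence is element-wise equal to `messages`.
        public static IChatMessage[]? GetKey(
            this Dictionary<IChatMessage[], ChatResponse> cache,
            IChatMessage[] messages) =>
            cache.Keys.FirstOrDefault(k => k.SequenceEqual(messages));

        // Like Dictionary.TryGetValue, but matches keys by sequence equality
        // instead of array reference equality.
        public static bool TryGetCacheValue(
            this Dictionary<IChatMessage[], ChatResponse> cache,
            IChatMessage[] messages, out ChatResponse? value)
        {
            var key = cache.GetKey(messages);
            value = key is not null ? cache[key] : null;
            return key is not null;
        }
    }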
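The two IsSuccessStatusCode guards added in this diff follow the same shape: log the raw error body and return null rather than attempting to deserialize an error payload as JSON. A minimal sketch of that pattern, with a hypothetical DTO and helper (not the PR's actual types), assuming System.Net.Http.Json:

    using System;
    using System.Net.Http;
    using System.Net.Http.Json;
    using System.Threading.Tasks;

    // Hypothetical response DTO; the PR uses OllamaLanguageModelCompletionResponse.
    internal sealed record CompletionResponse(string? Response);

    internal static class CompletionClientSketch
    {
        public static async Task<CompletionResponse?> GetCompletionAsync(
            HttpClient client, string url, object payload)
        {
            var response = await client.PostAsJsonAsync(url, payload);

            // Bail out on a non-2xx status: log the raw body for diagnostics
            // instead of trying to parse an error page as the expected JSON shape.
            if (!response.IsSuccessStatusCode)
            {
                var error = await response.Content.ReadAsStringAsync();
                Console.Error.WriteLine($"LM error: {error}");
                return null;
            }

            return await response.Content.ReadFromJsonAsync<CompletionResponse>();
        }
    }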