-
Notifications
You must be signed in to change notification settings - Fork 11
Expand file tree
/
Copy pathModelListProvider.cs
More file actions
140 lines (125 loc) · 4.19 KB
/
ModelListProvider.cs
File metadata and controls
140 lines (125 loc) · 4.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
using Hartsy.Extensions.MagicPromptExtension.WebAPI;
using Newtonsoft.Json.Linq;
using SwarmUI.Accounts;
namespace Hartsy.Extensions.MagicPromptExtension;
/// <summary>
/// Supplies the model and instruction dropdown contents for the MagicPrompt UI.
/// Responses from the backend are cached for a short TTL and refreshed with a
/// non-blocking, fire-and-forget background fetch so the UI thread never waits
/// on the network. Until the first fetch completes, callers receive a single
/// "loading///loading" placeholder entry.
/// </summary>
public static class ModelListProvider
{
    // Sentinel "id///name" entry shown in dropdowns until real data arrives.
    private const string LoadingPlaceholder = "loading///loading";
    // Freshness window: once exceeded, the next request triggers a background refresh.
    private static readonly TimeSpan CacheTtl = TimeSpan.FromSeconds(10);
    // Protects _cachedResponse and _cacheTimeUtc; never held across the network call.
    private static readonly object _cacheLock = new();
    // Most recent backend response; null until the first background fetch completes.
    private static JObject _cachedResponse;
    // UTC timestamp of the last cache refresh (compared against CacheTtl).
    private static DateTime _cacheTimeUtc;
    // volatile: written outside the lock (in the background task's finally block),
    // so cross-thread visibility must not rely on _cacheLock alone.
    private static volatile bool _fetchInProgress;
    /// <summary>
    /// Builds the "modelId///displayName" entries for the model dropdown.
    /// </summary>
    /// <param name="session">The SwarmUI session used to query the backend.</param>
    /// <returns>
    /// Formatted model entries, or a single loading placeholder when no successful
    /// response is cached yet (or the response contains no usable models).
    /// </returns>
    public static List<string> GetModelList(Session session)
    {
        var defaultResponse = new List<string> { LoadingPlaceholder };
        try
        {
            var response = GetCachedResponse(session);
            if (response?["success"]?.Value<bool>() != true)
            {
                return defaultResponse;
            }
            if (response["models"] is not JArray models || models.Count == 0)
            {
                return defaultResponse;
            }
            var list = new List<string>(models.Count);
            foreach (var m in models)
            {
                var modelId = m?["model"]?.ToString();
                if (string.IsNullOrWhiteSpace(modelId))
                {
                    // Skip malformed entries that have no usable identifier.
                    continue;
                }
                var name = m?["name"]?.ToString();
                if (string.IsNullOrWhiteSpace(name))
                {
                    // Fall back to the raw id when no display name is provided.
                    name = modelId;
                }
                list.Add($"{modelId}///{name}");
            }
            return list.Count > 0 ? list : defaultResponse;
        }
        catch (Exception ex)
        {
            // Never let a malformed response break the UI, but don't swallow silently:
            // log so failures are diagnosable (matches the background fetch's handling).
            SwarmUI.Utils.Logs.Error($"[MagicPrompt] Failed to build model list: {ex.Message}");
            return defaultResponse;
        }
    }
    /// <summary>
    /// Builds the "key///title" entries for the instruction-preset dropdown from the
    /// cached response's settings.instructions section.
    /// </summary>
    /// <param name="session">The SwarmUI session used to query the backend.</param>
    /// <returns>
    /// Instruction entries (the default "prompt" preset first when present, then any
    /// custom presets), or a single loading placeholder when none are available.
    /// </returns>
    public static List<string> GetInstructionList(Session session)
    {
        var defaultResponse = new List<string> { LoadingPlaceholder };
        try
        {
            var list = new List<string>();
            var response = GetCachedResponse(session);
            var settings = response?["settings"] as JObject;
            var instructions = settings?["instructions"] as JObject;
            var prompt = instructions?["prompt"]?.ToString();
            if (!string.IsNullOrWhiteSpace(prompt))
            {
                list.Add("prompt///Enhance Prompt (Default)");
            }
            if (instructions?["custom"] is JObject custom)
            {
                foreach (var prop in custom.Properties())
                {
                    var title = prop.Value?["title"]?.ToString();
                    if (!string.IsNullOrEmpty(title))
                    {
                        list.Add($"{prop.Name}///{title}");
                    }
                }
            }
            return list.Count > 0 ? list : defaultResponse;
        }
        catch (Exception ex)
        {
            // Same rationale as GetModelList: degrade gracefully, but leave a trace.
            SwarmUI.Utils.Logs.Error($"[MagicPrompt] Failed to build instruction list: {ex.Message}");
            return defaultResponse;
        }
    }
    /// <summary>
    /// Returns the cached backend response, kicking off a non-blocking background
    /// refresh when the cache is missing or stale. May return stale data or null
    /// (null on the very first call, before any fetch has completed).
    /// </summary>
    /// <param name="session">The SwarmUI session passed through to the API call.</param>
    private static JObject GetCachedResponse(Session session)
    {
        lock (_cacheLock)
        {
            if (_cachedResponse != null && DateTime.UtcNow - _cacheTimeUtc < CacheTtl)
            {
                return _cachedResponse;
            }
            // A fetch is already running — return stale data (or null) without blocking
            if (_fetchInProgress)
            {
                return _cachedResponse;
            }
            _fetchInProgress = true;
        }
        // Fire-and-forget: fetch in the background so the UI thread is never blocked
        _ = Task.Run(async () =>
        {
            try
            {
                var response = await LLMAPICalls.GetMagicPromptModels(session);
                lock (_cacheLock)
                {
                    _cachedResponse = response;
                    _cacheTimeUtc = DateTime.UtcNow;
                }
            }
            catch (Exception ex)
            {
                SwarmUI.Utils.Logs.Error($"[MagicPrompt] Background model fetch failed: {ex.Message}");
            }
            finally
            {
                // Runs outside _cacheLock; _fetchInProgress is volatile for visibility.
                _fetchInProgress = false;
            }
        });
        // Return immediately with whatever we have (null on first call → loading placeholder)
        lock (_cacheLock)
        {
            return _cachedResponse;
        }
    }
}