Skip to content

Commit 5c8b71c

Browse files
committed
feat: 发布 v4.7
将构建脚本、GitHub Actions 默认版本前缀和 README 下载链接统一提升至 4.7。 补上本地模型运行时端点解析与下载进度同步,避免本地模式测试与下载状态漂移。 新增端点解析与插件健康检查回归测试。 验证:npx vue-tsc --build;npm run build;dotnet test XUnityToolkit-WebUI.Tests/XUnityToolkit-WebUI.Tests.csproj
1 parent 8185a3f commit 5c8b71c

13 files changed

Lines changed: 329 additions & 35 deletions

File tree

.github/workflows/build.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ jobs:
5858
shell: pwsh
5959
run: |
6060
$v = '${{ inputs.build_version }}'
61-
if (-not $v) { $v = "4.6.$(Get-Date -Format 'yyyyMMddHHmm')" }
61+
if (-not $v) { $v = "4.7.$(Get-Date -Format 'yyyyMMddHHmm')" }
6262
$prefix = $v.Split('.')[0..1] -join '.'
6363
"build_version=$v" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
6464
"version_prefix=$prefix" | Out-File -FilePath $env:GITHUB_OUTPUT -Append

.github/workflows/dep-check.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ jobs:
9191
9292
FORCE="${{ inputs.force_build }}"
9393
if [ -n "$CHANGED" ] || [ "$FORCE" = "true" ]; then
94-
BUILD_VER="4.6.$(date -u +%Y%m%d%H%M)"
94+
BUILD_VER="4.7.$(date -u +%Y%m%d%H%M)"
9595
VER_PREFIX="v$(echo "$BUILD_VER" | cut -d. -f1,2)"
9696
RELEASE_TAG="auto-${BUILD_VER}"
9797

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,9 @@ XUnityToolkit-WebUI 适合需要给 Unity 游戏做机翻增强、术语约束
2828
<!-- DOWNLOAD_LINKS_START -->
2929
| 版本 | ZIP 便携版 | MSI 安装包 |
3030
|------|-----------|-----------|
31-
| **Full(完整版)** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64.msi) |
32-
| **No-LLAMA** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64-no-llama.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64-no-llama.msi) |
33-
| **Lite(精简版)** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64-lite.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.6/XUnityToolkit-WebUI-v4.6-win-x64-lite.msi) |
31+
| **Full(完整版)** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64.msi) |
32+
| **No-LLAMA** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64-no-llama.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64-no-llama.msi) |
33+
| **Lite(精简版)** | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64-lite.zip) | [下载](https://github.com/HanFengRuYue/XUnityToolkit-WebUI/releases/download/v4.7/XUnityToolkit-WebUI-v4.7-win-x64-lite.msi) |
3434
<!-- DOWNLOAD_LINKS_END -->
3535

3636
- **Full**:自包含,附带本地 AI 运行时与常用资源,适合大多数用户。

XUnityToolkit-Vue/src/api/games.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { api } from './client'
2-
import type { Game, UnityGameInfo, XUnityConfig, InstallationStatus, InstallOptions, AppSettings, VersionInfo, DataPathInfo, AddGameResponse, BatchAddResult, ModFrameworkType, TranslationStats, AiEndpointStatus, TmpFontStatus, TermEntry, LlmProvider, ApiEndpointConfig, EndpointTestResult, SteamGridDbSearchResult, SteamGridDbImage, CoverInfo, SteamStoreSearchResult, WebImageResult, GlossaryExtractionStats, LogEntry, AssetExtractionResult, PreTranslationStatus, TranslationEditorData, TranslationEntry, LocalLlmStatus, LocalLlmSettings, GpuInfo, BuiltInModelInfo, LocalModelEntry, LlamaStatus, LocalLlmTestResult, BepInExLogResponse, BepInExLogAnalysis, ScriptTagConfig, ScriptTagPreset, DynamicPatternStore, TermCandidateStore, PluginHealthReport, BepInExPlugin, TranslationEditorTextSource, TranslationRegexEditorData, RegexTranslationRule } from './types'
2+
import type { Game, UnityGameInfo, XUnityConfig, InstallationStatus, InstallOptions, AppSettings, VersionInfo, DataPathInfo, AddGameResponse, BatchAddResult, ModFrameworkType, TranslationStats, AiEndpointStatus, TmpFontStatus, TermEntry, LlmProvider, ApiEndpointConfig, EndpointTestResult, SteamGridDbSearchResult, SteamGridDbImage, CoverInfo, SteamStoreSearchResult, WebImageResult, GlossaryExtractionStats, LogEntry, AssetExtractionResult, PreTranslationStatus, TranslationEditorData, TranslationEntry, LocalLlmStatus, LocalLlmSettings, GpuInfo, BuiltInModelInfo, LocalModelEntry, LlamaStatus, LocalLlmTestResult, LocalLlmDownloadProgress, BepInExLogResponse, BepInExLogAnalysis, ScriptTagConfig, ScriptTagPreset, DynamicPatternStore, TermCandidateStore, PluginHealthReport, BepInExPlugin, TranslationEditorTextSource, TranslationRegexEditorData, RegexTranslationRule } from './types'
33

44
export const gamesApi = {
55
list: () => api.get<Game[]>('/api/games'),
@@ -265,6 +265,7 @@ export const localLlmApi = {
265265
saveSettings: (req: { gpuLayers: number; contextLength: number; kvCacheType?: string }) =>
266266
api.put<void>('/api/local-llm/settings', req),
267267
getCatalog: () => api.get<BuiltInModelInfo[]>('/api/local-llm/catalog'),
268+
getActiveDownloads: () => api.get<LocalLlmDownloadProgress[]>('/api/local-llm/downloads'),
268269
getLlamaStatus: () => api.get<LlamaStatus>('/api/local-llm/llama-status'),
269270
downloadLlama: () => api.post<void>('/api/local-llm/llama-download'),
270271
cancelLlamaDownload: () => api.post<void>('/api/local-llm/llama-download/cancel'),

XUnityToolkit-Vue/src/stores/localLlm.ts

Lines changed: 63 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,52 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
1919
status.value?.state === 'Starting' || status.value?.state === 'Stopping')
2020

2121
let connection: signalR.HubConnection | null = null
22+
let downloadSyncTimer: ReturnType<typeof setInterval> | null = null
23+
let isSyncingDownloads = false
24+
25+
function replaceDownloads(items: LocalLlmDownloadProgress[]) {
26+
downloads.value = new Map(items.map(item => [item.catalogId, item]))
27+
}
28+
29+
function stopDownloadSync() {
30+
if (downloadSyncTimer !== null) {
31+
clearInterval(downloadSyncTimer)
32+
downloadSyncTimer = null
33+
}
34+
}
35+
36+
function ensureDownloadSync() {
37+
if (downloadSyncTimer !== null) return
38+
39+
downloadSyncTimer = setInterval(() => {
40+
void syncDownloadState()
41+
}, 3000)
42+
}
43+
44+
async function syncDownloadState() {
45+
if (isSyncingDownloads) return
46+
isSyncingDownloads = true
47+
48+
try {
49+
const [currentSettings, activeDownloads] = await Promise.all([
50+
localLlmApi.getSettings(),
51+
localLlmApi.getActiveDownloads(),
52+
])
53+
54+
settings.value = currentSettings
55+
replaceDownloads(activeDownloads)
56+
57+
if (activeDownloads.length > 0) {
58+
ensureDownloadSync()
59+
} else {
60+
stopDownloadSync()
61+
}
62+
} catch {
63+
// Keep the last known UI state if reconciliation fails temporarily.
64+
} finally {
65+
isSyncingDownloads = false
66+
}
67+
}
2268

2369
async function connect() {
2470
if (connection && connection.state !== signalR.HubConnectionState.Disconnected) return
@@ -36,16 +82,15 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
3682
if (progress.done || progress.error) {
3783
downloads.value.delete(progress.catalogId)
3884
downloads.value = new Map(downloads.value)
39-
if (progress.done && !progress.error) {
40-
fetchModels()
41-
}
85+
void syncDownloadState()
4286
} else if (progress.paused) {
4387
downloads.value.delete(progress.catalogId)
4488
downloads.value = new Map(downloads.value)
45-
fetchSettings()
89+
void syncDownloadState()
4690
} else {
4791
downloads.value.set(progress.catalogId, progress)
4892
downloads.value = new Map(downloads.value)
93+
ensureDownloadSync()
4994
}
5095
})
5196

@@ -62,10 +107,12 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
62107

63108
connection.onreconnected(async () => {
64109
try { await connection?.invoke('JoinLocalLlmGroup') } catch { /* ignore */ }
110+
void syncDownloadState()
65111
})
66112

67113
await connection.start()
68114
await connection.invoke('JoinLocalLlmGroup')
115+
await syncDownloadState()
69116
}
70117

71118
async function disconnect() {
@@ -74,6 +121,7 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
74121
await connection.stop()
75122
connection = null
76123
}
124+
stopDownloadSync()
77125
}
78126

79127
async function fetchStatus() {
@@ -117,6 +165,15 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
117165

118166
async function downloadModel(catalogId: string) {
119167
await localLlmApi.downloadModel(catalogId)
168+
downloads.value.set(catalogId, {
169+
catalogId,
170+
bytesDownloaded: 0,
171+
totalBytes: 0,
172+
speedBytesPerSec: 0,
173+
done: false,
174+
})
175+
downloads.value = new Map(downloads.value)
176+
ensureDownloadSync()
120177
}
121178

122179
async function pauseDownload(catalogId: string) {
@@ -127,7 +184,7 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
127184
await localLlmApi.cancelDownload(catalogId)
128185
downloads.value.delete(catalogId)
129186
downloads.value = new Map(downloads.value)
130-
await fetchSettings()
187+
await syncDownloadState()
131188
}
132189

133190
async function downloadLlama() {
@@ -148,7 +205,7 @@ export const useLocalLlmStore = defineStore('localLlm', () => {
148205
status, settings, gpus, catalog, downloads, llamaStatus, llamaDownload,
149206
isRunning, isStarting, isBusy,
150207
connect, disconnect,
151-
fetchStatus, fetchSettings, fetchGpus, refreshGpus, fetchCatalog, fetchLlamaStatus, fetchModels,
208+
fetchStatus, fetchSettings, fetchGpus, refreshGpus, fetchCatalog, fetchLlamaStatus, fetchModels, syncDownloadState,
152209
startServer, stopServer, downloadModel, pauseDownload, cancelDownload,
153210
downloadLlama, cancelLlamaDownload, retryLlamaDownload,
154211
}
Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
using XUnityToolkit_WebUI.Models;
2+
using XUnityToolkit_WebUI.Services;
3+
using Xunit;
4+
5+
namespace XUnityToolkit_WebUI.Tests.Services;
6+
7+
public sealed class LlmEndpointResolverTests
{
    // Single factory keeps endpoint construction consistent across tests.
    private static ApiEndpointConfig MakeEndpoint(
        string id, string name, LlmProvider provider, string baseUrl,
        string apiKey, string model, int priority) => new()
    {
        Id = id,
        Name = name,
        Provider = provider,
        ApiBaseUrl = baseUrl,
        ApiKey = apiKey,
        ModelName = model,
        Priority = priority,
        Enabled = true
    };

    [Fact]
    public void BuildEffectiveEndpoints_ReplacesPersistedLocalEndpointInLocalMode()
    {
        // Persisted settings still reference a stale local port and no model.
        var settings = new AiTranslationSettings
        {
            ActiveMode = "local",
            Endpoints =
            [
                MakeEndpoint("local-1", "本地模型", LlmProvider.Custom,
                    "http://127.0.0.1:58381/v1", "local", "", 8)
            ]
        };

        // The running server reports the actual port and loaded model.
        var runtime = MakeEndpoint("local-1", "本地模型", LlmProvider.Custom,
            "http://127.0.0.1:53266/v1", "local", "Qwen3.5-9B-Q4_K_M.gguf", 8);

        var resolved = LlmEndpointResolver.BuildEffectiveEndpoints(settings, runtime);

        // The runtime endpoint must win over the persisted entry.
        var only = Assert.Single(resolved);
        Assert.Equal("http://127.0.0.1:53266/v1", only.ApiBaseUrl);
        Assert.Equal("Qwen3.5-9B-Q4_K_M.gguf", only.ModelName);
    }

    [Fact]
    public void BuildEffectiveEndpoints_PreservesConfiguredCloudEndpointsOutsideLocalMode()
    {
        var settings = new AiTranslationSettings
        {
            ActiveMode = "cloud",
            Endpoints =
            [
                MakeEndpoint("cloud-1", "云端", LlmProvider.OpenAI,
                    "https://api.openai.com/v1", "secret", "gpt-4o-mini", 5)
            ]
        };

        var resolved = LlmEndpointResolver.BuildEffectiveEndpoints(settings, runtimeLocalEndpoint: null);

        // Cloud configuration passes through untouched when not in local mode.
        var only = Assert.Single(resolved);
        Assert.Equal("https://api.openai.com/v1", only.ApiBaseUrl);
        Assert.Equal("gpt-4o-mini", only.ModelName);
    }
}
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
using System.Reflection;
2+
using XUnityToolkit_WebUI.Models;
3+
using XUnityToolkit_WebUI.Services;
4+
using Xunit;
5+
6+
namespace XUnityToolkit_WebUI.Tests.Services;
7+
8+
public sealed class PluginHealthCheckServiceTests
{
    [Fact]
    public void CheckLogErrors_DoesNotTreatGameIdContaining507AsApiFailure()
    {
        // A hex game id that happens to contain "507" must not be flagged as
        // an HTTP 5xx failure by the log scanner.
        string[] logLines =
        [
            "[Info : Console] [LLMTranslate] 游戏 ID: fb507f09a2f046a29c02b1583632aca4",
            "[Info : Console] [LLMTranslate] 连通性测试已发送: http://127.0.0.1:51821/api/translate/ping?gameId=fb507f09a2f046a29c02b1583632aca4"
        ];

        var results = InvokeCheckLogErrors(logLines);

        var item = Assert.Single(results);
        Assert.Equal("logErrors", item.Id);
        Assert.Equal(HealthStatus.Healthy, item.Status);
        Assert.Null(item.Details);
    }

    [Fact]
    public void CheckLogErrors_DetectsExplicit502EndpointFailure()
    {
        string[] logLines =
        [
            "[Info : Console] [LLMTranslate] [错误] API 调用失败: HTTP 502 Bad Gateway"
        ];

        var results = InvokeCheckLogErrors(logLines);

        var item = Assert.Single(results);
        Assert.Equal("logErrors", item.Id);
        Assert.Equal(HealthStatus.Warning, item.Status);
        var failure = Assert.Single(item.Details!);
        Assert.Equal("API 调用失败", failure.Category);
    }

    // Reflection bridge into the private static
    // PluginHealthCheckService.CheckLogErrors(checks, lines, <third arg>).
    private static List<HealthCheckItem> InvokeCheckLogErrors(string[] lines)
    {
        var collected = new List<HealthCheckItem>();
        var target = typeof(PluginHealthCheckService).GetMethod(
            "CheckLogErrors",
            BindingFlags.NonPublic | BindingFlags.Static);

        Assert.NotNull(target);
        target!.Invoke(null, [collected, lines, null]);
        return collected;
    }
}

XUnityToolkit-WebUI/Endpoints/LocalLlmEndpoints.cs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,9 @@ public static void MapLocalLlmEndpoints(this WebApplication app)
3333
group.MapGet("/catalog", () =>
3434
Results.Ok(ApiResult<IReadOnlyList<BuiltInModelInfo>>.Ok(BuiltInModelCatalog.Models)));
3535

36+
group.MapGet("/downloads", (LocalLlmService svc) =>
37+
Results.Ok(ApiResult<IReadOnlyList<LocalLlmDownloadProgress>>.Ok(svc.GetActiveDownloads())));
38+
3639
// ── llama.cpp binary status ──
3740

3841
group.MapGet("/llama-status", async (LocalLlmService svc, CancellationToken ct) =>
@@ -51,11 +54,9 @@ public static void MapLocalLlmEndpoints(this WebApplication app)
5154
if (status.State != LocalLlmServerState.Running)
5255
return Results.BadRequest(ApiResult.Fail("本地 AI 未运行"));
5356

54-
// Find the auto-registered local endpoint in AiTranslationSettings
55-
var llmSettings = await localSvc.LoadSettingsAsync(ct);
57+
// Always test against the running server's current runtime endpoint.
5658
var appSettings = await settingsSvc.GetAsync(ct);
57-
var localEndpoint = appSettings.AiTranslation.Endpoints
58-
.FirstOrDefault(e => e.Id == llmSettings.EndpointId);
59+
var localEndpoint = await localSvc.GetRuntimeEndpointAsync(ct);
5960
if (localEndpoint is null)
6061
return Results.BadRequest(ApiResult.Fail("未找到本地 AI 端点配置"));
6162

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
using XUnityToolkit_WebUI.Models;
2+
3+
namespace XUnityToolkit_WebUI.Services;
4+
5+
internal static class LlmEndpointResolver
{
    /// <summary>
    /// Builds the ordered endpoint list the translation pipeline should use.
    /// Starts from the persisted endpoints that are enabled and have a
    /// non-blank API key; when <paramref name="settings"/> is in "local" mode
    /// and a runtime local endpoint is available, any persisted local entry
    /// is dropped and the runtime endpoint is placed first.
    /// </summary>
    /// <param name="settings">Persisted AI translation settings.</param>
    /// <param name="runtimeLocalEndpoint">
    /// Endpoint describing the currently running local model server, or
    /// <c>null</c> when no local server endpoint could be resolved.
    /// </param>
    /// <returns>Defensive copies — callers may mutate the result freely.</returns>
    public static List<ApiEndpointConfig> BuildEffectiveEndpoints(
        AiTranslationSettings settings,
        ApiEndpointConfig? runtimeLocalEndpoint)
    {
        var effective = new List<ApiEndpointConfig>();
        foreach (var candidate in settings.Endpoints)
        {
            if (candidate.Enabled && !string.IsNullOrWhiteSpace(candidate.ApiKey))
                effective.Add(Clone(candidate));
        }

        var localMode = string.Equals(settings.ActiveMode, "local", StringComparison.OrdinalIgnoreCase);
        if (!localMode || runtimeLocalEndpoint is null)
            return effective;

        // Remove any persisted copy of the local endpoint — matched either by
        // id or by the sentinel "local" API key — so the runtime-resolved
        // endpoint (current port, loaded model) is authoritative.
        effective.RemoveAll(candidate =>
            string.Equals(candidate.Id, runtimeLocalEndpoint.Id, StringComparison.OrdinalIgnoreCase)
            || string.Equals(candidate.ApiKey, "local", StringComparison.OrdinalIgnoreCase));

        effective.Insert(0, Clone(runtimeLocalEndpoint));
        return effective;
    }

    // Shallow copy so the effective list never aliases the persisted
    // settings instances.
    private static ApiEndpointConfig Clone(ApiEndpointConfig source) => new()
    {
        Id = source.Id,
        Name = source.Name,
        Provider = source.Provider,
        ApiBaseUrl = source.ApiBaseUrl,
        ApiKey = source.ApiKey,
        ModelName = source.ModelName,
        Priority = source.Priority,
        Enabled = source.Enabled
    };
}

XUnityToolkit-WebUI/Services/LlmTranslationService.cs

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ internal sealed record TranslationBatchResult(IList<string> Translations, IReadO
1616
public sealed class LlmTranslationService(
1717
IHttpClientFactory httpClientFactory,
1818
AppSettingsService settingsService,
19+
LocalLlmService localLlmService,
1920
TermService termService,
2021
TermMatchingService termMatchingService,
2122
TermAuditService termAuditService,
@@ -250,8 +251,13 @@ internal async Task<TranslationBatchResult> TranslateDetailedAsync(
250251

251252
// Local mode restrictions
252253
var isLocalMode = string.Equals(ai.ActiveMode, "local", StringComparison.OrdinalIgnoreCase);
254+
var runtimeLocalEndpoint = isLocalMode
255+
? await localLlmService.GetRuntimeEndpointAsync(ct)
256+
: null;
257+
var enabledEndpoints = LlmEndpointResolver.BuildEffectiveEndpoints(ai, runtimeLocalEndpoint);
258+
if (isLocalMode && runtimeLocalEndpoint is null)
259+
throw new InvalidOperationException("本地模型未启动,请先在本地 AI 页面启动模型");
253260

254-
var enabledEndpoints = ai.Endpoints.Where(e => e.Enabled && !string.IsNullOrWhiteSpace(e.ApiKey)).ToList();
255261
if (enabledEndpoints.Count == 0)
256262
{
257263
logger.LogWarning("没有可用的 AI 提供商: 总端点数={Total}, 各端点状态=[{Details}]",

0 commit comments

Comments
 (0)