From 51664619f548b9ac718b23fc14bf5297b2268ea4 Mon Sep 17 00:00:00 2001 From: momen Date: Wed, 15 Apr 2026 10:55:47 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E6=9B=B4=E6=96=B0=E5=99=A8?= =?UTF-8?q?=E6=89=93=E5=BC=80=E5=8D=A1=E9=A1=BF=EF=BC=8C=E4=BC=98=E5=8C=96?= =?UTF-8?q?=E6=9B=B4=E6=96=B0=E4=B8=AD=E5=BF=83?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../plans/2026-04-14-gitee-issue-bot.md | 996 +++++++++++ ...nstalled-apps-and-update-center-loading.md | 1571 +++++++++++++++++ .../2026-04-14-gitee-issue-bot-design.md | 365 ++++ ...d-apps-and-update-center-loading-design.md | 276 +++ 4 files changed, 3208 insertions(+) create mode 100644 docs/superpowers/plans/2026-04-14-gitee-issue-bot.md create mode 100644 docs/superpowers/plans/2026-04-15-installed-apps-and-update-center-loading.md create mode 100644 docs/superpowers/specs/2026-04-14-gitee-issue-bot-design.md create mode 100644 docs/superpowers/specs/2026-04-15-installed-apps-and-update-center-loading-design.md diff --git a/docs/superpowers/plans/2026-04-14-gitee-issue-bot.md b/docs/superpowers/plans/2026-04-14-gitee-issue-bot.md new file mode 100644 index 00000000..332fc5b2 --- /dev/null +++ b/docs/superpowers/plans/2026-04-14-gitee-issue-bot.md @@ -0,0 +1,996 @@ +# Gitee Issue Bot Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build a user-level `systemd`-driven issue bot that checks Spark Store Gitee issues every 6 hours, stores one ranked candidate locally, and only launches a new opencode window after explicit manual approval. 
+ +**Architecture:** Keep the implementation outside the Electron runtime by adding a small TypeScript script set under `scripts/issue-bot/`, with focused helpers for Gitee fetching, ranking, local state, approval, and opencode launching. Use user-cache state storage plus `systemd --user` service/timer units, and pass the `~/Desktop/spark-store` + `Erotica`-based worktree requirement into the generated opencode prompt instead of creating worktrees during polling. + +**Tech Stack:** Node.js 22 with `--experimental-strip-types`, TypeScript strict mode, built-in `fetch`, Vitest, npm scripts, `systemd --user` units. + +--- + +## File Map + +- Create: `scripts/issue-bot/lib/types.ts` — shared strict TypeScript types for normalized issues, ranking results, and persisted state. +- Create: `scripts/issue-bot/lib/state.ts` — state file path resolution, JSON load/save, corruption backup, and default-state initialization. +- Create: `scripts/issue-bot/lib/ranking.ts` — issue filtering, heuristic scoring, and candidate selection. +- Create: `scripts/issue-bot/lib/gitee.ts` — fetch open issues from Gitee API first and normalize the response. +- Create: `scripts/issue-bot/lib/opencode.ts` — build approval prompt and spawn a configured opencode command. +- Create: `scripts/issue-bot/check-issues.ts` — one-shot polling entrypoint that updates `currentCandidate`. +- Create: `scripts/issue-bot/approve-issue.ts` — manual approval entrypoint that launches opencode and marks the approved issue. +- Create: `src/__tests__/unit/issue-bot/state.test.ts` — state initialization, backup, and save/load tests. +- Create: `src/__tests__/unit/issue-bot/ranking.test.ts` — scoring, filtering, and candidate selection tests. +- Create: `src/__tests__/unit/issue-bot/check-issues.test.ts` — polling orchestration tests using mocked fetch/state. +- Create: `src/__tests__/unit/issue-bot/approve-issue.test.ts` — approval and opencode-launch orchestration tests. 
+- Create: `src/__tests__/unit/issue-bot/packaging.test.ts` — npm script and systemd unit smoke tests. +- Modify: `package.json` — add `issue-bot:check` and `issue-bot:approve` scripts. +- Modify: `tsconfig.node.json` — include `scripts` for type-check coverage in build tooling. +- Create: `extras/systemd/spark-store-issue-bot.service` — `oneshot` user service for polling. +- Create: `extras/systemd/spark-store-issue-bot.timer` — six-hour persistent timer. + +### Task 1: Add Shared Types and Local State Storage + +**Files:** + +- Create: `scripts/issue-bot/lib/types.ts` +- Create: `scripts/issue-bot/lib/state.ts` +- Test: `src/__tests__/unit/issue-bot/state.test.ts` + +- [ ] **Step 1: Write the failing test** + +```ts +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; + +import { afterEach, describe, expect, it, vi } from "vitest"; + +import { + createDefaultIssueBotState, + getIssueBotStatePath, + loadIssueBotState, + saveIssueBotState, +} from "../../../../scripts/issue-bot/lib/state"; + +describe("issue-bot state", () => { + afterEach(() => { + vi.restoreAllMocks(); + delete process.env.XDG_CACHE_HOME; + }); + + it("uses the XDG cache directory when available", () => { + process.env.XDG_CACHE_HOME = "/tmp/spark-cache"; + + expect(getIssueBotStatePath()).toBe( + "/tmp/spark-cache/spark-store/issue-bot/state.json", + ); + }); + + it("returns a default state when the file does not exist", () => { + vi.spyOn(fs, "existsSync").mockReturnValue(false); + + expect(loadIssueBotState()).toEqual(createDefaultIssueBotState()); + }); + + it("backs up invalid JSON and resets to the default state", () => { + vi.spyOn(fs, "existsSync").mockReturnValue(true); + vi.spyOn(fs, "readFileSync").mockReturnValue("not-json"); + const renameSync = vi.spyOn(fs, "renameSync").mockImplementation(() => {}); + + expect(loadIssueBotState()).toEqual(createDefaultIssueBotState()); + expect(renameSync).toHaveBeenCalledWith( + 
expect.stringContaining("state.json"), + expect.stringContaining("state.json.bak-"), + ); + }); + + it("creates parent directories before saving state", () => { + const mkdirSync = vi + .spyOn(fs, "mkdirSync") + .mockImplementation(() => undefined); + const writeFileSync = vi + .spyOn(fs, "writeFileSync") + .mockImplementation(() => undefined); + + saveIssueBotState({ + ...createDefaultIssueBotState(), + lastRunStatus: "success", + lastRunMessage: "candidate updated", + }); + + expect(mkdirSync).toHaveBeenCalledWith( + path.dirname(getIssueBotStatePath()), + { recursive: true }, + ); + expect(writeFileSync).toHaveBeenCalledWith( + getIssueBotStatePath(), + expect.stringContaining('"lastRunStatus": "success"'), + "utf8", + ); + }); +}); +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/state.test.ts` + +Expected: FAIL with `Cannot find module '../../../../scripts/issue-bot/lib/state'`. + +- [ ] **Step 3: Write minimal implementation** + +```ts +// scripts/issue-bot/lib/types.ts +export interface NormalizedIssue { + id: number; + number: string; + title: string; + url: string; + state: "open" | "closed"; + createdAt: string; + updatedAt: string; + labels: string[]; + bodyPreview: string; +} + +export interface RankedIssue extends NormalizedIssue { + score: number; + rankingReasons: string[]; +} + +export interface ApprovedIssue { + id: number; + title: string; + url: string; + approvedAt: string; +} + +export interface IssueBotState { + currentCandidate: RankedIssue | null; + approvedIssue: ApprovedIssue | null; + seenIssueIds: number[]; + lastRunAt: string | null; + lastRunStatus: "idle" | "success" | "network-error" | "parse-error"; + lastRunMessage: string | null; +} +``` + +```ts +// scripts/issue-bot/lib/state.ts +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; + +import type { IssueBotState } from "./types"; + +export const createDefaultIssueBotState = (): 
IssueBotState => ({
+  currentCandidate: null,
+  approvedIssue: null,
+  seenIssueIds: [],
+  lastRunAt: null,
+  lastRunStatus: "idle",
+  lastRunMessage: null,
+});
+
+export const getIssueBotStatePath = (): string => {
+  const cacheRoot =
+    process.env.XDG_CACHE_HOME || path.join(os.homedir(), ".cache");
+  return path.join(cacheRoot, "spark-store", "issue-bot", "state.json");
+};
+
+export const loadIssueBotState = (): IssueBotState => {
+  const filePath = getIssueBotStatePath();
+  if (!fs.existsSync(filePath)) return createDefaultIssueBotState();
+
+  try {
+    const raw = fs.readFileSync(filePath, "utf8");
+    return {
+      ...createDefaultIssueBotState(),
+      ...(JSON.parse(raw) as Partial<IssueBotState>),
+    };
+  } catch {
+    const backupPath = `${filePath}.bak-${Date.now()}`;
+    fs.renameSync(filePath, backupPath);
+    return createDefaultIssueBotState();
+  }
+};
+
+export const saveIssueBotState = (state: IssueBotState): void => {
+  const filePath = getIssueBotStatePath();
+  fs.mkdirSync(path.dirname(filePath), { recursive: true });
+  fs.writeFileSync(filePath, `${JSON.stringify(state, null, 2)}\n`, "utf8");
+};
+```
+
+- [ ] **Step 4: Run test to verify it passes**
+
+Run: `npm run test -- --run src/__tests__/unit/issue-bot/state.test.ts`
+
+Expected: PASS with 4 tests passed.
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add scripts/issue-bot/lib/types.ts scripts/issue-bot/lib/state.ts src/__tests__/unit/issue-bot/state.test.ts
+git commit -m "feat(issue-bot): add local state storage"
+```
+
+### Task 2: Add Ranking Rules and Candidate Selection
+
+**Files:**
+
+- Create: `scripts/issue-bot/lib/ranking.ts`
+- Test: `src/__tests__/unit/issue-bot/ranking.test.ts`
+
+- [ ] **Step 1: Write the failing test**
+
+```ts
+import { describe, expect, it } from "vitest";
+
+import {
+  rankIssues,
+  selectTopIssueCandidate,
+} from "../../../../scripts/issue-bot/lib/ranking";
+import type { NormalizedIssue } from "../../../../scripts/issue-bot/lib/types";
+
+const makeIssue = (overrides: Partial<NormalizedIssue>): NormalizedIssue => ({
+  id: 1,
+  number: "I123",
+  title: "示例 issue",
+  url: "https://gitee.com/spark-store-project/spark-store/issues/I123",
+  state: "open",
+  createdAt: "2026-04-14T00:00:00.000Z",
+  updatedAt: "2026-04-14T00:00:00.000Z",
+  labels: [],
+  bodyPreview: "用户反馈应用无法安装,并附上了复现步骤和日志。",
+  ...overrides,
+});
+
+describe("issue-bot ranking", () => {
+  it("prioritizes install failures with actionable details", () => {
+    const ranked = rankIssues([
+      makeIssue({ id: 1, title: "应用无法安装,附日志" }),
+      makeIssue({ id: 2, title: "建议增加分类筛选", bodyPreview: "功能建议" }),
+    ]);
+
+    expect(ranked[0].id).toBe(1);
+    expect(ranked[0].score).toBeGreaterThan(ranked[1].score);
+    expect(ranked[0].rankingReasons).toContain(
+      "contains high-impact keyword: 无法安装",
+    );
+  });
+
+  it("filters out closed issues and already-approved issues", () => {
+    const candidate = selectTopIssueCandidate(
+      [
+        makeIssue({ id: 3, state: "closed", title: "已关闭问题" }),
+        makeIssue({ id: 4, title: "白屏并卡死" }),
+      ],
+      { approvedIssueId: 4 },
+    );
+
+    expect(candidate).toBeNull();
+  });
+
+  it("prefers more recently updated issues when scores otherwise match", () => {
+    const candidate = selectTopIssueCandidate(
+      [
+        makeIssue({
+          id: 5,
+          title: "启动白屏",
+          updatedAt: "2026-04-14T08:00:00.000Z",
+        }),
+ makeIssue({ + id: 6, + title: "启动白屏", + updatedAt: "2026-04-14T09:00:00.000Z", + }), + ], + { approvedIssueId: null }, + ); + + expect(candidate?.id).toBe(6); + }); +}); +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/ranking.test.ts` + +Expected: FAIL with `Cannot find module '../../../../scripts/issue-bot/lib/ranking'`. + +- [ ] **Step 3: Write minimal implementation** + +```ts +// scripts/issue-bot/lib/ranking.ts +import type { NormalizedIssue, RankedIssue } from "./types"; + +const HIGH_IMPACT_KEYWORDS = [ + "崩溃", + "打不开", + "无法安装", + "升级失败", + "卡死", + "白屏", + "闪退", +]; + +const CORE_FLOW_KEYWORDS = ["安装", "卸载", "更新", "启动", "搜索", "加载"]; + +const hasActionableDetail = (issue: NormalizedIssue): boolean => + /复现|日志|截图|error|错误/i.test(issue.bodyPreview); + +const scoreIssue = (issue: NormalizedIssue): RankedIssue => { + const reasons: string[] = []; + let score = 0; + const haystack = `${issue.title}\n${issue.bodyPreview}`; + + for (const keyword of HIGH_IMPACT_KEYWORDS) { + if (haystack.includes(keyword)) { + score += 10; + reasons.push(`contains high-impact keyword: ${keyword}`); + } + } + + for (const keyword of CORE_FLOW_KEYWORDS) { + if (haystack.includes(keyword)) { + score += 4; + reasons.push(`touches core flow: ${keyword}`); + break; + } + } + + if (hasActionableDetail(issue)) { + score += 6; + reasons.push("includes actionable detail"); + } + + if (/建议|需求|希望/.test(haystack)) { + score -= 4; + reasons.push("looks like feature discussion"); + } + + return { + ...issue, + score, + rankingReasons: reasons, + }; +}; + +export const rankIssues = (issues: NormalizedIssue[]): RankedIssue[] => + [...issues] + .filter((issue) => issue.state === "open") + .map(scoreIssue) + .sort((left, right) => { + if (right.score !== left.score) return right.score - left.score; + return Date.parse(right.updatedAt) - Date.parse(left.updatedAt); + }); + +export const selectTopIssueCandidate = ( + issues: 
NormalizedIssue[], + options: { approvedIssueId: number | null }, +): RankedIssue | null => { + const ranked = rankIssues(issues).filter( + (issue) => issue.id !== options.approvedIssueId, + ); + return ranked[0] ?? null; +}; +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/ranking.test.ts` + +Expected: PASS with 3 tests passed. + +- [ ] **Step 5: Commit** + +```bash +git add scripts/issue-bot/lib/ranking.ts src/__tests__/unit/issue-bot/ranking.test.ts +git commit -m "feat(issue-bot): rank candidate issues" +``` + +### Task 3: Add Gitee Fetching and Polling Entrypoint + +**Files:** + +- Create: `scripts/issue-bot/lib/gitee.ts` +- Create: `scripts/issue-bot/check-issues.ts` +- Test: `src/__tests__/unit/issue-bot/check-issues.test.ts` + +- [ ] **Step 1: Write the failing test** + +```ts +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import type { + IssueBotState, + NormalizedIssue, +} from "../../../../scripts/issue-bot/lib/types"; + +const loadState = vi.fn(); +const saveState = vi.fn(); +const listOpenIssues = vi.fn(); + +vi.mock("../../../../scripts/issue-bot/lib/state", () => ({ + createDefaultIssueBotState: () => ({ + currentCandidate: null, + approvedIssue: null, + seenIssueIds: [], + lastRunAt: null, + lastRunStatus: "idle", + lastRunMessage: null, + }), + loadIssueBotState: loadState, + saveIssueBotState: saveState, +})); + +vi.mock("../../../../scripts/issue-bot/lib/gitee", () => ({ + listOpenIssues, +})); + +describe("check-issues", () => { + beforeEach(() => { + vi.resetModules(); + loadState.mockReset(); + saveState.mockReset(); + listOpenIssues.mockReset(); + }); + + it("stores the top-ranked issue candidate", async () => { + const baseState: IssueBotState = { + currentCandidate: null, + approvedIssue: null, + seenIssueIds: [], + lastRunAt: null, + lastRunStatus: "idle", + lastRunMessage: null, + }; + + loadState.mockReturnValue(baseState); + 
listOpenIssues.mockResolvedValue([ + { + id: 10, + number: "I10", + title: "应用无法安装并白屏", + url: "https://gitee.com/spark-store-project/spark-store/issues/I10", + state: "open", + createdAt: "2026-04-14T00:00:00.000Z", + updatedAt: "2026-04-14T09:00:00.000Z", + labels: ["bug"], + bodyPreview: "复现步骤:1. 打开商店 2. 点击安装。附日志。", + }, + ] satisfies NormalizedIssue[]); + + const { runIssueBotCheck } = + await import("../../../../scripts/issue-bot/check-issues"); + await runIssueBotCheck(); + + expect(saveState).toHaveBeenCalledWith( + expect.objectContaining({ + currentCandidate: expect.objectContaining({ + id: 10, + title: "应用无法安装并白屏", + }), + lastRunStatus: "success", + }), + ); + }); + + it("keeps the previous candidate when fetching issues fails", async () => { + loadState.mockReturnValue({ + currentCandidate: { + id: 99, + number: "I99", + title: "旧候选", + url: "https://gitee.com/spark-store-project/spark-store/issues/I99", + state: "open", + createdAt: "2026-04-14T00:00:00.000Z", + updatedAt: "2026-04-14T00:00:00.000Z", + labels: [], + bodyPreview: "旧摘要", + score: 12, + rankingReasons: ["legacy candidate"], + }, + approvedIssue: null, + seenIssueIds: [], + lastRunAt: null, + lastRunStatus: "idle", + lastRunMessage: null, + }); + listOpenIssues.mockRejectedValue(new Error("network down")); + + const { runIssueBotCheck } = + await import("../../../../scripts/issue-bot/check-issues"); + await runIssueBotCheck(); + + expect(saveState).toHaveBeenCalledWith( + expect.objectContaining({ + currentCandidate: expect.objectContaining({ id: 99 }), + lastRunStatus: "network-error", + lastRunMessage: "network down", + }), + ); + }); +}); +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/check-issues.test.ts` + +Expected: FAIL with `Cannot find module '../../../../scripts/issue-bot/check-issues'`. 
+
+- [ ] **Step 3: Write minimal implementation**
+
+```ts
+// scripts/issue-bot/lib/gitee.ts
+import type { NormalizedIssue } from "./types";
+
+interface GiteeIssueApiResponse {
+  id: number;
+  number: string;
+  title: string;
+  state: "open" | "closed";
+  created_at: string;
+  updated_at: string;
+  body?: string;
+  html_url: string;
+  labels?: Array<{ name?: string }>;
+}
+
+const GITEE_ISSUES_API_URL =
+  "https://gitee.com/api/v5/repos/spark-store-project/spark-store/issues?state=open&sort=updated&direction=desc&page=1&per_page=50";
+
+export const listOpenIssues = async (): Promise<NormalizedIssue[]> => {
+  const response = await fetch(GITEE_ISSUES_API_URL);
+  if (!response.ok) {
+    throw new Error(`Gitee request failed: ${response.status}`);
+  }
+
+  const payload = (await response.json()) as GiteeIssueApiResponse[];
+  return payload.map((issue) => ({
+    id: issue.id,
+    number: issue.number,
+    title: issue.title,
+    url: issue.html_url,
+    state: issue.state,
+    createdAt: issue.created_at,
+    updatedAt: issue.updated_at,
+    labels: (issue.labels || [])
+      .map((label) => label.name?.trim() || "")
+      .filter((label) => label.length > 0),
+    bodyPreview: (issue.body || "").slice(0, 500),
+  }));
+};
+```
+
+```ts
+// scripts/issue-bot/check-issues.ts
+import { listOpenIssues } from "./lib/gitee";
+import { selectTopIssueCandidate } from "./lib/ranking";
+import { loadIssueBotState, saveIssueBotState } from "./lib/state";
+
+export const runIssueBotCheck = async (): Promise<void> => {
+  const state = loadIssueBotState();
+  const now = new Date().toISOString();
+
+  try {
+    const issues = await listOpenIssues();
+    const candidate = selectTopIssueCandidate(issues, {
+      approvedIssueId: state.approvedIssue?.id ?? null,
+    });
+
+    saveIssueBotState({
+      ...state,
+      currentCandidate: candidate,
+      seenIssueIds: candidate
+        ? Array.from(new Set([...state.seenIssueIds, candidate.id]))
+        : state.seenIssueIds,
+      lastRunAt: now,
+      lastRunStatus: "success",
+      lastRunMessage: candidate
+        ? 
`candidate updated: ${candidate.title}` + : "no candidate issues found", + }); + } catch (error) { + saveIssueBotState({ + ...state, + lastRunAt: now, + lastRunStatus: "network-error", + lastRunMessage: error instanceof Error ? error.message : String(error), + }); + } +}; + +if (import.meta.url === `file://${process.argv[1]}`) { + runIssueBotCheck().catch((error) => { + console.error(error); + process.exitCode = 1; + }); +} +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/check-issues.test.ts` + +Expected: PASS with 2 tests passed. + +- [ ] **Step 5: Commit** + +```bash +git add scripts/issue-bot/lib/gitee.ts scripts/issue-bot/check-issues.ts src/__tests__/unit/issue-bot/check-issues.test.ts +git commit -m "feat(issue-bot): poll gitee issues" +``` + +### Task 4: Add Opencode Prompt Generation and Manual Approval + +**Files:** + +- Create: `scripts/issue-bot/lib/opencode.ts` +- Create: `scripts/issue-bot/approve-issue.ts` +- Test: `src/__tests__/unit/issue-bot/approve-issue.test.ts` + +- [ ] **Step 1: Write the failing test** + +```ts +import { beforeEach, describe, expect, it, vi } from "vitest"; + +const loadState = vi.fn(); +const saveState = vi.fn(); +const launchOpencodeForIssue = vi.fn(); + +vi.mock("../../../../scripts/issue-bot/lib/state", () => ({ + loadIssueBotState: loadState, + saveIssueBotState: saveState, +})); + +vi.mock("../../../../scripts/issue-bot/lib/opencode", () => ({ + launchOpencodeForIssue, +})); + +describe("approve-issue", () => { + beforeEach(() => { + vi.resetModules(); + loadState.mockReset(); + saveState.mockReset(); + launchOpencodeForIssue.mockReset(); + }); + + it("marks the current candidate as approved and launches opencode", async () => { + loadState.mockReturnValue({ + currentCandidate: { + id: 42, + number: "I42", + title: "应用升级失败并白屏", + url: "https://gitee.com/spark-store-project/spark-store/issues/I42", + state: "open", + createdAt: "2026-04-14T00:00:00.000Z", 
+ updatedAt: "2026-04-14T00:00:00.000Z", + labels: ["bug"], + bodyPreview: "更新后白屏,附日志。", + score: 20, + rankingReasons: ["contains high-impact keyword: 升级失败"], + }, + approvedIssue: null, + seenIssueIds: [42], + lastRunAt: "2026-04-14T09:00:00.000Z", + lastRunStatus: "success", + lastRunMessage: "candidate updated", + }); + + const { runIssueBotApproval } = + await import("../../../../scripts/issue-bot/approve-issue"); + await runIssueBotApproval(); + + expect(launchOpencodeForIssue).toHaveBeenCalledWith( + expect.objectContaining({ id: 42, title: "应用升级失败并白屏" }), + ); + expect(saveState).toHaveBeenCalledWith( + expect.objectContaining({ + currentCandidate: null, + approvedIssue: expect.objectContaining({ id: 42 }), + }), + ); + }); + + it("throws when there is no candidate to approve", async () => { + loadState.mockReturnValue({ + currentCandidate: null, + approvedIssue: null, + seenIssueIds: [], + lastRunAt: null, + lastRunStatus: "idle", + lastRunMessage: null, + }); + + const { runIssueBotApproval } = + await import("../../../../scripts/issue-bot/approve-issue"); + + await expect(runIssueBotApproval()).rejects.toThrow( + "No current issue candidate to approve.", + ); + }); +}); +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/approve-issue.test.ts` + +Expected: FAIL with `Cannot find module '../../../../scripts/issue-bot/approve-issue'`. 
+
+- [ ] **Step 3: Write minimal implementation**
+
+```ts
+// scripts/issue-bot/lib/opencode.ts
+import { spawn } from "node:child_process";
+
+import type { RankedIssue } from "./types";
+
+export const buildOpencodePrompt = (
+  issue: RankedIssue,
+): string => `请处理以下 Spark Store issue:
+
+标题:${issue.title}
+链接:${issue.url}
+摘要:${issue.bodyPreview}
+优先级原因:${issue.rankingReasons.join(";")}
+
+要求:先分析根因,再开始修复。默认基仓库必须使用 ~/Desktop/spark-store。
+如果开始修改代码,必须先使用 git worktree,从 Erotica 分支开出新的工作分支,并在该 worktree 中实施改动,不要直接在主工作区修改。`;
+
+export const launchOpencodeForIssue = async (
+  issue: RankedIssue,
+): Promise<void> => {
+  const configuredCommand = process.env.SPARK_STORE_OPENCODE_CMD || "opencode";
+  const child = spawn(configuredCommand, [buildOpencodePrompt(issue)], {
+    detached: true,
+    stdio: "ignore",
+    shell: true,
+  });
+
+  child.unref();
+};
+```
+
+```ts
+// scripts/issue-bot/approve-issue.ts
+import { launchOpencodeForIssue } from "./lib/opencode";
+import { loadIssueBotState, saveIssueBotState } from "./lib/state";
+
+export const runIssueBotApproval = async (): Promise<void> => {
+  const state = loadIssueBotState();
+  const candidate = state.currentCandidate;
+
+  if (!candidate) {
+    throw new Error("No current issue candidate to approve.");
+  }
+
+  await launchOpencodeForIssue(candidate);
+
+  saveIssueBotState({
+    ...state,
+    currentCandidate: null,
+    approvedIssue: {
+      id: candidate.id,
+      title: candidate.title,
+      url: candidate.url,
+      approvedAt: new Date().toISOString(),
+    },
+  });
+};
+
+if (import.meta.url === `file://${process.argv[1]}`) {
+  runIssueBotApproval().catch((error) => {
+    console.error(error);
+    process.exitCode = 1;
+  });
+}
+```
+
+- [ ] **Step 4: Run test to verify it passes**
+
+Run: `npm run test -- --run src/__tests__/unit/issue-bot/approve-issue.test.ts`
+
+Expected: PASS with 2 tests passed.
+ +- [ ] **Step 5: Commit** + +```bash +git add scripts/issue-bot/lib/opencode.ts scripts/issue-bot/approve-issue.ts src/__tests__/unit/issue-bot/approve-issue.test.ts +git commit -m "feat(issue-bot): approve candidates and launch opencode" +``` + +### Task 5: Wire npm Scripts and systemd Units + +**Files:** + +- Modify: `package.json` +- Modify: `tsconfig.node.json` +- Create: `extras/systemd/spark-store-issue-bot.service` +- Create: `extras/systemd/spark-store-issue-bot.timer` +- Create: `src/__tests__/unit/issue-bot/packaging.test.ts` + +- [ ] **Step 1: Write the failing test** + +```ts +import { describe, expect, it } from "vitest"; + +import pkg from "../../../../package.json"; +import serviceUnit from "../../../../extras/systemd/spark-store-issue-bot.service?raw"; +import timerUnit from "../../../../extras/systemd/spark-store-issue-bot.timer?raw"; + +describe("issue-bot packaging", () => { + it("adds npm scripts for polling and approval", () => { + expect(pkg.scripts["issue-bot:check"]).toBe( + "node --experimental-strip-types scripts/issue-bot/check-issues.ts", + ); + expect(pkg.scripts["issue-bot:approve"]).toBe( + "node --experimental-strip-types scripts/issue-bot/approve-issue.ts", + ); + }); + + it("installs a six-hour persistent user timer", () => { + expect(serviceUnit).toContain("Type=oneshot"); + expect(serviceUnit).toContain( + "ExecStart=/usr/bin/env npm run issue-bot:check", + ); + expect(timerUnit).toContain("OnUnitActiveSec=6h"); + expect(timerUnit).toContain("Persistent=true"); + }); +}); +``` + +- [ ] **Step 2: Run test to verify it fails** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/packaging.test.ts` + +Expected: FAIL with `Failed to resolve import '../../../../extras/systemd/spark-store-issue-bot.service?raw'` and missing package scripts. 
+ +- [ ] **Step 3: Write minimal implementation** + +```json +// package.json +{ + "scripts": { + "issue-bot:check": "node --experimental-strip-types scripts/issue-bot/check-issues.ts", + "issue-bot:approve": "node --experimental-strip-types scripts/issue-bot/approve-issue.ts" + } +} +``` + +```json +// tsconfig.node.json +{ + "include": ["vite.config.ts", "package.json", "electron", "scripts"] +} +``` + +```ini +; extras/systemd/spark-store-issue-bot.service +[Unit] +Description=Spark Store issue bot poller + +[Service] +Type=oneshot +WorkingDirectory=%h/Desktop/spark-store +ExecStart=/usr/bin/env npm run issue-bot:check +``` + +```ini +; extras/systemd/spark-store-issue-bot.timer +[Unit] +Description=Run Spark Store issue bot every 6 hours + +[Timer] +OnBootSec=15m +OnUnitActiveSec=6h +Persistent=true +Unit=spark-store-issue-bot.service + +[Install] +WantedBy=timers.target +``` + +- [ ] **Step 4: Run test to verify it passes** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/packaging.test.ts` + +Expected: PASS with 2 tests passed. 
+ +- [ ] **Step 5: Commit** + +```bash +git add package.json tsconfig.node.json extras/systemd/spark-store-issue-bot.service extras/systemd/spark-store-issue-bot.timer src/__tests__/unit/issue-bot/packaging.test.ts +git commit -m "chore(issue-bot): wire scripts and timer units" +``` + +### Task 6: Run End-to-End Verification + +**Files:** + +- Modify: `scripts/issue-bot/check-issues.ts` +- Modify: `scripts/issue-bot/approve-issue.ts` +- Modify: `scripts/issue-bot/lib/gitee.ts` +- Modify: `scripts/issue-bot/lib/opencode.ts` +- Modify: `scripts/issue-bot/lib/ranking.ts` +- Modify: `scripts/issue-bot/lib/state.ts` +- Modify: `package.json` +- Modify: `tsconfig.node.json` +- Create: `extras/systemd/spark-store-issue-bot.service` +- Create: `extras/systemd/spark-store-issue-bot.timer` +- Test: `src/__tests__/unit/issue-bot/state.test.ts` +- Test: `src/__tests__/unit/issue-bot/ranking.test.ts` +- Test: `src/__tests__/unit/issue-bot/check-issues.test.ts` +- Test: `src/__tests__/unit/issue-bot/approve-issue.test.ts` +- Test: `src/__tests__/unit/issue-bot/packaging.test.ts` + +- [ ] **Step 1: Run focused issue-bot tests** + +Run: `npm run test -- --run src/__tests__/unit/issue-bot/state.test.ts src/__tests__/unit/issue-bot/ranking.test.ts src/__tests__/unit/issue-bot/check-issues.test.ts src/__tests__/unit/issue-bot/approve-issue.test.ts src/__tests__/unit/issue-bot/packaging.test.ts` + +Expected: PASS with all issue-bot tests green. + +- [ ] **Step 2: Run lint** + +Run: `npm run lint` + +Expected: PASS with no ESLint errors in `scripts/issue-bot`, `src/__tests__/unit/issue-bot`, and touched config files. + +- [ ] **Step 3: Run build verification** + +Run: `npm run build:vite` + +Expected: PASS with Electron/Vite bundles generated and no TypeScript errors after adding `scripts` to `tsconfig.node.json`. 
+ +- [ ] **Step 4: Manually verify CLI entrypoints** + +Run: `npm run issue-bot:check` + +Expected: `~/.cache/spark-store/issue-bot/state.json` exists and contains either a populated `currentCandidate` or a `lastRunMessage` of `no candidate issues found`. + +Run: `SPARK_STORE_OPENCODE_CMD='printf' npm run issue-bot:approve` + +Expected: command exits successfully and prints the generated prompt containing both `~/Desktop/spark-store` and `Erotica`. + +- [ ] **Step 5: Manually verify systemd units** + +Run: `systemctl --user start spark-store-issue-bot.service` + +Expected: service runs once without unit-file syntax errors. + +Run: `systemctl --user enable --now spark-store-issue-bot.timer` + +Expected: timer is enabled, active, and reports the next run roughly 6 hours later. + +- [ ] **Step 6: Commit** + +```bash +git add scripts/issue-bot package.json tsconfig.node.json extras/systemd/spark-store-issue-bot.service extras/systemd/spark-store-issue-bot.timer src/__tests__/unit/issue-bot +git commit -m "feat(issue-bot): add automated issue polling workflow" +``` + +## Self-Review + +### Spec coverage + +- `systemd --user` timer requirement: covered by Task 5 and Task 6. +- One-candidate ranking with explainable reasons: covered by Task 2 and Task 3. +- Manual approval before opencode launch: covered by Task 4. +- Local cache-backed state with failure retention: covered by Task 1 and Task 3. +- `~/Desktop/spark-store` + `Erotica` worktree rule in the launch prompt: covered by Task 4 and manual verification in Task 6. + +### Placeholder scan + +- No `TBD`, `TODO`, or “implement later” placeholders remain. +- All code-changing steps include concrete code blocks. +- All verification steps include exact commands and expected outcomes. + +### Type consistency + +- `NormalizedIssue`, `RankedIssue`, `ApprovedIssue`, and `IssueBotState` are defined in Task 1 and reused consistently in Tasks 2-4. 
+- `runIssueBotCheck`, `runIssueBotApproval`, and `launchOpencodeForIssue` names stay unchanged across tests and implementation steps. diff --git a/docs/superpowers/plans/2026-04-15-installed-apps-and-update-center-loading.md b/docs/superpowers/plans/2026-04-15-installed-apps-and-update-center-loading.md new file mode 100644 index 00000000..bc486eba --- /dev/null +++ b/docs/superpowers/plans/2026-04-15-installed-apps-and-update-center-loading.md @@ -0,0 +1,1571 @@ +# Installed Apps And Update Center Loading Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Make the update center open immediately with visible loading feedback, switch Spark installed-app discovery to desktop entries under `/usr/share/applications`, and let users open installed apps from the management modal. + +**Architecture:** Keep the existing Electron IPC contracts in place. Add a renderer-only loading flag to the update-center store for immediate modal display, move Spark desktop discovery into a focused main-process helper that returns `InstalledAppInfo`-shaped records, and move installed-app normalization into a small renderer helper so `App.vue` can accept local desktop apps even when they are absent from the remote catalog. + +**Tech Stack:** Vue 3 Composition API, TypeScript, Electron IPC, Node `fs/path/child_process`, Vitest, Testing Library Vue + +--- + +## File Structure + +- Create: `electron/main/backend/sparkInstalledApps.ts` + Responsibility: scan `/usr/share/applications`, parse desktop files, resolve owning packages with `dpkg -S`, and return normalized Spark installed-app records. +- Create: `src/modules/installedApps.ts` + Responsibility: convert `list-installed` results into `App` objects without filtering out Spark apps that are missing from the remote catalog. 
+- Create: `src/__tests__/unit/sparkInstalledApps.test.ts` + Responsibility: regression coverage for Spark desktop discovery, deduping, and failure handling. +- Create: `src/__tests__/unit/installedApps.test.ts` + Responsibility: regression coverage for installed-app normalization and Spark fallback cards. +- Modify: `src/modules/updateCenter.ts` + Responsibility: expose `loading` on the update-center store and make `open()` show the modal before the first IPC result returns. +- Modify: `src/components/UpdateCenterModal.vue` + Responsibility: show the initial loading state and the lighter refresh-in-progress state. +- Modify: `src/components/update-center/UpdateCenterToolbar.vue` + Responsibility: disable and visually update the refresh button while loading. +- Modify: `src/__tests__/unit/update-center/store.test.ts` + Responsibility: prove the store opens immediately and toggles loading around `open()` and `refresh()`. +- Modify: `src/__tests__/unit/update-center/UpdateCenterModal.test.ts` + Responsibility: prove the loading panel and disabled refresh state render correctly. +- Modify: `electron/main/backend/install-manager.ts` + Responsibility: replace the inline Spark `dpkg-query -W` listing branch with the new desktop-discovery helper. +- Modify: `src/components/InstalledAppsModal.vue` + Responsibility: emit `open-app` and render the new `打开` action beside `卸载`. +- Modify: `src/__tests__/unit/InstalledAppsModal.test.ts` + Responsibility: prove the modal emits `open-app` and still keeps scroll chaining contained. +- Modify: `src/App.vue` + Responsibility: wire `InstalledAppsModal` to `openDownloadedApp()` and use the installed-app normalization helper instead of filtering Spark apps out when they are missing from the remote catalog. 
+ +### Task 1: Update Center Store Loading Lifecycle + +**Files:** + +- Modify: `src/modules/updateCenter.ts` +- Test: `src/__tests__/unit/update-center/store.test.ts` + +- [ ] **Step 1: Write the failing tests** + +```ts +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import { createUpdateCenterStore } from "@/modules/updateCenter"; +import { downloads } from "@/global/downloadStatus"; + +const createSnapshot = (overrides = {}) => ({ + items: [ + { + taskKey: "aptss:spark-weather", + packageName: "spark-weather", + displayName: "Spark Weather", + currentVersion: "1.0.0", + newVersion: "2.0.0", + source: "aptss" as const, + ignored: false, + }, + ], + tasks: [], + warnings: [], + hasRunningTasks: false, + ...overrides, +}); + +const createDeferred = () => { + let resolve!: (value: T) => void; + const promise = new Promise((nextResolve) => { + resolve = nextResolve; + }); + + return { promise, resolve }; +}; + +describe("updateCenter store", () => { + const open = vi.fn(); + const refresh = vi.fn(); + const start = vi.fn(); + const onState = vi.fn(); + const offState = vi.fn(); + + beforeEach(() => { + open.mockReset(); + refresh.mockReset(); + start.mockReset(); + onState.mockReset(); + offState.mockReset(); + downloads.value = []; + + Object.defineProperty(window, "updateCenter", { + configurable: true, + value: { + open, + refresh, + ignore: vi.fn(), + unignore: vi.fn(), + start, + cancel: vi.fn(), + getState: vi.fn(), + onState, + offState, + }, + }); + }); + + it("opens the modal immediately while waiting for the first snapshot", async () => { + const deferred = createDeferred>(); + open.mockReturnValue(deferred.promise); + const store = createUpdateCenterStore(); + + const openPromise = store.open(); + + expect(store.isOpen.value).toBe(true); + expect(store.loading.value).toBe(true); + + deferred.resolve(createSnapshot()); + await openPromise; + + expect(store.snapshot.value).toEqual(createSnapshot()); + 
expect(store.loading.value).toBe(false); + }); + + it("toggles loading around refresh", async () => { + const deferred = createDeferred>(); + refresh.mockReturnValue(deferred.promise); + const store = createUpdateCenterStore(); + + const refreshPromise = store.refresh(); + + expect(store.loading.value).toBe(true); + + deferred.resolve(createSnapshot({ warnings: ["refresh finished"] })); + await refreshPromise; + + expect(store.loading.value).toBe(false); + expect(store.snapshot.value.warnings).toEqual(["refresh finished"]); + }); +}); +``` + +- [ ] **Step 2: Run the store test file to verify it fails** + +Run: `npx vitest run src/__tests__/unit/update-center/store.test.ts` + +Expected: FAIL because `UpdateCenterStore` does not expose `loading`, and `open()` only sets `isOpen` after awaiting `window.updateCenter.open()`. + +- [ ] **Step 3: Write the minimal store implementation** + +```ts +import { computed, ref, type ComputedRef, type Ref } from "vue"; + +export interface UpdateCenterStore { + isOpen: Ref; + loading: Ref; + showCloseConfirm: Ref; + showMigrationConfirm: Ref; + searchQuery: Ref; + selectedTaskKeys: Ref>; + snapshot: Ref; + filteredItems: ComputedRef; + allSelected: ComputedRef; + someSelected: ComputedRef; + bind: () => void; + unbind: () => void; + open: () => Promise; + refresh: () => Promise; + toggleSelection: (taskKey: string) => void; + toggleSelectAll: () => void; + getSelectedItems: () => UpdateCenterItem[]; + closeNow: () => void; + startSelected: () => Promise; + requestClose: () => void; +} + +export const createUpdateCenterStore = (): UpdateCenterStore => { + const isOpen = ref(false); + const loading = ref(false); + const showCloseConfirm = ref(false); + const showMigrationConfirm = ref(false); + const searchQuery = ref(""); + const selectedTaskKeys = ref(new Set()); + const snapshot = ref(EMPTY_SNAPSHOT); + + const resetSessionState = (): void => { + showCloseConfirm.value = false; + showMigrationConfirm.value = false; + 
searchQuery.value = ""; + selectedTaskKeys.value = new Set(); + }; + + const open = async (): Promise => { + resetSessionState(); + isOpen.value = true; + loading.value = true; + try { + const nextSnapshot = await window.updateCenter.open(); + applySnapshot(nextSnapshot); + } finally { + loading.value = false; + } + }; + + const refresh = async (): Promise => { + loading.value = true; + try { + const nextSnapshot = await window.updateCenter.refresh(); + applySnapshot(nextSnapshot); + } finally { + loading.value = false; + } + }; + + const closeNow = (): void => { + resetSessionState(); + loading.value = false; + isOpen.value = false; + }; + + return { + isOpen, + loading, + showCloseConfirm, + showMigrationConfirm, + searchQuery, + selectedTaskKeys, + snapshot, + filteredItems, + allSelected, + someSelected, + bind, + unbind, + open, + refresh, + toggleSelection, + toggleSelectAll, + getSelectedItems, + closeNow, + startSelected, + requestClose, + }; +}; +``` + +- [ ] **Step 4: Re-run the store test file** + +Run: `npx vitest run src/__tests__/unit/update-center/store.test.ts` + +Expected: PASS with both new loading-lifecycle tests green. 
+ +- [ ] **Step 5: Commit the store-loading change** + +```bash +git add src/modules/updateCenter.ts src/__tests__/unit/update-center/store.test.ts +git commit -m "fix(update-center): show loading before updates load" +``` + +### Task 2: Update Center Loading UI + +**Files:** + +- Modify: `src/components/UpdateCenterModal.vue` +- Modify: `src/components/update-center/UpdateCenterToolbar.vue` +- Test: `src/__tests__/unit/update-center/UpdateCenterModal.test.ts` + +- [ ] **Step 1: Write the failing loading-state UI tests** + +```ts +import { computed, ref } from "vue"; +import { fireEvent, render, screen } from "@testing-library/vue"; +import { describe, expect, it, vi } from "vitest"; + +import UpdateCenterModal from "@/components/UpdateCenterModal.vue"; +import type { + UpdateCenterItem, + UpdateCenterSnapshot, + UpdateCenterTaskState, +} from "@/global/typedefinition"; +import type { UpdateCenterStore } from "@/modules/updateCenter"; + +const createItem = ( + overrides: Partial = {}, +): UpdateCenterItem => ({ + taskKey: "aptss:spark-weather", + packageName: "spark-weather", + displayName: "Spark Weather", + currentVersion: "1.0.0", + newVersion: "2.0.0", + source: "aptss", + ...overrides, +}); + +const createTask = ( + overrides: Partial = {}, +): UpdateCenterTaskState => ({ + taskKey: "aptss:spark-weather", + packageName: "spark-weather", + source: "aptss", + status: "downloading", + progress: 42, + logs: [], + errorMessage: "", + ...overrides, +}); + +const createStore = ( + overrides: Partial = {}, +): UpdateCenterStore => { + const snapshot = ref({ + items: [ + createItem({ + taskKey: "aptss:spark-weather", + source: "aptss", + }), + createItem({ + taskKey: "apm:spark-clock", + packageName: "spark-clock", + displayName: "Spark Clock", + source: "apm", + isMigration: true, + migrationTarget: "apm", + }), + ], + tasks: [createTask()], + warnings: ["更新过程中请勿关闭商店"], + hasRunningTasks: true, + ...overrides, + }); + + const selectedTaskKeys = ref(new 
Set(["aptss:spark-weather"])); + + return { + isOpen: ref(true), + loading: ref(false), + showCloseConfirm: ref(true), + showMigrationConfirm: ref(false), + searchQuery: ref(""), + selectedTaskKeys, + snapshot, + filteredItems: computed(() => snapshot.value.items), + allSelected: computed(() => true), + someSelected: computed(() => false), + bind: vi.fn(), + unbind: vi.fn(), + open: vi.fn(), + refresh: vi.fn(), + toggleSelection: vi.fn(), + toggleSelectAll: vi.fn(), + getSelectedItems: vi.fn(() => + snapshot.value.items.filter( + (item) => + selectedTaskKeys.value.has(item.taskKey) && item.ignored !== true, + ), + ), + closeNow: vi.fn(), + startSelected: vi.fn(), + requestClose: vi.fn(), + }; +}; + +describe("UpdateCenterModal", () => { + it("shows an initial loading panel and disables refresh while loading", () => { + const store = createStore({ + items: [], + tasks: [], + warnings: [], + hasRunningTasks: false, + }); + store.loading.value = true; + + render(UpdateCenterModal, { + props: { + show: true, + store, + }, + }); + + expect(screen.getByText("正在检查更新…")).toBeTruthy(); + expect(screen.getByRole("button", { name: /刷新/ })).toBeDisabled(); + }); + + it("keeps existing items visible while showing the refresh hint", () => { + const store = createStore({ hasRunningTasks: false }); + store.loading.value = true; + + render(UpdateCenterModal, { + props: { + show: true, + store, + }, + }); + + expect(screen.getByText("Spark Weather")).toBeTruthy(); + expect(screen.getByText("正在刷新更新列表…")).toBeTruthy(); + }); +}); +``` + +- [ ] **Step 2: Run the modal test file to verify it fails** + +Run: `npx vitest run src/__tests__/unit/update-center/UpdateCenterModal.test.ts` + +Expected: FAIL because the toolbar does not accept a `loading` prop yet, and the modal renders neither `正在检查更新…` nor `正在刷新更新列表…`. 
+ +- [ ] **Step 3: Implement the loading UI in the modal and toolbar** + +```vue + + + + +``` + +```vue + + +``` + +- [ ] **Step 4: Re-run the modal test file** + +Run: `npx vitest run src/__tests__/unit/update-center/UpdateCenterModal.test.ts` + +Expected: PASS with the new loading-panel and refresh-disabled assertions green. + +- [ ] **Step 5: Commit the loading UI change** + +```bash +git add src/components/UpdateCenterModal.vue src/components/update-center/UpdateCenterToolbar.vue src/__tests__/unit/update-center/UpdateCenterModal.test.ts +git commit -m "fix(update-center): show loading state in the modal" +``` + +### Task 3: Spark Desktop Discovery In The Main Process + +**Files:** + +- Create: `electron/main/backend/sparkInstalledApps.ts` +- Modify: `electron/main/backend/install-manager.ts` +- Test: `src/__tests__/unit/sparkInstalledApps.test.ts` + +- [ ] **Step 1: Write the failing Spark desktop-discovery tests** + +```ts +import { describe, expect, it, vi } from "vitest"; + +import { listSparkInstalledApps } from "../../../electron/main/backend/sparkInstalledApps"; + +describe("listSparkInstalledApps", () => { + it("builds Spark installed apps from visible desktop entries", async () => { + const applicationsDir = "/usr/share/applications"; + const fsLike = { + readdirSync: vi.fn(() => [ + "reader.desktop", + "hidden.desktop", + "reader-alt.desktop", + ]), + realpathSync: vi.fn((filePath: string) => filePath), + readFileSync: vi.fn((filePath: string) => { + const files: Record = { + [`${applicationsDir}/reader.desktop`]: + "[Desktop Entry]\nName=Spark Reader\nIcon=/usr/share/pixmaps/reader.png\n", + [`${applicationsDir}/hidden.desktop`]: + "[Desktop Entry]\nName=Hidden Reader\nNoDisplay=true\n", + [`${applicationsDir}/reader-alt.desktop`]: + "[Desktop Entry]\nName=Spark Reader Alt\nIcon=reader\n", + }; + + return files[filePath] ?? 
""; + }), + }; + + const runCommand = vi.fn(async (command: string, args: string[]) => { + const key = `${command} ${args.join(" ")}`; + + if ( + key === "dpkg-query -W -f=${Package}\\t${Version}\\t${Architecture}\\n" + ) { + return { + code: 0, + stdout: "spark-reader\t1.2.3\tamd64\n", + stderr: "", + }; + } + + if (key === `dpkg -S ${applicationsDir}/reader.desktop`) { + return { + code: 0, + stdout: "spark-reader: /usr/share/applications/reader.desktop\n", + stderr: "", + }; + } + + if (key === `dpkg -S ${applicationsDir}/reader-alt.desktop`) { + return { + code: 0, + stdout: "spark-reader: /usr/share/applications/reader-alt.desktop\n", + stderr: "", + }; + } + + return { code: 1, stdout: "", stderr: "not owned" }; + }); + + const result = await listSparkInstalledApps({ + applicationsDir, + fsLike, + runCommand, + }); + + expect(result).toEqual({ + success: true, + apps: [ + { + pkgname: "spark-reader", + name: "Spark Reader", + version: "1.2.3", + arch: "amd64", + flags: "[installed]", + origin: "spark", + icon: "/usr/share/pixmaps/reader.png", + isDependency: false, + }, + ], + }); + }); + + it("returns a failure object when package metadata lookup fails", async () => { + const result = await listSparkInstalledApps({ + applicationsDir: "/usr/share/applications", + fsLike: { + readdirSync: vi.fn(() => []), + realpathSync: vi.fn((filePath: string) => filePath), + readFileSync: vi.fn(() => ""), + }, + runCommand: vi.fn(async () => ({ + code: 1, + stdout: "", + stderr: "dpkg-query failed", + })), + }); + + expect(result).toEqual({ + success: false, + message: "Failed to list installed packages", + apps: [], + }); + }); +}); +``` + +- [ ] **Step 2: Run the Spark desktop-discovery tests to verify they fail** + +Run: `npx vitest run src/__tests__/unit/sparkInstalledApps.test.ts` + +Expected: FAIL because `electron/main/backend/sparkInstalledApps.ts` does not exist yet. 
+ +- [ ] **Step 3: Implement the Spark helper and wire it into `install-manager.ts`** + +```ts +// electron/main/backend/sparkInstalledApps.ts +import fs from "node:fs"; +import path from "node:path"; + +export interface SparkInstalledApp { + pkgname: string; + name: string; + version: string; + arch: string; + flags: string; + origin: "spark"; + icon?: string; + isDependency: boolean; +} + +export interface CommandResult { + code: number; + stdout: string; + stderr: string; +} + +export type CommandRunner = ( + command: string, + args: string[], +) => Promise; + +interface FsLike { + readdirSync: typeof fs.readdirSync; + realpathSync: typeof fs.realpathSync; + readFileSync: typeof fs.readFileSync; +} + +const PACKAGE_QUERY_ARGS = [ + "-W", + "-f=${Package}\t${Version}\t${Architecture}\\n", +]; + +const parseDesktopEntry = (content: string) => ({ + name: content.match(/^Name=(.+)$/m)?.[1]?.trim() ?? "", + icon: content.match(/^Icon=(.+)$/m)?.[1]?.trim() ?? "", + noDisplay: /^NoDisplay=true$/m.test(content), +}); + +const parsePackageMetadata = ( + stdout: string, +): Map => { + const metadata = new Map(); + + stdout + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0) + .forEach((line) => { + const [pkgname, version, arch] = line.split("\t"); + if (!pkgname || !version || !arch) { + return; + } + + metadata.set(pkgname, { version, arch }); + }); + + return metadata; +}; + +const parseDpkgOwner = (stdout: string): string | null => { + const firstLine = stdout + .split("\n") + .map((line) => line.trim()) + .find((line) => line.length > 0); + + if (!firstLine) { + return null; + } + + const ownerField = firstLine.split(":")[0]?.split(",")[0]?.trim(); + if (!ownerField) { + return null; + } + + return ownerField.replace(/:(amd64|arm64|i386|all)$/, ""); +}; + +export const listSparkInstalledApps = async ({ + applicationsDir = "/usr/share/applications", + fsLike = fs, + runCommand, +}: { + applicationsDir?: string; + fsLike?: FsLike; + 
runCommand: CommandRunner; +}): Promise< + | { success: true; apps: SparkInstalledApp[] } + | { success: false; message: string; apps: [] } +> => { + const metadataResult = await runCommand("dpkg-query", PACKAGE_QUERY_ARGS); + if (metadataResult.code !== 0) { + return { + success: false, + message: "Failed to list installed packages", + apps: [], + }; + } + + const packageMetadata = parsePackageMetadata(metadataResult.stdout); + const appsByPackage = new Map(); + const desktopFiles = fsLike + .readdirSync(applicationsDir) + .filter((entry) => entry.endsWith(".desktop")) + .sort(); + + for (const desktopFile of desktopFiles) { + const desktopPath = path.join(applicationsDir, desktopFile); + + try { + const resolvedDesktopPath = fsLike.realpathSync(desktopPath).toString(); + const content = fsLike.readFileSync(resolvedDesktopPath, "utf-8"); + const entry = parseDesktopEntry(content.toString()); + if (entry.noDisplay) { + continue; + } + + const ownerResult = await runCommand("dpkg", ["-S", resolvedDesktopPath]); + if (ownerResult.code !== 0) { + continue; + } + + const pkgname = parseDpkgOwner(ownerResult.stdout); + if (!pkgname || appsByPackage.has(pkgname)) { + continue; + } + + const metadata = packageMetadata.get(pkgname); + if (!metadata) { + continue; + } + + appsByPackage.set(pkgname, { + pkgname, + name: entry.name || pkgname, + version: metadata.version, + arch: metadata.arch, + flags: "[installed]", + origin: "spark", + icon: entry.icon || undefined, + isDependency: false, + }); + } catch { + continue; + } + } + + return { + success: true, + apps: [...appsByPackage.values()].sort((left, right) => + left.pkgname.localeCompare(right.pkgname), + ), + }; +}; +``` + +```ts +// electron/main/backend/install-manager.ts +import { listSparkInstalledApps } from "./sparkInstalledApps"; + +ipcMain.handle( + "list-installed", + async (_event, origin: "apm" | "spark" = "apm") => { + const apmBasePath = "/var/lib/apm/apm/files/ace-env/var/lib/apm"; + + try { + const 
installedApps: Array<{ + pkgname: string; + name: string; + version: string; + arch: string; + flags: string; + origin: "spark" | "apm"; + icon?: string; + isDependency: boolean; + }> = []; + + if (origin === "spark") { + return await listSparkInstalledApps({ runCommand: runCommandCapture }); + } + + const { code, stdout } = await runCommandCapture("apm", [ + "list", + "--installed", + ]); + + if (code !== 0) { + logger.warn(`Failed to list installed packages: ${stdout}`); + return { + success: false, + message: "Failed to list installed packages", + apps: [], + }; + } + + const cleanStdout = stdout.replace(/\x1b\[[0-9;]*m/g, ""); + const lines = cleanStdout.split("\n"); + + for (const line of lines) { + const trimmed = line.trim(); + if ( + !trimmed || + trimmed.startsWith("Listing") || + trimmed.startsWith("[INFO]") || + trimmed.startsWith("警告") + ) { + continue; + } + + const match = trimmed.match( + /^(\S+)\/\S+(?:,\S+)?\s+(\S+)\s+(\S+)\s+\[(.+)\]$/, + ); + if (!match) { + logger.debug(`Failed to parse line: ${trimmed}`); + continue; + } + + const [, pkgname, version, arch, flags] = match; + let appName = pkgname; + let icon = ""; + const pkgPath = path.join(apmBasePath, pkgname); + const entriesPath = path.join(pkgPath, "entries", "applications"); + const hasEntries = fs.existsSync(entriesPath); + + if (hasEntries) { + try { + const desktopFiles = fs.readdirSync(entriesPath); + for (const file of desktopFiles) { + if (!file.endsWith(".desktop")) { + continue; + } + + const desktopPath = path.join(entriesPath, file); + const content = fs.readFileSync(desktopPath, "utf-8"); + const nameMatch = content.match(/^Name=(.+)$/m); + const iconMatch = content.match(/^Icon=(.+)$/m); + if (nameMatch) appName = nameMatch[1].trim(); + if (iconMatch) icon = iconMatch[1].trim(); + break; + } + } catch (error) { + logger.warn(`Failed to read desktop file for ${pkgname}: ${error}`); + } + } + + installedApps.push({ + pkgname, + name: appName, + version, + arch, + flags, + 
origin: "apm", + icon: icon || undefined, + isDependency: !hasEntries, + }); + } + + installedApps.sort((left, right) => + left.pkgname.localeCompare(right.pkgname), + ); + return { success: true, apps: installedApps }; + } catch (error) { + logger.error( + `list-installed failed: ${error instanceof Error ? error.message : String(error)}`, + ); + return { + success: false, + message: error instanceof Error ? error.message : String(error), + apps: [], + }; + } + }, +); +``` + +- [ ] **Step 4: Re-run the Spark helper tests** + +Run: `npx vitest run src/__tests__/unit/sparkInstalledApps.test.ts` + +Expected: PASS with the desktop-discovery and metadata-failure cases green. + +- [ ] **Step 5: Commit the Spark discovery change** + +```bash +git add electron/main/backend/sparkInstalledApps.ts electron/main/backend/install-manager.ts src/__tests__/unit/sparkInstalledApps.test.ts +git commit -m "fix(installed-apps): discover spark apps from desktop files" +``` + +### Task 4: Installed App Normalization And Open Action + +**Files:** + +- Create: `src/modules/installedApps.ts` +- Modify: `src/App.vue` +- Modify: `src/components/InstalledAppsModal.vue` +- Create: `src/__tests__/unit/installedApps.test.ts` +- Modify: `src/__tests__/unit/InstalledAppsModal.test.ts` + +- [ ] **Step 1: Write the failing installed-app normalization and open-action tests** + +```ts +// src/__tests__/unit/installedApps.test.ts +import { describe, expect, it } from "vitest"; + +import { buildInstalledApps } from "@/modules/installedApps"; +import type { App, InstalledAppInfo } from "@/global/typedefinition"; + +const createInstalled = ( + overrides: Partial = {}, +): InstalledAppInfo => ({ + pkgname: "spark-reader", + name: "Spark Reader", + version: "1.0.0", + arch: "amd64", + flags: "[installed]", + origin: "spark", + icon: "/usr/share/pixmaps/reader.png", + isDependency: false, + ...overrides, +}); + +const createCatalogApp = (overrides: Partial = {}): App => ({ + name: "Spark Reader", + pkgname: 
"spark-reader", + version: "1.0.0", + category: "office", + tags: "", + more: "", + filename: "", + torrent_address: "", + author: "", + contributor: "", + website: "", + update: "", + size: "", + img_urls: [], + icons: "remote-icon.png", + origin: "spark", + currentStatus: "not-installed", + ...overrides, +}); + +describe("buildInstalledApps", () => { + it("keeps Spark desktop apps even when they are missing from the remote catalog", () => { + const result = buildInstalledApps({ + installed: [createInstalled()], + catalogApps: [], + origin: "spark", + }); + + expect(result).toMatchObject([ + { + name: "Spark Reader", + pkgname: "spark-reader", + category: "unknown", + origin: "spark", + currentStatus: "installed", + icons: "/usr/share/pixmaps/reader.png", + }, + ]); + }); + + it("reuses catalog metadata when the package exists in the selected origin", () => { + const result = buildInstalledApps({ + installed: [createInstalled({ origin: "apm", pkgname: "spark-clock" })], + catalogApps: [ + createCatalogApp({ + name: "Spark Clock", + pkgname: "spark-clock", + category: "utilities", + origin: "apm", + }), + ], + origin: "apm", + }); + + expect(result[0]).toMatchObject({ + name: "Spark Clock", + pkgname: "spark-clock", + category: "utilities", + origin: "apm", + currentStatus: "installed", + }); + }); +}); +``` + +```ts +// src/__tests__/unit/InstalledAppsModal.test.ts +import { fireEvent, render, screen } from "@testing-library/vue"; +import { describe, expect, it } from "vitest"; + +import InstalledAppsModal from "@/components/InstalledAppsModal.vue"; +import type { App } from "@/global/typedefinition"; + +const createInstalledApp = (): App => ({ + name: "Spark Reader", + pkgname: "spark-reader", + version: "1.0.0", + category: "unknown", + tags: "", + more: "", + filename: "", + torrent_address: "", + author: "", + contributor: "", + website: "", + update: "", + size: "", + img_urls: [], + icons: "/usr/share/pixmaps/reader.png", + origin: "spark", + currentStatus: 
"installed", + arch: "amd64", + flags: "[installed]", + isDependency: false, +}); + +describe("InstalledAppsModal", () => { + it("keeps scroll chaining inside the modal list", () => { + const { container } = render(InstalledAppsModal, { + props: { + show: true, + apps: [], + loading: false, + error: "", + activeOrigin: "spark", + storeFilter: "both", + apmAvailable: true, + }, + }); + + expect(screen.getByText("已安装应用")).toBeTruthy(); + const scrollContainer = container.querySelector(".overflow-y-auto"); + + expect(scrollContainer?.className).toContain("overscroll-contain"); + }); + + it("emits open-app with pkgname and origin", async () => { + const { emitted } = render(InstalledAppsModal, { + props: { + show: true, + apps: [createInstalledApp()], + loading: false, + error: "", + activeOrigin: "spark", + storeFilter: "both", + apmAvailable: true, + }, + }); + + await fireEvent.click(screen.getByRole("button", { name: "打开" })); + + expect(emitted()["open-app"]).toEqual([["spark-reader", "spark"]]); + }); +}); +``` + +- [ ] **Step 2: Run the installed-app tests to verify they fail** + +Run: `npx vitest run src/__tests__/unit/installedApps.test.ts src/__tests__/unit/InstalledAppsModal.test.ts` + +Expected: FAIL because `src/modules/installedApps.ts` does not exist yet, and `InstalledAppsModal.vue` does not emit `open-app`. 
+ +- [ ] **Step 3: Implement normalization, remove the Spark catalog filter, and add the open button** + +```ts +// src/modules/installedApps.ts +import type { App, InstalledAppInfo } from "@/global/typedefinition"; + +export const buildInstalledApps = ({ + installed, + catalogApps, + origin, +}: { + installed: InstalledAppInfo[]; + catalogApps: App[]; + origin: "spark" | "apm"; +}): App[] => { + return installed.map((app) => { + const catalogApp = catalogApps.find( + (item) => item.pkgname === app.pkgname && item.origin === origin, + ); + + if (catalogApp) { + return { + ...catalogApp, + flags: app.flags, + arch: app.arch, + currentStatus: "installed" as const, + isDependency: app.isDependency, + }; + } + + return { + name: app.name || app.pkgname, + pkgname: app.pkgname, + version: app.version, + category: "unknown", + tags: "", + more: "", + filename: "", + torrent_address: "", + author: "", + contributor: "", + website: "", + update: "", + size: "", + img_urls: [], + icons: app.icon || "", + origin: app.origin, + currentStatus: "installed" as const, + arch: app.arch, + flags: app.flags, + isDependency: app.isDependency, + }; + }); +}; +``` + +```vue + + + + +``` + +```ts +// src/App.vue +import { buildInstalledApps } from "./modules/installedApps"; + + + +const refreshInstalledApps = async () => { + installedLoading.value = true; + installedError.value = ""; + try { + const origin = activeInstalledOrigin.value; + const result = await window.ipcRenderer.invoke("list-installed", origin); + if (!result?.success) { + installedApps.value = []; + installedError.value = result?.message || "读取已安装应用失败"; + return; + } + + installedApps.value = buildInstalledApps({ + installed: result.apps, + catalogApps: apps.value, + origin, + }); + } catch (error: unknown) { + installedApps.value = []; + installedError.value = (error as Error)?.message || "读取已安装应用失败"; + } finally { + installedLoading.value = false; + } +}; +``` + +- [ ] **Step 4: Re-run the installed-app tests** + 
+Run: `npx vitest run src/__tests__/unit/installedApps.test.ts src/__tests__/unit/InstalledAppsModal.test.ts` + +Expected: PASS with Spark fallback-card coverage and the `open-app` emission check green. + +- [ ] **Step 5: Commit the installed-app UI change** + +```bash +git add src/modules/installedApps.ts src/App.vue src/components/InstalledAppsModal.vue src/__tests__/unit/installedApps.test.ts src/__tests__/unit/InstalledAppsModal.test.ts +git commit -m "feat(installed-apps): open local apps from software manager" +``` + +## Final Verification + +Run these commands after the four tasks above are complete: + +1. `npx vitest run src/__tests__/unit/update-center/store.test.ts src/__tests__/unit/update-center/UpdateCenterModal.test.ts src/__tests__/unit/sparkInstalledApps.test.ts src/__tests__/unit/installedApps.test.ts src/__tests__/unit/InstalledAppsModal.test.ts` +2. `npm run lint` +3. `npm run build:vite` + +Then do one manual smoke pass in the running app: + +1. Open the update center and confirm the modal appears immediately with `正在检查更新…` before the first snapshot arrives. +2. Trigger update-center refresh and confirm the refresh button is disabled while `正在刷新更新列表…` is visible. +3. Open the installed-apps modal with `Spark 软件` selected and confirm desktop apps from `/usr/share/applications` appear even when they are absent from the remote store catalog. +4. Click `打开` on one `spark` entry and one `apm` entry and confirm each one goes through the existing `launch-app` IPC path. 
diff --git a/docs/superpowers/specs/2026-04-14-gitee-issue-bot-design.md b/docs/superpowers/specs/2026-04-14-gitee-issue-bot-design.md new file mode 100644 index 00000000..160e77db --- /dev/null +++ b/docs/superpowers/specs/2026-04-14-gitee-issue-bot-design.md @@ -0,0 +1,365 @@ +# Gitee Issue 巡检与 Opencode 启动设计 + +## 背景 + +当前仓库没有一个稳定的自动化流程,能够按固定周期检查 `https://gitee.com/spark-store-project/spark-store/issues`,筛出当前“最新且最重要”的 issue,并在人工确认后自动拉起新的 opencode 进程开始分析与修复。 + +你的目标不是让机器人直接静默修复,而是建立一个半自动流程: + +1. 每 6 小时自动检查一次 Gitee issues。 +2. 自动筛出 1 个当前最值得处理的候选 issue。 +3. 默认只汇报,不自动开始修改。 +4. 你确认后,自动打开新的 opencode 窗口开始处理。 +5. 后续实际开始修改代码时,仍然以 `~/Desktop/spark-store` 作为基仓库,但必须通过 git worktree 从 `Erotica` 分支开出新分支,在隔离工作区中执行修改。 + +## 目标 + +1. 使用 `systemd --user` 定时器实现每 6 小时自动巡检。 +2. 每轮最多选择 1 个 issue 作为候选项。 +3. 候选项必须有可解释的评分结果,便于人工确认。 +4. 默认不自动修复,只记录候选状态并等待批准。 +5. 批准后自动启动新的 opencode 窗口,并把 issue 上下文传入。 +6. 为后续修复流程固定 worktree 约束:从 `Erotica` 分支开新分支,并保持 `~/Desktop/spark-store` 作为主仓库入口。 +7. 整个方案尽量独立于 Electron 主进程现有运行逻辑,避免把定时调度耦合进应用本体。 + +## 非目标 + +1. 不在本次实现中加入“自动修复后自动提交 PR”之类更长的链路。 +2. 不在本次实现中加入应用内 GUI 审批界面。 +3. 不在本次实现中实现复杂的 AI 优先级判断;优先使用透明、可维护的规则评分。 +4. 不在本次实现中把 issue 处理结果自动回写到 Gitee。 +5. 不在本次实现中实际创建 worktree 并改代码;这里只固定后续执行约束和启动提示。 + +## 方案选择 + +本次考虑三种方案: + +1. 用户级 `systemd` 定时器 + 独立 Node/TypeScript 巡检脚本 + 本地批准入口。 +2. 用户级 `systemd` 定时器 + Gitee 评论驱动批准。 +3. 完全接入 Electron,使用应用内常驻进程和弹窗审批。 + +最终选择方案 1。 + +原因: + +1. 它最小化对现有桌面应用逻辑的侵入,不要求应用常驻。 +2. `systemd --user` 已符合你的运行环境偏好,也与仓库里已有的用户级后台命令模式一致。 +3. 本地批准入口最容易落地,不依赖额外的 Gitee 写权限和 webhook/comment 解析。 +4. 后续如果要升级成评论审批或 GUI 审批,也可以在该方案基础上扩展。 + +## 设计概览 + +新增一个独立的 issue 巡检子系统,由五部分组成: + +1. `check-issues` 巡检入口:抓取 issue、打分、落本地状态。 +2. `state` 状态层:保存当前候选项、历史批准记录和最近一次运行结果。 +3. `approve-issue` 批准入口:由你手动触发,读取当前候选项并进入启动流程。 +4. `opencode launcher`:负责拼接 issue prompt 并打开新的 opencode 窗口。 +5. `systemd --user` 单元:负责每 6 小时调度巡检入口。 + +整体数据流分为两个阶段: + +1. 自动巡检阶段:仅发现和记录,不启动修复。 +2. 
人工批准阶段:由你确认后,才启动新的 opencode 会话。 + +## 文件与模块边界 + +### 脚本入口 + +- 新增:`scripts/issue-bot/check-issues.ts` + - 负责单次巡检执行。 + - 拉取 Gitee issues。 + - 调用评分逻辑选出候选项。 + - 写入状态文件和运行日志。 + +- 新增:`scripts/issue-bot/approve-issue.ts` + - 负责读取当前候选项。 + - 检查是否已有未完成批准任务。 + - 标记当前 issue 为已批准。 + - 调用 opencode 启动器。 + +### 共享库 + +- 新增:`scripts/issue-bot/lib/gitee.ts` + - 封装 issue 列表获取与基础字段归一化。 + - 输出统一结构,例如:`id`、`title`、`url`、`state`、`createdAt`、`updatedAt`、`labels`、`bodyPreview`。 + +- 新增:`scripts/issue-bot/lib/ranking.ts` + - 根据“最新且最重要”的规则计算分数。 + - 输出总分和评分明细,便于人工解释。 + +- 新增:`scripts/issue-bot/lib/state.ts` + - 负责本地状态读写。 + - 处理状态文件缺失、损坏、备份与迁移。 + +- 新增:`scripts/issue-bot/lib/opencode.ts` + - 负责生成发给 opencode 的 prompt。 + - 负责调用本地 opencode 启动命令。 + - 固定写入 worktree 执行约束。 + +### 配置与调度 + +- 新增:`extras/systemd/spark-store-issue-bot.service` + - 用户级一次性服务,执行单轮巡检。 + +- 新增:`extras/systemd/spark-store-issue-bot.timer` + - 每 6 小时触发一次 service。 + +- 修改:`package.json` + - 增加 `issue-bot:check`。 + - 增加 `issue-bot:approve`。 + +## 本地状态模型 + +建议把状态文件写到用户目录下的缓存位置,而不是仓库内,避免污染工作区。 + +建议路径:`~/.cache/spark-store/issue-bot/state.json` + +状态至少包含: + +```ts +interface IssueBotState { + currentCandidate: RankedIssue | null; + approvedIssue: ApprovedIssue | null; + seenIssueIds: number[]; + lastRunAt: string | null; + lastRunStatus: "idle" | "success" | "network-error" | "parse-error"; + lastRunMessage: string | null; +} +``` + +其中: + +1. `currentCandidate` 表示当前等待你批准的候选 issue。 +2. `approvedIssue` 表示已经批准并已启动 opencode 的 issue,用于避免重复批准。 +3. `seenIssueIds` 用于辅助去重,避免每轮都反复选择同一批低质量 issue。 +4. `lastRun*` 用于排查巡检失败原因。 + +## Gitee 拉取策略 + +优先顺序如下: + +1. 若存在可稳定使用的 Gitee API,则优先使用 API。 +2. 若 API 受限或字段不足,则退回页面抓取。 + +无论采用哪种来源,`gitee.ts` 对外只暴露统一的 issue 数据结构,不把 HTML 解析细节传播到评分层和状态层。 + +抓取范围只包含: + +1. 打开的 issue。 +2. 当前仓库 `spark-store-project/spark-store`。 +3. 必需字段能提取成功的 issue。 + +如果本轮无法获取完整 issue 列表: + +1. 记录错误。 +2. 不覆盖现有 `currentCandidate`。 +3. 结束本轮执行,等待下次 timer。 + +## 排序与筛选规则 + +评分逻辑使用可解释的静态规则,不做黑盒决策。 + +### 基础过滤 + +先过滤掉以下 issue: + +1. 
已关闭 issue。 +2. 已批准且尚未被显式清理的 issue。 +3. 缺少标题或链接等关键字段的异常项。 + +### 加分项 + +以下情况加分: + +1. 标题或内容包含高影响关键词:`崩溃`、`打不开`、`无法安装`、`升级失败`、`卡死`、`白屏`、`闪退`。 +2. 与主流程强相关:安装、卸载、更新、启动、搜索、列表加载。 +3. 最近创建或最近更新。 +4. 含有复现步骤、日志、截图、错误信息。 +5. 带有明显 bug 类型标签。 + +### 减分项 + +以下情况减分: + +1. 纯咨询类或需求讨论类 issue。 +2. 信息过少,例如只有一句“不能用”。 +3. 明显重复、无明确可执行内容。 + +### 产出格式 + +`ranking.ts` 输出不只包含总分,还包含明细,例如: + +```ts +interface RankingBreakdown { + total: number; + reasons: string[]; +} +``` + +状态文件和批准前摘要都需要携带这些明细,确保“为什么选它”是透明的。 + +## 巡检流程 + +`check-issues.ts` 的单轮行为固定为: + +1. 读取本地状态。 +2. 拉取 Gitee issue 列表。 +3. 标准化数据。 +4. 按过滤规则剔除不可处理项。 +5. 计算每个 issue 的分数。 +6. 选出得分最高的 1 个 issue。 +7. 将其写入 `currentCandidate`。 +8. 更新 `lastRunAt`、`lastRunStatus` 和摘要信息。 + +如果没有候选项: + +1. 将 `currentCandidate` 设为 `null`。 +2. 写入“本轮无可处理 issue”的状态。 +3. 不触发任何后续动作。 + +## 批准流程 + +`approve-issue.ts` 的行为固定为: + +1. 读取本地状态。 +2. 检查 `currentCandidate` 是否存在。 +3. 检查是否已有 `approvedIssue` 正在等待处理结果。 +4. 若可批准,则将候选项复制到 `approvedIssue`。 +5. 调用 opencode 启动器。 +6. 启动成功后保留 `approvedIssue`,并可选择清空 `currentCandidate`。 + +本次实现采用保守策略: + +1. 启动成功后,清空 `currentCandidate`。 +2. 保留 `approvedIssue`,避免同一 issue 被重复批准。 + +后续如果需要“已完成”或“已放弃”清理动作,可以再补一个独立命令。 + +## Opencode 启动器设计 + +`opencode.ts` 负责两件事: + +1. 生成 prompt。 +2. 调用本地 opencode 启动命令。 + +### Prompt 内容 + +prompt 需要至少包含: + +1. issue 标题。 +2. issue URL。 +3. issue 摘要。 +4. 评分原因。 +5. 任务目标:分析根因并开始修复。 +6. 明确约束:开始修改时,基仓库使用 `~/Desktop/spark-store`,但实际编码必须通过 git worktree,从 `Erotica` 分支开出新分支后进行。 + +### Worktree 约束 + +批准后启动的新 opencode 会话中,必须显式看到以下执行约束: + +1. 基仓库固定为 `~/Desktop/spark-store`。 +2. 真正开始修改代码前,使用 git worktree 创建隔离工作区。 +3. 新 worktree 必须从 `Erotica` 分支开出新的工作分支。 +4. 修复工作在该 worktree 中进行,而不是直接在主仓库工作目录中进行。 + +这里的职责是“把约束传给后续修复会话”,而不是在当前巡检脚本里代替用户创建 worktree。 + +### 启动命令配置 + +不要把 opencode 启动命令硬编码成不可修改的固定路径。 + +推荐顺序: + +1. 读取环境变量,例如 `SPARK_STORE_OPENCODE_CMD`。 +2. 若未配置,则退回默认命令模板。 +3. 若命令不存在,返回明确错误并保留 `currentCandidate`/`approvedIssue` 状态供重试。 + +## systemd 调度设计 + +使用用户级 systemd 单元: + +### `spark-store-issue-bot.service` + +职责: + +1. 
调用一次 `issue-bot:check`。 +2. 以 oneshot 形式运行。 +3. 将日志交给 systemd journal。 + +### `spark-store-issue-bot.timer` + +职责: + +1. 每 6 小时触发一次 service。 +2. 启用持久化调度,使设备休眠后恢复时仍可补跑。 + +不把批准动作放进 timer,因为批准必须由人工触发。 + +## 错误处理 + +### 网络或解析失败 + +1. 记录 `lastRunStatus` 为失败类型。 +2. 保留旧候选项,不清空有效状态。 +3. 输出清晰日志,供 `journalctl --user` 排查。 + +### 状态文件损坏 + +1. 读取失败时先备份原文件。 +2. 生成新的空状态。 +3. 在日志中注明发生了状态恢复。 + +### 启动 opencode 失败 + +1. 不丢失候选 issue 信息。 +2. 记录失败信息到状态文件。 +3. 允许你修正环境后再次执行批准或重试命令。 + +## 测试与验证 + +### 脚本层验证 + +需要至少覆盖以下行为: + +1. 有多个 issue 时,能按规则稳定选出得分最高的候选项。 +2. 无 issue 或全被过滤时,`currentCandidate` 正确为空。 +3. 状态文件缺失时能初始化默认状态。 +4. 状态文件损坏时能备份并恢复。 +5. 批准入口能读取候选项并更新状态。 +6. opencode 启动命令缺失时,能返回明确错误而不丢状态。 + +### 手动验证 + +需要人工验证: + +1. `npm run issue-bot:check` 能成功写出候选项。 +2. 连续运行两次巡检,状态更新符合预期,没有异常重复。 +3. `npm run issue-bot:approve` 能基于当前候选项启动新的 opencode 窗口。 +4. 启动后的 prompt 中包含 worktree 约束和 `Erotica` 分支要求。 +5. `systemctl --user start spark-store-issue-bot.service` 可执行。 +6. `systemctl --user enable --now spark-store-issue-bot.timer` 后能看到 timer 生效。 + +### 仓库质量验证 + +完成实现后,至少执行: + +1. `npm run lint` +2. `npm run build:vite` + +如果脚本新增了独立测试,还要运行相应测试命令。 + +## 风险与约束 + +1. Gitee 页面结构可能变化,因此 `gitee.ts` 需要把抓取逻辑局部化,避免影响其他模块。 +2. “最重要”本质上是启发式规则,不保证绝对正确,因此必须保留人工批准环节。 +3. 如果 opencode 的命令行接口或窗口启动方式在本机环境中变化,需要通过配置而不是源码硬编码来适配。 +4. worktree 约束属于后续修复会话的执行要求,当前设计只负责传达和固化,不负责提前改变用户当前工作区。 + +## 决策总结 + +1. 用 `systemd --user` 定时器每 6 小时巡检一次 Gitee issues。 +2. 每轮只选 1 个“最新且最重要”的候选 issue。 +3. 默认只汇报,不自动修复。 +4. 你批准后,再自动拉起新的 opencode 窗口。 +5. 
启动 prompt 中必须固定写明:后续开始修改时,以 `~/Desktop/spark-store` 为基仓库,并通过 git worktree 从 `Erotica` 分支开新分支后执行修复。 diff --git a/docs/superpowers/specs/2026-04-15-installed-apps-and-update-center-loading-design.md b/docs/superpowers/specs/2026-04-15-installed-apps-and-update-center-loading-design.md new file mode 100644 index 00000000..cc53f235 --- /dev/null +++ b/docs/superpowers/specs/2026-04-15-installed-apps-and-update-center-loading-design.md @@ -0,0 +1,276 @@ +# 已安装应用管理与更新中心加载态设计 + +## 背景 + +当前仓库里有三个直接影响体验的问题: + +1. 更新中心调用 `updateCenterStore.open()` 时,会先等待主进程返回快照,再决定是否展示模态框。用户在数据返回前看不到任何反馈,主观感受就是“打开很慢”。 +2. 软件管理里 `spark` 来源当前直接读取 `dpkg-query -W` 的全量安装包,结果混入了大量没有桌面入口的系统包,与“软件管理”应管理可见应用的预期不符。 +3. 软件管理弹窗目前只有“卸载”操作,没有“打开”操作;同时 `src/App.vue` 对 `spark` 来源还有一条“若不在远端商店目录中则直接跳过”的过滤,会导致本机已有桌面应用即使后端已发现,也不会展示出来。 + +本次设计的目标是用最小改动修复这三个问题,不重做更新中心和软件管理的整体结构。 + +## 目标 + +1. 更新中心在用户触发打开时立即显示模态框,并展示明确的加载反馈。 +2. `spark` 软件管理改为基于 `/usr/share/applications` 的桌面应用扫描,而不是全量系统包扫描。 +3. `spark` 桌面应用通过 `realpath` 后的 desktop 文件路径,结合 `dpkg -S ` 反查所属包名。 +4. `apm` 软件管理保持现有 `apm list --installed` 语义,继续展示依赖项。 +5. 软件管理弹窗中的已安装项支持直接打开软件,复用当前已有的应用启动 IPC,而不是新增一套启动协议。 + +## 非目标 + +1. 不重构更新中心的主进程数据加载流程。 +2. 不把软件管理改成“每个 desktop 入口一条记录”;本次仍按“每个包一条记录”展示。 +3. 不改变 `apm` 来源中依赖项继续显示的现有产品决定。 +4. 不新增应用启动器脚本,也不修改 `launch-app` IPC 的入参与调用协议。 +5. 不把软件管理改造成新的独立模块或完整应用索引子系统。 + +## 方案概览 + +本次改动拆成三条最小链路: + +1. 更新中心在渲染层增加独立加载态,让模态框先出现,再等待主进程快照。 +2. `list-installed("spark")` 改为扫描 `/usr/share/applications` 并反查包名,再补齐版本、架构与图标信息。 +3. 已安装应用弹窗增加“打开”按钮,并移除 `spark` 来源依赖远端商店目录的前端过滤,让本机已发现的桌面应用能够真正显示与启动。 + +## 更新中心加载态 + +### 当前问题 + +`src/App.vue` 中的 `openUpdateModal()` 直接 `await updateCenterStore.open()`,而 `src/modules/updateCenter.ts` 的 `open()` 会在拿到完整快照后才把 `isOpen` 设为 `true`。因此用户点击后会先经历一段无反馈等待。 + +### 目标行为 + +1. 用户触发打开更新中心时,模态框立即出现。 +2. 数据尚未返回时,模态框主体显示“正在检查更新”的加载态,而不是空白区域。 +3. 首次打开完成后,正常展示更新列表或错误提示。 +4. 
用户在已打开的更新中心里点击“刷新”时,继续使用同一加载状态字段,并禁用刷新按钮,避免重复触发。 + +### 设计 + +在 `src/modules/updateCenter.ts` 中为 `UpdateCenterStore` 新增渲染层加载状态,例如 `loading: Ref`。 + +行为规则: + +1. `open()` 调用开始时: + - 先重置本次会话状态; + - 立即设置 `isOpen.value = true`; + - 设置 `loading.value = true`; + - 然后再等待 `window.updateCenter.open()`。 +2. `open()` 成功或失败结束时: + - 统一将 `loading.value = false`。 +3. `refresh()` 开始时: + - 设置 `loading.value = true`; + - 调用 `window.updateCenter.refresh()`; + - 完成后再恢复 `loading.value = false`。 +4. `closeNow()` 时: + - 关闭模态框; + - 清理搜索、选中项与迁移确认状态; + - 同时清理渲染层加载态,避免下次打开继承旧状态。 + +### UI 呈现 + +`src/components/UpdateCenterModal.vue` 负责根据 `store.loading.value` 切换内容: + +1. 当 `loading === true` 且还没有可展示项时,列表区域显示居中的加载卡片或 spinner,文案为“正在检查更新…”。 +2. 当 `loading === true` 且已有旧列表时,保留当前列表内容,同时在顶部或列表区域显示轻量的“正在刷新…”提示,避免刷新时内容闪烁清空。 +3. `src/components/update-center/UpdateCenterToolbar.vue` 中的刷新按钮在 `loading === true` 时禁用,并可复用现有刷新图标做旋转或弱化处理。 + +这个方案只在渲染层加状态,不改主进程 `update-center-open` / `update-center-refresh` 的 IPC 协议,因此不会影响现有更新中心服务与测试边界。 + +## `spark` 软件管理的桌面应用扫描规则 + +### 当前问题 + +`electron/main/backend/install-manager.ts` 中 `list-installed("spark")` 目前直接跑: + +```bash +dpkg-query -W -f=${Package} ${Version} ${Architecture}\n +``` + +它得到的是全量系统包,而不是用户可管理的桌面软件。 + +### 目标行为 + +`spark` 来源的软件管理只显示 `/usr/share/applications` 下可映射到系统包的桌面应用,每个包只展示一个条目。 + +### 扫描算法 + +主进程对 `spark` 来源执行以下流程: + +1. 枚举 `/usr/share/applications` 目录中的 `.desktop` 文件。 +2. 对每个候选文件执行 `realpath`,得到实际 desktop 路径,兼容软链接场景。 +3. 读取 desktop 内容,解析: + - `Name` + - `Icon` + - `NoDisplay` +4. 过滤规则: + - 不是 `.desktop` 的文件直接跳过; + - `NoDisplay=true` 的 desktop 跳过; + - 无法读取、无法解析或 `realpath` 失败的条目跳过; + - `dpkg -S ` 无法定位所属包名的条目跳过。 +5. 对通过过滤的条目调用 `dpkg -S ` 反查所属包。 +6. 将 desktop 条目按包名去重: + - 同一包命中多个有效 desktop 时,仅保留第一个有效条目; + - “第一个”的定义以稳定排序后的 desktop 文件名遍历顺序为准,保证结果可预测。 +7. 收集到包名后,再补齐版本和架构信息,形成最终 `InstalledAppInfo[]`。 + +### 包信息补齐 + +为了保留当前软件管理卡片里的版本与架构展示,`spark` 来源仍需要版本与架构信息,但不再以它作为筛选源。 + +推荐做法: + +1. 先通过 desktop 扫描得到有效包名集合。 +2. 
再执行一次 `dpkg-query -W -f='${Package}\t${Version}\t${Architecture}\n'` 构建元数据映射。
`electron/main/backend/install-manager.ts` + - 调整 `list-installed("spark")` 的发现逻辑。 + - 可按需要抽出一个小型 helper 处理 spark desktop 扫描,避免继续堆大单文件。 + +### 渲染层状态与页面 + +1. `src/modules/updateCenter.ts` + - 新增加载态,并调整 `open()` / `refresh()` / `closeNow()` 的时序。 +2. `src/components/UpdateCenterModal.vue` + - 根据加载态展示“正在检查更新”或“正在刷新”提示。 +3. `src/components/update-center/UpdateCenterToolbar.vue` + - 刷新按钮支持禁用与加载视觉状态。 +4. `src/components/InstalledAppsModal.vue` + - 新增“打开”按钮与 `open-app` 事件。 +5. `src/App.vue` + - 打开更新中心时不再等待模态框延迟出现。 + - 修正 `spark` 来源软件列表的远端目录过滤。 + - 将软件管理中的 `open-app` 事件接到现有 `openDownloadedApp()`。 + +## 测试策略 + +### 更新中心 + +扩展以下测试: + +1. `src/__tests__/unit/update-center/store.test.ts` + - 覆盖 `open()` 在等待快照期间就已将 `isOpen` 置为 `true`。 + - 覆盖 `loading` 在 `open()` 与 `refresh()` 生命周期中的变化。 +2. `src/__tests__/unit/update-center/UpdateCenterModal.test.ts` + - 覆盖加载态文案展示。 + - 覆盖刷新按钮在加载时被禁用。 + +### 软件管理 + +1. 为 `spark` desktop 扫描逻辑新增单元测试,覆盖: + - 从 `/usr/share/applications` 发现有效 desktop; + - 通过 `realpath + dpkg -S` 反查包名; + - 跳过 `NoDisplay=true`; + - 同包多个 desktop 仅保留一个; + - 单个 desktop 失败不会让整批结果失败。 +2. 扩展 `src/__tests__/unit/InstalledAppsModal.test.ts` + - 覆盖“打开”按钮可见; + - 覆盖点击后会发出 `open-app` 事件。 + +### 回归验证 + +1. `spark` 来源软件管理仍可卸载。 +2. `apm` 来源软件管理仍保留依赖项显示。 +3. 下载详情与应用详情页已有的 `launch-app` 调用不受影响。 + +## 风险与约束 + +1. `dpkg -S` 输出格式可能包含架构后缀或多条匹配结果,解析时需要明确采用“第一条所有权记录”的稳定策略,并只提取包名部分。 +2. 某些 desktop 图标可能是主题图标名而非绝对路径;本次不重做图标解析,只保证名称与路径被正确透传。 +3. 如果某些本机桌面应用没有远端商店元数据,软件管理中会显示最小信息卡片;这是预期结果,因为需求本身就是“以本机 `/usr/share/applications` 为准”。 +4. 更新中心加载态只解决“无反馈等待”的问题,不保证主进程真实查询耗时本身缩短。