feat(beadboard-550): replace issues.jsonl reads with direct Dolt SQL via mysql2

- src/lib/read-issues-dolt.ts: readIssuesViaDolt() queries issues+labels (GROUP_CONCAT)
  and dependencies in 2 SQL queries; normalizes Date columns to ISO strings; returns
  null when the server is unreachable so the caller can fall back gracefully
- src/lib/read-issues.ts: readIssuesFromDisk() tries Dolt first (always), falls back to
  issues.jsonl with console.warn; removes dead readIssuesViaBd/normalizeBdIssue/
  normalizeDependencies code now that the CLI path is superseded
- AGENTS.md: documents new Dolt read path + SSE watcher trigger; removes stale
  manual issues.jsonl re-export instructions (no longer needed)

Verified: bd writes update last-touched → chokidar fires → syncActivity → Dolt query
→ snapshot diff → SSE push. 146/146 tests pass, lint clean.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
ZenchantLive 2026-02-28 17:33:43 -08:00
parent 91b4777a7c
commit 335beb82d6
3 changed files with 196 additions and 175 deletions

View file

@ -260,22 +260,15 @@ wsl --shutdown
This is a one-time setup for mixed environments only. It is **not required** for single-platform contributors.
### Keeping issues.jsonl in sync (temporary)
### How the read path works
Until `beadboard-550` (direct mysql2 connection) is implemented, `issues.jsonl` must be exported manually after `bd` writes in mixed environments:
BeadBoard (`src/lib/read-issues.ts`) queries Dolt SQL directly via `mysql2` (`src/lib/dolt-client.ts`). On every page load or SSE-triggered refresh:
```bash
# In WSL2 — re-export Dolt state to issues.jsonl so Windows frontend sees it
bd list --all --limit 0 --json | python3 -c "
import sys, json
issues = json.load(sys.stdin)
with open('.beads/issues.jsonl', 'w') as f:
for issue in issues:
f.write(json.dumps(issue) + '\n')
print(f'Exported {len(issues)} issues to issues.jsonl')
"
```
1. `readIssuesFromDisk()` → tries `readIssuesViaDolt(projectRoot)` first
2. If Dolt unreachable → logs a warning and falls back to reading `issues.jsonl`
Once `beadboard-550` ships, `issues.jsonl` becomes a deprecated fallback and this step is no longer needed.
`issues.jsonl` is a **deprecated fallback** — no manual export step is required. The file is kept on disk by `bd` for git history, but BeadBoard does not rely on it when the Dolt server is running.
**SSE real-time updates**: `bd` touches `.beads/last-touched` on every write. Chokidar detects this change, triggers a snapshot diff, and fires an SSE event if anything changed — fetching fresh data from Dolt automatically.
<!-- END BEADS INTEGRATION -->

120
src/lib/read-issues-dolt.ts Normal file
View file

@ -0,0 +1,120 @@
import type { RowDataPacket } from 'mysql2';
import { getDoltConnection, DoltConnectionError } from './dolt-client';
import type { ReadIssuesOptions } from './read-issues';
import type { BeadDependency, BeadIssue, BeadStatus } from './types';
// One row of the issues query in readIssuesViaDolt(): every column selected
// from the `issues` table plus `labels_concat`, the GROUP_CONCAT of the
// issue's labels (comma-separated; NULL when the issue has no labels).
interface IssueRow extends RowDataPacket {
  id: string;
  title: string;
  description: string | null;
  status: string;
  priority: number;
  issue_type: string;
  assignee: string | null;
  owner: string | null;
  close_reason: string | null;
  closed_by_session: string | null;
  created_by: string | null;
  estimated_minutes: number | null;
  external_ref: string | null;
  metadata: Record<string, unknown> | null;
  // Date columns may surface as Date objects or strings depending on the
  // mysql2 connection settings; toIsoString() normalizes both.
  created_at: Date | string | null;
  updated_at: Date | string | null;
  closed_at: Date | string | null;
  due_at: Date | string | null;
  labels_concat: string | null;
}
// One row of the `dependencies` table: a directed edge from issue_id to
// depends_on_id, tagged with bd's dependency type (e.g. 'parent-child').
interface DepRow extends RowDataPacket {
  issue_id: string;
  depends_on_id: string;
  type: string;
}
/**
 * Normalize a SQL date value to an ISO-8601 string.
 * Date objects are serialized via toISOString(), strings pass through
 * untouched, and null/undefined both collapse to null.
 */
function toIsoString(val: Date | string | null | undefined): string | null {
  if (val == null) {
    return null;
  }
  return val instanceof Date ? val.toISOString() : val;
}
/**
 * Map one SQL row (plus its pre-grouped dependency list) onto the BeadIssue
 * shape the rest of the app consumes. Date columns become ISO strings and
 * nullable columns are normalized to explicit null.
 */
function normalizeRow(row: IssueRow, deps: BeadDependency[]): BeadIssue {
  const labels = row.labels_concat
    ? row.labels_concat.split(',').filter(Boolean)
    : [];
  return {
    id: row.id,
    title: row.title,
    description: row.description ?? null,
    // Fall back to sane defaults when a column is unexpectedly empty.
    status: (row.status as BeadStatus) ?? 'open',
    priority: typeof row.priority === 'number' ? row.priority : 2,
    issue_type: row.issue_type ?? 'task',
    assignee: row.assignee ?? null,
    templateId: null, // not stored in Dolt; always null on this read path
    owner: row.owner ?? null,
    labels,
    dependencies: deps,
    created_at: toIsoString(row.created_at) ?? '',
    updated_at: toIsoString(row.updated_at) ?? '',
    closed_at: toIsoString(row.closed_at),
    close_reason: row.close_reason ?? null,
    closed_by_session: row.closed_by_session ?? null,
    created_by: row.created_by ?? null,
    due_at: toIsoString(row.due_at),
    estimated_minutes:
      typeof row.estimated_minutes === 'number' ? row.estimated_minutes : null,
    external_ref: row.external_ref ?? null,
    metadata: row.metadata ?? {},
  };
}
/**
 * Query the Dolt SQL server directly for all issues, attaching labels and
 * dependencies.
 *
 * Uses exactly two queries regardless of issue count:
 *   1. issues LEFT JOIN labels with GROUP_CONCAT (avoids an N+1 per issue)
 *   2. a full dependencies scan, bucketed into a Map afterwards
 *
 * @param projectRoot project directory whose Dolt server should be queried
 * @param options     tombstone/agent filtering flags (see ReadIssuesOptions)
 * @returns normalized issues, or null (never throws) when the Dolt server is
 *   unreachable or a query fails, so the caller can fall back to the
 *   issues.jsonl path.
 */
export async function readIssuesViaDolt(
  projectRoot: string,
  options: ReadIssuesOptions = {}
): Promise<BeadIssue[] | null> {
  let pool;
  try {
    pool = await getDoltConnection(projectRoot);
  } catch (err) {
    // The original code had two branches here that both returned null,
    // silently swallowing unexpected failures. A DoltConnectionError is the
    // expected "server not running" signal; anything else is surprising and
    // worth a warning before falling back.
    if (!(err instanceof DoltConnectionError)) {
      console.warn('[beadboard] unexpected Dolt connection failure:', err);
    }
    return null;
  }
  try {
    // Query 1: all issues with comma-separated labels.
    // NOTE(review): GROUP_CONCAT is truncated at group_concat_max_len
    // (default 1024 in MySQL) — confirm the Dolt server's setting if label
    // lists ever grow large.
    const [issueRows] = await pool.execute<IssueRow[]>(
      `SELECT i.*, GROUP_CONCAT(l.label SEPARATOR ',') AS labels_concat
       FROM issues i
       LEFT JOIN labels l ON l.issue_id = i.id
       GROUP BY i.id`
    );
    // Query 2: every dependency edge in one shot.
    const [depRows] = await pool.execute<DepRow[]>(
      `SELECT issue_id, depends_on_id, type FROM dependencies`
    );
    // Bucket edges by issue_id so normalizeRow receives a ready-made list.
    const depsMap = new Map<string, BeadDependency[]>();
    for (const dep of depRows) {
      // bd stores 'parent-child'; the UI's BeadDependency uses 'parent'.
      const depType: BeadDependency['type'] =
        dep.type === 'parent-child' ? 'parent' : (dep.type as BeadDependency['type']);
      const list = depsMap.get(dep.issue_id) ?? [];
      list.push({ type: depType, target: dep.depends_on_id });
      depsMap.set(dep.issue_id, list);
    }
    return issueRows
      .map((row) => normalizeRow(row, depsMap.get(row.id) ?? []))
      .filter((issue) => {
        // Tombstones are hidden unless explicitly requested.
        if (issue.status === 'tombstone' && !options.includeTombstones) return false;
        // Agent identities stay out of mission lists unless the caller
        // opts out (the watcher/diffing path sets skipAgentFilter).
        if (issue.labels.includes('gt:agent') && !options.skipAgentFilter) return false;
        return true;
      });
  } catch (err) {
    // A query/parse failure is treated the same as "server unreachable":
    // the caller falls back to issues.jsonl. Log so it isn't invisible.
    console.warn('[beadboard] Dolt query failed, treating as unreachable:', err);
    return null;
  }
}

View file

@ -1,161 +1,69 @@
import path from 'node:path';
import { runBdCommand } from './bridge';
import { parseIssuesJsonl } from './parser';
import { canonicalizeWindowsPath } from './pathing';
import { readTextFileWithRetry } from './read-text-retry';
import { buildProjectContext } from './project-context';
import type { BeadDependency, BeadIssue } from './types';
import type { BeadIssueWithProject, ProjectSource } from './types';
// Options accepted by the issue-reading entry points in this module.
export interface ReadIssuesOptions {
  projectRoot?: string; // defaults to process.cwd()
  includeTombstones?: boolean; // when true, tombstoned issues are kept
  projectSource?: ProjectSource; // provenance passed to buildProjectContext
  projectAddedAt?: string | null; // passed through to buildProjectContext
  preferBd?: boolean; // when true, try the `bd list --json` CLI path first
  skipAgentFilter?: boolean; // when true, keep gt:agent-labeled issues
}
/**
 * Candidate paths for the JSONL issue export, in preference order:
 * `.beads/issues.jsonl`, then `.beads/issues.jsonl.new`. Both are
 * canonicalized via canonicalizeWindowsPath.
 */
export function resolveIssuesJsonlPathCandidates(projectRoot: string = process.cwd()): string[] {
  const baseDir = path.resolve(projectRoot, '.beads');
  const primary = canonicalizeWindowsPath(path.join(baseDir, 'issues.jsonl'));
  const fallback = canonicalizeWindowsPath(path.join(baseDir, 'issues.jsonl.new'));
  return [primary, fallback];
}
/** Primary `.beads/issues.jsonl` path (first candidate) for a project root. */
export function resolveIssuesJsonlPath(projectRoot: string = process.cwd()): string {
  return resolveIssuesJsonlPathCandidates(projectRoot)[0];
}
/**
 * Coerce an untyped `dependencies` value from bd JSON into BeadDependency[].
 * Entries that are not objects, lack a string `type`, or have no resolvable
 * target are dropped. The edge target is read from `target`, falling back to
 * `depends_on_id`; bd's 'parent-child' type is mapped to the UI's 'parent'.
 */
function normalizeDependencies(value: unknown): BeadDependency[] {
  if (!Array.isArray(value)) {
    return [];
  }
  const result: BeadDependency[] = [];
  for (const item of value) {
    if (!item || typeof item !== 'object') continue;
    const dep = item as { type?: unknown; target?: unknown; depends_on_id?: unknown };
    if (typeof dep.type !== 'string') continue;
    // `target` wins when it is a string, even if empty (then dropped below).
    let target: string | null = null;
    if (typeof dep.target === 'string') {
      target = dep.target;
    } else if (typeof dep.depends_on_id === 'string') {
      target = dep.depends_on_id;
    }
    if (!target) continue;
    result.push({
      type: dep.type === 'parent-child' ? 'parent' : (dep.type as BeadDependency['type']),
      target,
    });
  }
  return result;
}
/**
 * Validate and normalize one raw issue object from `bd list --json`.
 * Returns null when the value is not an object carrying string id/title;
 * every other field falls back to a safe default when missing or mistyped.
 */
function normalizeBdIssue(raw: unknown): BeadIssue | null {
  if (!raw || typeof raw !== 'object') {
    return null;
  }
  const data = raw as Record<string, unknown>;
  const { id, title } = data;
  if (typeof id !== 'string' || typeof title !== 'string') {
    return null;
  }
  // Small local coercion: strings pass through, anything else becomes null.
  const str = (v: unknown): string | null => (typeof v === 'string' ? v : null);
  return {
    id,
    title,
    description: str(data.description),
    status: typeof data.status === 'string' ? (data.status as BeadIssue['status']) : 'open',
    priority: typeof data.priority === 'number' ? data.priority : 2,
    issue_type: typeof data.issue_type === 'string' ? data.issue_type : 'task',
    assignee: str(data.assignee),
    templateId: null, // never provided by bd
    owner: str(data.owner),
    labels: Array.isArray(data.labels)
      ? data.labels.filter((x): x is string => typeof x === 'string')
      : [],
    dependencies: normalizeDependencies(data.dependencies),
    created_at: str(data.created_at) ?? '',
    updated_at: str(data.updated_at) ?? '',
    closed_at: str(data.closed_at),
    close_reason: str(data.close_reason),
    closed_by_session: str(data.closed_by_session),
    created_by: str(data.created_by),
    due_at: str(data.due_at),
    estimated_minutes: typeof data.estimated_minutes === 'number' ? data.estimated_minutes : null,
    external_ref: str(data.external_ref),
    metadata:
      typeof data.metadata === 'object' && data.metadata !== null
        ? (data.metadata as Record<string, unknown>)
        : {},
  };
}
/**
 * Fetch issues by shelling out to `bd list --all --limit 0 --json`.
 * Returns null when the command fails or its stdout is not a JSON array,
 * so the caller can fall back to reading issues.jsonl.
 */
async function readIssuesViaBd(options: ReadIssuesOptions, project: ReturnType<typeof buildProjectContext>): Promise<BeadIssueWithProject[] | null> {
  const projectRoot = options.projectRoot ?? process.cwd();
  const command = await runBdCommand({
    projectRoot,
    args: ['list', '--all', '--limit', '0', '--json'],
  });
  if (!command.success) {
    return null;
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(command.stdout);
  } catch {
    return null;
  }
  if (!Array.isArray(parsed)) {
    return null;
  }
  const issues: BeadIssueWithProject[] = [];
  for (const raw of parsed) {
    const issue = normalizeBdIssue(raw);
    if (!issue) continue;
    // Tombstones are hidden unless explicitly requested.
    if (issue.status === 'tombstone' && !options.includeTombstones) continue;
    // Agent identities stay out of mission lists unless the caller opts out
    // (the watcher/diffing path sets skipAgentFilter).
    if (issue.labels.includes('gt:agent') && !options.skipAgentFilter) continue;
    issues.push({ ...issue, project });
  }
  return issues;
}
export async function readIssuesFromDisk(options: ReadIssuesOptions = {}): Promise<BeadIssueWithProject[]> {
const projectRoot = options.projectRoot ?? process.cwd();
const candidates = resolveIssuesJsonlPathCandidates(projectRoot);
const project = buildProjectContext(projectRoot, {
source: options.projectSource ?? 'local',
addedAt: options.projectAddedAt ?? null,
});
if (options.preferBd ?? false) {
const viaBd = await readIssuesViaBd(options, project);
if (viaBd) {
return viaBd;
}
}
for (const issuesPath of candidates) {
try {
const jsonl = await readTextFileWithRetry(issuesPath);
return parseIssuesJsonl(jsonl, {
includeTombstones: options.includeTombstones ?? false,
skipAgentFilter: options.skipAgentFilter ?? false,
}).map((issue) => ({
...issue,
project,
}));
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
continue;
}
throw error;
}
}
return [];
}
import path from 'node:path';
import { parseIssuesJsonl } from './parser';
import { canonicalizeWindowsPath } from './pathing';
import { readTextFileWithRetry } from './read-text-retry';
import { buildProjectContext } from './project-context';
import { readIssuesViaDolt } from './read-issues-dolt';
import type { BeadIssueWithProject, ProjectSource } from './types';
// Options accepted by readIssuesFromDisk() and readIssuesViaDolt().
export interface ReadIssuesOptions {
  projectRoot?: string; // defaults to process.cwd()
  includeTombstones?: boolean; // when true, tombstoned issues are kept
  projectSource?: ProjectSource; // provenance passed to buildProjectContext
  projectAddedAt?: string | null; // passed through to buildProjectContext
  /** @deprecated no longer read by readIssuesFromDisk — Dolt is always tried first. */
  preferBd?: boolean;
  skipAgentFilter?: boolean; // when true, keep gt:agent-labeled issues
}
/**
 * Candidate paths for the JSONL issue export, in preference order:
 * `.beads/issues.jsonl`, then `.beads/issues.jsonl.new`. Both are
 * canonicalized via canonicalizeWindowsPath.
 */
export function resolveIssuesJsonlPathCandidates(projectRoot: string = process.cwd()): string[] {
  const beadsDir = path.resolve(projectRoot, '.beads');
  return ['issues.jsonl', 'issues.jsonl.new'].map((name) =>
    canonicalizeWindowsPath(path.join(beadsDir, name))
  );
}
/** Primary `.beads/issues.jsonl` path (first candidate) for a project root. */
export function resolveIssuesJsonlPath(projectRoot: string = process.cwd()): string {
  const [primary] = resolveIssuesJsonlPathCandidates(projectRoot);
  return primary;
}
export async function readIssuesFromDisk(options: ReadIssuesOptions = {}): Promise<BeadIssueWithProject[]> {
const projectRoot = options.projectRoot ?? process.cwd();
const candidates = resolveIssuesJsonlPathCandidates(projectRoot);
const project = buildProjectContext(projectRoot, {
source: options.projectSource ?? 'local',
addedAt: options.projectAddedAt ?? null,
});
// Try Dolt SQL first (always preferred when server is available)
const viaDolt = await readIssuesViaDolt(projectRoot, options);
if (viaDolt !== null) {
return viaDolt.map((issue) => ({ ...issue, project }));
}
// Dolt unreachable — fall back to issues.jsonl
console.warn('[beadboard] Dolt unreachable, falling back to issues.jsonl (data may be stale)');
for (const issuesPath of candidates) {
try {
const jsonl = await readTextFileWithRetry(issuesPath);
return parseIssuesJsonl(jsonl, {
includeTombstones: options.includeTombstones ?? false,
skipAgentFilter: options.skipAgentFilter ?? false,
}).map((issue) => ({
...issue,
project,
}));
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
continue;
}
throw error;
}
}
return [];
}