131 changes: 104 additions & 27 deletions src/remote/sshProcess.ts
@@ -40,7 +40,11 @@ export interface SshProcessMonitorOptions {
}

// 1 hour cleanup threshold for old network info files
const CLEANUP_MAX_AGE_MS = 60 * 60 * 1000;
const CLEANUP_NETWORK_MAX_AGE_MS = 60 * 60 * 1000;
// 7 day cleanup threshold for old proxy log files
const CLEANUP_LOG_MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000;
// Maximum number of proxy log files to keep during cleanup
const CLEANUP_MAX_LOG_FILES = 20;

/**
* Monitors the SSH process for a Coder workspace connection and displays
@@ -74,49 +78,110 @@ export class SshProcessMonitor implements vscode.Disposable {
private lastStaleSearchTime = 0;

/**
* Cleans up network info files older than the specified age.
* Helper to clean up files in a directory.
* Stats files in parallel, applies selection criteria, then deletes in parallel.
*/
private static async cleanupOldNetworkFiles(
networkInfoPath: string,
maxAgeMs: number,
private static async cleanupFiles(
dir: string,
fileType: string,
logger: Logger,
options: {
filter: (name: string) => boolean;
select: (
files: Array<{ name: string; mtime: number }>,
now: number,
) => Array<{ name: string }>;
},
): Promise<void> {
try {
const files = await fs.readdir(networkInfoPath);
const now = Date.now();
const files = await fs.readdir(dir);

// Gather file stats in parallel
const withStats = await Promise.all(
files.filter(options.filter).map(async (name) => {
try {
const stats = await fs.stat(path.join(dir, name));
return { name, mtime: stats.mtime.getTime() };
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
logger.debug(`Failed to stat ${fileType} ${name}`, error);
}
return null;
}
}),
);

const deletedFiles: string[] = [];
for (const file of files) {
if (!file.endsWith(".json")) {
continue;
}

const filePath = path.join(networkInfoPath, file);
try {
const stats = await fs.stat(filePath);
const ageMs = now - stats.mtime.getTime();
const toDelete = options.select(
withStats.filter((f) => f !== null),
now,
);

if (ageMs > maxAgeMs) {
await fs.unlink(filePath);
deletedFiles.push(file);
}
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
logger.debug(`Failed to clean up network info file ${file}`, error);
// Delete files in parallel
const results = await Promise.all(
toDelete.map(async (file) => {
try {
await fs.unlink(path.join(dir, file.name));
return file.name;
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
logger.debug(`Failed to delete ${fileType} ${file.name}`, error);
}
return null;
}
}
}
}),
);

const deletedFiles = results.filter((name) => name !== null);
if (deletedFiles.length > 0) {
logger.debug(
`Cleaned up ${deletedFiles.length} old network info file(s): ${deletedFiles.join(", ")}`,
`Cleaned up ${deletedFiles.length} ${fileType}(s): ${deletedFiles.join(", ")}`,
);
}
} catch {
// Directory may not exist yet, ignore
}
}

/**
* Cleans up network info files older than the specified age.
*/
private static async cleanupOldNetworkFiles(
networkInfoPath: string,
maxAgeMs: number,
logger: Logger,
): Promise<void> {
await SshProcessMonitor.cleanupFiles(
networkInfoPath,
"network info file",
logger,
{
filter: (name) => name.endsWith(".json"),
select: (files, now) => files.filter((f) => now - f.mtime > maxAgeMs),
},
);
}

/**
* Cleans up old proxy log files that exceed the retention limit.
* Only deletes files older than maxAgeMs when file count exceeds maxFilesToKeep.
*/
private static async cleanupOldLogFiles(
logDir: string,
maxFilesToKeep: number,
maxAgeMs: number,
logger: Logger,
): Promise<void> {
await SshProcessMonitor.cleanupFiles(logDir, "log file", logger, {
filter: (name) => name.startsWith("coder-ssh") && name.endsWith(".log"),
select: (files, now) =>
files
.toSorted((a, b) => a.mtime - b.mtime) // oldest first
.slice(0, -maxFilesToKeep) // keep the newest maxFilesToKeep
.filter((f) => now - f.mtime > maxAgeMs), // only delete stale files
});
}

private constructor(options: SshProcessMonitorOptions) {
this.options = {
...options,
@@ -142,12 +207,24 @@ export class SshProcessMonitor implements vscode.Disposable {
// Clean up old network info files (non-blocking, fire-and-forget)
SshProcessMonitor.cleanupOldNetworkFiles(
options.networkInfoPath,
CLEANUP_MAX_AGE_MS,
CLEANUP_NETWORK_MAX_AGE_MS,
options.logger,
).catch(() => {
// Ignore cleanup errors - they shouldn't affect monitoring
});

// Clean up old proxy log files (combined: count + age threshold)
if (options.proxyLogDir) {
SshProcessMonitor.cleanupOldLogFiles(
options.proxyLogDir,
CLEANUP_MAX_LOG_FILES,
CLEANUP_LOG_MAX_AGE_MS,
options.logger,
).catch(() => {
// Ignore cleanup errors - they shouldn't affect monitoring
});
}

monitor.searchForProcess().catch((err) => {
options.logger.error("Error in SSH process monitor", err);
});
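The combined count-plus-age retention rule in cleanupOldLogFiles is easiest to follow on concrete data. The sketch below is a standalone TypeScript illustration, not code from this PR: the selectLogsToDelete helper, the sample file list, and the local constants are invented here, but the toSorted → slice → filter pipeline mirrors the select callback above. It shows why 25 log files of which only 3 are stale end up as 22 remaining files, the same scenario the proxy-log test cases in the next file assert.

interface LogEntry {
  name: string;
  mtime: number; // modification time, epoch milliseconds
}

const MAX_FILES_TO_KEEP = 20; // mirrors CLEANUP_MAX_LOG_FILES
const MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000; // mirrors CLEANUP_LOG_MAX_AGE_MS

// Same selection pipeline as the `select` callback in cleanupOldLogFiles.
function selectLogsToDelete(files: LogEntry[], now: number): LogEntry[] {
  return files
    .toSorted((a, b) => a.mtime - b.mtime) // oldest first
    .slice(0, -MAX_FILES_TO_KEEP) // candidates beyond the newest 20
    .filter((f) => now - f.mtime > MAX_AGE_MS); // of those, only stale ones
}

// 25 files: the 3 oldest are 8 days old, the rest were written a minute ago.
const now = Date.now();
const sample: LogEntry[] = Array.from({ length: 25 }, (_, i) => ({
  name: `coder-ssh-${String(i + 1).padStart(2, "0")}.log`,
  mtime: i < 3 ? now - 8 * 24 * 60 * 60 * 1000 : now - 60 * 1000,
}));

// slice() leaves the 5 oldest as candidates, but only 3 of them are older
// than 7 days, so exactly 3 files are deleted and 22 remain.
console.log(selectLogsToDelete(sample, now).map((f) => f.name));
// -> ["coder-ssh-01.log", "coder-ssh-02.log", "coder-ssh-03.log"]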
130 changes: 124 additions & 6 deletions test/unit/remote/sshProcess.test.ts
@@ -424,10 +424,12 @@ describe("SshProcessMonitor", () => {
});

describe("cleanup old network files", () => {
const setOldMtime = (filePath: string) => {
// Default cleanup is 1 hour; set mtime to 2 hours ago to mark as old
const TWO_HOURS_AGO = Date.now() - 2 * 60 * 60 * 1000;
vol.utimesSync(filePath, TWO_HOURS_AGO / 1000, TWO_HOURS_AGO / 1000);
// Network cleanup: 1 hour threshold
const NETWORK_MAX_AGE_MS = 60 * 60 * 1000;

const setMtimeAgo = (filePath: string, ageMs: number) => {
const mtime = (Date.now() - ageMs) / 1000;
vol.utimesSync(filePath, mtime, mtime);
};

it("deletes old .json files but preserves recent and non-.json files", async () => {
@@ -438,8 +440,8 @@ describe("SshProcessMonitor", () => {
"/network/recent.json": "{}",
"/network/old.log": "{}",
});
setOldMtime("/network/old.json");
setOldMtime("/network/old.log");
setMtimeAgo("/network/old.json", NETWORK_MAX_AGE_MS * 2);
setMtimeAgo("/network/old.log", NETWORK_MAX_AGE_MS * 2);

createMonitor({
codeLogDir: "/logs/window1",
@@ -477,6 +479,122 @@ describe("SshProcessMonitor", () => {
});
});

describe("cleanup proxy log files", () => {
// Proxy log cleanup: 7 day threshold, 20 files max
const LOG_MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000;
const LOG_MAX_FILES = 20;

const setMtimeAgo = (filePath: string, ageMs: number) => {
const mtime = (Date.now() - ageMs) / 1000;
vol.utimesSync(filePath, mtime, mtime);
};

const logFileName = (i: number) =>
`coder-ssh-${i.toString().padStart(2, "0")}.log`;

const setupTest = (total: number, stale: number): string[] => {
vol.fromJSON({
"/logs/ms-vscode-remote.remote-ssh/1-Remote - SSH.log":
"-> socksPort 12345 ->",
});
vol.mkdirSync("/proxy-logs", { recursive: true });

const files = Array.from({ length: total }, (_, i) => logFileName(i + 1));
for (const name of files) {
vol.writeFileSync(`/proxy-logs/${name}`, "");
}
for (let i = 0; i < stale; i++) {
setMtimeAgo(`/proxy-logs/${files[i]}`, LOG_MAX_AGE_MS * 2);
}
return files;
};

interface StaleLogTestCase {
total: number;
stale: number;
expected: number;
desc: string;
}

it.each<StaleLogTestCase>([
{ total: 25, stale: 8, expected: 20, desc: "Deletes until limit" },
{ total: 25, stale: 3, expected: 22, desc: "Only deletes stale" },
{ total: 25, stale: 0, expected: 25, desc: "Keeps recent files" },
{ total: 15, stale: 5, expected: 15, desc: "Keeps under limit" },
])(
"$desc: $total files, $stale stale → $expected remaining",
async ({ total, stale, expected }) => {
setupTest(total, stale);

createMonitor({
codeLogDir: "/logs/window1",
proxyLogDir: "/proxy-logs",
});

await vi.waitFor(() => {
expect(vol.readdirSync("/proxy-logs")).toHaveLength(expected);
});
},
);

it("only matches coder-ssh*.log files", async () => {
const files = setupTest(25, 25);
// Add non-matching files
const nonMatchingFiles = [
"other.log",
"coder-ssh-config.json",
"readme.txt",
];
for (const f of nonMatchingFiles) {
const filePath = `/proxy-logs/${f}`;
vol.writeFileSync(filePath, "");
setMtimeAgo(filePath, LOG_MAX_AGE_MS * 2);
}

createMonitor({
codeLogDir: "/logs/window1",
proxyLogDir: "/proxy-logs",
});

await vi.waitFor(() => {
expect(vol.readdirSync("/proxy-logs")).toHaveLength(
LOG_MAX_FILES + nonMatchingFiles.length,
);
});

const remaining = vol.readdirSync("/proxy-logs") as string[];
// Non-matching files preserved
expect(remaining).toContain("other.log");
expect(remaining).toContain("coder-ssh-config.json");
expect(remaining).toContain("readme.txt");
// Oldest matching files deleted
expect(remaining).not.toContain(files[0]);
expect(remaining).toContain(files[24]);
});

it("does not throw when proxy log directory is missing or empty", () => {
vol.fromJSON({
"/logs/ms-vscode-remote.remote-ssh/1-Remote - SSH.log":
"-> socksPort 12345 ->",
});
vol.mkdirSync("/empty-proxy-logs", { recursive: true });

expect(() =>
createMonitor({
codeLogDir: "/logs/window1",
proxyLogDir: "/nonexistent-proxy-logs",
}),
).not.toThrow();

expect(() =>
createMonitor({
codeLogDir: "/logs/window1",
proxyLogDir: "/empty-proxy-logs",
}),
).not.toThrow();
});
});

describe("missing file retry logic", () => {
beforeEach(() => vi.useFakeTimers());
afterEach(() => vi.useRealTimers());
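As a closing note on the refactor in sshProcess.ts: the shared cleanupFiles helper only asks callers for a filename predicate and a pure selection function over { name, mtime } entries, while stat-ing, parallel deletion, and ENOENT handling stay in one place. The sketch below models that options contract in isolation; the FileInfo and CleanupPolicy types, the "trace file" policy, and its thresholds are invented for illustration and are not part of this PR.

interface FileInfo {
  name: string;
  mtime: number; // epoch milliseconds
}

interface CleanupPolicy {
  // Which directory entries are candidates at all.
  filter: (name: string) => boolean;
  // Which candidates to delete, given their mtimes and the current time.
  select: (files: FileInfo[], now: number) => Array<{ name: string }>;
}

// Hypothetical policy: delete "*.trace" files older than 30 minutes,
// but always keep the 5 newest regardless of age.
const traceFilePolicy: CleanupPolicy = {
  filter: (name) => name.endsWith(".trace"),
  select: (files, now) =>
    files
      .toSorted((a, b) => a.mtime - b.mtime)
      .slice(0, -5)
      .filter((f) => now - f.mtime > 30 * 60 * 1000),
};

// Because `select` is a pure function of (files, now), a policy like this can
// be unit-tested without touching the filesystem, which is the main payoff of
// splitting it out of the stat/unlink plumbing.
const now = Date.now();
console.log(
  traceFilePolicy.select(
    [
      { name: "a.trace", mtime: now - 60 * 60 * 1000 }, // 1 hour old
      { name: "b.trace", mtime: now - 1000 }, // fresh
    ],
    now,
  ),
);
// -> [] (only 2 files exist, so slice(0, -5) keeps everything)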