25 changes: 24 additions & 1 deletion apps/mcp-server/src/services/ILighthouseService.ts
@@ -3,7 +3,14 @@
*/

import { UploadResult, DownloadResult, AccessCondition, Dataset } from "@lighthouse-tooling/types";
import { EnhancedAccessCondition } from "@lighthouse-tooling/sdk-wrapper";
import {
EnhancedAccessCondition,
BatchUploadOptions,
BatchDownloadOptions,
BatchOperationResult,
BatchDownloadFileResult,
FileInfo,
} from "@lighthouse-tooling/sdk-wrapper";

export interface StoredFile {
cid: string;
@@ -150,4 +157,20 @@ export interface ILighthouseService {
success: boolean;
error?: string;
}>;

/**
* Batch upload multiple files with configurable concurrency
*/
batchUploadFiles(
filePaths: string[],
options?: BatchUploadOptions,
): Promise<BatchOperationResult<FileInfo>>;

/**
* Batch download multiple files by CID with configurable concurrency
*/
batchDownloadFiles(
cids: string[],
options?: BatchDownloadOptions,
): Promise<BatchOperationResult<BatchDownloadFileResult>>;
}
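
For reference, a minimal usage sketch of the two new interface methods (hedged: the service variable, file paths, and option values below are illustrative placeholders, not part of this change):

// Sketch only — any ILighthouseService implementation would work here.
import type { ILighthouseService } from "./ILighthouseService.js";

async function uploadBatch(service: ILighthouseService): Promise<void> {
  // Upload two files with bounded concurrency, continuing past individual failures.
  const result = await service.batchUploadFiles(
    ["./data/report-a.json", "./data/report-b.json"],
    { concurrency: 3, continueOnError: true },
  );

  // BatchOperationResult exposes aggregate counters plus per-item results.
  console.log(`uploaded ${result.successful}/${result.total} files`);
  for (const item of result.results) {
    if (item.success && item.data) {
      console.log(`${item.id} -> ${item.data.hash}`);
    } else {
      console.warn(`${item.id} failed: ${item.error ?? "unknown error"}`);
    }
  }
}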
113 changes: 112 additions & 1 deletion apps/mcp-server/src/services/LighthouseService.ts
@@ -2,7 +2,17 @@
* Real Lighthouse Service - Uses the unified SDK wrapper for actual Lighthouse operations
*/

import { LighthouseAISDK, EnhancedAccessCondition } from "@lighthouse-tooling/sdk-wrapper";
import {
LighthouseAISDK,
EnhancedAccessCondition,
BatchUploadOptions,
BatchDownloadOptions,
BatchOperationResult,
BatchDownloadFileResult,
BatchUploadInput,
BatchDownloadInput,
FileInfo,
} from "@lighthouse-tooling/sdk-wrapper";
import { UploadResult, DownloadResult, AccessCondition, Dataset } from "@lighthouse-tooling/types";
import { Logger } from "@lighthouse-tooling/shared";
import { ILighthouseService, StoredFile } from "./ILighthouseService.js";
@@ -879,6 +889,107 @@ export class LighthouseService implements ILighthouseService {
}
}

/**
* Batch upload multiple files with configurable concurrency
*/
async batchUploadFiles(
filePaths: string[],
options?: BatchUploadOptions,
): Promise<BatchOperationResult<FileInfo>> {
const startTime = Date.now();

try {
this.logger.info("Starting batch upload", {
fileCount: filePaths.length,
concurrency: options?.concurrency || 3,
});

// Convert string paths to BatchUploadInput objects
const inputs: BatchUploadInput[] = filePaths.map((filePath) => ({
filePath,
}));

const result = await this.sdk.batchUpload(inputs, options);

// Store successful uploads in cache and database
for (const fileResult of result.results) {
if (fileResult.success && fileResult.data) {
const storedFile: StoredFile = {
cid: fileResult.data.hash,
filePath: fileResult.data.name,
size: fileResult.data.size,
encrypted: fileResult.data.encrypted,
accessConditions: options?.accessConditions,
tags: options?.tags,
uploadedAt: fileResult.data.uploadedAt,
pinned: true,
hash: fileResult.data.hash,
};

this.storage.saveFile(storedFile);
this.fileCache.set(fileResult.data.hash, storedFile);
}
}

const executionTime = Date.now() - startTime;
this.logger.info("Batch upload completed", {
total: result.total,
successful: result.successful,
failed: result.failed,
successRate: result.successRate,
executionTime,
});

return result;
} catch (error) {
this.logger.error("Batch upload failed", error as Error, {
fileCount: filePaths.length,
});
throw error;
}
}

/**
* Batch download multiple files by CID with configurable concurrency
*/
async batchDownloadFiles(
cids: string[],
options?: BatchDownloadOptions,
): Promise<BatchOperationResult<BatchDownloadFileResult>> {
const startTime = Date.now();

try {
this.logger.info("Starting batch download", {
cidCount: cids.length,
concurrency: options?.concurrency || 3,
outputDir: options?.outputDir,
});

// Convert string CIDs to BatchDownloadInput objects
const inputs: BatchDownloadInput[] = cids.map((cid) => ({
cid,
}));

const result = await this.sdk.batchDownload(inputs, options);

const executionTime = Date.now() - startTime;
this.logger.info("Batch download completed", {
total: result.total,
successful: result.successful,
failed: result.failed,
successRate: result.successRate,
executionTime,
});

return result;
} catch (error) {
this.logger.error("Batch download failed", error as Error, {
cidCount: cids.length,
});
throw error;
}
}

/**
* Cleanup resources
*/
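The real service delegates batch work to the SDK wrapper (this.sdk.batchUpload / this.sdk.batchDownload) and records successful uploads in its local cache and database. A hedged sketch of calling the batch download path (the CIDs and output directory are placeholders, not values from this PR):

// Sketch only — assumes an already-constructed LighthouseService instance.
async function downloadBatch(service: LighthouseService): Promise<void> {
  const outcome = await service.batchDownloadFiles(
    ["bafyExampleCidOne", "bafyExampleCidTwo"],
    { concurrency: 3, outputDir: "./downloads", continueOnError: true },
  );

  // Failed CIDs stay in the result list with success: false and an error message.
  const failures = outcome.results.filter((r) => !r.success);
  console.log(
    `downloaded ${outcome.successful}/${outcome.total}, ` +
      `${failures.length} failed in ${outcome.totalDuration}ms`,
  );
}
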
170 changes: 169 additions & 1 deletion apps/mcp-server/src/services/MockLighthouseService.ts
@@ -3,7 +3,14 @@
*/

import { UploadResult, DownloadResult, AccessCondition, Dataset } from "@lighthouse-tooling/types";
import { EnhancedAccessCondition } from "@lighthouse-tooling/sdk-wrapper";
import {
EnhancedAccessCondition,
BatchUploadOptions,
BatchDownloadOptions,
BatchOperationResult,
BatchDownloadFileResult,
FileInfo,
} from "@lighthouse-tooling/sdk-wrapper";
import { Logger, FileUtils } from "@lighthouse-tooling/shared";
import { CIDGenerator } from "../utils/cid-generator.js";
import { ILighthouseService, StoredFile } from "./ILighthouseService.js";
@@ -598,6 +605,167 @@ export class MockLighthouseService implements ILighthouseService {
}
}

/**
* Batch upload multiple files with configurable concurrency
*/
async batchUploadFiles(
filePaths: string[],
options?: BatchUploadOptions,
): Promise<BatchOperationResult<FileInfo>> {
const startTime = Date.now();
const results: Array<{
id: string;
success: boolean;
data?: FileInfo;
error?: string;
duration: number;
retries: number;
}> = [];

this.logger.info("Starting batch upload", {
fileCount: filePaths.length,
concurrency: options?.concurrency || 3,
});

for (const filePath of filePaths) {
const itemStartTime = Date.now();
try {
const uploadResult = await this.uploadFile({
filePath,
encrypt: options?.encrypt,
accessConditions: options?.accessConditions,
tags: options?.tags,
});

results.push({
id: filePath,
success: true,
data: {
hash: uploadResult.cid,
name: filePath.split("/").pop() || filePath,
size: uploadResult.size,
encrypted: uploadResult.encrypted,
mimeType: "application/octet-stream",
uploadedAt: uploadResult.uploadedAt,
},
duration: Date.now() - itemStartTime,
retries: 0,
});
} catch (error) {
if (!options?.continueOnError) {
throw error;
}
results.push({
id: filePath,
success: false,
error: error instanceof Error ? error.message : "Unknown error",
duration: Date.now() - itemStartTime,
retries: 0,
});
}
}

const successful = results.filter((r) => r.success).length;
const failed = results.filter((r) => !r.success).length;
const totalDuration = Date.now() - startTime;

this.logger.info("Batch upload completed", {
total: filePaths.length,
successful,
failed,
totalDuration,
});

return {
total: filePaths.length,
successful,
failed,
successRate: filePaths.length > 0 ? (successful / filePaths.length) * 100 : 0,
totalDuration,
averageDuration: results.length > 0 ? totalDuration / results.length : 0,
results,
};
}

/**
* Batch download multiple files by CID with configurable concurrency
*/
async batchDownloadFiles(
cids: string[],
options?: BatchDownloadOptions,
): Promise<BatchOperationResult<BatchDownloadFileResult>> {
const startTime = Date.now();
const results: Array<{
id: string;
success: boolean;
data?: BatchDownloadFileResult;
error?: string;
duration: number;
retries: number;
}> = [];

this.logger.info("Starting batch download", {
cidCount: cids.length,
concurrency: options?.concurrency || 3,
});

for (const cid of cids) {
const itemStartTime = Date.now();
try {
const downloadResult = await this.fetchFile({
cid,
outputPath: options?.outputDir ? `${options.outputDir}/${cid}` : undefined,
decrypt: options?.decrypt,
});

results.push({
id: cid,
success: true,
data: {
cid: downloadResult.cid,
filePath: downloadResult.filePath,
size: downloadResult.size,
decrypted: downloadResult.decrypted,
},
duration: Date.now() - itemStartTime,
retries: 0,
});
} catch (error) {
if (!options?.continueOnError) {
throw error;
}
results.push({
id: cid,
success: false,
error: error instanceof Error ? error.message : "Unknown error",
duration: Date.now() - itemStartTime,
retries: 0,
});
}
}

const successful = results.filter((r) => r.success).length;
const failed = results.filter((r) => !r.success).length;
const totalDuration = Date.now() - startTime;

this.logger.info("Batch download completed", {
total: cids.length,
successful,
failed,
totalDuration,
});

return {
total: cids.length,
successful,
failed,
successRate: cids.length > 0 ? (successful / cids.length) * 100 : 0,
totalDuration,
averageDuration: results.length > 0 ? totalDuration / results.length : 0,
results,
};
}

/**
* Simulate network delay for realistic behavior
*/
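The mock walks its inputs sequentially with a plain for...of loop, so the concurrency option only affects its logging. For reference, a minimal sketch of the kind of bounded-concurrency runner the doc comments describe (illustrative helper, not part of this PR):

// Illustrative helper — runs async tasks with at most `limit` in flight.
async function runWithConcurrency<T, R>(
  items: T[],
  limit: number,
  task: (item: T) => Promise<R>,
): Promise<R[]> {
  const results = new Array<R>(items.length);
  let next = 0;

  // Each worker repeatedly claims the next unprocessed index until none remain.
  const worker = async (): Promise<void> => {
    while (next < items.length) {
      const index = next++;
      results[index] = await task(items[index]);
    }
  };

  const workers = Array.from(
    { length: Math.max(1, Math.min(limit, items.length)) },
    () => worker(),
  );
  await Promise.all(workers);
  return results;
}

Usage in the mock could look like runWithConcurrency(filePaths, options?.concurrency ?? 3, uploadOne); the real service already gets this behaviour from the SDK wrapper.
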
25 changes: 23 additions & 2 deletions apps/mcp-server/src/tests/integration.test.ts
@@ -64,7 +64,28 @@ describe("Lighthouse MCP Server Integration", () => {

it("should handle missing API key", () => {
expect(() => {
new LighthouseMCPServer({ lighthouseApiKey: undefined });
}).toThrow("LIGHTHOUSE_API_KEY environment variable is required");
new LighthouseMCPServer({
lighthouseApiKey: undefined,
authentication: {
defaultApiKey: undefined,
enablePerRequestAuth: true,
requireAuthentication: true,
keyValidationCache: {
enabled: false,
maxSize: 0,
ttlSeconds: 0,
cleanupIntervalSeconds: 0,
},
rateLimiting: {
enabled: false,
requestsPerMinute: 0,
burstLimit: 0,
keyBasedLimiting: false,
},
},
});
}).toThrow(
"LIGHTHOUSE_API_KEY environment variable or authentication.defaultApiKey is required",
);
});
});
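
A hedged sketch of a test that could exercise the new batch methods (names and setup are illustrative; it assumes the suite exposes a mock service instance, which this diff does not show):

it("should report per-item results for batch uploads", async () => {
  // `service` is assumed to be a MockLighthouseService created in the suite's setup.
  const result = await service.batchUploadFiles(
    ["/tmp/example-a.txt", "/tmp/example-b.txt"],
    { continueOnError: true },
  );

  expect(result.total).toBe(2);
  expect(result.results).toHaveLength(2);
  expect(result.successful + result.failed).toBe(2);
  expect(result.successRate).toBeGreaterThanOrEqual(0);
});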