feat: add support for S3 as a storage provider (#659)
* add s3 * instantiate the s3 client dynamically * refactor code * fix format * add docs * add docs * fix issue with s3 upload if you use the base path, fix issue with archiving -> disable archiving for s3 * split file service into local and s3 file service and fix s3 upload chunking * add working download/view * add new features to local service (from main branch) * revert s3 service and add working delete/remove functionality * refactor s3 service * Update backend/src/file/s3.service.ts Co-authored-by: Elias Schneider <login@eliasschneider.com> * Update frontend/src/components/admin/configuration/ConfigurationNavBar.tsx Co-authored-by: Elias Schneider <login@eliasschneider.com> * Update docs/docs/setup/s3.md Co-authored-by: Elias Schneider <login@eliasschneider.com> * Update backend/prisma/seed/config.seed.ts Co-authored-by: Elias Schneider <login@eliasschneider.com> * add note for ZIP archive in docs * create logger instance * make s3 instance dynamic * add icon import * remove console.logs * add correct pdf viewing format * add storage provider to share * refactor: run formatter * chore: add prisma migration * fix: don't expose `storageProvider` * chore: improve config variables description --------- Co-authored-by: Elias Schneider <login@eliasschneider.com>
This commit is contained in:
@@ -17,6 +17,7 @@ import { CreateShareGuard } from "src/share/guard/createShare.guard";
|
||||
import { ShareOwnerGuard } from "src/share/guard/shareOwner.guard";
|
||||
import { FileService } from "./file.service";
|
||||
import { FileSecurityGuard } from "./guard/fileSecurity.guard";
|
||||
import * as mime from "mime-types";
|
||||
|
||||
@Controller("shares/:shareId/files")
|
||||
export class FileController {
|
||||
@@ -53,7 +54,7 @@ export class FileController {
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
@Param("shareId") shareId: string,
|
||||
) {
|
||||
const zip = this.fileService.getZip(shareId);
|
||||
const zip = await this.fileService.getZip(shareId);
|
||||
res.set({
|
||||
"Content-Type": "application/zip",
|
||||
"Content-Disposition": contentDisposition(`${shareId}.zip`),
|
||||
@@ -73,13 +74,18 @@ export class FileController {
|
||||
const file = await this.fileService.get(shareId, fileId);
|
||||
|
||||
const headers = {
|
||||
"Content-Type": file.metaData.mimeType,
|
||||
"Content-Type":
|
||||
mime?.lookup?.(file.metaData.name) || "application/octet-stream",
|
||||
"Content-Length": file.metaData.size,
|
||||
"Content-Security-Policy": "script-src 'none'",
|
||||
};
|
||||
|
||||
if (download === "true") {
|
||||
headers["Content-Disposition"] = contentDisposition(file.metaData.name);
|
||||
} else {
|
||||
headers["Content-Disposition"] = contentDisposition(file.metaData.name, {
|
||||
type: "inline",
|
||||
});
|
||||
}
|
||||
|
||||
res.set(headers);
|
||||
|
||||
@@ -4,11 +4,13 @@ import { ReverseShareModule } from "src/reverseShare/reverseShare.module";
|
||||
import { ShareModule } from "src/share/share.module";
|
||||
import { FileController } from "./file.controller";
|
||||
import { FileService } from "./file.service";
|
||||
import { LocalFileService } from "./local.service";
|
||||
import { S3FileService } from "./s3.service";
|
||||
|
||||
@Module({
|
||||
imports: [JwtModule.register({}), ReverseShareModule, ShareModule],
|
||||
controllers: [FileController],
|
||||
providers: [FileService],
|
||||
providers: [FileService, LocalFileService, S3FileService],
|
||||
exports: [FileService],
|
||||
})
|
||||
export class FileModule {}
|
||||
|
||||
@@ -1,162 +1,88 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
HttpException,
|
||||
HttpStatus,
|
||||
Injectable,
|
||||
InternalServerErrorException,
|
||||
NotFoundException,
|
||||
} from "@nestjs/common";
|
||||
import { JwtService } from "@nestjs/jwt";
|
||||
import * as crypto from "crypto";
|
||||
import { createReadStream } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import * as mime from "mime-types";
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { LocalFileService } from "./local.service";
|
||||
import { S3FileService } from "./s3.service";
|
||||
import { ConfigService } from "src/config/config.service";
|
||||
import { PrismaService } from "src/prisma/prisma.service";
|
||||
import { validate as isValidUUID } from "uuid";
|
||||
import { SHARE_DIRECTORY } from "../constants";
|
||||
import { Readable } from "stream";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
|
||||
@Injectable()
|
||||
export class FileService {
|
||||
constructor(
|
||||
private prisma: PrismaService,
|
||||
private config: ConfigService,
|
||||
private localFileService: LocalFileService,
|
||||
private s3FileService: S3FileService,
|
||||
private configService: ConfigService,
|
||||
) {}
|
||||
|
||||
// Determine which service to use based on the current config value
|
||||
// shareId is optional -> can be used to overwrite a storage provider
|
||||
private getStorageService(
|
||||
storageProvider?: string,
|
||||
): S3FileService | LocalFileService {
|
||||
if (storageProvider != undefined)
|
||||
return storageProvider == "S3"
|
||||
? this.s3FileService
|
||||
: this.localFileService;
|
||||
return this.configService.get("s3.enabled")
|
||||
? this.s3FileService
|
||||
: this.localFileService;
|
||||
}
|
||||
|
||||
async create(
|
||||
data: string,
|
||||
chunk: { index: number; total: number },
|
||||
file: { id?: string; name: string },
|
||||
file: {
|
||||
id?: string;
|
||||
name: string;
|
||||
},
|
||||
shareId: string,
|
||||
) {
|
||||
if (!file.id) {
|
||||
file.id = crypto.randomUUID();
|
||||
} else if (!isValidUUID(file.id)) {
|
||||
throw new BadRequestException("Invalid file ID format");
|
||||
}
|
||||
|
||||
const share = await this.prisma.share.findUnique({
|
||||
where: { id: shareId },
|
||||
include: { files: true, reverseShare: true },
|
||||
});
|
||||
|
||||
if (share.uploadLocked)
|
||||
throw new BadRequestException("Share is already completed");
|
||||
|
||||
let diskFileSize: number;
|
||||
try {
|
||||
diskFileSize = (
|
||||
await fs.stat(`${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`)
|
||||
).size;
|
||||
} catch {
|
||||
diskFileSize = 0;
|
||||
}
|
||||
|
||||
// If the sent chunk index and the expected chunk index doesn't match throw an error
|
||||
const chunkSize = this.config.get("share.chunkSize");
|
||||
const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize);
|
||||
|
||||
if (expectedChunkIndex != chunk.index)
|
||||
throw new BadRequestException({
|
||||
message: "Unexpected chunk received",
|
||||
error: "unexpected_chunk_index",
|
||||
expectedChunkIndex,
|
||||
});
|
||||
|
||||
const buffer = Buffer.from(data, "base64");
|
||||
|
||||
// Check if there is enough space on the server
|
||||
const space = await fs.statfs(SHARE_DIRECTORY);
|
||||
const availableSpace = space.bavail * space.bsize;
|
||||
if (availableSpace < buffer.byteLength) {
|
||||
throw new InternalServerErrorException("Not enough space on the server");
|
||||
}
|
||||
|
||||
// Check if share size limit is exceeded
|
||||
const fileSizeSum = share.files.reduce(
|
||||
(n, { size }) => n + parseInt(size),
|
||||
0,
|
||||
);
|
||||
|
||||
const shareSizeSum = fileSizeSum + diskFileSize + buffer.byteLength;
|
||||
|
||||
if (
|
||||
shareSizeSum > this.config.get("share.maxSize") ||
|
||||
(share.reverseShare?.maxShareSize &&
|
||||
shareSizeSum > parseInt(share.reverseShare.maxShareSize))
|
||||
) {
|
||||
throw new HttpException(
|
||||
"Max share size exceeded",
|
||||
HttpStatus.PAYLOAD_TOO_LARGE,
|
||||
);
|
||||
}
|
||||
|
||||
await fs.appendFile(
|
||||
`${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,
|
||||
buffer,
|
||||
);
|
||||
|
||||
const isLastChunk = chunk.index == chunk.total - 1;
|
||||
if (isLastChunk) {
|
||||
await fs.rename(
|
||||
`${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,
|
||||
`${SHARE_DIRECTORY}/${shareId}/${file.id}`,
|
||||
);
|
||||
const fileSize = (
|
||||
await fs.stat(`${SHARE_DIRECTORY}/${shareId}/${file.id}`)
|
||||
).size;
|
||||
await this.prisma.file.create({
|
||||
data: {
|
||||
id: file.id,
|
||||
name: file.name,
|
||||
size: fileSize.toString(),
|
||||
share: { connect: { id: shareId } },
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return file;
|
||||
const storageService = this.getStorageService();
|
||||
return storageService.create(data, chunk, file, shareId);
|
||||
}
|
||||
|
||||
async get(shareId: string, fileId: string) {
|
||||
const fileMetaData = await this.prisma.file.findUnique({
|
||||
where: { id: fileId },
|
||||
async get(shareId: string, fileId: string): Promise<File> {
|
||||
const share = await this.prisma.share.findFirst({
|
||||
where: { id: shareId },
|
||||
});
|
||||
|
||||
if (!fileMetaData) throw new NotFoundException("File not found");
|
||||
|
||||
const file = createReadStream(`${SHARE_DIRECTORY}/${shareId}/${fileId}`);
|
||||
|
||||
return {
|
||||
metaData: {
|
||||
mimeType: mime.contentType(fileMetaData.name.split(".").pop()),
|
||||
...fileMetaData,
|
||||
size: fileMetaData.size,
|
||||
},
|
||||
file,
|
||||
};
|
||||
const storageService = this.getStorageService(share.storageProvider);
|
||||
return storageService.get(shareId, fileId);
|
||||
}
|
||||
|
||||
async remove(shareId: string, fileId: string) {
|
||||
const fileMetaData = await this.prisma.file.findUnique({
|
||||
where: { id: fileId },
|
||||
});
|
||||
|
||||
if (!fileMetaData) throw new NotFoundException("File not found");
|
||||
|
||||
await fs.unlink(`${SHARE_DIRECTORY}/${shareId}/${fileId}`);
|
||||
|
||||
await this.prisma.file.delete({ where: { id: fileId } });
|
||||
const storageService = this.getStorageService();
|
||||
return storageService.remove(shareId, fileId);
|
||||
}
|
||||
|
||||
async deleteAllFiles(shareId: string) {
|
||||
await fs.rm(`${SHARE_DIRECTORY}/${shareId}`, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
const storageService = this.getStorageService();
|
||||
return storageService.deleteAllFiles(shareId);
|
||||
}
|
||||
|
||||
getZip(shareId: string) {
|
||||
return createReadStream(`${SHARE_DIRECTORY}/${shareId}/archive.zip`);
|
||||
const storageService = this.getStorageService();
|
||||
return this.streamToUint8Array(storageService.getZip(shareId) as Readable);
|
||||
}
|
||||
|
||||
private async streamToUint8Array(stream: Readable): Promise<Uint8Array> {
|
||||
const chunks: Buffer[] = [];
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
stream.on("data", (chunk) => chunks.push(Buffer.from(chunk)));
|
||||
stream.on("end", () => resolve(new Uint8Array(Buffer.concat(chunks))));
|
||||
stream.on("error", reject);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export interface File {
|
||||
metaData: {
|
||||
id: string;
|
||||
size: string;
|
||||
createdAt: Date;
|
||||
mimeType: string | false;
|
||||
name: string;
|
||||
shareId: string;
|
||||
};
|
||||
file: Readable;
|
||||
}
|
||||
|
||||
161
backend/src/file/local.service.ts
Normal file
161
backend/src/file/local.service.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
HttpException,
|
||||
HttpStatus,
|
||||
Injectable,
|
||||
InternalServerErrorException,
|
||||
NotFoundException,
|
||||
} from "@nestjs/common";
|
||||
import * as crypto from "crypto";
|
||||
import { createReadStream } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import * as mime from "mime-types";
|
||||
import { ConfigService } from "src/config/config.service";
|
||||
import { PrismaService } from "src/prisma/prisma.service";
|
||||
import { validate as isValidUUID } from "uuid";
|
||||
import { SHARE_DIRECTORY } from "../constants";
|
||||
|
||||
/**
 * File storage backend that keeps share files on the local filesystem under
 * `SHARE_DIRECTORY/<shareId>/`. Uploads arrive as base64 chunks that are
 * appended to a `<fileId>.tmp-chunk` file and renamed to `<fileId>` once the
 * last chunk lands.
 */
@Injectable()
export class LocalFileService {
  constructor(
    private prisma: PrismaService,
    private config: ConfigService,
  ) {}

  /**
   * Append one base64-encoded chunk to the file's temp file; on the last
   * chunk, finalize the file and create its database row.
   *
   * @param data base64-encoded chunk payload
   * @param chunk zero-based chunk index and total chunk count
   * @param file target file; a UUID id is generated when absent
   * @param shareId share the file belongs to
   * @returns the (possibly id-augmented) `file` object
   * @throws BadRequestException on a malformed id, a locked share, or an
   *   out-of-order chunk; HttpException(413) when the share size limit is
   *   exceeded; InternalServerErrorException when the disk is full.
   */
  async create(
    data: string,
    chunk: { index: number; total: number },
    file: { id?: string; name: string },
    shareId: string,
  ) {
    if (!file.id) {
      file.id = crypto.randomUUID();
    } else if (!isValidUUID(file.id)) {
      throw new BadRequestException("Invalid file ID format");
    }

    const share = await this.prisma.share.findUnique({
      where: { id: shareId },
      include: { files: true, reverseShare: true },
    });

    if (share.uploadLocked)
      throw new BadRequestException("Share is already completed");

    // Size already on disk for this file's temp chunk file; 0 when the
    // stat fails (i.e. this is the first chunk).
    let diskFileSize: number;
    try {
      diskFileSize = (
        await fs.stat(`${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`)
      ).size;
    } catch {
      diskFileSize = 0;
    }

    // If the sent chunk index and the expected chunk index doesn't match throw an error
    const chunkSize = this.config.get("share.chunkSize");
    const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize);

    if (expectedChunkIndex != chunk.index)
      throw new BadRequestException({
        message: "Unexpected chunk received",
        error: "unexpected_chunk_index",
        expectedChunkIndex,
      });

    const buffer = Buffer.from(data, "base64");

    // Check if there is enough space on the server
    const space = await fs.statfs(SHARE_DIRECTORY);
    const availableSpace = space.bavail * space.bsize;
    if (availableSpace < buffer.byteLength) {
      throw new InternalServerErrorException("Not enough space on the server");
    }

    // Check if share size limit is exceeded
    // (sum of completed files + bytes already written for this file + this chunk)
    const fileSizeSum = share.files.reduce(
      (n, { size }) => n + parseInt(size),
      0,
    );

    const shareSizeSum = fileSizeSum + diskFileSize + buffer.byteLength;

    if (
      shareSizeSum > this.config.get("share.maxSize") ||
      (share.reverseShare?.maxShareSize &&
        shareSizeSum > parseInt(share.reverseShare.maxShareSize))
    ) {
      throw new HttpException(
        "Max share size exceeded",
        HttpStatus.PAYLOAD_TOO_LARGE,
      );
    }

    await fs.appendFile(
      `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,
      buffer,
    );

    // On the last chunk: promote the temp file to its final name and
    // persist the file's metadata (size measured from disk).
    const isLastChunk = chunk.index == chunk.total - 1;
    if (isLastChunk) {
      await fs.rename(
        `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,
        `${SHARE_DIRECTORY}/${shareId}/${file.id}`,
      );
      const fileSize = (
        await fs.stat(`${SHARE_DIRECTORY}/${shareId}/${file.id}`)
      ).size;
      await this.prisma.file.create({
        data: {
          id: file.id,
          name: file.name,
          size: fileSize.toString(),
          share: { connect: { id: shareId } },
        },
      });
    }

    return file;
  }

  /**
   * Return the file's metadata plus a filesystem read stream.
   * @throws NotFoundException when no database row exists for `fileId`.
   */
  async get(shareId: string, fileId: string) {
    const fileMetaData = await this.prisma.file.findUnique({
      where: { id: fileId },
    });

    if (!fileMetaData) throw new NotFoundException("File not found");

    const file = createReadStream(`${SHARE_DIRECTORY}/${shareId}/${fileId}`);

    return {
      metaData: {
        // mime type is derived from the stored file name's extension
        mimeType: mime.contentType(fileMetaData.name.split(".").pop()),
        ...fileMetaData,
        size: fileMetaData.size,
      },
      file,
    };
  }

  /**
   * Delete one file from disk and remove its database row.
   * @throws NotFoundException when no database row exists for `fileId`.
   */
  async remove(shareId: string, fileId: string) {
    const fileMetaData = await this.prisma.file.findUnique({
      where: { id: fileId },
    });

    if (!fileMetaData) throw new NotFoundException("File not found");

    await fs.unlink(`${SHARE_DIRECTORY}/${shareId}/${fileId}`);

    await this.prisma.file.delete({ where: { id: fileId } });
  }

  /**
   * Remove the share's entire directory (all files and the archive).
   * `force: true` makes this a no-op when the directory is already gone.
   */
  async deleteAllFiles(shareId: string) {
    await fs.rm(`${SHARE_DIRECTORY}/${shareId}`, {
      recursive: true,
      force: true,
    });
  }

  /**
   * Stream the pre-built ZIP archive of the share.
   * NOTE(review): assumes `archive.zip` was created beforehand (see
   * ShareService.createZip) — the stream errors if it does not exist.
   */
  getZip(shareId: string) {
    return createReadStream(`${SHARE_DIRECTORY}/${shareId}/archive.zip`);
  }
}
|
||||
299
backend/src/file/s3.service.ts
Normal file
299
backend/src/file/s3.service.ts
Normal file
@@ -0,0 +1,299 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
Injectable,
|
||||
InternalServerErrorException,
|
||||
NotFoundException,
|
||||
Logger,
|
||||
} from "@nestjs/common";
|
||||
import {
|
||||
AbortMultipartUploadCommand,
|
||||
CompleteMultipartUploadCommand,
|
||||
CreateMultipartUploadCommand,
|
||||
DeleteObjectCommand,
|
||||
DeleteObjectsCommand,
|
||||
GetObjectCommand,
|
||||
HeadObjectCommand,
|
||||
ListObjectsV2Command,
|
||||
S3Client,
|
||||
UploadPartCommand,
|
||||
UploadPartCommandOutput,
|
||||
} from "@aws-sdk/client-s3";
|
||||
import { PrismaService } from "src/prisma/prisma.service";
|
||||
import { ConfigService } from "src/config/config.service";
|
||||
import * as crypto from "crypto";
|
||||
import * as mime from "mime-types";
|
||||
import { File } from "./file.service";
|
||||
import { Readable } from "stream";
|
||||
import { validate as isValidUUID } from "uuid";
|
||||
|
||||
@Injectable()
|
||||
export class S3FileService {
|
||||
private readonly logger = new Logger(S3FileService.name);
|
||||
|
||||
private multipartUploads: Record<
|
||||
string,
|
||||
{
|
||||
uploadId: string;
|
||||
parts: Array<{ ETag: string | undefined; PartNumber: number }>;
|
||||
}
|
||||
> = {};
|
||||
|
||||
constructor(
|
||||
private prisma: PrismaService,
|
||||
private config: ConfigService,
|
||||
) {}
|
||||
|
||||
async create(
|
||||
data: string,
|
||||
chunk: { index: number; total: number },
|
||||
file: { id?: string; name: string },
|
||||
shareId: string,
|
||||
) {
|
||||
if (!file.id) {
|
||||
file.id = crypto.randomUUID();
|
||||
} else if (!isValidUUID(file.id)) {
|
||||
throw new BadRequestException("Invalid file ID format");
|
||||
}
|
||||
|
||||
const buffer = Buffer.from(data, "base64");
|
||||
const key = `${this.getS3Path()}${shareId}/${file.name}`;
|
||||
const bucketName = this.config.get("s3.bucketName");
|
||||
const s3Instance = this.getS3Instance();
|
||||
|
||||
try {
|
||||
// Initialize multipart upload if it's the first chunk
|
||||
if (chunk.index === 0) {
|
||||
const multipartInitResponse = await s3Instance.send(
|
||||
new CreateMultipartUploadCommand({
|
||||
Bucket: bucketName,
|
||||
Key: key,
|
||||
}),
|
||||
);
|
||||
|
||||
const uploadId = multipartInitResponse.UploadId;
|
||||
if (!uploadId) {
|
||||
throw new Error("Failed to initialize multipart upload.");
|
||||
}
|
||||
|
||||
// Store the uploadId and parts list in memory
|
||||
this.multipartUploads[file.id] = {
|
||||
uploadId,
|
||||
parts: [],
|
||||
};
|
||||
}
|
||||
|
||||
// Get the ongoing multipart upload
|
||||
const multipartUpload = this.multipartUploads[file.id];
|
||||
if (!multipartUpload) {
|
||||
throw new InternalServerErrorException(
|
||||
"Multipart upload session not found.",
|
||||
);
|
||||
}
|
||||
|
||||
const uploadId = multipartUpload.uploadId;
|
||||
|
||||
// Upload the current chunk
|
||||
const partNumber = chunk.index + 1; // Part numbers start from 1
|
||||
|
||||
const uploadPartResponse: UploadPartCommandOutput = await s3Instance.send(
|
||||
new UploadPartCommand({
|
||||
Bucket: bucketName,
|
||||
Key: key,
|
||||
PartNumber: partNumber,
|
||||
UploadId: uploadId,
|
||||
Body: buffer,
|
||||
}),
|
||||
);
|
||||
|
||||
// Store the ETag and PartNumber for later completion
|
||||
multipartUpload.parts.push({
|
||||
ETag: uploadPartResponse.ETag,
|
||||
PartNumber: partNumber,
|
||||
});
|
||||
|
||||
// Complete the multipart upload if it's the last chunk
|
||||
if (chunk.index === chunk.total - 1) {
|
||||
await s3Instance.send(
|
||||
new CompleteMultipartUploadCommand({
|
||||
Bucket: bucketName,
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
MultipartUpload: {
|
||||
Parts: multipartUpload.parts,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Remove the completed upload from memory
|
||||
delete this.multipartUploads[file.id];
|
||||
}
|
||||
} catch (error) {
|
||||
// Abort the multipart upload if it fails
|
||||
const multipartUpload = this.multipartUploads[file.id];
|
||||
if (multipartUpload) {
|
||||
try {
|
||||
await s3Instance.send(
|
||||
new AbortMultipartUploadCommand({
|
||||
Bucket: bucketName,
|
||||
Key: key,
|
||||
UploadId: multipartUpload.uploadId,
|
||||
}),
|
||||
);
|
||||
} catch (abortError) {
|
||||
console.error("Error aborting multipart upload:", abortError);
|
||||
}
|
||||
delete this.multipartUploads[file.id];
|
||||
}
|
||||
this.logger.error(error);
|
||||
throw new Error("Multipart upload failed. The upload has been aborted.");
|
||||
}
|
||||
|
||||
const isLastChunk = chunk.index == chunk.total - 1;
|
||||
if (isLastChunk) {
|
||||
const fileSize: number = await this.getFileSize(shareId, file.name);
|
||||
|
||||
await this.prisma.file.create({
|
||||
data: {
|
||||
id: file.id,
|
||||
name: file.name,
|
||||
size: fileSize.toString(),
|
||||
share: { connect: { id: shareId } },
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
async get(shareId: string, fileId: string): Promise<File> {
|
||||
const fileName = (
|
||||
await this.prisma.file.findUnique({ where: { id: fileId } })
|
||||
).name;
|
||||
|
||||
const s3Instance = this.getS3Instance();
|
||||
const key = `${this.getS3Path()}${shareId}/${fileName}`;
|
||||
const response = await s3Instance.send(
|
||||
new GetObjectCommand({
|
||||
Bucket: this.config.get("s3.bucketName"),
|
||||
Key: key,
|
||||
}),
|
||||
);
|
||||
|
||||
return {
|
||||
metaData: {
|
||||
id: fileId,
|
||||
size: response.ContentLength?.toString() || "0",
|
||||
name: fileName,
|
||||
shareId: shareId,
|
||||
createdAt: response.LastModified || new Date(),
|
||||
mimeType:
|
||||
mime.contentType(fileId.split(".").pop()) ||
|
||||
"application/octet-stream",
|
||||
},
|
||||
file: response.Body as Readable,
|
||||
} as File;
|
||||
}
|
||||
|
||||
async remove(shareId: string, fileId: string) {
|
||||
const fileMetaData = await this.prisma.file.findUnique({
|
||||
where: { id: fileId },
|
||||
});
|
||||
|
||||
if (!fileMetaData) throw new NotFoundException("File not found");
|
||||
|
||||
const key = `${this.getS3Path()}${shareId}/${fileMetaData.name}`;
|
||||
const s3Instance = this.getS3Instance();
|
||||
|
||||
try {
|
||||
await s3Instance.send(
|
||||
new DeleteObjectCommand({
|
||||
Bucket: this.config.get("s3.bucketName"),
|
||||
Key: key,
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
throw new Error("Could not delete file from S3");
|
||||
}
|
||||
|
||||
await this.prisma.file.delete({ where: { id: fileId } });
|
||||
}
|
||||
|
||||
async deleteAllFiles(shareId: string) {
|
||||
const prefix = `${this.getS3Path()}${shareId}/`;
|
||||
const s3Instance = this.getS3Instance();
|
||||
|
||||
try {
|
||||
// List all objects under the given prefix
|
||||
const listResponse = await s3Instance.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.config.get("s3.bucketName"),
|
||||
Prefix: prefix,
|
||||
}),
|
||||
);
|
||||
|
||||
if (!listResponse.Contents || listResponse.Contents.length === 0) {
|
||||
throw new Error(`No files found for share ${shareId}`);
|
||||
}
|
||||
|
||||
// Extract the keys of the files to be deleted
|
||||
const objectsToDelete = listResponse.Contents.map((file) => ({
|
||||
Key: file.Key!,
|
||||
}));
|
||||
|
||||
// Delete all files in a single request (up to 1000 objects at once)
|
||||
await s3Instance.send(
|
||||
new DeleteObjectsCommand({
|
||||
Bucket: this.config.get("s3.bucketName"),
|
||||
Delete: {
|
||||
Objects: objectsToDelete,
|
||||
},
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
throw new Error("Could not delete all files from S3");
|
||||
}
|
||||
}
|
||||
|
||||
async getFileSize(shareId: string, fileName: string): Promise<number> {
|
||||
const key = `${this.getS3Path()}${shareId}/${fileName}`;
|
||||
const s3Instance = this.getS3Instance();
|
||||
|
||||
try {
|
||||
// Get metadata of the file using HeadObjectCommand
|
||||
const headObjectResponse = await s3Instance.send(
|
||||
new HeadObjectCommand({
|
||||
Bucket: this.config.get("s3.bucketName"),
|
||||
Key: key,
|
||||
}),
|
||||
);
|
||||
|
||||
// Return ContentLength which is the file size in bytes
|
||||
return headObjectResponse.ContentLength ?? 0;
|
||||
} catch (error) {
|
||||
throw new Error("Could not retrieve file size");
|
||||
}
|
||||
}
|
||||
|
||||
getS3Instance(): S3Client {
|
||||
return new S3Client({
|
||||
endpoint: this.config.get("s3.endpoint"),
|
||||
region: this.config.get("s3.region"),
|
||||
credentials: {
|
||||
accessKeyId: this.config.get("s3.key"),
|
||||
secretAccessKey: this.config.get("s3.secret"),
|
||||
},
|
||||
forcePathStyle: true,
|
||||
});
|
||||
}
|
||||
|
||||
getZip() {
|
||||
throw new BadRequestException(
|
||||
"ZIP download is not supported with S3 storage",
|
||||
);
|
||||
}
|
||||
|
||||
getS3Path(): string {
|
||||
const configS3Path = this.config.get("s3.bucketPath");
|
||||
return configS3Path ? `${configS3Path}/` : "";
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,7 @@ import { CreateShareDTO } from "./dto/createShare.dto";
|
||||
export class ShareService {
|
||||
constructor(
|
||||
private prisma: PrismaService,
|
||||
private configService: ConfigService,
|
||||
private fileService: FileService,
|
||||
private emailService: EmailService,
|
||||
private config: ConfigService,
|
||||
@@ -86,6 +87,7 @@ export class ShareService {
|
||||
? share.recipients.map((email) => ({ email }))
|
||||
: [],
|
||||
},
|
||||
storageProvider: this.configService.get("s3.enabled") ? "S3" : "LOCAL",
|
||||
},
|
||||
});
|
||||
|
||||
@@ -105,6 +107,8 @@ export class ShareService {
|
||||
}
|
||||
|
||||
async createZip(shareId: string) {
|
||||
if (this.config.get("s3.enabled")) return;
|
||||
|
||||
const path = `${SHARE_DIRECTORY}/${shareId}`;
|
||||
|
||||
const files = await this.prisma.file.findMany({ where: { shareId } });
|
||||
|
||||
Reference in New Issue
Block a user