Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DAM: Fix headers #3535

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .changeset/hungry-boxes-attack.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
---
"@comet/cms-api": patch
---

DAM: Fix headers

While we fixed a few issues with cache control headers in https://github.com/vivid-planet/comet/pull/2653, there are still a few issues with the way the headers were being handled:

- All headers are stored, even though only the `content-type` header is needed.
- Imgproxy headers are passed through to the client.
- `content-type` is stored redundantly for AzureStorageAccounts and S3 Buckets.
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
import { BlobHTTPHeaders, BlobServiceClient, RestError, StorageSharedKeyCredential } from "@azure/storage-blob";
import { Logger } from "@nestjs/common";
import { Readable } from "stream";

import { BlobStorageBackendInterface, CreateFileOptions, StorageMetaData } from "../blob-storage-backend.interface";
import { BlobStorageAzureConfig } from "./blob-storage-azure.config";

export class BlobStorageAzureStorage implements BlobStorageBackendInterface {
private readonly logger = new Logger(BlobStorageAzureStorage.name);
private readonly client: BlobServiceClient;

constructor(private readonly config: BlobStorageAzureConfig["azure"]) {
constructor(readonly config: BlobStorageAzureConfig["azure"]) {
const sharedKeyCredential = new StorageSharedKeyCredential(config.accountName, config.accountKey);
this.client = new BlobServiceClient(`https://${config.accountName}.blob.core.windows.net`, sharedKeyCredential);
}
Expand All @@ -29,10 +31,10 @@ export class BlobStorageAzureStorage implements BlobStorageBackendInterface {
// retry the creation for three times if the container is being deleted, waiting 30 seconds between each attempt.
// See https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container#remarks for more information.
if (error instanceof RestError && error.code === "ContainerBeingDeleted") {
console.info(`Container is being deleted, retrying in 30s`);
this.logger.log(`Container (${folderName}) is being deleted, retrying in 30s`);
await this.sleep(30);
currentAttempt++;
console.info(`Retrying... (Attempt ${currentAttempt} of ${maxNumberOfAttempts})`);
this.logger.log(`Retrying to create container (${folderName})... (Attempt ${currentAttempt} of ${maxNumberOfAttempts})`);
continue;
}

Expand All @@ -53,23 +55,19 @@ export class BlobStorageAzureStorage implements BlobStorageBackendInterface {
folderName: string,
fileName: string,
data: NodeJS.ReadableStream | Buffer | string,
{ headers }: CreateFileOptions,
{ contentType }: CreateFileOptions,
): Promise<void> {
const metadata = {
headers: JSON.stringify(headers),
};
const blobHTTPHeaders: BlobHTTPHeaders = {
blobContentType: headers["content-type"],
blobContentType: contentType,
};

const blockBlobClient = this.client.getContainerClient(folderName).getBlockBlobClient(fileName);
if (typeof data === "string") {
await blockBlobClient.uploadFile(data, { metadata, blobHTTPHeaders });
await blockBlobClient.uploadFile(data, { blobHTTPHeaders });
} else if (Buffer.isBuffer(data)) {
await blockBlobClient.uploadData(data, { metadata, blobHTTPHeaders });
await blockBlobClient.uploadData(data, { blobHTTPHeaders });
} else {
await blockBlobClient.uploadStream(new Readable().wrap(data), undefined, undefined, {
metadata,
blobHTTPHeaders,
});
}
Expand Down Expand Up @@ -99,7 +97,8 @@ export class BlobStorageAzureStorage implements BlobStorageBackendInterface {
size: properties.contentLength!, // is defined in node.js but not for browsers
etag: properties.etag,
lastModified: properties.lastModified,
headers: properties.metadata?.headers ? JSON.parse(properties.metadata.headers) : {},
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
contentType: properties.contentType!,
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@ export type StorageMetaData = {
size: number;
etag?: string;
lastModified?: Date;
headers: Record<string, string>;
contentType: string;
};

export type CreateFileOptions = {
size: number;
headers: StorageMetaData["headers"];
contentType: string;
};

export interface BlobStorageBackendInterface {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@ import { BlobStorageS3Storage } from "./s3/blob-storage-s3.storage";
@Injectable()
export class BlobStorageBackendService implements BlobStorageBackendInterface {
private readonly backend: BlobStorageBackendInterface;
constructor(@Inject(BLOB_STORAGE_CONFIG) private readonly config: BlobStorageConfig) {

constructor(@Inject(BLOB_STORAGE_CONFIG) readonly config: BlobStorageConfig) {
if (config.backend.driver === "file") {
this.backend = new BlobStorageFileStorage(config.backend.file);
} else if (config.backend.driver === "azure") {
Expand Down Expand Up @@ -43,9 +44,9 @@ export class BlobStorageBackendService implements BlobStorageBackendInterface {
folderName: string,
fileName: string,
data: NodeJS.ReadableStream | Buffer | string,
{ headers, ...options }: CreateFileOptions,
{ contentType, ...options }: CreateFileOptions,
): Promise<void> {
return this.backend.createFile(folderName, fileName, data, { ...options, headers: normalizeHeaders(headers) });
return this.backend.createFile(folderName, fileName, data, { ...options, contentType: contentType });
}

async getFile(folderName: string, fileName: string): Promise<Readable> {
Expand All @@ -61,8 +62,7 @@ export class BlobStorageBackendService implements BlobStorageBackendInterface {
}

async getFileMetaData(folderName: string, fileName: string): Promise<StorageMetaData> {
const metaData = await this.backend.getFileMetaData(folderName, fileName);
return { ...metaData, headers: normalizeHeaders(metaData.headers) };
return this.backend.getFileMetaData(folderName, fileName);
}

getBackendFilePathPrefix(): string {
Expand All @@ -79,20 +79,8 @@ export class BlobStorageBackendService implements BlobStorageBackendInterface {
if (!(await this.fileExists(folderName, objectName))) {
await this.createFile(folderName, objectName, file.path, {
size: file.size,
headers: {
"Content-Type": file.mimetype,
},
contentType: file.mimetype,
});
}
}
}

export const normalizeHeaders = (headers: CreateFileOptions["headers"]): CreateFileOptions["headers"] => {
const result: CreateFileOptions["headers"] = {};

for (const [key, value] of Object.entries(headers)) {
result[key.toLowerCase()] = value;
}

return result;
};
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ export class BlobStorageFileStorage implements BlobStorageBackendInterface {
folderName: string,
fileName: string,
data: NodeJS.ReadableStream | Buffer | string,
{ headers }: CreateFileOptions,
{ contentType }: CreateFileOptions,
): Promise<void> {
if (!(await this.folderExists(`${folderName}/${path.dirname(fileName)}`))) {
await this.createFolder(`${folderName}/${path.dirname(fileName)}`);
Expand All @@ -70,9 +70,13 @@ export class BlobStorageFileStorage implements BlobStorageBackendInterface {
stream.end();
}
}),
await fs.promises.writeFile(`${this.path}/${folderName}/${fileName}-${this.headersFile}`, JSON.stringify(headers), {
encoding: "utf-8",
}),
await fs.promises.writeFile(
`${this.path}/${folderName}/${fileName}-${this.headersFile}`,
JSON.stringify({ "content-type": contentType }),
{
encoding: "utf-8",
},
),
]);
}

Expand Down Expand Up @@ -104,7 +108,7 @@ export class BlobStorageFileStorage implements BlobStorageBackendInterface {
return {
size: stat.size,
lastModified: stat.mtime,
headers,
contentType: headers["content-type"],
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,17 +79,12 @@ export class BlobStorageS3Storage implements BlobStorageBackendInterface {
folderName: string,
fileName: string,
data: NodeJS.ReadableStream | Buffer | string,
{ headers, size }: CreateFileOptions,
{ contentType, size }: CreateFileOptions,
): Promise<void> {
const metadata = {
headers: JSON.stringify(headers),
};

const input: AWS.PutObjectCommandInput = {
...this.getCommandInput(folderName, fileName),
ContentType: headers["content-type"],
ContentType: contentType,
ContentLength: size,
Metadata: metadata,
};

if (typeof data === "string") {
Expand Down Expand Up @@ -133,7 +128,8 @@ export class BlobStorageS3Storage implements BlobStorageBackendInterface {
size: response.ContentLength!,
etag: response.ETag,
lastModified: response.LastModified,
headers: response.Metadata?.headers ? JSON.parse(response.Metadata.headers) : {},
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
contentType: response.ContentType!,
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,10 @@ export class ScaledImagesCacheService {
}

const path = [createHashedPath(fileIdentifier), hasha(scaleSettingsCacheKey, { algorithm: "md5" })].join(sep);
await this.blobStorageBackendService.createFile(this.config.cacheDirectory, path, file, { size: metaData.size, headers: metaData.headers });
await this.blobStorageBackendService.createFile(this.config.cacheDirectory, path, file, {
size: metaData.size,
contentType: metaData.contentType,
});
}

async delete(fileIdentifier: string, scaleSettingsCacheKey?: string): Promise<void> {
Expand Down
44 changes: 23 additions & 21 deletions packages/api/cms-api/src/dam/images/images.controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@ export class ImagesController {
}

const file = await this.filesService.findOneById(params.fileId);

if (file === null) {
throw new NotFoundException();
}
Expand All @@ -67,7 +66,7 @@ export class ImagesController {
throw new ForbiddenException();
}

return this.getCroppedImage(file, params, accept, res, {
return this.pipeCroppedImage(file, params, accept, res, {
"cache-control": "max-age=31536000, private", // Local caches only (1 year)
});
}
Expand All @@ -84,7 +83,6 @@ export class ImagesController {
}

const file = await this.filesService.findOneById(params.fileId);

if (file === null) {
throw new NotFoundException();
}
Expand All @@ -97,7 +95,7 @@ export class ImagesController {
throw new ForbiddenException();
}

return this.getCroppedImage(file, params, accept, res, {
return this.pipeCroppedImage(file, params, accept, res, {
"cache-control": "max-age=31536000, private", // Local caches only (1 year)
});
}
Expand All @@ -110,7 +108,6 @@ export class ImagesController {
}

const file = await this.filesService.findOneById(params.fileId);

if (file === null) {
throw new NotFoundException();
}
Expand All @@ -119,7 +116,7 @@ export class ImagesController {
throw new BadRequestException("Content Hash mismatch!");
}

return this.getCroppedImage(file, params, accept, res, {
return this.pipeCroppedImage(file, params, accept, res, {
"cache-control": "max-age=86400, public", // Public cache (1 day)
});
}
Expand All @@ -132,7 +129,6 @@ export class ImagesController {
}

const file = await this.filesService.findOneById(params.fileId);

if (file === null) {
throw new NotFoundException();
}
Expand All @@ -141,7 +137,7 @@ export class ImagesController {
throw new BadRequestException("Content Hash mismatch!");
}

return this.getCroppedImage(file, params, accept, res, {
return this.pipeCroppedImage(file, params, accept, res, {
"cache-control": "max-age=86400, public", // Public cache (1 day)
});
}
Expand All @@ -150,12 +146,12 @@ export class ImagesController {
return hash === this.imagesService.createHash(imageParams);
}

private async getCroppedImage(
private async pipeCroppedImage(
file: FileInterface,
{ cropArea, resizeWidth, resizeHeight, focalPoint }: ImageParams,
accept: string,
res: Response,
overrideHeaders?: OutgoingHttpHeaders,
headers?: OutgoingHttpHeaders,
): Promise<void> {
if (!file.image) {
throw new NotFoundException();
Expand Down Expand Up @@ -223,26 +219,32 @@ export class ImagesController {

const cache = await this.cacheService.get(file.contentHash, path);
if (!cache) {
const response = await fetch(this.imgproxyService.getSignedUrl(path));
const headers: Record<string, string> = {};
for (const [key, value] of response.headers.entries()) {
headers[key] = value;
const imgproxyResponse = await fetch(this.imgproxyService.getSignedUrl(path));

const contentLength = imgproxyResponse.headers.get("content-length");
if (!contentLength) {
throw new Error("Content length not found");
}

const contentType = imgproxyResponse.headers.get("content-type");
if (!contentType) {
throw new Error("Content type not found");
}

res.writeHead(response.status, { ...headers, ...overrideHeaders });
response.body.pipe(new PassThrough()).pipe(res);
res.writeHead(imgproxyResponse.status, { ...headers, "content-length": contentLength, "content-type": contentType });
imgproxyResponse.body.pipe(new PassThrough()).pipe(res);

if (response.ok) {
if (imgproxyResponse.ok) {
await this.cacheService.set(file.contentHash, path, {
file: response.body.pipe(new PassThrough()),
file: imgproxyResponse.body.pipe(new PassThrough()),
metaData: {
size: Number(headers["content-length"]),
headers,
size: Number(contentLength),
contentType: contentType,
},
});
}
} else {
res.writeHead(200, { ...cache.metaData.headers, ...overrideHeaders });
res.writeHead(200, { ...headers, "content-type": cache.metaData.contentType });

cache.file.pipe(res);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,26 +151,32 @@ export function createFileUploadsDownloadController(options: { public: boolean }

const cache = await this.cacheService.get(file.contentHash, path);
if (!cache) {
const response = await fetch(this.imgproxyService.getSignedUrl(path));
const headers: Record<string, string> = {};
for (const [key, value] of response.headers.entries()) {
headers[key] = value;
const imgproxyResponse = await fetch(this.imgproxyService.getSignedUrl(path));

const contentLength = imgproxyResponse.headers.get("content-length");
if (!contentLength) {
throw new Error("Content length not found");
}

const contentType = imgproxyResponse.headers.get("content-type");
if (!contentType) {
throw new Error("Content type not found");
}

res.writeHead(response.status, headers);
response.body.pipe(new PassThrough()).pipe(res);
res.writeHead(imgproxyResponse.status, { "content-length": contentLength, "content-type": contentType });
imgproxyResponse.body.pipe(new PassThrough()).pipe(res);

if (response.ok) {
if (imgproxyResponse.ok) {
await this.cacheService.set(file.contentHash, path, {
file: response.body.pipe(new PassThrough()),
file: imgproxyResponse.body.pipe(new PassThrough()),
metaData: {
size: Number(headers["content-length"]),
headers,
size: Number(contentLength),
contentType: contentType,
},
});
}
} else {
res.writeHead(200, cache.metaData.headers);
res.writeHead(200, { "content-type": cache.metaData.contentType });

cache.file.pipe(res);
}
Expand Down