feat: добавить генерацию image variants

- добавлены shared config presets, custom transforms и allowlist hosts
- реализованы Backend endpoints для assets, presets и variants
- добавлена orchestration через PostgreSQL, RabbitMQ, S3 и worker
- обновлён Gateway read-through flow с L1 cache и корректным Vary: Accept
- добавлена миграция resize_mode для variants lookup
- обновлены dev scripts, env template, lockfile и документация
This commit is contained in:
2026-05-05 13:25:28 +03:00
parent bcadb85a83
commit 1c0e8277a3
59 changed files with 3526 additions and 143 deletions

View File

@@ -2,10 +2,11 @@
"name": "@image-platform/storage",
"version": "0.1.0",
"private": true,
"type": "module",
"exports": {
".": {
"types": "./src/index.ts",
"require": "./dist/index.js",
"import": "./dist/index.js",
"default": "./dist/index.js"
}
},

View File

@@ -8,11 +8,15 @@ export type StorageConfig = {
}
export function loadStorageConfigFromEnv(env: NodeJS.ProcessEnv = process.env): StorageConfig {
if (!env.S3_BUCKET) {
throw new Error("S3_BUCKET is required")
}
return {
accessKeyId: normalizeOptionalString(env.S3_ACCESS_KEY_ID),
bucket: env.S3_BUCKET ?? "image-platform",
bucket: env.S3_BUCKET,
endpoint: normalizeOptionalString(env.S3_ENDPOINT),
forcePathStyle: parseBoolean(env.S3_FORCE_PATH_STYLE, true),
forcePathStyle: parseBoolean(env.S3_FORCE_PATH_STYLE, false),
region: env.S3_REGION ?? "us-east-1",
secretAccessKey: normalizeOptionalString(env.S3_SECRET_ACCESS_KEY),
}

View File

@@ -1,3 +1,4 @@
export * from "./client.js"
export * from "./config.js"
export * from "./keys.js"
export * from "./objects.js"

View File

@@ -0,0 +1,91 @@
import { GetObjectCommand, HeadObjectCommand, PutObjectCommand, type S3Client } from "@aws-sdk/client-s3"
// A fully-buffered S3 object together with the response metadata kept alongside it.
export type StoredObject = {
  // Raw object bytes, read completely into memory (no streaming past this point).
  body: Buffer
  // Size in bytes, or null when not known.
  contentLength: number | null
  // Value of the object's Content-Type header, or null when absent.
  contentType: string | null
  // Entity tag returned by S3, or null when absent.
  etag: string | null
}
/**
 * Downloads an object and buffers it fully into memory.
 *
 * Returns null when the object does not exist; rethrows every other error.
 * Throws when S3 reports a response without a body stream.
 */
export async function getObjectBuffer(client: S3Client, bucket: string, key: string): Promise<StoredObject | null> {
  try {
    const response = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }))
    const payload = response.Body
    if (!payload) {
      throw new Error(`S3 object ${key} has no body`)
    }
    const body = await streamToBuffer(payload as AsyncIterable<Uint8Array>)
    const stored: StoredObject = {
      body,
      // Fall back to the buffered length when S3 omits Content-Length.
      contentLength: response.ContentLength ?? body.length,
      contentType: response.ContentType ?? null,
      etag: response.ETag ?? null,
    }
    return stored
  } catch (error) {
    // Missing objects are an expected outcome, not a failure.
    if (isS3NotFound(error)) {
      return null
    }
    throw error
  }
}
/**
 * Checks whether an object exists via a HEAD request.
 *
 * Resolves false on a not-found response; rethrows every other error.
 */
export async function objectExists(client: S3Client, bucket: string, key: string) {
  try {
    await client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }))
  } catch (error) {
    if (isS3NotFound(error)) {
      return false
    }
    throw error
  }
  // HEAD succeeded, so the object is present.
  return true
}
/**
 * Uploads a buffer as a single S3 object.
 *
 * Content type is required; cache-control is forwarded only when provided.
 * Returns the raw PutObject response from the SDK.
 */
export async function putObjectBuffer(input: {
  body: Buffer
  bucket: string
  cacheControl?: string
  client: S3Client
  contentType: string
  key: string
}) {
  const { body, bucket, cacheControl, client, contentType, key } = input
  const command = new PutObjectCommand({
    Body: body,
    Bucket: bucket,
    CacheControl: cacheControl,
    ContentType: contentType,
    Key: key,
  })
  return client.send(command)
}
// Recognizes the various shapes a "not found" error takes across the AWS SDK:
// a 404 HTTP status in response metadata, the NoSuchKey/NotFound error names,
// or a NoSuchKey code carried in either casing.
function isS3NotFound(error: unknown): boolean {
  if (!(error instanceof Error)) {
    return false
  }
  const candidate = error as Error & { $metadata?: { httpStatusCode?: number }; Code?: string; code?: string }
  if (candidate.$metadata?.httpStatusCode === 404) {
    return true
  }
  if (error.name === "NoSuchKey" || error.name === "NotFound") {
    return true
  }
  return candidate.Code === "NoSuchKey" || candidate.code === "NoSuchKey"
}
// Drains an async byte stream and joins every chunk into one Buffer.
async function streamToBuffer(stream: AsyncIterable<Uint8Array>) {
  const collected: Buffer[] = []
  for await (const part of stream) {
    collected.push(Buffer.from(part))
  }
  return Buffer.concat(collected)
}

View File

@@ -5,8 +5,8 @@
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"lib": ["ES2023"],
"module": "NodeNext",
"moduleResolution": "NodeNext",
"module": "Node16",
"moduleResolution": "Node16",
"noUncheckedIndexedAccess": true,
"outDir": "./dist",
"rootDir": "./src",