diff --git a/src/compose.ts b/src/compose.ts index e07da488..f4173bd8 100644 --- a/src/compose.ts +++ b/src/compose.ts @@ -40,7 +40,6 @@ import { import { ELYSIA_TRACE, type TraceHandler } from './trace' import { - coercePrimitiveRoot, ElysiaTypeCheck, getCookieValidator, getSchemaValidator, @@ -63,6 +62,7 @@ import type { SchemaValidator } from './types' import { tee } from './adapter/utils' +import { coercePrimitiveRoot } from './replace-schema' const allocateIf = (value: string, condition: unknown) => condition ? value : '' @@ -1473,9 +1473,21 @@ export const composeHandler = ({ if (candidate) { const isFirst = fileUnions.length === 0 + // Handle case where schema is wrapped in a Union (e.g., ObjectString coercion) + let properties = candidate.schema?.properties ?? type.properties + + // If no properties but schema is a Union, try to find the Object in anyOf + if (!properties && candidate.schema?.anyOf) { + const objectSchema = candidate.schema.anyOf.find((s: any) => s.type === 'object') + if (objectSchema) { + properties = objectSchema.properties + } + } + + if (!properties) continue const iterator = Object.entries( - type.properties + properties ) as [string, TSchema][] let validator = isFirst ? '\n' : ' else ' diff --git a/src/index.ts b/src/index.ts index f667387e..f0ff83b6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -43,13 +43,12 @@ import { } from './utils' import { - coercePrimitiveRoot, - stringToStructureCoercions, getSchemaValidator, getResponseSchemaValidator, getCookieValidator, ElysiaTypeCheck, - queryCoercions + hasType, + resolveSchema, } from './schema' import { composeHandler, @@ -165,6 +164,7 @@ import type { InlineHandlerNonMacro, Router } from './types' +import { coercePrimitiveRoot, coerceFormData, queryCoercions, stringToStructureCoercions } from './replace-schema' export type AnyElysia = Elysia @@ -588,7 +588,13 @@ export default class Elysia< dynamic, models, normalize, - additionalCoerce: coercePrimitiveRoot(), + additionalCoerce: (() => { + const resolved = resolveSchema(cloned.body, models, modules) + // Only check for Files if resolved schema is a TypeBox schema (has Kind symbol) + return (resolved && Kind in resolved && (hasType('File', resolved) || hasType('Files', resolved))) + ? coerceFormData() + : coercePrimitiveRoot() + })(), validators: standaloneValidators.map((x) => x.body), sanitize }), @@ -650,7 +656,13 @@ export default class Elysia< dynamic, models, normalize, - additionalCoerce: coercePrimitiveRoot(), + additionalCoerce: (() => { + const resolved = resolveSchema(cloned.body, models, modules) + // Only check for Files if resolved schema is a TypeBox schema (has Kind symbol) + return (resolved && Kind in resolved && (hasType('File', resolved) || hasType('Files', resolved))) + ? 
coerceFormData() + : coercePrimitiveRoot() + })(), validators: standaloneValidators.map( (x) => x.body ), @@ -8144,8 +8156,10 @@ export { export { getSchemaValidator, getResponseSchemaValidator, - replaceSchemaType } from './schema' +export { + replaceSchemaTypeFromManyOptions as replaceSchemaType +} from './replace-schema' export { mergeHook, diff --git a/src/replace-schema.ts b/src/replace-schema.ts new file mode 100644 index 00000000..de80d98b --- /dev/null +++ b/src/replace-schema.ts @@ -0,0 +1,266 @@ +import { Kind, type TAnySchema, type TSchema } from "@sinclair/typebox"; +import { t } from "./type-system"; +import type { MaybeArray } from "./types"; + +export interface ReplaceSchemaTypeOptions { + from: TSchema; + to(schema: TSchema): TSchema | null; + excludeRoot?: boolean; + rootOnly?: boolean; + original?: TAnySchema; + /** + * Traverse until an object is found, excluding the root object + **/ + untilObjectFound?: boolean; + /** + * Only replace the first matching type; can be paired with excludeRoot + **/ + onlyFirst?: "object" | "array" | (string & {}); +} + +/** + * Replace schema types with custom transformation + * + * @param schema - The schema to transform + * @param options - Transformation options (single or array) + * @returns Transformed schema + * + * @example + * // Transform Object to ObjectString + * replaceSchemaType(schema, { + * from: t.Object({}), + * to: (s) => t.ObjectString(s.properties || {}, s), + * excludeRoot: true, + * onlyFirst: 'object' + * }) + */ +export const replaceSchemaTypeFromManyOptions = ( + schema: TSchema, + options: MaybeArray<ReplaceSchemaTypeOptions>, +): TSchema => { + if (Array.isArray(options)) { + let result = schema; + for (const option of options) { + result = replaceSchemaTypeFromOption(result, option); + } + return result; + } + + return replaceSchemaTypeFromOption(schema, options); +}; + +const replaceSchemaTypeFromOption = ( + schema: TSchema, + option: ReplaceSchemaTypeOptions, +): TSchema => { + if (option.rootOnly && option.excludeRoot) { + throw new Error("Can't set both rootOnly and excludeRoot"); + } + if (option.rootOnly && option.onlyFirst) { + throw new Error("Can't set both rootOnly and onlyFirst"); + } + if (option.rootOnly && option.untilObjectFound) { + throw new Error("Can't set both rootOnly and untilObjectFound"); + } + + type WalkProps = { s: TSchema; isRoot: boolean; treeLvl: number }; + const walk = ({ s, isRoot, treeLvl }: WalkProps): TSchema => { + if (!s) return s; + + const skipRoot = isRoot && option.excludeRoot; + const fromKind = option.from[Kind]; + + // Double-wrapping check + if (s.elysiaMeta) { + const fromElysiaMeta = option.from.elysiaMeta; + if (fromElysiaMeta === s.elysiaMeta && !skipRoot) { + return option.to(s) as TSchema; + } + return s; + } + + const shouldTransform = fromKind && s[Kind] === fromKind; + if (!skipRoot && option.onlyFirst && s.type === option.onlyFirst) { + if (shouldTransform) { + return option.to(s) as TSchema; + } + return s; + } + + if (isRoot && option.rootOnly) { + if (shouldTransform) { + return option.to(s) as TSchema; + } + return s; + } + + if (!isRoot && option.untilObjectFound && s.type === "object") { + return s; + } + + const newWalkInput = { isRoot: false, treeLvl: treeLvl + 1 }; + const withTransformedChildren = { ...s }; + + if (s.oneOf) { + withTransformedChildren.oneOf = s.oneOf.map((x: TSchema) => + walk({ ...newWalkInput, s: x }), + ); + } + if (s.anyOf) { + withTransformedChildren.anyOf = s.anyOf.map((x:
TSchema) => + walk({ ...newWalkInput, s: x }), + ); + } + if (s.allOf) { + withTransformedChildren.allOf = s.allOf.map((x: TSchema) => + walk({ ...newWalkInput, s: x }), + ); + } + if (s.not) { + withTransformedChildren.not = walk({ ...newWalkInput, s: s.not }); + } + + if (s.properties) { + withTransformedChildren.properties = {}; + for (const [k, v] of Object.entries(s.properties)) { + withTransformedChildren.properties[k] = walk({ + ...newWalkInput, + s: v as TSchema, + }); + } + } + + if (s.items) { + const items = s.items; + withTransformedChildren.items = Array.isArray(items) + ? items.map((x: TSchema) => walk({ ...newWalkInput, s: x })) + : walk({ ...newWalkInput, s: items as TSchema }); + } + + // Transform THIS node (with children already transformed) + const shouldTransformThis = + !skipRoot && fromKind && withTransformedChildren[Kind] === fromKind; + if (shouldTransformThis) { + return option.to(withTransformedChildren) as TSchema; + } + + return withTransformedChildren; + }; + + return walk({ s: schema, isRoot: true, treeLvl: 0 }); +}; + +/** + * Helper: Extract plain Object from ObjectString + * + * @example + * ObjectString structure: + * { + * elysiaMeta: "ObjectString", + * anyOf: [ + * { type: "string", format: "ObjectString" }, // ← String branch + * { type: "object", properties: {...} } // ← Object branch (we want this) + * ] + * } + * ArrayString structure: + * { + * elysiaMeta: "ArrayString", + * anyOf: [ + * { type: "string", format: "ArrayString" }, // ← String branch + * { type: "array", items: {...} } // ← Array branch (we want this) + * ] + * } + */ +export const revertObjAndArrStr = (schema: TSchema): TSchema => { + if (schema.elysiaMeta !== "ObjectString" && schema.elysiaMeta !== "ArrayString") + return schema; + + const anyOf = schema.anyOf; + if (!anyOf?.[1]) return schema; + + // anyOf[1] is the object branch (already clean, no elysiaMeta) + return anyOf[1]; +}; + +let _stringToStructureCoercions: ReplaceSchemaTypeOptions[]; + +export const stringToStructureCoercions = () => { + if (!_stringToStructureCoercions) { + _stringToStructureCoercions = [ + { + from: t.Object({}), + to: (schema) => t.ObjectString(schema.properties || {}, schema), + excludeRoot: true, + }, + { + from: t.Array(t.Any()), + to: (schema) => t.ArrayString(schema.items || t.Any(), schema), + }, + ] satisfies ReplaceSchemaTypeOptions[]; + } + + return _stringToStructureCoercions; +}; + +let _queryCoercions: ReplaceSchemaTypeOptions[]; + +export const queryCoercions = () => { + if (!_queryCoercions) { + _queryCoercions = [ + { + from: t.Object({}), + to: (schema) => t.ObjectString(schema.properties ?? {}, schema), + excludeRoot: true, + }, + { + from: t.Array(t.Any()), + to: (schema) => t.ArrayQuery(schema.items ?? t.Any(), schema), + }, + ] satisfies ReplaceSchemaTypeOptions[]; + } + + return _queryCoercions; +}; + +let _coercePrimitiveRoot: ReplaceSchemaTypeOptions[]; + +export const coercePrimitiveRoot = () => { + if (!_coercePrimitiveRoot) + _coercePrimitiveRoot = [ + { + from: t.Number(), + to: (schema) => t.Numeric(schema), + rootOnly: true, + }, + { + from: t.Boolean(), + to: (schema) => t.BooleanString(schema), + rootOnly: true, + }, + ] satisfies ReplaceSchemaTypeOptions[]; + + return _coercePrimitiveRoot; +}; + +let _coerceFormData: ReplaceSchemaTypeOptions[]; + +export const coerceFormData = () => { + if (!_coerceFormData) + _coerceFormData = [ + { + from: t.Object({}), + to: (schema) => t.ObjectString(schema.properties ?? 
{}, schema), + onlyFirst: 'object', + excludeRoot: true + }, + { + from: t.Array(t.Any()), + to: (schema) => t.ArrayString(schema.items ?? t.Any(), schema), + onlyFirst: 'array', + excludeRoot: true + }, + ] satisfies ReplaceSchemaTypeOptions[]; + + return _coerceFormData; +}; diff --git a/src/schema.ts b/src/schema.ts index 0c8fa39a..73e714e2 100644 --- a/src/schema.ts +++ b/src/schema.ts @@ -31,6 +31,7 @@ import type { } from './types' import type { StandardSchemaV1Like } from './types' +import { replaceSchemaTypeFromManyOptions, type ReplaceSchemaTypeOptions, stringToStructureCoercions } from './replace-schema' type MapValueError = ReturnType @@ -122,36 +123,63 @@ export const hasAdditionalProperties = ( return false } -export const hasType = (type: string, schema: TAnySchema) => { +/** + * Resolve a schema that might be a model reference (string) to the actual schema + */ + export const resolveSchema = ( + schema: TAnySchema | string | undefined, + models?: Record, + modules?: TModule + ): TAnySchema | StandardSchemaV1Like | undefined => { + if (!schema) return undefined + if (typeof schema !== 'string') return schema + + // Check modules first (higher priority) + // @ts-expect-error private property + if (modules && schema in modules.$defs) { + return (modules as TModule<{}, {}>).Import(schema as never) + } + + // Then check models + return models?.[schema] + } + +export const hasType = (type: string, schema: TAnySchema): boolean => { if (!schema) return false if (Kind in schema && schema[Kind] === type) return true + // Handle Import/Ref schemas (unwrap) + if (Kind in schema && schema[Kind] === 'Import') { + if (schema.$defs && schema.$ref) { + const ref = schema.$ref.replace('#/$defs/', '') + if (schema.$defs[ref]) { + return hasType(type, schema.$defs[ref]) + } + } + } + + if (schema.anyOf) return schema.anyOf.some((s: TSchema) => hasType(type, s)) + if (schema.oneOf) return schema.oneOf.some((s: TSchema) => hasType(type, s)) + if (schema.allOf) return schema.allOf.some((s: TSchema) => hasType(type, s)) + + if (schema.type === 'array' && schema.items) { + if (type === 'Files' && Kind in schema.items && schema.items[Kind] === 'File') { + return true + } + return hasType(type, schema.items) + } + if (schema.type === 'object') { const properties = schema.properties as Record if (!properties) return false for (const key of Object.keys(properties)) { - const property = properties[key] - - if (property.type === 'object') { - if (hasType(type, property)) return true - } else if (property.anyOf) { - for (let i = 0; i < property.anyOf.length; i++) - if (hasType(type, property.anyOf[i])) return true - } - - if (Kind in property && property[Kind] === type) return true + if (hasType(type, properties[key])) return true } - - return false } - return ( - !!schema.properties && - Kind in schema.properties && - schema.properties[Kind] === type - ) + return false } export const hasElysiaMeta = (meta: string, _schema: TAnySchema): boolean => { @@ -337,396 +365,6 @@ export const hasTransform = (schema: TAnySchema): boolean => { return TransformKind in schema } -interface ReplaceSchemaTypeOptions { - from: TSchema - to(options: Object): TSchema | null - excludeRoot?: boolean - rootOnly?: boolean - original?: TAnySchema - /** - * Traverse until object is found except root object - **/ - untilObjectFound?: boolean - /** - * Only replace first object type - **/ - onlyFirst?: 'object' | 'array' | (string & {}) -} - -interface ReplaceSchemaTypeConfig { - root: boolean - definitions?: Record | undefined -} - 
-export const replaceSchemaType = ( - schema: TSchema, - options: MaybeArray, - _config: Partial> = {} -) => { - const config = _config as ReplaceSchemaTypeConfig - config.root = true - - // if (schema.$defs) - // config.definitions = { - // ...config.definitions, - // ...schema.$defs - // } - - // const corceDefinitions = (option: ReplaceSchemaTypeOptions) => { - // if (!config.definitions) return - - // for (const [key, value] of Object.entries(config.definitions)) { - // const fromSymbol = option.from[Kind] - - // if (fromSymbol === 'Ref') continue - - // config.definitions[key] = _replaceSchemaType(value, option, config) - // } - // } - - if (!Array.isArray(options)) { - options.original = schema - - // corceDefinitions(options) - - return _replaceSchemaType(schema, options, config) - } - - for (const option of options) { - option.original = schema - - // corceDefinitions(option) - - schema = _replaceSchemaType(schema, option, config) - } - - return schema -} - -const _replaceSchemaType = ( - schema: TSchema, - options: ReplaceSchemaTypeOptions, - config: ReplaceSchemaTypeConfig -): TSchema => { - if (!schema) return schema - - const root = config.root - - if (options.onlyFirst && schema.type === options.onlyFirst) - return options.to(schema) ?? schema - - if (options.untilObjectFound && !root && schema.type === 'object') - return schema - - const fromSymbol = options.from[Kind] - - // if (schema.$ref) { - // if (schema.$defs && schema.$ref in schema.$defs) { - // const definitions: Record = {} - - // for (const [key, value] of Object.entries(schema.$defs)) - // definitions[key] = _replaceSchemaType( - // value as TSchema, - // options, - // config - // ) - - // config.definitions = { ...config.definitions, ...definitions } - // } - - // return schema - // } - - if (schema.oneOf) { - for (let i = 0; i < schema.oneOf.length; i++) - schema.oneOf[i] = _replaceSchemaType( - schema.oneOf[i], - options, - config - ) - - return schema - } - - if (schema.anyOf) { - for (let i = 0; i < schema.anyOf.length; i++) - schema.anyOf[i] = _replaceSchemaType( - schema.anyOf[i], - options, - config - ) - - return schema - } - - if (schema.allOf) { - for (let i = 0; i < schema.allOf.length; i++) - schema.allOf[i] = _replaceSchemaType( - schema.allOf[i], - options, - config - ) - - return schema - } - - if (schema.not) return _replaceSchemaType(schema.not, options, config) - - const isRoot = root && !!options.excludeRoot - - if (schema[Kind] === fromSymbol) { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { anyOf, oneOf, allOf, not, properties, items, ...rest } = schema - - const to = options.to(rest) - - if (!to) return schema - - // If t.Transform is used, we need to re-calculate Encode, Decode - let transform - - const composeProperties = (schema: TSchema) => { - const v = _composeProperties(schema) - - // $id is removed because it's used in Union inside an Import - if (v.$id) delete v.$id - - return v - } - - const _composeProperties = (v: TSchema) => { - if (properties && v.type === 'object') { - const newProperties = >{} - for (const [key, value] of Object.entries(properties)) - newProperties[key] = _replaceSchemaType( - value as TSchema, - options, - { - ...config, - root: false - } - ) - - return { - ...rest, - ...v, - properties: newProperties - } - } - - if (items && v.type === 'array') - return { - ...rest, - ...v, - items: _replaceSchemaType(items, options, { - ...config, - root: false - }) - } - - const value = { - ...rest, - ...v - } - - // Remove required as it's not 
object - delete value['required'] - - // Create default value for ObjectString - if ( - properties && - v.type === 'string' && - v.format === 'ObjectString' && - v.default === '{}' - ) { - transform = t.ObjectString(properties, rest) - value.properties = properties - } - // Create default value for ArrayString - else if ( - items && - v.type === 'string' && - v.format === 'ArrayString' && - v.default === '[]' - ) { - transform = t.ArrayString(items, rest) - value.items = items - } - - return value - } - - if (isRoot) { - if (properties) { - const newProperties = >{} - for (const [key, value] of Object.entries(properties)) - newProperties[key] = _replaceSchemaType( - value as TSchema, - options, - { - ...config, - root: false - } - ) - - return { - ...rest, - properties: newProperties - } - } else if (items?.map) - return { - ...rest, - items: items.map((v: TSchema) => - _replaceSchemaType(v, options, { - ...config, - root: false - }) - ) - } - - return rest - } - - if (to.anyOf) - for (let i = 0; i < to.anyOf.length; i++) - to.anyOf[i] = composeProperties(to.anyOf[i]) - else if (to.oneOf) - for (let i = 0; i < to.oneOf.length; i++) - to.oneOf[i] = composeProperties(to.oneOf[i]) - else if (to.allOf) - for (let i = 0; i < to.allOf.length; i++) - to.allOf[i] = composeProperties(to.allOf[i]) - else if (to.not) to.not = composeProperties(to.not) - - if (transform) to[TransformKind as any] = transform[TransformKind] - - if (to.anyOf || to.oneOf || to.allOf || to.not) return to - - if (properties) { - const newProperties = >{} - for (const [key, value] of Object.entries(properties)) - newProperties[key] = _replaceSchemaType( - value as TSchema, - options, - { - ...config, - root: false - } - ) - - return { - ...rest, - ...to, - properties: newProperties - } - } else if (items?.map) - return { - ...rest, - ...to, - items: items.map((v: TSchema) => - _replaceSchemaType(v, options, { - ...config, - root: false - }) - ) - } - - return { - ...rest, - ...to - } - } - - const properties = schema?.properties as Record - - if (properties && root && options.rootOnly !== true) - for (const [key, value] of Object.entries(properties)) { - switch (value[Kind]) { - case fromSymbol: - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { anyOf, oneOf, allOf, not, type, ...rest } = value - const to = options.to(rest) - - if (!to) return schema - - if (to.anyOf) - for (let i = 0; i < to.anyOf.length; i++) - to.anyOf[i] = { ...rest, ...to.anyOf[i] } - else if (to.oneOf) - for (let i = 0; i < to.oneOf.length; i++) - to.oneOf[i] = { ...rest, ...to.oneOf[i] } - else if (to.allOf) - for (let i = 0; i < to.allOf.length; i++) - to.allOf[i] = { ...rest, ...to.allOf[i] } - else if (to.not) to.not = { ...rest, ...to.not } - - properties[key] = { - ...rest, - ..._replaceSchemaType(rest, options, { - ...config, - root: false - }) - } - break - - case 'Object': - case 'Union': - properties[key] = _replaceSchemaType(value, options, { - ...config, - root: false - }) - break - - default: - if (Array.isArray(value.items)) { - for (let i = 0; i < value.items.length; i++) { - value.items[i] = _replaceSchemaType( - value.items[i], - options, - { - ...config, - root: false - } - ) - } - } else if ( - value.anyOf || - value.oneOf || - value.allOf || - value.not - ) - properties[key] = _replaceSchemaType(value, options, { - ...config, - root: false - }) - else if (value.type === 'array') { - value.items = _replaceSchemaType(value.items, options, { - ...config, - root: false - }) - } - - break - } - } - - if 
(schema.type === 'array' && schema.items) - if (Array.isArray(schema.items)) - schema.items = schema.items.map((item) => - _replaceSchemaType(item, options, { - ...config, - root: false - }) - ) - else - schema.items = _replaceSchemaType(schema.items, options, { - ...config, - root: false - }) - - return schema -} - const createCleaner = (schema: TAnySchema) => (value: unknown) => { if (typeof value === 'object') try { @@ -779,7 +417,7 @@ export const getSchemaValidator = < const replaceSchema = (schema: TAnySchema): TAnySchema => { if (coerce) - return replaceSchemaType(schema, [ + return replaceSchemaTypeFromManyOptions(schema, [ { from: t.Number(), to: (options) => t.Numeric(options), @@ -795,7 +433,7 @@ export const getSchemaValidator = < : [additionalCoerce]) ]) - return replaceSchemaType(schema, additionalCoerce) + return replaceSchemaTypeFromManyOptions(schema, additionalCoerce) } const mapSchema = ( @@ -819,13 +457,19 @@ export const getSchemaValidator = < if (!schema) return undefined as any } + const hasAdditionalCoerce = Array.isArray(additionalCoerce) ? + additionalCoerce.length > 0 : !!additionalCoerce if (Kind in schema) { if (schema[Kind] === 'Import') { if (!hasRef(schema.$defs[schema.$ref])) { schema = schema.$defs[schema.$ref] - if (coerce || additionalCoerce) + if (coerce || hasAdditionalCoerce) { schema = replaceSchema(schema as TSchema) + if ('$id' in schema && !schema.$defs) { + schema.$id = `${schema.$id}_coerced_${randomId()}`; + } + } } } else { if (hasRef(schema)) { @@ -838,7 +482,7 @@ export const getSchemaValidator = < }) schema = model.Import(id) - } else if (coerce || additionalCoerce) + } else if (coerce || hasAdditionalCoerce) schema = replaceSchema(schema as TSchema) } } @@ -1050,21 +694,19 @@ export const getSchemaValidator = < ) schema.additionalProperties = additionalProperties else - schema = replaceSchemaType(schema, { - onlyFirst: 'object', - from: t.Object({}), - // @ts-ignore - to({ properties, ...options }) { - // If nothing is return, use the original schema - if (!properties) return - if ('additionalProperties' in schema) return - - return t.Object(properties, { - ...options, - additionalProperties: false - }) - } - }) + schema = replaceSchemaTypeFromManyOptions(schema, { + onlyFirst: "object", + from: t.Object({}), + to(schema) { + if (!schema.properties) return schema; + if ("additionalProperties" in schema) return schema; + + return t.Object(schema.properties, { + ...schema, + additionalProperties: false, + }); + } + }); } if (dynamic) { @@ -1591,66 +1233,6 @@ export const getResponseSchemaValidator = ( return record } -let _stringToStructureCoercions: ReplaceSchemaTypeOptions[] - -export const stringToStructureCoercions = () => { - if (!_stringToStructureCoercions) { - _stringToStructureCoercions = [ - { - from: t.Object({}), - to: () => t.ObjectString({}), - excludeRoot: true - }, - { - from: t.Array(t.Any()), - to: () => t.ArrayString(t.Any()) - } - ] satisfies ReplaceSchemaTypeOptions[] - } - - return _stringToStructureCoercions -} - -let _queryCoercions: ReplaceSchemaTypeOptions[] - -export const queryCoercions = () => { - if (!_queryCoercions) { - _queryCoercions = [ - { - from: t.Object({}), - to: () => t.ObjectString({}), - excludeRoot: true - }, - { - from: t.Array(t.Any()), - to: () => t.ArrayQuery(t.Any()) - } - ] satisfies ReplaceSchemaTypeOptions[] - } - - return _queryCoercions -} - -let _coercePrimitiveRoot: ReplaceSchemaTypeOptions[] - -export const coercePrimitiveRoot = () => { - if (!_coercePrimitiveRoot) - 
_coercePrimitiveRoot = [ - { - from: t.Number(), - to: (options) => t.Numeric(options), - rootOnly: true - }, - { - from: t.Boolean(), - to: (options) => t.BooleanString(options), - rootOnly: true - } - ] satisfies ReplaceSchemaTypeOptions[] - - return _coercePrimitiveRoot -} - export const getCookieValidator = ({ validator, modules, diff --git a/src/type-system/index.ts b/src/type-system/index.ts index a5a3a082..6d558569 100644 --- a/src/type-system/index.ts +++ b/src/type-system/index.ts @@ -303,7 +303,7 @@ export const ElysiaType = { [ t.String({ format: 'ObjectString', - default: '{}' + default: options?.default }), schema ], @@ -373,7 +373,10 @@ export const ElysiaType = { default: options?.default }), schema - ]) + ], + { + elysiaMeta: 'ArrayString' + }) ) .Decode((value) => { if (Array.isArray(value)) { diff --git a/test/aot/has-type.test.ts b/test/aot/has-type.test.ts index 7eb7820a..6fdf3c6e 100644 --- a/test/aot/has-type.test.ts +++ b/test/aot/has-type.test.ts @@ -67,4 +67,88 @@ describe('Has Transform', () => { expect(hasType('File', schema)).toBe(true) }) + + it('found on direct Union', () => { + const schema = t.Union([ + t.Object({ + id: t.Number(), + liyue: t.File() + }), + t.Object({ + id: t.Number(), + liyue: t.Number(), + }), + ]) + + expect(hasType('File', schema)).toBe(true) + }) + + it('find in Import wrapping File', () => { + const schema = t.Module({ + Avatar: t.File() + }).Import('Avatar') + + expect(hasType('File', schema)).toBe(true) + }) + + it('find in Import wrapping Object with File', () => { + const schema = t.Module({ + Upload: t.Object({ + name: t.String(), + file: t.File() + }) + }).Import('Upload') + + expect(hasType('File', schema)).toBe(true) + }) + + it('return false for Import wrapping Object without File', () => { + const schema = t.Module({ + User: t.Object({ + name: t.String(), + age: t.Number() + }) + }).Import('User') + + expect(hasType('File', schema)).toBe(false) + }) + + it('find in Import wrapping Union with File', () => { + const schema = t.Module({ + Data: t.Union([ + t.Object({ file: t.File() }), + t.Null() + ]) + }).Import('Data') + + expect(hasType('File', schema)).toBe(true) + }) + + it('find in Import wrapping Array of Files', () => { + const schema = t.Module({ + Uploads: t.Array(t.File()) + }).Import('Uploads') + + expect(hasType('Files', schema)).toBe(true) + }) + + it('find in Import wrapping Array of Files using t.Files', () => { + const schema = t.Module({ + Uploads: t.Files() + }).Import('Uploads') + + expect(hasType('Files', schema)).toBe(true) + }) + + it('find in Array of Files (direct)', () => { + const schema = t.Array(t.File()) + + expect(hasType('Files', schema)).toBe(true) + }) + + it('find in Array of Files using t.Files (direct)', () => { + const schema = t.Files() + + expect(hasType('Files', schema)).toBe(true) + }) }) diff --git a/test/type-system/formdata.test.ts b/test/type-system/formdata.test.ts new file mode 100644 index 00000000..232c1047 --- /dev/null +++ b/test/type-system/formdata.test.ts @@ -0,0 +1,910 @@ +import { describe, expect, it } from "bun:test"; +import { z } from "zod"; +import { Elysia, fileType, t, type ValidationError } from "../../src"; + +const variantObject = t.Object({ + price: t.Number({ minimum: 0 }), + weight: t.Number({ minimum: 0 }), +}); + +const metadataObject = { + category: t.String(), + tags: t.Array(t.String()), + inStock: t.Boolean(), +}; + +const postProductModel = t.Object({ + name: t.String(), + variants: t.Array(variantObject), + metadata: t.Object(metadataObject), + image: 
t.File({ type: "image" }), +}); +type postProductModel = typeof postProductModel.static; + +const patchProductModel = t.Object({ + name: t.Optional(t.String()), + variants: t.Optional(t.Array(variantObject)), + metadata: t.Optional(t.Object(metadataObject)), + image: t.Optional(t.File({ type: "image" })), +}); +type patchProductModel = typeof patchProductModel.static; + +const postProductModelComplex = t.Object({ + name: t.String(), + variants: t.ArrayString(variantObject), + metadata: t.ObjectString(metadataObject), + image: t.File({ type: "image" }), +}); +type postProductModelComplex = typeof postProductModelComplex.static; + +const patchProductModelComplex = t.Object({ + name: t.Optional(t.String()), + variants: t.Optional(t.ArrayString(variantObject)), + metadata: t.Optional(t.ObjectString(metadataObject)), + image: t.Optional(t.File({ type: "image" })), +}); +type patchProductModelComplex = typeof patchProductModelComplex.static; + +describe.each([ + { aot: true }, + { aot: false }, +])("Nested FormData with file(s) support (aot: $aot)", ({ aot }) => { + const app = new Elysia({ aot }) + .post("/product", async ({ body, status }) => status("Created", body), { + body: postProductModel, + }) + .patch( + "/product/:id", + ({ body, params }) => ({ + id: params.id, + ...body, + }), + { + body: patchProductModel, + }, + ) + .post( + "/product-complex", + async ({ body, status }) => status("Created", body), + { + body: postProductModelComplex, + }, + ) + .patch( + "/product-complex/:id", + ({ body, params }) => ({ + id: params.id, + ...body, + }), + { + body: patchProductModelComplex, + }, + ); + describe("Nested FormData with mandatory bunFile (post operation)", async () => { + const bunFilePath1 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath1) as File; + + const newProduct: postProductModel = { + name: "Test Product", + variants: [ + { + price: 10, + weight: 100, + }, + { + price: 2.7, + weight: 32, + }, + ], + metadata: { + category: "Electronics", + tags: ["new", "featured", "sale"], + inStock: true, + }, + image: bunFile, + }; + + it("should create a product", async () => { + const stringifiedVariants = JSON.stringify(newProduct.variants); + const stringifiedMetadata = JSON.stringify(newProduct.metadata); + + const body = new FormData(); + body.append("name", newProduct.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product", { + method: "POST", + body, + }), + ); + expect(response.status).toBe(201); + const data = await response.json(); + expect(data).toEqual(newProduct); + }); + + it("should return validation error on nested ArrayString", async () => { + const stringifiedVariants = JSON.stringify([ + { + price: 23, + waighTypo: "", + }, + ]); + const stringifiedMetadata = JSON.stringify(newProduct.metadata); + + const body = new FormData(); + body.append("name", newProduct.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + + it("should return validation error on nested ObjectString", async () => { + const stringifiedVariants = 
JSON.stringify(newProduct.variants); + const stringifiedMetadata = JSON.stringify({ + categoryTypo: "Electronics", + tags: ["new", "featured", "sale"], + inStock: true, + }); + + const body = new FormData(); + body.append("name", newProduct.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + }); + + describe("Nested FormData with optional file (patch operation)", async () => { + const bunFilePath2 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath2) as File; + + it("PATCH with bunFile and omitted optional t.ObjectString", async () => { + const body = new FormData(); + body.append("name", "Updated Product"); + body.append("image", bunFile); + // metadata and variants fields are omitted (should be OK since they're optional) + + const response = await app.handle( + new Request("http://localhost/product/123", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as patchProductModel; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product"); + expect(data?.metadata).toBeUndefined(); + expect(data?.variants).toBeUndefined(); + }); + + it("PATCH with file and valid t.ObjectString and t.ArrayString data", async () => { + const body = new FormData(); + body.append("name", "Updated Product"); + body.append("image", bunFile); + body.append( + "metadata", + JSON.stringify({ + category: "Electronics", + tags: ["sale", "new"], + inStock: true, + }), + ); + body.append( + "variants", + JSON.stringify([ + { + price: 15, + weight: 200, + }, + ]), + ); + + const response = await app.handle( + new Request("http://localhost/product/123", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as patchProductModel; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product"); + expect(data?.metadata).toEqual({ + category: "Electronics", + tags: ["sale", "new"], + inStock: true, + }); + expect(data?.variants).toEqual([ + { + price: 15, + weight: 200, + }, + ]); + }); + + it("PATCH without file and omitted optional t.ObjectString and optional t.ArrayString", async () => { + const response = await app.handle( + new Request("http://localhost/product/123", { + method: "PATCH", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + name: "Updated Product", + }), + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as patchProductModel; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product"); + expect(data?.image).toBeUndefined(); + expect(data?.metadata).toBeUndefined(); + expect(data?.variants).toBeUndefined(); + }); + + it("PATCH should return validation error on invalid ObjectString", async () => { + const body = new FormData(); + body.append("name", "Updated Product"); + body.append("image", bunFile); + body.append( + "metadata", + JSON.stringify({ + categoryTypo: "Electronics", // Wrong property name + tags: ["sale"], + inStock: true, + }), + ); + + const response = await app.handle( + new Request("http://localhost/product/123", { + method: "PATCH", + body, + }), + ); + + 
expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + + it("PATCH should return validation error on invalid ArrayString", async () => { + const body = new FormData(); + body.append("name", "Updated Product"); + body.append("image", bunFile); + body.append( + "variants", + JSON.stringify([ + { + priceTypo: 15, // Wrong property name + weight: 200, + }, + ]), + ); + + const response = await app.handle( + new Request("http://localhost/product/123", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + }); + + describe("Nested FormData with t.ArrayString and t.ObjectString (POST operation)", async () => { + const bunFilePath3 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath3) as File; + + const newProductComplex: postProductModelComplex = { + name: "Test Product Complex", + variants: [ + { + price: 10, + weight: 100, + }, + { + price: 2.7, + weight: 32, + }, + ], + metadata: { + category: "Electronics", + tags: ["new", "featured", "sale"], + inStock: true, + }, + image: bunFile, + }; + + it("should create a product with t.ArrayString and t.ObjectString", async () => { + const stringifiedVariants = JSON.stringify(newProductComplex.variants); + const stringifiedMetadata = JSON.stringify(newProductComplex.metadata); + + const body = new FormData(); + body.append("name", newProductComplex.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product-complex", { + method: "POST", + body, + }), + ); + expect(response.status).toBe(201); + const data = await response.json(); + expect(data).toEqual(newProductComplex); + }); + + it("should return validation error on invalid t.ArrayString nested structure", async () => { + const stringifiedVariants = JSON.stringify([ + { + price: 23, + weightTypo: 100, // Wrong property name + }, + ]); + const stringifiedMetadata = JSON.stringify(newProductComplex.metadata); + + const body = new FormData(); + body.append("name", newProductComplex.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product-complex", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + + it("should return validation error on invalid t.ObjectString nested structure", async () => { + const stringifiedVariants = JSON.stringify(newProductComplex.variants); + const stringifiedMetadata = JSON.stringify({ + categoryTypo: "Electronics", // Wrong property name + tags: ["new", "featured", "sale"], + inStock: true, + }); + + const body = new FormData(); + body.append("name", newProductComplex.name); + body.append("variants", stringifiedVariants); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product-complex", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + + it("should return 
validation error when variants is not a valid JSON string", async () => { + const stringifiedMetadata = JSON.stringify(newProductComplex.metadata); + + const body = new FormData(); + body.append("name", newProductComplex.name); + body.append("variants", "not-valid-json"); + body.append("metadata", stringifiedMetadata); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product-complex", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + + it("should return validation error when metadata is not a valid JSON string", async () => { + const stringifiedVariants = JSON.stringify(newProductComplex.variants); + + const body = new FormData(); + body.append("name", newProductComplex.name); + body.append("variants", stringifiedVariants); + body.append("metadata", "not-valid-json"); + body.append("image", bunFile); + + const response = await app.handle( + new Request("http://localhost/product-complex", { + method: "POST", + body, + }), + ); + const data = (await response.json()) as ValidationError; + expect(response.status).toBe(422); + expect(data.type).toBe("validation"); + }); + }); + + describe("Nested FormData with optional t.ArrayString and t.ObjectString (PATCH operation)", async () => { + const bunFilePath4 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath4) as File; + + it("PATCH with bunFile and omitted optional t.ObjectString and t.ArrayString", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + // metadata and variants fields are omitted (should be OK since they're optional) + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as patchProductModelComplex; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product Complex"); + expect(data?.metadata).toBeUndefined(); + expect(data?.variants).toBeUndefined(); + }); + + it("PATCH with file and valid t.ObjectString and t.ArrayString data", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + body.append( + "metadata", + JSON.stringify({ + category: "Electronics", + tags: ["sale", "new"], + inStock: true, + }), + ); + body.append( + "variants", + JSON.stringify([ + { + price: 15, + weight: 200, + }, + ]), + ); + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as patchProductModelComplex; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product Complex"); + expect(data?.metadata).toEqual({ + category: "Electronics", + tags: ["sale", "new"], + inStock: true, + }); + expect(data?.variants).toEqual([ + { + price: 15, + weight: 200, + }, + ]); + }); + + it("PATCH without file and omitted optional fields", async () => { + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + name: "Updated Product Complex", + }), + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) 
as patchProductModelComplex; + expect(data).not.toBeNull(); + expect(data?.name).toBe("Updated Product Complex"); + expect(data?.image).toBeUndefined(); + expect(data?.metadata).toBeUndefined(); + expect(data?.variants).toBeUndefined(); + }); + + it("PATCH should return validation error on invalid t.ObjectString", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + body.append( + "metadata", + JSON.stringify({ + categoryTypo: "Electronics", // Wrong property name + tags: ["sale"], + inStock: true, + }), + ); + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + + it("PATCH should return validation error on invalid t.ArrayString", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + body.append( + "variants", + JSON.stringify([ + { + priceTypo: 15, // Wrong property name + weight: 200, + }, + ]), + ); + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + + it("PATCH should return validation error when metadata is not valid JSON", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + body.append("metadata", "invalid-json"); + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + + it("PATCH should return validation error when variants is not valid JSON", async () => { + const body = new FormData(); + body.append("name", "Updated Product Complex"); + body.append("image", bunFile); + body.append("variants", "invalid-json"); + + const response = await app.handle( + new Request("http://localhost/product-complex/456", { + method: "PATCH", + body, + }), + ); + + expect(response.status).toBe(422); + const data = (await response.json()) as ValidationError; + expect(data.type).toBe("validation"); + }); + }); + + describe("Model reference with File and nested Object", () => { + const bunFilePath5 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath5) as File; + + it("should coerce nested Object to ObjectString when using model reference", async () => { + const app = new Elysia() + .model( + "userWithAvatar", + t.Object({ + name: t.String(), + avatar: t.File(), + metadata: t.Object({ + age: t.Number(), + }), + }), + ) + .post("/user", ({ body }) => body, { + body: "userWithAvatar", + }); + + const formData = new FormData(); + formData.append("name", "John"); + formData.append("avatar", bunFile); + formData.append("metadata", JSON.stringify({ age: 25 })); + + const response = await app.handle( + new Request("http://localhost/user", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.name).toBe("John"); + expect(data.metadata).toEqual({ age: 25 }); + }); + }); + + describe.skip("Zod (for 
standard schema) with File and nested Object", () => { + const bunFilePath6 = `${import.meta.dir}/../images/aris-yuzu.jpg`; + const bunFile = Bun.file(bunFilePath6) as File; + + it("should handle Zod schema with File and nested object (without manual coercion)", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + name: z.string(), + file: z.file().refine((file) => fileType(file, "image/jpeg")), + metadata: z.object({ + age: z.number(), + }), + }), + }); + + const formData = new FormData(); + formData.append("name", "John"); + formData.append("file", bunFile); + formData.append("metadata", JSON.stringify({ age: 25 })); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.name).toBe("John"); + expect(data.metadata).toEqual({ age: 25 }); + }); + + it("should handle array JSON strings in FormData", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + tags: z.array(z.string()), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append("tags", JSON.stringify(["tag1", "tag2", "tag3"])); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.tags).toEqual(["tag1", "tag2", "tag3"]); + }); + + it("should keep invalid JSON as string", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + description: z.string(), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append("description", "{invalid json}"); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.description).toBe("{invalid json}"); + }); + + it("should keep plain strings that are not JSON", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + comment: z.string(), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append("comment", "This is a plain comment"); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.comment).toBe("This is a plain comment"); + }); + + it("should handle nested objects in JSON", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + profile: z.object({ + user: z.object({ + name: z.string(), + age: z.number(), + }), + settings: z.object({ + notifications: z.boolean(), + }), + }), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append( + "profile", + JSON.stringify({ + user: { name: "Alice", age: 30 }, + settings: { notifications: 
true }, + }), + ); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.profile).toEqual({ + user: { name: "Alice", age: 30 }, + settings: { notifications: true }, + }); + }); + + it("should handle Zod schema with optional fields", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + name: z.string(), + description: z.string().optional(), + metadata: z + .object({ + category: z.string(), + tags: z.array(z.string()).optional(), + featured: z.boolean().optional(), + }) + .optional(), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append("name", "Test Product"); + // Omit optional fields + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.name).toBe("Test Product"); + expect(data.description).toBeUndefined(); + expect(data.metadata).toBeUndefined(); + }); + + it("should handle Zod schema with optional fields provided", async () => { + const app = new Elysia({ aot }).post("/upload", ({ body }) => body, { + body: z.object({ + file: z.file().refine((file) => fileType(file, "image/jpeg")), + name: z.string(), + description: z.string().optional(), + metadata: z + .object({ + category: z.string(), + tags: z.array(z.string()).optional(), + featured: z.boolean().optional(), + }) + .optional(), + }), + }); + + const formData = new FormData(); + formData.append("file", bunFile); + formData.append("name", "Test Product"); + formData.append("description", "A test description"); + formData.append( + "metadata", + JSON.stringify({ + category: "electronics", + tags: ["phone", "mobile"], + featured: true, + }), + ); + + const response = await app.handle( + new Request("http://localhost/upload", { + method: "POST", + body: formData, + }), + ); + + expect(response.status).toBe(200); + const data = (await response.json()) as any; + expect(data.name).toBe("Test Product"); + expect(data.description).toBe("A test description"); + expect(data.metadata).toEqual({ + category: "electronics", + tags: ["phone", "mobile"], + featured: true, + }); + }); + }); +}); diff --git a/test/type-system/object-string.test.ts b/test/type-system/object-string.test.ts index 0bc8ca2a..88fe591e 100644 --- a/test/type-system/object-string.test.ts +++ b/test/type-system/object-string.test.ts @@ -5,7 +5,14 @@ import { req } from '../utils' describe('TypeSystem - ObjectString', () => { it('Create', () => { - expect(Value.Create(t.ObjectString({}))).toBe('{}') + expect(Value.Create(t.ObjectString({}))).toBeUndefined() + expect( + Value.Create( + t.ObjectString({}, { + default: '{}' + }) + ) + ).toBe('{}') }) it('Check', () => { @@ -82,4 +89,43 @@ describe('TypeSystem - ObjectString', () => { const res2 = await app.handle(req('/?pagination={"pageLimit":1}')) expect(res2.status).toBe(422) }) + + it('Optional', async () => { + const schema = t.Object({ + name: t.String(), + metadata: t.Optional(t.ObjectString({ + pageIndex: t.Number(), + pageLimit: t.Number() + })) + }) + + expect(Value.Check(schema, { name: 'test' })).toBe(true) + expect(Value.Create(schema).metadata).toBeUndefined() + + expect(Value.Check(schema, { + name: 'test', 
+ metadata: { pageIndex: 1, pageLimit: 10 } + })).toBe(true) + expect(Value.Check(schema, { name: 'test', metadata: {} })).toBe(false) + }) + + it('Default value', async () => { + const schema = t.ObjectString({ + pageIndex: t.Number(), + pageLimit: t.Number() + }, { + default: { pageIndex: 0, pageLimit: 10 } + }) + + expect(Value.Create(schema)).toEqual({ pageIndex: 0, pageLimit: 10 }) + + expect(Value.Check(schema, { pageIndex: 1, pageLimit: 20 })).toBe(true) + expect(Value.Check(schema, { pageIndex: 0, pageLimit: 10 })).toBe(true) + expect(Value.Check(schema, JSON.stringify({ pageIndex: 1, pageLimit: 20 }))).toBe(true) + expect(Value.Check(schema, JSON.stringify({ pageIndex: 0, pageLimit: 10 }))).toBe(true) + + expect(Value.Check(schema, {})).toBe(false) + expect(Value.Check(schema, { pageIndex: 1 })).toBe(false) + expect(Value.Check(schema, undefined)).toBe(false) + }) }) diff --git a/test/units/replace-schema-type.test.ts b/test/units/replace-schema-type.test.ts index 12a70546..f29e3745 100644 --- a/test/units/replace-schema-type.test.ts +++ b/test/units/replace-schema-type.test.ts @@ -1,6 +1,11 @@ import { describe, it, expect } from 'bun:test' +import type { TSchema } from '@sinclair/typebox' import { Elysia, t } from '../../src' -import { replaceSchemaType } from '../../src/schema' +import { + replaceSchemaTypeFromManyOptions as replaceSchemaType, + revertObjAndArrStr, + coerceFormData +} from '../../src/replace-schema' import { req } from '../utils' describe('Replace Schema Type', () => { @@ -95,7 +100,7 @@ describe('Replace Schema Type', () => { }), { from: t.Number(), - to: () => t.Numeric() + to: (options) => t.Numeric(options) } ) ).toMatchObject( @@ -145,7 +150,7 @@ describe('Replace Schema Type', () => { }), { from: t.Object({}), - to: () => t.ObjectString({}), + to: (schema) => t.ObjectString(schema.properties), excludeRoot: true, untilObjectFound: false } @@ -213,4 +218,727 @@ describe('Replace Schema Type', () => { }) ) }) + + describe('Basic Transformation', () => { + it('should transform Object to ObjectString', () => { + expect( + replaceSchemaType( + t.Object({ + name: t.String() + }), + { + from: t.Object({}), + to: (s) => t.ObjectString(s.properties || {}, s) + } + ) + ).toMatchObject({ + elysiaMeta: 'ObjectString' + }) + }) + + it('should transform Array to ArrayString', () => { + expect( + replaceSchemaType(t.Array(t.String()), { + from: t.Array(t.Any()), + to: (s) => t.ArrayString(s.items || t.Any(), s) + }) + ).toMatchObject({ + elysiaMeta: 'ArrayString' + }) + }) + + it('should preserve properties after transformation', () => { + expect( + replaceSchemaType( + t.Object({ + name: t.String(), + age: t.Number() + }), + { + from: t.Object({}), + to: (s) => t.ObjectString(s.properties || {}, s) + } + ) + ).toMatchObject( + t.ObjectString({ + name: t.String(), + age: t.Number() + }) + ) + }) + }) + + describe('excludeRoot Option', () => { + it('should NOT transform root when excludeRoot is true', () => { + const result = replaceSchemaType( + t.Object({ + metadata: t.Object({ + category: t.String() + }) + }), + { + from: t.Object({}), + to: (s) => t.ObjectString(s.properties || {}, s), + excludeRoot: true + } + ) + + expect(result).toMatchObject({ + type: 'object' + }) + expect(result.elysiaMeta).toBeUndefined() + expect(result.properties.metadata).toMatchObject({ + elysiaMeta: 'ObjectString' + }) + }) + + it('should transform root when excludeRoot is false', () => { + expect( + replaceSchemaType( + t.Object({ + name: t.String() + }), + { + from: t.Object({}), + to: 
+						(s) => t.ObjectString(s.properties || {}, s),
+						excludeRoot: false
+					}
+				)
+			).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+		})
+	})
+
+	describe('onlyFirst Option', () => {
+		it('should stop traversal after first match', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					level1: t.Object({
+						level2: t.Object({
+							level3: t.String()
+						})
+					})
+				}),
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					onlyFirst: 'object',
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.level1).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			const level1ObjBranch = result.properties.level1.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(level1ObjBranch.properties.level2).toMatchObject({
+				type: 'object'
+			})
+			expect(level1ObjBranch.properties.level2.elysiaMeta).toBeUndefined()
+		})
+
+		it('should transform all siblings at same level', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					obj1: t.Object({ a: t.String() }),
+					obj2: t.Object({ b: t.String() }),
+					str: t.String()
+				}),
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					onlyFirst: 'object',
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.obj1).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.properties.obj2).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.properties.str).toMatchObject({
+				type: 'string'
+			})
+		})
+	})
+
+	describe('rootOnly Option', () => {
+		it('should only transform root, not children', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					nested: t.Object({
+						deep: t.String()
+					})
+				}),
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					rootOnly: true
+				}
+			)
+
+			expect(result).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			const objBranch = result.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(objBranch.properties.nested).toMatchObject({
+				type: 'object'
+			})
+			expect(objBranch.properties.nested.elysiaMeta).toBeUndefined()
+		})
+
+		it('should not transform if root does not match', () => {
+			expect(
+				replaceSchemaType(t.String(), {
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					rootOnly: true
+				})
+			).toMatchObject({
+				type: 'string'
+			})
+		})
+	})
+
+	describe('Double-wrapping Protection', () => {
+		it('should NOT double-wrap ObjectString', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					metadata: t.ObjectString({
+						category: t.String()
+					})
+				}),
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.metadata).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			const anyOf = result.properties.metadata.anyOf
+			const objBranch = anyOf.find((x: TSchema) => x.type === 'object')
+			expect(objBranch.elysiaMeta).toBeUndefined()
+			expect(objBranch.anyOf).toBeUndefined()
+		})
+
+		it('should NOT double-wrap ArrayString', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					items: t.ArrayString(t.String())
+				}),
+				{
+					from: t.Array(t.Any()),
+					to: (s) => t.ArrayString(s.items || t.Any(), s),
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.items).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+
+			const anyOf = result.properties.items.anyOf
+			const arrBranch = anyOf.find((x: TSchema) => x.type === 'array')
+			expect(arrBranch.elysiaMeta).toBeUndefined()
+		})
+	})
+
+	describe('Bottom-up Traversal', () => {
+		it('should transform children before parents', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					level1: t.Object({
+						level2: t.Object({
+							level3: t.String()
+						})
+					})
+				}),
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s),
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.level1).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			const level1ObjBranch = result.properties.level1.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(level1ObjBranch.properties.level2).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+		})
+	})
+
+	describe('Array of Options', () => {
+		it('should apply multiple transformations in order', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					metadata: t.Object({
+						category: t.String()
+					}),
+					tags: t.Array(t.String())
+				}),
+				[
+					{
+						from: t.Object({}),
+						to: (s) => t.ObjectString(s.properties || {}, s),
+						excludeRoot: true
+					},
+					{
+						from: t.Array(t.Any()),
+						to: (s) => t.ArrayString(s.items || t.Any(), s),
+						excludeRoot: true
+					}
+				]
+			)
+
+			expect(result.properties.metadata).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.properties.tags).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+		})
+	})
+
+	describe('Composition Types', () => {
+		it('should traverse anyOf branches', () => {
+			const result = replaceSchemaType(
+				{
+					anyOf: [
+						t.Object({ a: t.String() }),
+						t.Object({ b: t.Number() })
+					]
+				} as any,
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s)
+				}
+			)
+
+			expect(result.anyOf[0]).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.anyOf[1]).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+		})
+
+		it('should traverse oneOf branches', () => {
+			const result = replaceSchemaType(
+				{
+					oneOf: [t.Object({ type: t.String() }), t.Array(t.String())]
+				} as any,
+				{
+					from: t.Object({}),
+					to: (s) => t.ObjectString(s.properties || {}, s)
+				}
+			)
+
+			expect(result.oneOf[0]).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.oneOf[1]).toMatchObject({
+				type: 'array'
+			})
+		})
+	})
+
+	describe('Reverse Transformation Helpers', () => {
+		it('should extract plain Object from ObjectString', () => {
+			const objectString = t.ObjectString({
+				name: t.String(),
+				age: t.Number()
+			})
+
+			const result = revertObjAndArrStr(objectString)
+
+			expect(result).toMatchObject({
+				type: 'object'
+			})
+			expect(result.elysiaMeta).toBeUndefined()
+			expect(result.anyOf).toBeUndefined()
+			expect(result.properties).toMatchObject({
+				name: { type: 'string' },
+				age: { type: 'number' }
+			})
+		})
+
+		it('should return unchanged if not ObjectString', () => {
+			const plainObject = t.Object({
+				name: t.String()
+			})
+
+			const result = revertObjAndArrStr(plainObject)
+
+			expect(result).toBe(plainObject)
+		})
+
+		it('should extract plain Array from ArrayString', () => {
+			const arrayString = t.ArrayString(t.String())
+
+			const result = revertObjAndArrStr(arrayString)
+
+			expect(result).toMatchObject({
+				type: 'array'
+			})
+			expect(result.elysiaMeta).toBeUndefined()
+			expect(result.anyOf).toBeUndefined()
+			expect(result.items).toMatchObject({
+				type: 'string'
+			})
+		})
+
+		it('should return unchanged if not ArrayString', () => {
+			const plainArray = t.Array(t.String())
+
+			const result = revertObjAndArrStr(plainArray)
+
+			expect(result).toBe(plainArray)
+		})
+
+		it('should transform ObjectString back to Object', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					metadata: t.ObjectString({
+						category: t.String()
+					})
+				}),
+				{
+					from: t.ObjectString({}),
+					to: (s) => revertObjAndArrStr(s),
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.metadata).toMatchObject({
+				type: 'object'
+			})
+			expect(result.properties.metadata.elysiaMeta).toBeUndefined()
+			expect(result.properties.metadata.anyOf).toBeUndefined()
+			expect(result.properties.metadata.properties.category).toMatchObject(
+				{
+					type: 'string'
+				}
+			)
+		})
+
+		it('should transform ArrayString back to Array', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					tags: t.ArrayString(t.String())
+				}),
+				{
+					from: t.ArrayString(t.Any()),
+					to: (s) => revertObjAndArrStr(s),
+					excludeRoot: true
+				}
+			)
+
+			expect(result.properties.tags).toMatchObject({
+				type: 'array'
+			})
+			expect(result.properties.tags.elysiaMeta).toBeUndefined()
+			expect(result.properties.tags.anyOf).toBeUndefined()
+		})
+	})
+
+	describe('coerceFormData', () => {
+		it('should convert first-level Object to ObjectString (excluding root)', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					user: t.Object({
+						name: t.String(),
+						age: t.Number()
+					})
+				}),
+				coerceFormData()
+			)
+
+			// Root should remain plain Object
+			expect(result).toMatchObject({
+				type: 'object'
+			})
+			expect(result.elysiaMeta).toBeUndefined()
+
+			// First-level nested object should be converted to ObjectString
+			expect(result.properties.user).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+		})
+
+		it('should NOT convert deeper nested Objects', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					level1: t.Object({
+						level2: t.Object({
+							level3: t.Object({
+								value: t.String()
+							})
+						})
+					})
+				}),
+				coerceFormData()
+			)
+
+			// level1 should be ObjectString
+			expect(result.properties.level1).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			// level2 should remain plain Object (not converted)
+			const level1ObjBranch = result.properties.level1.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(level1ObjBranch.properties.level2).toMatchObject({
+				type: 'object'
+			})
+			expect(level1ObjBranch.properties.level2.elysiaMeta).toBeUndefined()
+
+			// level3 should also remain plain Object
+			expect(level1ObjBranch.properties.level2.properties.level3).toMatchObject({
+				type: 'object'
+			})
+			expect(level1ObjBranch.properties.level2.properties.level3.elysiaMeta).toBeUndefined()
+		})
+
+		it('should convert first-level Array to ArrayString', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					tags: t.Array(t.String())
+				}),
+				coerceFormData()
+			)
+
+			// tags should be converted to ArrayString
+			expect(result.properties.tags).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+		})
+
+		it('should NOT convert deeper nested Arrays', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					level1: t.Array(
+						t.Array(
+							t.Array(t.String())
+						)
+					)
+				}),
+				coerceFormData()
+			)
+
+			// First-level array should be ArrayString
+			expect(result.properties.level1).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+
+			// Second-level array should remain plain Array
+			const level1ArrBranch = result.properties.level1.anyOf.find(
+				(x: TSchema) => x.type === 'array'
+			)
+			expect(level1ArrBranch.items).toMatchObject({
+				type: 'array'
+			})
+			expect(level1ArrBranch.items.elysiaMeta).toBeUndefined()
+
+			// Third-level array should also remain plain Array
+			expect(level1ArrBranch.items.items).toMatchObject({
+				type: 'array'
+			})
+			expect(level1ArrBranch.items.items.elysiaMeta).toBeUndefined()
+		})
+
+		it('should handle Object with File and nested Object', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					avatar: t.File(),
+					metadata: t.Object({
+						tags: t.Array(t.String()),
+						settings: t.Object({
+							theme: t.String()
+						})
+					})
+				}),
+				coerceFormData()
+			)
+
+			// Root should remain Object
+			expect(result.type).toBe('object')
+			expect(result.elysiaMeta).toBeUndefined()
+
+			// File should remain as File
+			expect(result.properties.avatar).toMatchObject({
+				type: 'string',
+				format: 'binary'
+			})
+
+			// First-level metadata should be ObjectString
+			expect(result.properties.metadata).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			// Nested tags array should remain plain Array (not converted)
+			const metadataObjBranch = result.properties.metadata.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(metadataObjBranch.properties.tags).toMatchObject({
+				type: 'array'
+			})
+			expect(metadataObjBranch.properties.tags.elysiaMeta).toBeUndefined()
+
+			// Nested settings object should remain plain Object (not converted)
+			expect(metadataObjBranch.properties.settings).toMatchObject({
+				type: 'object'
+			})
+			expect(metadataObjBranch.properties.settings.elysiaMeta).toBeUndefined()
+		})
+
+		it('should handle Object with Files (array) and nested structures', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					images: t.Files(),
+					data: t.Object({
+						items: t.Array(
+							t.Object({
+								name: t.String()
+							})
+						)
+					})
+				}),
+				coerceFormData()
+			)
+
+			// Files should remain as Files
+			expect(result.properties.images).toMatchObject({
+				type: 'array',
+				items: {
+					type: 'string',
+					format: 'binary'
+				},
+				elysiaMeta: 'Files'
+			})
+
+			// First-level data should be ObjectString
+			expect(result.properties.data).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			// Nested items array should remain plain Array
+			const dataObjBranch = result.properties.data.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+			expect(dataObjBranch.properties.items).toMatchObject({
+				type: 'array'
+			})
+			expect(dataObjBranch.properties.items.elysiaMeta).toBeUndefined()
+
+			// Array items (objects) should remain plain Objects
+			expect(dataObjBranch.properties.items.items).toMatchObject({
+				type: 'object'
+			})
+			expect(dataObjBranch.properties.items.items.elysiaMeta).toBeUndefined()
+		})
+
+		it('should convert all first-level siblings', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					obj1: t.Object({ a: t.String() }),
+					obj2: t.Object({ b: t.Number() }),
+					arr1: t.Array(t.String()),
+					arr2: t.Array(t.Number()),
+					file: t.File(),
+					str: t.String()
+				}),
+				coerceFormData()
+			)
+
+			// All first-level objects should be ObjectString
+			expect(result.properties.obj1).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+			expect(result.properties.obj2).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			// All first-level arrays should be ArrayString
+			expect(result.properties.arr1).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+			expect(result.properties.arr2).toMatchObject({
+				elysiaMeta: 'ArrayString'
+			})
+
+			// Other types should remain unchanged
+			expect(result.properties.file).toMatchObject({
+				type: 'string',
+				format: 'binary'
+			})
+			expect(result.properties.str).toMatchObject({
+				type: 'string'
+			})
+		})
+
+		it('should handle mixed nested structures correctly', () => {
+			const result = replaceSchemaType(
+				t.Object({
+					upload: t.File(),
+					config: t.Object({
+						nested: t.Object({
+							deep: t.Array(
+								t.Object({
+									value: t.String()
+								})
+							)
+						})
+					})
+				}),
+				coerceFormData()
+			)
+
+			// config should be ObjectString
+			expect(result.properties.config).toMatchObject({
+				elysiaMeta: 'ObjectString'
+			})
+
+			const configObjBranch = result.properties.config.anyOf.find(
+				(x: TSchema) => x.type === 'object'
+			)
+
+			// nested should remain plain Object
+			expect(configObjBranch.properties.nested).toMatchObject({
+				type: 'object'
+			})
+			expect(configObjBranch.properties.nested.elysiaMeta).toBeUndefined()
+
+			// deep array should remain plain Array
+			expect(configObjBranch.properties.nested.properties.deep).toMatchObject({
+				type: 'array'
+			})
+			expect(configObjBranch.properties.nested.properties.deep.elysiaMeta).toBeUndefined()
+
+			// Array items should remain plain Objects
+			expect(configObjBranch.properties.nested.properties.deep.items).toMatchObject({
+				type: 'object'
+			})
+			expect(configObjBranch.properties.nested.properties.deep.items.elysiaMeta).toBeUndefined()
+		})
+	})
 })
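Reviewer note, outside the diff: a minimal usage sketch of the behaviour the coerceFormData tests above assert, assuming multipart bodies whose schema contains File/Files are coerced with coerceFormData() as this patch intends. The route path, field names, port, and client snippet are hypothetical and purely illustrative.

// sketch.ts - illustrative only, not part of this patch
import { Elysia, t } from 'elysia'

// Because the body contains t.File(), the first-level `metadata` object is
// expected to be accepted either as a parsed object or as a JSON string
// (ObjectString), while deeper levels of the schema stay plain.
const app = new Elysia()
	.post('/profile', ({ body }) => body.metadata, {
		body: t.Object({
			avatar: t.File(),
			metadata: t.Object({
				tags: t.Array(t.String()),
				settings: t.Object({ theme: t.String() })
			})
		})
	})
	.listen(3000)

// Client side: multipart/form-data only carries strings and files, so the
// nested structure is sent as a JSON string and coerced during validation.
const form = new FormData()
form.append('avatar', new Blob(['...']), 'avatar.png')
form.append(
	'metadata',
	JSON.stringify({ tags: ['a'], settings: { theme: 'dark' } })
)
await fetch('http://localhost:3000/profile', { method: 'POST', body: form })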