Enable path normalization by default
Remove the `SnakeCaseParamMapper` and `KebabCaseParamMapper` from the
default parameter mappers, and add the `PathNormalizer` as a default
node transformer.

Add removal of underscores (`_`) to the path normalizer.

Fix some issues with the `HikariDataSourceDecoder` when path
normalization is enabled by default -- that decoder requires the
original key case because Hikari properties are case-sensitive.
Introduce `AbstractUnnormalizedKeysDecoder`, an abstract base class
that restores the original case of keys via each node's `sourceKey`.

Fix explicit sealed types breaking under normalization: the
discriminator field defaults to `_type`, which normalizes to `type`.
Normalization is now skipped when a key matches the discriminator
field name and the node is a `MapNode`.

Fix strict-mode reporting to use the `sourceKey` value, so that
reports show the source key rather than the normalized key.

Update Preprocessor implementations to correctly copy the source key
when new Map and Array nodes are created.
rocketraman committed Apr 4, 2024
1 parent eac8a0b commit 80c6ed8
Showing 19 changed files with 107 additions and 37 deletions.
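
To illustrate the new default behavior, here is a minimal sketch (not part of this commit). It assumes the usual `ConfigLoaderBuilder.default()` and `addMapSource` entry points seen in the tests below; the `Server` class and its keys are invented for the example:

import com.sksamuel.hoplite.ConfigLoaderBuilder
import com.sksamuel.hoplite.addMapSource

// Hypothetical config class, for illustration only.
data class Server(val retryCount: Int, val hostName: String)

fun main() {
  // "retry-count", "retry_count" and "RETRY_COUNT" all normalize to "retrycount",
  // so they bind to retryCount without SnakeCaseParamMapper or KebabCaseParamMapper.
  val server = ConfigLoaderBuilder.default()
    .addMapSource(mapOf("retry-count" to 3, "HOST_NAME" to "localhost"))
    .build()
    .loadConfigOrThrow<Server>()
  println(server) // Server(retryCount=3, hostName=localhost)
}
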
@@ -1,7 +1,6 @@
package com.sksamuel.hoplite

import com.sksamuel.hoplite.decoder.Decoder
import com.sksamuel.hoplite.decoder.DotPath
import com.sksamuel.hoplite.fp.NonEmptyList
import com.sksamuel.hoplite.internal.OverridePath
import com.sksamuel.hoplite.parsers.Parser
@@ -30,9 +29,9 @@ sealed interface ConfigFailure {
*/
fun description(): String

data class UnusedPath(val path: DotPath, val pos: Pos) : ConfigFailure {
data class UnusedPath(val decodedPath: DecodedPath) : ConfigFailure {
override fun description(): String {
return "Config value '${path.flatten()}' at ${pos.loc()} was unused"
return "Config value '${decodedPath.sourceKey ?: decodedPath.path.flatten()}' at ${decodedPath.pos.loc()} was unused"
}
}

@@ -31,6 +31,7 @@ import com.sksamuel.hoplite.sources.EnvironmentVariableOverridePropertySource
import com.sksamuel.hoplite.sources.SystemPropertiesPropertySource
import com.sksamuel.hoplite.sources.UserSettingsPropertySource
import com.sksamuel.hoplite.sources.XdgConfigPropertySource
import com.sksamuel.hoplite.transformer.PathNormalizer
import java.util.ServiceLoader

class ConfigLoaderBuilder private constructor() {
@@ -423,7 +424,9 @@ fun defaultPreprocessors(): List<Preprocessor> = listOf(
LookupPreprocessor,
)

fun defaultNodeTransformers(): List<NodeTransformer> = emptyList()
fun defaultNodeTransformers(): List<NodeTransformer> = listOf(
PathNormalizer,
)

fun defaultResolvers(): List<Resolver> = listOf(
EnvVarContextResolver,
@@ -438,8 +441,6 @@ fun defaultResolvers(): List<Resolver> = listOf(
fun defaultParamMappers(): List<ParameterMapper> = listOf(
DefaultParamMapper,
LowercaseParamMapper,
SnakeCaseParamMapper,
KebabCaseParamMapper,
AliasAnnotationParamMapper,
)
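
Projects that relied on the removed mappers (for example, because they disable the `PathNormalizer`) can re-register them explicitly. A hedged sketch, assuming `ConfigLoaderBuilder` exposes `addParameterMapper` for custom mappers:

import com.sksamuel.hoplite.ConfigLoaderBuilder
import com.sksamuel.hoplite.KebabCaseParamMapper
import com.sksamuel.hoplite.SnakeCaseParamMapper

// Re-add the snake_case and kebab-case mappers on top of the defaults.
val loader = ConfigLoaderBuilder.default()
  .addParameterMapper(SnakeCaseParamMapper)
  .addParameterMapper(KebabCaseParamMapper)
  .build()
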

@@ -75,3 +75,9 @@ data class DecoderConfig(
val flattenArraysToString: Boolean,
val resolveTypesCaseInsensitive: Boolean,
)

data class DecodedPath(
val path: DotPath,
val sourceKey: String?,
val pos: Pos,
)
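
A small illustration of how `DecodedPath` feeds the reworked `UnusedPath` failure above (a sketch; `DotPath`'s vararg constructor and `Pos.NoPos` are assumed to be available from the core module):

import com.sksamuel.hoplite.ConfigFailure
import com.sksamuel.hoplite.DecodedPath
import com.sksamuel.hoplite.Pos
import com.sksamuel.hoplite.decoder.DotPath

fun main() {
  // When a sourceKey was recorded, the strict-mode report shows the original key;
  // otherwise it falls back to the flattened, normalized path.
  val failure = ConfigFailure.UnusedPath(
    DecodedPath(path = DotPath("db", "maxpoolsize"), sourceKey = "db.MAX_POOL_SIZE", pos = Pos.NoPos)
  )
  // The description mentions 'db.MAX_POOL_SIZE' (the source key), not 'db.maxpoolsize'.
  println(failure.description())
}
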
@@ -62,6 +62,8 @@ object AliasAnnotationParamMapper : ParameterMapper {
* the snake case equivalent.
*
* For example, camelCasePilsen will become snake_case_pilsen.
*
* When using the [PathNormalizer] (which is enabled by default), this mapper is unnecessary.
*/
object SnakeCaseParamMapper : ParameterMapper {

@@ -86,6 +88,8 @@ object SnakeCaseParamMapper : ParameterMapper {
* the kebab case equivalent.
*
* For example, camelCasePilsen will become kebab-case-pilsen.
*
* When using the [PathNormalizer] (which is enabled by default), this mapper is unnecessary.
*/
object KebabCaseParamMapper : ParameterMapper {

@@ -0,0 +1,33 @@
package com.sksamuel.hoplite.decoder

import com.sksamuel.hoplite.ConfigResult
import com.sksamuel.hoplite.DecoderContext
import com.sksamuel.hoplite.MapNode
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.transform
import kotlin.reflect.KType

/**
* A decoder which decodes based on unnormalized keys.
*
* This is useful for decoders that need to know the original key names.
*
* It restores the original key names from the node source key.
*/
abstract class AbstractUnnormalizedKeysDecoder<T> : NullHandlingDecoder<T> {
override fun safeDecode(node: Node, type: KType, context: DecoderContext): ConfigResult<T> {
val unnormalizedNode = node.transform {
val sourceKey = it.sourceKey
when (it) {
is MapNode -> it.copy(map = it.map.mapKeys { (k, v) ->
(v.sourceKey ?: k).removePrefix("$sourceKey.")
})
else -> it
}
}

return safeDecodeUnnormalized(unnormalizedNode, type, context)
}

abstract fun safeDecodeUnnormalized(node: Node, type: KType, context: DecoderContext): ConfigResult<T>
}
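
For authors of custom decoders, a hedged sketch of building on the new base class. `PropertiesDecoder` is invented for the example and is not part of this commit; it only shows that `safeDecodeUnnormalized` sees the original, case-preserved map keys (the `PrimitiveNode.value` accessor is assumed):

import com.sksamuel.hoplite.ConfigResult
import com.sksamuel.hoplite.DecoderContext
import com.sksamuel.hoplite.MapNode
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.PrimitiveNode
import com.sksamuel.hoplite.decoder.AbstractUnnormalizedKeysDecoder
import com.sksamuel.hoplite.fp.valid
import java.util.Properties
import kotlin.reflect.KType

class PropertiesDecoder : AbstractUnnormalizedKeysDecoder<Properties>() {

  override fun supports(type: KType): Boolean = type.classifier == Properties::class

  override fun safeDecodeUnnormalized(node: Node, type: KType, context: DecoderContext): ConfigResult<Properties> {
    val props = Properties()
    if (node is MapNode) {
      node.map.forEach { (key, child) ->
        // key keeps its original case here, e.g. "serverName" rather than "servername"
        val value = (child as? PrimitiveNode)?.value
        if (value != null) props[key] = value
      }
    }
    return props.valid()
  }
}
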
@@ -15,15 +15,13 @@ import kotlin.reflect.full.isSubtypeOf
import kotlin.reflect.full.starProjectedType
import kotlin.reflect.full.withNullability

class MapDecoder : NullHandlingDecoder<Map<*, *>> {
class MapDecoder : AbstractUnnormalizedKeysDecoder<Map<*, *>>() {

override fun supports(type: KType): Boolean =
type.isSubtypeOf(Map::class.starProjectedType) ||
type.isSubtypeOf(Map::class.starProjectedType.withNullability(true))

override fun safeDecode(node: Node,
type: KType,
context: DecoderContext): ConfigResult<Map<*, *>> {
override fun safeDecodeUnnormalized(node: Node, type: KType, context: DecoderContext): ConfigResult<Map<*, *>> {
require(type.arguments.size == 2)

val kType = type.arguments[0].type!!
@@ -53,7 +53,7 @@ class ConfigParser(
private val contextResolverMode: ContextResolverMode,
) {

private val loader = PropertySourceLoader(nodeTransformers, classpathResourceLoader, parserRegistry, allowEmptyTree)
private val loader = PropertySourceLoader(nodeTransformers, sealedTypeDiscriminatorField, classpathResourceLoader, parserRegistry, allowEmptyTree)
private val cascader = Cascader(cascadeMode, allowEmptyTree, allowNullOverride)
private val preprocessing = Preprocessing(preprocessors, preprocessingIterations)
private val decoding = Decoding(decoderRegistry, secretsPolicy)
@@ -20,7 +20,7 @@ class DecodeModeValidator(private val mode: DecodeMode) {

private fun ensureAllUsed(result: DecodingState): ConfigResult<DecodingState> {
return if (result.unused.isEmpty()) result.valid() else {
val errors = NonEmptyList.unsafe(result.unused.map { ConfigFailure.UnusedPath(it.first, it.second) })
val errors = NonEmptyList.unsafe(result.unused.map { ConfigFailure.UnusedPath(it) })
ConfigFailure.MultipleFailures(errors).invalid()
}
}
@@ -1,14 +1,14 @@
package com.sksamuel.hoplite.internal

import com.sksamuel.hoplite.ConfigResult
import com.sksamuel.hoplite.DecodedPath
import com.sksamuel.hoplite.DecoderContext
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.NodeState
import com.sksamuel.hoplite.Pos
import com.sksamuel.hoplite.decoder.DecoderRegistry
import com.sksamuel.hoplite.decoder.DotPath
import com.sksamuel.hoplite.fp.flatMap
import com.sksamuel.hoplite.paths
import com.sksamuel.hoplite.decodedPaths
import com.sksamuel.hoplite.secrets.SecretsPolicy
import com.sksamuel.hoplite.traverse
import kotlin.reflect.KClass
@@ -35,9 +35,9 @@ internal fun createDecodingState(
context: DecoderContext,
secretsPolicy: SecretsPolicy?
): DecodingState {
val (used, unused) = root.paths()
.filterNot { it.first == DotPath.root }
.partition { context.usedPaths.contains(it.first) }
val (used, unused) = root.decodedPaths()
.filterNot { it.path == DotPath.root }
.partition { context.usedPaths.contains(it.path) }
return DecodingState(root, used, unused, createNodeStates(root, context, secretsPolicy))
}

@@ -64,7 +64,7 @@ private fun createNodeStates(

data class DecodingState(
val root: Node,
val used: List<Pair<DotPath, Pos>>,
val unused: List<Pair<DotPath, Pos>>,
val used: List<DecodedPath>,
val unused: List<DecodedPath>,
val states: List<NodeState>
)
@@ -22,6 +22,7 @@ import com.sksamuel.hoplite.transformer.NodeTransformer
*/
class PropertySourceLoader(
private val nodeTransformers: List<NodeTransformer>,
private val sealedTypeDiscriminatorField: String?,
private val classpathResourceLoader: ClasspathResourceLoader,
private val parserRegistry: ParserRegistry,
private val allowEmptyPropertySources: Boolean
@@ -49,7 +50,7 @@ class PropertySourceLoader(
.map { it.node(PropertySourceContext(parserRegistry, allowEmptyPropertySources)) }
.map { configResult ->
configResult.flatMap { node ->
nodeTransformers.fold(node) { acc, normalizer -> acc.transform { normalizer.transform(it) } }.valid()
nodeTransformers.fold(node) { acc, normalizer -> acc.transform { normalizer.transform(it, sealedTypeDiscriminatorField) } }.valid()
}
}
.sequence()
11 changes: 10 additions & 1 deletion hoplite-core/src/main/kotlin/com/sksamuel/hoplite/nodes.kt
@@ -16,7 +16,7 @@ sealed interface Node {

/**
* The original source key of this node without any normalization.
* Useful for reporting.
* Useful for reporting or potentially for custom decoders.
*/
val sourceKey: String?

@@ -113,6 +113,15 @@ fun Node.paths(): Set<Pair<DotPath, Pos>> = setOf(this.path to this.pos) + when (this) {
else -> emptySet()
}

/**
* Return all decoded paths recursively in this tree.
*/
fun Node.decodedPaths(): Set<DecodedPath> = setOf(DecodedPath(this.path, this.sourceKey, this.pos)) + when (this) {
is ArrayNode -> this.elements.flatMap { it.decodedPaths() }
is MapNode -> this.map.map { it.value.decodedPaths() }.flatten()
else -> emptySet()
}

/**
* Return all nodes in this tree, recursively transformed per the given transformer function.
*/
@@ -49,9 +49,9 @@ object LookupPreprocessor : Preprocessor {
fun handle(n: Node): Node = when (n) {
is MapNode -> {
val value = if (n.value is StringNode) replace(replace(n.value, regex1), regex2) else n.value
MapNode(n.map.map { (k, v) -> k to handle(v) }.toMap(), n.pos, n.path, value)
MapNode(n.map.map { (k, v) -> k to handle(v) }.toMap(), n.pos, n.path, value, sourceKey = n.sourceKey)
}
is ArrayNode -> ArrayNode(n.elements.map { handle(it) }, n.pos, n.path)
is ArrayNode -> ArrayNode(n.elements.map { handle(it) }, n.pos, n.path, sourceKey = n.sourceKey)
is StringNode -> replace(replace(n, regex1), regex2)
else -> n
}
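
The `LookupPreprocessor` change above is the pattern the commit message refers to: a preprocessor that rebuilds `MapNode` or `ArrayNode` instances must pass `sourceKey` through, or reporting and unnormalized-key decoders lose the original key. A hedged sketch of the same pattern in a user-defined preprocessor (`TrimValuesPreprocessor` is invented here, and the `Preprocessor.process` signature is inferred from the traversal code below):

import com.sksamuel.hoplite.ArrayNode
import com.sksamuel.hoplite.ConfigResult
import com.sksamuel.hoplite.DecoderContext
import com.sksamuel.hoplite.MapNode
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.StringNode
import com.sksamuel.hoplite.fp.valid
import com.sksamuel.hoplite.preprocessor.Preprocessor

object TrimValuesPreprocessor : Preprocessor {

  override fun process(node: Node, context: DecoderContext): ConfigResult<Node> = handle(node).valid()

  private fun handle(n: Node): Node = when (n) {
    // copy sourceKey whenever a new container node is constructed
    is MapNode -> MapNode(n.map.map { (k, v) -> k to handle(v) }.toMap(), n.pos, n.path, n.value, sourceKey = n.sourceKey)
    is ArrayNode -> ArrayNode(n.elements.map { handle(it) }, n.pos, n.path, sourceKey = n.sourceKey)
    is StringNode -> n.copy(value = n.value.trim())
    else -> n
  }
}
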
@@ -44,14 +44,14 @@ abstract class TraversingPrimitivePreprocessor : Preprocessor {
.map { it.toMap() }.flatMap { map ->
val value = if (node.value is PrimitiveNode) handle(node.value, context) else node.value.valid()
value.map { v ->
MapNode(map, node.pos, node.path, v)
MapNode(map, node.pos, node.path, v, sourceKey = node.sourceKey)
}
}
}
is ArrayNode -> {
node.elements.map { process(it, context) }.sequence()
.mapInvalid { ConfigFailure.MultipleFailures(it) }
.map { ArrayNode(it, node.pos, node.path) }
.map { ArrayNode(it, node.pos, node.path, sourceKey = node.sourceKey) }
}
is PrimitiveNode -> handle(node, context)
else -> node.valid()
@@ -7,5 +7,5 @@ import com.sksamuel.hoplite.*
* be applied at configuration loading time.
*/
interface NodeTransformer {
fun transform(node: Node): Node
fun transform(node: Node, sealedTypeDiscriminatorField: String?): Node
}
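
For reference, a hedged sketch of a third-party `NodeTransformer` written against the new two-argument signature. `TrimKeysTransformer` is invented for the example; for brevity it rewrites only the map keys, not the corresponding `DotPath` entries, which the `PathNormalizer` below also updates:

import com.sksamuel.hoplite.MapNode
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.transform
import com.sksamuel.hoplite.transformer.NodeTransformer

object TrimKeysTransformer : NodeTransformer {
  // The discriminator field is passed in so a transformer can leave it alone,
  // as PathNormalizer does; trimming whitespace is harmless, so it is ignored here.
  override fun transform(node: Node, sealedTypeDiscriminatorField: String?): Node = node.transform {
    when (it) {
      is MapNode -> it.copy(map = it.map.mapKeys { (key, _) -> key.trim() })
      else -> it
    }
  }
}
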
@@ -15,21 +15,38 @@ import com.sksamuel.hoplite.*
*
* Path normalization does the following for all node keys and each element of each node's path:
* * Removes dashes
* * Removes underscores
* * Converts to lower-case
*
* It does NOT normalize the sealed type discriminator field for map nodes.
*/
object PathNormalizer : NodeTransformer {
fun normalizePathElement(element: String): String = element.replace("-", "").lowercase()
fun normalizePathElement(element: String): String = element
.replace("-", "")
.replace("_", "")
.lowercase()

override fun transform(node: Node): Node = node
override fun transform(node: Node, sealedTypeDiscriminatorField: String?): Node = node
.transform {
val normalizedPathNode = it.withPath(
it.path.copy(keys = it.path.keys.map { key ->
normalizePathElement(key)
if (it is MapNode) normalizePathElementExceptDiscriminator(key, sealedTypeDiscriminatorField)
else normalizePathElement(key)
})
)
when (normalizedPathNode) {
is MapNode -> normalizedPathNode.copy(map = normalizedPathNode.map.mapKeys { (key, _) -> normalizePathElement(key) })
is MapNode -> normalizedPathNode.copy(map = normalizedPathNode.map.mapKeys { (key, _) ->
normalizePathElementExceptDiscriminator(key, sealedTypeDiscriminatorField)
})
else -> normalizedPathNode
}
}

private fun normalizePathElementExceptDiscriminator(element: String, sealedTypeDiscriminatorField: String?): String {
return if (sealedTypeDiscriminatorField != null && element == sealedTypeDiscriminatorField) element
else element
.replace("-", "")
.replace("_", "")
.lowercase()
}
}
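
A quick check of the normalization rules added above; `normalizePathElement` is public, so its behavior can be shown directly, while the discriminator exception is internal to `transform` and is only noted in the comment:

import com.sksamuel.hoplite.transformer.PathNormalizer

fun main() {
  // Dashes and underscores are removed, then the element is lower-cased.
  println(PathNormalizer.normalizePathElement("MAX_POOL-Size"))   // maxpoolsize
  println(PathNormalizer.normalizePathElement("camelCasePilsen")) // camelcasepilsen

  // A MapNode key equal to the sealed type discriminator field (default "_type")
  // is left untouched by transform(node, "_type"), so explicit sealed types keep
  // working after normalization.
}
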
@@ -5,6 +5,7 @@ import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe
import io.kotest.matchers.shouldNotBe
import io.kotest.matchers.types.instanceOf
import io.kotest.matchers.types.shouldBeInstanceOf

class WithoutDefaultsRegistryTest : FunSpec() {
init {
@@ -13,7 +14,7 @@ class WithoutDefaultsRegistryTest : FunSpec() {
addMapSource(mapOf("custom_value" to "\${PATH}", "PATH" to "\${PATH}"))
}
val e = loader.loadConfig<Config>()
e as Validated.Valid<Config>
e.shouldBeInstanceOf<Validated.Valid<Config>>()
e.value.customValue shouldNotBe "\${path}"
}

@@ -17,7 +17,7 @@ class PathNormalizerTest : FunSpec({
environmentVariableMap = { mapOf("A" to "a", "A.B" to "ab", "A.B.CD" to "abcd") },
).node(PropertySourceContext.empty).getUnsafe()

PathNormalizer.transform(node) shouldBe MapNode(
PathNormalizer.transform(node, null) shouldBe MapNode(
map = mapOf(
"a" to MapNode(
map = mapOf(
@@ -6,19 +6,19 @@ import com.sksamuel.hoplite.DecoderContext
import com.sksamuel.hoplite.MapNode
import com.sksamuel.hoplite.Node
import com.sksamuel.hoplite.PrimitiveNode
import com.sksamuel.hoplite.decoder.Decoder
import com.sksamuel.hoplite.decoder.AbstractUnnormalizedKeysDecoder
import com.sksamuel.hoplite.fp.invalid
import com.sksamuel.hoplite.fp.valid
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import java.util.Properties
import kotlin.reflect.KType

class HikariDataSourceDecoder : Decoder<HikariDataSource> {
class HikariDataSourceDecoder : AbstractUnnormalizedKeysDecoder<HikariDataSource>() {

override fun supports(type: KType): Boolean = type.classifier == HikariDataSource::class

override fun decode(node: Node, type: KType, context: DecoderContext): ConfigResult<HikariDataSource> {
override fun safeDecodeUnnormalized(node: Node, type: KType, context: DecoderContext): ConfigResult<HikariDataSource> {

val props = Properties()

@@ -1,7 +1,8 @@
package com.sksamuel.hoplite

import com.zaxxer.hikari.HikariDataSource
import io.kotest.assertions.throwables.shouldThrowAny
import com.zaxxer.hikari.pool.HikariPool
import io.kotest.assertions.throwables.shouldThrow
import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.string.shouldContain

@@ -10,7 +11,7 @@ class HikariDataSourceTest : StringSpec() {
"hikari datasource decoder" {
data class Config(val db: HikariDataSource)

shouldThrowAny {
shouldThrow<HikariPool.PoolInitializationException> {
ConfigLoader().loadConfigOrThrow<Config>("/hikari.yaml").db
}.cause?.cause?.message shouldContain "serverhost"
}