mirror of https://github.com/spf13/viper.git
Recurse into arrays when converting keys to lowercase
Fixes #1386

Signed-off-by: Andrew Richardson <andrew.richardson@kaleido.io>
This commit is contained in:
parent 98c63ede11
commit 5247643f02

util.go: 17 lines changed
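For context, a minimal sketch of the behavior this commit enables (assuming a viper build that includes this change; the YAML and key paths mirror the test added below):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Mixed-case keys nested inside an array element: before this commit,
	// insensitiviseMap did not recurse into slices, so these keys kept
	// their original case and case-insensitive lookups on them failed.
	yamlExample := []byte(`TV:
- title: "The Expanse"
  title_i18n:
    USA: "The Expanse"
`)

	v := viper.New()
	v.SetConfigType("yaml")
	if err := v.ReadConfig(bytes.NewBuffer(yamlExample)); err != nil {
		panic(err)
	}

	// Key lookup is now case-insensitive even under the array element.
	fmt.Println(v.GetString("tv.0.title_i18n.usa")) // "The Expanse"
}
```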
util.go

```diff
@@ -64,8 +64,7 @@ func copyAndInsensitiviseMap(m map[string]interface{}) map[string]interface{} {
 	return nm
 }
 
-func insensitiviseMap(m map[string]interface{}) {
-	for key, val := range m {
+func insensitiviseVal(val interface{}) interface{} {
 	switch val.(type) {
 	case map[interface{}]interface{}:
 		// nested map: cast and recursively insensitivise
@@ -74,8 +73,16 @@ func insensitiviseMap(m map[string]interface{}) {
 	case map[string]interface{}:
 		// nested map: recursively insensitivise
 		insensitiviseMap(val.(map[string]interface{}))
+	case []interface{}:
+		// nested array: recursively insensitivise
+		insensitiveArray(val.([]interface{}))
+	}
+	return val
 }
 
+func insensitiviseMap(m map[string]interface{}) {
+	for key, val := range m {
+		val = insensitiviseVal(val)
 		lower := strings.ToLower(key)
 		if key != lower {
 			// remove old key (not lower-cased)
@@ -86,6 +93,12 @@ func insensitiviseMap(m map[string]interface{}) {
 	}
 }
 
+func insensitiveArray(a []interface{}) {
+	for i, val := range a {
+		a[i] = insensitiviseVal(val)
+	}
+}
+
 func absPathify(logger Logger, inPath string) string {
 	logger.Info("trying to resolve absolute path", "path", inPath)
 
```
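To make the control flow above easier to follow, here is a self-contained sketch of the mutual recursion between the map and array walkers (simplified: it omits the map[interface{}]interface{} branch, which the real code converts via cast.ToStringMap, and it uses hypothetical names lowerVal/lowerMap):

```go
package main

import (
	"fmt"
	"strings"
)

// lowerVal mirrors insensitiviseVal: it descends into nested maps and
// slices so that keys at any depth get lower-cased.
func lowerVal(val interface{}) interface{} {
	switch v := val.(type) {
	case map[string]interface{}:
		lowerMap(v) // nested map: recurse
	case []interface{}:
		for i, item := range v {
			v[i] = lowerVal(item) // nested array: recurse per element
		}
	}
	return val
}

// lowerMap mirrors insensitiviseMap: lower-case each key in place,
// recursing into the value first.
func lowerMap(m map[string]interface{}) {
	for key, val := range m {
		val = lowerVal(val)
		lower := strings.ToLower(key)
		if key != lower {
			delete(m, key) // remove old key (not lower-cased)
		}
		m[lower] = val
	}
}

func main() {
	cfg := map[string]interface{}{
		"TV": []interface{}{
			map[string]interface{}{"Title": "The Expanse"},
		},
	}
	lowerMap(cfg)
	fmt.Println(cfg) // map[tv:[map[title:The Expanse]]]
}
```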
viper_test.go

```diff
@@ -2516,7 +2516,10 @@ func TestKeyDelimiter(t *testing.T) {
 }
 
 var yamlDeepNestedSlices = []byte(`TV:
-- title: "The expanse"
+- title: "The Expanse"
+  title_i18n:
+    USA: "The Expanse"
+    Japan: "エクスパンス -巨獣めざめる-"
   seasons:
   - first_released: "December 14, 2015"
     episodes:
@@ -2546,11 +2549,15 @@ func TestSliceIndexAccess(t *testing.T) {
 	err := v.unmarshalReader(r, v.config)
 	require.NoError(t, err)
 
-	assert.Equal(t, "The expanse", v.GetString("tv.0.title"))
+	assert.Equal(t, "The Expanse", v.GetString("tv.0.title"))
 	assert.Equal(t, "February 1, 2017", v.GetString("tv.0.seasons.1.first_released"))
 	assert.Equal(t, "Static", v.GetString("tv.0.seasons.1.episodes.2.title"))
 	assert.Equal(t, "December 15, 2015", v.GetString("tv.0.seasons.0.episodes.1.air_date"))
 
+	// Test nested keys with capital letters
+	assert.Equal(t, "The Expanse", v.GetString("tv.0.title_i18n.USA"))
+	assert.Equal(t, "エクスパンス -巨獣めざめる-", v.GetString("tv.0.title_i18n.Japan"))
+
 	// Test for index out of bounds
 	assert.Equal(t, "", v.GetString("tv.0.seasons.2.first_released"))
 
```