Mirror of https://github.com/spf13/viper (synced 2024-11-16 10:07:00 +00:00)
Change properties codec to return flat keys
Commit: cd03c57ee4 (parent: dc76f3c0a9)
3 changed files with 13 additions and 16 deletions
internal/encoding/javaproperties/codec.go

@@ -2,11 +2,9 @@ package javaproperties
 import (
 	"bytes"
-	"sort"
-	"strings"
 
 	"github.com/magiconair/properties"
 	"github.com/spf13/cast"
+	"sort"
 )
 
 // Codec implements the encoding.Encoder and encoding.Decoder interfaces for Java properties encoding.
@@ -64,14 +62,7 @@ func (c *Codec) Decode(b []byte, v map[string]interface{}) error {
 	for _, key := range c.Properties.Keys() {
 		// ignore existence check: we know it's there
 		value, _ := c.Properties.Get(key)
-
-		// recursively build nested maps
-		path := strings.Split(key, c.keyDelimiter())
-		lastKey := strings.ToLower(path[len(path)-1])
-		deepestMap := deepSearch(v, path[0:len(path)-1])
-
-		// set innermost value
-		deepestMap[lastKey] = value
+		v[key] = value
 	}
 
 	return nil
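The behavioral core of the commit is in Decode: dotted keys are no longer split on the key delimiter and pushed into nested maps via deepSearch; each property key is stored verbatim at the top level of the target map. A minimal standalone sketch of the new behavior, using github.com/magiconair/properties directly (the sample input is made up; the loop mirrors the code above):

package main

import (
	"fmt"

	"github.com/magiconair/properties"
)

func main() {
	b := []byte("p_batters.batter.type = Regular\n")

	p, err := properties.Load(b, properties.UTF8)
	if err != nil {
		panic(err)
	}

	v := map[string]interface{}{}
	for _, key := range p.Keys() {
		// ignore existence check: we know it's there
		value, _ := p.Get(key)
		v[key] = value // flat key, no nesting
	}

	fmt.Println(v) // map[p_batters.batter.type:Regular]
}

Before this commit, the same input would have produced map[p_batters:map[batter:map[type:Regular]]].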
internal/encoding/javaproperties/codec_test.go

@@ -18,10 +18,8 @@ map.key = value
 
 // Viper's internal representation
 var data = map[string]interface{}{
 	"key": "value",
-	"map": map[string]interface{}{
-		"key": "value",
-	},
+	"map.key": "value",
 }
 
 func TestCodec_Encode(t *testing.T) {
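Spelled out, the fixture change means the shared test data now uses the flat key "map.key" instead of a nested "map" submap. A runnable restatement of the two shapes (illustrative only, not the test file itself):

package main

import "fmt"

func main() {
	// Before this commit: "map.key = value" decoded into nested maps.
	before := map[string]interface{}{
		"key": "value",
		"map": map[string]interface{}{
			"key": "value",
		},
	}

	// After this commit: the key is kept verbatim.
	after := map[string]interface{}{
		"key":     "value",
		"map.key": "value",
	}

	fmt.Println(before)
	fmt.Println(after)
}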
viper_test.go

@@ -111,6 +111,7 @@ p_type: donut
 p_name: Cake
 p_ppu: 0.55
 p_batters.batter.type: Regular
+p_batters.batter: Foo
 `)
 
 var remoteExample = []byte(`{
@@ -570,6 +571,8 @@ func TestJSON(t *testing.T) {
 func TestProperties(t *testing.T) {
 	initProperties()
 	assert.Equal(t, "0001", Get("p_id"))
+	assert.Equal(t, "Regular", Get("p_batters.batter.type"))
+	assert.Equal(t, "Foo", Get("p_batters.batter"))
 }
 
 func TestTOML(t *testing.T) {
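The new assertions exercise exactly the case the old nested decode could not represent: "p_batters.batter" holding a scalar while "p_batters.batter.type" also exists. A standalone sketch of the same checks against viper's public API (the config literal is taken from the fixture added above):

package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	cfg := []byte("p_batters.batter.type = Regular\np_batters.batter = Foo\n")

	v := viper.New()
	v.SetConfigType("properties")
	if err := v.ReadConfig(bytes.NewBuffer(cfg)); err != nil {
		panic(err)
	}

	// Both keys resolve: the scalar and the deeper dotted key coexist.
	fmt.Println(v.Get("p_batters.batter.type")) // Regular
	fmt.Println(v.Get("p_batters.batter"))      // Foo
}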
@@ -765,6 +768,7 @@ func TestAllKeys(t *testing.T) {
 		"p_id",
 		"p_ppu",
 		"p_batters.batter.type",
+		"p_batters.batter",
 		"p_type",
 		"p_name",
 		"foos",
@@ -822,7 +826,10 @@ func TestAllKeys(t *testing.T) {
 		"p_ppu": "0.55",
 		"p_name": "Cake",
 		"p_batters": map[string]interface{}{
-			"batter": map[string]interface{}{"type": "Regular"},
+			"batter": []interface{}{
+				"Foo",
+				map[string]interface{}{"type": "Regular"},
+			},
 		},
 		"p_type": "donut",
 		"foos": []map[string]interface{}{
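The most interesting expectation: "p_batters.batter" is now simultaneously a value ("Foo") and a prefix of "p_batters.batter.type", and the fixture asserts that the settings tree keeps both, grouped into one slice under "batter". A standalone sketch of the same observation (assumes only the two properties keys are loaded; the expected shape is what the fixture above asserts, not a claim beyond it):

package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.SetConfigType("properties")
	if err := v.ReadConfig(bytes.NewBufferString(
		"p_batters.batter.type = Regular\np_batters.batter = Foo\n",
	)); err != nil {
		panic(err)
	}

	// Expected per the fixture above: "batter" maps to a slice holding
	// both "Foo" and map[string]interface{}{"type": "Regular"}.
	fmt.Printf("%#v\n", v.AllSettings()["p_batters"])
}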
@@ -1556,6 +1563,7 @@ p_type = donut
 p_name = Cake
 p_ppu = 0.55
 p_batters.batter.type = Regular
+p_batters.batter = Foo
 `)
 
 // var yamlWriteExpected = []byte(`age: 35
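On the write side, the expected properties output gains the same flat key. Dotted flat keys are what you get when nested maps are flattened with the key delimiter before writing; a minimal standalone sketch of that operation (the flatten helper here is hypothetical, loosely modeled on viper's internal flattenAndMergeMap but not its actual signature):

package main

import "fmt"

// flatten joins nested map keys into delimiter-separated flat keys.
func flatten(dst, src map[string]interface{}, prefix, delim string) {
	for k, val := range src {
		key := k
		if prefix != "" {
			key = prefix + delim + k
		}
		if m, ok := val.(map[string]interface{}); ok {
			flatten(dst, m, key, delim)
			continue
		}
		dst[key] = val
	}
}

func main() {
	nested := map[string]interface{}{
		"p_batters": map[string]interface{}{
			"batter": map[string]interface{}{"type": "Regular"},
		},
	}

	flat := map[string]interface{}{}
	flatten(flat, nested, "", ".")
	fmt.Println(flat) // map[p_batters.batter.type:Regular]
}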