Commit e185eff

[fix] Fix a bug where array-typed members could not be assigned when deserializing JSON: if the existing array is too short, create a new one. This mainly happens when a member is declared as an array type and initialized with [], which leaves the array length at zero. NewLifeX/NewLife.Redis#131
nnhy committed May 25, 2024
1 parent b40e0e6 commit e185eff
Showing 5 changed files with 22 additions and 3 deletions.
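
A minimal repro sketch of the failure mode the commit message describes, using the ToJsonEntity extension that the tests below exercise. The Model/Scores names and the Program scaffolding are illustrative, not taken from the repository:

using System;
using NewLife.Serialization;

class Model
{
    // Array member initialized with a collection expression: the instance
    // starts with a zero-length array, the pattern that triggered the bug.
    public Int32[] Scores { get; set; } = [];
}

class Program
{
    static void Main()
    {
        // Before this commit, ParseArray reused the existing zero-length target
        // array, so the three parsed items were silently dropped.
        var model = "{\"Scores\":[1,2,3]}".ToJsonEntity<Model>();
        Console.WriteLine(model.Scores.Length); // 0 before the fix, 3 after
    }
}
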
4 changes: 3 additions & 1 deletion NewLife.Core/Serialization/Json/JsonReader.cs
@@ -117,7 +117,9 @@ private Array ParseArray(IList<Object> list, Type type, Object? target)
var elmType = type?.GetElementTypeEx();
if (elmType == null) elmType = typeof(Object);

- if (target is not Array arr) arr = Array.CreateInstance(elmType, list.Count);
+ // If the existing array is too short, create a new one. This mainly happens when
+ // a member is declared as an array type and initialized with [], leaving its length at zero.
+ if (target is not Array arr || arr.Length < list.Count) arr = Array.CreateInstance(elmType, list.Count);

// If the target array already has a size, fill at most that many items; extra items are discarded.
for (var i = 0; i < list.Count && i < arr.Length; i++)
{
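
A standalone sketch of the patched copy logic above. This is simplified: the real ParseArray converts each element through the library's own conversion pipeline, so plain Convert.ChangeType stands in here as an assumption.

using System;
using System.Collections.Generic;

static class ArrayFillSketch
{
    public static Array Fill(IList<Object> list, Type elmType, Object? target)
    {
        // If the caller's array is missing or too short (e.g. initialized
        // with []), allocate a fresh array sized to the parsed list.
        if (target is not Array arr || arr.Length < list.Count)
            arr = Array.CreateInstance(elmType, list.Count);

        // Fill at most arr.Length items; extra parsed items are discarded.
        for (var i = 0; i < list.Count && i < arr.Length; i++)
            arr.SetValue(Convert.ChangeType(list[i], elmType), i);

        return arr;
    }
}

Because a new array is allocated only when the target is missing or too short, a caller-supplied array that is already large enough keeps its identity; the loop's double bound matches the "discard the extras" comment in the diff.
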
2 changes: 2 additions & 0 deletions XUnitTest.Core/Serialization/FastJsonTest.cs
@@ -36,6 +36,8 @@ public void DateTimeTest()
{
var model = new Model();
Rand.Fill(model);
+ model.Roles = ["admin", "user"];
+ model.Scores = [1, 2, 3];
var js = _json.Write(model, true);

var models = _json.Read(_json_value, typeof(Model[])) as Model[];
2 changes: 2 additions & 0 deletions XUnitTest.Core/Serialization/JsonTest.cs
@@ -49,6 +49,8 @@ public void DateTimeTest()
{
var model = new Model();
Rand.Fill(model);
+ model.Roles = ["admin", "user"];
+ model.Scores = [1, 2, 3];
var js = model.ToJson(true);

var models = _json_value.ToJsonEntity<Model[]>();
15 changes: 13 additions & 2 deletions XUnitTest.Core/Serialization/JsonTestBase.cs
@@ -1,4 +1,5 @@
using System;
+ using NewLife;
using Xunit;

namespace XUnitTest.Serialization;
@@ -22,7 +23,9 @@ static JsonTestBase()
"Type": "NewLife.Common.PinYin",
"Offset": "2022-11-29T14:13:17.8763881+08:00",
"Date": "2022-11-29",
- "Time": "14:13:17.8763881"
+ "Time": "14:13:17.8763881",
+ "Roles": ["admin", "user"],
+ "Scores": [1, 2, 3]
},
{
"ID": 0,
@@ -36,7 +39,9 @@ static JsonTestBase()
"Type": "String",
"Offset": "2022-11-29T14:13:17.8763881+08:00",
"Date": "2022-11-29",
- "Time": "14:13:17.8763881"
+ "Time": "14:13:17.8763881",
+ "Roles": ["admin", "user"],
+ "Scores": [1, 2, 3]
}
]
""";
@@ -59,6 +64,8 @@ protected void CheckModel(Model[] models)
Assert.Equal(DateTimeOffset.Parse("2022-11-29T14:13:17.8763881+08:00"), m.Offset);
Assert.Equal(DateOnly.Parse("2022-11-29"), m.Date);
Assert.Equal(TimeOnly.Parse("14:13:17.8763881"), m.Time);
+ Assert.Equal("admin,user", m.Roles?.Join());
+ Assert.Equal("1,2,3", m.Scores?.Join());

m = models[1];
Assert.Equal(27, m.UserId);
@@ -74,6 +81,8 @@ protected void CheckModel(Model[] models)
Assert.Equal(DateTimeOffset.Parse("2022-11-29T14:13:17.8763881+08:00"), m.Offset);
Assert.Equal(DateOnly.Parse("2022-11-29"), m.Date);
Assert.Equal(TimeOnly.Parse("14:13:17.8763881"), m.Time);
+ Assert.Equal("admin,user", m.Roles?.Join());
+ Assert.Equal("1,2,3", m.Scores?.Join());
}

protected class Model
@@ -90,5 +99,7 @@ protected class Model
public DateTimeOffset Offset { get; set; }
public DateOnly Date { get; set; }
public TimeOnly Time { get; set; }
+ public String[] Roles { get; set; }
+ public Int32[] Scores { get; set; } = [];
}
}
2 changes: 2 additions & 0 deletions XUnitTest.Core/Serialization/SystemJsonTest.cs
@@ -36,6 +36,8 @@ public void DateTimeTest()
{
var model = new Model();
Rand.Fill(model);
+ model.Roles = ["admin", "user"];
+ model.Scores = [1, 2, 3];
var js = _json.Write(model, true);

var models = _json.Read(_json_value, typeof(Model[])) as Model[];
