refactor(metrics): optimize JSON marshaling and aggregation logic

- Updated JSON marshaling in SystemInfo to use the %q verb instead of hand-quoted "%s", so string keys and values are escaped as well as quoted.
- Refactored aggregation logic to append entries instead of assigning by index, so filtered-out entries no longer leave nil holes in the result.
- Adjusted test cases to the new Aggregated.Entries structure and kept the serialization round-trip checks accurate.
Author: yusing
Date:   2025-09-14 00:07:34 +08:00
Parent: d56663d3f9
Commit: 124069aaa4
4 changed files with 79 additions and 70 deletions


@@ -47,7 +47,7 @@ func (s *SystemInfo) MarshalJSON() ([]byte, error) {
 			b = append(b, ',')
 		}
 		b = fmt.Appendf(b,
-			`"%s":{"device":"%s","path":"%s","fstype":"%s","total":%d,"free":%d,"used":%d,"used_percent":%.2f}`,
+			`%q:{"device":%q,"path":%q,"fstype":%q,"total":%d,"free":%d,"used":%d,"used_percent":%.2f}`,
 			device,
 			device,
 			disk.Path,
@@ -74,7 +74,7 @@ func (s *SystemInfo) MarshalJSON() ([]byte, error) {
 			b = append(b, ',')
 		}
 		b = fmt.Appendf(b,
-			`"%s":{"name":"%s","read_bytes":%d,"write_bytes":%d,"read_speed":%.2f,"write_speed":%.2f,"iops":%d}`,
+			`%q:{"name":%q,"read_bytes":%d,"write_bytes":%d,"read_speed":%.2f,"write_speed":%.2f,"iops":%d}`,
 			name,
 			name,
 			usage.ReadBytes,
@@ -114,7 +114,7 @@ func (s *SystemInfo) MarshalJSON() ([]byte, error) {
 			b = append(b, ',')
 		}
 		b = fmt.Appendf(b,
-			`{"name":"%s","temperature":%.2f,"high":%.2f,"critical":%.2f}`,
+			`{"name":%q,"temperature":%.2f,"high":%.2f,"critical":%.2f}`,
 			sensor.SensorKey,
 			sensor.Temperature,
 			sensor.High,

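Note on the %q change above: with the old hand-quoted `"%s"` pattern, a device name or sensor key containing a double quote or backslash would break the generated JSON, while `%q` emits a quoted, escaped string. A minimal standalone sketch of the difference (the `device` value here is made up; `%q` uses Go escaping, which matches JSON for quotes and backslashes but can diverge on exotic control characters, so this still relies on sane device/sensor names):

```go
package main

import "fmt"

func main() {
	device := `disk "0"` // hypothetical name containing a double quote

	// Old pattern: the quote is interpolated verbatim -> invalid JSON.
	bad := fmt.Appendf(nil, `"%s":{"device":"%s"}`, device, device)
	fmt.Println(string(bad)) // "disk "0"":{"device":"disk "0""}

	// New pattern: %q quotes and escapes the value.
	good := fmt.Appendf(nil, `%q:{"device":%q}`, device, device)
	fmt.Println(string(good)) // "disk \"0\"":{"device":"disk \"0\""}
}
```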

@@ -223,39 +223,39 @@ func (s *SystemInfo) collectSensorsInfo(ctx context.Context) error {
 func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 	n := len(entries)
 	aggregated := Aggregated{
-		Entries: make([]map[string]any, n),
+		Entries: make([]map[string]any, 0, n),
 		Mode:    SystemInfoAggregateMode(query.Get("aggregate")),
 	}
 	switch aggregated.Mode {
 	case SystemInfoAggregateModeCPUAverage:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.CPUAverage != nil {
-				aggregated.Entries[i] = map[string]any{
+				aggregated.Entries = append(aggregated.Entries, map[string]any{
 					"timestamp":   entry.Timestamp,
 					"cpu_average": *entry.CPUAverage,
-				}
+				})
 			}
 		}
 	case SystemInfoAggregateModeMemoryUsage:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Memory != nil {
-				aggregated.Entries[i] = map[string]any{
+				aggregated.Entries = append(aggregated.Entries, map[string]any{
 					"timestamp":    entry.Timestamp,
 					"memory_usage": entry.Memory.Used,
-				}
+				})
 			}
 		}
 	case SystemInfoAggregateModeMemoryUsagePercent:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Memory != nil {
-				aggregated.Entries[i] = map[string]any{
+				aggregated.Entries = append(aggregated.Entries, map[string]any{
 					"timestamp":            entry.Timestamp,
 					"memory_usage_percent": entry.Memory.UsedPercent,
-				}
+				})
 			}
 		}
 	case SystemInfoAggregateModeDisksReadSpeed:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.DisksIO == nil {
 				continue
 			}
@@ -264,10 +264,10 @@ func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 				m[name] = usage.ReadSpeed
 			}
 			m["timestamp"] = entry.Timestamp
-			aggregated.Entries[i] = m
+			aggregated.Entries = append(aggregated.Entries, m)
 		}
 	case SystemInfoAggregateModeDisksWriteSpeed:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.DisksIO == nil {
 				continue
 			}
@@ -276,10 +276,10 @@ func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 				m[name] = usage.WriteSpeed
 			}
 			m["timestamp"] = entry.Timestamp
-			aggregated.Entries[i] = m
+			aggregated.Entries = append(aggregated.Entries, m)
 		}
 	case SystemInfoAggregateModeDisksIOPS:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.DisksIO == nil {
 				continue
 			}
@@ -288,10 +288,10 @@ func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 				m[name] = usage.Iops
 			}
 			m["timestamp"] = entry.Timestamp
-			aggregated.Entries[i] = m
+			aggregated.Entries = append(aggregated.Entries, m)
 		}
 	case SystemInfoAggregateModeDiskUsage:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Disks == nil {
 				continue
 			}
@@ -300,32 +300,32 @@ func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 				m[name] = disk.Used
 			}
 			m["timestamp"] = entry.Timestamp
-			aggregated.Entries[i] = m
+			aggregated.Entries = append(aggregated.Entries, m)
 		}
 	case SystemInfoAggregateModeNetworkSpeed:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Network == nil {
 				continue
 			}
-			aggregated.Entries[i] = map[string]any{
+			aggregated.Entries = append(aggregated.Entries, map[string]any{
 				"timestamp": entry.Timestamp,
 				"upload":    entry.Network.UploadSpeed,
 				"download":  entry.Network.DownloadSpeed,
-			}
+			})
 		}
 	case SystemInfoAggregateModeNetworkTransfer:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Network == nil {
 				continue
 			}
-			aggregated.Entries[i] = map[string]any{
+			aggregated.Entries = append(aggregated.Entries, map[string]any{
 				"timestamp": entry.Timestamp,
 				"upload":    entry.Network.BytesSent,
 				"download":  entry.Network.BytesRecv,
-			}
+			})
 		}
 	case SystemInfoAggregateModeSensorTemperature:
-		for i, entry := range entries {
+		for _, entry := range entries {
 			if entry.Sensors == nil {
 				continue
 			}
@@ -334,7 +334,7 @@ func aggregate(entries []*SystemInfo, query url.Values) (total int, result Aggregated) {
 				m[sensor.SensorKey] = sensor.Temperature
 			}
 			m["timestamp"] = entry.Timestamp
-			aggregated.Entries[i] = m
+			aggregated.Entries = append(aggregated.Entries, m)
 		}
 	default:
 		return -1, Aggregated{}

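Note on the aggregation change: every case in `aggregate` filters its entries (nil `CPUAverage`, nil `Memory`, `continue` on missing disk/network/sensor data), so the old `make([]map[string]any, n)` plus indexed assignment left nil maps at the skipped indices, which `encoding/json` serializes as `null`. Allocating with length 0 and capacity `n`, then appending, keeps the slice dense while still doing a single up-front allocation. A small self-contained sketch of the failure mode (types and values here are made up):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	type entry struct{ keep bool }
	entries := []entry{{true}, {false}, {true}} // the middle entry is filtered out

	// Old approach: indexed assignment into a length-n slice leaves a nil
	// map wherever an entry is skipped; nil maps marshal as JSON null.
	old := make([]map[string]any, len(entries))
	for i, e := range entries {
		if e.keep {
			old[i] = map[string]any{"i": i}
		}
	}
	b, _ := json.Marshal(old)
	fmt.Println(string(b)) // [{"i":0},null,{"i":2}]

	// New approach: zero length, capacity n, append only what passes.
	dense := make([]map[string]any, 0, len(entries))
	for i, e := range entries {
		if e.keep {
			dense = append(dense, map[string]any{"i": i})
		}
	}
	b, _ = json.Marshal(dense)
	fmt.Println(string(b)) // [{"i":0},{"i":2}]
}
```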

@@ -133,11 +133,11 @@ func TestSerialize(t *testing.T) {
 	ExpectNoError(t, err)
 	var v []map[string]any
 	ExpectNoError(t, json.Unmarshal(s, &v))
-	ExpectEqual(t, len(v), len(result))
+	ExpectEqual(t, len(v), len(result.Entries))
 	for i, m := range v {
 		for k, v := range m {
 			// some int64 values are converted to float64 on json.Unmarshal
-			vv := reflect.ValueOf(result[i][k])
+			vv := reflect.ValueOf(result.Entries[i][k])
 			ExpectEqual(t, reflect.ValueOf(v).Convert(vv.Type()).Interface(), vv.Interface())
 		}
 	}
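The `reflect` dance in this test exists because `json.Unmarshal` into `map[string]any` decodes every JSON number as `float64`, so integer fields such as byte counts come back with a different dynamic type than what `aggregate` produced. A quick sketch of that behavior (field name and value are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
	"reflect"
)

func main() {
	// With a map[string]any target, all JSON numbers decode as float64.
	var m map[string]any
	_ = json.Unmarshal([]byte(`{"total":1024}`), &m)
	fmt.Printf("%T\n", m["total"]) // float64

	// Convert to the expected type before comparing, as the test does.
	want := int64(1024)
	got := reflect.ValueOf(m["total"]).Convert(reflect.TypeOf(want)).Interface()
	fmt.Println(got == any(want)) // true
}
```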
@@ -177,7 +177,7 @@ func BenchmarkSerialize(b *testing.B) {
 	b.Run("json", func(b *testing.B) {
 		for b.Loop() {
 			for _, query := range allQueries {
-				_, _ = json.Marshal([]map[string]any(queries[string(query)]))
+				_, _ = json.Marshal([]map[string]any(queries[string(query)].Entries))
 			}
 		}
 	})
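For context, `b.Loop()` in the benchmark above is `testing.B.Loop`, added in Go 1.24: it replaces the classic `for i := 0; i < b.N; i++` loop and keeps the compiler from optimizing the loop body away. A minimal sketch of the pattern (the benchmark name and payload are hypothetical):

```go
package metrics_test

import (
	"encoding/json"
	"testing"
)

// Hypothetical benchmark showing the b.Loop pattern (Go 1.24+).
func BenchmarkMarshal(b *testing.B) {
	payload := map[string]any{"timestamp": 1726243654, "cpu_average": 12.5}
	for b.Loop() { // iterates until the benchmark has collected enough samples
		_, _ = json.Marshal(payload)
	}
}
```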