Skip to content

Commit

Permalink
add test
Browse files Browse the repository at this point in the history
  • Loading branch information
VinozzZ committed Feb 19, 2025
1 parent 0df49c8 commit 735e991
Show file tree
Hide file tree
Showing 3 changed files with 180 additions and 8 deletions.
25 changes: 17 additions & 8 deletions agent/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -193,21 +193,30 @@ func (agent *Agent) healthCheck() {
select {
case <-agent.ctx.Done():
case <-timer.C:
lastHealth := agent.lastHealth
report := healthMessage(agent.health.IsAlive())
if report.GetHealthy() {
report.Healthy = agent.health.IsReady()
}

// report health only if it has changed
if lastHealth == nil || lastHealth.GetHealthy() != report.GetHealthy() {
report := agent.calculateHealth()
if report != nil {
agent.lastHealth = report
agent.opampClient.SetHealth(report)
}
}
}
}

// calculateHealth builds a fresh health report from the agent's health
// reporter and returns it only when it differs from the last report the
// agent sent; it returns nil when nothing has changed, so the caller can
// skip redundant SetHealth calls.
func (agent *Agent) calculateHealth() *protobufs.ComponentHealth {
	report := healthMessage(agent.health.IsAlive())
	if report.GetHealthy() {
		// Being alive is not enough — the agent must also be ready.
		report.Healthy = agent.health.IsReady()
	}

	// Suppress the report when it matches the previously sent one.
	if prev := agent.lastHealth; prev != nil && prev.GetHealthy() == report.GetHealthy() {
		return nil
	}
	return report
}

func (agent *Agent) composeEffectiveConfig() *protobufs.EffectiveConfig {
configYAML, err := config.SerializeToYAML(agent.effectiveConfig)
if err != nil {
Expand Down
130 changes: 130 additions & 0 deletions agent/agent_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
package agent

import (
"context"
"fmt"
"testing"
"time"

"github.com/honeycombio/refinery/config"
"github.com/honeycombio/refinery/internal/health"
"github.com/honeycombio/refinery/logger"
"github.com/honeycombio/refinery/metrics"
"github.com/open-telemetry/opamp-go/client/types"
"github.com/open-telemetry/opamp-go/protobufs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// TestAgentOnMessage_RemoteConfig drives agent.onMessage with a sequence of
// remote-config payloads and verifies, per case, how many config reloads
// have fired and which remote-config status/hash the agent records.
//
// NOTE: a single agent (and the reloadCalled counter) is shared across all
// subtests, so expectedReloadCount is cumulative and the cases must run in
// declaration order.
func TestAgentOnMessage_RemoteConfig(t *testing.T) {
	// Incremented by the reload callback below; compared cumulatively.
	var reloadCalled int
	cfg := &config.MockConfig{
		GetLoggerLevelVal:  config.InfoLevel,
		GetSamplerTypeVal:  "FakeSamplerType",
		GetSamplerTypeName: "FakeSamplerName",
	}
	cfg.Callbacks = []config.ConfigReloadCallback{
		func(configHash, ruleCfgHash string) {
			reloadCalled++
		},
	}
	agent := NewAgent(Logger{Logger: &logger.NullLogger{}}, "1.0.0", cfg, &metrics.NullMetrics{}, &health.Health{})

	testcases := []struct {
		name                string
		configMap           map[string]*protobufs.AgentConfigFile // remote config payload delivered to onMessage
		configHash          []byte                                // hash identifying this remote config revision
		expectedReloadCount int                                   // cumulative reload count expected after this case
		status              protobufs.RemoteConfigStatuses        // expected recorded remote-config status
	}{
		{
			// An empty config map should be ignored: no reload, no status.
			name:                "empty config map",
			configMap:           map[string]*protobufs.AgentConfigFile{},
			configHash:          []byte{0},
			expectedReloadCount: 0,
		},
		{
			name: "new refinery config from remote config",
			configMap: map[string]*protobufs.AgentConfigFile{
				"refinery_config": {
					Body:        []byte(`{"Logger":{"Level":"debug"}}`),
					ContentType: "text/yaml",
				},
			},
			configHash:          []byte{1},
			expectedReloadCount: 1,
			status:              protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED,
		},
		{
			name: "new refinery rules from remote config",
			configMap: map[string]*protobufs.AgentConfigFile{
				"refinery_config": {
					Body:        []byte(`{"Logger":{"Level":"debug"}}`),
					ContentType: "text/yaml",
				},
				// NOTE(review): this body is not valid JSON (the object is
				// missing its closing "}") yet the case still expects
				// APPLIED — confirm whether the agent tolerates malformed
				// rules or this fixture should be corrected.
				"refinery_rules": {
					Body: []byte(`{"rules":[{"name":"test","type":"fake"]}`),
				},
			},
			configHash:          []byte{2},
			expectedReloadCount: 2,
			status:              protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED,
		},
		{
			// Re-delivering the same hash must not trigger another reload:
			// the cumulative count stays at 2.
			name: "same remote config should not cause reload",
			configMap: map[string]*protobufs.AgentConfigFile{
				"refinery_config": {
					Body:        []byte(`{"Logger":{"Level":"debug"}}`),
					ContentType: "text/yaml",
				},
				"refinery_rules": {
					Body: []byte(`{"rules":[{"name":"test","type":"fake"]}`),
				},
			},
			configHash:          []byte{2},
			expectedReloadCount: 2,
			status:              protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED,
		},
	}

	for _, tc := range testcases {
		tc := tc // capture for the closure (pre-Go 1.22 loop semantics)
		t.Run(tc.name, func(t *testing.T) {
			agent.onMessage(context.Background(), &types.MessageData{
				RemoteConfig: &protobufs.AgentRemoteConfig{
					Config: &protobufs.AgentConfigMap{
						ConfigMap: tc.configMap,
					},
					ConfigHash: tc.configHash,
				},
			})
			// Reload callbacks may run asynchronously; poll for the count.
			require.Eventually(t, func() bool {
				return tc.expectedReloadCount == reloadCalled
			}, 1*time.Second, 100*time.Millisecond, fmt.Sprintf("unexpected reload count %d", reloadCalled))

			require.Equal(t, tc.status, agent.remoteConfigStatus.GetStatus(), fmt.Sprintf("unexpected status %s", agent.remoteConfigStatus.GetStatus()))
			// On APPLIED, the agent should have stored both the hash and
			// the config map it received.
			if tc.status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED {
				assert.Equal(t, tc.configHash, agent.remoteConfigStatus.GetLastRemoteConfigHash())
				assert.Equal(t, tc.configHash, agent.remoteConfig.GetConfigHash())
				assert.Equal(t, tc.configMap, agent.remoteConfig.GetConfig().GetConfigMap())
			}
		})
	}

}

// TestHealthCheck walks calculateHealth through its three states: not
// alive, alive-but-not-ready, and alive-and-ready. Only the last one is
// reported as healthy.
func TestHealthCheck(t *testing.T) {
	reporter := &health.MockHealthReporter{}
	agent := NewAgent(Logger{Logger: &logger.NullLogger{}}, "1.0.0", &config.MockConfig{}, &metrics.NullMetrics{}, reporter)

	// Fresh reporter: neither alive nor ready, so unhealthy.
	require.False(t, agent.calculateHealth().Healthy)

	// Alive alone is not enough — readiness is still false.
	reporter.SetAlive(true)
	require.False(t, agent.calculateHealth().Healthy)

	// Alive and ready together yield a healthy report.
	reporter.SetReady(true)
	require.True(t, agent.calculateHealth().Healthy)
}
33 changes: 33 additions & 0 deletions internal/health/mock.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package health

import "sync"

// MockHealthReporter is a test double for the health reporter interface.
// Its zero value reports false for both liveness and readiness, and all
// accessors share one mutex so the mock is safe for concurrent use.
type MockHealthReporter struct {
	isAlive bool
	isReady bool
	mutex   sync.Mutex
}

// IsAlive reports the current liveness flag.
func (m *MockHealthReporter) IsAlive() bool {
	m.mutex.Lock()
	defer m.mutex.Unlock()
	return m.isAlive
}

// IsReady reports the current readiness flag.
func (m *MockHealthReporter) IsReady() bool {
	m.mutex.Lock()
	defer m.mutex.Unlock()
	return m.isReady
}

// SetAlive records the liveness flag for subsequent IsAlive calls.
func (m *MockHealthReporter) SetAlive(alive bool) {
	m.mutex.Lock()
	m.isAlive = alive
	m.mutex.Unlock()
}

// SetReady records the readiness flag for subsequent IsReady calls.
func (m *MockHealthReporter) SetReady(ready bool) {
	m.mutex.Lock()
	m.isReady = ready
	m.mutex.Unlock()
}

0 comments on commit 735e991

Please sign in to comment.