From 43d674ca59e3ada68d45e10b84946252be48fdbf Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Sat, 23 Nov 2024 21:41:55 +0700
Subject: [PATCH 1/8] feat: allow local evaluation

fix: missing e2e tests

fix: invalid API Key for test e2e

test: update e2e test for event

fix: e2e test failure

fix: typo in segment user test

chore: remove BKTClientImpl from index.js

feat: allow local evaluation
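
Local evaluation resolves feature flags in-process by polling and caching
feature flags and segment users over gRPC, instead of calling the evaluation
API on every variation call. A minimal usage sketch follows (host, tag,
interval, and feature id are illustrative; as in the new e2e tests, this mode
is exercised with a server-role API key and the client is imported from the
built `lib` output):

    import { initialize, DefaultLogger } from '../lib';

    async function main() {
      const client = initialize({
        host: 'api.example.jp',
        token: process.env.SERVER_ROLE_TOKEN!,
        tag: 'nodejs',
        enableLocalEvaluation: true, // evaluate flags locally from the cache
        cachePollingInterval: 60000, // cache refresh interval in milliseconds
        logger: new DefaultLogger('error'),
      });

      // Give the cache processors time to finish the first refresh.
      await new Promise((resolve) => setTimeout(resolve, 5000));

      const user = { id: 'user-1', data: {} };
      const enabled = await client.booleanVariation(user, 'my-feature-id', false);
      console.log(enabled);

      client.destroy();
    }

    main();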
---
 .github/workflows/e2e.yml                     |    4 +-
 .github/workflows/push.yml                    |    1 +
 ava-e2e.config.mjs                            |    6 +-
 ava-test.config.mjs                           |   19 +-
 e2e/client.ts                                 |    5 +-
 e2e/constants/constants.ts                    |    2 +
 e2e/evaluations_defaut_strategy.ts            |    2 +-
 e2e/evaluations_segment_user.ts               |  108 ++
 e2e/evaluations_targeting_strategy.ts         |    2 +-
 e2e/events.ts                                 |   25 +-
 e2e/local_evaluation/client.ts                |  116 ++
 .../evaluation_defaut_strategy.ts             |  121 ++
 .../evaluation_segment_user.ts                |  113 ++
 .../evaluation_targeting_strategy.ts          |  113 ++
 e2e/local_evaluation/events.ts                |  107 ++
 package.json                                  |    7 +
 src/__tests__/cache/feature_cache.ts          |   67 +
 src/__tests__/cache/in_memory_cache.ts        |   51 +
 .../cache/processor/featureCache/in_memory.ts |  122 ++
 .../cache/processor/featureCache/polling.ts   |  121 ++
 .../cache/processor/featureCache/update.ts    |  557 +++++++
 .../processor/segementUsersCache/polling.ts   |  112 ++
 .../processor/segementUsersCache/update.ts    |  393 +++++
 src/__tests__/cache/segements_user.ts         |   80 +
 src/__tests__/client_local_evaluation.ts      | 1293 +++++++++++++++++
 src/__tests__/evaluator/evaluator.ts          |  500 +++++++
 src/__tests__/event_emiter.ts                 |  101 ++
 src/__tests__/gprc/client.ts                  |   56 +
 src/__tests__/mocks/cache.ts                  |   22 +
 src/__tests__/mocks/gprc.ts                   |   23 +
 src/__tests__/schedule.ts                     |   38 +
 .../typeConverter/converter_default.ts        |   28 +
 .../typeConverter/converter_string_to_bool.ts |   32 +
 .../typeConverter/converter_string_to_num.ts  |   36 +
 .../converter_string_to_object.ts             |   50 +
 src/cache/cache.ts                            |    9 +
 src/cache/features.ts                         |   40 +
 src/cache/inMemoryCache.ts                    |   66 +
 src/cache/namespace.ts                        |   55 +
 .../processor/featureFlagCacheProcessor.ts    |  163 +++
 .../processor/segmentUsersCacheProcessor.ts   |  146 ++
 src/cache/segmentUsers.ts                     |   53 +
 src/client.ts                                 |  341 +++++
 src/config.ts                                 |   21 +
 src/evaluator/evaluator.ts                    |   11 +
 src/evaluator/local.ts                        |  141 ++
 src/grpc/client.ts                            |  142 ++
 src/index.ts                                  |  328 +----
 src/objects/apiId.ts                          |   12 +-
 src/objects/metricsEvent.ts                   |   31 +-
 src/objects/reason.ts                         |    5 +-
 src/objects/request.ts                        |    2 +-
 src/objects/response.ts                       |    2 +-
 src/objects/status.ts                         |   44 +-
 src/processorEventsEmitter.ts                 |   30 +
 src/utils/clock.ts                            |    7 +
 tsconfig.json                                 |    2 +
 yarn.lock                                     |  236 ++-
 58 files changed, 6036 insertions(+), 284 deletions(-)
 create mode 100644 e2e/evaluations_segment_user.ts
 create mode 100644 e2e/local_evaluation/client.ts
 create mode 100644 e2e/local_evaluation/evaluation_defaut_strategy.ts
 create mode 100644 e2e/local_evaluation/evaluation_segment_user.ts
 create mode 100644 e2e/local_evaluation/evaluation_targeting_strategy.ts
 create mode 100644 e2e/local_evaluation/events.ts
 create mode 100644 src/__tests__/cache/feature_cache.ts
 create mode 100644 src/__tests__/cache/in_memory_cache.ts
 create mode 100644 src/__tests__/cache/processor/featureCache/in_memory.ts
 create mode 100644 src/__tests__/cache/processor/featureCache/polling.ts
 create mode 100644 src/__tests__/cache/processor/featureCache/update.ts
 create mode 100644 src/__tests__/cache/processor/segementUsersCache/polling.ts
 create mode 100644 src/__tests__/cache/processor/segementUsersCache/update.ts
 create mode 100644 src/__tests__/cache/segements_user.ts
 create mode 100644 src/__tests__/client_local_evaluation.ts
 create mode 100644 src/__tests__/evaluator/evaluator.ts
 create mode 100644 src/__tests__/event_emiter.ts
 create mode 100644 src/__tests__/gprc/client.ts
 create mode 100644 src/__tests__/mocks/cache.ts
 create mode 100644 src/__tests__/mocks/gprc.ts
 create mode 100644 src/__tests__/schedule.ts
 create mode 100644 src/__tests__/typeConverter/converter_default.ts
 create mode 100644 src/__tests__/typeConverter/converter_string_to_bool.ts
 create mode 100644 src/__tests__/typeConverter/converter_string_to_num.ts
 create mode 100644 src/__tests__/typeConverter/converter_string_to_object.ts
 create mode 100644 src/cache/cache.ts
 create mode 100644 src/cache/features.ts
 create mode 100644 src/cache/inMemoryCache.ts
 create mode 100644 src/cache/namespace.ts
 create mode 100644 src/cache/processor/featureFlagCacheProcessor.ts
 create mode 100644 src/cache/processor/segmentUsersCacheProcessor.ts
 create mode 100644 src/cache/segmentUsers.ts
 create mode 100644 src/client.ts
 create mode 100644 src/evaluator/evaluator.ts
 create mode 100644 src/evaluator/local.ts
 create mode 100644 src/grpc/client.ts
 create mode 100644 src/processorEventsEmitter.ts
 create mode 100644 src/utils/clock.ts
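
Reviewer note: a rough sketch of the polling update flow that the new feature
flag cache processor tests exercise. This is not the implementation in
src/cache/processor/featureFlagCacheProcessor.ts; the dependency shape is
invented for illustration and ordering/error handling are simplified. It only
mirrors what the unit tests assert (cached featureFlagsId / requestedAt,
latency and size metrics with apiId 4, forceUpdate vs. archived ids, and an
'error' event on failure).

    // Sketch only: one polling iteration of the feature flag cache processor.
    const GET_FEATURE_FLAGS_API_ID = 4; // ApiId.GET_FEATURE_FLAGS

    interface FlagsResponse {
      getFeatureFlagsId(): string;
      getRequestedAt(): number;
      getForceUpdate(): boolean;
      getFeaturesList(): unknown[];
      getArchivedFeatureFlagIdsList(): string[];
      serializeBinary(): Uint8Array;
    }

    async function updateFeatureFlagCacheOnce(deps: {
      getCachedId: () => Promise<string>;          // FEATURE_FLAG_ID key
      getCachedRequestedAt: () => Promise<number>; // FEATURE_FLAG_REQUESTED_AT key
      saveId: (id: string) => Promise<void>;
      saveRequestedAt: (at: number) => Promise<void>;
      putFeature: (feature: unknown) => Promise<void>;
      deleteFeature: (id: string) => Promise<void>;
      deleteAllFeatures: () => Promise<void>;
      getFeatureFlags: (req: {
        tag: string;
        featureFlagsId: string;
        requestedAt: number;
      }) => Promise<FlagsResponse>;
      emit: (event: string, payload: Record<string, unknown>) => void;
      featureTag: string;
    }): Promise<void> {
      try {
        const featureFlagsId = await deps.getCachedId();
        const requestedAt = await deps.getCachedRequestedAt();
        const started = Date.now();
        const res = await deps.getFeatureFlags({ tag: deps.featureTag, featureFlagsId, requestedAt });
        // Report latency (seconds) and response size, as the polling test expects.
        deps.emit('pushLatencyMetricsEvent', {
          latency: (Date.now() - started) / 1000,
          apiId: GET_FEATURE_FLAGS_API_ID,
        });
        deps.emit('pushSizeMetricsEvent', {
          size: res.serializeBinary().length,
          apiId: GET_FEATURE_FLAGS_API_ID,
        });
        if (res.getForceUpdate()) {
          // Force update: rebuild the whole feature cache from the response.
          await deps.deleteAllFeatures();
        } else {
          // Incremental update: drop archived flags, keep the rest.
          for (const id of res.getArchivedFeatureFlagIdsList()) {
            await deps.deleteFeature(id);
          }
        }
        for (const feature of res.getFeaturesList()) {
          await deps.putFeature(feature);
        }
        await deps.saveId(res.getFeatureFlagsId());
        await deps.saveRequestedAt(res.getRequestedAt());
      } catch (error) {
        deps.emit('error', { error, apiId: GET_FEATURE_FLAGS_API_ID });
      }
    }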

diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 00a9925..20882f3 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -8,6 +8,8 @@ on:
         required: true
       E2E_TOKEN:
         required: true
+      E2E_SERVER_ROLE_TOKEN:
+        required: true
       NPM_TOKEN:
         required: true
 
@@ -43,5 +45,5 @@ jobs:
         run: make build
       - name: e2e test
         run: |
-          sed -i -e "s|<HOST>|${{ secrets.E2E_HOST }}|" -e "s|<TOKEN>|${{ secrets.E2E_TOKEN }}|" ava-e2e.config.mjs
+          sed -i -e "s|<HOST>|${{ secrets.E2E_HOST }}|" -e "s|<TOKEN>|${{ secrets.E2E_TOKEN }}|" -e "s|<SERVER_ROLE_TOKEN>|${{ secrets.E2E_SERVER_ROLE_TOKEN }}|" ava-e2e.config.mjs
           make e2e
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index aea56bc..160c4f6 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -123,5 +123,6 @@ jobs:
     secrets:
       E2E_HOST: ${{ secrets.E2E_HOST }}
       E2E_TOKEN: ${{ secrets.E2E_TOKEN }}
+      E2E_SERVER_ROLE_TOKEN: ${{ secrets.E2E_SERVER_ROLE_TOKEN }}
       NPM_TOKEN: ${{ secrets.NPM_TOKEN}}
 
diff --git a/ava-e2e.config.mjs b/ava-e2e.config.mjs
index 7341c9d..90cae8b 100644
--- a/ava-e2e.config.mjs
+++ b/ava-e2e.config.mjs
@@ -5,9 +5,13 @@ export default {
       configFile: false,
     },
   },
-  files: ['__e2e/__test__/*.js'],
+  files: [
+    '__e2e/__test__/*.js',
+    '__e2e/__test__/local_evaluation/*.js'
+  ],
   environmentVariables: {
     HOST: '<HOST>', // replace this. e.g. api-dev.bucketeer.jp
     TOKEN: '<TOKEN>', // replace this.
+    SERVER_ROLE_TOKEN: '<SERVER_ROLE_TOKEN>', // replace this with the server role token for testing local evaluation
   },
 };
diff --git a/ava-test.config.mjs b/ava-test.config.mjs
index aa8a8be..3c155b7 100644
--- a/ava-test.config.mjs
+++ b/ava-test.config.mjs
@@ -1,9 +1,26 @@
 export default {
+  failFast: true,
+  failWithoutAssertions: false,
   babel: {
     testOptions: {
       babelrc: false,
       configFile: false,
     },
   },
-  files: ['__test/**/__tests__/*.js'],
+  files: [
+    '__test/**/__tests__/**/*.js',
+    '!__test/**/__tests__/utils/**',
+    '!__test/**/__tests__/testdata/**',
+    '!__test/**/__tests__/mocks/**',
+  ],
+  "typescript": {
+			"extensions": [
+				"ts",
+				"tsx"
+			],
+			"rewritePaths": {
+				"src/": "build/"
+			},
+      "compile": "tsc"
+		}
 };
diff --git a/e2e/client.ts b/e2e/client.ts
index 9171140..e5eb0b6 100644
--- a/e2e/client.ts
+++ b/e2e/client.ts
@@ -1,15 +1,14 @@
 import test from 'ava'
-import { initialize, DefaultLogger, BKTClientImpl } from '../lib';
+import { initialize, DefaultLogger } from '../lib';
 import { HOST, TOKEN, FEATURE_TAG, TARGETED_USER_ID, FEATURE_ID_BOOLEAN } from './constants/constants';
 import { MetricsEvent, isMetricsEvent } from '../lib/objects/metricsEvent';
 import { ApiId } from '../lib/objects/apiId';
+import { BKTClientImpl } from '../lib/client';
 
 const FORBIDDEN_ERROR_METRICS_EVENT_NAME =
   'type.googleapis.com/bucketeer.event.client.ForbiddenErrorMetricsEvent';
 const NOT_FOUND_ERROR_METRICS_EVENT_NAME =
   'type.googleapis.com/bucketeer.event.client.NotFoundErrorMetricsEvent';
-const UNKNOWN_ERROR_METRICS_EVENT_NAME =
-  'type.googleapis.com/bucketeer.event.client.UnknownErrorMetricsEvent';
 
 //Note: There is a different compared to other SDK clients.
 test('Using a random string in the api key setting should not throw exception', async (t) => {
diff --git a/e2e/constants/constants.ts b/e2e/constants/constants.ts
index 5f8cc7b..f3098e0 100644
--- a/e2e/constants/constants.ts
+++ b/e2e/constants/constants.ts
@@ -1,7 +1,9 @@
 export const HOST = process.env.HOST!;
 export const TOKEN = process.env.TOKEN!;
+export const SERVER_ROLE_TOKEN = process.env.SERVER_ROLE_TOKEN!;
 export const FEATURE_TAG = 'nodejs'
 export const TARGETED_USER_ID = 'bucketeer-nodejs-server-user-id-1'
+export const TARGETED_SEGMENT_USER_ID = 'bucketeer-nodejs-server-user-id-2'
 
 export const FEATURE_ID_BOOLEAN = 'feature-nodejs-server-e2e-boolean'
 export const FEATURE_ID_STRING = 'feature-nodejs-server-e2e-string'
diff --git a/e2e/evaluations_defaut_strategy.ts b/e2e/evaluations_defaut_strategy.ts
index 2c3f165..22b96e3 100644
--- a/e2e/evaluations_defaut_strategy.ts
+++ b/e2e/evaluations_defaut_strategy.ts
@@ -43,7 +43,7 @@ test('stringVariation', async (t) => {
     await bktClient.stringVariationDetails(defaultUser, FEATURE_ID_STRING, ''),
     {
       featureId: FEATURE_ID_STRING,
-      featureVersion: 4,
+      featureVersion: 22,
       userId: defaultUser.id,
       variationId: '16a9db43-dfba-485c-8300-8747af5caf61',
       variationName: 'variation 1',
diff --git a/e2e/evaluations_segment_user.ts b/e2e/evaluations_segment_user.ts
new file mode 100644
index 0000000..97f11d9
--- /dev/null
+++ b/e2e/evaluations_segment_user.ts
@@ -0,0 +1,108 @@
+import anyTest, { TestFn } from 'ava';
+import { Bucketeer, DefaultLogger, User, initialize } from '../lib';
+import { HOST, FEATURE_TAG, TARGETED_SEGMENT_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, TOKEN } from './constants/constants';
+
+const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedSegmentUser: User }>;
+
+test.before( async (t) => {
+  t.context = {
+    bktClient: initialize({
+      host: HOST,
+      token: TOKEN,
+      tag: FEATURE_TAG,
+      logger: new DefaultLogger('error'),
+      enableLocalEvaluation: false,
+      cachePollingInterval: 3000,
+    }),
+    targetedSegmentUser: { id: TARGETED_SEGMENT_USER_ID, data: {} },
+  };   
+});
+
+test.after(async (t) => {
+  const { bktClient } = t.context;
+  bktClient.destroy();
+});
+
+test('boolVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.booleanVariation(targetedSegmentUser, FEATURE_ID_BOOLEAN, false), true);
+  t.deepEqual(
+    await bktClient.booleanVariationDetails(targetedSegmentUser, FEATURE_ID_BOOLEAN, false),
+    {
+      featureId: FEATURE_ID_BOOLEAN,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: 'f948b6dd-c366-4828-8ee0-72edbe2c0eea',
+      variationName: 'variation 1',
+      variationValue: true,
+      reason: 'DEFAULT',
+    }
+  )
+});
+
+test('stringVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.stringVariation(targetedSegmentUser, FEATURE_ID_STRING, ''), 'value-3');
+  t.deepEqual(
+    await bktClient.stringVariationDetails(targetedSegmentUser, FEATURE_ID_STRING, 'true'),
+    {
+      featureId: FEATURE_ID_STRING,
+      featureVersion: 22,
+      userId: targetedSegmentUser.id,
+      variationId: 'e92fa326-2c7a-45f2-aaf7-ab9eb59f0ccf',
+      variationName: 'variation 3',
+      variationValue: 'value-3',
+      reason: 'RULE',
+    }
+  )
+});
+
+test('numberVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_INT, 0), 10);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_INT, 1),
+    {
+      featureId: FEATURE_ID_INT,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: '935ac588-c3ef-4bc8-915b-666369cdcada',
+      variationName: 'variation 1',
+      variationValue: 10,
+      reason: 'DEFAULT',
+    }
+  )
+
+  t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_FLOAT, 0.0), 2.1);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_FLOAT, 1.1),
+    {
+      featureId: FEATURE_ID_FLOAT,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: '0b04a309-31cd-471f-acf0-0ea662d16737',
+      variationName: 'variation 1',
+      variationValue: 2.1,
+      reason: 'DEFAULT',
+    }
+  )
+
+});
+
+test('objectVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.deepEqual(await bktClient.getJsonVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(await bktClient.objectVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(
+    await bktClient.objectVariationDetails(targetedSegmentUser, FEATURE_ID_JSON, {}),
+    {
+      featureId: FEATURE_ID_JSON,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: 'ff8299ed-80c9-4d30-9e92-a55750ad3ffb',
+      variationName: 'variation 1',
+      variationValue: { str: 'str1', int: 'int1' },
+      reason: 'DEFAULT',
+    }
+  )
+});
diff --git a/e2e/evaluations_targeting_strategy.ts b/e2e/evaluations_targeting_strategy.ts
index d491023..c5cee69 100644
--- a/e2e/evaluations_targeting_strategy.ts
+++ b/e2e/evaluations_targeting_strategy.ts
@@ -44,7 +44,7 @@ test('stringVariation', async (t) => {
     await bktClient.stringVariationDetails(targetedUser, FEATURE_ID_STRING, 'true'),
     {
       featureId: FEATURE_ID_STRING,
-      featureVersion: 4,
+      featureVersion: 22,
       userId: targetedUser.id,
       variationId: 'a3336346-931e-40f4-923a-603c642285d7',
       variationName: 'variation 2',
diff --git a/e2e/events.ts b/e2e/events.ts
index 3713dde..2bfa514 100644
--- a/e2e/events.ts
+++ b/e2e/events.ts
@@ -1,10 +1,11 @@
 import anyTest, { TestFn } from 'ava';
 import { Bucketeer, DefaultLogger, User, initialize } from '../lib';
 import { HOST, TOKEN, FEATURE_TAG, TARGETED_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, GOAL_ID, GOAL_VALUE } from './constants/constants';
-import { BKTClientImpl } from '../lib';
+import { BKTClientImpl } from '../lib/client';
 import { isGoalEvent } from '../lib/objects/goalEvent';
 import { isMetricsEvent } from '../lib/objects/metricsEvent';
 import { isEvaluationEvent } from '../lib/objects/evaluationEvent';
+import { isStatusErrorMetricsEvent } from '../lib/objects/status';
 
 const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedUser: User }>;
 
@@ -32,7 +33,7 @@ test('goal event', async (t) => {
   t.true(events.some((e: { event: any; }) => (isGoalEvent(e.event))))
 });
 
-test('default evaluation event', async (t) => {
+test('evaluation event', async (t) => {
   const { bktClient, targetedUser } = t.context;
   t.is(await bktClient.booleanVariation(targetedUser, FEATURE_ID_BOOLEAN, true), false);
   t.deepEqual(await bktClient.getJsonVariation(targetedUser, FEATURE_ID_JSON, {}), { "str": "str2", "int": "int2" });
@@ -48,8 +49,28 @@ test('default evaluation event', async (t) => {
   t.true(events.some((e) => (isMetricsEvent(e.event))));
 });
 
+test('default evaluation event', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  const notFoundFeatureId = 'not-found-feature-id';
+  t.is(await bktClient.booleanVariation(targetedUser, notFoundFeatureId, true), true);
+  t.deepEqual(await bktClient.getJsonVariation(targetedUser, notFoundFeatureId, { "str": "str2",}), { "str": "str2" });
+  t.deepEqual(await bktClient.objectVariation(targetedUser, notFoundFeatureId, { "str": "str2" }),  { "str": "str2" });
+  t.is(await bktClient.numberVariation(targetedUser, notFoundFeatureId, 10), 10);
+  t.is(await bktClient.numberVariation(targetedUser, notFoundFeatureId, 3.3), 3.3);
+  t.is(await bktClient.stringVariation(targetedUser, notFoundFeatureId, 'value-9'), 'value-9');
+  const bktClientImpl = bktClient as BKTClientImpl
+  const events = bktClientImpl.eventStore.getAll()
+  // (DefaultEvaluationEvent, Error Event) x 6
+  t.is(events.length, 12);
+  t.true(events.some((e) => (isEvaluationEvent(e.event))));
+  t.true(events.some((e) => (isMetricsEvent(e.event))));
+  t.true(events.some((e) => (isStatusErrorMetricsEvent(e.event, NOT_FOUND_ERROR_METRICS_EVENT_NAME))));
+});
+
 test.afterEach(async (t) => {
   const { bktClient } = t.context;
   bktClient.destroy();
 });
 
+const NOT_FOUND_ERROR_METRICS_EVENT_NAME =
+  'type.googleapis.com/bucketeer.event.client.NotFoundErrorMetricsEvent';
diff --git a/e2e/local_evaluation/client.ts b/e2e/local_evaluation/client.ts
new file mode 100644
index 0000000..544c1c6
--- /dev/null
+++ b/e2e/local_evaluation/client.ts
@@ -0,0 +1,116 @@
+import test from 'ava'
+import { initialize, DefaultLogger } from '../../lib';
+import { HOST, TOKEN, FEATURE_TAG, TARGETED_USER_ID, FEATURE_ID_BOOLEAN, SERVER_ROLE_TOKEN } from '../constants/constants';
+import { isMetricsEvent } from '../../lib/objects/metricsEvent';
+import { BKTClientImpl } from '../../lib/client';
+
+test('Using a random string in the api key setting should not throw exception', async (t) => {
+  const bktClient = initialize({
+    host: HOST,
+    token: "TOKEN_RANDOM",
+    tag: FEATURE_TAG,
+    cachePollingInterval: 1000,
+    enableLocalEvaluation: true,
+    logger: new DefaultLogger("error")
+  });
+
+  await new Promise((resolve) => {
+    setTimeout(resolve, 3000);
+  });
+
+  const user = { id: TARGETED_USER_ID, data: {} }
+  // The client cannot load the evaluations, so we will receive the default value `true`.
+  // Other SDK clients' e2e tests expect the value to be `false`.
+  const result = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, true));
+  t.true(result);
+
+  const bktClientImpl = bktClient as BKTClientImpl
+  const events = bktClientImpl.eventStore.getAll()
+  t.true(events.some((e) => {
+    return isMetricsEvent(e.event);
+  }));
+
+  bktClient.destroy()
+});
+
+test('Altering the featureTag should not affect the api request', async (t) => {
+  const config = {
+    host: HOST,
+    token: SERVER_ROLE_TOKEN,
+    tag: FEATURE_TAG,
+    cachePollingInterval: 1000,
+    enableLocalEvaluation: true,
+    logger: new DefaultLogger("error")
+  }
+
+  const bktClient = initialize(config);
+  await new Promise((resolve) => {
+    setTimeout(resolve, 3000);
+  });
+
+  const user = { id: TARGETED_USER_ID, data: {} }
+  const result = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, false));
+  t.true(result);
+  config.tag = "RANDOME"
+
+  const resultAfterAlterAPIKey = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, false));
+  t.true(resultAfterAlterAPIKey);
+
+  bktClient.destroy()
+});
+
+test('Altering the api key should not affect api request', async (t) => {
+  const config = {
+    host: HOST,
+    token: SERVER_ROLE_TOKEN,
+    tag: FEATURE_TAG,
+    cachePollingInterval: 1000,
+    enableLocalEvaluation: true,
+    logger: new DefaultLogger("error")
+  }
+
+  const bktClient = initialize(config);
+  await new Promise((resolve) => {
+    setTimeout(resolve, 3000);
+  });
+
+  const user = { id: TARGETED_USER_ID, data: {} }
+  const result = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, false));
+  t.true(result);
+  config.token = "RANDOME"
+
+  const resultAfterAlterAPIKey = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, false));
+  t.true(resultAfterAlterAPIKey);
+
+  bktClient.destroy()
+});
+
+// Note: There is a difference compared to other SDK clients.
+test('Using a random string in the featureTag setting should affect api request', async (t) => {
+  const bktClient = initialize({
+    host: HOST,
+    token: SERVER_ROLE_TOKEN,
+    tag: "RANDOM",
+    cachePollingInterval: 1000,
+    enableLocalEvaluation: true,
+    logger: new DefaultLogger("error")
+  });
+
+  await new Promise((resolve) => {
+    setTimeout(resolve, 3000);
+  });
+
+  const user = { id: TARGETED_USER_ID, data: {} }
+  const result = await t.notThrowsAsync(bktClient.booleanVariation(user, FEATURE_ID_BOOLEAN, true));
+  // The client cannot load the evaluations, so we will receive the default value `true`.
+  // Other SDK clients' e2e tests expect the value to be `false`.
+  t.true(result);
+
+  const bktClientImpl = bktClient as BKTClientImpl
+  const events = bktClientImpl.eventStore.getAll()
+  t.true(events.some((e) => {
+    return isMetricsEvent(e.event);
+  }));
+
+  bktClient.destroy()
+});
\ No newline at end of file
diff --git a/e2e/local_evaluation/evaluation_defaut_strategy.ts b/e2e/local_evaluation/evaluation_defaut_strategy.ts
new file mode 100644
index 0000000..23fa019
--- /dev/null
+++ b/e2e/local_evaluation/evaluation_defaut_strategy.ts
@@ -0,0 +1,121 @@
+import anyTest, { TestFn } from 'ava';
+import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
+import {
+  HOST,
+  SERVER_ROLE_TOKEN,
+  FEATURE_TAG,
+  FEATURE_ID_BOOLEAN,
+  FEATURE_ID_STRING,
+  FEATURE_ID_INT,
+  FEATURE_ID_JSON,
+  FEATURE_ID_FLOAT,
+} from '../constants/constants';
+
+const test = anyTest as TestFn<{ bktClient: Bucketeer; defaultUser: User }>;
+
+test.before( async (t) => {
+  t.context = {
+    bktClient: initialize({
+      host: HOST,
+      token: SERVER_ROLE_TOKEN,
+      tag: FEATURE_TAG,
+      logger: new DefaultLogger('error'),
+      enableLocalEvaluation: true,
+      cachePollingInterval: 3000,
+    }),
+    defaultUser: { id: 'user-1', data: {} },
+  };   
+  // Wait for the cache to become available
+  await new Promise(resolve => {
+    setTimeout(resolve, 5000);
+  });
+});
+
+test.after(async (t) => {
+  const { bktClient } = t.context;
+  bktClient.destroy();
+});
+
+test('boolVariation', async (t) => {
+  const { bktClient, defaultUser } = t.context;
+  t.is(await bktClient.booleanVariation(defaultUser, FEATURE_ID_BOOLEAN, false), true);
+  t.deepEqual(
+    await bktClient.booleanVariationDetails(defaultUser, FEATURE_ID_BOOLEAN, false),
+    {
+      featureId: FEATURE_ID_BOOLEAN,
+      featureVersion: 5,
+      userId: defaultUser.id,
+      variationId: 'f948b6dd-c366-4828-8ee0-72edbe2c0eea',
+      variationName: 'variation 1',
+      variationValue: true,
+      reason: 'DEFAULT',
+    }
+  )
+});
+
+test('stringVariation', async (t) => {
+  const { bktClient, defaultUser } = t.context;
+  t.is(await bktClient.stringVariation(defaultUser, FEATURE_ID_STRING, ''), 'value-1');
+  t.deepEqual(
+    await bktClient.stringVariationDetails(defaultUser, FEATURE_ID_STRING, ''),
+    {
+      featureId: FEATURE_ID_STRING,
+      featureVersion: 22,
+      userId: defaultUser.id,
+      variationId: '16a9db43-dfba-485c-8300-8747af5caf61',
+      variationName: 'variation 1',
+      variationValue: 'value-1',
+      reason: 'DEFAULT',
+    }
+  )
+});
+
+test('numberVariation', async (t) => {
+  const { bktClient, defaultUser } = t.context;
+  t.is(await bktClient.numberVariation(defaultUser, FEATURE_ID_INT, 0), 10);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(defaultUser, FEATURE_ID_INT, 1),
+    {
+      featureId: FEATURE_ID_INT,
+      featureVersion: 5,
+      userId: defaultUser.id,
+      variationId: '935ac588-c3ef-4bc8-915b-666369cdcada',
+      variationName: 'variation 1',
+      variationValue: 10,
+      reason: 'DEFAULT',
+    }
+  )
+
+  t.is(await bktClient.numberVariation(defaultUser, FEATURE_ID_FLOAT, 0.0), 2.1);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(defaultUser, FEATURE_ID_FLOAT, 1.1),
+    {
+      featureId: FEATURE_ID_FLOAT,
+      featureVersion: 5,
+      userId: defaultUser.id,
+      variationId: '0b04a309-31cd-471f-acf0-0ea662d16737',
+      variationName: 'variation 1',
+      variationValue: 2.1,
+      reason: 'DEFAULT',
+    }
+  )
+
+});
+
+test('objectVariation', async (t) => {
+  const { bktClient, defaultUser } = t.context;
+  t.deepEqual(await bktClient.getJsonVariation(defaultUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(await bktClient.objectVariation(defaultUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(
+    await bktClient.objectVariationDetails(defaultUser, FEATURE_ID_JSON, {}),
+    {
+      featureId: FEATURE_ID_JSON,
+      featureVersion: 5,
+      userId: defaultUser.id,
+      variationId: 'ff8299ed-80c9-4d30-9e92-a55750ad3ffb',
+      variationName: 'variation 1',
+      variationValue: { str: 'str1', int: 'int1' },
+      reason: 'DEFAULT',
+    }
+  )
+});
diff --git a/e2e/local_evaluation/evaluation_segment_user.ts b/e2e/local_evaluation/evaluation_segment_user.ts
new file mode 100644
index 0000000..fd09db6
--- /dev/null
+++ b/e2e/local_evaluation/evaluation_segment_user.ts
@@ -0,0 +1,113 @@
+import anyTest, { TestFn } from 'ava';
+import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
+import { HOST, FEATURE_TAG, TARGETED_SEGMENT_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, SERVER_ROLE_TOKEN } from '../constants/constants';
+
+const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedSegmentUser: User }>;
+
+test.before( async (t) => {
+  t.context = {
+    bktClient: initialize({
+      host: HOST,
+      token: SERVER_ROLE_TOKEN,
+      tag: FEATURE_TAG,
+      logger: new DefaultLogger('error'),
+      enableLocalEvaluation: true,
+      cachePollingInterval: 3000,
+    }),
+    targetedSegmentUser: { id: TARGETED_SEGMENT_USER_ID, data: {} },
+  };   
+
+  await new Promise(resolve => {
+    setTimeout(resolve, 5000);
+  });
+});
+
+test.after(async (t) => {
+  const { bktClient } = t.context;
+  bktClient.destroy();
+});
+
+test('boolVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.booleanVariation(targetedSegmentUser, FEATURE_ID_BOOLEAN, false), true);
+  t.deepEqual(
+    await bktClient.booleanVariationDetails(targetedSegmentUser, FEATURE_ID_BOOLEAN, false),
+    {
+      featureId: FEATURE_ID_BOOLEAN,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: 'f948b6dd-c366-4828-8ee0-72edbe2c0eea',
+      variationName: 'variation 1',
+      variationValue: true,
+      reason: 'DEFAULT',
+    }
+  )
+});
+
+// This test case verifies that the targeted segment user is evaluated by the rule (reason: RULE).
+test('stringVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.stringVariation(targetedSegmentUser, FEATURE_ID_STRING, ''), 'value-3');
+  t.deepEqual(
+    await bktClient.stringVariationDetails(targetedSegmentUser, FEATURE_ID_STRING, 'true'),
+    {
+      featureId: FEATURE_ID_STRING,
+      featureVersion: 22,
+      userId: targetedSegmentUser.id,
+      variationId: 'e92fa326-2c7a-45f2-aaf7-ab9eb59f0ccf',
+      variationName: 'variation 3',
+      variationValue: 'value-3',
+      reason: 'RULE',
+    }
+  )
+});
+
+test('numberVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_INT, 0), 10);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_INT, 1),
+    {
+      featureId: FEATURE_ID_INT,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: '935ac588-c3ef-4bc8-915b-666369cdcada',
+      variationName: 'variation 1',
+      variationValue: 10,
+      reason: 'DEFAULT',
+    }
+  )
+
+  t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_FLOAT, 0.0), 2.1);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_FLOAT, 1.1),
+    {
+      featureId: FEATURE_ID_FLOAT,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: '0b04a309-31cd-471f-acf0-0ea662d16737',
+      variationName: 'variation 1',
+      variationValue: 2.1,
+      reason: 'DEFAULT',
+    }
+  )
+  
+});
+
+test('objectVariation', async (t) => {
+  const { bktClient, targetedSegmentUser } = t.context;
+  t.deepEqual(await bktClient.getJsonVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(await bktClient.objectVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
+  t.deepEqual(
+    await bktClient.objectVariationDetails(targetedSegmentUser, FEATURE_ID_JSON, {}),
+    {
+      featureId: FEATURE_ID_JSON,
+      featureVersion: 5,
+      userId: targetedSegmentUser.id,
+      variationId: 'ff8299ed-80c9-4d30-9e92-a55750ad3ffb',
+      variationName: 'variation 1',
+      variationValue: { str: 'str1', int: 'int1' },
+      reason: 'DEFAULT',
+    }
+  )
+});
diff --git a/e2e/local_evaluation/evaluation_targeting_strategy.ts b/e2e/local_evaluation/evaluation_targeting_strategy.ts
new file mode 100644
index 0000000..be400ca
--- /dev/null
+++ b/e2e/local_evaluation/evaluation_targeting_strategy.ts
@@ -0,0 +1,113 @@
+import anyTest, { TestFn } from 'ava';
+import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
+import { HOST, FEATURE_TAG, TARGETED_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, SERVER_ROLE_TOKEN } from '../constants/constants';
+
+const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedUser: User }>;
+
+test.before( async (t) => {
+  t.context = {
+    bktClient: initialize({
+      host: HOST,
+      token: SERVER_ROLE_TOKEN,
+      tag: FEATURE_TAG,
+      logger: new DefaultLogger('error'),
+      enableLocalEvaluation: true,
+      cachePollingInterval: 3000,
+    }),
+    targetedUser: { id: TARGETED_USER_ID, data: {} },
+  };   
+
+  await new Promise(resolve => {
+    setTimeout(resolve, 5000);
+  });
+});
+
+test.after(async (t) => {
+  const { bktClient } = t.context;
+  bktClient.destroy();
+});
+
+
+test('boolVariation', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.is(await bktClient.booleanVariation(targetedUser, FEATURE_ID_BOOLEAN, true), false);
+  t.deepEqual(
+    await bktClient.booleanVariationDetails(targetedUser, FEATURE_ID_BOOLEAN, true),
+    {
+      featureId: FEATURE_ID_BOOLEAN,
+      featureVersion: 5,
+      userId: targetedUser.id,
+      variationId: '29f318b0-d770-48a5-8ae5-c1c692ed6cec',
+      variationName: 'variation 2',
+      variationValue: false,
+      reason: 'TARGET',
+    }
+  )
+});
+
+test('stringVariation', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.is(await bktClient.stringVariation(targetedUser, FEATURE_ID_STRING, ''), 'value-2');
+  t.deepEqual(
+    await bktClient.stringVariationDetails(targetedUser, FEATURE_ID_STRING, 'true'),
+    {
+      featureId: FEATURE_ID_STRING,
+      featureVersion: 22,
+      userId: targetedUser.id,
+      variationId: 'a3336346-931e-40f4-923a-603c642285d7',
+      variationName: 'variation 2',
+      variationValue: 'value-2',
+      reason: 'TARGET',
+    }
+  )
+});
+
+test('numberVariation', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_INT, 0), 20);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedUser, FEATURE_ID_INT, 99),
+    {
+      featureId: FEATURE_ID_INT,
+      featureVersion: 5,
+      userId: targetedUser.id,
+      variationId: '125380f8-5c18-4a80-b37d-a41331acf075',
+      variationName: 'variation 2',
+      variationValue: 20,
+      reason: 'TARGET',
+    }
+  )
+
+  t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_FLOAT, 0.0), 3.1);
+  t.deepEqual(
+    await bktClient.numberVariationDetails(targetedUser, FEATURE_ID_FLOAT, 99),
+    {
+      featureId: FEATURE_ID_FLOAT,
+      featureVersion: 5,
+      userId: targetedUser.id,
+      variationId: 'fdd0585b-dde4-4c2b-8f41-a1ca8f25d6a3',
+      variationName: 'variation 2',
+      variationValue: 3.1,
+      reason: 'TARGET',
+    }
+  )
+
+});
+
+test('objectVariation', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.deepEqual(await bktClient.getJsonVariation(targetedUser, FEATURE_ID_JSON, {}), { "str": "str2", "int": "int2" });
+  t.deepEqual(await bktClient.objectVariation(targetedUser, FEATURE_ID_JSON, {}), { "str": "str2", "int": "int2" });
+  t.deepEqual(
+    await bktClient.objectVariationDetails(targetedUser, FEATURE_ID_JSON, 99),
+    {
+      featureId: FEATURE_ID_JSON,
+      featureVersion: 5,
+      userId: targetedUser.id,
+      variationId: '636e08e5-7ecd-4c91-88f7-4443c8486767',
+      variationName: 'variation 2',
+      variationValue: { str: 'str2', int: 'int2' },
+      reason: 'TARGET',
+    }
+  )
+});
\ No newline at end of file
diff --git a/e2e/local_evaluation/events.ts b/e2e/local_evaluation/events.ts
new file mode 100644
index 0000000..e3ad053
--- /dev/null
+++ b/e2e/local_evaluation/events.ts
@@ -0,0 +1,107 @@
+import anyTest, { TestFn } from 'ava';
+import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
+import {
+  HOST,
+  TOKEN,
+  FEATURE_TAG,
+  TARGETED_USER_ID,
+  FEATURE_ID_BOOLEAN,
+  FEATURE_ID_STRING,
+  FEATURE_ID_INT,
+  FEATURE_ID_JSON,
+  FEATURE_ID_FLOAT,
+  GOAL_ID,
+  GOAL_VALUE,
+  SERVER_ROLE_TOKEN,
+} from '../constants/constants';
+import { BKTClientImpl } from '../../lib/client';
+import { isGoalEvent } from '../../lib/objects/goalEvent';
+import { isErrorMetricsEvent, isMetricsEvent } from '../../lib/objects/metricsEvent';
+import { isEvaluationEvent } from '../../lib/objects/evaluationEvent';
+
+const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedUser: User }>;
+
+test.beforeEach(async (t) => {
+  t.context = {
+    bktClient: initialize({
+      host: HOST,
+      token: SERVER_ROLE_TOKEN,
+      tag: FEATURE_TAG,
+      logger: new DefaultLogger('error'),
+      enableLocalEvaluation: true,
+      cachePollingInterval: 3000,
+    }),
+    targetedUser: { id: TARGETED_USER_ID, data: {} },
+  };
+  // Wait for the cache to become available
+  await new Promise((resolve) => {
+    setTimeout(resolve, 5000);
+  });
+});
+
+test('goal event', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.is(await bktClient.booleanVariation(targetedUser, FEATURE_ID_BOOLEAN, true), false);
+  bktClient.track(targetedUser, GOAL_ID, GOAL_VALUE);
+  const bktClientImpl = bktClient as BKTClientImpl;
+  const events = bktClientImpl.eventStore.getAll();
+  // Feature Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // Segment User Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // booleanVariation : 2 event (EvaluationEvent,  Metrics Event - Latency)
+  // goal : 1 event (GoalEvent)
+  // SUM : 7 events
+  t.is(events.length, 7);
+  t.true(events.some((e: { event: any }) => isGoalEvent(e.event)));
+});
+
+test('evaluation event', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  t.is(await bktClient.booleanVariation(targetedUser, FEATURE_ID_BOOLEAN, true), false);
+  t.deepEqual(await bktClient.getJsonVariation(targetedUser, FEATURE_ID_JSON, {}), {
+    str: 'str2',
+    int: 'int2',
+  });
+  t.deepEqual(await bktClient.objectVariation(targetedUser, FEATURE_ID_JSON, {}), {
+    str: 'str2',
+    int: 'int2',
+  });
+  t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_INT, 0), 20);
+  t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_FLOAT, 0.0), 3.1);
+  t.is(await bktClient.stringVariation(targetedUser, FEATURE_ID_STRING, ''), 'value-2');
+  const bktClientImpl = bktClient as BKTClientImpl;
+  const events = bktClientImpl.eventStore.getAll();
+  // Feature Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // Segment User Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // (EvaluationEvent, Metrics Event - Latency) x 6
+  t.is(events.length, 16);
+  t.true(events.some((e) => isEvaluationEvent(e.event)));
+  t.true(events.some((e) => isMetricsEvent(e.event)));
+});
+
+test('default evaluation event', async (t) => {
+  const { bktClient, targetedUser } = t.context;
+  const notFoundFeatureId = 'not-found-feature-id';
+  t.is(await bktClient.booleanVariation(targetedUser, notFoundFeatureId, true), true);
+  t.deepEqual(await bktClient.getJsonVariation(targetedUser, notFoundFeatureId, { "str": "str2",}), { "str": "str2" });
+  t.deepEqual(await bktClient.objectVariation(targetedUser, notFoundFeatureId, { "str": "str2" }),  { "str": "str2" });
+  t.is(await bktClient.numberVariation(targetedUser, notFoundFeatureId, 10), 10);
+  t.is(await bktClient.numberVariation(targetedUser, notFoundFeatureId, 3.3), 3.3);
+  t.is(await bktClient.stringVariation(targetedUser, notFoundFeatureId, 'value-9'), 'value-9');
+  const bktClientImpl = bktClient as BKTClientImpl
+  const events = bktClientImpl.eventStore.getAll()
+  // Feature Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // Segment User Cache : 2 events (Metrics Event - Latency, Metrics Event - Metrics Size)
+  // (DefaultEvaluationEvent, Error Event) x 6
+  t.is(events.length, 16);
+  t.true(events.some((e) => (isEvaluationEvent(e.event))));
+  t.true(events.some((e) => (isMetricsEvent(e.event))));
+  // TODO: fix me. This should be a not-found error, not an unknown error.
+  t.true(events.some((e) => (isErrorMetricsEvent(e.event, UNKNOWN_ERROR_METRICS_EVENT_NAME))));
+});
+
+test.afterEach(async (t) => {
+  const { bktClient } = t.context;
+  bktClient.destroy();
+});
+
+const UNKNOWN_ERROR_METRICS_EVENT_NAME = "type.googleapis.com/bucketeer.event.client.UnknownErrorMetricsEvent";
\ No newline at end of file
diff --git a/package.json b/package.json
index e59c884..6febecc 100644
--- a/package.json
+++ b/package.json
@@ -11,9 +11,14 @@
     "@types/node": "^22.10.1",
     "@types/uuid": "^10.0.0",
     "uuid": "^11.0.3"
+    "@bucketeer/evaluation": "0.0.1",
+    "@improbable-eng/grpc-web": "^0.13.0",
+    "@improbable-eng/grpc-web-node-http-transport": "^0.15.0",
+    "google-protobuf": "^3.21.4"
   },
   "devDependencies": {
     "@ava/babel": "2.0.0",
+    "@ava/typescript": "^5.0.0",
     "@babel/cli": "7.25.9",
     "@babel/core": "7.26.0",
     "@babel/preset-env": "7.26.0",
@@ -32,6 +37,8 @@
     "@typescript-eslint/eslint-plugin": "^8.16.0",
     "@typescript-eslint/parser": "^8.16.0",
     "ava": "6.2.0",
+    "@types/sinon": "^17.0.3",
+    "sinon": "^19.0.2",
     "cpx": "1.5.0",
     "eslint": "^9.16.0",
     "eslint-config-prettier": "9.1.0",
diff --git a/src/__tests__/cache/feature_cache.ts b/src/__tests__/cache/feature_cache.ts
new file mode 100644
index 0000000..1b93e1f
--- /dev/null
+++ b/src/__tests__/cache/feature_cache.ts
@@ -0,0 +1,67 @@
+import test from 'ava';
+import { createFeature } from '@bucketeer/evaluation';
+import { NewFeatureCache } from '../../cache/features';
+import { InMemoryCache } from '../../cache/inMemoryCache';
+
+test('put - delete - get', async t => {
+  const cache = NewFeatureCache({cache: new InMemoryCache(), ttl: 1000});
+  const feature1 = createFeature({id: 'feature1'});
+  const feature2 = createFeature({id: 'feature2'});
+  await cache.put(feature1);
+  await cache.put(feature2);
+
+  t.deepEqual(await cache.get('feature1'), feature1);
+  t.deepEqual(await cache.get('feature2'), feature2);
+
+  await cache.delete('feature1');
+  const deletedValue = await cache.get('feature1');
+  t.is(deletedValue, null);
+
+  await cache.deleteAll();
+  const clearedValue = await cache.get('feature2');
+  t.is(clearedValue, null);
+});
+
+test('get should return null if key does not exist', async t => {
+  const cache = new InMemoryCache();
+  const featureCache = NewFeatureCache({ cache, ttl: 1000 });
+
+  const result = await featureCache.get('nonexistent');
+  t.is(result, null);
+});
+
+test('put should store the value in the cache', async t => {
+  const cache = new InMemoryCache();
+  const featureCache = NewFeatureCache({ cache, ttl: 1000 });
+  const feature = createFeature({id: 'feature1'});
+
+  await featureCache.put(feature);
+  const result = await featureCache.get('feature1');
+  t.deepEqual(result, feature);
+});
+
+test('delete should remove the value from the cache', async t => {
+  const cache = new InMemoryCache();
+  const featureCache = NewFeatureCache({ cache, ttl: 1000 });
+  const feature = createFeature({id: 'feature1'});
+
+  await featureCache.put(feature);
+  await featureCache.delete('feature1');
+  const result = await featureCache.get('feature1');
+  t.is(result, null);
+});
+
+test('clear should remove all values from the cache', async t => {
+  const cache = new InMemoryCache();
+  const featureCache = NewFeatureCache({ cache, ttl: 1000 });
+  const feature1 = createFeature({id: 'feature1'});
+  const feature2 = createFeature({id: 'feature2'});
+
+  await featureCache.put(feature1);
+  await featureCache.put(feature2);
+  await featureCache.deleteAll();
+  const result1 = await featureCache.get('feature1');
+  const result2 = await featureCache.get('feature2');
+  t.is(result1, null);
+  t.is(result2, null);
+});
\ No newline at end of file
diff --git a/src/__tests__/cache/in_memory_cache.ts b/src/__tests__/cache/in_memory_cache.ts
new file mode 100644
index 0000000..46c0e78
--- /dev/null
+++ b/src/__tests__/cache/in_memory_cache.ts
@@ -0,0 +1,51 @@
+import test from 'ava';
+import { InMemoryCache } from '../../cache/inMemoryCache';
+
+test('put and get a value', async t => {
+  const cache = new InMemoryCache();
+  await cache.put('key1', 'value1', 1000);
+  const value = await cache.get('key1');
+  t.is(value, 'value1');
+});
+
+test('get returns null for non-existent key', async t => {
+  const cache = new InMemoryCache();
+  const value = await cache.get('nonExistentKey');
+  t.is(value, null);
+});
+
+test('get returns null for expired key', async t => {
+  const cache = new InMemoryCache();
+  await cache.put('key1', 'value1', 1);
+  await new Promise(resolve => setTimeout(resolve, 10)); // wait for the key to expire
+  const value = await cache.get('key1');
+  t.is(value, null);
+});
+
+test('scan returns keys with given prefix', async t => {
+  const cache = new InMemoryCache();
+  await cache.put('prefix_key1', 'value1', 1000);
+  await cache.put('prefix_key2', 'value2', 1000);
+  await cache.put('other_key', 'value3', 1000);
+  const keys = await cache.scan('prefix_');
+  t.deepEqual(keys, ['prefix_key1', 'prefix_key2']);
+});
+
+test('delete removes a key', async t => {
+  const cache = new InMemoryCache();
+  await cache.put('key1', 'value1', 1000);
+  await cache.delete('key1');
+  const value = await cache.get('key1');
+  t.is(value, null);
+});
+
+test('destroy clears all entries', async t => {
+  const cache = new InMemoryCache();
+  await cache.put('key1', 'value1', 1000);
+  await cache.put('key2', 'value2', 1000);
+  await cache.deleteAll();
+  const value1 = await cache.get('key1');
+  const value2 = await cache.get('key2');
+  t.is(value1, null);
+  t.is(value2, null);
+});
\ No newline at end of file
diff --git a/src/__tests__/cache/processor/featureCache/in_memory.ts b/src/__tests__/cache/processor/featureCache/in_memory.ts
new file mode 100644
index 0000000..c934d8e
--- /dev/null
+++ b/src/__tests__/cache/processor/featureCache/in_memory.ts
@@ -0,0 +1,122 @@
+import test from 'ava';
+import { InMemoryCache } from '../../../../cache/inMemoryCache';
+import { NewFeatureCache } from '../../../../cache/features';
+import {
+  FEATURE_FLAG_ID,
+  FEATURE_FLAG_REQUESTED_AT,
+  NewFeatureFlagProcessor,
+} from '../../../../cache/processor/featureFlagCacheProcessor';
+
+import { Clock } from '../../../../utils/clock';
+import { GetFeatureFlagsResponse, GetSegmentUsersResponse, createFeature } from '@bucketeer/evaluation';
+import { GRPCClient } from '../../../../grpc/client';
+import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
+
+class SpyGRPCClient implements GRPCClient {
+  segmentUsersRes: GetSegmentUsersResponse | null = null;
+  featureFlags: GetFeatureFlagsResponse | null = null;
+  getSegmentUsersError: Error | null = null;
+  getFeatureFlagsError: Error | null = null;
+
+  getSegmentUsersRequest: {
+    segmentIdsList: Array<string>;
+    requestedAt: number;
+    version: string;
+  } | null = null;
+
+  getFeatureFlagsRequest: {
+    tag: string;
+    featureFlagsId: string;
+    requestedAt: number;
+    version: string;
+  } | null = null;
+
+  getSegmentUsers(options: {
+    segmentIdsList: Array<string>;
+    requestedAt: number;
+    version: string;
+  }): Promise<GetSegmentUsersResponse> {
+
+    this.getSegmentUsersRequest = options
+
+    if (this.getSegmentUsersError) {
+      return Promise.reject(this.getSegmentUsersError);
+    }
+    if (this.segmentUsersRes) {
+      return Promise.resolve(this.segmentUsersRes);
+    }
+    throw new Error('Missing response');
+  }
+
+  getFeatureFlags(options: {
+    tag: string;
+    featureFlagsId: string;
+    requestedAt: number;
+    version: string;
+  }): Promise<GetFeatureFlagsResponse> {
+
+    this.getFeatureFlagsRequest = options
+
+    if (this.getFeatureFlagsError) {
+      return Promise.reject(this.getFeatureFlagsError);
+    }
+    if (this.featureFlags) {
+      return Promise.resolve(this.featureFlags);
+    }
+    throw new Error('Missing response');
+  }
+}
+
+test('polling cache - using InMemoryCache()', async (t) => {
+  const clock = new Clock();
+
+  const cache = new InMemoryCache();
+  const featureCache = NewFeatureCache({ cache, ttl: 1000 });
+  const eventEmitter = new ProcessorEventsEmitter();
+  const featureFlag = 'nodejs';
+  const grpc = new SpyGRPCClient();
+
+  const featuresResponse = new GetFeatureFlagsResponse();
+  featuresResponse.setFeatureFlagsId('featureFlagsId');
+  featuresResponse.setRequestedAt(1000);
+  const featureList = featuresResponse.getFeaturesList();
+  const feature1 = createFeature({ id: 'feature1' });
+  const feature2 = createFeature({ id: 'feature2' });
+
+  featureList.push(feature1);
+  featureList.push(feature2);
+
+  grpc.featureFlags = featuresResponse;
+
+  const processor = NewFeatureFlagProcessor({
+    cache: cache,
+    featureFlagCache: featureCache,
+    pollingInterval: 10,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    featureTag: featureFlag,
+    clock: clock,
+  });
+
+  processor.start();
+
+  // Wait for 2 seconds before continuing the test
+  await new Promise((resolve) => setTimeout(resolve, 2000));
+
+  processor.stop();
+
+  t.deepEqual(await featureCache.get('feature1'), feature1);
+  t.deepEqual(await featureCache.get('feature2'), feature2);
+
+  const featureFlagId = await cache.get(FEATURE_FLAG_ID);
+  t.is(featureFlagId, 'featureFlagsId');
+
+  const requestedAt = await cache.get(FEATURE_FLAG_REQUESTED_AT);
+  t.true(requestedAt == 1000);
+
+  t.deepEqual(grpc.getFeatureFlagsRequest, {
+    tag: featureFlag,
+    featureFlagsId: 'featureFlagsId',
+    requestedAt: 1000,
+  });
+});
\ No newline at end of file
diff --git a/src/__tests__/cache/processor/featureCache/polling.ts b/src/__tests__/cache/processor/featureCache/polling.ts
new file mode 100644
index 0000000..4b9fadc
--- /dev/null
+++ b/src/__tests__/cache/processor/featureCache/polling.ts
@@ -0,0 +1,121 @@
+import test from 'ava';
+import sino from 'sinon';
+import { NewFeatureCache } from '../../../../cache/features';
+import {
+  FEATURE_FLAG_CACHE_TTL,
+  FEATURE_FLAG_ID,
+  NewFeatureFlagProcessor,
+} from '../../../../cache/processor/featureFlagCacheProcessor';
+import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
+import {
+  GetFeatureFlagsResponse,
+  createFeature,
+} from '@bucketeer/evaluation';
+
+import { FEATURE_FLAG_REQUESTED_AT } from '../../../../cache/processor/featureFlagCacheProcessor';
+import { Clock } from '../../../../utils/clock';
+import { MockCache } from '../../../mocks/cache';
+import { MockGRPCClient } from '../../../mocks/gprc';
+
+test('polling cache', async (t) => {
+
+  const clock = new Clock();
+  const mockClock = sino.mock(clock);
+  const mockClockExpected = mockClock.expects('getTime').atLeast(1);
+  mockClockExpected.onFirstCall().returns(0);
+  mockClockExpected.onSecondCall().returns(3210);
+  mockClockExpected.onThirdCall().returns(4200);
+  mockClockExpected.onCall(3).returns(6000);
+
+  const cache = new MockCache();
+  const mockCache = sino.mock(cache);
+  const mockCacheStbFeatureFlagId = mockCache.expects('get').atLeast(1).withArgs(FEATURE_FLAG_ID);
+  mockCacheStbFeatureFlagId.onFirstCall().returns(null);
+  mockCacheStbFeatureFlagId.returns('featureFlagsId');
+
+  mockCache.expects('put').atLeast(1).withArgs(FEATURE_FLAG_ID, 'featureFlagsId', FEATURE_FLAG_CACHE_TTL); 
+
+  const mockCacheStbFeatureFlagRequestedAt = mockCache.expects('get').atLeast(1).withArgs(FEATURE_FLAG_REQUESTED_AT);
+  mockCacheStbFeatureFlagRequestedAt.onFirstCall().returns(0);
+  mockCacheStbFeatureFlagRequestedAt.returns(1100);
+
+  mockCache.expects('put').atLeast(1).withArgs(FEATURE_FLAG_REQUESTED_AT, 1100, FEATURE_FLAG_CACHE_TTL); 
+
+  const gRPCClient = new MockGRPCClient();
+  const mockGRPCClient = sino.mock(gRPCClient);
+
+  const featureFlag = 'nodejs';
+  const featuresResponse = new GetFeatureFlagsResponse();
+  featuresResponse.setFeatureFlagsId('featureFlagsId');
+  featuresResponse.setRequestedAt(1100);
+  const featureList = featuresResponse.getFeaturesList();
+  const feature1 = createFeature({ id: 'feature1' });
+  const feature2 = createFeature({ id: 'feature2' });
+
+  featureList.push(feature1);
+  featureList.push(feature2);
+
+  const responseSize = featuresResponse.serializeBinary().length;
+
+  mockCache.expects('put').atLeast(1).withArgs('features:feature1', feature1, FEATURE_FLAG_CACHE_TTL); 
+  mockCache.expects('put').atLeast(1).withArgs('features:feature2', feature2, FEATURE_FLAG_CACHE_TTL); 
+
+  mockGRPCClient
+  .expects('getFeatureFlags')
+  .atLeast(1)
+  .withArgs({
+    tag: featureFlag,
+    featureFlagsId: '',
+    requestedAt: 0,
+  })
+  .resolves(featuresResponse);
+
+  mockGRPCClient
+  .expects('getFeatureFlags')
+  .atLeast(1)
+  .withArgs({
+    tag: featureFlag,
+    featureFlagsId: 'featureFlagsId',
+    requestedAt: 1100,
+  })
+  .resolves(featuresResponse);
+
+
+  mockGRPCClient.expects('getSegmentUsers').never();
+
+  const eventEmitter = new ProcessorEventsEmitter();
+  const mockProcessorEventsEmitter = sino.mock(eventEmitter);
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .atLeast(1)
+    .withArgs('pushLatencyMetricsEvent', { latency: 3.21, apiId: 4 });
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .atLeast(1)
+    .withArgs('pushLatencyMetricsEvent', { latency: 1.8, apiId: 4 });
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .atLeast(1)
+    .withArgs('pushSizeMetricsEvent', { size: responseSize, apiId: 4 });
+  mockProcessorEventsEmitter.expects('emit').never().withArgs('error', sino.match.any);
+
+  const processor = NewFeatureFlagProcessor({
+    cache: cache,
+    featureFlagCache: NewFeatureCache({ cache: cache, ttl: 0 }),
+    pollingInterval: 1000,
+    grpc: gRPCClient,
+    eventEmitter: eventEmitter,
+    featureTag: featureFlag,
+    clock: clock,
+  });
+
+  processor.start();
+
+  await new Promise((resolve) => setTimeout(resolve, 3000));
+
+  processor.stop();
+  mockClock.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  mockGRPCClient.verify();
+});
diff --git a/src/__tests__/cache/processor/featureCache/update.ts b/src/__tests__/cache/processor/featureCache/update.ts
new file mode 100644
index 0000000..7131b50
--- /dev/null
+++ b/src/__tests__/cache/processor/featureCache/update.ts
@@ -0,0 +1,557 @@
+import anyTest, { TestFn } from 'ava';
+import { NewFeatureCache } from '../../../../cache/features';
+import {
+  FEATURE_FLAG_CACHE_TTL,
+  FEATURE_FLAG_ID,
+  FeatureFlagProcessorOptions,
+  DefaultFeatureFlagProcessor,
+} from '../../../../cache/processor/featureFlagCacheProcessor';
+
+import {
+  Feature,
+  GetFeatureFlagsResponse,
+  createFeature,
+} from '@bucketeer/evaluation';
+import sino from 'sinon';
+import { FEATURE_FLAG_REQUESTED_AT } from '../../../../cache/processor/featureFlagCacheProcessor';
+import { Clock } from '../../../../utils/clock';
+import { MockCache } from '../../../mocks/cache';
+import { MockGRPCClient } from '../../../mocks/gprc';
+import { ApiId } from '../../../../objects/apiId';
+import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
+
+const test = anyTest as TestFn<{
+  featureTag: string;
+  processor: DefaultFeatureFlagProcessor;
+  options: FeatureFlagProcessorOptions;
+  sandbox: sino.SinonSandbox;
+  feature: Feature;
+  archivedFeatureIds: string[];
+}>;
+
+test.beforeEach((t) => {
+  const sandbox = sino.createSandbox();
+  const cache = new MockCache();
+  const grpc = new MockGRPCClient();
+  const eventEmitter = new ProcessorEventsEmitter();
+  const clock = new Clock();
+  const featureFlagCache = NewFeatureCache({ cache: cache, ttl: FEATURE_FLAG_CACHE_TTL });
+  const options = {
+    cache: cache,
+    featureFlagCache: featureFlagCache,
+    pollingInterval: 1000,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    featureTag: 'nodejs',
+    clock: clock,
+  };
+  const singleFeature = createFeature({ id: 'feature-flag-id-2' });
+  const archivedFeatureIds = [
+    'feature-flags-id-3', 'feature-flags-id-4'];
+  const processor = new DefaultFeatureFlagProcessor(options);
+  t.context = {
+    featureTag: 'nodejs',
+    processor: processor,
+    options: options,
+    sandbox: sandbox,
+    feature: singleFeature,
+    archivedFeatureIds: archivedFeatureIds,
+  };
+});
+
+test.afterEach((t) => {
+  t.context.sandbox.restore();
+});
+
+test('err: failed while getting featureFlagsID', async (t) => {
+  const { processor, options, sandbox } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+
+  const error = new Error('Internal error');
+  mockCache.expects('get').once().withArgs(FEATURE_FLAG_ID).throws(error);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: error, apiId: ApiId.GET_FEATURE_FLAGS });
+  await processor.runUpdateCache();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while getting requestedAt', async (t) => {
+  const { processor, options, sandbox } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const error = new Error('Internal error');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).throws(error);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: error, apiId: ApiId.GET_FEATURE_FLAGS });
+  await processor.runUpdateCache();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while requesting cache from the server', async (t) => {
+  const { featureTag, processor, options, sandbox } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(null);
+  const error = new Error('Internal error');
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: '',
+      requestedAt: 0,
+    })
+    .throws(error);
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: error, apiId: ApiId.GET_FEATURE_FLAGS });
+
+  await processor.runUpdateCache();
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while putting featureFlagsID, and the forceUpdate is true', async (t) => {
+  const { featureTag, processor, options, sandbox, feature } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+  const internalError = new Error('Internal error');
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(true);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList([]);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  // err: failed while putting featureFlagsID, and the forceUpdate is true
+  mockCache
+    .expects('put')
+    .withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL)
+    .throws(internalError);
+  mockFeatureFlagCache.expects('deleteAll').once();
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalError, apiId: ApiId.GET_FEATURE_FLAGS });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while putting requestedAt, and the forceUpdate is true', async (t) => {
+  const { featureTag, processor, options, sandbox, feature } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+  const internalError = new Error('Internal error');
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(true);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList([]);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  // err: failed while putting requestedAt, and the forceUpdate is true
+  mockCache
+    .expects('put')
+    .withArgs(FEATURE_FLAG_REQUESTED_AT, 20, FEATURE_FLAG_CACHE_TTL)
+    .throws(internalError);
+  mockFeatureFlagCache.expects('deleteAll').once();
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalError, apiId: ApiId.GET_FEATURE_FLAGS });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while putting featureFlagsID, and the forceUpdate is false', async (t) => {
+  const { featureTag, processor, options, sandbox, feature, archivedFeatureIds } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+  const internalError = new Error('Internal error');
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList(archivedFeatureIds);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  // err: failed while putting featureFlagsID, and the forceUpdate is false
+  mockCache
+    .expects('put')
+    .withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL)
+    .throws(internalError);
+  mockFeatureFlagCache.expects('deleteAll').never();
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[0]);
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[1]);
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalError, apiId: ApiId.GET_FEATURE_FLAGS });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('err: failed while putting requestedAt, and the forceUpdate is false', async (t) => {
+  const { featureTag, processor, options, sandbox, feature, archivedFeatureIds } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+  const internalError = new Error('Internal error');
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList(archivedFeatureIds);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  // err: failed while putting requestedAt, and the forceUpdate is false
+  mockFeatureFlagCache.expects('deleteAll').never();
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[0]);
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[1]);
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_REQUESTED_AT).throws(internalError);
+  
+  mockProcessorEventsEmitter
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalError, apiId: ApiId.GET_FEATURE_FLAGS });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('success: featureFlagsID not found', async (t) => {
+  const { featureTag, processor, options, sandbox } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns(null);
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  // success: featureFlagsID not found
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, '', FEATURE_FLAG_CACHE_TTL);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_REQUESTED_AT, 0, FEATURE_FLAG_CACHE_TTL);
+
+  const response = new GetFeatureFlagsResponse();
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: '',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('success: requestedAt not found', async (t) => {
+  const { featureTag, processor, options, sandbox } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(null);
+
+  // success: requestedAt not found
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, '', FEATURE_FLAG_CACHE_TTL);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_REQUESTED_AT, 0, FEATURE_FLAG_CACHE_TTL);
+
+  const response = new GetFeatureFlagsResponse();
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 0,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('success: forceUpdate is true', async (t) => {
+  const { featureTag, processor, options, sandbox, feature } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(true);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList([]);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_REQUESTED_AT, 20, FEATURE_FLAG_CACHE_TTL);
+
+  mockFeatureFlagCache.expects('deleteAll').once();
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
+
+test('success: forceUpdate is false', async (t) => {
+  const { featureTag, processor, options, sandbox, feature, archivedFeatureIds } = t.context;
+  const mockCache = sandbox.mock(options.cache);
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  const mockFeatureFlagCache = sandbox.mock(options.featureFlagCache);
+
+  mockCache.expects('get').withArgs(FEATURE_FLAG_ID).returns('feature-flags-id-1');
+  mockCache.expects('get').withArgs(FEATURE_FLAG_REQUESTED_AT).returns(10);
+
+  const response = new GetFeatureFlagsResponse();
+  response.setFeatureFlagsId('feature-flags-id-2');
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setFeaturesList([feature]);
+  response.setArchivedFeatureFlagIdsList(archivedFeatureIds);
+
+  const responseSize = response.serializeBinary().length;
+
+  mockGRPCClient
+    .expects('getFeatureFlags')
+    .once()
+    .withArgs({
+      tag: featureTag,
+      featureFlagsId: 'feature-flags-id-1',
+      requestedAt: 10,
+    })
+    .returns(response);
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: responseSize,
+    apiId: ApiId.GET_FEATURE_FLAGS,
+  });
+
+  mockCache.expects('put').withArgs(FEATURE_FLAG_ID, 'feature-flags-id-2', FEATURE_FLAG_CACHE_TTL);
+  mockCache.expects('put').withArgs(FEATURE_FLAG_REQUESTED_AT, 20, FEATURE_FLAG_CACHE_TTL);
+
+  mockFeatureFlagCache.expects('deleteAll').never();
+  mockFeatureFlagCache.expects('put').withArgs(feature);
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[0]);
+  mockFeatureFlagCache.expects('delete').withArgs(archivedFeatureIds[1]);
+
+  await processor.runUpdateCache();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  mockProcessorEventsEmitter.verify();
+});
\ No newline at end of file
diff --git a/src/__tests__/cache/processor/segementUsersCache/polling.ts b/src/__tests__/cache/processor/segementUsersCache/polling.ts
new file mode 100644
index 0000000..4c37775
--- /dev/null
+++ b/src/__tests__/cache/processor/segementUsersCache/polling.ts
@@ -0,0 +1,112 @@
+import test from 'ava';
+import sino from 'sinon';
+
+import { GetSegmentUsersResponse, SegmentUsers } from '@kenji71089/evaluation';
+
+import {
+  NewSegementUserCacheProcessor,
+  SEGEMENT_USERS_CACHE_TTL,
+  SEGEMENT_USERS_REQUESTED_AT,
+} from '../../../../cache/processor/segmentUsersCacheProcessor';
+import { MockCache } from '../../../mocks/cache';
+import { MockGRPCClient } from '../../../mocks/gprc';
+
+import { Clock } from '../../../../utils/clock';
+import {
+  NewSegmentUsersCache,
+  SEGMENT_USERS_NAME_SPACE as SEGMENT_USERS_CACHE_NAME_SPACE,
+} from '../../../../cache/segmentUsers';
+import { ApiId } from '../../../../objects/apiId';
+import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
+
+test('polling cache', async (t) => {
+  const cache = new MockCache();
+  const grpc = new MockGRPCClient();
+  const eventEmitter = new ProcessorEventsEmitter();
+  const clock = new Clock();
+  const featureTag = 'featureTag';
+
+  const options = {
+    cache,
+    segmentUsersCache: NewSegmentUsersCache({ cache: cache, ttl: SEGEMENT_USERS_CACHE_TTL }),
+    pollingInterval: 1000,
+    grpc,
+    eventEmitter,
+    featureTag: featureTag,
+    clock,
+  };
+
+  const mockClock = sino.mock(clock);
+  const mockClockExpected = mockClock.expects('getTime').atLeast(2);
+  mockClockExpected.onFirstCall().returns(0);
+  mockClockExpected.onSecondCall().returns(2210);
+  mockClockExpected.onThirdCall().returns(4200);
+  mockClockExpected.onCall(3).returns(7000);
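+  // Assumed intent: the stubbed clock drives two polling cycles, so the latencies
+  // expected below are (2210 - 0) / 1000 and (7000 - 4200) / 1000 seconds.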
+
+  const mockCache = sino.mock(cache);
+  const mockCacheGetAllExpect = mockCache
+    .expects('scan')
+    .withArgs(SEGMENT_USERS_CACHE_NAME_SPACE)
+    .twice();
+  mockCacheGetAllExpect.onFirstCall().resolves([]);
+  mockCacheGetAllExpect.resolves([]);
+
+  const mockCacheLastUpdatedExpect = mockCache.expects('get').twice();
+
+  mockCacheLastUpdatedExpect.withArgs(SEGEMENT_USERS_REQUESTED_AT).onFirstCall().resolves(null);
+  mockCacheLastUpdatedExpect.resolves(1100);
+
+  const segementUser1 = new SegmentUsers();
+  segementUser1.setSegmentId('segmentId1');
+
+  const segementUser2 = new SegmentUsers();
+  segementUser2.setSegmentId('segmentId2');
+
+  const response = new GetSegmentUsersResponse();
+  response.setRequestedAt(1200);
+  response.setSegmentUsersList([segementUser1, segementUser2]);
+  response.setForceUpdate(false);
+
+  const responseSize = response.serializeBinary().length;
+
+  const mockGRPCClient = sino.mock(grpc);
+  const mockGRPCClientGetSegmentUsersExpect = mockGRPCClient.expects('getSegmentUsers').twice();
+  mockGRPCClientGetSegmentUsersExpect.onFirstCall().resolves(response);
+  mockGRPCClientGetSegmentUsersExpect.resolves(response);
+
+  mockCache.expects('put').twice().withArgs(SEGEMENT_USERS_REQUESTED_AT, 1200);
+  mockCache
+    .expects('put')
+    .twice()
+    .withArgs(`${SEGMENT_USERS_CACHE_NAME_SPACE}segmentId1`, segementUser1);
+  mockCache
+    .expects('put')
+    .twice()
+    .withArgs(`${SEGMENT_USERS_CACHE_NAME_SPACE}segmentId2`, segementUser2);
+
+  const mockEventEmitter = sino.mock(eventEmitter);
+  mockEventEmitter.expects('emit').twice().withArgs('pushLatencyMetricsEvent', {
+    latency: 2.21,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockEventEmitter.expects('emit').twice().withArgs('pushLatencyMetricsEvent', {
+    latency: 2.8,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockEventEmitter
+    .expects('emit')
+    .twice()
+    .withArgs('pushSizeMetricsEvent', { size: responseSize, apiId: ApiId.GET_SEGMENT_USERS });
+
+  const processor = NewSegementUserCacheProcessor(options);
+
+  processor.start();
+
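+  // Wait long enough for roughly two polling cycles (pollingInterval = 1000 ms) before stopping.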
+  await new Promise((resolve) => setTimeout(resolve, 2100));
+
+  processor.stop();
+
+  mockCache.verify();
+  mockGRPCClient.verify();
+  t.pass();
+});
diff --git a/src/__tests__/cache/processor/segementUsersCache/update.ts b/src/__tests__/cache/processor/segementUsersCache/update.ts
new file mode 100644
index 0000000..882d4f0
--- /dev/null
+++ b/src/__tests__/cache/processor/segementUsersCache/update.ts
@@ -0,0 +1,393 @@
+import anyTest, { TestFn } from 'ava';
+import sino from 'sinon';
+
+import { GetSegmentUsersResponse, SegmentUser, SegmentUsers } from '@kenji71089/evaluation';
+
+import {
+  DefaultSegementUserCacheProcessor,
+  SEGEMENT_USERS_CACHE_TTL,
+  SEGEMENT_USERS_REQUESTED_AT,
+  SegementUsersCacheProcessorOptions,
+} from '../../../../cache/processor/segmentUsersCacheProcessor';
+import { MockCache } from '../../../mocks/cache';
+import { MockGRPCClient } from '../../../mocks/gprc';
+
+import { Clock } from '../../../../utils/clock';
+import {
+  NewSegmentUsersCache,
+} from '../../../../cache/segmentUsers';
+import { ApiId } from '../../../../objects/apiId';
+import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
+
+const test = anyTest as TestFn<{
+  processor: DefaultSegementUserCacheProcessor;
+  options: SegementUsersCacheProcessorOptions;
+  sandbox: sino.SinonSandbox;
+  singleSegementUser: SegmentUsers;
+  deletedSegmentIDs: string[];
+}>;
+
+test.beforeEach((t) => {
+  const sandbox = sino.createSandbox();
+  const cache = new MockCache();
+  const grpc = new MockGRPCClient();
+  const eventEmitter = new ProcessorEventsEmitter();
+  const clock = new Clock();
+  const segmentUsersCache = NewSegmentUsersCache({ cache: cache, ttl: SEGEMENT_USERS_CACHE_TTL });
+  const options = {
+    cache: cache,
+    segmentUsersCache: segmentUsersCache,
+    pollingInterval: 1000,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    clock: clock,
+  } satisfies SegementUsersCacheProcessorOptions;
+
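+  // Fixture: a single SegmentUsers entry ("segment-id" containing "user-id"),
+  // plus segment IDs the server may report as deleted.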
+  const singleSegementUsers = new SegmentUsers();
+  singleSegementUsers.setSegmentId('segment-id');
+  const segementUser = new SegmentUser();
+  segementUser.setId('user-id');
+  singleSegementUsers.getUsersList().push(segementUser);
+  singleSegementUsers.setUpdatedAt(20);
+
+  const deletedSegmentIDs = ['segment-id-3', 'segment-id-4'];
+  const processor = new DefaultSegementUserCacheProcessor(options);
+  t.context = {
+    processor: processor,
+    options: options,
+    sandbox: sandbox,
+    singleSegementUser: singleSegementUsers,
+    deletedSegmentIDs: deletedSegmentIDs,
+  };
+});
+
+test.afterEach((t) => {
+  t.context.sandbox.restore();
+});
+
+test('err: failed while getting segment IDs', async (t) => {
+  const { processor, sandbox, options } = t.context;
+  const internalErr = new Error('internal error');
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').rejects(internalErr);
+ 
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('error', {
+    error: internalErr,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  await processor.runUpdateCache();
+  mockProcessorEventsEmitter.verify();
+  mockSegementUsersCache.verify();
+  t.pass();
+});
+
+test('err: failed while getting requestedAt', async (t) => {
+  const { processor, sandbox, options } = t.context;
+  const internalErr = new Error('internal error');
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves([]);
+
+  const mockCache = sandbox.mock(options.cache);
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).rejects(internalErr);
+
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('error', {
+    error: internalErr,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  await processor.runUpdateCache();
+  mockProcessorEventsEmitter.verify();
+  mockSegementUsersCache.verify();
+  t.pass();
+});
+
+test('err: failed while putting requestedAt, and the forceUpdate is true', async (t) => {
+
+  const { processor, sandbox, options, singleSegementUser } = t.context;
+  const internalErr = new Error('internal error');
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves(['segment-id']);
+
+  const mockCache = sandbox.mock(options.cache);
+
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(10);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([singleSegementUser]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(true);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: ['segment-id'],
+    requestedAt: 10,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').resolves();
+  mockSegementUsersCache.expects('put').withArgs(singleSegementUser).resolves();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).throws(internalErr);
+  mockProcessorEventsEmitter.expects('emit').withArgs('error', {
+    error: internalErr,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  try {
+    await processor.runUpdateCache();
+  } catch (err) {
+    t.fail('should not throw an error');
+  }
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
+
+test('err: failed while putting requestedAt, and the forceUpdate is false', async (t) => {
+
+  const { processor, sandbox, options, singleSegementUser } = t.context;
+  const internalErr = new Error('internal error');
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves(['segment-id']);
+
+  const mockCache = sandbox.mock(options.cache);
+
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(10);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([singleSegementUser]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: ['segment-id'],
+    requestedAt: 10,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').never();
+  mockSegementUsersCache.expects('put').withArgs(singleSegementUser).resolves();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).throws(internalErr);
+  mockProcessorEventsEmitter.expects('emit').withArgs('error', {
+    error: internalErr,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  try {
+    await processor.runUpdateCache();
+  } catch (err) {
+    t.fail('should not throw an error');
+  }
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
+
+test('success: segment IDs not found', async (t) => {
+  const { processor, sandbox, options } = t.context;
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves([]);
+
+  const mockCache = sandbox.mock(options.cache);
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(10);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: [],
+    requestedAt: 10,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').never();
+  mockSegementUsersCache.expects('put').never();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).resolves();
+
+  await processor.runUpdateCache();
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
+
+test('success: requestedAt not found', async (t) => {
+  const { processor, sandbox, options, singleSegementUser } = t.context;
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves(['segment-id']);
+
+  const mockCache = sandbox.mock(options.cache);
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(null);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([singleSegementUser]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: ['segment-id'],
+    requestedAt: 0,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').never();
+  mockSegementUsersCache.expects('put').withArgs(singleSegementUser).resolves();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).resolves();
+
+  await processor.runUpdateCache();
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
+
+test('success: force update is true', async (t) => {
+  const { processor, sandbox, options, singleSegementUser } = t.context;
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves(['segment-id']);
+
+  const mockCache = sandbox.mock(options.cache);
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(10);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([singleSegementUser]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(true);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: ['segment-id'],
+    requestedAt: 10,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').resolves();
+  mockSegementUsersCache.expects('put').withArgs(singleSegementUser).resolves();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).resolves();
+
+  await processor.runUpdateCache();
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
+
+test('success: force update is false', async (t) => {
+  const { processor, sandbox, options, singleSegementUser } = t.context;
+
+  const mockSegementUsersCache = sandbox.mock(options.segmentUsersCache);
+  mockSegementUsersCache.expects('getIds').resolves(['segment-id']);
+
+  const mockCache = sandbox.mock(options.cache);
+  mockCache.expects('get').withArgs(SEGEMENT_USERS_REQUESTED_AT).resolves(10);
+
+  const response = new GetSegmentUsersResponse();
+  response.setSegmentUsersList([singleSegementUser]);
+  response.setRequestedAt(20);
+  response.setForceUpdate(false);
+  response.setDeletedSegmentIdsList([]);
+
+  const mockGRPCClient = sandbox.mock(options.grpc);
+  mockGRPCClient.expects('getSegmentUsers').withArgs({
+    segmentIdsList: ['segment-id'],
+    requestedAt: 10,
+  }).resolves(response);
+  
+  const mockProcessorEventsEmitter = sandbox.mock(options.eventEmitter);
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushLatencyMetricsEvent', {
+    latency: sino.match.any,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+  mockProcessorEventsEmitter.expects('emit').once().withArgs('pushSizeMetricsEvent', {
+    size: response.serializeBinary().length,
+    apiId: ApiId.GET_SEGMENT_USERS,
+  });
+
+  mockSegementUsersCache.expects('deleteAll').never();
+  mockSegementUsersCache.expects('put').withArgs(singleSegementUser).resolves();
+
+  mockCache.expects('put').withArgs(SEGEMENT_USERS_REQUESTED_AT, 20, SEGEMENT_USERS_CACHE_TTL).resolves();
+
+  await processor.runUpdateCache();
+  mockGRPCClient.verify();
+  mockSegementUsersCache.verify();
+  mockCache.verify();
+  mockProcessorEventsEmitter.verify();
+  t.pass();
+});
\ No newline at end of file
diff --git a/src/__tests__/cache/segements_user.ts b/src/__tests__/cache/segements_user.ts
new file mode 100644
index 0000000..65b27dc
--- /dev/null
+++ b/src/__tests__/cache/segements_user.ts
@@ -0,0 +1,80 @@
+import test from 'ava';
+import { SegmentUsers } from '@kenji71089/evaluation';
+import { InMemoryCache } from '../../cache/inMemoryCache';
+import { NewSegmentUsersCache } from '../../cache/segmentUsers';
+
+function createSegmentUsers(
+  id: string,
+): SegmentUsers {
+  const segmentUsers = new SegmentUsers();
+  segmentUsers.setSegmentId(id);
+  return segmentUsers;
+}
+
+test('get should return null if key does not exist', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+
+  const result = await segmentUsersCache.get('nonexistent');
+  t.is(result, null);
+});
+
+test('put should store the value in the cache', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+  const segmentUser = createSegmentUsers('segment1');
+
+  await segmentUsersCache.put(segmentUser);
+  const result = await segmentUsersCache.get('segment1');
+  t.deepEqual(result, segmentUser);
+});
+
+test('delete should remove the value from the cache', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+  const segmentUser = createSegmentUsers('segment1');
+
+  await segmentUsersCache.put(segmentUser);
+  await segmentUsersCache.delete('segment1');
+  const result = await segmentUsersCache.get('segment1');
+  t.is(result, null);
+});
+
+test('deleteAll should remove all values from the cache', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+  const segmentUser1 = createSegmentUsers('segment1');
+  const segmentUser2 = createSegmentUsers('segment2');
+
+  await segmentUsersCache.put(segmentUser1);
+  await segmentUsersCache.put(segmentUser2);
+  await segmentUsersCache.deleteAll();
+  const result1 = await segmentUsersCache.get('segment1');
+  const result2 = await segmentUsersCache.get('segment2');
+  t.is(result1, null);
+  t.is(result2, null);
+});
+
+test('getAll should return all values from the cache', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+  const segmentUser1 = createSegmentUsers('segment1');
+  const segmentUser2 = createSegmentUsers('segment2');
+
+  await segmentUsersCache.put(segmentUser1);
+  await segmentUsersCache.put(segmentUser2);
+  const result = await segmentUsersCache.getAll();
+  t.deepEqual(result, [segmentUser1, segmentUser2]);
+});
+
+test('getIds should return all keys from the cache', async t => {
+  const cache = new InMemoryCache<SegmentUsers>();
+  const segmentUsersCache = NewSegmentUsersCache({ cache, ttl: 1000 });
+  const segmentUser1 = createSegmentUsers('segment1');
+  const segmentUser2 = createSegmentUsers('segment2');
+
+  await segmentUsersCache.put(segmentUser1);
+  await segmentUsersCache.put(segmentUser2);
+  const result = await segmentUsersCache.getIds();
+  t.deepEqual(result, ['segment1', 'segment2']);
+});
diff --git a/src/__tests__/client_local_evaluation.ts b/src/__tests__/client_local_evaluation.ts
new file mode 100644
index 0000000..8b994ee
--- /dev/null
+++ b/src/__tests__/client_local_evaluation.ts
@@ -0,0 +1,1293 @@
+import anyTest, { TestFn } from 'ava';
+import sino from 'sinon';
+
+import {
+  createFeature,
+  Feature,
+  SegmentUser,
+  SegmentUsers,
+  User,
+  createPrerequisite,
+  Strategy,
+  Clause,
+  createUser,
+  createSegmentUser,
+} from '@kenji71089/evaluation';
+
+import { LocalEvaluator } from '../evaluator/local';
+import {
+  NewSegementUserCacheProcessor,
+  SEGEMENT_USERS_CACHE_TTL,
+  SegementUsersCacheProcessor,
+} from '../cache/processor/segmentUsersCacheProcessor';
+import {
+  FEATURE_FLAG_CACHE_TTL,
+  FeatureFlagProcessor,
+  NewFeatureFlagProcessor,
+} from '../cache/processor/featureFlagCacheProcessor';
+import { MockCache } from './mocks/cache';
+import { MockGRPCClient } from './mocks/gprc';
+import { ProcessorEventsEmitter } from '../processorEventsEmitter';
+import { Clock } from '../utils/clock';
+import { NewSegmentUsersCache, SegmentUsersCache } from '../cache/segmentUsers';
+import { NewFeatureCache, FeaturesCache } from '../cache/features';
+import { ApiId } from '@kenji71089/evaluation/lib/proto/event/client/event_pb';
+import { Config, DefaultLogger } from '../index';
+import { APIClient } from '../api/client';
+import { EventStore } from '../stores/EventStore';
+import { Evaluation } from '../objects/evaluation';
+import { BKTEvaluationDetails } from '../evaluationDetails';
+import { BKTValue } from '../types';
+import { BKTClientImpl } from '../client';
+
+const test = anyTest as TestFn<{
+  sandbox: sino.SinonSandbox;
+  evaluator: LocalEvaluator;
+  cache: MockCache;
+  grpc: MockGRPCClient;
+  eventEmitter: ProcessorEventsEmitter;
+  clock: Clock;
+  segmentUsersCache: SegmentUsersCache;
+  featureFlagCache: FeaturesCache;
+
+  featureFlagProcessor: FeatureFlagProcessor;
+  segementUsersCacheProcessor: SegementUsersCacheProcessor;
+
+  sdkInstance: BKTClientImpl;
+
+  data: {
+    feature3: Feature;
+    feature4: Feature;
+    ftBoolean: Feature;
+    ftInt: Feature;
+    ftFloat: Feature;
+    ftString: Feature;
+    ftJSON: Feature;
+
+    segmentUser2: SegmentUsers;
+
+    user1: User;
+    user2: User;
+  };
+}>;
+
+test.beforeEach((t) => {
+  const sandbox = sino.createSandbox();
+  t.context.sandbox = sandbox;
+
+  const user1 = createUser('user-id-1', {});
+  const user2 = createUser('user-id-2', {});
+
+  const sgUser2 = createSegmentUser('user-id-2', 'segment-id-2', SegmentUser.State.INCLUDED);
+  const sgUser3 = createSegmentUser('user-id-3', 'segment-id-2', SegmentUser.State.INCLUDED);
+
+  const segmentUsers2 = new SegmentUsers();
+  segmentUsers2.setSegmentId('segment-id-2');
+  segmentUsers2.setUsersList([sgUser2, sgUser3]);
+
+  const feature3 = createFeature({
+    id: 'feature-id-3',
+    version: 0,
+    name: 'feature3',
+    enabled: true,
+    tagList: ['server'],
+    prerequisitesList: [createPrerequisite('feature-id-4', 'variation-true-id')],
+    rules: [
+      {
+        id: '',
+        attribute: '',
+        fixedVariation: '',
+        operator: Clause.Operator.SEGMENT,
+        values: [segmentUsers2.getSegmentId()],
+      },
+    ],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const feature4 = createFeature({
+    id: 'feature-id-4',
+    version: 0,
+    name: 'feature4',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.BOOLEAN,
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const ftBoolean = createFeature({
+    id: 'feature-id-boolean',
+    version: 0,
+    name: 'feature-boolean',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.BOOLEAN,
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const ftInt = createFeature({
+    id: 'feature-id-int',
+    version: 0,
+    name: 'feature-int',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.NUMBER,
+    variations: [
+      {
+        id: 'variation-int10-id',
+        name: 'int10-name',
+        value: '10',
+        description: 'variation-int10-id',
+      },
+      {
+        id: 'variation-int20-id',
+        name: 'int20-name',
+        value: '20',
+        description: 'variation-int20-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-int10-id',
+    },
+    offVariation: 'variation-int20-id',
+  });
+
+  const ftFloat = createFeature({
+    id: 'feature-id-float',
+    version: 0,
+    name: 'feature-float',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.NUMBER,
+    variations: [
+      {
+        id: 'variation-float10-id',
+        name: 'float10-name',
+        value: '10.11',
+        description: 'variation-float10-id',
+      },
+      {
+        id: 'variation-float20-id',
+        name: 'float20-name',
+        value: '20.11',
+        description: 'variation-float20-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-float10-id',
+    },
+    offVariation: 'variation-float20-id',
+  });
+
+  const ftString = createFeature({
+    id: 'feature-id-string',
+    version: 0,
+    name: 'feature-string',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.STRING,
+    variations: [
+      {
+        id: 'variation-string10-id',
+        name: 'string10-name',
+        value: 'value 10',
+        description: 'variation-string10-id',
+      },
+      {
+        id: 'variation-string20-id',
+        name: 'string20-name',
+        value: 'value 20',
+        description: 'variation-string20-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-string10-id',
+    },
+    offVariation: 'variation-string20-id',
+  });
+
+  const ftJSON = createFeature({
+    id: 'feature-id-json',
+    version: 0,
+    name: 'feature-json',
+    enabled: true,
+    tagList: ['server'],
+    variationType: Feature.VariationType.JSON,
+    variations: [
+      {
+        id: 'variation-json1-id',
+        name: 'json1-name',
+        value: '{"Str": "str1", "Int": 1}',
+        description: 'variation-json1-id',
+      },
+      {
+        id: 'variation-json2-id',
+        name: 'json2-name',
+        value: '{"Str": "str2", "Int": 2}',
+        description: 'variation-json2-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-json1-id',
+    },
+    //TODO: is this correct? I think it should be a string `variation-json2-id`
+    offVariation: '{"Str": "str2", "Int": 2}',
+  });
+
+  const tag = 'server';
+  const cache = new MockCache();
+  const grpc = new MockGRPCClient();
+  const eventEmitter = new ProcessorEventsEmitter();
+  const clock = new Clock();
+  const segmentUsersCache = NewSegmentUsersCache({ cache: cache, ttl: SEGEMENT_USERS_CACHE_TTL });
+  const featureFlagCache = NewFeatureCache({ cache: cache, ttl: FEATURE_FLAG_CACHE_TTL });
+
+  const config = {
+    host: 'api.bucketeer.io',
+    token: 'api_key_value',
+    tag: 'server',
+    logger: new DefaultLogger('error'),
+    cachePollingInterval: 1000,
+    enableLocalEvaluation: true,
+  } satisfies Config;
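+  // With enableLocalEvaluation set, the client is expected to evaluate flags against
+  // the locally cached feature/segment data kept fresh by the processors below.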
+
+  const featureFlagProcessor = NewFeatureFlagProcessor({
+    cache: cache,
+    featureFlagCache: featureFlagCache,
+    pollingInterval: config.cachePollingInterval!,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    featureTag: config.tag,
+    clock: new Clock(),
+  });
+
+  const segementUsersCacheProcessor = NewSegementUserCacheProcessor({
+    cache: cache,
+    segmentUsersCache: segmentUsersCache,
+    pollingInterval: config.cachePollingInterval!,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    clock: new Clock(),
+  });
+
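+  // The local evaluator resolves flags for the 'server' tag from the feature and segment-user caches.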
+  const evaluator = new LocalEvaluator({
+    tag: tag,
+    featuresCache: featureFlagCache,
+    segementUsersCache: segmentUsersCache,
+  });
+
+  const bktOptions = {
+    cache: cache,
+    apiClient: new APIClient(config.host, config.token),
+    eventStore: new EventStore(),
+    localEvaluator: evaluator,
+    featureFlagProcessor: featureFlagProcessor,
+    segementUsersCacheProcessor: segementUsersCacheProcessor,
+    eventEmitter: eventEmitter,
+  };
+
+  const sdkInstance = new BKTClientImpl(config, bktOptions);
+
+  t.context = {
+    data: {
+      feature3: feature3,
+      feature4: feature4,
+      ftBoolean: ftBoolean,
+      ftInt: ftInt,
+      ftFloat: ftFloat,
+      ftString: ftString,
+      ftJSON: ftJSON,
+
+      segmentUser2: segmentUsers2,
+      user1: user1,
+      user2: user2,
+    },
+    evaluator: evaluator,
+    cache: cache,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    clock: clock,
+    segmentUsersCache: segmentUsersCache,
+    featureFlagCache: featureFlagCache,
+    featureFlagProcessor: featureFlagProcessor,
+    segementUsersCacheProcessor: segementUsersCacheProcessor,
+    sandbox: sandbox,
+    sdkInstance: sdkInstance,
+  };
+});
+
+test.afterEach((t) => {
+  t.context.sandbox.restore();
+  t.context.sdkInstance.destroy();
+});
+
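+// Each variation test stubs the feature-flag cache and asserts the events emitted through
+// the ProcessorEventsEmitter (evaluation, default-evaluation, error, and latency metrics).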
+test('booleanVariation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftBoolean } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftBoolean.getId() });
+
+  const result = await sdkInstance.booleanVariation(sdkUser, ftBoolean.getId(), false);
+  t.is(result, false);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('booleanVariation - success: boolean variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftBoolean } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).resolves(ftBoolean);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-boolean:0:user-id-1',
+    featureId: ftBoolean.getId(),
+    featureVersion: ftBoolean.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftBoolean.getVariationsList()[0].getId(),
+    variationName: ftBoolean.getVariationsList()[0].getName(),
+    variationValue: ftBoolean.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.booleanVariation(sdkUser, ftBoolean.getId(), false);
+  t.is(result, true);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('booleanVariationDetails - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftBoolean } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftBoolean.getId() });
+
+  const evaluationDetails = {
+    featureId: ftBoolean.getId(),
+    featureVersion: ftBoolean.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: false,
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<boolean>;
+
+  const result = await sdkInstance.booleanVariationDetails(sdkUser, ftBoolean.getId(), false);
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('booleanVariationDetails - success: boolean variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftBoolean } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).resolves(ftBoolean);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-boolean:0:user-id-1',
+    featureId: ftBoolean.getId(),
+    featureVersion: ftBoolean.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftBoolean.getVariationsList()[0].getId(),
+    variationName: ftBoolean.getVariationsList()[0].getName(),
+    variationValue: ftBoolean.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const evaluationDetails = {
+    featureId: ftBoolean.getId(),
+    featureVersion: ftBoolean.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftBoolean.getVariationsList()[0].getId(),
+    variationName: ftBoolean.getVariationsList()[0].getName(),
+    variationValue: true,
+    reason: 'DEFAULT',
+  } satisfies BKTEvaluationDetails<boolean>;
+
+  const result = await sdkInstance.booleanVariationDetails(sdkUser, ftBoolean.getId(), false);
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('numberVariation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftInt } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftInt.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftInt.getId() });
+
+  const result = await sdkInstance.numberVariation(sdkUser, ftInt.getId(), 1);
+  t.is(result, 1);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('numberVariation - success: number variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftInt } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftInt.getId()).resolves(ftInt);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-int:0:user-id-1',
+    featureId: ftInt.getId(),
+    featureVersion: ftInt.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftInt.getVariationsList()[0].getId(),
+    variationName: ftInt.getVariationsList()[0].getName(),
+    variationValue: ftInt.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.numberVariation(sdkUser, ftInt.getId(), 1);
+  t.is(result, 10);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('numberVariation - success: number variation (float)', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftFloat } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftFloat.getId()).resolves(ftFloat);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-float:0:user-id-1',
+    featureId: ftFloat.getId(),
+    featureVersion: ftFloat.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftFloat.getVariationsList()[0].getId(),
+    variationName: ftFloat.getVariationsList()[0].getName(),
+    variationValue: ftFloat.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.numberVariation(sdkUser, ftFloat.getId(), 1);
+  t.is(result, 10.11);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('numberVariationDetails - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftInt } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftInt.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftInt.getId() });
+
+  const evaluationDetails = {
+    featureId: ftInt.getId(),
+    featureVersion: ftInt.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: 1,
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<number>;
+
+  const result = await sdkInstance.numberVariationDetails(sdkUser, ftInt.getId(), 1);
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('numberVariationDetails - success: number variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftInt } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftInt.getId()).resolves(ftInt);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-int:0:user-id-1',
+    featureId: ftInt.getId(),
+    featureVersion: ftInt.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftInt.getVariationsList()[0].getId(),
+    variationName: ftInt.getVariationsList()[0].getName(),
+    variationValue: ftInt.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const evaluationDetails = {
+    featureId: ftInt.getId(),
+    featureVersion: ftInt.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftInt.getVariationsList()[0].getId(),
+    variationName: ftInt.getVariationsList()[0].getName(),
+    variationValue: 10,
+    reason: 'DEFAULT',
+  } satisfies BKTEvaluationDetails<number>;
+
+  const result = await sdkInstance.numberVariationDetails(sdkUser, ftInt.getId(), 1);
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('numberVariationDetails - success: number variation (float)', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftFloat } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftFloat.getId()).resolves(ftFloat);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-float:0:user-id-1',
+    featureId: ftFloat.getId(),
+    featureVersion: ftFloat.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftFloat.getVariationsList()[0].getId(),
+    variationName: ftFloat.getVariationsList()[0].getName(),
+    variationValue: ftFloat.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const evaluationDetails = {
+    featureId: ftFloat.getId(),
+    featureVersion: ftFloat.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftFloat.getVariationsList()[0].getId(),
+    variationName: ftFloat.getVariationsList()[0].getName(),
+    variationValue: 10.11,
+    reason: 'DEFAULT',
+  } satisfies BKTEvaluationDetails<number>;
+
+  const result = await sdkInstance.numberVariationDetails(sdkUser, ftFloat.getId(), 1);
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('stringVariation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftString } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftString.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftString.getId() });
+
+  const result = await sdkInstance.stringVariation(sdkUser, ftString.getId(), 'default');
+  t.is(result, 'default');
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('stringVariation - success: string variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftString } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftString.getId()).resolves(ftString);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-string:0:user-id-1',
+    featureId: ftString.getId(),
+    featureVersion: ftString.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftString.getVariationsList()[0].getId(),
+    variationName: ftString.getVariationsList()[0].getName(),
+    variationValue: ftString.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.stringVariation(sdkUser, ftString.getId(), 'default');
+  t.is(result, 'value 10');
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('stringVariationDetails - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftString } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftString.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftString.getId() });
+
+  const evaluationDetails = {
+    featureId: ftString.getId(),
+    featureVersion: ftString.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: 'default',
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<string>;
+
+  const result = await sdkInstance.stringVariationDetails(sdkUser, ftString.getId(), 'default');
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('stringVariationDetails - success: string variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftString } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftString.getId()).resolves(ftString);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-string:0:user-id-1',
+    featureId: ftString.getId(),
+    featureVersion: ftString.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftString.getVariationsList()[0].getId(),
+    variationName: ftString.getVariationsList()[0].getName(),
+    variationValue: ftString.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const evaluationDetails = {
+    featureId: ftString.getId(),
+    featureVersion: ftString.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftString.getVariationsList()[0].getId(),
+    variationName: ftString.getVariationsList()[0].getName(),
+    variationValue: 'value 10',
+    reason: 'DEFAULT',
+  } satisfies BKTEvaluationDetails<string>;
+
+  const result = await sdkInstance.stringVariationDetails(sdkUser, ftString.getId(), 'default');
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('jsonVariation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftJSON } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftJSON.getId() });
+
+  const result = await sdkInstance.getJsonVariation(sdkUser, ftJSON.getId(), {});
+  t.deepEqual(result, {});
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('jsonVariation - success: json variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftJSON } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).resolves(ftJSON);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-json:0:user-id-1',
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftJSON.getVariationsList()[0].getId(),
+    variationName: ftJSON.getVariationsList()[0].getName(),
+    variationValue: ftJSON.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.getJsonVariation(sdkUser, ftJSON.getId(), {});
+  t.deepEqual(result, { Str: 'str1', Int: 1 });
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('objectVariation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftJSON } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftJSON.getId() });
+
+  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), {test: 'test1'});
+  t.deepEqual(result, {test: 'test1'});
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('objectVariation - success: json variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftJSON } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).resolves(ftJSON);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-json:0:user-id-1',
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftJSON.getVariationsList()[0].getId(),
+    variationName: ftJSON.getVariationsList()[0].getName(),
+    variationValue: ftJSON.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), {test: 'test1'});
+  t.deepEqual(result, { Str: 'str1', Int: 1 });
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('objectVariationDetail - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftJSON } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftJSON.getId() });
+
+  const evaluationDetails = {
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: {test: 'test1'},
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<BKTValue>;
+
+  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {test: 'test1'});
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('objectVariationDetail - success: object variation', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter } = t.context;
+  const { user1, ftJSON } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftJSON.getId()).resolves(ftJSON);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-json:0:user-id-1',
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftJSON.getVariationsList()[0].getId(),
+    variationName: ftJSON.getVariationsList()[0].getName(),
+    variationValue: ftJSON.getVariationsList()[0].getValue(),
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const evaluationDetails = {
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: ftJSON.getVariationsList()[0].getId(),
+    variationName: ftJSON.getVariationsList()[0].getName(),
+    variationValue: { Str: 'str1', Int: 1 },
+    reason: 'DEFAULT',
+  } satisfies BKTEvaluationDetails<BKTValue>;
+
+  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {test: 'test1'});
+  t.deepEqual(result, evaluationDetails);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('getEvaluation - err: internal error', async (t) => {
+  const { data, featureFlagCache, eventEmitter, sdkInstance } = t.context;
+  const { user1, ftBoolean } = data;
+
+  const internalErr = new Error('internal error');
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).rejects(internalErr);
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.getEvaluation(sdkUser, ftBoolean.getId());
+  t.is(result, null);
+
+  featureFlagCacheMock.verify();
+  eventProcessorMock.verify();
+  t.pass();
+});
+
+test('getEvaluation - success', async (t) => {
+  const { data, featureFlagCache, sdkInstance, eventEmitter, segmentUsersCache } = t.context;
+  const { user1, feature3, feature4, segmentUser2 } = data;
+
+  const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
+  featureFlagCacheMock.expects('get').once().withExactArgs(feature3.getId()).resolves(feature3);
+  featureFlagCacheMock.expects('get').once().withExactArgs(feature4.getId()).resolves(feature4);
+  
+  const segmentUsersCacheMock = t.context.sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').once().withExactArgs(segmentUser2.getSegmentId()).resolves(segmentUser2);
+
+  const sdkUser = {
+    id: user1.getId(),
+    data: {},
+  };
+
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  const evaluation = {
+    id: 'feature-id-3:0:user-id-1',
+    featureId: 'feature-id-3',
+    featureVersion: 0,
+    userId: 'user-id-1',
+    variationId: 'variation-true-id',
+    variationName: feature3.getVariationsList()[0].getName(),
+    variationValue: 'true',
+    reason: { type: 'DEFAULT', ruleId: '' },
+  } satisfies Evaluation;
+
+  eventProcessorMock
+    .expects('emit')
+    .once()
+    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any, apiId: ApiId.SDK_GET_VARIATION });
+
+  const result = await sdkInstance.getEvaluation(sdkUser, feature3.getId());
+  t.deepEqual(result, evaluation);
+
+  featureFlagCacheMock.verify();
+  t.pass();
+});
+
+test('sdk destroy - success', async (t) => {
+  const { sdkInstance, eventEmitter, featureFlagProcessor, segementUsersCacheProcessor } = t.context;
+  const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
+  eventProcessorMock.expects('close').once().resolves();
+
+  const featureFlagProcessorCacheMock = t.context.sandbox.mock(featureFlagProcessor);
+  featureFlagProcessorCacheMock.expects('stop').once().resolves();
+
+  const segmentUsersCacheProcessorMock = t.context.sandbox.mock(segementUsersCacheProcessor);
+  segmentUsersCacheProcessorMock.expects('stop').once().resolves();
+
+  await sdkInstance.destroy();
+  eventProcessorMock.verify();
+  featureFlagProcessorCacheMock.verify();
+  segmentUsersCacheProcessorMock.verify();
+  t.pass();
+});
\ No newline at end of file
diff --git a/src/__tests__/evaluator/evaluator.ts b/src/__tests__/evaluator/evaluator.ts
new file mode 100644
index 0000000..9fda41b
--- /dev/null
+++ b/src/__tests__/evaluator/evaluator.ts
@@ -0,0 +1,500 @@
+import anyTest, { TestFn } from 'ava';
+import sino from 'sinon';
+
+import {
+  createFeature,
+  Feature,
+  SegmentUser,
+  SegmentUsers,
+  User,
+  createPrerequisite,
+  Strategy,
+  Clause,
+  createUser,
+  createSegmentUser,
+} from '@kenji71089/evaluation';
+
+import { LocalEvaluator } from '../../evaluator/local';
+import { SEGEMENT_USERS_CACHE_TTL } from '../../cache/processor/segmentUsersCacheProcessor';
+import { FEATURE_FLAG_CACHE_TTL } from '../../cache/processor/featureFlagCacheProcessor';
+import { MockCache } from '../mocks/cache';
+import { MockGRPCClient } from '../mocks/gprc';
+
+import { Clock } from '../../utils/clock';
+import { NewSegmentUsersCache, SegmentUsersCache } from '../../cache/segmentUsers';
+import { NewFeatureCache, FeaturesCache } from '../../cache/features';
+import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
+
+const test = anyTest as TestFn<{
+  sandbox: sino.SinonSandbox;
+  evaluator: LocalEvaluator;
+  cache: MockCache;
+  grpc: MockGRPCClient;
+  eventEmitter: ProcessorEventsEmitter;
+  clock: Clock;
+  segmentUsersCache: SegmentUsersCache;
+  featureFlagCache: FeaturesCache;
+  data: {
+    feature1: Feature;
+    feature2: Feature;
+    feature3: Feature;
+    feature4: Feature;
+    feature5: Feature;
+
+    segmentUser1: SegmentUsers;
+    segmentUser2: SegmentUsers;
+
+    user1: User;
+    user2: User;
+  };
+}>;
+
+test.beforeEach((t) => {
+  const sandbox = sino.createSandbox();
+  t.context.sandbox = sandbox;
+
+  const user1 = createUser('user-id-1', {});
+  const user2 = createUser('user-id-2', {});
+
+  const sgUser1 = createSegmentUser('user-id-1', '', SegmentUser.State.INCLUDED);
+  const sgUser2 = createSegmentUser('user-id-2', 'segment-id-2', SegmentUser.State.INCLUDED);
+  const sgUser3 = createSegmentUser('user-id-3', 'segment-id-2', SegmentUser.State.INCLUDED);
+
+  const segmentUsers1 = new SegmentUsers();
+  segmentUsers1.setSegmentId('segment-id-1');
+  segmentUsers1.setUsersList([sgUser1]);
+
+  const segmentUsers2 = new SegmentUsers();
+  segmentUsers2.setSegmentId('segment-id-2');
+  segmentUsers2.setUsersList([sgUser2, sgUser3]);
+
+  const feature1 = createFeature({
+    id: 'feature-id-1',
+    version: 0,
+    name: 'feature1',
+    enabled: true,
+    tagList: ['server'],
+    prerequisitesList: [createPrerequisite('feature-id-2', 'variation-true-id')],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const feature2 = createFeature({
+    id: 'feature-id-2',
+    version: 0,
+    name: 'feature2',
+    enabled: true,
+    tagList: ['server'],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const feature3 = createFeature({
+    id: 'feature-id-3',
+    version: 0,
+    name: 'feature3',
+    enabled: true,
+    tagList: ['server'],
+    prerequisitesList: [createPrerequisite('feature-id-4', 'variation-true-id')],
+    rules: [
+      {
+        id: '',
+        attribute: '',
+        fixedVariation: '',
+        operator: Clause.Operator.SEGMENT,
+        values: [segmentUsers1.getSegmentId()],
+      },
+      {
+        id: '',
+        attribute: '',
+        fixedVariation: '',
+        operator: Clause.Operator.SEGMENT,
+        values: [segmentUsers2.getSegmentId()],
+      },
+    ],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const feature4 = createFeature({
+    id: 'feature-id-4',
+    version: 0,
+    name: 'feature4',
+    enabled: false,
+    tagList: ['server'],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const feature5 = createFeature({
+    id: 'feature-id-5',
+    version: 0,
+    name: 'feature5',
+    enabled: true,
+    tagList: ['server'],
+    variations: [
+      {
+        id: 'variation-true-id',
+        name: 'true-name',
+        value: 'true',
+        description: 'variation-true-id',
+      },
+      {
+        id: 'variation-false-id',
+        name: 'false-name',
+        value: 'false',
+        description: 'variation-false-id',
+      },
+    ],
+    rules: [
+      {
+        id: 'clause-id',
+        attribute: '',
+        operator: Clause.Operator.SEGMENT,
+        values: [segmentUsers2.getSegmentId()],
+        fixedVariation: 'variation-true-id',
+      },
+    ],
+    defaultStrategy: {
+      type: Strategy.Type.FIXED,
+      variation: 'variation-true-id',
+    },
+    offVariation: 'variation-false-id',
+  });
+
+  const tag = 'server';
+  const cache = new MockCache();
+  const grpc = new MockGRPCClient();
+  const eventEmitter = new ProcessorEventsEmitter();
+  const clock = new Clock();
+  const segmentUsersCache = NewSegmentUsersCache({ cache: cache, ttl: SEGEMENT_USERS_CACHE_TTL });
+  const featureFlagCache = NewFeatureCache({ cache: cache, ttl: FEATURE_FLAG_CACHE_TTL });
+  const evaluator = new LocalEvaluator({
+    tag: tag,
+    featuresCache: featureFlagCache,
+    segementUsersCache: segmentUsersCache,
+  });
+
+  t.context = {
+    data: {
+      feature1: feature1,
+      feature2: feature2,
+      feature3: feature3,
+      feature4: feature4,
+      feature5: feature5,
+      segmentUser1: segmentUsers1,
+      segmentUser2: segmentUsers2,
+      user1: user1,
+      user2: user2,
+    },
+    evaluator: evaluator,
+    cache: cache,
+    grpc: grpc,
+    eventEmitter: eventEmitter,
+    clock: clock,
+    segmentUsersCache: segmentUsersCache,
+    featureFlagCache: featureFlagCache,
+    sandbox: sandbox,
+  };
+});
+
+test.afterEach((t) => {
+  t.context.sandbox.restore();
+});
+
+test('evaluate | err: failed to get feature flag from cache', async (t) => {
+  const { evaluator, featureFlagCache } = t.context;
+  const { feature1 } = t.context.data;
+  const err = new Error('internal error');
+  const mock = t.context.sandbox.mock(featureFlagCache).expects('get');
+  mock.rejects(err);
+  await evaluator
+    .evaluate(
+      {
+        id: 'id',
+        data: {},
+      },
+      feature1.getId(),
+    )
+    .catch((e) => {
+      t.is(e, err);
+    });
+  mock.verify();
+  t.pass();
+});
+
+test('evaluate | err: failed to get prerequisite feature flag from cache', async (t) => {
+  const { evaluator, featureFlagCache, sandbox } = t.context;
+  const { feature1, feature2 } = t.context.data;
+  const err = new Error('internal error');
+  const mock = sandbox.mock(featureFlagCache);
+  mock.expects('get').withArgs(feature2.getId()).exactly(1).rejects(err);
+  mock.expects('get').withArgs(feature1.getId()).exactly(1).resolves(feature1);
+  
+  await evaluator
+    .evaluate(
+      {
+        id: 'id',
+        data: {},
+      },
+      feature1.getId(),
+    )
+    .catch((e) => {
+      t.is(e, err);
+    });
+
+  mock.verify();
+  t.pass();
+});
+
+test('evaluate | err: failed to get segment from cache', async (t) => {
+  const { evaluator, featureFlagCache, segmentUsersCache, sandbox } = t.context;
+  const { feature5, segmentUser2 } = t.context.data;
+  const err = new Error('internal error');
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature5.getId()).resolves(feature5);
+  
+  const segmentUsersCacheMock = sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser2.getSegmentId()).rejects(err);
+  
+  await evaluator
+    .evaluate(
+      {
+        id: 'id',
+        data: {},
+      },
+      feature5.getId(),
+    )
+    .catch((e) => {
+      t.is(e, err);
+    });
+
+  featuresCacheMock.verify();
+  segmentUsersCacheMock.verify();
+  
+  t.pass();
+});
+
+test('evaluate | success: with no prerequisites', async (t) => {
+  const { evaluator, featureFlagCache, segmentUsersCache, sandbox } = t.context;
+  const { feature5, segmentUser2 } = t.context.data;
+  
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature5.getId()).resolves(feature5);
+  
+  const segmentUsersCacheMock = sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser2.getSegmentId()).resolves(segmentUser2);
+  
+  const evaluation = await evaluator
+    .evaluate(
+      {
+        id: 'user-id-1',
+        data: {},
+      },
+      feature5.getId(),
+    );
+
+  t.deepEqual(evaluation, {
+    id: 'feature-id-5:0:user-id-1',
+    featureId: 'feature-id-5',
+    featureVersion: 0,
+    userId: 'user-id-1',
+    variationId: 'variation-true-id',
+    reason: {
+      ruleId: '',
+      type: 'DEFAULT',
+    },
+    variationValue: 'true',
+    variationName: 'true-name',
+  });
+  featuresCacheMock.verify();
+  segmentUsersCacheMock.verify();
+  
+  t.pass();
+});
+
+test('evaluate | success: with prerequisite feature disabled (It must return the off variation)', async (t) => {
+  const { evaluator, featureFlagCache, segmentUsersCache, sandbox } = t.context;
+  const { feature3, feature4, segmentUser1, segmentUser2 } = t.context.data;
+  
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature3.getId()).resolves(feature3);
+  featuresCacheMock.expects('get').withArgs(feature4.getId()).resolves(feature4);
+
+  const segmentUsersCacheMock = sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser1.getSegmentId()).resolves(segmentUser1);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser2.getSegmentId()).resolves(segmentUser2);
+
+  const evaluation = await evaluator
+    .evaluate(
+      {
+        id: 'user-id-1',
+        data: {},
+      },
+      feature3.getId(),
+    );
+
+  t.deepEqual(evaluation, {
+    id: 'feature-id-3:0:user-id-1',
+    featureId: 'feature-id-3',
+    featureVersion: 0,
+    userId: 'user-id-1',
+    variationId: 'variation-false-id',
+    reason: {
+      ruleId: '',
+      type: 'PREREQUISITE',
+    },
+    variationValue: 'false',
+    variationName: 'false-name',
+  });
+
+  featuresCacheMock.verify();
+  segmentUsersCacheMock.verify();
+
+  t.pass();
+});
+
+test('evaluate | success: with prerequisite feature enabled (It must return the default strategy variation)', async (t) => {
+  const { evaluator, featureFlagCache, sandbox } = t.context;
+  const { feature1, feature2 } = t.context.data;
+
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature1.getId()).resolves(feature1);
+  featuresCacheMock.expects('get').withArgs(feature2.getId()).resolves(feature2);
+
+  const evaluation = await evaluator
+    .evaluate(
+      {
+        id: 'user-id-1',
+        data: {},
+      },
+      feature1.getId(),
+    );
+
+  t.deepEqual(evaluation, {
+    id: 'feature-id-1:0:user-id-1',
+    featureId: 'feature-id-1',
+    featureVersion: 0,
+    userId: 'user-id-1',
+    variationId: 'variation-true-id',
+    reason: {
+      ruleId: '',
+      type: 'DEFAULT',
+    },
+    variationValue: 'true',
+    variationName: 'true-name',
+  });
+
+  featuresCacheMock.verify();
+
+  t.pass();
+});
+
+test('evaluate | success: with segment user', async (t) => {
+  const { evaluator, featureFlagCache, segmentUsersCache, sandbox } = t.context;
+  const { feature5, segmentUser2 } = t.context.data;
+
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature5.getId()).resolves(feature5);
+
+  const segmentUsersCacheMock = sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser2.getSegmentId()).resolves(segmentUser2);
+
+  const evaluation = await evaluator
+    .evaluate(
+      {
+        id: 'user-id-2',
+        data: {},
+      },
+      feature5.getId(),
+    );
+
+  t.deepEqual(evaluation, {
+    id: 'feature-id-5:0:user-id-2',
+    featureId: 'feature-id-5',
+    featureVersion: 0,
+    userId: 'user-id-2',
+    variationId: 'variation-true-id',
+    reason: {
+      // TODO: Check this again. The Go SDK test has a different value for this ruleId (it is empty).
+      ruleId: 'clause-id',
+      type: 'RULE',
+    },
+    variationValue: 'true',
+    variationName: 'true-name',
+  });
+
+  featuresCacheMock.verify();
+  segmentUsersCacheMock.verify();
+
+  t.pass();
+});
\ No newline at end of file
diff --git a/src/__tests__/event_emiter.ts b/src/__tests__/event_emiter.ts
new file mode 100644
index 0000000..8fe6a3c
--- /dev/null
+++ b/src/__tests__/event_emiter.ts
@@ -0,0 +1,101 @@
+import test from 'ava';
+import sinon from 'sinon';
+import { ProcessorEventsEmitter } from '../processorEventsEmitter';
+import { User } from '../objects/user';
+import { Evaluation } from '../objects/evaluation';
+import { ApiId } from '../objects/apiId';
+
+test('should emit pushEvaluationEvent', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const user: User = { id: 'user1', data: {} };
+
+  const evaluation: Evaluation = {
+    id: 'eval1',
+    featureId: 'feature1',
+    featureVersion: 1,
+    userId: 'user1',
+    variationId: 'var1',
+    variationName: 'variation1',
+    variationValue: 'value1',
+  } satisfies Evaluation;
+  const listener = sinon.spy();
+
+  emitter.on('pushEvaluationEvent', listener);
+  emitter.emit('pushEvaluationEvent', { user, evaluation });
+
+  t.true(listener.calledOnce);
+  t.deepEqual(listener.firstCall.args[0], { user, evaluation });
+});
+
+test('should emit pushLatencyMetricsEvent', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const latency = 123;
+  const apiId = ApiId.GET_EVALUATION;
+  const listener = sinon.spy();
+
+  emitter.on('pushLatencyMetricsEvent', listener);
+  emitter.emit('pushLatencyMetricsEvent', { latency, apiId });
+
+  t.true(listener.calledOnce);
+  t.deepEqual(listener.firstCall.args[0], { latency, apiId });
+});
+
+test('should emit pushSizeMetricsEvent', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const size = 456;
+  const apiId = ApiId.GET_EVALUATION;
+  const listener = sinon.spy();
+
+  emitter.on('pushSizeMetricsEvent', listener);
+  emitter.emit('pushSizeMetricsEvent', { size, apiId });
+
+  t.true(listener.calledOnce);
+  t.deepEqual(listener.firstCall.args[0], { size, apiId });
+});
+
+test('should emit error event', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const error = new Error('Test error');
+  const apiId = ApiId.GET_EVALUATION;
+  const listener = sinon.spy();
+
+  emitter.on('error', listener);
+  emitter.emit('error', { error, apiId });
+
+  t.true(listener.calledOnce);
+  t.deepEqual(listener.firstCall.args[0], { error, apiId });
+});
+
+test('should emit pushDefaultEvaluationEvent', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const user: User = { id: 'user2', data: {} };
+  const featureId = 'feature1';
+  const listener = sinon.spy();
+
+  emitter.on('pushDefaultEvaluationEvent', listener);
+  emitter.emit('pushDefaultEvaluationEvent', { user, featureId });
+
+  t.true(listener.calledOnce);
+  t.deepEqual(listener.firstCall.args[0], { user, featureId });
+});
+
+test('should remove all listeners on close', (t) => {
+  const emitter = new ProcessorEventsEmitter();
+  const listener = sinon.spy();
+
+  emitter.on('pushEvaluationEvent', listener);
+  emitter.close();
+  emitter.emit('pushEvaluationEvent', {
+    user: { id: 'user3', data: {} },
+    evaluation: {
+      id: 'eval1',
+      featureId: 'feature1',
+      featureVersion: 1,
+      userId: 'user1',
+      variationId: 'var1',
+      variationName: 'variation1',
+      variationValue: 'value1',
+    } satisfies Evaluation,
+  });
+  t.false(listener.called);
+});
diff --git a/src/__tests__/gprc/client.ts b/src/__tests__/gprc/client.ts
new file mode 100644
index 0000000..cbd59d8
--- /dev/null
+++ b/src/__tests__/gprc/client.ts
@@ -0,0 +1,56 @@
+import test from 'ava';
+import { InvalidStatusError } from '../../api/client';
+import { convertSerivceError, DefaultGRPCClient, grpcToRestStatus } from '../../grpc/client';
+import { ServiceError } from '@kenji71089/evaluation';
+import { grpc } from '@improbable-eng/grpc-web';
+
+test('grpcToRestStatus should return correct HTTP status for known gRPC codes', t => {
+  t.is(grpcToRestStatus(0), 200); // OK
+  t.is(grpcToRestStatus(1), 499); // CANCELLED
+  t.is(grpcToRestStatus(2), 500); // UNKNOWN
+  t.is(grpcToRestStatus(3), 400); // INVALID_ARGUMENT
+  t.is(grpcToRestStatus(4), 504); // DEADLINE_EXCEEDED
+  t.is(grpcToRestStatus(5), 404); // NOT_FOUND
+  t.is(grpcToRestStatus(6), 409); // ALREADY_EXISTS
+  t.is(grpcToRestStatus(7), 403); // PERMISSION_DENIED
+  t.is(grpcToRestStatus(8), 429); // RESOURCE_EXHAUSTED
+  t.is(grpcToRestStatus(9), 400); // FAILED_PRECONDITION
+  t.is(grpcToRestStatus(10), 409); // ABORTED
+  t.is(grpcToRestStatus(11), 400); // OUT_OF_RANGE
+  t.is(grpcToRestStatus(12), 501); // UNIMPLEMENTED
+  t.is(grpcToRestStatus(13), 500); // INTERNAL
+  t.is(grpcToRestStatus(14), 503); // UNAVAILABLE
+  t.is(grpcToRestStatus(15), 500); // DATA_LOSS
+  t.is(grpcToRestStatus(16), 401); // UNAUTHENTICATED
+});
+
+test('grpcToRestStatus should return 500 for unknown gRPC codes', t => {
+  t.is(grpcToRestStatus(999), 500); // Unknown gRPC code
+  t.is(grpcToRestStatus(-1), 500); // Invalid gRPC code
+});
+
+test('convertSerivceError should convert ServiceError to InvalidStatusError', t => {
+  const serviceError: ServiceError = {
+    message: 'Test error message',
+    code: 500,
+    metadata: new grpc.Metadata(),
+  };
+
+  const result = convertSerivceError(serviceError);
+
+  t.true(result instanceof InvalidStatusError);
+  t.is(result.message, serviceError.message);
+  t.is(result.code, serviceError.code);
+});
+
+test('GRPCClient should convert ServiceError to InvalidStatusError', async t => {
+  // There is no gRPC server running on this port, so the request is expected to fail.
+  const client = new DefaultGRPCClient('https://localhost:26948', 'apiKey');
+
+  try {
+    await client.getFeatureFlags({ featureFlagsId: 'featureFlagsId', requestedAt: 123, tag: 'tag' });
+    t.fail('expected getFeatureFlags to reject because no server is listening');
+  } catch (error) {
+    t.true(error instanceof InvalidStatusError);
+    t.is((error as InvalidStatusError).code, 500);
+  }
+});
\ No newline at end of file
diff --git a/src/__tests__/mocks/cache.ts b/src/__tests__/mocks/cache.ts
new file mode 100644
index 0000000..c3690dc
--- /dev/null
+++ b/src/__tests__/mocks/cache.ts
@@ -0,0 +1,22 @@
+import { Cache } from '../../cache/cache';
+
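+// A placeholder Cache implementation for tests. Every method throws so unexpected calls
+// fail loudly; tests stub the pieces they need with sinon.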
+class MockCache implements Cache {
+  get(_key: string): Promise<any | null> {
+    throw new Error('Method not implemented.');
+  }
+  put(_key: string, _value: any, _ttl: number): Promise<void> {
+    throw new Error('Method not implemented.');
+  }
+  delete(_key: string): Promise<void> {
+    throw new Error('Method not implemented.');
+  }
+  scan(_keyPrefix: string): Promise<string[]> {
+    throw new Error('Method not implemented.');
+  }
+  deleteAll(): Promise<void> {
+    throw new Error('Method not implemented.');
+  }
+
+}
+
+export { MockCache };
\ No newline at end of file
diff --git a/src/__tests__/mocks/gprc.ts b/src/__tests__/mocks/gprc.ts
new file mode 100644
index 0000000..d99be17
--- /dev/null
+++ b/src/__tests__/mocks/gprc.ts
@@ -0,0 +1,23 @@
+import {
+  GetSegmentUsersResponse,
+  GetFeatureFlagsResponse,
+} from '@kenji71089/evaluation';
+import { GRPCClient } from '../../grpc/client';
+
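+// A placeholder GRPCClient for tests. Both methods throw unless a test stubs them with sinon.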
+class MockGRPCClient implements GRPCClient {
+  getFeatureFlags(_options: {
+    tag: string;
+    featureFlagsId: string;
+    requestedAt: number;
+  }): Promise<GetFeatureFlagsResponse> {
+    throw new Error('Method not implemented.');
+  }
+  getSegmentUsers(_options: {
+    segmentIdsList: Array<string>;
+    requestedAt: number;
+  }): Promise<GetSegmentUsersResponse> {
+    throw new Error('Method not implemented.');
+  }
+}
+
+export { MockGRPCClient };
\ No newline at end of file
diff --git a/src/__tests__/schedule.ts b/src/__tests__/schedule.ts
new file mode 100644
index 0000000..70ad395
--- /dev/null
+++ b/src/__tests__/schedule.ts
@@ -0,0 +1,38 @@
+import test from 'ava';
+import { createSchedule, removeSchedule } from '../schedule';
+
+test('createSchedule should return a NodeJS.Timeout', t => {
+  const fn = () => {};
+  const interval = 1000;
+  const timeout = createSchedule(fn, interval);
+  
+  t.truthy(timeout);
+  t.is(typeof timeout, 'object');
+  removeSchedule(timeout); // Clean up
+});
+
+test('createSchedule should call the function at the specified interval', async t => {
+  let callCount = 0;
+  const fn = () => { callCount++; };
+  const interval = 100;
+  const timeout = createSchedule(fn, interval);
+
+  await new Promise(resolve => setTimeout(resolve, 350));
+  t.true(callCount >= 3);
+
+  removeSchedule(timeout); // Clean up
+});
+
+test('removeSchedule should clear the interval', async t => {
+  let callCount = 0;
+  const fn = () => { callCount++; };
+  const interval = 100;
+  const timeout = createSchedule(fn, interval);
+
+  await new Promise(resolve => setTimeout(resolve, 250));
+  removeSchedule(timeout);
+
+  const callCountAfterClear = callCount;
+  await new Promise(resolve => setTimeout(resolve, 250));
+  t.is(callCount, callCountAfterClear);
+});
\ No newline at end of file
diff --git a/src/__tests__/typeConverter/converter_default.ts b/src/__tests__/typeConverter/converter_default.ts
new file mode 100644
index 0000000..224785c
--- /dev/null
+++ b/src/__tests__/typeConverter/converter_default.ts
@@ -0,0 +1,28 @@
+import test from 'ava';
+import { defaultStringToTypeConverter } from '../../converter';
+
+type StringConvertTestCase = {
+  input: string;
+  expected: string;
+};
+
+// List of test cases
+const stringConvertTestCases: StringConvertTestCase[] = [
+  { input: 'default true', expected: 'default true' },
+  { input: 'default false', expected: 'default false' },
+  { input: ' ', expected: ' ' },
+  { input: '', expected: '' },
+  { input: '1', expected: '1' },
+  { input: '2', expected: '2' },
+  { input: '2.0', expected: '2.0' },
+  { input: '12.1', expected: '12.1' },
+  { input: '[]', expected: '[]' },
+  { input: '{"key1": "value1"}', expected: '{"key1": "value1"}' },
+];
+
+stringConvertTestCases.forEach(({ input, expected }, index) => {
+  test(`defaultStringToTypeConverter test case ${index + 1}`, (t) => {
+    const output = defaultStringToTypeConverter(input);
+    t.is(output, expected);
+  });
+});
diff --git a/src/__tests__/typeConverter/converter_string_to_bool.ts b/src/__tests__/typeConverter/converter_string_to_bool.ts
new file mode 100644
index 0000000..723c1ad
--- /dev/null
+++ b/src/__tests__/typeConverter/converter_string_to_bool.ts
@@ -0,0 +1,32 @@
+import test from 'ava';
+import { stringToBoolConverter } from '../../converter';
+
+type StringToBoolConvertTestCase = {
+  input: string;
+  expected: boolean | null;
+};
+
+// List of test cases
+const stringConvertTestCases: StringToBoolConvertTestCase[] = [
+  { input: 'default true', expected: null },
+  { input: 'default false', expected: null },
+  { input: ' ', expected: null },
+  { input: '', expected: null },
+  { input: '1', expected: null },
+  { input: '2', expected: null },
+  { input: '[]', expected: null },
+  { input: '{"key1": "value1"}', expected: null },
+  { input: 'true', expected: true },
+  { input: 'false', expected: false },
+];
+
+stringConvertTestCases.forEach(({ input, expected }, index) => {
+  test(`stringToBoolConverter test case ${index + 1} input: ${input} - expected: ${expected}`, (t) => {
+    try {
+      const output = stringToBoolConverter(input);
+      t.is(output, expected);
+    } catch (err) {
+      t.is(expected, null);
+    }
+  });
+});
diff --git a/src/__tests__/typeConverter/converter_string_to_num.ts b/src/__tests__/typeConverter/converter_string_to_num.ts
new file mode 100644
index 0000000..ad4cdf9
--- /dev/null
+++ b/src/__tests__/typeConverter/converter_string_to_num.ts
@@ -0,0 +1,36 @@
+import test from 'ava';
+import { stringToNumberConverter } from '../../converter';
+
+type StringToNumConvertTestCase = {
+  input: string;
+  expected: number | null;
+};
+
+// List of test cases
+const stringConvertTestCases: StringToNumConvertTestCase[] = [
+  { input: 'default true', expected: null },
+  { input: 'default false', expected: null },
+  { input: ' ', expected: null },
+  { input: '', expected: null },
+  { input: '1', expected: 1 },
+  { input: '2', expected: 2 },
+  { input: '0.1', expected: 0.1 },
+  { input: '12.1', expected: 12.1 },
+  { input: '12.0', expected: 12 },
+  { input: '1', expected: 1 },
+  { input: '[]', expected: null },
+  { input: '{"key1": "value1"}', expected: null },
+  { input: 'true', expected: null },
+  { input: 'false', expected: null },
+];
+
+stringConvertTestCases.forEach(({ input, expected }, index) => {
+  test(`stringToNumberConverter test case ${index + 1} input: ${input} - expected: ${expected}`, (t) => {
+    try {
+      const output = stringToNumberConverter(input);
+      t.is(output, expected);
+    } catch (err) {
+      t.is(expected, null);
+    }
+  });
+});
diff --git a/src/__tests__/typeConverter/converter_string_to_object.ts b/src/__tests__/typeConverter/converter_string_to_object.ts
new file mode 100644
index 0000000..18f694e
--- /dev/null
+++ b/src/__tests__/typeConverter/converter_string_to_object.ts
@@ -0,0 +1,50 @@
+import test from 'ava';
+import { stringToObjectConverter } from '../../converter';
+import { BKTValue } from '../../types';
+
+type StringToJSonValueConvertTestCase = {
+  input: string;
+  expected: BKTValue | null;
+};
+
+// List of test cases
+const stringConvertTestCases: StringToJSonValueConvertTestCase[] = [
+  { input: 'default true', expected: null },
+  { input: 'default false', expected: null },
+  { input: ' ', expected: null },
+  { input: '', expected: null },
+  { input: '1', expected: null },
+  { input: '2', expected: null },
+  { input: '0.1', expected: null },
+  { input: '12.1', expected: null },
+  { input: '12.0', expected: null },
+  { input: '1', expected: null },
+  { input: '[]', expected: [] },
+  { input: '{}', expected: {} },
+  { input: '{"key1": "value1"}', expected: { key1: 'value1' } },
+  {
+    input: JSON.stringify({ key1: 'value1', key2: 'value1', key3: 'value1' }),
+    expected: { key1: 'value1', key2: 'value1', key3: 'value1' },
+  },
+  {
+    input: JSON.stringify({ key1: [1, 2, 3], key2: 'value1', key3: 'value1' }),
+    expected: { key1: [1, 2, 3], key2: 'value1', key3: 'value1' },
+  },
+  {
+    input: JSON.stringify([1, 2, 3]),
+    expected: [1, 2, 3],
+  },
+  { input: 'true', expected: null },
+  { input: 'false', expected: null },
+];
+
+stringConvertTestCases.forEach(({ input, expected }, index) => {
+  test(`stringToObjectConverter test case ${index + 1} input: ${input} - expected: ${expected}`, (t) => {
+    try {
+      const output = stringToObjectConverter(input);
+      t.deepEqual(output, expected);
+    } catch (err) {
+      t.deepEqual(expected, null);
+    }
+  });
+});
diff --git a/src/cache/cache.ts b/src/cache/cache.ts
new file mode 100644
index 0000000..9524090
--- /dev/null
+++ b/src/cache/cache.ts
@@ -0,0 +1,9 @@
+interface Cache {
+  get(key: string): Promise<any | null>;
+  put(key: string, value: any, ttl: number): Promise<void>;
+  delete(key: string): Promise<void>;
+  scan(keyPrefix: string): Promise<string[]>;
+  deleteAll(): Promise<void>;
+}
+
+export { Cache };
\ No newline at end of file
diff --git a/src/cache/features.ts b/src/cache/features.ts
new file mode 100644
index 0000000..647059f
--- /dev/null
+++ b/src/cache/features.ts
@@ -0,0 +1,40 @@
+import { Feature } from '@kenji71089/evaluation';
+import { Cache } from './cache';
+import { NamespaceCache } from './namespace';
+
+interface FeaturesCache {
+  get(key: string): Promise<Feature | null>;
+  put(value: Feature): Promise<void>;
+  delete(key: string): Promise<void>;
+  deleteAll(): Promise<void>;
+}
+
+function NewFeatureCache(options: { cache: Cache; ttl: number; }): FeaturesCache {
+  return new FeatureNamespaceCache(options.cache, options.ttl);
+}
+
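+// Wraps the generic NamespaceCache so all feature flags are stored under the 'features:' key prefix.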
+class FeatureNamespaceCache implements FeaturesCache {
+  private cache: NamespaceCache<Feature>;
+  
+  constructor(cache: Cache, ttl: number) {
+    this.cache = new NamespaceCache<Feature>(cache, ttl, 'features:');
+  }
+
+  async get(key: string): Promise<Feature | null> {
+    return this.cache.get(key);
+  }
+
+  async put(value: Feature): Promise<void> {
+    return this.cache.put(value.getId(), value);
+  }
+
+  async delete(key: string): Promise<void> {
+    return this.cache.delete(key);
+  }
+
+  async deleteAll(): Promise<void> {
+    return this.cache.deleteAll();
+  }
+}
+
+export { FeaturesCache, NewFeatureCache };
\ No newline at end of file
diff --git a/src/cache/inMemoryCache.ts b/src/cache/inMemoryCache.ts
new file mode 100644
index 0000000..3640999
--- /dev/null
+++ b/src/cache/inMemoryCache.ts
@@ -0,0 +1,66 @@
+import { Cache } from './cache';
+
+class Entry {
+  value: any;
+  expiration: number;
+
+  constructor(value: any, expiration: number) {
+    this.value = value;
+    this.expiration = expiration;
+  }
+}
+
+// Simple in-memory cache implementation.
+class InMemoryCache<T> implements Cache {
+  private entries: Map<string, Entry>;
+
+  constructor() {
+    this.entries = new Map<string, Entry>();
+  }
+
+  private isExpired(entry: Entry): boolean {
+    if (entry.expiration === 0) {
+      return false;
+    }
+    const now = Date.now();
+    return entry.expiration <= now;
+  }
+
+  async get(key: string): Promise<T | null> {
+    const entry = this.entries.get(key);
+    if (!entry) {
+      return null;
+    }
+
+    // TODO: Implement a scheduler to clean up expired entries.
+    if (this.isExpired(entry)) {
+      this.entries.delete(key); // Remove expired entry
+      return null;
+    }
+
+    return entry.value;
+  }
+
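+  // A ttl of 0 stores the entry without an expiration time, so it never expires.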
+  async put(key: string, value: T, ttl: number): Promise<void> {
+    const expirationTime = Date.now() + ttl;
+    this.entries.set(key, new Entry(value, ttl === 0 ? 0 : expirationTime));
+  }
+
+  async scan(keyPrefix: string): Promise<string[]> {
+    const keys: string[] = [];
+    const allKeys = Array.from(this.entries.keys());
+    const filteredKeys = allKeys.filter(key => key.startsWith(keyPrefix));
+    keys.push(...filteredKeys);
+    return keys;
+  }
+
+  async delete(key: string): Promise<void> {
+    this.entries.delete(key);
+  }
+
+  async deleteAll(): Promise<void> {
+    this.entries.clear();
+  }
+}
+
+export { InMemoryCache };
\ No newline at end of file
diff --git a/src/cache/namespace.ts b/src/cache/namespace.ts
new file mode 100644
index 0000000..3a70a17
--- /dev/null
+++ b/src/cache/namespace.ts
@@ -0,0 +1,55 @@
+import { Cache } from './cache';
+
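+// Prefixes every key with a namespace so multiple typed caches can share one underlying Cache instance.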
+class NamespaceCache<T> {
+  private cache: Cache;
+  private ttl: number;
+  private _namespace: string;
+
+  get namespace(): string {
+    return this._namespace;
+  }
+
+  constructor(cache: Cache, ttl: number, namespace: string) {
+    this.cache = cache;
+    this.ttl = ttl;
+    this._namespace = namespace;
+  }
+
+  async get(key: string): Promise<T | null> {
+    return this.cache.get(this.composeKey(key));
+  }
+
+  async put(key: string, value: T): Promise<void> {
+    return this.cache.put(this.composeKey(key), value, this.ttl);
+  }
+
+  async delete(key: string): Promise<void> {
+    return this.cache.delete(this.composeKey(key));
+  }
+
+  async getAll(): Promise<T[]> {
+    const ids = await this.getIds();  
+    const promises = ids.map(id => this.get(id));
+    const results = await Promise.all(promises);
+    return results.filter((result) => result !== null);
+  }
+
+  private async getInternalIds(): Promise<string[]> {
+    return this.cache.scan(this.namespace);
+  }
+
+  async getIds(): Promise<string[]> {
+    return (await this.getInternalIds()).map(key => key.replace(this.namespace, ''));
+  }
+
+  async deleteAll(): Promise<void> {
+    const ids = await this.getIds();
+    // Wait for every deletion to finish before resolving.
+    await Promise.all(ids.map((id) => this.delete(id)));
+  }
+
+  composeKey(id: string): string {
+    return this.namespace + id;
+  }
+}
+
+export { NamespaceCache };
\ No newline at end of file
diff --git a/src/cache/processor/featureFlagCacheProcessor.ts b/src/cache/processor/featureFlagCacheProcessor.ts
new file mode 100644
index 0000000..4405c38
--- /dev/null
+++ b/src/cache/processor/featureFlagCacheProcessor.ts
@@ -0,0 +1,163 @@
+import { FeaturesCache } from '../features';
+import { Cache } from '../cache';
+import { GRPCClient } from '../../grpc/client';
+import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
+import { createSchedule, removeSchedule } from '../../schedule';
+import { Feature } from '@kenji71089/evaluation';
+import { ApiId } from '../../objects/apiId';
+import { Clock } from '../../utils/clock';
+
+interface FeatureFlagProcessor {
+  start(): void;
+  stop(): void;
+}
+
+type FeatureFlagProcessorOptions = {
+  cache: Cache;
+  featureFlagCache: FeaturesCache;
+  pollingInterval: number;
+  grpc: GRPCClient;
+  eventEmitter: ProcessorEventsEmitter;
+  featureTag: string;
+  clock: Clock;
+};
+
+function NewFeatureFlagProcessor(options: FeatureFlagProcessorOptions): FeatureFlagProcessor {
+  return new DefaultFeatureFlagProcessor(options);
+}
+
+const FEATURE_FLAG_CACHE_TTL = 0;
+const FEATURE_FLAG_REQUESTED_AT = 'bucketeer_feature_flag_requested_at';
+const FEATURE_FLAG_ID = 'bucketeer_feature_flag_id';
+
+class DefaultFeatureFlagProcessor implements FeatureFlagProcessor {
+  private featureFlagCache: FeaturesCache;
+  private cache: Cache;
+  private grpc: GRPCClient;
+  private eventEmitter: ProcessorEventsEmitter;
+  private pollingScheduleID?: NodeJS.Timeout;
+  private pollingInterval: number;
+  private clock: Clock;
+  featureTag: string;
+
+  constructor(options: FeatureFlagProcessorOptions) {
+    this.featureFlagCache = options.featureFlagCache;
+    this.cache = options.cache;
+    this.grpc = options.grpc;
+    this.eventEmitter = options.eventEmitter;
+    this.pollingInterval = options.pollingInterval;
+    this.featureTag = options.featureTag;
+    this.clock = options.clock;
+  }
+
+  start() {
+    this.pollingScheduleID = createSchedule(() => {
+      this.runUpdateCache();
+    }, this.pollingInterval);
+  }
+
+  stop() {
+    if (this.pollingScheduleID) removeSchedule(this.pollingScheduleID);
+  }
+
+  async runUpdateCache() {
+    try {
+      await this.updateCache();
+    } catch (error) {
+      this.pushErrorMetricsEvent(error);
+    }
+  }
+
+  private async updateCache() {
+    const featureFlagsId = await this.getFeatureFlagId();
+    const requestedAt = await this.getFeatureFlagRequestedAt();
+    const startTime: number = this.clock.getTime();
+    const featureFlags = await this.grpc.getFeatureFlags({
+      requestedAt: requestedAt,
+      tag: this.featureTag,
+      featureFlagsId: featureFlagsId,
+    });
+
+    const endTime = this.clock.getTime();
+    const latency = (endTime - startTime) / 1000;
+
+    this.pushLatencyMetricsEvent(latency);
+    this.pushSizeMetricsEvent(featureFlags.serializeBinary().length);
+
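+    // A force update replaces the whole local cache; otherwise the response is applied
+    // incrementally (archived flags are removed and updated flags are upserted).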
+    const forceUpdate = featureFlags.getForceUpdate();
+    if (forceUpdate) {
+      await this.deleteAllAndSaveLocalCache(
+        featureFlags.getRequestedAt(),
+        featureFlags.getFeatureFlagsId(),
+        featureFlags.getFeaturesList(),
+      );
+    } else {
+      await this.updateLocalCache(
+        featureFlags.getRequestedAt(),
+        featureFlags.getFeatureFlagsId(),
+        featureFlags.getFeaturesList(),
+        featureFlags.getArchivedFeatureFlagIdsList(),
+      );
+    }
+  }
+
+  private async getFeatureFlagRequestedAt(): Promise<number> {
+    const requestedAt = await this.cache.get(FEATURE_FLAG_REQUESTED_AT);
+    return requestedAt || 0;
+  }
+
+  private async getFeatureFlagId(): Promise<string> {
+    const featureFlagId = await this.cache.get(FEATURE_FLAG_ID);
+    return featureFlagId || '';
+  }
+
+  private async deleteAllAndSaveLocalCache(
+    requestedAt: number,
+    featureFlagsId: string,
+    features: Feature[],
+  ) {
+    await this.featureFlagCache.deleteAll();
+    await this.updateLocalCache(requestedAt, featureFlagsId, features, []);
+  }
+
+  private async updateLocalCache(
+    requestedAt: number,
+    featureFlagsId: string,
+    features: Feature[],
+    archivedFeatureIds: string[],
+  ) {
+    for (const featureId of archivedFeatureIds) {
+      await this.featureFlagCache.delete(featureId);
+    }
+    for (const feature of features) {
+      await this.featureFlagCache.put(feature);
+    }
+    await this.cache.put(FEATURE_FLAG_ID, featureFlagsId, FEATURE_FLAG_CACHE_TTL);
+    await this.cache.put(FEATURE_FLAG_REQUESTED_AT, requestedAt, FEATURE_FLAG_CACHE_TTL);
+  }
+
+  async pushLatencyMetricsEvent(latency: number) {
+    this.eventEmitter.emit('pushLatencyMetricsEvent', {
+      latency: latency,
+      apiId: ApiId.GET_FEATURE_FLAGS,
+    });
+  }
+
+  async pushErrorMetricsEvent(error: any) {
+    this.eventEmitter.emit('error', { error: error, apiId: ApiId.GET_FEATURE_FLAGS });
+  }
+
+  async pushSizeMetricsEvent(size: number) {
+    this.eventEmitter.emit('pushSizeMetricsEvent', { size: size, apiId: ApiId.GET_FEATURE_FLAGS });
+  }
+}
+
+export {
+  FeatureFlagProcessor,
+  NewFeatureFlagProcessor,
+  DefaultFeatureFlagProcessor,
+  FeatureFlagProcessorOptions,
+  FEATURE_FLAG_CACHE_TTL,
+  FEATURE_FLAG_ID,
+  FEATURE_FLAG_REQUESTED_AT,
+};
diff --git a/src/cache/processor/segmentUsersCacheProcessor.ts b/src/cache/processor/segmentUsersCacheProcessor.ts
new file mode 100644
index 0000000..2e32f13
--- /dev/null
+++ b/src/cache/processor/segmentUsersCacheProcessor.ts
@@ -0,0 +1,146 @@
+import { SegmentUsersCache } from '../segmentUsers';
+import { GRPCClient } from '../../grpc/client';
+import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
+import { Cache } from '../cache';
+import { ApiId } from '../../objects/apiId';
+import { SegmentUsers } from '@kenji71089/evaluation';
+import { createSchedule, removeSchedule } from '../../schedule';
+import { Clock } from '../../utils/clock';
+
+interface SegementUsersCacheProcessor {
+  start(): void;
+  stop(): void;
+}
+
+type SegementUsersCacheProcessorOptions = {
+  cache: Cache;
+  segmentUsersCache: SegmentUsersCache;
+  pollingInterval: number;
+  grpc: GRPCClient;
+  eventEmitter: ProcessorEventsEmitter;
+  clock: Clock;
+};
+
+const SEGEMENT_USERS_REQUESTED_AT = 'bucketeer_segment_users_requested_at';
+const SEGEMENT_USERS_CACHE_TTL = 0;
+
+function NewSegementUserCacheProcessor(
+  options: SegementUsersCacheProcessorOptions,
+): SegementUsersCacheProcessor {
+  return new DefaultSegementUserCacheProcessor(options);
+}
+
+class DefaultSegementUserCacheProcessor implements SegementUsersCacheProcessor {
+  private cache: Cache;
+  private segmentUsersCache: SegmentUsersCache;
+  private pollingInterval: number;
+  private grpc: GRPCClient;
+  private eventEmitter: ProcessorEventsEmitter;
+  private pollingScheduleID?: NodeJS.Timeout;
+  private clock: Clock;
+
+  constructor(options: SegementUsersCacheProcessorOptions) {
+    this.cache = options.cache;
+    this.segmentUsersCache = options.segmentUsersCache;
+    this.pollingInterval = options.pollingInterval;
+    this.grpc = options.grpc;
+    this.eventEmitter = options.eventEmitter;
+    this.clock = options.clock;
+  }
+
+  start() {
+    this.pollingScheduleID = createSchedule(() => this.runUpdateCache(), this.pollingInterval);
+  }
+
+  stop() {
+    if (this.pollingScheduleID) removeSchedule(this.pollingScheduleID);
+  }
+
+  async runUpdateCache() {
+    try {
+      await this.updateCache();
+    } catch (error) {
+      this.pushErrorMetricsEvent(error);
+    }
+  }
+
+  private async updateCache() {
+    const segmentIds = await this.segmentUsersCache.getIds();
+    const requestedAt = await this.getSegmentUsersRequestedAt();
+
+    const startTime: number = this.clock.getTime();
+
+    const resp = await this.grpc.getSegmentUsers({
+      segmentIdsList: segmentIds,
+      requestedAt: requestedAt,
+    });
+
+    const endTime: number = this.clock.getTime();
+    const latency = (endTime - startTime) / 1000;
+
+    this.pushLatencyMetricsEvent(latency);
+    this.pushSizeMetricsEvent(resp.serializeBinary().length);
+
+    if (resp.getForceUpdate()) {
+      await this.deleteAllAndSaveLocalCache(resp.getRequestedAt(), resp.getSegmentUsersList());
+    } else {
+      await this.updateLocalCache(
+        resp.getRequestedAt(),
+        resp.getSegmentUsersList(),
+        resp.getDeletedSegmentIdsList(),
+      );
+    }
+  }
+
+  async deleteAllAndSaveLocalCache(requestedAt: number, segmentUsersList: SegmentUsers[]) {
+    await this.segmentUsersCache.deleteAll();
+    await this.updateLocalCache(requestedAt, segmentUsersList, []);
+  }
+
+  async updateLocalCache(
+    requestedAt: number,
+    segmentUsersList: SegmentUsers[],
+    deletedSegmentIds: string[],
+  ) {
+    for (const deletedSegmentId of deletedSegmentIds) {
+      await this.segmentUsersCache.delete(deletedSegmentId);
+    }
+    for (const segmentUsers of segmentUsersList) {
+      await this.segmentUsersCache.put(segmentUsers);
+    }
+    await this.putSegmentUsersRequestedAt(requestedAt);
+  }
+
+  private async getSegmentUsersRequestedAt(): Promise<number> {
+    const requestedAt = await this.cache.get(SEGEMENT_USERS_REQUESTED_AT);
+    return requestedAt ? Number(requestedAt) : 0;
+  }
+
+  putSegmentUsersRequestedAt(requestedAt: number): Promise<void> {
+    return this.cache.put(SEGEMENT_USERS_REQUESTED_AT, requestedAt, SEGEMENT_USERS_CACHE_TTL);
+  }
+
+  async pushLatencyMetricsEvent(latency: number) {
+    this.eventEmitter.emit('pushLatencyMetricsEvent', {
+      latency: latency,
+      apiId: ApiId.GET_SEGMENT_USERS,
+    });
+  }
+
+  async pushErrorMetricsEvent(error: any) {
+    this.eventEmitter.emit('error', { error: error, apiId: ApiId.GET_SEGMENT_USERS });
+  }
+
+  async pushSizeMetricsEvent(size: number) {
+    this.eventEmitter.emit('pushSizeMetricsEvent', { size: size, apiId: ApiId.GET_SEGMENT_USERS });
+  }
+}
+
+export {
+  SegementUsersCacheProcessor,
+  SegementUsersCacheProcessorOptions,
+  NewSegementUserCacheProcessor,
+  DefaultSegementUserCacheProcessor,
+  SEGEMENT_USERS_CACHE_TTL,
+  SEGEMENT_USERS_REQUESTED_AT,
+};
diff --git a/src/cache/segmentUsers.ts b/src/cache/segmentUsers.ts
new file mode 100644
index 0000000..242dcfc
--- /dev/null
+++ b/src/cache/segmentUsers.ts
@@ -0,0 +1,53 @@
+import { SegmentUsers } from '@kenji71089/evaluation';
+import { Cache } from './cache';
+import { NamespaceCache } from './namespace';
+
+interface SegmentUsersCache {
+  get(key: string): Promise<SegmentUsers | null>;
+  put(value: SegmentUsers): Promise<void>;
+  delete(key: string): Promise<void>;
+  deleteAll(): Promise<void>;
+
+  getAll(): Promise<SegmentUsers[]>;
+  getIds(): Promise<string[]>;
+}
+
+function NewSegmentUsersCache(options: {cache: Cache, ttl: number}): SegmentUsersCache {
+  return new SegmentUsersNamespaceCache(options.cache, options.ttl);
+}
+
+const SEGMENT_USERS_NAME_SPACE = 'segment_users:';
+
+class SegmentUsersNamespaceCache implements SegmentUsersCache {
+  private nameSpaceCache: NamespaceCache<SegmentUsers>;
+
+  constructor(cache: Cache, ttl: number) {
+    this.nameSpaceCache = new NamespaceCache<SegmentUsers>(cache, ttl, SEGMENT_USERS_NAME_SPACE);
+  }
+
+  get(key: string): Promise<SegmentUsers | null> {
+    return this.nameSpaceCache.get(key);
+  }
+
+  delete(key: string): Promise<void> {
+    return this.nameSpaceCache.delete(key);
+  }
+
+  getAll(): Promise<SegmentUsers[]> {
+    return this.nameSpaceCache.getAll();
+  }
+
+  getIds(): Promise<string[]> {
+    return this.nameSpaceCache.getIds();
+  }
+
+  deleteAll(): Promise<void> {
+    return this.nameSpaceCache.deleteAll();
+  }
+
+  put(value: SegmentUsers): Promise<void> {
+    return this.nameSpaceCache.put(value.getSegmentId(), value);
+  }
+}
+
+export { SegmentUsersCache, NewSegmentUsersCache, SEGMENT_USERS_NAME_SPACE };
\ No newline at end of file
diff --git a/src/client.ts b/src/client.ts
new file mode 100644
index 0000000..7b161af
--- /dev/null
+++ b/src/client.ts
@@ -0,0 +1,341 @@
+import { User } from './objects/user';
+import { EventStore } from './stores/EventStore';
+import { createSchedule, removeSchedule } from './schedule';
+import { GIT_REVISION } from './shared';
+import { APIClient } from './api/client';
+import { Config } from './config';
+import { createDefaultEvaluationEvent, createEvaluationEvent } from './objects/evaluationEvent';
+import { createGoalEvent } from './objects/goalEvent';
+import {
+  createLatencyMetricsEvent,
+  createSizeMetricsEvent,
+  toErrorMetricsEvent,
+} from './objects/metricsEvent';
+import { Evaluation } from './objects/evaluation';
+import { Event } from './objects/event';
+import { GetEvaluationResponse } from './objects/response';
+import { ApiId, NodeApiIds } from './objects/apiId';
+import { BKTEvaluationDetails, newDefaultBKTEvaluationDetails } from './evaluationDetails';
+import { BKTValue } from './types';
+import {
+  defaultStringToTypeConverter,
+  stringToBoolConverter,
+  stringToNumberConverter,
+  stringToObjectConverter,
+  StringToTypeConverter,
+} from './converter';
+import { FeatureFlagProcessor } from './cache/processor/featureFlagCacheProcessor';
+import { SegementUsersCacheProcessor } from './cache/processor/segmentUsersCacheProcessor';
+import { ProcessorEventsEmitter } from './processorEventsEmitter';
+import { NodeEvaluator } from './evaluator/evaluator';
+import type { Bucketeer, BuildInfo } from '.';
+
+const COUNT_PER_REGISTER_EVENT = 100;
+
+export class BKTClientImpl implements Bucketeer {
+  apiClient: APIClient;
+  eventStore: EventStore;
+  config: Config;
+  registerEventsScheduleID: NodeJS.Timeout;
+
+  eventEmitter: ProcessorEventsEmitter;
+  featureFlagProcessor: FeatureFlagProcessor | null = null;
+  segementUsersCacheProcessor: SegementUsersCacheProcessor | null = null;
+  localEvaluator: NodeEvaluator | null = null;
+
+  constructor(
+    config: Config,
+    options: {
+      apiClient: APIClient;
+      eventStore: EventStore;
+      localEvaluator: NodeEvaluator | null;
+      featureFlagProcessor: FeatureFlagProcessor | null;
+      segementUsersCacheProcessor: SegementUsersCacheProcessor | null;
+      eventEmitter: ProcessorEventsEmitter;
+    },
+  ) {
+    this.config = config;
+    this.apiClient = options.apiClient;
+    this.eventStore = options.eventStore;
+    this.registerEventsScheduleID = createSchedule(() => {
+      if (this.eventStore.size() > 0) {
+        this.callRegisterEvents(this.eventStore.takeout(this.eventStore.size()));
+      }
+    }, this.config.pollingIntervalForRegisterEvents!);
+
+    this.eventEmitter = options.eventEmitter;
+
+    if (this.config.enableLocalEvaluation === true) {
+      this.featureFlagProcessor = options.featureFlagProcessor;
+      this.segementUsersCacheProcessor = options.segementUsersCacheProcessor;
+      this.localEvaluator = options.localEvaluator;
+
+      this.featureFlagProcessor?.start();
+      this.segementUsersCacheProcessor?.start();
+    }
+
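+    // The cache processors and the local evaluator report through the shared emitter;
+    // forward their events into the event store as metrics and evaluation events.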
+    this.eventEmitter.on('error', ({ error, apiId }) => {
+      this.saveErrorMetricsEvent(this.config.tag, error, apiId);
+    });
+
+    this.eventEmitter.on('pushDefaultEvaluationEvent', ({ user, featureId }) => {
+      this.saveDefaultEvaluationEvent(user, featureId);
+    });
+
+    this.eventEmitter.on('pushLatencyMetricsEvent', ({ latency, apiId }) => {
+      this.saveLatencyMetricsEvent(config.tag, latency, apiId);
+    });
+
+    this.eventEmitter.on('pushSizeMetricsEvent', ({ size, apiId }) => {
+      this.saveSizeMetricsEvent(config.tag, size, apiId);
+    });
+
+    this.eventEmitter.on('pushEvaluationEvent', ({ user, evaluation }) => {
+      this.saveEvaluationEvent(user, evaluation);
+    });
+  }
+
+  async stringVariation(user: User, featureId: string, defaultValue: string): Promise<string> {
+    return (await this.stringVariationDetails(user, featureId, defaultValue)).variationValue;
+  }
+
+  async booleanVariationDetails(
+    user: User,
+    featureId: string,
+    defaultValue: boolean,
+  ): Promise<BKTEvaluationDetails<boolean>> {
+    return this.getVariationDetails(user, featureId, defaultValue, stringToBoolConverter);
+  }
+
+  async booleanVariation(user: User, featureId: string, defaultValue: boolean): Promise<boolean> {
+    return (await this.booleanVariationDetails(user, featureId, defaultValue)).variationValue;
+  }
+
+  async stringVariationDetails(
+    user: User,
+    featureId: string,
+    defaultValue: string,
+  ): Promise<BKTEvaluationDetails<string>> {
+    return this.getVariationDetails(user, featureId, defaultValue, defaultStringToTypeConverter);
+  }
+
+  async numberVariation(user: User, featureId: string, defaultValue: number): Promise<number> {
+    return (await this.numberVariationDetails(user, featureId, defaultValue)).variationValue;
+  }
+
+  async numberVariationDetails(
+    user: User,
+    featureId: string,
+    defaultValue: number,
+  ): Promise<BKTEvaluationDetails<number>> {
+    return this.getVariationDetails(user, featureId, defaultValue, stringToNumberConverter);
+  }
+
+  async objectVariation(user: User, featureId: string, defaultValue: BKTValue): Promise<BKTValue> {
+    return (await this.objectVariationDetails(user, featureId, defaultValue)).variationValue;
+  }
+
+  async objectVariationDetails(
+    user: User,
+    featureId: string,
+    defaultValue: BKTValue,
+  ): Promise<BKTEvaluationDetails<BKTValue>> {
+    return this.getVariationDetails(user, featureId, defaultValue, stringToObjectConverter);
+  }
+
+  private registerEvents(): void {
+    if (this.eventStore.size() >= COUNT_PER_REGISTER_EVENT) {
+      this.callRegisterEvents(this.eventStore.takeout(COUNT_PER_REGISTER_EVENT));
+    }
+  }
+
+  private registerAllEvents(): void {
+    if (this.eventStore.size() > 0) {
+      this.callRegisterEvents(this.eventStore.getAll());
+    }
+  }
+
+  private callRegisterEvents(events: Array<Event>): void {
+    this.apiClient.registerEvents(events).catch((e) => {
+      this.saveErrorMetricsEvent(this.config.tag, e, ApiId.REGISTER_EVENTS);
+      this.config.logger?.warn('register events failed', e);
+    });
+  }
+
+  private saveDefaultEvaluationEvent(user: User, featureId: string) {
+    this.eventStore.add(createDefaultEvaluationEvent(this.config.tag, user, featureId));
+    this.registerEvents();
+  }
+
+  private saveEvaluationEvent(user: User, evaluation: Evaluation) {
+    this.eventStore.add(createEvaluationEvent(this.config.tag, user, evaluation));
+    this.registerEvents();
+  }
+
+  private saveGoalEvent(user: User, goalId: string, value?: number) {
+    this.eventStore.add(createGoalEvent(this.config.tag, goalId, user, value ? value : 0));
+    this.registerEvents();
+  }
+
+  private saveLatencyMetricsEvent(tag: string, second: number, apiId: NodeApiIds) {
+    this.eventStore.add(createLatencyMetricsEvent(tag, second, apiId));
+    this.registerEvents();
+  }
+
+  private saveSizeMetricsEvent(tag: string, size: number, apiId: NodeApiIds) {
+    this.eventStore.add(createSizeMetricsEvent(tag, size, apiId));
+    this.registerEvents();
+  }
+
+  private saveErrorMetricsEvent(tag: string, e: any, apiId: NodeApiIds) {
+    const event = toErrorMetricsEvent(e, tag, apiId);
+    this.eventStore.add(event);
+    this.registerEvents();
+  }
+
+  async getEvaluation(user: User, featureId: string): Promise<Evaluation | null> {
+    if (this.config.enableLocalEvaluation === true) {
+      return this.getEvaluationLocally(user, featureId);
+    }
+    return this.getEvaluationRemotely(user, featureId);
+  }
+
+  private async getEvaluationRemotely(user: User, featureId: string): Promise<Evaluation | null> {
+    const startTime: number = Date.now();
+    let res: GetEvaluationResponse;
+    let size: number;
+    try {
+      [res, size] = await this.apiClient.getEvaluation(this.config.tag, user, featureId);
+      const second = (Date.now() - startTime) / 1000;
+      this.eventEmitter.emit('pushLatencyMetricsEvent', {
+        latency: second,
+        apiId: ApiId.GET_EVALUATION,
+      });
+      this.eventEmitter.emit('pushSizeMetricsEvent', { size: size, apiId: ApiId.GET_EVALUATION });
+
+      const evaluation = res?.evaluation;
+      if (evaluation == null) {
+        throw Error('Fail to get evaluation. Reason: null response.');
+      }
+      return evaluation;
+    } catch (error) {
+      this.eventEmitter.emit('error', { error: error, apiId: ApiId.GET_EVALUATION });
+    }
+
+    return null;
+  }
+
+  private async getEvaluationLocally(user: User, featureId: string): Promise<Evaluation | null> {
+    const startTime: number = Date.now();
+    try {
+      if (this.localEvaluator) {
+        const evaluation = await this.localEvaluator.evaluate(user, featureId);
+
+        const second = (Date.now() - startTime) / 1000;
+        // The response size is not logged here; the feature flag processor already reports it.
+        this.eventEmitter.emit('pushLatencyMetricsEvent', {
+          latency: second,
+          apiId: ApiId.SDK_GET_VARIATION,
+        });
+
+        return evaluation;
+      } else {
+        throw new Error('LocalEvaluator is not initialized');
+      }
+    } catch (error) {
+      this.eventEmitter.emit('error', { error: error, apiId: ApiId.SDK_GET_VARIATION });
+    }
+
+    return null;
+  }
+
+  private async getVariationDetails<T extends BKTValue>(
+    user: User,
+    featureId: string,
+    defaultValue: T,
+    typeConverter: StringToTypeConverter<T>,
+  ): Promise<BKTEvaluationDetails<T>> {
+    const evaluation = await this.getEvaluation(user, featureId);
+    const variationValue = evaluation?.variationValue;
+
+    // Handle conversion based on the type of T
+    let result: T | null = null;
+
+    if (variationValue !== undefined && variationValue !== null) {
+      try {
+        result = typeConverter(variationValue);
+      } catch (err) {
+        result = null;
+        this.eventEmitter.emit('error', { error: err, apiId: ApiId.SDK_GET_VARIATION });
+
+        this.config.logger?.error(
+          `getVariationDetails failed to parse: ${variationValue} using: ${typeof typeConverter} with error: ${err}`,
+        );
+      }
+    }
+
+    try {
+      if (evaluation !== null && result !== null) {
+        this.eventEmitter.emit('pushEvaluationEvent', { user: user, evaluation: evaluation });
+        return {
+          featureId: evaluation.featureId,
+          featureVersion: evaluation.featureVersion,
+          userId: evaluation.userId,
+          variationId: evaluation.variationId,
+          variationName: evaluation.variationName,
+          variationValue: result,
+          reason: evaluation.reason?.type || 'DEFAULT',
+        } satisfies BKTEvaluationDetails<T>;
+      }
+    } catch (error) {
+      this.eventEmitter.emit('error', { error: error, apiId: ApiId.SDK_GET_VARIATION });
+      this.config.logger?.error('getVariationDetails failed to save event', error);
+    }
+
+    this.eventEmitter.emit('pushDefaultEvaluationEvent', { user, featureId });
+    return newDefaultBKTEvaluationDetails(user.id, featureId, defaultValue);
+  }
+
+  async getStringVariation(user: User, featureId: string, defaultValue: string): Promise<string> {
+    return this.stringVariation(user, featureId, defaultValue);
+  }
+
+  async getBoolVariation(user: User, featureId: string, defaultValue: boolean): Promise<boolean> {
+    return this.booleanVariation(user, featureId, defaultValue);
+  }
+
+  async getNumberVariation(user: User, featureId: string, defaultValue: number): Promise<number> {
+    return this.numberVariation(user, featureId, defaultValue);
+  }
+
+  async getJsonVariation(user: User, featureId: string, defaultValue: object): Promise<object> {
+    const valueStr = await this.getStringVariation(user, featureId, '');
+    try {
+      return JSON.parse(valueStr);
+    } catch (e) {
+      this.config.logger?.debug('getJsonVariation failed to parse', e);
+      return defaultValue;
+    }
+  }
+
+  track(user: User, goalId: string, value?: number): void {
+    this.config.logger?.debug('track is called', goalId, value);
+    this.saveGoalEvent(user, goalId, value);
+  }
+
+  async destroy(): Promise<void> {
+    this.registerAllEvents();
+    removeSchedule(this.registerEventsScheduleID);
+    this.eventEmitter.close();
+    this.featureFlagProcessor?.stop();
+    this.segementUsersCacheProcessor?.stop();
+    this.config.logger?.info('destroy finished', this.registerEventsScheduleID);
+  }
+
+  getBuildInfo(): BuildInfo {
+    return {
+      GIT_REVISION,
+    };
+  }
+}
diff --git a/src/config.ts b/src/config.ts
index ee6938d..bed3e79 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -18,6 +18,17 @@ export interface Config {
    */
   pollingIntervalForRegisterEvents?: number;
   logger?: Logger;
+
+  /**
+  * Evaluate the end user locally in the SDK instead of on the server.
+  * Note: To evaluate the user locally, you must create an API key and select the server-side role.
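+  *
+  * @example
+  * // A minimal sketch with hypothetical values; local evaluation needs a server-side API key.
+  * const client = initialize({
+  *   host: 'api.example.com',
+  *   token: '<your server-side API key>',
+  *   tag: 'nodejs',
+  *   enableLocalEvaluation: true,
+  * });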
+  */
+  enableLocalEvaluation?: boolean;
+
+  /**
+   * Sets the polling interval, in milliseconds, for refreshing the local feature flag and segment user caches. Default: 1 minute (60,000 ms).
+   */
+  cachePollingInterval?: number;
 }
 
 export const defaultConfig = {
@@ -27,4 +38,14 @@ export const defaultConfig = {
   tag: '',
   pollingIntervalForRegisterEvents: 1 * 60 * 1000,
   logger: new DefaultLogger(),
+
+  enableLocalEvaluation: false,
+  cachePollingInterval: 1 * 60 * 1000,
 };
+
+export const defineBKTConfig = (config: Config): Config => {
+  return {
+    ...defaultConfig,
+    ...config,
+  };
+};
\ No newline at end of file
diff --git a/src/evaluator/evaluator.ts b/src/evaluator/evaluator.ts
new file mode 100644
index 0000000..4820b2f
--- /dev/null
+++ b/src/evaluator/evaluator.ts
@@ -0,0 +1,11 @@
+import { Evaluation } from '../objects/evaluation';
+import { User } from '../objects/user';
+
+// Node.js evaluator interface. This interface is used to evaluate the feature flag for the given user.
+// Prefix `Node` is used to avoid conflict with the evaluator interface in the `@kenji71089/evaluation` package.
+interface NodeEvaluator {
+  evaluate(user: User, featureID: string): Promise<Evaluation>;
+}
+
+export { NodeEvaluator };
\ No newline at end of file
diff --git a/src/evaluator/local.ts b/src/evaluator/local.ts
new file mode 100644
index 0000000..23f928e
--- /dev/null
+++ b/src/evaluator/local.ts
@@ -0,0 +1,141 @@
+import {
+  createUser,
+  Evaluator,
+  Feature,
+  SegmentUser,
+  UserEvaluations,
+  Reason as ProtoReason,
+} from '@kenji71089/evaluation';
+
+import { FeaturesCache } from '../cache/features';
+import { SegmentUsersCache } from '../cache/segmentUsers';
+import { Evaluation } from '../objects/evaluation';
+import { User } from '../objects/user';
+import { Reason, ReasonType } from '../objects/reason';
+import { NodeEvaluator } from './evaluator';
+
+class LocalEvaluator implements NodeEvaluator {
+  private tag: string;
+  private featureCache: FeaturesCache;
+  private segementUsersCache: SegmentUsersCache;
+
+  constructor(options: {
+    tag: string;
+    featuresCache: FeaturesCache;
+    segementUsersCache: SegmentUsersCache;
+  }) {
+    this.tag = options.tag;
+    this.segementUsersCache = options.segementUsersCache;
+    this.featureCache = options.featuresCache;
+  }
+
+  async evaluate(user: User, featureID: string): Promise<Evaluation> {
+    // Get the target feature
+    const feature = await this.featureCache.get(featureID);
+    if (feature === null) {
+      throw new Error('Feature not found');
+    }
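+    // Gather the flag together with its prerequisite flags and the segment users it
+    // references, so the evaluation can run entirely against the local cache.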
+    const targetFeatures = await this.getTargetFeatures(feature);
+    const evaluator = new Evaluator();
+    const segmentIds = evaluator.listSegmentIDs(feature);
+    const segmentUsersMap = new Map<string, SegmentUser[]>();
+    for (const segmentId of segmentIds) {
+      const segmentUser = await this.segementUsersCache.get(segmentId);
+      if (segmentUser !== null) {
+        segmentUsersMap.set(segmentUser.getSegmentId(), segmentUser.getUsersList());
+      }
+    }
+
+    const protoUser = createUser(user.id, user.data);
+    const userEvaluations = await evaluator.evaluateFeatures(
+      targetFeatures,
+      protoUser,
+      segmentUsersMap,
+      this.tag,
+    );
+
+    const evaluation = this.findEvaluation(userEvaluations, featureID);
+    return evaluation;
+  }
+
+  findEvaluation(userEvaluations: UserEvaluations, featureId: string): Evaluation {
+    for (const evaluation of userEvaluations.getEvaluationsList()) {
+      if (evaluation.getFeatureId() === featureId) {
+        return {
+          id: evaluation.getId(),
+          featureId: evaluation.getFeatureId(),
+          featureVersion: evaluation.getFeatureVersion(),
+          userId: evaluation.getUserId(),
+          variationId: evaluation.getVariationId(),
+          variationName: evaluation.getVariationName(),
+          variationValue: evaluation.getVariationValue(),
+          reason: protoReasonToReason(evaluation.getReason()),
+        };
+      }
+    }
+
+    throw new Error('Evaluation not found');
+  }
+
+  async getTargetFeatures(feature: Feature): Promise<Feature[]> {
+    const targetFeatures: Feature[] = [feature];
+    if (feature.getPrerequisitesList().length === 0) {
+      return targetFeatures;
+    }
+    const prerequisiteFeatures = await this.getPrerequisiteFeatures(feature);
+    return targetFeatures.concat(prerequisiteFeatures);
+  }
+
+  async getPrerequisiteFeatures(feature: Feature): Promise<Feature[]> {
+    const prerequisites: Record<string, Feature> = {};
+    const queue: Feature[] = [feature];
+
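+    // Breadth-first walk over the prerequisite graph, de-duplicating features by id.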
+    while (queue.length > 0) {
+      const f = queue.shift();
+      if (!f) continue;
+
+      for (const p of f.getPrerequisitesList()) {
+        const preFeature = await this.featureCache.get(p.getFeatureId());
+        if (preFeature !== null) {
+          prerequisites[p.getFeatureId()] = preFeature;
+          queue.push(preFeature);
+        }
+      }
+    }
+
+    return Object.values(prerequisites);
+  }
+}
+
+function protoReasonToReason(protoReason: ProtoReason | undefined): Reason {
+  if (protoReason === undefined) {
+    return {
+      type: 'DEFAULT',
+    };
+  }
+  return {
+    type: protoReasonTypeToReasonType(protoReason.getType()),
+    ruleId: protoReason.getRuleId(),
+  };
+}
+
+function protoReasonTypeToReasonType(protoReasonType: number): ReasonType {
+  switch (protoReasonType) {
+    case ProtoReason.Type.TARGET:
+      return 'TARGET';
+    case ProtoReason.Type.RULE:
+      return 'RULE';
+    case ProtoReason.Type.DEFAULT:
+      return 'DEFAULT';
+    case ProtoReason.Type.CLIENT:
+      return 'CLIENT';
+    case ProtoReason.Type.OFF_VARIATION:
+      return 'OFF_VARIATION';
+    case ProtoReason.Type.PREREQUISITE:
+      return 'PREREQUISITE';
+    default:
+      return 'DEFAULT';
+  }
+}
+
+export { LocalEvaluator };
diff --git a/src/grpc/client.ts b/src/grpc/client.ts
new file mode 100644
index 0000000..fabfe7d
--- /dev/null
+++ b/src/grpc/client.ts
@@ -0,0 +1,142 @@
+import {
+  GatewayClient,
+  GetFeatureFlagsRequest,
+  GetFeatureFlagsResponse,
+  GetSegmentUsersRequest,
+  GetSegmentUsersResponse,
+  ServiceError,
+} from '@kenji71089/evaluation';
+import { grpc } from '@improbable-eng/grpc-web';
+import { NodeHttpTransport } from '@improbable-eng/grpc-web-node-http-transport';
+import { SourceId } from '../objects/sourceId';
+import { version } from '../objects/version';
+import { InvalidStatusError } from '../api/client';
+
+interface GRPCClient {
+  getSegmentUsers(
+    options: {
+      segmentIdsList: Array<string>,
+      requestedAt: number,
+    }
+  ): Promise<GetSegmentUsersResponse>;
+
+  getFeatureFlags(
+    options: {
+      tag: string,
+      featureFlagsId: string,
+      requestedAt: number,
+    }
+  ): Promise<GetFeatureFlagsResponse>;
+}
+
+const scheme = 'https://';
+class DefaultGRPCClient implements GRPCClient {
+  private readonly apiKey: string;
+  private client: GatewayClient;
+
+  constructor(host: string, apiKey: string) {
+    this.apiKey = apiKey;
+    let serviceHost = host;
+    if (!host.includes(scheme)) {
+      serviceHost = `${scheme}${host}`;
+    }
+    this.client = new GatewayClient(serviceHost, {
+      transport: NodeHttpTransport(),
+    });
+  }
+
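+  // Every request authenticates with the API key via the authorization metadata header.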
+  getMetadata() {
+    const metadata = new grpc.Metadata();
+    metadata.set('authorization', this.apiKey);
+    return metadata;
+  }
+
+  getSegmentUsers(
+    options: {
+      segmentIdsList: Array<string>,
+      requestedAt: number,
+    }
+  ): Promise<GetSegmentUsersResponse> {
+    const req = new GetSegmentUsersRequest();
+    req.setSegmentIdsList(options.segmentIdsList);
+    req.setRequestedAt(options.requestedAt);
+
+    req.setSourceId(SourceId.NODE_SERVER);
+    req.setSdkVersion(version);
+
+    return new Promise((resolve, reject) => {
+      this.client.getSegmentUsers(req, this.getMetadata(), (err, res) => {
+        if (err) {
+          const invalidStatusError = convertSerivceError(err);
+          return reject(invalidStatusError);
+        }
+        if (res) {
+          resolve(res);
+        } else {
+          reject(new Error('Response is null'));
+        }
+      });
+    });
+  }
+
+  getFeatureFlags(
+    options: {
+      tag: string,
+      featureFlagsId: string,
+      requestedAt: number,
+    }
+  ): Promise<GetFeatureFlagsResponse> {
+    const req = new GetFeatureFlagsRequest();
+    req.setTag(options.tag);
+    req.setFeatureFlagsId(options.featureFlagsId);
+    req.setRequestedAt(options.requestedAt);
+
+    req.setSourceId(SourceId.NODE_SERVER);
+    req.setSdkVersion(version);
+
+    return new Promise((resolve, reject) => {
+      this.client.getFeatureFlags(req, this.getMetadata(), (err, res) => {
+        if (err) {
+          const invalidStatusError = convertSerivceError(err);
+          return reject(invalidStatusError);
+        }
+        if (res) {
+          resolve(res);
+        } else {
+          reject(new Error('Response is null'));
+        }
+      });
+    });
+  }
+}
+
+function convertSerivceError(err: ServiceError): InvalidStatusError {
+  return new InvalidStatusError(err.message, grpcToRestStatus(err.code));
+}
+
+function grpcToRestStatus(grpcCode: number): number {
+  // https://grpc.github.io/grpc/core/md_doc_statuscodes.html
+  const grpcToRestMap: { [key: number]: number } = {
+    0: 200, // OK
+    1: 499, // CANCELLED
+    2: 500, // UNKNOWN
+    3: 400, // INVALID_ARGUMENT
+    4: 504, // DEADLINE_EXCEEDED
+    5: 404, // NOT_FOUND
+    6: 409, // ALREADY_EXISTS
+    7: 403, // PERMISSION_DENIED
+    8: 429, // RESOURCE_EXHAUSTED
+    9: 400, // FAILED_PRECONDITION
+    10: 409, // ABORTED
+    11: 400, // OUT_OF_RANGE
+    12: 501, // UNIMPLEMENTED
+    13: 500, // INTERNAL
+    14: 503, // UNAVAILABLE
+    15: 500, // DATA_LOSS
+    16: 401, // UNAUTHENTICATED
+  };
+
+  return grpcToRestMap[grpcCode] || 500; // Default to 500 if gRPC code is unrecognized
+}
+
+export { GRPCClient, DefaultGRPCClient, convertSerivceError, grpcToRestStatus };
\ No newline at end of file
diff --git a/src/index.ts b/src/index.ts
index d2eda26..2bdde64 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,30 +1,27 @@
 import { User } from './objects/user';
 import { EventStore } from './stores/EventStore';
-import { createSchedule, removeSchedule } from './schedule';
-import { GIT_REVISION } from './shared';
 import { APIClient } from './api/client';
 import { Config, defaultConfig } from './config';
-import { createDefaultEvaluationEvent, createEvaluationEvent } from './objects/evaluationEvent';
-import { createGoalEvent } from './objects/goalEvent';
-import {
-  createLatencyMetricsEvent,
-  createSizeMetricsEvent,
-  toErrorMetricsEvent,
-} from './objects/metricsEvent';
-import { Evaluation } from './objects/evaluation';
-import { Event } from './objects/event';
-import { GetEvaluationResponse } from './objects/response';
-import { ApiId, NodeApiIds } from './objects/apiId';
-import { BKTEvaluationDetails, newDefaultBKTEvaluationDetails } from './evaluationDetails';
+import { BKTEvaluationDetails } from './evaluationDetails';
 import { BKTValue } from './types';
+import { InMemoryCache } from './cache/inMemoryCache';
+import { NewFeatureCache } from './cache/features';
+import {
+  FEATURE_FLAG_CACHE_TTL,
+  FeatureFlagProcessor,
+  NewFeatureFlagProcessor,
+} from './cache/processor/featureFlagCacheProcessor';
+import { NewSegmentUsersCache } from './cache/segmentUsers';
 import {
-  defaultStringToTypeConverter,
-  stringToBoolConverter,
-  stringToNumberConverter,
-  stringToObjectConverter,
-  StringToTypeConverter,
-} from './converter';
-import { error } from 'console';
+  NewSegementUserCacheProcessor,
+  SEGEMENT_USERS_CACHE_TTL,
+  SegementUsersCacheProcessor,
+} from './cache/processor/segmentUsersCacheProcessor';
+import { DefaultGRPCClient } from './grpc/client';
+import { ProcessorEventsEmitter } from './processorEventsEmitter';
+import { Clock } from './utils/clock';
+import { LocalEvaluator } from './evaluator/local';
+import { BKTClientImpl } from './client';
 
 export interface BuildInfo {
   readonly GIT_REVISION: string;
@@ -57,7 +54,6 @@ export interface Bucketeer {
    */
   getNumberVariation(user: User, featureId: string, defaultValue: number): Promise<number>;
 
-
   /**
    * booleanVariation returns variation as boolean.
    * If a variation returned by server is not boolean, defaultValue is retured.
@@ -75,13 +71,13 @@ export interface Bucketeer {
   ): Promise<BKTEvaluationDetails<boolean>>;
 
   /**
- * stringVariation returns variation as string.
- * If a variation returned by server is not string, defaultValue is retured.
- * @param user User information.
- * @param featureId Feature flag ID to use.
- * @param defaultValue The variation value that is retured if SDK fails to fetch the variation or the variation is not string.
- * @returns The variation value returned from server or default value.
- */
+   * stringVariation returns variation as string.
+   * If a variation returned by server is not string, defaultValue is returned.
+   * @param user User information.
+   * @param featureId Feature flag ID to use.
+   * @param defaultValue The variation value that is returned if the SDK fails to fetch the variation or the variation is not a string.
+   * @returns The variation value returned from server or default value.
+   */
   stringVariation(user: User, featureId: string, defaultValue: string): Promise<string>;
 
   stringVariationDetails(
@@ -143,241 +139,71 @@ export interface Bucketeer {
   getBuildInfo(): BuildInfo;
 }
 
-const COUNT_PER_REGISTER_EVENT = 100;
-
 /**
  * initialize initializes a Bucketeer instance and returns it.
  * @param config Configurations of the SDK.
  * @returns Bucketeer SDK instance.
  */
 export function initialize(config: Config): Bucketeer {
-  return new BKTClientImpl(config);
+  const resolvedConfig = {
+    ...defaultConfig,
+    ...config,
+  };
+  return defaultInitialize(resolvedConfig);
 }
 
-export class BKTClientImpl implements Bucketeer {
-  apiClient: APIClient;
-  eventStore: EventStore;
-  config: Config;
-  registerEventsScheduleID: NodeJS.Timeout;
-
-  constructor(config: Config) {
-    this.config = {
-      ...defaultConfig,
-      ...config,
-    };
-
-    this.apiClient = new APIClient(this.config.host, this.config.token);
-    this.eventStore = new EventStore();
-    this.registerEventsScheduleID = createSchedule(() => {
-      if (this.eventStore.size() > 0) {
-        this.callRegisterEvents(this.eventStore.takeout(this.eventStore.size()));
-      }
-    }, this.config.pollingIntervalForRegisterEvents!);
-  }
-
-  async stringVariation(user: User, featureId: string, defaultValue: string): Promise<string> {
-    return (await this.stringVariationDetails(user, featureId, defaultValue)).variationValue;
-  }
-
-  async booleanVariationDetails(
-    user: User,
-    featureId: string,
-    defaultValue: boolean,
-  ): Promise<BKTEvaluationDetails<boolean>> {
-    return this.getVariationDetails(user, featureId, defaultValue, stringToBoolConverter);
-  }
-
-  async booleanVariation(user: User, featureId: string, defaultValue: boolean): Promise<boolean> {
-    return (await this.booleanVariationDetails(user, featureId, defaultValue)).variationValue;
-  }
-
-  async stringVariationDetails(
-    user: User,
-    featureId: string,
-    defaultValue: string,
-  ): Promise<BKTEvaluationDetails<string>> {
-    return this.getVariationDetails(user, featureId, defaultValue, defaultStringToTypeConverter);
-  }
-
-  async numberVariation(user: User, featureId: string, defaultValue: number): Promise<number> {
-    return (await this.numberVariationDetails(user, featureId, defaultValue)).variationValue;
-  }
-
-  async numberVariationDetails(
-    user: User,
-    featureId: string,
-    defaultValue: number,
-  ): Promise<BKTEvaluationDetails<number>> {
-    return this.getVariationDetails(user, featureId, defaultValue, stringToNumberConverter);
-  }
-
-  async objectVariation(user: User, featureId: string, defaultValue: BKTValue): Promise<BKTValue> {
-    return (await this.objectVariationDetails(user, featureId, defaultValue)).variationValue;
-  }
-
-  async objectVariationDetails(
-    user: User,
-    featureId: string,
-    defaultValue: BKTValue,
-  ): Promise<BKTEvaluationDetails<BKTValue>> {
-    return this.getVariationDetails(user, featureId, defaultValue, stringToObjectConverter);
-  }
-
-  registerEvents(): void {
-    if (this.eventStore.size() >= COUNT_PER_REGISTER_EVENT) {
-      this.callRegisterEvents(this.eventStore.takeout(COUNT_PER_REGISTER_EVENT));
-    }
-  }
-
-  registerAllEvents(): void {
-    if (this.eventStore.size() > 0) {
-      this.callRegisterEvents(this.eventStore.getAll());
-    }
-  }
-
-  callRegisterEvents(events: Array<Event>): void {
-    this.apiClient.registerEvents(events).catch((e) => {
-      this.saveErrorMetricsEvent(this.config.tag, e, ApiId.REGISTER_EVENTS);
-      this.config.logger?.warn('register events failed', e);
+function defaultInitialize(resolvedConfig: Config): Bucketeer {
+  const apiClient = new APIClient(resolvedConfig.host, resolvedConfig.token);
+  const eventStore = new EventStore();
+  const eventEmitter = new ProcessorEventsEmitter();
+  
+  let featureFlagProcessor: FeatureFlagProcessor | null = null;
+  let segementUsersCacheProcessor: SegementUsersCacheProcessor | null = null;
+  let localEvaluator: LocalEvaluator | null = null;
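+  // Local evaluation wires up a gRPC client, in-memory caches for flags and segment
+  // users, and the background processors that keep those caches up to date.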
+  if (resolvedConfig.enableLocalEvaluation === true) {
+    const grpcClient = new DefaultGRPCClient(resolvedConfig.host, resolvedConfig.token);
+    const cache = new InMemoryCache();
+    const featureFlagCache = NewFeatureCache({ cache: cache, ttl: FEATURE_FLAG_CACHE_TTL });
+
+    const segementUsersCache = NewSegmentUsersCache({
+      cache: cache,
+      ttl: SEGEMENT_USERS_CACHE_TTL,
     });
-  }
-
-  saveDefaultEvaluationEvent(user: User, featureId: string) {
-    this.eventStore.add(createDefaultEvaluationEvent(this.config.tag, user, featureId));
-    this.registerEvents();
-  }
-
-  saveEvaluationEvent(user: User, evaluation: Evaluation) {
-    this.eventStore.add(createEvaluationEvent(this.config.tag, user, evaluation));
-    this.registerEvents();
-  }
-
-  saveGoalEvent(user: User, goalId: string, value?: number) {
-    this.eventStore.add(createGoalEvent(this.config.tag, goalId, user, value ? value : 0));
-    this.registerEvents();
-  }
-
-  saveEvaluationMetricsEvent(tag: string, second: number, size: number) {
-    this.saveLatencyMetricsEvent(tag, second, ApiId.GET_EVALUATION);
-    this.saveSizeMetricsEvent(tag, size, ApiId.GET_EVALUATION);
-  }
-
-  saveLatencyMetricsEvent(tag: string, second: number, apiId: NodeApiIds) {
-    this.eventStore.add(createLatencyMetricsEvent(tag, second, apiId));
-    this.registerEvents();
-  }
-
-  saveSizeMetricsEvent(tag: string, size: number, apiId: NodeApiIds) {
-    this.eventStore.add(createSizeMetricsEvent(tag, size, apiId));
-    this.registerEvents();
-  }
-
-  saveErrorMetricsEvent(tag: string, e: any, apiId: NodeApiIds) {
-    const event = toErrorMetricsEvent(e, tag, apiId, this.config.logger);
-    if (event) {
-      this.eventStore.add(event);
-      this.registerEvents();
-    }
-  }
 
-  async getEvaluation(user: User, featureId: string): Promise<Evaluation | null> {
-    const startTime: number = Date.now();
-    let res: GetEvaluationResponse;
-    let size: number;
-    try {
-      [res, size] = await this.apiClient.getEvaluation(this.config.tag, user, featureId);
-    } catch (error) {
-      this.saveErrorMetricsEvent(this.config.tag, error, ApiId.GET_EVALUATION);
-      return null;
-    }
-    const evaluation = res?.evaluation;
-    if (evaluation == null) {
-      const error = Error('Fail to get evaluation. Reason: null response.');
-      this.saveErrorMetricsEvent(this.config.tag, error, ApiId.GET_EVALUATION);
-      return null;
-    }
-    const second = (Date.now() - startTime) / 1000;
-    this.saveEvaluationMetricsEvent(this.config.tag, second, size);
-    return evaluation;
-  }
-
-  async getVariationDetails<T extends BKTValue>(
-    user: User,
-    featureId: string,
-    defaultValue: T,
-    typeConverter: StringToTypeConverter<T>,
-  ): Promise<BKTEvaluationDetails<T>> {
-    const evaluation = await this.getEvaluation(user, featureId);
-    const variationValue = evaluation?.variationValue;
-
-    // Handle conversion based on the type of T
-    let result: T | null = null;
-
-    if (variationValue !== undefined && variationValue !== null) {
-      try {
-        result = typeConverter(variationValue);
-      } catch (err) {
-        result = null;
-        this.saveErrorMetricsEvent(this.config.tag, error, ApiId.GET_EVALUATION);
-        this.config.logger?.debug(
-          `getVariationDetails failed to parse: ${variationValue} using: ${typeof typeConverter} with error: ${error.toString()}`,
-        );
-      }
-    }
-
-    if (evaluation !== null && result !== null) {
-      this.saveEvaluationEvent(user, evaluation);
-      return {
-        featureId: evaluation.featureId,
-        featureVersion: evaluation.featureVersion,
-        userId: evaluation.userId,
-        variationId: evaluation.variationId,
-        variationName: evaluation.variationName,
-        variationValue: result,
-        reason: evaluation.reason?.type || 'DEFAULT',
-      } satisfies BKTEvaluationDetails<T>;
-    } else {
-      this.saveDefaultEvaluationEvent(user, featureId);
-      return newDefaultBKTEvaluationDetails(user.id, featureId, defaultValue);
-    }
-  }
-
-  async getStringVariation(user: User, featureId: string, defaultValue: string): Promise<string> {
-    return this.stringVariation(user, featureId, defaultValue);
-  }
-
-  async getBoolVariation(user: User, featureId: string, defaultValue: boolean): Promise<boolean> {
-    return this.booleanVariation(user, featureId, defaultValue);
-  }
-
-  async getNumberVariation(user: User, featureId: string, defaultValue: number): Promise<number> {
-    return this.numberVariation(user, featureId, defaultValue);
-  }
+    featureFlagProcessor = NewFeatureFlagProcessor({
+      cache: cache,
+      featureFlagCache: featureFlagCache,
+      pollingInterval: resolvedConfig.cachePollingInterval!,
+      grpc: grpcClient,
+      eventEmitter: eventEmitter,
+      featureTag: resolvedConfig.tag,
+      clock: new Clock(),
+    });
 
-  async getJsonVariation(user: User, featureId: string, defaultValue: object): Promise<object> {
-    const valueStr = await this.getStringVariation(user, featureId, '');
-    try {
-      return JSON.parse(valueStr);
-    } catch (e) {
-      this.config.logger?.debug('getJsonVariation failed to parse', e);
-      return defaultValue;
-    }
-  }
+    segementUsersCacheProcessor = NewSegementUserCacheProcessor({
+      cache: cache,
+      segmentUsersCache: segementUsersCache,
+      pollingInterval: resolvedConfig.cachePollingInterval!,
+      grpc: grpcClient,
+      eventEmitter: eventEmitter,
+      clock: new Clock(),
+    });
 
-  track(user: User, goalId: string, value?: number): void {
-    this.config.logger?.debug('track is called', goalId, value);
-    this.saveGoalEvent(user, goalId, value);
+    localEvaluator = new LocalEvaluator({
+      tag: resolvedConfig.tag,
+      featuresCache: featureFlagCache,
+      segementUsersCache: segementUsersCache,
+    });
   }
 
-  async destroy(): Promise<void> {
-    this.registerAllEvents();
-    removeSchedule(this.registerEventsScheduleID);
-    this.config.logger?.info('destroy finished', this.registerEventsScheduleID);
-  }
+  const options = {
+    apiClient: apiClient,
+    eventStore: eventStore,
+    localEvaluator: localEvaluator,
+    featureFlagProcessor: featureFlagProcessor,
+    segementUsersCacheProcessor: segementUsersCacheProcessor,
+    eventEmitter: eventEmitter,
+  };
 
-  getBuildInfo(): BuildInfo {
-    return {
-      GIT_REVISION,
-    };
-  }
+  return new BKTClientImpl(resolvedConfig, options);
 }
diff --git a/src/objects/apiId.ts b/src/objects/apiId.ts
index 56437e1..03cb83b 100644
--- a/src/objects/apiId.ts
+++ b/src/objects/apiId.ts
@@ -1,8 +1,18 @@
+// The API IDs must match the IDs defined on the main repository
+// https://github.com/bucketeer-io/bucketeer/blob/main/proto/event/client/event.proto
 export enum ApiId {
   UNKNOWN_API = 0,
   GET_EVALUATION = 1,
   GET_EVALUATIONS = 2,
   REGISTER_EVENTS = 3,
+  GET_FEATURE_FLAGS = 4,
+  GET_SEGMENT_USERS = 5,
+  SDK_GET_VARIATION = 100,
 }
 
-export type NodeApiIds = ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+export type NodeApiIds =
+  | ApiId.GET_EVALUATION
+  | ApiId.REGISTER_EVENTS
+  | ApiId.GET_FEATURE_FLAGS
+  | ApiId.GET_SEGMENT_USERS
+  | ApiId.SDK_GET_VARIATION;
diff --git a/src/objects/metricsEvent.ts b/src/objects/metricsEvent.ts
index c9e5432..b8bb6b1 100644
--- a/src/objects/metricsEvent.ts
+++ b/src/objects/metricsEvent.ts
@@ -50,39 +50,39 @@ export type ErrorMetricsEvent =
 export type SuccessMetricsEvent = SizeMetricsEvent | LatencyMetricsEvent;
 
 export type TimeoutErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof TIMEOUT_ERROR_METRICS_EVENT_NAME;
 };
 
 export type InternalSdkErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof INTERNAL_SDK_ERROR_METRICS_EVENT_NAME;
 };
 
 export type NetworkErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof NETWORK_ERROR_METRICS_EVENT_NAME;
 };
 
 export type SizeMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   sizeByte: number;
   labels: { [key: string]: string };
   '@type': typeof SIZE_METRICS_EVENT_NAME;
 };
 
 export type LatencyMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   latencySecond: number;
   labels: { [key: string]: string };
   '@type': typeof LATENCY_METRICS_EVENT_NAME;
 };
 
 export type UnknownErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof UNKNOWN_ERROR_METRICS_EVENT_NAME;
 };
@@ -244,6 +244,25 @@ function isNodeError(error: unknown): error is NodeJS.ErrnoException {
   return typeUtils.isNativeError(error);
 }
 
+export function isErrorMetricsEvent(obj: any, specificErrorType?: string): obj is MetricsEvent {
+  if (!isMetricsEvent(obj) || !obj.event) {
+    return false;
+  }
+  // check event type in ErrorMetricsEvent
+  if (specificErrorType) {
+    return obj.event['@type'] === specificErrorType;
+  }
+
+  const errorEventTypes = [
+    TIMEOUT_ERROR_METRICS_EVENT_NAME,
+    INTERNAL_SDK_ERROR_METRICS_EVENT_NAME,
+    NETWORK_ERROR_METRICS_EVENT_NAME,
+    UNKNOWN_ERROR_METRICS_EVENT_NAME,
+  ];
+  
+  return errorEventTypes.includes(obj.event['@type']);
+}
+
 export function isMetricsEvent(obj: any): obj is MetricsEvent {
   const isObject = typeof obj === 'object' && obj !== null;
   const hasTimestamp = typeof obj.timestamp === 'number';
diff --git a/src/objects/reason.ts b/src/objects/reason.ts
index 1eb6e4a..a082bd7 100644
--- a/src/objects/reason.ts
+++ b/src/objects/reason.ts
@@ -1,4 +1,6 @@
-export type Reason = {
+import { Reason as ProtoReason } from '@kenji71089/evaluation';
+
+type Reason = {
   type: ReasonType;
   ruleId?: string;
 };
@@ -11,3 +13,4 @@ type ReasonType =
   | 'OFF_VARIATION'
   | 'PREREQUISITE'
 
+export { Reason, ReasonType };
\ No newline at end of file
diff --git a/src/objects/request.ts b/src/objects/request.ts
index eb09617..c473559 100644
--- a/src/objects/request.ts
+++ b/src/objects/request.ts
@@ -15,4 +15,4 @@ export type GetEvaluationRequest = BaseRequest & {
   tag: string;
   user?: User;
   featureId: string;
-};
+};
\ No newline at end of file
diff --git a/src/objects/response.ts b/src/objects/response.ts
index 31645ac..347bc45 100644
--- a/src/objects/response.ts
+++ b/src/objects/response.ts
@@ -11,4 +11,4 @@ export type RegisterEventsResponse = {
 
 export type GetEvaluationResponse = {
   evaluation?: Evaluation;
-};
+};
\ No newline at end of file
diff --git a/src/objects/status.ts b/src/objects/status.ts
index e38b917..20888fe 100644
--- a/src/objects/status.ts
+++ b/src/objects/status.ts
@@ -1,6 +1,6 @@
 import { ApiId, NodeApiIds } from './apiId';
 import { createEvent } from './event';
-import { createMetricsEvent } from './metricsEvent';
+import { createMetricsEvent, isMetricsEvent, MetricsEvent } from './metricsEvent';
 
 const FORBIDDEN_ERROR_METRICS_EVENT_NAME =
   'type.googleapis.com/bucketeer.event.client.ForbiddenErrorMetricsEvent';
@@ -22,55 +22,55 @@ const PAYLOAD_TOO_LARGE_ERROR_METRICS_EVENT_NAME =
   'type.googleapis.com/bucketeer.event.client.PayloadTooLargeExceptionEvent';
 
 export type BadRequestErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof BAD_REQUEST_ERROR_METRICS_EVENT_NAME;
 };
 
 export type UnauthorizedErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof UNAUTHORIZED_ERROR_METRICS_EVENT_NAME;
 };
 
 export type ForbiddenErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof FORBIDDEN_ERROR_METRICS_EVENT_NAME;
 };
 
 export type NotFoundErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof NOT_FOUND_ERROR_METRICS_EVENT_NAME;
 };
 
 export type ClientClosedRequestErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof CLIENT_CLOSED_REQUEST_ERROR_METRICS_EVENT_NAME;
 };
 
 export type InternalServerErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof INTERNAL_SERVER_ERROR_METRICS_EVENT_NAME;
 };
 
 export type ServiceUnavailableErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof SERVICE_UNAVAILABLE_ERROR_METRICS_EVENT_NAME;
 };
 
 export type RedirectRequestErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof REDIRECT_REQUEST_ERROR_METRICS_EVENT_NAME;
 };
 
 export type PayLoadTooLargetErrorMetricsEvent = {
-  apiId: ApiId.GET_EVALUATION | ApiId.REGISTER_EVENTS;
+  apiId: NodeApiIds;
   labels: { [key: string]: string };
   '@type': typeof PAYLOAD_TOO_LARGE_ERROR_METRICS_EVENT_NAME;
 };
@@ -198,3 +198,27 @@ export function createPayloadTooLargeErrorMetricsEvent(tag: string, apiId: NodeA
   const metricsEvent = createMetricsEvent(payloadTooLargeMetricsEvent);
   return createEvent(metricsEvent);
 }
+
+export function isStatusErrorMetricsEvent(obj: any, specificErrorType?: string): obj is MetricsEvent {
+  if (!isMetricsEvent(obj) || !obj.event) {
+    return false;
+  }
+  // check event type in ErrorMetricsEvent
+  if (specificErrorType) {
+    return obj.event['@type'] === specificErrorType;
+  }
+
+  const statusErrorEventTypes = [
+    FORBIDDEN_ERROR_METRICS_EVENT_NAME,
+    BAD_REQUEST_ERROR_METRICS_EVENT_NAME,
+    UNAUTHORIZED_ERROR_METRICS_EVENT_NAME,
+    NOT_FOUND_ERROR_METRICS_EVENT_NAME,
+    CLIENT_CLOSED_REQUEST_ERROR_METRICS_EVENT_NAME,
+    INTERNAL_SERVER_ERROR_METRICS_EVENT_NAME,
+    SERVICE_UNAVAILABLE_ERROR_METRICS_EVENT_NAME,
+    REDIRECT_REQUEST_ERROR_METRICS_EVENT_NAME,
+    PAYLOAD_TOO_LARGE_ERROR_METRICS_EVENT_NAME,
+  ];
+    
+  return statusErrorEventTypes.includes(obj.event['@type']);
+}
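
A minimal usage sketch of the new type guard (illustrative only, not part of the patch; `capturedEvent` is an assumed value under test):

// Accept any of the HTTP-status error metrics events...
if (isStatusErrorMetricsEvent(capturedEvent)) {
  // capturedEvent is narrowed to MetricsEvent here
}
// ...or only a specific one, by passing its '@type' string.
if (
  isStatusErrorMetricsEvent(
    capturedEvent,
    'type.googleapis.com/bucketeer.event.client.ForbiddenErrorMetricsEvent',
  )
) {
  // only forbidden-error metrics events match
}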
diff --git a/src/processorEventsEmitter.ts b/src/processorEventsEmitter.ts
new file mode 100644
index 0000000..f3864b5
--- /dev/null
+++ b/src/processorEventsEmitter.ts
@@ -0,0 +1,30 @@
+import { EventEmitter } from 'events';
+import { NodeApiIds } from './objects/apiId';
+import { User } from './objects/user';
+import { Evaluation } from './objects/evaluation';
+
+// Define event types with specific data types.
+interface ProcessorEvents {
+  pushEvaluationEvent: { user: User, evaluation: Evaluation };
+  pushLatencyMetricsEvent: { latency: number, apiId: NodeApiIds };
+  pushSizeMetricsEvent: { size: number, apiId: NodeApiIds };
+  error: { error: any, apiId: NodeApiIds };
+  pushDefaultEvaluationEvent: { user: User, featureId: string };
+}
+
+// Create a strongly-typed EventEmitter.
+class ProcessorEventsEmitter extends EventEmitter {
+  emit<K extends keyof ProcessorEvents>(event: K, data: ProcessorEvents[K]): boolean {
+    return super.emit(event, data);
+  }
+
+  on<K extends keyof ProcessorEvents>(event: K, listener: (data: ProcessorEvents[K]) => void): this {
+    return super.on(event, listener);
+  }
+
+  close() {
+    this.removeAllListeners();
+  }
+}
+
+export { ProcessorEventsEmitter };
\ No newline at end of file
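
A short sketch of how the typed emitter is consumed (illustrative, not part of the patch; the listener body is an assumption, and ApiId.SDK_GET_VARIATION is the value used by the tests later in this series):

const emitter = new ProcessorEventsEmitter();
// The payload type is inferred from ProcessorEvents, so `latency` and `apiId` are typed.
emitter.on('pushLatencyMetricsEvent', ({ latency, apiId }) => {
  console.log(`api ${apiId} responded in ${latency}ms`);
});
emitter.emit('pushLatencyMetricsEvent', { latency: 12, apiId: ApiId.SDK_GET_VARIATION });
emitter.close(); // drops all listeners when the client shuts down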
diff --git a/src/utils/clock.ts b/src/utils/clock.ts
new file mode 100644
index 0000000..d874c03
--- /dev/null
+++ b/src/utils/clock.ts
@@ -0,0 +1,7 @@
+class Clock {
+  getTime(): number {
+    return Date.now();
+  }
+}
+
+export { Clock };
\ No newline at end of file
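
The Clock wrapper exists so the current time can be injected rather than read from Date.now() directly; the cache processors import it for exactly that reason. A sketch of how a test might substitute it (illustrative, not part of the patch; the frozen value is an assumption):

const realClock = new Clock();                                  // production: Date.now()
const frozenClock = { getTime: () => 1700000000000 } as Clock;  // test: fixed timestamp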
diff --git a/tsconfig.json b/tsconfig.json
index 1720f6a..015a5be 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -5,6 +5,8 @@
     "module": "ESNext", /* Specify module code generation. */
     "moduleResolution": "node",
     "lib": [
+      "DOM",
+      "DOM.Iterable",
       "es6"
     ],
     "declaration": true,
diff --git a/yarn.lock b/yarn.lock
index 3d1f4fd..2a6bde6 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -47,10 +47,18 @@
   resolved "https://registry.npmjs.org/@ava/require-precompiled/-/require-precompiled-1.0.0.tgz"
   integrity sha512-N7w4g+P/SUL8SF+HC4Z4e/ctV6nQ5AERC90K90r4xZQ8WVrJux9albvfyYAzygyU47CSqMWh6yJwFs8DYaeWmg==
 
+"@ava/typescript@^5.0.0":
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/@ava/typescript/-/typescript-5.0.0.tgz#1772294379ee3f93ee2843310504efba1695b0e7"
+  integrity sha512-2twsQz2fUd95QK1MtKuEnjkiN47SKHZfi/vWj040EN6Eo2ZW3SNcAwncJqXXoMTYZTWtBRXYp3Fg8z+JkFI9aQ==
+  dependencies:
+    escape-string-regexp "^5.0.0"
+    execa "^8.0.1"
+
 "@babel/cli@7.25.9":
   version "7.25.9"
-  resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.25.9.tgz#51036166fd0e9cfb26eee1b9ddc264a0d6d5f843"
-  integrity sha512-I+02IfrTiSanpxJBlZQYb18qCxB6c2Ih371cVpfgIrPQrjAYkf45XxomTJOG8JBWX5GY35/+TmhCMdJ4ZPkL8Q==
+  resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.25.6.tgz#bc35561adc78ade43ac9c09a690768493ab9ed95"
+  integrity sha512-Z+Doemr4VtvSD2SNHTrkiFZ1LX+JI6tyRXAAOb4N9khIuPyoEPmTPJarPm8ljJV1D6bnMQjyHMWTT9NeKbQuXA==
   dependencies:
     "@jridgewell/trace-mapping" "^0.3.25"
     commander "^6.2.0"
@@ -1117,6 +1125,24 @@
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.4.1.tgz#9a96ce501bc62df46c4031fbd970e3cc6b10f07b"
   integrity sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==
+"@improbable-eng/grpc-web-node-http-transport@^0.15.0":
+  version "0.15.0"
+  resolved "https://registry.yarnpkg.com/@improbable-eng/grpc-web-node-http-transport/-/grpc-web-node-http-transport-0.15.0.tgz#5a064472ef43489cbd075a91fb831c2abeb09d68"
+  integrity sha512-HLgJfVolGGpjc9DWPhmMmXJx8YGzkek7jcCFO1YYkSOoO81MWRZentPOd/JiKiZuU08wtc4BG+WNuGzsQB5jZA==
+
+"@improbable-eng/grpc-web@^0.13.0":
+  version "0.13.0"
+  resolved "https://registry.yarnpkg.com/@improbable-eng/grpc-web/-/grpc-web-0.13.0.tgz#289e6fc4dafc00b1af8e2b93b970e6892299014d"
+  integrity sha512-vaxxT+Qwb7GPqDQrBV4vAAfH0HywgOLw6xGIKXd9Q8hcV63CQhmS3p4+pZ9/wVvt4Ph3ZDK9fdC983b9aGMUFg==
+  dependencies:
+    browser-headers "^0.4.0"
+
+"@improbable-eng/grpc-web@^0.15.0":
+  version "0.15.0"
+  resolved "https://registry.yarnpkg.com/@improbable-eng/grpc-web/-/grpc-web-0.15.0.tgz#3e47e9fdd90381a74abd4b7d26e67422a2a04bef"
+  integrity sha512-ERft9/0/8CmYalqOVnJnpdDry28q+j+nAlFFARdjyxXDJ+Mhgv9+F600QC8BR9ygOfrXRlAk6CvST2j+JCpQPg==
+  dependencies:
+    browser-headers "^0.4.1"
 
 "@inquirer/confirm@^5.0.0":
   version "5.0.1"
@@ -1205,6 +1231,18 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"
 
+"@kenji71089/evaluation@0.0.1":
+  version "0.0.1"
+  resolved "https://registry.yarnpkg.com/@kenji71089/evaluation/-/evaluation-0.0.1.tgz#8606fed88cdcacfebd68afb407629ac40cb99002"
+  integrity sha512-DXWMk90Hs29wyyFGS6Gpj3VFw31Q6qBhFuMZvHrUxBRvJc2g8J0vFrd0Y9ckfyaTCyowIilG7TWQYUCencDj2g==
+  dependencies:
+    "@improbable-eng/grpc-web" "^0.15.0"
+    "@types/fnv-plus" "^1.3.2"
+    "@types/google-protobuf" "^3.15.12"
+    "@types/semver" "^7.5.8"
+    fnv-plus "^1.3.1"
+    google-protobuf "3.14.0"
+
 "@mapbox/node-pre-gyp@^1.0.5":
   version "1.0.11"
   resolved "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz"
@@ -1425,6 +1463,34 @@
   resolved "https://registry.yarnpkg.com/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz#719df7fb41766bc143369eaa0dd56d8dc87c9958"
   integrity sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==
 
+"@sinonjs/commons@^3.0.1":
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.1.tgz#1029357e44ca901a615585f6d27738dbc89084cd"
+  integrity sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==
+  dependencies:
+    type-detect "4.0.8"
+
+"@sinonjs/fake-timers@^13.0.1", "@sinonjs/fake-timers@^13.0.2":
+  version "13.0.5"
+  resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz#36b9dbc21ad5546486ea9173d6bea063eb1717d5"
+  integrity sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==
+  dependencies:
+    "@sinonjs/commons" "^3.0.1"
+
+"@sinonjs/samsam@^8.0.1":
+  version "8.0.2"
+  resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.2.tgz#e4386bf668ff36c95949e55a38dc5f5892fc2689"
+  integrity sha512-v46t/fwnhejRSFTGqbpn9u+LQ9xJDse10gNnPgAcxgdoCDMXj/G2asWAC/8Qs+BAZDicX+MNZouXT1A7c83kVw==
+  dependencies:
+    "@sinonjs/commons" "^3.0.1"
+    lodash.get "^4.4.2"
+    type-detect "^4.1.0"
+
+"@sinonjs/text-encoding@^0.7.3":
+  version "0.7.3"
+  resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz#282046f03e886e352b2d5f5da5eb755e01457f3f"
+  integrity sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==
+
 "@types/cookie@^0.6.0":
   version "0.6.0"
   resolved "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz"
@@ -1450,6 +1516,16 @@
   resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50"
   integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==
 
+"@types/fnv-plus@^1.3.2":
+  version "1.3.2"
+  resolved "https://registry.yarnpkg.com/@types/fnv-plus/-/fnv-plus-1.3.2.tgz#bd591c1031ae48a18c99eaa60f659288aea545c0"
+  integrity sha512-Bgr5yn2dph2q8HZKDS002Pob6vaRTRfhqN9E+TOhjKsJvnfZXULPR3ihH8dL5ZjgxbNhqhTn9hijpbAMPtKZzw==
+
+"@types/google-protobuf@^3.15.12":
+  version "3.15.12"
+  resolved "https://registry.yarnpkg.com/@types/google-protobuf/-/google-protobuf-3.15.12.tgz#eb2ba0eddd65712211a2b455dc6071d665ccf49b"
+  integrity sha512-40um9QqwHjRS92qnOaDpL7RmDK15NuZYo9HihiJRbYkMQZlWnuH8AdvbMy8/o6lgLmKbDUKa+OALCltHdbOTpQ==
+
 "@types/json-schema@*", "@types/json-schema@^7.0.15":
   version "7.0.15"
   resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz"
@@ -1475,6 +1551,23 @@
   resolved "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz"
   integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==
 
+"@types/semver@^7.5.8":
+  version "7.5.8"
+  resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e"
+  integrity sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==
+
+"@types/sinon@^17.0.3":
+  version "17.0.3"
+  resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-17.0.3.tgz#9aa7e62f0a323b9ead177ed23a36ea757141a5fa"
+  integrity sha512-j3uovdn8ewky9kRBG19bOwaZbexJu/XjtkHyjvUgt4xfPFz18dcORIMqnYh66Fx3Powhcr85NT5+er3+oViapw==
+  dependencies:
+    "@types/sinonjs__fake-timers" "*"
+
+"@types/sinonjs__fake-timers@*":
+  version "8.1.5"
+  resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz#5fd3592ff10c1e9695d377020c033116cc2889f2"
+  integrity sha512-mQkU2jY8jJEF7YHjHvsQO8+3ughTL1mcnn96igfhONmR+fUPSKIkefQYpSe8bsly2Ep7oQbn/6VG5/9/0qcArQ==
+
 "@types/statuses@^2.0.4":
   version "2.0.5"
   resolved "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.5.tgz"
@@ -1985,6 +2078,11 @@ braces@^3.0.2, braces@^3.0.3, braces@~3.0.2:
   dependencies:
     fill-range "^7.1.1"
 
+browser-headers@^0.4.0, browser-headers@^0.4.1:
+  version "0.4.1"
+  resolved "https://registry.yarnpkg.com/browser-headers/-/browser-headers-0.4.1.tgz#4308a7ad3b240f4203dbb45acedb38dc2d65dd02"
+  integrity sha512-CA9hsySZVo9371qEHjHZtYxV2cFtVj5Wj/ZHi8ooEsrtm4vOnl9Y9HmyYWk9q+05d7K3rdoAE0j3MVEFVvtQtg==
+
 browserslist@^4.24.0, browserslist@^4.24.2:
   version "4.24.2"
   resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.2.tgz#f5845bc91069dbd55ee89faf9822e1d885d16580"
@@ -2538,6 +2636,11 @@ detect-libc@^2.0.0:
   resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz"
   integrity sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==
 
+diff@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/diff/-/diff-7.0.0.tgz#3fb34d387cd76d803f6eebea67b921dab0182a9a"
+  integrity sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==
+
 dir-glob@^3.0.1:
   version "3.0.1"
   resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz"
@@ -2783,6 +2886,21 @@ execa@^1.0.0:
     signal-exit "^3.0.0"
     strip-eof "^1.0.0"
 
+execa@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.yarnpkg.com/execa/-/execa-8.0.1.tgz#51f6a5943b580f963c3ca9c6321796db8cc39b8c"
+  integrity sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==
+  dependencies:
+    cross-spawn "^7.0.3"
+    get-stream "^8.0.1"
+    human-signals "^5.0.0"
+    is-stream "^3.0.0"
+    merge-stream "^2.0.0"
+    npm-run-path "^5.1.0"
+    onetime "^6.0.0"
+    signal-exit "^4.1.0"
+    strip-final-newline "^3.0.0"
+
 expand-brackets@^0.1.4:
   version "0.1.5"
   resolved "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz"
@@ -3016,6 +3134,11 @@ flatted@^3.2.9:
   resolved "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz"
   integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==
 
+fnv-plus@^1.3.1:
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/fnv-plus/-/fnv-plus-1.3.1.tgz#c34cb4572565434acb08ba257e4044ce2b006d67"
+  integrity sha512-Gz1EvfOneuFfk4yG458dJ3TLJ7gV19q3OM/vVvvHf7eT02Hm1DleB4edsia6ahbKgAYxO9gvyQ1ioWZR+a00Yw==
+
 for-in@^1.0.1, for-in@^1.0.2:
   version "1.0.2"
   resolved "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz"
@@ -3146,6 +3269,11 @@ get-stream@^4.0.0:
   dependencies:
     pump "^3.0.0"
 
+get-stream@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-8.0.1.tgz#def9dfd71742cd7754a7761ed43749a27d02eca2"
+  integrity sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==
+
 get-value@^2.0.3, get-value@^2.0.6:
   version "2.0.6"
   resolved "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz"
@@ -3233,6 +3361,16 @@ globby@^14.0.2:
     slash "^5.1.0"
     unicorn-magic "^0.1.0"
 
+google-protobuf@3.14.0:
+  version "3.14.0"
+  resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.14.0.tgz#20373d22046e63831a5110e11a84f713cc43651e"
+  integrity sha512-bwa8dBuMpOxg7COyqkW6muQuvNnWgVN8TX/epDRGW5m0jcrmq2QJyCyiV8ZE2/6LaIIqJtiv9bYokFhfpy/o6w==
+
+google-protobuf@^3.21.4:
+  version "3.21.4"
+  resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.21.4.tgz#2f933e8b6e5e9f8edde66b7be0024b68f77da6c9"
+  integrity sha512-MnG7N936zcKTco4Jd2PX2U96Kf9PxygAPKBug+74LHzmHXmceN16MmRcdgZv+DGef/S9YvQAfRsNCn4cjf9yyQ==
+
 google-protobuf@^3.6.1:
   version "3.19.1"
   resolved "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.19.1.tgz"
@@ -3351,6 +3489,11 @@ https-proxy-agent@^5.0.0:
     agent-base "6"
     debug "4"
 
+human-signals@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28"
+  integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==
+
 iconv-lite@^0.4.24:
   version "0.4.24"
   resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz"
@@ -3702,6 +3845,11 @@ is-stream@^2.0.0:
   resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz"
   integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
 
+is-stream@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac"
+  integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==
+
 is-typedarray@^1.0.0:
   version "1.0.0"
   resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz"
@@ -3843,6 +3991,11 @@ jsonfile@^4.0.0:
   optionalDependencies:
     graceful-fs "^4.1.6"
 
+just-extend@^6.2.0:
+  version "6.2.0"
+  resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-6.2.0.tgz#b816abfb3d67ee860482e7401564672558163947"
+  integrity sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==
+
 keyv@^4.5.4:
   version "4.5.4"
   resolved "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz"
@@ -3930,6 +4083,11 @@ lodash.flattendeep@^4.4.0:
   resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz"
   integrity sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=
 
+lodash.get@^4.4.2:
+  version "4.4.2"
+  resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
+  integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==
+
 lodash.merge@^4.6.2:
   version "4.6.2"
   resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz"
@@ -4029,6 +4187,11 @@ memoize@^10.0.0:
   dependencies:
     mimic-function "^5.0.0"
 
+merge-stream@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"
+  integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
 merge2@^1.3.0, merge2@^1.4.1:
   version "1.4.1"
   resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz"
@@ -4105,6 +4268,11 @@ mimic-fn@^2.1.0:
   resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz"
   integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
 
+mimic-fn@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc"
+  integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==
+
 mimic-function@^5.0.0:
   version "5.0.0"
   resolved "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.0.tgz"
@@ -4257,6 +4425,17 @@ nice-try@^1.0.4:
   resolved "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz"
   integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
 
+nise@^6.1.1:
+  version "6.1.1"
+  resolved "https://registry.yarnpkg.com/nise/-/nise-6.1.1.tgz#78ea93cc49be122e44cb7c8fdf597b0e8778b64a"
+  integrity sha512-aMSAzLVY7LyeM60gvBS423nBmIPP+Wy7St7hsb+8/fc1HmeoHJfLO8CKse4u3BtOZvQLJghYPI2i/1WZrEj5/g==
+  dependencies:
+    "@sinonjs/commons" "^3.0.1"
+    "@sinonjs/fake-timers" "^13.0.1"
+    "@sinonjs/text-encoding" "^0.7.3"
+    just-extend "^6.2.0"
+    path-to-regexp "^8.1.0"
+
 node-domexception@^1.0.0:
   version "1.0.0"
   resolved "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz"
@@ -4319,6 +4498,13 @@ npm-run-path@^2.0.0:
   dependencies:
     path-key "^2.0.0"
 
+npm-run-path@^5.1.0:
+  version "5.3.0"
+  resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.3.0.tgz#e23353d0ebb9317f174e93417e4a4d82d0249e9f"
+  integrity sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==
+  dependencies:
+    path-key "^4.0.0"
+
 npmlog@^5.0.1:
   version "5.0.1"
   resolved "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz"
@@ -4397,6 +4583,13 @@ onetime@^5.1.0:
   dependencies:
     mimic-fn "^2.1.0"
 
+onetime@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4"
+  integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==
+  dependencies:
+    mimic-fn "^4.0.0"
+
 opn@^5.3.0:
   version "5.5.0"
   resolved "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz"
@@ -4566,6 +4759,11 @@ path-key@^3.1.0:
   resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz"
   integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
 
+path-key@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18"
+  integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==
+
 path-parse@^1.0.7:
   version "1.0.7"
   resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz"
@@ -4576,6 +4774,11 @@ path-to-regexp@^6.3.0:
   resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4"
   integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==
 
+path-to-regexp@^8.1.0:
+  version "8.2.0"
+  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-8.2.0.tgz#73990cc29e57a3ff2a0d914095156df5db79e8b4"
+  integrity sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==
+
 path-type@^4.0.0:
   version "4.0.0"
   resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz"
@@ -5177,6 +5380,18 @@ signal-exit@^4.0.1, signal-exit@^4.1.0:
   resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz"
   integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==
 
+sinon@^19.0.2:
+  version "19.0.2"
+  resolved "https://registry.yarnpkg.com/sinon/-/sinon-19.0.2.tgz#944cf771d22236aa84fc1ab70ce5bffc3a215dad"
+  integrity sha512-euuToqM+PjO4UgXeLETsfQiuoyPXlqFezr6YZDFwHR3t4qaX0fZUe1MfPMznTL5f8BWrVS89KduLdMUsxFCO6g==
+  dependencies:
+    "@sinonjs/commons" "^3.0.1"
+    "@sinonjs/fake-timers" "^13.0.2"
+    "@sinonjs/samsam" "^8.0.1"
+    diff "^7.0.0"
+    nise "^6.1.1"
+    supports-color "^7.2.0"
+
 slash@^2.0.0:
   version "2.0.0"
   resolved "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz"
@@ -5394,6 +5609,11 @@ strip-eof@^1.0.0:
   resolved "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz"
   integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
 
+strip-final-newline@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd"
+  integrity sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==
+
 strip-json-comments@^3.1.1:
   version "3.1.1"
   resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
@@ -5423,7 +5643,7 @@ supports-color@^5.3.0:
   dependencies:
     has-flag "^3.0.0"
 
-supports-color@^7.1.0:
+supports-color@^7.1.0, supports-color@^7.2.0:
   version "7.2.0"
   resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz"
   integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
@@ -5578,6 +5798,16 @@ type-check@^0.4.0, type-check@~0.4.0:
   dependencies:
     prelude-ls "^1.2.1"
 
+type-detect@4.0.8:
+  version "4.0.8"
+  resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
+  integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
+
+type-detect@^4.1.0:
+  version "4.1.0"
+  resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.1.0.tgz#deb2453e8f08dcae7ae98c626b13dddb0155906c"
+  integrity sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==
+
 type-fest@^0.13.1:
   version "0.13.1"
   resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz"

From 637e9ff94615d69630cc85e7ca302d6e460d9b8f Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Tue, 10 Dec 2024 13:11:50 +0700
Subject: [PATCH 2/8] chore: use official lib @bucketeer/evaluation

---
 package.json                                  |  2 +-
 src/__tests__/cache/feature_cache.ts          |  2 +-
 .../cache/processor/featureCache/in_memory.ts |  2 +-
 .../cache/processor/featureCache/polling.ts   |  2 +-
 .../cache/processor/featureCache/update.ts    |  2 +-
 .../processor/segementUsersCache/polling.ts   |  2 +-
 .../processor/segementUsersCache/update.ts    |  2 +-
 src/__tests__/cache/segements_user.ts         |  2 +-
 src/__tests__/client_local_evaluation.ts      |  4 +--
 src/__tests__/evaluator/evaluator.ts          |  2 +-
 src/__tests__/gprc/client.ts                  |  2 +-
 src/__tests__/mocks/gprc.ts                   |  2 +-
 src/cache/features.ts                         |  2 +-
 .../processor/featureFlagCacheProcessor.ts    |  2 +-
 .../processor/segmentUsersCacheProcessor.ts   |  2 +-
 src/cache/segmentUsers.ts                     |  2 +-
 src/evaluator/evaluator.ts                    |  2 +-
 src/evaluator/local.ts                        |  2 +-
 src/grpc/client.ts                            |  2 +-
 src/objects/reason.ts                         |  2 +-
 yarn.lock                                     | 26 +++++++++----------
 21 files changed, 34 insertions(+), 34 deletions(-)

diff --git a/package.json b/package.json
index 6febecc..702aad8 100644
--- a/package.json
+++ b/package.json
@@ -10,7 +10,7 @@
   "dependencies": {
     "@types/node": "^22.10.1",
     "@types/uuid": "^10.0.0",
-    "uuid": "^11.0.3"
+    "uuid": "^11.0.3",
     "@bucketeer/evaluation": "0.0.1",
     "@improbable-eng/grpc-web": "^0.13.0",
     "@improbable-eng/grpc-web-node-http-transport": "^0.15.0",
diff --git a/src/__tests__/cache/feature_cache.ts b/src/__tests__/cache/feature_cache.ts
index 1b93e1f..4946b14 100644
--- a/src/__tests__/cache/feature_cache.ts
+++ b/src/__tests__/cache/feature_cache.ts
@@ -1,5 +1,5 @@
 import test from 'ava';
-import { createFeature } from '@kenji71089/evaluation';
+import { createFeature } from '@bucketeer/evaluation';
 import { NewFeatureCache } from '../../cache/features';
 import { InMemoryCache } from '../../cache/inMemoryCache';
 
diff --git a/src/__tests__/cache/processor/featureCache/in_memory.ts b/src/__tests__/cache/processor/featureCache/in_memory.ts
index c934d8e..7531343 100644
--- a/src/__tests__/cache/processor/featureCache/in_memory.ts
+++ b/src/__tests__/cache/processor/featureCache/in_memory.ts
@@ -8,7 +8,7 @@ import {
 } from '../../../../cache/processor/featureFlagCacheProcessor';
 
 import { Clock } from '../../../../utils/clock';
-import { GetFeatureFlagsResponse, GetSegmentUsersResponse, createFeature } from '@kenji71089/evaluation';
+import { GetFeatureFlagsResponse, GetSegmentUsersResponse, createFeature } from '@bucketeer/evaluation';
 import { GRPCClient } from '../../../../grpc/client';
 import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
 
diff --git a/src/__tests__/cache/processor/featureCache/polling.ts b/src/__tests__/cache/processor/featureCache/polling.ts
index 4b9fadc..7c997ad 100644
--- a/src/__tests__/cache/processor/featureCache/polling.ts
+++ b/src/__tests__/cache/processor/featureCache/polling.ts
@@ -10,7 +10,7 @@ import { ProcessorEventsEmitter } from '../../../../processorEventsEmitter';
 import {
   GetFeatureFlagsResponse,
   createFeature,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 
 import { FEATURE_FLAG_REQUESTED_AT } from '../../../../../__test/cache/processor/featureFlagCacheProcessor';
 import { Clock } from '../../../../utils/clock';
diff --git a/src/__tests__/cache/processor/featureCache/update.ts b/src/__tests__/cache/processor/featureCache/update.ts
index 7131b50..5652ce7 100644
--- a/src/__tests__/cache/processor/featureCache/update.ts
+++ b/src/__tests__/cache/processor/featureCache/update.ts
@@ -11,7 +11,7 @@ import {
   Feature,
   GetFeatureFlagsResponse,
   createFeature,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 import sino from 'sinon';
 import { FEATURE_FLAG_REQUESTED_AT } from '../../../../../__test/cache/processor/featureFlagCacheProcessor';
 import { Clock } from '../../../../utils/clock';
diff --git a/src/__tests__/cache/processor/segementUsersCache/polling.ts b/src/__tests__/cache/processor/segementUsersCache/polling.ts
index 4c37775..2af00ec 100644
--- a/src/__tests__/cache/processor/segementUsersCache/polling.ts
+++ b/src/__tests__/cache/processor/segementUsersCache/polling.ts
@@ -1,7 +1,7 @@
 import test from 'ava';
 import sino from 'sinon';
 
-import { GetSegmentUsersResponse, SegmentUsers } from '@kenji71089/evaluation';
+import { GetSegmentUsersResponse, SegmentUsers } from '@bucketeer/evaluation';
 
 import {
   NewSegementUserCacheProcessor,
diff --git a/src/__tests__/cache/processor/segementUsersCache/update.ts b/src/__tests__/cache/processor/segementUsersCache/update.ts
index 882d4f0..b849516 100644
--- a/src/__tests__/cache/processor/segementUsersCache/update.ts
+++ b/src/__tests__/cache/processor/segementUsersCache/update.ts
@@ -1,7 +1,7 @@
 import anyTest, { TestFn } from 'ava';
 import sino from 'sinon';
 
-import { GetSegmentUsersResponse, SegmentUser, SegmentUsers } from '@kenji71089/evaluation';
+import { GetSegmentUsersResponse, SegmentUser, SegmentUsers } from '@bucketeer/evaluation';
 
 import {
   DefaultSegementUserCacheProcessor,
diff --git a/src/__tests__/cache/segements_user.ts b/src/__tests__/cache/segements_user.ts
index 65b27dc..cbc6ffc 100644
--- a/src/__tests__/cache/segements_user.ts
+++ b/src/__tests__/cache/segements_user.ts
@@ -1,5 +1,5 @@
 import test from 'ava';
-import { SegmentUsers, } from '@kenji71089/evaluation';
+import { SegmentUsers, } from '@bucketeer/evaluation';
 import { InMemoryCache } from '../../cache/inMemoryCache';
 import { NewSegmentUsersCache } from '../../cache/segmentUsers';
 
diff --git a/src/__tests__/client_local_evaluation.ts b/src/__tests__/client_local_evaluation.ts
index 8b994ee..5b8fd46 100644
--- a/src/__tests__/client_local_evaluation.ts
+++ b/src/__tests__/client_local_evaluation.ts
@@ -12,7 +12,7 @@ import {
   Clause,
   createUser,
   createSegmentUser,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 
 import { LocalEvaluator } from '../evaluator/local';
 import {
@@ -31,7 +31,7 @@ import { ProcessorEventsEmitter } from '../processorEventsEmitter';
 import { Clock } from '../utils/clock';
 import { NewSegmentUsersCache, SegmentUsersCache } from '../cache/segmentUsers';
 import { NewFeatureCache, FeaturesCache } from '../cache/features';
-import { ApiId } from '@kenji71089/evaluation/lib/proto/event/client/event_pb';
+import { ApiId } from '@bucketeer/evaluation/lib/proto/event/client/event_pb';
 import { Config, DefaultLogger } from '../index';
 import { APIClient } from '../api/client';
 import { EventStore } from '../stores/EventStore';
diff --git a/src/__tests__/evaluator/evaluator.ts b/src/__tests__/evaluator/evaluator.ts
index 9fda41b..fd4ad86 100644
--- a/src/__tests__/evaluator/evaluator.ts
+++ b/src/__tests__/evaluator/evaluator.ts
@@ -12,7 +12,7 @@ import {
   Clause,
   createUser,
   createSegmentUser,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 
 import { LocalEvaluator } from '../../evaluator/local';
 import { SEGEMENT_USERS_CACHE_TTL } from '../../cache/processor/segmentUsersCacheProcessor';
diff --git a/src/__tests__/gprc/client.ts b/src/__tests__/gprc/client.ts
index cbd59d8..7fc6aaf 100644
--- a/src/__tests__/gprc/client.ts
+++ b/src/__tests__/gprc/client.ts
@@ -1,7 +1,7 @@
 import test from 'ava';
 import { InvalidStatusError } from '../../api/client';
 import { convertSerivceError, DefaultGRPCClient, grpcToRestStatus } from '../../grpc/client';
-import { ServiceError } from '@kenji71089/evaluation';
+import { ServiceError } from '@bucketeer/evaluation';
 import { grpc } from '@improbable-eng/grpc-web';
 
 test('grpcToRestStatus should return correct HTTP status for known gRPC codes', t => {
diff --git a/src/__tests__/mocks/gprc.ts b/src/__tests__/mocks/gprc.ts
index d99be17..14a989c 100644
--- a/src/__tests__/mocks/gprc.ts
+++ b/src/__tests__/mocks/gprc.ts
@@ -1,7 +1,7 @@
 import {
   GetSegmentUsersResponse,
   GetFeatureFlagsResponse,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 import { GRPCClient } from '../../grpc/client';
 
 class MockGRPCClient implements GRPCClient {
diff --git a/src/cache/features.ts b/src/cache/features.ts
index 647059f..df2441c 100644
--- a/src/cache/features.ts
+++ b/src/cache/features.ts
@@ -1,4 +1,4 @@
-import { Feature } from '@kenji71089/evaluation';
+import { Feature } from '@bucketeer/evaluation';
 import { Cache } from './cache';
 import { NamespaceCache } from './namespace';
 
diff --git a/src/cache/processor/featureFlagCacheProcessor.ts b/src/cache/processor/featureFlagCacheProcessor.ts
index 4405c38..d0f7d35 100644
--- a/src/cache/processor/featureFlagCacheProcessor.ts
+++ b/src/cache/processor/featureFlagCacheProcessor.ts
@@ -3,7 +3,7 @@ import { Cache } from '../cache';
 import { GRPCClient } from '../../grpc/client';
 import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
 import { createSchedule, removeSchedule } from '../../schedule';
-import { Feature } from '@kenji71089/evaluation';
+import { Feature } from '@bucketeer/evaluation';
 import { ApiId } from '../../objects/apiId';
 import { Clock } from '../../utils/clock';
 
diff --git a/src/cache/processor/segmentUsersCacheProcessor.ts b/src/cache/processor/segmentUsersCacheProcessor.ts
index 2e32f13..01b7174 100644
--- a/src/cache/processor/segmentUsersCacheProcessor.ts
+++ b/src/cache/processor/segmentUsersCacheProcessor.ts
@@ -3,7 +3,7 @@ import { GRPCClient } from '../../grpc/client';
 import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
 import { Cache } from '../cache';
 import { ApiId } from '../../objects/apiId';
-import { SegmentUsers } from '@kenji71089/evaluation';
+import { SegmentUsers } from '@bucketeer/evaluation';
 import { createSchedule, removeSchedule } from '../../schedule';
 import { Clock } from '../../utils/clock';
 
diff --git a/src/cache/segmentUsers.ts b/src/cache/segmentUsers.ts
index 242dcfc..51f2b78 100644
--- a/src/cache/segmentUsers.ts
+++ b/src/cache/segmentUsers.ts
@@ -1,4 +1,4 @@
-import { SegmentUsers } from '@kenji71089/evaluation';
+import { SegmentUsers } from '@bucketeer/evaluation';
 import { Cache } from './cache';
 import { NamespaceCache } from './namespace';
 
diff --git a/src/evaluator/evaluator.ts b/src/evaluator/evaluator.ts
index 4820b2f..7a52d88 100644
--- a/src/evaluator/evaluator.ts
+++ b/src/evaluator/evaluator.ts
@@ -3,7 +3,7 @@ import { Evaluation } from '../objects/evaluation';
 import { User } from '../objects/user';
 
 // Node.js evaluator interface. This interface is used to evaluate the feature flag for the given user.
-// Prefix `Node` is used to avoid conflict with the evaluator interface in the `@kenji71089/evaluation` package.
+// Prefix `Node` is used to avoid conflict with the evaluator interface in the `@bucketeer/evaluation` package.
 interface NodeEvaluator {
   evaluate(user: User, featureID: string): Promise<Evaluation>;
 }
diff --git a/src/evaluator/local.ts b/src/evaluator/local.ts
index 23f928e..d3a124a 100644
--- a/src/evaluator/local.ts
+++ b/src/evaluator/local.ts
@@ -5,7 +5,7 @@ import {
   SegmentUser,
   UserEvaluations,
   Reason as ProtoReason,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 
 import { FeaturesCache } from '../cache/features';
 import { SegmentUsersCache } from '../cache/segmentUsers';
diff --git a/src/grpc/client.ts b/src/grpc/client.ts
index fabfe7d..86d02f3 100644
--- a/src/grpc/client.ts
+++ b/src/grpc/client.ts
@@ -5,7 +5,7 @@ import {
   GetSegmentUsersRequest,
   GetSegmentUsersResponse,
   ServiceError,
-} from '@kenji71089/evaluation';
+} from '@bucketeer/evaluation';
 import { grpc } from '@improbable-eng/grpc-web';
 import { NodeHttpTransport } from '@improbable-eng/grpc-web-node-http-transport';
 import { SourceId } from '../objects/sourceId';
diff --git a/src/objects/reason.ts b/src/objects/reason.ts
index a082bd7..e331445 100644
--- a/src/objects/reason.ts
+++ b/src/objects/reason.ts
@@ -1,4 +1,4 @@
-import { Reason as ProtoReason } from '@kenji71089/evaluation';
+import { Reason as ProtoReason } from '@bucketeer/evaluation';
 
 type Reason = {
   type: ReasonType;
diff --git a/yarn.lock b/yarn.lock
index 2a6bde6..2115433 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -56,7 +56,7 @@
     execa "^8.0.1"
 
 "@babel/cli@7.25.9":
-  version "7.25.9"
+  version "7.25.6"
   resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.25.6.tgz#bc35561adc78ade43ac9c09a690768493ab9ed95"
   integrity sha512-Z+Doemr4VtvSD2SNHTrkiFZ1LX+JI6tyRXAAOb4N9khIuPyoEPmTPJarPm8ljJV1D6bnMQjyHMWTT9NeKbQuXA==
   dependencies:
@@ -1018,6 +1018,18 @@
   dependencies:
     google-protobuf "^3.6.1"
 
+"@bucketeer/evaluation@0.0.1":
+  version "0.0.1"
+  resolved "https://registry.yarnpkg.com/@bucketeer/evaluation/-/evaluation-0.0.1.tgz#a1320e569be3a2548d103e62013a8b0be9ded75c"
+  integrity sha512-i2IAt2N5Zl2KVg3YrCLkjOZaDZCB0cqmk5G7tXBU+21V743e8dRP9J10y4Q3lTukOCIMtOh873IJMamd+1i6Ew==
+  dependencies:
+    "@improbable-eng/grpc-web" "^0.15.0"
+    "@types/fnv-plus" "^1.3.2"
+    "@types/google-protobuf" "^3.15.12"
+    "@types/semver" "^7.5.8"
+    fnv-plus "^1.3.1"
+    google-protobuf "3.14.0"
+
 "@bundled-es-modules/cookie@^2.0.1":
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/@bundled-es-modules/cookie/-/cookie-2.0.1.tgz#b41376af6a06b3e32a15241d927b840a9b4de507"
@@ -1231,18 +1243,6 @@
     "@jridgewell/resolve-uri" "^3.1.0"
     "@jridgewell/sourcemap-codec" "^1.4.14"
 
-"@kenji71089/evaluation@0.0.1":
-  version "0.0.1"
-  resolved "https://registry.yarnpkg.com/@kenji71089/evaluation/-/evaluation-0.0.1.tgz#8606fed88cdcacfebd68afb407629ac40cb99002"
-  integrity sha512-DXWMk90Hs29wyyFGS6Gpj3VFw31Q6qBhFuMZvHrUxBRvJc2g8J0vFrd0Y9ckfyaTCyowIilG7TWQYUCencDj2g==
-  dependencies:
-    "@improbable-eng/grpc-web" "^0.15.0"
-    "@types/fnv-plus" "^1.3.2"
-    "@types/google-protobuf" "^3.15.12"
-    "@types/semver" "^7.5.8"
-    fnv-plus "^1.3.1"
-    google-protobuf "3.14.0"
-
 "@mapbox/node-pre-gyp@^1.0.5":
   version "1.0.11"
   resolved "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz"

From c34970ef365837771eced88a2ffef0faa44f65e4 Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Mon, 2 Dec 2024 22:41:59 +0700
Subject: [PATCH 3/8] feat: error handling improvement

---
 src/__tests__/api_failed.ts                   |   2 +-
 src/__tests__/client_local_evaluation.ts      | 332 ++++++++++++------
 src/__tests__/converter_string_to_bool.ts     |   2 +
 src/__tests__/converter_string_to_num.ts      |   2 +
 src/__tests__/converter_string_to_object.ts   |   2 +
 src/__tests__/error_to_metrics_event.ts       |  27 +-
 src/__tests__/evaluator/evaluator.ts          |  55 ++-
 src/__tests__/event.ts                        |  14 +
 src/__tests__/gprc/client.ts                  |   3 +-
 src/api/client.ts                             |  13 +-
 .../processor/segmentUsersCacheProcessor.ts   |   1 +
 src/client.ts                                 |   3 +-
 src/converter.ts                              |  13 +-
 src/evaluator/local.ts                        | 103 ++++--
 src/grpc/client.ts                            |   2 +-
 src/objects/errors.ts                         |  28 ++
 src/objects/metricsEvent.ts                   |  22 +-
 17 files changed, 465 insertions(+), 159 deletions(-)
 create mode 100644 src/objects/errors.ts

diff --git a/src/__tests__/api_failed.ts b/src/__tests__/api_failed.ts
index 078afb0..91d4240 100644
--- a/src/__tests__/api_failed.ts
+++ b/src/__tests__/api_failed.ts
@@ -4,7 +4,7 @@ import fs from 'fs';
 import { APIClient } from '../api/client';
 import { User } from '../bootstrap';
 import path from 'path';
-import { InvalidStatusError } from '../api/client';
+import { InvalidStatusError } from '../objects/errors';
 
 const apiKey = '';
 
diff --git a/src/__tests__/client_local_evaluation.ts b/src/__tests__/client_local_evaluation.ts
index 5b8fd46..17d69f5 100644
--- a/src/__tests__/client_local_evaluation.ts
+++ b/src/__tests__/client_local_evaluation.ts
@@ -39,6 +39,8 @@ import { Evaluation } from '../objects/evaluation';
 import { BKTEvaluationDetails } from '../evaluationDetails';
 import { BKTValue } from '../types';
 import { BKTClientImpl } from '../client';
+import { IllegalStateError } from '../objects/errors';
+import sinon from 'sinon';
 
 const test = anyTest as TestFn<{
   sandbox: sino.SinonSandbox;
@@ -383,6 +385,7 @@ test('boolVariation - err: internal error', async (t) => {
   const { user1, ftBoolean } = data;
 
   const internalErr = new Error('internal error');
+
   const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
   featureFlagCacheMock.expects('get').once().withExactArgs(ftBoolean.getId()).rejects(internalErr);
 
@@ -395,7 +398,15 @@ test('boolVariation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
@@ -435,14 +446,17 @@ test('boolVariation - success: boolean variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.booleanVariation(sdkUser, ftBoolean.getId(), false);
   t.is(result, true);
@@ -468,22 +482,30 @@ test('booleanVariationDetails - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
     .once()
     .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftBoolean.getId() });
 
-    const evaluationDetails = {
-      featureId: ftBoolean.getId(),
-      featureVersion: ftBoolean.getVersion(),
-      userId: 'user-id-1',
-      variationId: '',
-      variationName: '',
-      variationValue: false,
-      reason: 'CLIENT',
-    } satisfies BKTEvaluationDetails<boolean>;
+  const evaluationDetails = {
+    featureId: ftBoolean.getId(),
+    featureVersion: ftBoolean.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: false,
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<boolean>;
 
   const result = await sdkInstance.booleanVariationDetails(sdkUser, ftBoolean.getId(), false);
   t.deepEqual(result, evaluationDetails);
@@ -518,14 +540,17 @@ test('booleanVariationDetails - success: boolean variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const evaluationDetails = {
     featureId: ftBoolean.getId(),
@@ -561,7 +586,15 @@ test('numberVariation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
@@ -601,14 +634,17 @@ test('numberVariation - success: number variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.numberVariation(sdkUser, ftInt.getId(), 1);
   t.is(result, 10);
@@ -642,14 +678,17 @@ test('numberVariation - success: number variation (float)', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.numberVariation(sdkUser, ftFloat.getId(), 1);
   t.is(result, 10.11);
@@ -675,22 +714,30 @@ test('numberVariationDetails - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
     .once()
     .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftInt.getId() });
 
-    const evaluationDetails = {
-      featureId: ftInt.getId(),
-      featureVersion: ftInt.getVersion(),
-      userId: 'user-id-1',
-      variationId: '',
-      variationName: '',
-      variationValue: 1,
-      reason: 'CLIENT',
-    } satisfies BKTEvaluationDetails<number>;
+  const evaluationDetails = {
+    featureId: ftInt.getId(),
+    featureVersion: ftInt.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: 1,
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<number>;
 
   const result = await sdkInstance.numberVariationDetails(sdkUser, ftInt.getId(), 1);
   t.deepEqual(result, evaluationDetails);
@@ -725,14 +772,17 @@ test('numberVariationDetails - success: number variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const evaluationDetails = {
     featureId: ftInt.getId(),
@@ -776,14 +826,17 @@ test('numberVariationDetails - success: number variation (float)', async (t) =>
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const evaluationDetails = {
     featureId: ftFloat.getId(),
@@ -819,7 +872,15 @@ test('stringVariation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
@@ -859,14 +920,17 @@ test('stringVariation - success: string variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.stringVariation(sdkUser, ftString.getId(), 'default');
   t.is(result, 'value 10');
@@ -892,22 +956,30 @@ test('stringVariationDetails - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
     .once()
     .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftString.getId() });
 
-    const evaluationDetails = {
-      featureId: ftString.getId(),
-      featureVersion: ftString.getVersion(),
-      userId: 'user-id-1',
-      variationId: '',
-      variationName: '',
-      variationValue: 'default',
-      reason: 'CLIENT',
-    } satisfies BKTEvaluationDetails<string>;
+  const evaluationDetails = {
+    featureId: ftString.getId(),
+    featureVersion: ftString.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: 'default',
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<string>;
 
   const result = await sdkInstance.stringVariationDetails(sdkUser, ftString.getId(), 'default');
   t.deepEqual(result, evaluationDetails);
@@ -942,14 +1014,17 @@ test('stringVariationDetails - success: string variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const evaluationDetails = {
     featureId: ftString.getId(),
@@ -985,7 +1060,15 @@ test('jsonVariation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
@@ -1025,14 +1108,17 @@ test('jsonVariation - success: json variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.getJsonVariation(sdkUser, ftJSON.getId(), {});
   t.deepEqual(result, { Str: 'str1', Int: 1 });
@@ -1058,15 +1144,23 @@ test('objectVariation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
     .once()
     .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftJSON.getId() });
 
-  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), {test: 'test1'});
-  t.deepEqual(result, {test: 'test1'});
+  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), { test: 'test1' });
+  t.deepEqual(result, { test: 'test1' });
 
   featureFlagCacheMock.verify();
   eventProcessorMock.verify();
@@ -1098,16 +1192,19 @@ test('objectVariation - success: json variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
-  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), {test: 'test1'});
+  const result = await sdkInstance.objectVariation(sdkUser, ftJSON.getId(), { test: 'test1' });
   t.deepEqual(result, { Str: 'str1', Int: 1 });
 
   featureFlagCacheMock.verify();
@@ -1131,24 +1228,34 @@ test('objectVariationDetail - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   eventProcessorMock
     .expects('emit')
     .once()
     .withArgs('pushDefaultEvaluationEvent', { user: sdkUser, featureId: ftJSON.getId() });
 
-    const evaluationDetails = {
-      featureId: ftJSON.getId(),
-      featureVersion: ftJSON.getVersion(),
-      userId: 'user-id-1',
-      variationId: '',
-      variationName: '',
-      variationValue: {test: 'test1'},
-      reason: 'CLIENT',
-    } satisfies BKTEvaluationDetails<BKTValue>;
-
-  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {test: 'test1'});
+  const evaluationDetails = {
+    featureId: ftJSON.getId(),
+    featureVersion: ftJSON.getVersion(),
+    userId: 'user-id-1',
+    variationId: '',
+    variationName: '',
+    variationValue: { test: 'test1' },
+    reason: 'CLIENT',
+  } satisfies BKTEvaluationDetails<BKTValue>;
+
+  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {
+    test: 'test1',
+  });
   t.deepEqual(result, evaluationDetails);
 
   featureFlagCacheMock.verify();
@@ -1181,14 +1288,17 @@ test('objectVariationDetail - success: object variation', async (t) => {
   } satisfies Evaluation;
 
   eventProcessorMock
-  .expects('emit')
-  .once()
-  .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
+    .expects('emit')
+    .once()
+    .withArgs('pushEvaluationEvent', { user: sdkUser, evaluation: evaluation });
 
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const evaluationDetails = {
     featureId: ftJSON.getId(),
@@ -1200,7 +1310,9 @@ test('objectVariationDetail - success: object variation', async (t) => {
     reason: 'DEFAULT',
   } satisfies BKTEvaluationDetails<BKTValue>;
 
-  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {test: 'test1'});
+  const result = await sdkInstance.objectVariationDetails(sdkUser, ftJSON.getId(), {
+    test: 'test1',
+  });
   t.deepEqual(result, evaluationDetails);
 
   featureFlagCacheMock.verify();
@@ -1224,7 +1336,15 @@ test('getEvaluation - err: internal error', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('error', { error: internalErr, apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs(
+      'error',
+      sinon.match({
+        error: sinon.match
+          .instanceOf(IllegalStateError)
+          .and(sinon.match.has('message', 'Failed to get feature: internal error')),
+        apiId: ApiId.SDK_GET_VARIATION,
+      }),
+    );
 
   const result = await sdkInstance.getEvaluation(sdkUser, ftBoolean.getId());
   t.is(result, null);
@@ -1241,9 +1361,13 @@ test('getEvaluation - success', async (t) => {
   const featureFlagCacheMock = t.context.sandbox.mock(featureFlagCache);
   featureFlagCacheMock.expects('get').once().withExactArgs(feature3.getId()).resolves(feature3);
   featureFlagCacheMock.expects('get').once().withExactArgs(feature4.getId()).resolves(feature4);
-  
+
   const segementUsersCacheMock = t.context.sandbox.mock(segmentUsersCache);
-  segementUsersCacheMock.expects('get').once().withExactArgs(segmentUser2.getSegmentId()).resolves(segmentUser2);
+  segementUsersCacheMock
+    .expects('get')
+    .once()
+    .withExactArgs(segmentUser2.getSegmentId())
+    .resolves(segmentUser2);
 
   const sdkUser = {
     id: user1.getId(),
@@ -1265,7 +1389,10 @@ test('getEvaluation - success', async (t) => {
   eventProcessorMock
     .expects('emit')
     .once()
-    .withArgs('pushLatencyMetricsEvent', { latency: sino.match.any , apiId: ApiId.SDK_GET_VARIATION });
+    .withArgs('pushLatencyMetricsEvent', {
+      latency: sino.match.any,
+      apiId: ApiId.SDK_GET_VARIATION,
+    });
 
   const result = await sdkInstance.getEvaluation(sdkUser, feature3.getId());
   t.deepEqual(result, evaluation);
@@ -1274,8 +1401,9 @@ test('getEvaluation - success', async (t) => {
   t.pass();
 });
 
-test ('sdk destroy - success', async (t) => {
-  const { sdkInstance, eventEmitter, featureFlagProcessor, segementUsersCacheProcessor } = t.context;
+test('sdk destroy - success', async (t) => {
+  const { sdkInstance, eventEmitter, featureFlagProcessor, segementUsersCacheProcessor } =
+    t.context;
   const eventProcessorMock = t.context.sandbox.mock(eventEmitter);
   eventProcessorMock.expects('close').once().resolves();
 
@@ -1290,4 +1418,4 @@ test ('sdk destroy - success', async (t) => {
   featureFlagProcessorCacheMock.verify();
   segmentUsersCacheProcessorMock.verify();
   t.pass();
-});
\ No newline at end of file
+});
diff --git a/src/__tests__/converter_string_to_bool.ts b/src/__tests__/converter_string_to_bool.ts
index 5348232..25d7875 100644
--- a/src/__tests__/converter_string_to_bool.ts
+++ b/src/__tests__/converter_string_to_bool.ts
@@ -1,5 +1,6 @@
 import test from 'ava';
 import { stringToBoolConverter } from '../converter';
+import { IllegalArgumentError } from '../objects/errors';
 
 type StringToBoolConvertTestCase = {
   input: string;
@@ -27,6 +28,7 @@ stringConvertTestCases.forEach(({ input, expected }, index) => {
       t.is(output, expected);
     } catch (err) {
       t.is(expected, null);
+      t.true(err instanceof IllegalArgumentError);
     }
   });
 });
diff --git a/src/__tests__/converter_string_to_num.ts b/src/__tests__/converter_string_to_num.ts
index b8b79dd..f2b12a1 100644
--- a/src/__tests__/converter_string_to_num.ts
+++ b/src/__tests__/converter_string_to_num.ts
@@ -1,5 +1,6 @@
 import test from 'ava';
 import { stringToBoolConverter, stringToNumberConverter } from '../converter';
+import { IllegalArgumentError } from '../objects/errors';
 
 type StringToNumConvertTestCase = {
   input: string;
@@ -31,6 +32,7 @@ stringConvertTestCases.forEach(({ input, expected }, index) => {
       t.is(output, expected);
     } catch (err) {
       t.is(expected, null);
+      t.true(err instanceof IllegalArgumentError);
     }
   });
 });
diff --git a/src/__tests__/converter_string_to_object.ts b/src/__tests__/converter_string_to_object.ts
index 258b168..10072a9 100644
--- a/src/__tests__/converter_string_to_object.ts
+++ b/src/__tests__/converter_string_to_object.ts
@@ -1,6 +1,7 @@
 import test from 'ava';
 import { stringToObjectConverter } from '../converter';
 import { BKTValue } from '../types';
+import { IllegalArgumentError } from '../objects/errors';
 
 type StringToJSonValueConvertTestCase = {
   input: string;
@@ -45,6 +46,7 @@ stringConvertTestCases.forEach(({ input, expected }, index) => {
       t.deepEqual(output, expected);
     } catch (err) {
       t.deepEqual(expected, null);
+      t.true(err instanceof IllegalArgumentError);
     }
   });
 });
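
For reference, this is the behavior the converter tests above now pin down; a minimal sketch, assuming a caller importing from the root of src/ (the blank input value is illustrative):

    import { stringToObjectConverter } from './converter';
    import { IllegalArgumentError } from './objects/errors';

    try {
      // Blank or non-JSON input now surfaces as IllegalArgumentError instead of a plain Error.
      stringToObjectConverter('   ');
    } catch (err) {
      console.log(err instanceof IllegalArgumentError); // true
    }
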
diff --git a/src/__tests__/error_to_metrics_event.ts b/src/__tests__/error_to_metrics_event.ts
index 330a7a2..2ff61df 100644
--- a/src/__tests__/error_to_metrics_event.ts
+++ b/src/__tests__/error_to_metrics_event.ts
@@ -4,6 +4,7 @@ import {
   createUnknownErrorMetricsEvent,
   createNetworkErrorMetricsEvent,
   toErrorMetricsEvent,
+  createInternalSdkErrorMetricsEvent,
 } from '../objects/metricsEvent';
 import { ApiId } from '../objects/apiId';
 import {
@@ -13,7 +14,29 @@ import {
   createPayloadTooLargeErrorMetricsEvent,
   createServiceUnavailableErrorMetricsEvent,
 } from '../objects/status';
-import { InvalidStatusError } from '../api/client';
+import { InvalidStatusError, IllegalStateError, IllegalArgumentError } from '../objects/errors';
+
+test('toErrorMetricsEvent returns correct event for IllegalStateError', (t) => {
+  const error = new IllegalStateError('Feature not found');
+  const tag = 'test-tag';
+  const apiId = ApiId.GET_EVALUATION;
+
+  const expectedEvent = createInternalSdkErrorMetricsEvent(tag, apiId, 'Feature not found').event;
+  const actualEvent = toErrorMetricsEvent(error, tag, apiId)?.event;
+
+  t.deepEqual(actualEvent, expectedEvent);
+});
+
+test('toErrorMetricsEvent returns correct event for IllegalArgumentError', (t) => {
+  const error = new IllegalArgumentError('Input string must be non-blank');
+  const tag = 'test-tag';
+  const apiId = ApiId.GET_EVALUATION;
+
+  const expectedEvent = createInternalSdkErrorMetricsEvent(
+    tag,
+    apiId,
+    'Input string must be non-blank',
+  ).event;
+  const actualEvent = toErrorMetricsEvent(error, tag, apiId)?.event;
+
+  t.deepEqual(actualEvent, expectedEvent);
+});
 
 test('toErrorMetricsEvent returns correct event for InvalidStatusError with 400 status code', (t) => {
   const error = new InvalidStatusError('Bad Request', 400);
@@ -159,7 +182,7 @@ test('toErrorMetricsEvent returns correct event for unknown object', (t) => {
   const tag = 'test-tag';
   const apiId = ApiId.GET_EVALUATION;
 
-  const expectedEvent = createUnknownErrorMetricsEvent(tag, apiId, undefined, undefined).event;
+  const expectedEvent = createUnknownErrorMetricsEvent(tag, apiId, undefined, String(error)).event;
   const actualEvent = toErrorMetricsEvent(error, tag, apiId)?.event;
 
   t.deepEqual(actualEvent, expectedEvent);
diff --git a/src/__tests__/evaluator/evaluator.ts b/src/__tests__/evaluator/evaluator.ts
index fd4ad86..8c89aaa 100644
--- a/src/__tests__/evaluator/evaluator.ts
+++ b/src/__tests__/evaluator/evaluator.ts
@@ -24,6 +24,7 @@ import { Clock } from '../../utils/clock';
 import { NewSegmentUsersCache, SegmentUsersCache } from '../../cache/segmentUsers';
 import { NewFeatureCache, FeaturesCache } from '../../cache/features';
 import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
+import { IllegalStateError } from '../../objects/errors';
 
 const test = anyTest as TestFn<{
   sandbox: sino.SinonSandbox;
@@ -285,7 +286,27 @@ test('evaluate | err: failed to get feature flag from cache', async (t) => {
       feature1.getId(),
     )
     .catch((e) => {
-      t.is(e, err);
+      t.deepEqual(e, new IllegalStateError(`Failed to get feature: ${err.message}`));
+    });
+  mock.verify();
+  t.pass();
+});
+
+test('evaluate | err: get feature flag from cache | cache missing', async (t) => {
+  const { evaluator, featureFlagCache } = t.context;
+  const { feature1 } = t.context.data;
+  const mock = t.context.sandbox.mock(featureFlagCache).expects('get');
+  mock.resolves(null);
+  await evaluator
+    .evaluate(
+      {
+        id: 'id',
+        data: {},
+      },
+      feature1.getId(),
+    )
+    .catch((e) => {
+      t.deepEqual(e, new IllegalStateError(`Feature not found: ${feature1.getId()}`));
     });
   mock.verify();
   t.pass();
@@ -308,7 +329,7 @@ test('evaluate | err: failed to get prerequisite feature flag from cache', async
       feature1.getId(),
     )
     .catch((e) => {
-      t.is(e, err);
+      t.deepEqual(e, new IllegalStateError(`Failed to get feature: ${err.message}`));
     });
 
   mock.verify();
@@ -334,7 +355,35 @@ test ('evaluate | err: failed to get segment from cache', async (t) => {
       feature5.getId(),
     )
     .catch((e) => {
-      t.is(e, err);
+      t.deepEqual(e, new IllegalStateError(`Failed to get segment users: ${err.message}`));
+    });
+
+  featuresCacheMock.verify();
+  segmentUsersCacheMock.verify();
+
+  t.pass();
+});
+
+test('evaluate | err: get segment from cache | cache missing', async (t) => {
+  const { evaluator, featureFlagCache, segmentUsersCache, sandbox } = t.context;
+  const { feature5, segmentUser2 } = t.context.data;
+  const featuresCacheMock = sandbox.mock(featureFlagCache);
+  featuresCacheMock.expects('get').withArgs(feature5.getId()).resolves(feature5);
+
+  const segmentUsersCacheMock = sandbox.mock(segmentUsersCache);
+  segmentUsersCacheMock.expects('get').withArgs(segmentUser2.getSegmentId()).resolves(null);
+
+  await evaluator
+    .evaluate(
+      {
+        id: 'id',
+        data: {},
+      },
+      feature5.getId(),
+    )
+    .catch((e) => {
+      t.deepEqual(e, new IllegalStateError(`Segment users not found: ${segmentUser2.getSegmentId()}`));
     });
 
   featuresCacheMock.verify();
diff --git a/src/__tests__/event.ts b/src/__tests__/event.ts
index d400cba..3fc9cf1 100644
--- a/src/__tests__/event.ts
+++ b/src/__tests__/event.ts
@@ -509,3 +509,17 @@ test('createUnknownErrorMetricsEvent with statusCode and errorMessage', (t) => {
   const metrics = actual.event as MetricsEvent;
   t.deepEqual(metrics.event, expectedEvent);
 });
+
+test('createInternalSdkErrorMetricsEvent with errorMessage', (t) => {
+  const expectedEvent = {
+    apiId,
+    labels: {
+      tag,
+      error_message: 'internal error',
+    },
+    '@type': INTERNAL_SDK_ERROR_METRICS_EVENT_NAME,
+  };
+  const actual = createInternalSdkErrorMetricsEvent(tag, apiId, 'internal error');
+  const metrics = actual.event as MetricsEvent;
+  t.deepEqual(metrics.event, expectedEvent);
+});
\ No newline at end of file
diff --git a/src/__tests__/gprc/client.ts b/src/__tests__/gprc/client.ts
index 7fc6aaf..155e03a 100644
--- a/src/__tests__/gprc/client.ts
+++ b/src/__tests__/gprc/client.ts
@@ -1,8 +1,9 @@
 import test from 'ava';
-import { InvalidStatusError } from '../../api/client';
+
 import { convertSerivceError, DefaultGRPCClient, grpcToRestStatus } from '../../grpc/client';
 import { ServiceError } from '@bucketeer/evaluation';
 import { grpc } from '@improbable-eng/grpc-web';
+import { InvalidStatusError } from '../../objects/errors';
 
 test('grpcToRestStatus should return correct HTTP status for known gRPC codes', t => {
   t.is(grpcToRestStatus(0), 200); // OK
diff --git a/src/api/client.ts b/src/api/client.ts
index 94b41b3..fe9b812 100644
--- a/src/api/client.ts
+++ b/src/api/client.ts
@@ -5,6 +5,7 @@ import { SourceId } from '../objects/sourceId';
 import { GetEvaluationRequest, RegisterEventsRequest } from '../objects/request';
 import { GetEvaluationResponse, RegisterEventsResponse } from '../objects/response';
 import { version } from '../objects/version';
+import { InvalidStatusError } from '../objects/errors';
 
 const scheme = 'https://';
 const evaluationAPI = '/get_evaluation';
@@ -112,14 +113,4 @@ export class APIClient {
       });
     });
   }
-}
-
-export class InvalidStatusError extends Error {
-  readonly code: number | undefined;
-  constructor(message: string, code: number | undefined) {
-    super(message);
-    this.code = code;
-    // Set the prototype explicitly.
-    Object.setPrototypeOf(this, InvalidStatusError.prototype);
-  }
-}
+}
\ No newline at end of file
diff --git a/src/cache/processor/segmentUsersCacheProcessor.ts b/src/cache/processor/segmentUsersCacheProcessor.ts
index 01b7174..aa36da7 100644
--- a/src/cache/processor/segmentUsersCacheProcessor.ts
+++ b/src/cache/processor/segmentUsersCacheProcessor.ts
@@ -6,6 +6,7 @@ import { ApiId } from '../../objects/apiId';
 import { SegmentUsers } from '@bucketeer/evaluation';
 import { createSchedule, removeSchedule } from '../../schedule';
 import { Clock } from '../../utils/clock';
+import { InvalidStatusError } from '../../objects/errors';
 
 interface SegementUsersCacheProcessor {
   start(): void;
diff --git a/src/client.ts b/src/client.ts
index 7b161af..d8de223 100644
--- a/src/client.ts
+++ b/src/client.ts
@@ -30,6 +30,7 @@ import { SegementUsersCacheProcessor } from './cache/processor/segmentUsersCache
 import { ProcessorEventsEmitter } from './processorEventsEmitter';
 import { NodeEvaluator } from './evaluator/evaluator';
 import { Bucketeer, BuildInfo } from '.';
+import { IllegalStateError } from './objects/errors';
 
 const COUNT_PER_REGISTER_EVENT = 100;
 
@@ -241,7 +242,7 @@ export class BKTClientImpl implements Bucketeer {
 
         return evaluation;
       } else {
-        throw new Error('LocalEvaluator is not initialized');
+        throw new IllegalStateError('LocalEvaluator is not initialized');
       }
     } catch (error) {
       this.eventEmitter.emit('error', { error: error, apiId: ApiId.SDK_GET_VARIATION });
diff --git a/src/converter.ts b/src/converter.ts
index 145a59a..470704b 100644
--- a/src/converter.ts
+++ b/src/converter.ts
@@ -1,3 +1,4 @@
+import { IllegalArgumentError } from './objects/errors';
 import { BKTValue } from './types';
 
 export type StringToTypeConverter<T> = (input: string) => T | null;
@@ -25,12 +26,20 @@ export const stringToNumberConverter: StringToTypeConverter<number> = (input: st
 
 export const stringToObjectConverter: StringToTypeConverter<BKTValue> = (input: string) => {
   assetNonBlankString(input);
-  return parseJsonObjectOrArray(input);
+  try {
+    return parseJsonObjectOrArray(input);
+  } catch (err) {
+    if (err instanceof Error) {
+      throw new IllegalArgumentError(err.message);
+    } else {
+      throw new IllegalArgumentError(String(err));
+    }
+  }
 };
 
 function assetNonBlankString(input: string) {
   if (input.trim().length == 0) {
-    throw new Error('Input string must be non-blank');
+    throw new IllegalArgumentError('Input string must be non-blank');
   }
 }
 
diff --git a/src/evaluator/local.ts b/src/evaluator/local.ts
index d3a124a..d31e100 100644
--- a/src/evaluator/local.ts
+++ b/src/evaluator/local.ts
@@ -3,6 +3,7 @@ import {
   Evaluator,
   Feature,
   SegmentUser,
+  SegmentUsers,
   UserEvaluations,
   Reason as ProtoReason,
 } from '@bucketeer/evaluation';
@@ -13,6 +14,7 @@ import { Evaluation } from '../objects/evaluation';
 import { User } from '../objects/user';
 import { Reason, ReasonType } from '../objects/reason';
 import { NodeEvaluator } from './evaluator';
+import { IllegalStateError } from '../objects/errors';
 
 class LocalEvaluator implements NodeEvaluator {
   private tag: string;
@@ -31,34 +33,79 @@ class LocalEvaluator implements NodeEvaluator {
 
   async evaluate(user: User, featureID: string): Promise<Evaluation> {
     // Get the target feature
-    const feature = await this.featureCache.get(featureID);
-    if (feature === null) {
-      throw new Error('Feature not found');
-    }
-    const targetFeatures = await this.getTargetFeatures(feature);
-    const evaluator = new Evaluator();
-    const fIds = evaluator.listSegmentIDs(feature);
-    const segmentUsersMap = new Map<string, SegmentUser[]>();
-    for (const fId of fIds) {
-      const segmentUser = await this.segementUsersCache.get(fId);
-      if (segmentUser !== null) {
-      segmentUsersMap.set(segmentUser.getSegmentId(), segmentUser.getUsersList());
-      }
-    }
-
-    const protoUser = createUser(user.id, user.data);
-    const userEvaluations = await evaluator.evaluateFeatures(
-      targetFeatures,
-      protoUser,
-      segmentUsersMap,
-      this.tag,
-    );
+    const feature = await this.getFeatures(featureID);
+    const userEvaluations = await this.evaluateFeatures(user, feature);
 
     const evaluation = this.findEvaluation(userEvaluations, featureID);
     return evaluation;
   }
 
-  findEvaluation(userEvaluations: UserEvaluations, featureId: String): Evaluation {
+  private async getFeatures(featureID: string): Promise<Feature> {
+    return this.getFeaturesFromCache(featureID).then((feature) => {
+      if (feature === null) {
+        throw new IllegalStateError(`Feature not found: ${featureID}`);
+      }
+      return feature;
+    });
+  }
+
+  private async getFeaturesFromCache(featureID: string): Promise<Feature | null> {
+    return this.featureCache.get(featureID).catch((error) => {
+      throw new IllegalStateError(
+        `Failed to get feature: ${error instanceof Error ? error.message : String(error)}`,
+      );
+    });
+  }
+
+  private async getSegmentUsers(segmentUserId: string): Promise<SegmentUsers> {
+    return this.getSegmentUsersFromCache(segmentUserId).then((segmentUsers) => {
+      if (segmentUsers === null) {
+        throw new IllegalStateError(`Segment users not found: ${segmentUserId}`);
+      }
+      return segmentUsers
+    });
+  }
+
+  private async getSegmentUsersFromCache(segmentUserId: string): Promise<SegmentUsers | null> {
+    return this.segementUsersCache.get(segmentUserId).catch((error) => {
+      throw new IllegalStateError(
+        `Failed to get segment users: ${error instanceof Error ? error.message : String(error)}`,
+      );
+    });
+  }
+
+  private async evaluateFeatures(user: User, feature: Feature): Promise<UserEvaluations> {
+    try {
+      const targetFeatures = await this.getTargetFeatures(feature);
+      const evaluator = new Evaluator();
+      const fIds = evaluator.listSegmentIDs(feature);
+      const segmentUsersMap = new Map<string, SegmentUser[]>();
+      for (const fId of fIds) {
+        const segmentUser = await this.getSegmentUsers(fId);
+        if (segmentUser !== null) {
+          segmentUsersMap.set(segmentUser.getSegmentId(), segmentUser.getUsersList());
+        }
+      }
+
+      const protoUser = createUser(user.id, user.data);
+      const userEvaluations = await evaluator.evaluateFeatures(
+        targetFeatures,
+        protoUser,
+        segmentUsersMap,
+        this.tag,
+      );
+      return userEvaluations;
+    } catch (error) {
+      if (error instanceof IllegalStateError) {
+        throw error;
+      }
+      throw new IllegalStateError(
+        `Failed to evaluate feature: ${error instanceof Error ? error.message : String(error)}`,
+      );
+    }
+  }
+
+  private findEvaluation(userEvaluations: UserEvaluations, featureId: string): Evaluation {
     for (const evaluation of userEvaluations.getEvaluationsList()) {
       if (evaluation.getFeatureId() === featureId) {
         return {
@@ -74,7 +121,7 @@ class LocalEvaluator implements NodeEvaluator {
       }
     }
 
-    throw new Error('Evaluation not found');
+    throw new IllegalStateError(`Evaluation not found for feature: ${featureId}`);
   }
 
   async getTargetFeatures(feature: Feature): Promise<Feature[]> {
@@ -95,11 +142,9 @@ class LocalEvaluator implements NodeEvaluator {
       if (!f) continue;
 
       for (const p of f.getPrerequisitesList()) {
-        const preFeature = await this.featureCache.get(p.getFeatureId());
-        if (preFeature !== null) {
-          prerequisites[p.getFeatureId()] = preFeature;
-          queue.push(preFeature);
-        }
+        const preFeature = await this.getFeatures(p.getFeatureId());
+        prerequisites[p.getFeatureId()] = preFeature;
+        queue.push(preFeature);
       }
     }
 
diff --git a/src/grpc/client.ts b/src/grpc/client.ts
index 86d02f3..31e284c 100644
--- a/src/grpc/client.ts
+++ b/src/grpc/client.ts
@@ -10,7 +10,7 @@ import { grpc } from '@improbable-eng/grpc-web';
 import { NodeHttpTransport } from '@improbable-eng/grpc-web-node-http-transport';
 import { SourceId } from '../objects/sourceId';
 import { version } from '../objects/version';
-import { InvalidStatusError } from '../api/client';
+import { InvalidStatusError } from '../objects/errors';
 
 interface GRPCClient {
   getSegmentUsers(
diff --git a/src/objects/errors.ts b/src/objects/errors.ts
new file mode 100644
index 0000000..e9efaa6
--- /dev/null
+++ b/src/objects/errors.ts
@@ -0,0 +1,28 @@
+export class InvalidStatusError extends Error {
+  name = 'InvalidStatusError';
+  readonly code: number | undefined;
+  constructor(message: string, code: number | undefined) {
+    super(message);
+    this.code = code;
+    // Set the prototype explicitly.
+    Object.setPrototypeOf(this, new.target.prototype);
+  }
+}
+
+export class IllegalArgumentError extends Error {
+  name = 'IllegalArgumentError';
+  constructor(message: string) {
+    super(message);
+    // Set the prototype explicitly.
+    Object.setPrototypeOf(this, new.target.prototype);
+  }
+}
+
+export class IllegalStateError extends Error {
+  name = 'IllegalStateError';
+  constructor(message: string) {
+    super(message);
+    // Set the prototype explicitly.
+    Object.setPrototypeOf(this, new.target.prototype);
+  }
+}
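
A short sketch of how these error classes are consumed, assuming a caller at the root of src/ and the toErrorMetricsEvent mapping added below in this same commit (tag and message values are illustrative):

    import { IllegalStateError } from './objects/errors';
    import { toErrorMetricsEvent } from './objects/metricsEvent';
    import { ApiId } from './objects/apiId';

    // instanceof keeps working across transpilation because of Object.setPrototypeOf above.
    const err = new IllegalStateError('Feature not found: feature-id-1');
    console.log(err instanceof IllegalStateError); // true

    // IllegalStateError / IllegalArgumentError are reported as an InternalSdkErrorMetricsEvent
    // that carries the error message in its labels (see the metricsEvent.ts hunk below).
    const event = toErrorMetricsEvent(err, 'nodejs', ApiId.SDK_GET_VARIATION);
    console.log(event !== null); // true
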
diff --git a/src/objects/metricsEvent.ts b/src/objects/metricsEvent.ts
index b8bb6b1..7595be0 100644
--- a/src/objects/metricsEvent.ts
+++ b/src/objects/metricsEvent.ts
@@ -1,7 +1,7 @@
-import { InvalidStatusError } from '../api/client';
 import { Logger } from '../logger';
+import { IllegalArgumentError, IllegalStateError, InvalidStatusError } from '../objects/errors';
 import { createTimestamp } from '../utils/time';
-import { ApiId, NodeApiIds } from './apiId';
+import { NodeApiIds } from './apiId';
 import { createEvent, Event } from './event';
 import { SourceId } from './sourceId';
 import {
@@ -100,7 +100,11 @@ export function createSizeMetricsEvent(tag: string, size: number, apiId: NodeApi
   return createEvent(metricsEvent);
 }
 
-export function createInternalSdkErrorMetricsEvent(tag: string, apiId: NodeApiIds) {
+export function createInternalSdkErrorMetricsEvent(
+  tag: string,
+  apiId: NodeApiIds,
+  errorMessage?: string,
+) {
   const internalErrorMetricsEvent: InternalSdkErrorMetricsEvent = {
     apiId,
     labels: {
@@ -108,6 +112,9 @@ export function createInternalSdkErrorMetricsEvent(tag: string, apiId: NodeApiId
     },
     '@type': INTERNAL_SDK_ERROR_METRICS_EVENT_NAME,
   };
+  if (errorMessage && errorMessage.length > 0) {
+    internalErrorMetricsEvent.labels.error_message = errorMessage;
+  }
   const metricsEvent = createMetricsEvent(internalErrorMetricsEvent);
   return createEvent(metricsEvent);
 }
@@ -195,6 +202,9 @@ export const toErrorMetricsEvent = (
   apiId: NodeApiIds,
   logger?: Logger,
 ): Event | null => {
+  if (e instanceof IllegalArgumentError || e instanceof IllegalStateError) {
+    return createInternalSdkErrorMetricsEvent(tag, apiId, e.message);
+  }
   if (e instanceof InvalidStatusError) {
     const statusCode = e.code ?? 0;
     switch (true) {
@@ -211,7 +221,7 @@ export const toErrorMetricsEvent = (
       case statusCode == 404:
         return createNotFoundErrorMetricsEvent(tag, apiId);
       case statusCode == 405:
-        return createInternalSdkErrorMetricsEvent(tag, apiId);
+        return createInternalSdkErrorMetricsEvent(tag, apiId, e.message);
       case statusCode == 408:
         return createTimeoutErrorMetricsEvent(tag, apiId);
       case statusCode == 413:
@@ -237,7 +247,7 @@ export const toErrorMetricsEvent = (
         return createUnknownErrorMetricsEvent(tag, apiId, undefined, e.message);
     }
   }
-  return createUnknownErrorMetricsEvent(tag, apiId, undefined, undefined);
+  return createUnknownErrorMetricsEvent(tag, apiId, undefined, String(e));
 };
 
 function isNodeError(error: unknown): error is NodeJS.ErrnoException {
@@ -259,7 +269,7 @@ export function isErrorMetricsEvent(obj: any, specificErrorType?: string): obj i
     NETWORK_ERROR_METRICS_EVENT_NAME,
     UNKNOWN_ERROR_METRICS_EVENT_NAME,
   ];
-  
+
   return errorEventTypes.includes(obj.event['@type']);
 }
 

From 0747cd6f95079b2d6acafd3771c3084284620fc4 Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Tue, 10 Dec 2024 20:19:53 +0700
Subject: [PATCH 4/8] fix: missing code

---
 src/client.ts | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/client.ts b/src/client.ts
index d8de223..838f38f 100644
--- a/src/client.ts
+++ b/src/client.ts
@@ -191,8 +191,10 @@ export class BKTClientImpl implements Bucketeer {
 
   private saveErrorMetricsEvent(tag: string, e: any, apiId: NodeApiIds) {
     const event = toErrorMetricsEvent(e, tag, apiId);
-    this.eventStore.add(event);
-    this.registerEvents();
+    if (event) {
+      this.eventStore.add(event);
+      this.registerEvents();
+    }
   }
 
   async getEvaluation(user: User, featureId: string): Promise<Evaluation | null> {

From 7c0b03e92207979c66afa256f45bd4afa8a97701 Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Tue, 10 Dec 2024 20:42:38 +0700
Subject: [PATCH 5/8] chore: correct test after rebase

---
 e2e/local_evaluation/events.ts       |  6 +++---
 src/__tests__/evaluator/evaluator.ts |  5 +++--
 src/evaluator/local.ts               | 12 ++++++------
 3 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/e2e/local_evaluation/events.ts b/e2e/local_evaluation/events.ts
index e3ad053..971d705 100644
--- a/e2e/local_evaluation/events.ts
+++ b/e2e/local_evaluation/events.ts
@@ -95,8 +95,7 @@ test('default evaluation event', async (t) => {
   t.is(events.length, 16);
   t.true(events.some((e) => (isEvaluationEvent(e.event))));
   t.true(events.some((e) => (isMetricsEvent(e.event))));
-  // TODO: fix me, it should not unknown error - should be not-found error
-  t.true(events.some((e) => (isErrorMetricsEvent(e.event, UNKNOWN_ERROR_METRICS_EVENT_NAME))));
+  t.true(events.some((e) => (isErrorMetricsEvent(e.event, NOT_FOUND_ERROR_METRICS_EVENT_NAME))));
 });
 
 test.afterEach(async (t) => {
@@ -104,4 +103,5 @@ test.afterEach(async (t) => {
   bktClient.destroy();
 });
 
-const UNKNOWN_ERROR_METRICS_EVENT_NAME = "type.googleapis.com/bucketeer.event.client.UnknownErrorMetricsEvent";
\ No newline at end of file
+const NOT_FOUND_ERROR_METRICS_EVENT_NAME =
+  'type.googleapis.com/bucketeer.event.client.NotFoundErrorMetricsEvent';
\ No newline at end of file
diff --git a/src/__tests__/evaluator/evaluator.ts b/src/__tests__/evaluator/evaluator.ts
index 8c89aaa..8d9c88c 100644
--- a/src/__tests__/evaluator/evaluator.ts
+++ b/src/__tests__/evaluator/evaluator.ts
@@ -25,6 +25,7 @@ import { NewSegmentUsersCache, SegmentUsersCache } from '../../cache/segmentUser
 import { NewFeatureCache, FeaturesCache } from '../../cache/features';
 import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
 import { IllegalStateError } from '../../objects/errors';
+import { InvalidStatusError } from '../../../__e2e/lib/objects/errors';
 
 const test = anyTest as TestFn<{
   sandbox: sino.SinonSandbox;
@@ -306,7 +307,7 @@ test('evaluate | err: get feature flag from cache | cache missing', async (t) =>
       feature1.getId(),
     )
     .catch((e) => {
-      t.deepEqual(e, new IllegalStateError(`Feature not found: ${feature1.getId()}`));
+      t.deepEqual(e, new InvalidStatusError(`Feature not found: ${feature1.getId()}`, 404));
     });
   mock.verify();
   t.pass();
@@ -383,7 +384,7 @@ test ('evaluate | err: get segment from cache | cache missing', async (t) => {
       feature5.getId(),
     )
     .catch((e) => {
-      t.deepEqual(e, new IllegalStateError(`Segment users not found: ${segmentUser2.getSegmentId()}`));
+      t.deepEqual(
+        e,
+        new InvalidStatusError(`Segment users not found: ${segmentUser2.getSegmentId()}`, 404),
+      );
     });
 
   featuresCacheMock.verify();
diff --git a/src/evaluator/local.ts b/src/evaluator/local.ts
index d31e100..3bdc2d4 100644
--- a/src/evaluator/local.ts
+++ b/src/evaluator/local.ts
@@ -14,7 +14,7 @@ import { Evaluation } from '../objects/evaluation';
 import { User } from '../objects/user';
 import { Reason, ReasonType } from '../objects/reason';
 import { NodeEvaluator } from './evaluator';
-import { IllegalStateError } from '../objects/errors';
+import { IllegalStateError, InvalidStatusError } from '../objects/errors';
 
 class LocalEvaluator implements NodeEvaluator {
   private tag: string;
@@ -43,7 +43,7 @@ class LocalEvaluator implements NodeEvaluator {
   private async getFeatures(featureID: string): Promise<Feature> {
     return this.getFeaturesFromCache(featureID).then((feature) => {
       if (feature === null) {
-        throw new IllegalStateError(`Feature not found: ${featureID}`);
+        throw new InvalidStatusError(`Feature not found: ${featureID}`, 404);
       }
       return feature;
     });
@@ -60,9 +60,9 @@ class LocalEvaluator implements NodeEvaluator {
   private async getSegmentUsers(segmentUserId: string): Promise<SegmentUsers> {
     return this.getSegmentUsersFromCache(segmentUserId).then((segmentUsers) => {
       if (segmentUsers === null) {
-        throw new IllegalStateError(`Segment users not found: ${segmentUserId}`);
+        throw new InvalidStatusError(`Segment users not found: ${segmentUserId}`, 404);
       }
-      return segmentUsers
+      return segmentUsers;
     });
   }
 
@@ -96,7 +96,7 @@ class LocalEvaluator implements NodeEvaluator {
       );
       return userEvaluations;
     } catch (error) {
-      if (error instanceof IllegalStateError) {
+      if (error instanceof InvalidStatusError || error instanceof IllegalStateError) {
         throw error;
       }
       throw new IllegalStateError(
@@ -121,7 +121,7 @@ class LocalEvaluator implements NodeEvaluator {
       }
     }
 
-    throw new IllegalStateError(`Evaluation not found for feature: ${featureId}`);
+    throw new InvalidStatusError(`Evaluation not found for feature: ${featureId}`, 404);
   }
 
   async getTargetFeatures(feature: Feature): Promise<Feature[]> {
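
With this commit, cache misses in the local evaluator surface as 404 InvalidStatusError, which is why the e2e test above now expects a not-found metrics event. A minimal sketch of that mapping, assuming a caller at the root of src/ (feature id and tag are illustrative):

    import { InvalidStatusError } from './objects/errors';
    import { toErrorMetricsEvent } from './objects/metricsEvent';
    import { ApiId } from './objects/apiId';

    // A missing feature or segment is now a 404 InvalidStatusError...
    const missing = new InvalidStatusError('Feature not found: feature-id-e2e', 404);
    // ...which toErrorMetricsEvent maps to a NotFoundErrorMetricsEvent
    // (type.googleapis.com/bucketeer.event.client.NotFoundErrorMetricsEvent).
    const event = toErrorMetricsEvent(missing, 'nodejs', ApiId.SDK_GET_VARIATION);
    console.log(event !== null); // true
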

From 3fa2544807b1a845544ccb0b715ecd1f66e5b763 Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Tue, 10 Dec 2024 20:52:37 +0700
Subject: [PATCH 6/8] fix: test after rebase

---
 .../evaluation_defaut_strategy.ts               | 17 ++++++++++++-----
 src/__tests__/evaluator/evaluator.ts            |  3 +--
 2 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/e2e/local_evaluation/evaluation_defaut_strategy.ts b/e2e/local_evaluation/evaluation_defaut_strategy.ts
index 23fa019..5ce9d90 100644
--- a/e2e/local_evaluation/evaluation_defaut_strategy.ts
+++ b/e2e/local_evaluation/evaluation_defaut_strategy.ts
@@ -10,6 +10,8 @@ import {
   FEATURE_ID_JSON,
   FEATURE_ID_FLOAT,
 } from '../constants/constants';
+import { assetEvaluationDetails } from '../utils/assert';
 
 const test = anyTest as TestFn<{ bktClient: Bucketeer; defaultUser: User }>;
 
@@ -39,7 +41,8 @@ test.after(async (t) => {
 test('boolVariation', async (t) => {
   const { bktClient, defaultUser } = t.context;
   t.is(await bktClient.booleanVariation(defaultUser, FEATURE_ID_BOOLEAN, false), true);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.booleanVariationDetails(defaultUser, FEATURE_ID_BOOLEAN, false),
     {
       featureId: FEATURE_ID_BOOLEAN,
@@ -56,7 +59,8 @@ test('boolVariation', async (t) => {
 test('stringVariation', async (t) => {
   const { bktClient, defaultUser } = t.context;
   t.is(await bktClient.stringVariation(defaultUser, FEATURE_ID_STRING, ''), 'value-1');
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.stringVariationDetails(defaultUser, FEATURE_ID_STRING, ''),
     {
       featureId: FEATURE_ID_STRING,
@@ -73,7 +77,8 @@ test('stringVariation', async (t) => {
 test('numberVariation', async (t) => {
   const { bktClient, defaultUser } = t.context;
   t.is(await bktClient.numberVariation(defaultUser, FEATURE_ID_INT, 0), 10);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(defaultUser, FEATURE_ID_INT, 1),
     {
       featureId: FEATURE_ID_INT,
@@ -87,7 +92,8 @@ test('numberVariation', async (t) => {
   )
 
   t.is(await bktClient.numberVariation(defaultUser, FEATURE_ID_FLOAT, 0.0), 2.1);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(defaultUser, FEATURE_ID_FLOAT, 1.1),
     {
       featureId: FEATURE_ID_FLOAT,
@@ -106,7 +112,8 @@ test('objectVariation', async (t) => {
   const { bktClient, defaultUser } = t.context;
   t.deepEqual(await bktClient.getJsonVariation(defaultUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
   t.deepEqual(await bktClient.objectVariation(defaultUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.objectVariationDetails(defaultUser, FEATURE_ID_JSON, {}),
     {
       featureId: FEATURE_ID_JSON,
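
assetEvaluationDetails comes from e2e/utils/assert and its implementation is not part of this patch; purely as an illustration of the field-wise check the e2e tests above rely on, a hypothetical sketch (the name, signature, and the handling of variationId are assumptions) might look like:

    import { ExecutionContext } from 'ava';

    // Hypothetical sketch only; the real helper lives in e2e/utils/assert.
    function assertEvaluationDetailsSketch(
      t: ExecutionContext,
      actual: { variationId: string } & Record<string, unknown>,
      expected: Record<string, unknown>,
    ): void {
      const { variationId, ...rest } = actual;
      t.true(variationId.length > 0); // variationId is assumed to vary per environment
      t.deepEqual(rest, expected);
    }
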
diff --git a/src/__tests__/evaluator/evaluator.ts b/src/__tests__/evaluator/evaluator.ts
index 8d9c88c..a0defa5 100644
--- a/src/__tests__/evaluator/evaluator.ts
+++ b/src/__tests__/evaluator/evaluator.ts
@@ -24,8 +24,7 @@ import { Clock } from '../../utils/clock';
 import { NewSegmentUsersCache, SegmentUsersCache } from '../../cache/segmentUsers';
 import { NewFeatureCache, FeaturesCache } from '../../cache/features';
 import { ProcessorEventsEmitter } from '../../processorEventsEmitter';
-import { IllegalStateError } from '../../objects/errors';
-import { InvalidStatusError } from '../../../__e2e/lib/objects/errors';
+import { IllegalStateError, InvalidStatusError } from '../../objects/errors';
 
 const test = anyTest as TestFn<{
   sandbox: sino.SinonSandbox;

From b4ff1a4372918b5fa1cb84deb7fa9e29a3bb7263 Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Tue, 10 Dec 2024 21:01:05 +0700
Subject: [PATCH 7/8] fix: missing e2e tests

---
 e2e/local_evaluation/evaluation_segment_user.ts | 16 +++++++++++-----
 .../evaluation_targeting_strategy.ts            | 17 +++++++++++------
 2 files changed, 22 insertions(+), 11 deletions(-)

diff --git a/e2e/local_evaluation/evaluation_segment_user.ts b/e2e/local_evaluation/evaluation_segment_user.ts
index fd09db6..838d6cc 100644
--- a/e2e/local_evaluation/evaluation_segment_user.ts
+++ b/e2e/local_evaluation/evaluation_segment_user.ts
@@ -1,6 +1,7 @@
 import anyTest, { TestFn } from 'ava';
 import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
 import { HOST, FEATURE_TAG, TARGETED_SEGMENT_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, SERVER_ROLE_TOKEN } from '../constants/constants';
+import { assetEvaluationDetails } from '../utils/assert';
 
 const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedSegmentUser: User }>;
 
@@ -30,7 +31,8 @@ test.after(async (t) => {
 test('boolVariation', async (t) => {
   const { bktClient, targetedSegmentUser } = t.context;
   t.is(await bktClient.booleanVariation(targetedSegmentUser, FEATURE_ID_BOOLEAN, false), true);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.booleanVariationDetails(targetedSegmentUser, FEATURE_ID_BOOLEAN, false),
     {
       featureId: FEATURE_ID_BOOLEAN,
@@ -48,7 +50,8 @@ test('boolVariation', async (t) => {
 test('stringVariation', async (t) => {
   const { bktClient, targetedSegmentUser } = t.context;
   t.is(await bktClient.stringVariation(targetedSegmentUser, FEATURE_ID_STRING, ''), 'value-3');
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.stringVariationDetails(targetedSegmentUser, FEATURE_ID_STRING, 'true'),
     {
       featureId: FEATURE_ID_STRING,
@@ -65,7 +68,8 @@ test('stringVariation', async (t) => {
 test('numberVariation', async (t) => {
   const { bktClient, targetedSegmentUser } = t.context;
   t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_INT, 0), 10);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_INT, 1),
     {
       featureId: FEATURE_ID_INT,
@@ -79,7 +83,8 @@ test('numberVariation', async (t) => {
   )
 
   t.is(await bktClient.numberVariation(targetedSegmentUser, FEATURE_ID_FLOAT, 0.0), 2.1);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(targetedSegmentUser, FEATURE_ID_FLOAT, 1.1),
     {
       featureId: FEATURE_ID_FLOAT,
@@ -98,7 +103,8 @@ test('objectVariation', async (t) => {
   const { bktClient, targetedSegmentUser } = t.context;
   t.deepEqual(await bktClient.getJsonVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
   t.deepEqual(await bktClient.objectVariation(targetedSegmentUser, FEATURE_ID_JSON, {}), { "str": "str1", "int": "int1" });
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.objectVariationDetails(targetedSegmentUser, FEATURE_ID_JSON, {}),
     {
       featureId: FEATURE_ID_JSON,
diff --git a/e2e/local_evaluation/evaluation_targeting_strategy.ts b/e2e/local_evaluation/evaluation_targeting_strategy.ts
index be400ca..de74391 100644
--- a/e2e/local_evaluation/evaluation_targeting_strategy.ts
+++ b/e2e/local_evaluation/evaluation_targeting_strategy.ts
@@ -1,6 +1,7 @@
 import anyTest, { TestFn } from 'ava';
 import { Bucketeer, DefaultLogger, User, initialize } from '../../lib';
 import { HOST, FEATURE_TAG, TARGETED_USER_ID, FEATURE_ID_BOOLEAN, FEATURE_ID_STRING, FEATURE_ID_INT, FEATURE_ID_JSON, FEATURE_ID_FLOAT, SERVER_ROLE_TOKEN } from '../constants/constants';
+import { assetEvaluationDetails } from '../utils/assert';
 
 const test = anyTest as TestFn<{ bktClient: Bucketeer; targetedUser: User }>;
 
@@ -31,7 +32,8 @@ test.after(async (t) => {
 test('boolVariation', async (t) => {
   const { bktClient, targetedUser } = t.context;
   t.is(await bktClient.booleanVariation(targetedUser, FEATURE_ID_BOOLEAN, true), false);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.booleanVariationDetails(targetedUser, FEATURE_ID_BOOLEAN, true),
     {
       featureId: FEATURE_ID_BOOLEAN,
@@ -48,7 +50,8 @@ test('boolVariation', async (t) => {
 test('stringVariation', async (t) => {
   const { bktClient, targetedUser } = t.context;
   t.is(await bktClient.stringVariation(targetedUser, FEATURE_ID_STRING, ''), 'value-2');
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.stringVariationDetails(targetedUser, FEATURE_ID_STRING, 'true'),
     {
       featureId: FEATURE_ID_STRING,
@@ -65,7 +68,8 @@ test('stringVariation', async (t) => {
 test('numberVariation', async (t) => {
   const { bktClient, targetedUser } = t.context;
   t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_INT, 0), 20);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(targetedUser, FEATURE_ID_INT, 99),
     {
       featureId: FEATURE_ID_INT,
@@ -79,7 +83,8 @@ test('numberVariation', async (t) => {
   )
 
   t.is(await bktClient.numberVariation(targetedUser, FEATURE_ID_FLOAT, 0.0), 3.1);
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.numberVariationDetails(targetedUser, FEATURE_ID_FLOAT, 99),
     {
       featureId: FEATURE_ID_FLOAT,
@@ -91,14 +96,14 @@ test('numberVariation', async (t) => {
       reason: 'TARGET',
     }
   )
-
 });
 
 test('objectVariation', async (t) => {
   const { bktClient, targetedUser } = t.context;
   t.deepEqual(await bktClient.getJsonVariation(targetedUser, FEATURE_ID_JSON, {}), { "str": "str2", "int": "int2" });
   t.deepEqual(await bktClient.objectVariation(targetedUser, FEATURE_ID_JSON, {}), { "str": "str2", "int": "int2" });
-  t.deepEqual(
+  assetEvaluationDetails(
+    t,
     await bktClient.objectVariationDetails(targetedUser, FEATURE_ID_JSON, 99),
     {
       featureId: FEATURE_ID_JSON,

From 10aca9032d2072ef45060823bcaff89ebfcb7baf Mon Sep 17 00:00:00 2001
From: duyhungtnn <duyhung71089@gmail.com>
Date: Mon, 6 Jan 2025 17:17:15 +0700
Subject: [PATCH 8/8] fix: missing code after merge

---
 src/client.ts | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/src/client.ts b/src/client.ts
index 838f38f..8ef8219 100644
--- a/src/client.ts
+++ b/src/client.ts
@@ -24,13 +24,13 @@ import {
   stringToObjectConverter,
   StringToTypeConverter,
 } from './converter';
-import { error } from 'console';
 import { FeatureFlagProcessor } from './cache/processor/featureFlagCacheProcessor';
 import { SegementUsersCacheProcessor } from './cache/processor/segmentUsersCacheProcessor';
 import { ProcessorEventsEmitter } from './processorEventsEmitter';
 import { NodeEvaluator } from './evaluator/evaluator';
 import { Bucketeer, BuildInfo } from '.';
 import { IllegalStateError } from './objects/errors';
+import { assertGetEvaluationRequest } from './assert';
 
 const COUNT_PER_REGISTER_EVENT = 100;
 
@@ -259,6 +259,17 @@ export class BKTClientImpl implements Bucketeer {
     defaultValue: T,
     typeConverter: StringToTypeConverter<T>,
   ): Promise<BKTEvaluationDetails<T>> {
+    try {
+      assertGetEvaluationRequest(user, featureId);
+    } catch (error) {
+      this.config.logger?.error('getVariationDetails failed', error);
+      return newDefaultBKTEvaluationDetails(
+        user && user.id ? user.id : '',
+        featureId ?? '',
+        defaultValue,
+        'DEFAULT',
+      );
+    }
+
     const evaluation = await this.getEvaluation(user, featureId);
     const variationValue = evaluation?.variationValue;
 
@@ -273,7 +284,7 @@ export class BKTClientImpl implements Bucketeer {
         this.eventEmitter.emit('error', { error: err, apiId: ApiId.SDK_GET_VARIATION });
 
         this.config.logger?.error(
-          `getVariationDetails failed to parse: ${variationValue} using: ${typeof typeConverter} with error: ${error.toString()}`,
+          `getVariationDetails failed to parse: ${variationValue} using: ${typeof typeConverter} with error: ${err}`,
         );
       }
     }
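
assertGetEvaluationRequest is imported from src/assert, which is not shown in this patch. Assuming it only rejects blank arguments, a rough, hypothetical sketch could be the following; if it throws, getVariationDetails falls back to newDefaultBKTEvaluationDetails with the 'DEFAULT' reason, as the hunk above shows:

    import { IllegalArgumentError } from './objects/errors';
    import { User } from './objects/user';

    // Hypothetical sketch; the real implementation lives in src/assert.ts.
    function assertGetEvaluationRequestSketch(user: User, featureId: string): void {
      if (!user || !user.id) {
        throw new IllegalArgumentError('user is required');
      }
      if (!featureId) {
        throw new IllegalArgumentError('featureId is required');
      }
    }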