From bba4f5820f916ccc88e129c6230c1c4dd93eb278 Mon Sep 17 00:00:00 2001
From: ImMin5 <mino@megazone.com>
Date: Sat, 30 Nov 2024 00:32:41 +0900
Subject: [PATCH] feat: add Asset interface

---
 .../inventory_v2/conf/collector_conf.py       |  32 +
 src/spaceone/inventory_v2/conf/global_conf.py |  13 +-
 src/spaceone/inventory_v2/error/asset.py      |   5 +
 src/spaceone/inventory_v2/error/collector.py  | 105 +++
 src/spaceone/inventory_v2/info/region_info.py |   5 +-
 .../inventory_v2/interface/grpc/metric.py     |  56 ++
 .../interface/grpc/metric_data.py             |  26 +
 .../interface/grpc/metric_example.py          |  44 ++
 .../inventory_v2/interface/task/__init__.py   |   0
 .../interface/task/v1/__init__.py             |   0
 .../interface/task/v1/inventory_scheduler.py  |  73 ++
 src/spaceone/inventory_v2/lib/rule_matcher.py |  51 ++
 .../inventory_v2/manager/asset_manager.py     | 115 +++
 .../manager/collecting_manager.py             | 587 +++++++++++++++
 .../manager/collection_state_manager.py       |   4 +-
 .../manager/collector_rule_manager.py         |  74 +-
 .../manager/metric_data_manager.py            | 329 ++++++++
 .../manager/metric_example_manager.py         |  68 ++
 .../inventory_v2/manager/metric_manager.py    | 711 ++++++++++++++++++
 .../inventory_v2/manager/region_manager.py    |   2 +-
 src/spaceone/inventory_v2/model/__init__.py   |   5 +-
 .../inventory_v2/model/asset/__init__.py      |   0
 .../inventory_v2/model/asset/database.py      | 183 +++++
 .../inventory_v2/model/asset/request.py       |  15 +
 .../inventory_v2/model/asset/response.py      |   5 +
 .../model/asset_group/__init__.py             |   0
 .../inventory_v2/model/asset_type/__init__.py |   0
 .../inventory_v2/model/asset_type/database.py |  89 +++
 .../model/collection_state/database.py        |   8 +-
 .../inventory_v2/model/collector/database.py  |   2 -
 .../inventory_v2/model/collector/request.py   |   6 +-
 .../inventory_v2/model/collector/response.py  |   5 +-
 .../inventory_v2/model/metric/__init__.py     |   0
 .../inventory_v2/model/metric/database.py     |  64 ++
 .../inventory_v2/model/metric/request.py      |  86 +++
 .../inventory_v2/model/metric/response.py     |  38 +
 .../model/metric_data/__init__.py             |   0
 .../model/metric_data/database.py             | 144 ++++
 .../inventory_v2/model/metric_data/request.py |  33 +
 .../model/metric_data/response.py             |  25 +
 .../model/metric_example/__init__.py          |   0
 .../model/metric_example/database.py          |  36 +
 .../model/metric_example/request.py           |  58 ++
 .../model/metric_example/response.py          |  30 +
 .../inventory_v2/model/region/__init__.py     |   0
 .../model/{ => region}/region_model.py        |   0
 .../inventory_v2/service/asset_service.py     | 372 +++++++++
 .../inventory_v2/service/collector_service.py | 132 ++--
 .../service/metric_data_service.py            | 139 ++++
 .../service/metric_example_service.py         | 227 ++++++
 .../inventory_v2/service/metric_service.py    | 376 +++++++++
 .../inventory_v2/service/region_service.py    |  16 +-
 52 files changed, 4262 insertions(+), 132 deletions(-)
 create mode 100644 src/spaceone/inventory_v2/conf/collector_conf.py
 create mode 100644 src/spaceone/inventory_v2/error/asset.py
 create mode 100644 src/spaceone/inventory_v2/error/collector.py
 create mode 100644 src/spaceone/inventory_v2/interface/grpc/metric.py
 create mode 100644 src/spaceone/inventory_v2/interface/grpc/metric_data.py
 create mode 100644 src/spaceone/inventory_v2/interface/grpc/metric_example.py
 create mode 100644 src/spaceone/inventory_v2/interface/task/__init__.py
 create mode 100644 src/spaceone/inventory_v2/interface/task/v1/__init__.py
 create mode 100644 src/spaceone/inventory_v2/interface/task/v1/inventory_scheduler.py
 create mode 100644 src/spaceone/inventory_v2/lib/rule_matcher.py
 create mode 100644 src/spaceone/inventory_v2/manager/asset_manager.py
 create mode 100644 src/spaceone/inventory_v2/manager/collecting_manager.py
 create mode 100644 src/spaceone/inventory_v2/manager/metric_data_manager.py
 create mode 100644 src/spaceone/inventory_v2/manager/metric_example_manager.py
 create mode 100644 src/spaceone/inventory_v2/manager/metric_manager.py
 create mode 100644 src/spaceone/inventory_v2/model/asset/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/asset/database.py
 create mode 100644 src/spaceone/inventory_v2/model/asset/request.py
 create mode 100644 src/spaceone/inventory_v2/model/asset/response.py
 create mode 100644 src/spaceone/inventory_v2/model/asset_group/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/asset_type/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/asset_type/database.py
 create mode 100644 src/spaceone/inventory_v2/model/metric/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/metric/database.py
 create mode 100644 src/spaceone/inventory_v2/model/metric/request.py
 create mode 100644 src/spaceone/inventory_v2/model/metric/response.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_data/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_data/database.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_data/request.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_data/response.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_example/__init__.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_example/database.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_example/request.py
 create mode 100644 src/spaceone/inventory_v2/model/metric_example/response.py
 create mode 100644 src/spaceone/inventory_v2/model/region/__init__.py
 rename src/spaceone/inventory_v2/model/{ => region}/region_model.py (100%)
 create mode 100644 src/spaceone/inventory_v2/service/asset_service.py
 create mode 100644 src/spaceone/inventory_v2/service/metric_data_service.py
 create mode 100644 src/spaceone/inventory_v2/service/metric_example_service.py
 create mode 100644 src/spaceone/inventory_v2/service/metric_service.py

diff --git a/src/spaceone/inventory_v2/conf/collector_conf.py b/src/spaceone/inventory_v2/conf/collector_conf.py
new file mode 100644
index 0000000..50bd53c
--- /dev/null
+++ b/src/spaceone/inventory_v2/conf/collector_conf.py
@@ -0,0 +1,32 @@
+######################################################################
+#    ************ Very Important ************
+#
+# This is resource map for collector
+# If you add new service and manager for specific RESOURCE_TYPE,
+# add here for collector
+######################################################################
+
+RESOURCE_MAP = {
+    "inventory.CloudService": ("CloudServiceService", "CloudServiceManager"),
+    "inventory.CloudServiceType": (
+        "CloudServiceTypeService",
+        "CloudServiceTypeManager",
+    ),
+    "inventory.Region": ("RegionService", "RegionManager"),
+    "inventory.ErrorResource": ("CollectorService", "CollectingManager"),
+}
+
+
+OP_MAP = {"=": "eq", ">=": "gte", "<=": "lte", ">": "gt", "<": "lt", "!=": "not"}
+
+DB_QUEUE_NAME = "db_q"
+
+NOT_COUNT = 0
+CREATED = 1
+UPDATED = 2
+ERROR = 3
+
+JOB_TASK_STAT_EXPIRE_TIME = 3600  # 1 hour
+WATCHDOG_WAITING_TIME = 30  # wait 30 seconds, before watchdog works
+
+MAX_MESSAGE_LENGTH = 2000
diff --git a/src/spaceone/inventory_v2/conf/global_conf.py b/src/spaceone/inventory_v2/conf/global_conf.py
index ef075b0..4c36007 100644
--- a/src/spaceone/inventory_v2/conf/global_conf.py
+++ b/src/spaceone/inventory_v2/conf/global_conf.py
@@ -47,13 +47,20 @@
             "identity": "grpc://identity:50051",
             "monitoring": "grpc://monitoring:50051",
             "file_manager": "grpc://file-manager:50051",
-            "secret": "grpc://secret:50051"
+            "secret": "grpc://secret:50051",
         },
     }
 }
 
+# Queue Settings
+QUEUES = {
+    "inventory_q": {
+        "backend": "spaceone.core.queue.redis_queue.RedisQueue",
+        "host": "redis",
+        "port": 6379,
+        "channel": "inventory_job",
+    },
+}
 # Scheduler Settings
-QUEUES = {}
 SCHEDULERS = {}
 WORKERS = {}
-TOKEN_INFO = {}
diff --git a/src/spaceone/inventory_v2/error/asset.py b/src/spaceone/inventory_v2/error/asset.py
new file mode 100644
index 0000000..97b4f1b
--- /dev/null
+++ b/src/spaceone/inventory_v2/error/asset.py
@@ -0,0 +1,5 @@
+from spaceone.core.error import *
+
+
+class ERROR_RESOURCE_ALREADY_DELETED(ERROR_INVALID_ARGUMENT):
+    _message = "{resource_type} has already been deleted. ({resource_id})"
diff --git a/src/spaceone/inventory_v2/error/collector.py b/src/spaceone/inventory_v2/error/collector.py
new file mode 100644
index 0000000..16851ca
--- /dev/null
+++ b/src/spaceone/inventory_v2/error/collector.py
@@ -0,0 +1,105 @@
+from spaceone.core.error import *
+
+
+class ERROR_NO_COLLECTOR(ERROR_BASE):
+    _message = "{collector_id} does not exist in {domain_id}"
+
+
+class ERROR_COLLECTOR_STATE(ERROR_BASE):
+    _message = "collector state is {state}"
+
+
+class ERROR_INIT_PLUGIN_FAILURE(ERROR_BASE):
+    _message = "Fail to init plugin, params={params}"
+
+
+class ERROR_VERIFY_PLUGIN_FAILURE(ERROR_BASE):
+    _message = "Fail to verify plugin, params={params}"
+
+
+class ERROR_NO_PLUGIN_PARAMETER(ERROR_BASE):
+    _message = "parameter: {param} is required"
+
+
+class ERROR_TOKEN_AUTHENTICATION_FAILURE(ERROR_BASE):
+    _message = "A access token or refresh token is invalid."
+
+
+class ERROR_AUTHENTICATION_FAILURE_PLUGIN(ERROR_BASE):
+    _message = (
+        "External plugin authentication exception. (plugin_error_message={message})"
+    )
+
+
+class ERROR_JOB_STATE_CHANGE(ERROR_BASE):
+    _message = "Job {job_id} state change: {status} -> {action}"
+
+
+class ERROR_JOB_TASK_STATE_CHANGE(ERROR_BASE):
+    _message = "Job task {job_task_id} state change: {status} -> {action}"
+
+
+class ERROR_COLLECT_FILTER(ERROR_BASE):
+    _message = "collect failed, plugin_info: {plugin_info}, filter: {param}"
+
+
+class ERROR_COLLECTOR_SECRET(ERROR_BASE):
+    _message = "collect failed, plugin_info: {plugin_info}, secret_id: {param}"
+
+
+class ERROR_JOB_UPDATE(ERROR_BASE):
+    _message = "job update failed, param={param}"
+
+
+class ERROR_COLLECTOR_COLLECTING(ERROR_BASE):
+    _message = "collecting failed, plugin_info: {plugin_info}, filter: {filter}"
+
+
+class ERROR_COLLECT_CANCELED(ERROR_BASE):
+    _message = "collecting canceled, job_id: {job_id}"
+
+
+class ERROR_UNSUPPORTED_RESOURCE_TYPE(ERROR_BASE):
+    _message = "collector can not find resource_type: {resource_type}"
+
+
+class ERROR_UNSUPPORTED_FILTER_KEY(ERROR_BASE):
+    _message = "request unsupported filter_key {filter_key} : {filter_value}"
+
+
+class ERROR_COLLECT_INITIALIZE(ERROR_BASE):
+    _message = "failed on stage {stage}, params: {params}"
+
+
+class ERROR_INVALID_PLUGIN_VERSION(ERROR_INVALID_ARGUMENT):
+    _message = (
+        "Plugin version is invalid. (plugin_id = {plugin_id}, version = {version})"
+    )
+
+
+class ERROR_NOT_ALLOWED_PLUGIN_ID(ERROR_INVALID_ARGUMENT):
+    _message = "Changing plugin_id is not allowed. (old_plugin_id = {old_plugin_id}, new_plugin_id = {new_plugin_id})"
+
+
+class ERROR_WRONG_PLUGIN_SETTINGS(ERROR_BASE):
+    _message = "The plugin settings is incorrect. (key = {key})"
+
+
+class ERROR_INVALID_PLUGIN_OPTIONS(ERROR_INTERNAL_API):
+    _message = "The options received from the plugin is invalid. (reason = {reason})"
+
+
+class ERROR_RESOURCE_KEYS_NOT_DEFINED(ERROR_BASE):
+    _message = "{resource_type} manager does not define resource_keys field"
+
+
+class ERROR_TOO_MANY_MATCH(ERROR_BASE):
+    _message = "The same resource exists. (match_key = {match_key}, matched_resources = {resources}, more = {more})"
+
+
+class ERROR_UNSUPPORTED_SCHEDULE(ERROR_BASE):
+    _message = "supported schedules: {supported}, requested: {requested}"
+
+
+class ERROR_NOT_ALLOWED_SECRET_ID(ERROR_BASE):
+    _message = "Not allowed secret_id: {secret_id}"
diff --git a/src/spaceone/inventory_v2/info/region_info.py b/src/spaceone/inventory_v2/info/region_info.py
index a44baf4..0c0d77f 100644
--- a/src/spaceone/inventory_v2/info/region_info.py
+++ b/src/spaceone/inventory_v2/info/region_info.py
@@ -3,12 +3,13 @@
 from spaceone.api.inventory.v2 import region_pb2
 from spaceone.core.pygrpc.message_type import *
 from spaceone.core import utils
-from spaceone.inventory_v2.model.region_model import Region
+from spaceone.inventory_v2.model.region.region_model import Region
 
 __all__ = ["RegionInfo", "RegionsInfo"]
 
 _LOGGER = logging.getLogger(__name__)
 
+
 def RegionInfo(region_vo: Region, minimal=False):
     info = {
         "region_id": region_vo.region_id,
@@ -35,4 +36,4 @@ def RegionsInfo(region_vos, total_count, **kwargs):
     return region_pb2.RegionsInfo(
         results=list(map(functools.partial(RegionInfo, **kwargs), region_vos)),
         total_count=total_count,
-    )
\ No newline at end of file
+    )
diff --git a/src/spaceone/inventory_v2/interface/grpc/metric.py b/src/spaceone/inventory_v2/interface/grpc/metric.py
new file mode 100644
index 0000000..8297ce9
--- /dev/null
+++ b/src/spaceone/inventory_v2/interface/grpc/metric.py
@@ -0,0 +1,56 @@
+from spaceone.core.pygrpc import BaseAPI
+from spaceone.api.inventory_v2.v1 import metric_pb2, metric_pb2_grpc
+from spaceone.inventory_v2.service.metric_service import MetricService
+
+
+class Metric(BaseAPI, metric_pb2_grpc.MetricServicer):
+    pb2 = metric_pb2
+    pb2_grpc = metric_pb2_grpc
+
+    def create(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.create(params)
+        return self.dict_to_message(response)
+
+    def update(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.update(params)
+        return self.dict_to_message(response)
+
+    def delete(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        metric_svc.delete(params)
+        return self.empty()
+
+    def run(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        metric_svc.run(params)
+        return self.empty()
+
+    def test(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.test(params)
+        return self.dict_to_message(response)
+
+    def get(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.get(params)
+        return self.dict_to_message(response)
+
+    def list(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.list(params)
+        return self.dict_to_message(response)
+
+    def stat(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_svc = MetricService(metadata)
+        response: dict = metric_svc.stat(params)
+        return self.dict_to_message(response)
diff --git a/src/spaceone/inventory_v2/interface/grpc/metric_data.py b/src/spaceone/inventory_v2/interface/grpc/metric_data.py
new file mode 100644
index 0000000..e8d06cf
--- /dev/null
+++ b/src/spaceone/inventory_v2/interface/grpc/metric_data.py
@@ -0,0 +1,26 @@
+from spaceone.core.pygrpc import BaseAPI
+from spaceone.api.inventory_v2.v1 import metric_data_pb2, metric_data_pb2_grpc
+from spaceone.inventory_v2.service.metric_data_service import MetricDataService
+
+
+class MetricData(BaseAPI, metric_data_pb2_grpc.MetricDataServicer):
+    pb2 = metric_data_pb2
+    pb2_grpc = metric_data_pb2_grpc
+
+    def list(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_data_svc = MetricDataService(metadata)
+        response: dict = metric_data_svc.list(params)
+        return self.dict_to_message(response)
+
+    def analyze(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_data_svc = MetricDataService(metadata)
+        response: dict = metric_data_svc.analyze(params)
+        return self.dict_to_message(response)
+
+    def stat(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_data_svc = MetricDataService(metadata)
+        response: dict = metric_data_svc.stat(params)
+        return self.dict_to_message(response)
diff --git a/src/spaceone/inventory_v2/interface/grpc/metric_example.py b/src/spaceone/inventory_v2/interface/grpc/metric_example.py
new file mode 100644
index 0000000..f39d65d
--- /dev/null
+++ b/src/spaceone/inventory_v2/interface/grpc/metric_example.py
@@ -0,0 +1,44 @@
+from spaceone.core.pygrpc import BaseAPI
+from spaceone.api.inventory_v2.v1 import metric_example_pb2, metric_example_pb2_grpc
+from spaceone.inventory_v2.service.metric_example_service import MetricExampleService
+
+
+class MetricExample(BaseAPI, metric_example_pb2_grpc.MetricExampleServicer):
+    pb2 = metric_example_pb2
+    pb2_grpc = metric_example_pb2_grpc
+
+    def create(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        response: dict = metric_example_svc.create(params)
+        return self.dict_to_message(response)
+
+    def update(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        response: dict = metric_example_svc.update(params)
+        return self.dict_to_message(response)
+
+    def delete(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        metric_example_svc.delete(params)
+        return self.empty()
+
+    def get(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        response: dict = metric_example_svc.get(params)
+        return self.dict_to_message(response)
+
+    def list(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        response: dict = metric_example_svc.list(params)
+        return self.dict_to_message(response)
+
+    def stat(self, request, context):
+        params, metadata = self.parse_request(request, context)
+        metric_example_svc = MetricExampleService(metadata)
+        response: dict = metric_example_svc.stat(params)
+        return self.dict_to_message(response)
diff --git a/src/spaceone/inventory_v2/interface/task/__init__.py b/src/spaceone/inventory_v2/interface/task/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/interface/task/v1/__init__.py b/src/spaceone/inventory_v2/interface/task/v1/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/interface/task/v1/inventory_scheduler.py b/src/spaceone/inventory_v2/interface/task/v1/inventory_scheduler.py
new file mode 100644
index 0000000..a351a3e
--- /dev/null
+++ b/src/spaceone/inventory_v2/interface/task/v1/inventory_scheduler.py
@@ -0,0 +1,73 @@
+import logging
+from datetime import datetime
+from spaceone.core.error import ERROR_CONFIGURATION
+from spaceone.core.locator import Locator
+from spaceone.core.scheduler import HourlyScheduler
+from spaceone.core import config, utils
+from spaceone.inventory_v2.service.collector_service import CollectorService
+
+__all__ = ["InventoryHourlyScheduler"]
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class InventoryHourlyScheduler(HourlyScheduler):
+    def __init__(self, queue, interval, minute=":00"):
+        super().__init__(queue, interval, minute)
+        self.locator = Locator()
+        self._init_config()
+
+    def _init_config(self):
+        self._token = config.get_global("TOKEN")
+        if self._token is None:
+            raise ERROR_CONFIGURATION(key="TOKEN")
+
+    def create_task(self):
+        current_hour = datetime.utcnow().hour
+        return [
+            self._create_job_request(collector_vo)
+            for collector_vo in self.list_schedule_collectors(current_hour)
+        ]
+
+    def list_schedule_collectors(self, current_hour: int):
+        try:
+            collector_svc: CollectorService = self.locator.get_service(
+                CollectorService, {"token": self._token}
+            )
+            collector_vos, total_count = collector_svc.scheduled_collectors(
+                {"hour": current_hour}
+            )
+            _LOGGER.debug(
+                f"[list_schedule_collectors] scheduled collectors count (UTC {current_hour}): {total_count}"
+            )
+            return collector_vos
+        except Exception as e:
+            _LOGGER.error(e, exc_info=True)
+            return []
+
+    def _create_job_request(self, collector_vo):
+        schedule_job = {
+            "locator": "SERVICE",
+            "name": "CollectorService",
+            "metadata": {
+                "token": self._token,
+            },
+            "method": "collect",
+            "params": {
+                "params": {
+                    "collector_id": collector_vo.collector_id,
+                    "domain_id": collector_vo.domain_id,
+                }
+            },
+        }
+
+        _LOGGER.debug(
+            f"[_create_job_request] tasks: inventory_collect_schedule: {collector_vo.collector_id}"
+        )
+
+        return {
+            "name": "inventory_collect_schedule",
+            "version": "v1",
+            "executionEngine": "BaseWorker",
+            "stages": [schedule_job],
+        }
diff --git a/src/spaceone/inventory_v2/lib/rule_matcher.py b/src/spaceone/inventory_v2/lib/rule_matcher.py
new file mode 100644
index 0000000..8d82cd5
--- /dev/null
+++ b/src/spaceone/inventory_v2/lib/rule_matcher.py
@@ -0,0 +1,51 @@
+def find_data(data: any, key: str) -> any:
+    """
+    find hierarchy data
+    :param data:
+    e.g.
+    {
+      'data': {
+        'vm': {
+          'vm_id': 'i-1234'
+        }
+      }
+    }
+    :param key: e.g 'data.vm.vm_id'
+    :return: a found value(e.g 'i-1234'), otherwise, None.
+    """
+    if not isinstance(data, dict) or not isinstance(key, str):
+        return None
+
+    key_parsed = key.split(".", 1)
+    if len(key_parsed) > 1:
+        return find_data(data.get(key_parsed[0]), key_parsed[1])
+    else:
+        return data.get(key_parsed[0], None)
+
+
+def dict_key_int_parser(data: dict) -> dict:
+    # change key to int type, if all keys are able to cast to int type. Otherwise, leave it as original type.
+    try:
+        data = {int(key): value for key, value in data.items()}
+    except Exception as e:
+        pass
+
+    return data
+
+
+def make_query(
+    key: str, rules: dict, resource: dict, domain_id: str, workspace_id: str
+) -> dict:
+    _filter = [
+        {"k": "domain_id", "v": domain_id, "o": "eq"},
+        {"k": "workspace_id", "v": workspace_id, "o": "eq"},
+    ]
+
+    for rule in rules[key]:
+        value = find_data(resource, rule)
+        if value:
+            _filter.append({"k": rule, "v": value, "o": "eq"})
+
+    return {
+        "filter": _filter,
+    }
diff --git a/src/spaceone/inventory_v2/manager/asset_manager.py b/src/spaceone/inventory_v2/manager/asset_manager.py
new file mode 100644
index 0000000..f59f1dc
--- /dev/null
+++ b/src/spaceone/inventory_v2/manager/asset_manager.py
@@ -0,0 +1,115 @@
+import logging
+import copy
+import math
+import pytz
+from typing import Tuple, List
+from datetime import datetime
+
+from spaceone.core.model.mongo_model import QuerySet
+from spaceone.core.manager import BaseManager
+from spaceone.core import utils
+
+from spaceone.inventory_v2.model.asset.database import Asset
+
+_LOGGER = logging.getLogger(__name__)
+
+MERGE_KEYS = [
+    "name",
+    "ip_addresses",
+    "account",
+    "instance_type",
+    "instance_size",
+    "reference",
+    "region_code",
+    "ref_region",
+    "project_id",
+    "data",
+]
+
+SIZE_MAP = {
+    "KB": 1024,
+    "MB": 1024 * 1024,
+    "GB": 1024 * 1024 * 1024,
+    "TB": 1024 * 1024 * 1024 * 1024,
+    "PB": 1024 * 1024 * 1024 * 1024 * 1024,
+    "EB": 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
+    "ZB": 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
+    "YB": 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024,
+}
+
+
+class AssetManager(BaseManager):
+    resource_keys = ["asset_id"]
+    query_method = "list_assets"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.asset_model = Asset
+
+    def create_asset(self, params: dict) -> Asset:
+        def _rollback(vo: Asset):
+            _LOGGER.info(
+                f"[ROLLBACK] Delete asset : {vo.provider} ({vo.asset_type_id})"
+            )
+            vo.terminate()
+
+        asset_vo: Asset = self.asset_model.create(params)
+        self.transaction.add_rollback(_rollback, asset_vo)
+
+        return asset_vo
+
+    def update_asset_by_vo(self, params: dict, asset_vo: Asset) -> Asset:
+        def _rollback(old_data):
+            _LOGGER.info(f'[ROLLBACK] Revert Data : {old_data.get("asset_id")}')
+            asset_vo.update(old_data)
+
+        self.transaction.add_rollback(_rollback, asset_vo.to_dict())
+        asset_vo: Asset = asset_vo.update(params)
+
+        return asset_vo
+
+    @staticmethod
+    def delete_cloud_service_by_vo(asset_vo: Asset) -> None:
+        asset_vo.delete()
+
+    def get_asset(
+        self,
+        asset_id: str,
+        domain_id: str,
+        workspace_id: str = None,
+        user_projects: list = None,
+    ):
+        conditions = {"asset_id": asset_id, "domain_id": domain_id}
+
+        if workspace_id:
+            conditions["workspace_id"] = workspace_id
+
+        if user_projects:
+            conditions["project_id"] = user_projects
+
+        return self.asset_model.get(**conditions)
+
+    @staticmethod
+    def merge_data(new_data: dict, old_data: dict) -> dict:
+        for key in MERGE_KEYS:
+            if key in new_data:
+                new_value = new_data[key]
+                old_value = old_data.get(key)
+                if key in ["data", "tags"]:
+                    is_changed = False
+                    for sub_key, sub_value in new_value.items():
+                        if sub_value != old_value.get(sub_key):
+                            is_changed = True
+                            break
+
+                    if is_changed:
+                        merged_value = copy.deepcopy(old_value)
+                        merged_value.update(new_value)
+                        new_data[key] = merged_value
+                    else:
+                        del new_data[key]
+                else:
+                    if new_value == old_value:
+                        del new_data[key]
+
+        return new_data
diff --git a/src/spaceone/inventory_v2/manager/collecting_manager.py b/src/spaceone/inventory_v2/manager/collecting_manager.py
new file mode 100644
index 0000000..9b5ed6e
--- /dev/null
+++ b/src/spaceone/inventory_v2/manager/collecting_manager.py
@@ -0,0 +1,587 @@
+import logging
+import time
+from typing import Generator
+from spaceone.core import utils
+from spaceone.core.manager import BaseManager
+from spaceone.inventory_v2.lib.resource_manager import ResourceManager
+from spaceone.inventory_v2.manager.asset_manager import AssetManager
+from spaceone.inventory_v2.manager.job_manager import JobManager
+from spaceone.inventory_v2.manager.job_task_manager import JobTaskManager
+from spaceone.inventory_v2.manager.collector_manager import CollectorManager
+from spaceone.inventory_v2.manager.plugin_manager import PluginManager
+from spaceone.inventory_v2.manager.collector_plugin_manager import (
+    CollectorPluginManager,
+)
+
+# from spaceone.inventory.manager.namespace_manager import NamespaceManager
+# from spaceone.inventory.manager.metric_manager import MetricManager
+from spaceone.inventory_v2.model.job_task.database import JobTask
+from spaceone.inventory_v2.error import *
+from spaceone.inventory_v2.lib import rule_matcher
+from spaceone.inventory_v2.conf.collector_conf import *
+from spaceone.inventory_v2.service.asset_service import AssetService
+
+_LOGGER = logging.getLogger(__name__)
+
+
class CollectingManager(BaseManager):
    """Executes a single collect job task: pulls resources from a collector
    plugin endpoint and upserts them into the inventory."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.job_mgr = JobManager()
        self.job_task_mgr = JobTaskManager()
        # self.namespace_mgr = NamespaceManager()
        # self.metric_mgr = MetricManager()

        # Queue name for DB update work (from collector conf).
        self.db_queue = DB_QUEUE_NAME
        # Lazy cache: resource_type -> (service, manager), see _get_resource_map().
        self._service_and_manager_map = {}
+
    def collecting_resources(self, params: dict) -> bool:
        """Execute collecting task to get resources from plugin
        Args:
            params (dict): {
                'collector_id': 'str',
                'job_id': 'str',
                'job_task_id': 'str',
                'domain_id': 'str',
                'plugin_info': 'dict',
                'task_options': 'dict',
                'is_sub_task': 'bool',
                'secret_info': 'dict',
                'secret_data': 'dict',
                'token': 'str'
            }
        Returns:
            bool: always True (the task is either processed or re-queued)
        Raises:
            ERROR_COLLECT_CANCELED: when the parent job was canceled
            ERROR_COLLECTOR_COLLECTING: when the plugin call itself fails
        """

        # set token to transaction meta
        token = params["token"]
        self.transaction.set_meta("token", token)

        plugin_manager = PluginManager()
        collector_plugin_mgr = CollectorPluginManager()

        job_id = params["job_id"]
        job_task_id = params["job_task_id"]
        collector_id = params["collector_id"]
        domain_id = params["domain_id"]
        task_options = params.get("task_options")
        is_sub_task = params.get("is_sub_task", False)
        secret_info = params["secret_info"]
        secret_data = params["secret_data"]
        plugin_info = params["plugin_info"]

        if is_sub_task:
            _LOGGER.debug(
                f"[collecting_resources] start sub task: {job_task_id} "
                f"(task_options => {utils.dump_json(task_options)})"
            )
        else:
            _LOGGER.debug(f"[collecting_resources] start job task: {job_task_id}")

        # Concurrency limit reached: back off for a minute, then re-queue the
        # task instead of running it now.
        if not self._check_concurrency(collector_id, job_id, domain_id):
            time.sleep(60)
            _LOGGER.debug(f"[collecting_resources] retry sub task: {job_task_id}")
            self.job_task_mgr.push_job_task(params)
            return True

        job_task_vo = self.job_task_mgr.get(job_task_id, domain_id)

        # add workspace_id to params from secret_info
        params["workspace_id"] = secret_info["workspace_id"]

        # A canceled job marks this task failed and aborts before any plugin call.
        if self.job_mgr.check_cancel(job_id, domain_id):
            self.job_task_mgr.add_error(
                job_task_vo,
                "ERROR_COLLECT_CANCELED",
                "The job has been canceled.",
            )
            self.job_task_mgr.make_failure_by_vo(job_task_vo)
            raise ERROR_COLLECT_CANCELED(job_id=job_id)

        self.job_task_mgr.make_inprogress_by_vo(job_task_vo)

        try:
            # get plugin endpoint from plugin manager
            endpoint, updated_version = plugin_manager.get_endpoint(
                plugin_info["plugin_id"],
                domain_id,
                plugin_info.get("upgrade_mode", "AUTO"),
                plugin_info.get("version"),
            )

            # collect data from plugin
            resources = collector_plugin_mgr.collect(
                endpoint,
                plugin_info["options"],
                secret_data.get("data", {}),
                task_options,
            )

            # delete secret_data in params for security
            del params["secret_data"]
        except Exception as e:
            if isinstance(e, ERROR_BASE):
                error_message = e.message
            else:
                error_message = str(e)

            _LOGGER.error(
                f"[collecting_resources] plugin collecting error ({job_task_id}): {error_message}",
                exc_info=True,
            )
            self.job_task_mgr.add_error(
                job_task_vo, "ERROR_COLLECTOR_PLUGIN", error_message
            )

            # A plugin-level failure counts as one failed resource.
            self.job_task_mgr.make_failure_by_vo(job_task_vo, {"failure_count": 1})
            raise ERROR_COLLECTOR_COLLECTING(plugin_info=plugin_info)

        job_task_status = "SUCCESS"

        try:
            collecting_count_info = self._upsert_collecting_resources(
                resources, params, job_task_vo
            )

            # Any per-resource failure downgrades the whole task to FAILURE.
            if collecting_count_info["failure_count"] > 0:
                job_task_status = "FAILURE"

        except Exception as e:
            if isinstance(e, ERROR_BASE):
                error_message = e.message
            else:
                error_message = str(e)

            _LOGGER.error(
                f"[collecting_resources] upsert resources error ({job_task_id}): {error_message}",
                exc_info=True,
            )
            self.job_task_mgr.add_error(
                job_task_vo, "ERROR_COLLECTOR_PLUGIN", error_message
            )
            job_task_status = "FAILURE"
            collecting_count_info = {"failure_count": 1}

        _LOGGER.debug(
            f"[collecting_resources] job task summary ({job_task_id}: {job_task_status}) "
            f"=> {collecting_count_info}"
        )

        if job_task_status == "SUCCESS":
            self.job_task_mgr.decrease_remained_sub_tasks(
                job_task_vo, collecting_count_info
            )
        else:
            self.job_task_mgr.make_failure_by_vo(job_task_vo, collecting_count_info)

        return True
+
+    def _check_concurrency(self, collector_id: str, job_id: str, domain_id: str):
+        collector_mgr: CollectorManager = self.locator.get_manager(CollectorManager)
+        try:
+            collector_vo = collector_mgr.get_collector(collector_id, domain_id)
+            plugin_info = collector_vo.plugin_info.to_dict()
+            metadata = plugin_info.get("metadata", {})
+        except Exception as e:
+            _LOGGER.warning(
+                f"[_check_concurrency] failed to get collector metadata: {e}"
+            )
+            metadata = {}
+
+        max_concurrency = metadata.get("concurrency")
+        if max_concurrency and isinstance(max_concurrency, int):
+            job_task_vos = self.job_task_mgr.filter_job_tasks(
+                job_id=job_id, domain_id=domain_id, status="IN_PROGRESS"
+            )
+            current_concurrency = job_task_vos.count()
+            if job_task_vos.count() >= max_concurrency:
+                _LOGGER.debug(
+                    f"[_check_concurrency] job task concurrency exceeded ({job_id}): "
+                    f"{current_concurrency}/{max_concurrency}"
+                )
+                return False
+
+        return True
+
    def _upsert_collecting_resources(
        self, resources: Generator[dict, None, None], params: dict, job_task_vo: JobTask
    ) -> dict:
        """Consume the plugin's resource stream and upsert each item.

        Args:
            resources (Generator): stream of resource dicts from the plugin
            params (dict): {
                'collector_id': 'str',
                'job_id': 'str',
                'job_task_id': 'str',
                'workspace_id': 'str',
                'domain_id': 'str',
                'plugin_info': 'dict',
                'task_options': 'dict',
                'secret_info': 'dict',
            }
            job_task_vo (JobTask): task to attach per-resource errors to
        Returns:
            dict: {'total_count', 'created_count', 'updated_count', 'failure_count'}
        """

        created_count = 0
        updated_count = 0
        failure_count = 0
        total_count = 0

        self._set_transaction_meta(params)

        for resource_data in resources:
            resource_type = resource_data.get("resource_type")
            # Count optimistically; non-countable results decrement below.
            total_count += 1

            try:
                if resource_type in ["inventory.Namespace", "inventory.Metric"]:
                    # Metric/namespace upsert is not wired up yet in v2
                    # (see the commented managers in __init__).
                    # self._upsert_metric_and_namespace(resource_data, params)
                    # total_count -= 1
                    pass

                else:
                    upsert_result = self._upsert_resource(
                        resource_data, params, job_task_vo
                    )

                    if upsert_result == NOT_COUNT:
                        # skip count for cloud service type and region
                        total_count -= 1
                        pass
                    elif upsert_result == CREATED:
                        created_count += 1
                    elif upsert_result == UPDATED:
                        updated_count += 1
                    else:
                        failure_count += 1

            except Exception as e:
                # One bad resource must not abort the whole stream.
                _LOGGER.error(
                    f"[_upsert_collecting_resources] upsert resource error: {e}",
                    exc_info=True,
                )
                self.job_task_mgr.add_error(
                    job_task_vo,
                    "ERROR_UNKNOWN",
                    f"failed to upsert {resource_type}: {e}",
                    {"resource_type": resource_type},
                )
                failure_count += 1

        return {
            "total_count": total_count,
            "created_count": created_count,
            "updated_count": updated_count,
            "failure_count": failure_count,
        }
+
    def _upsert_metric_and_namespace(self, resource_data: dict, params: dict) -> None:
        """Create or version-update a plugin-managed Namespace or Metric.

        NOTE(review): `self.namespace_mgr` and `self.metric_mgr` are commented
        out in __init__, so calling this today would raise AttributeError;
        its only call site in _upsert_collecting_resources is also commented
        out. Re-enable both together.

        Args:
            resource_data (dict): resource information from plugin
            params(dict): {
                'collector_id': 'str',
                'job_id': 'str',
                'job_task_id': 'str',
                'workspace_id': 'str',
                'domain_id': 'str',
                'plugin_info': 'dict',
                'task_options': 'dict',
                'secret_info': 'dict'
            }
        Returns:
            None
        """

        domain_id = params["domain_id"]
        plugin_id = params["plugin_info"].get("plugin_id")
        resource_type = resource_data.get("resource_type")
        request_data = resource_data.get("resource", {})
        # Plugin-managed resources are domain-scoped and visible to all workspaces.
        request_data["domain_id"] = domain_id
        request_data["plugin_id"] = plugin_id
        request_data["is_managed"] = True
        request_data["resource_group"] = "DOMAIN"
        request_data["workspace_id"] = "*"

        if resource_type == "inventory.Namespace":
            namespace_id = request_data.get("namespace_id")
            version = request_data.get("version")

            namespace_vos = self.namespace_mgr.filter_namespaces(
                namespace_id=namespace_id, domain_id=domain_id
            )
            if namespace_vos.count() == 0:
                self.namespace_mgr.create_namespace(request_data)
            else:
                namespace_vo = namespace_vos[0]

                # Only rewrite the namespace when the plugin bumped its version.
                if namespace_vo.version != version:
                    self.namespace_mgr.update_namespace_by_vo(
                        request_data, namespace_vo
                    )
        else:
            metric_id = request_data.get("metric_id")
            version = request_data.get("version")

            metric_vos = self.metric_mgr.filter_metrics(
                metric_id=metric_id, domain_id=domain_id
            )

            if metric_vos.count() == 0:
                self.metric_mgr.create_metric(request_data)
            else:
                metric_vo = metric_vos[0]

                if metric_vo.version != version:
                    # Skip rewriting query_options when they are identical,
                    # to avoid needless recomputation downstream.
                    if "query_options" in request_data:
                        old_query_hash = utils.dict_to_hash(metric_vo.query_options)
                        new_query_hash = utils.dict_to_hash(
                            request_data.get("query_options")
                        )
                        if old_query_hash == new_query_hash:
                            del request_data["query_options"]

                    self.metric_mgr.update_metric_by_vo(request_data, metric_vo)
+
+    def _upsert_resource(
+        self, resource_data: dict, params: dict, job_task_vo: JobTask
+    ) -> int:
+        """
+        Args:
+            resource_data (dict): resource information from plugin
+            params(dict): {
+                'collector_id': 'str',
+                'job_id': 'str',
+                'job_task_id': 'str',
+                'workspace_id': 'str',
+                'domain_id': 'str',
+                'plugin_info': 'dict',
+                'task_options': 'dict',
+                'secret_info': 'dict'
+            }
+        Returns:
+            0: NOT_COUNT (for cloud service type and region)
+            1: CREATED
+            2: UPDATED
+            3: ERROR
+        """
+
+        job_task_id = params["job_task_id"]
+        domain_id = params["domain_id"]
+        workspace_id = params["workspace_id"]
+        resource_type = resource_data.get("resource_type")
+        resource_state = resource_data.get("state")
+        match_rules = resource_data.get("match_rules")
+        request_data = resource_data.get("resource", {})
+        request_data["domain_id"] = domain_id
+        request_data["workspace_id"] = workspace_id
+
+        service, manager = self._get_resource_map(resource_type)
+
+        response = ERROR
+
+        if resource_state == "FAILURE":
+            error_message = resource_data.get("message", "Unknown error.")
+            _LOGGER.error(
+                f"[_upsert_resource] plugin response error ({job_task_id}): {error_message}"
+            )
+
+            self.job_task_mgr.add_error(
+                job_task_vo, "ERROR_PLUGIN", error_message, request_data
+            )
+
+            return ERROR
+
+        if not match_rules:
+            error_message = "Match rule is not defined."
+            _LOGGER.error(
+                f"[_upsert_resource] match rule error ({job_task_id}): {error_message}"
+            )
+            self.job_task_mgr.add_error(
+                job_task_vo,
+                "ERROR_MATCH_RULE",
+                error_message,
+                {"resource_type": resource_type},
+            )
+            return ERROR
+
+        try:
+            match_resource, total_count = self._query_with_match_rules(
+                request_data, match_rules, domain_id, workspace_id, manager
+            )
+
+        except ERROR_TOO_MANY_MATCH as e:
+            _LOGGER.error(
+                f"[_upsert_resource] match resource error ({job_task_id}): {e}"
+            )
+            self.job_task_mgr.add_error(
+                job_task_vo,
+                e.error_code,
+                e.message,
+                {"resource_type": resource_type},
+            )
+            return ERROR
+        except Exception as e:
+            if isinstance(e, ERROR_BASE):
+                error_message = e.message
+            else:
+                error_message = str(e)
+
+            _LOGGER.error(
+                f"[_upsert_resource] match resource error ({job_task_id}): {error_message}",
+                exc_info=True,
+            )
+            self.job_task_mgr.add_error(
+                job_task_vo,
+                "ERROR_UNKNOWN",
+                f"Failed to match resource: {error_message}",
+                {"resource_type": resource_type},
+            )
+            return ERROR
+
+        try:
+            if total_count == 0:
+                # Create resource
+                service.create_resource(request_data)
+                response = CREATED
+            elif total_count == 1:
+                # Update resource
+                request_data.update(match_resource[0])
+                service.update_resource(request_data)
+                response = UPDATED
+            else:
+                response = ERROR
+
+        except ERROR_BASE as e:
+            _LOGGER.error(
+                f"[_upsert_resource] resource upsert error ({job_task_id}): {e.message}"
+            )
+            additional = self._set_error_addition_info(
+                resource_type, total_count, request_data
+            )
+            self.job_task_mgr.add_error(
+                job_task_vo, e.error_code, e.message, additional
+            )
+            response = ERROR
+
+        except Exception as e:
+            error_message = str(e)
+
+            _LOGGER.debug(
+                f"[_upsert_resource] unknown error ({job_task_id}): {error_message}",
+                exc_info=True,
+            )
+            self.job_task_mgr.add_error(
+                job_task_vo,
+                "ERROR_UNKNOWN",
+                error_message,
+                {"resource_type": resource_type},
+            )
+            response = ERROR
+
+        finally:
+            if response in [CREATED, UPDATED]:
+                if resource_type in ["inventory.CloudServiceType", "inventory.Region"]:
+                    response = NOT_COUNT
+
+            return response
+
+    def _set_transaction_meta(self, params):
+        secret_info = params["secret_info"]
+
+        self.transaction.set_meta("job_id", params["job_id"])
+        self.transaction.set_meta("job_task_id", params["job_task_id"])
+        self.transaction.set_meta("collector_id", params["collector_id"])
+        self.transaction.set_meta("secret.secret_id", secret_info["secret_id"])
+        self.transaction.set_meta("disable_info_log", "true")
+
+        if plugin_id := params["plugin_info"].get("plugin_id"):
+            self.transaction.set_meta("plugin_id", plugin_id)
+
+        if "provider" in secret_info:
+            self.transaction.set_meta("secret.provider", secret_info["provider"])
+
+        if "project_id" in secret_info:
+            self.transaction.set_meta("secret.project_id", secret_info["project_id"])
+
+        if "service_account_id" in secret_info:
+            self.transaction.set_meta(
+                "secret.service_account_id", secret_info["service_account_id"]
+            )
+
+    def _get_resource_map(self, resource_type: str):
+        if resource_type not in RESOURCE_MAP:
+            raise ERROR_UNSUPPORTED_RESOURCE_TYPE(resource_type=resource_type)
+
+        if resource_type in self._service_and_manager_map:
+            return self._service_and_manager_map[resource_type]
+
+        service: AssetService = self.locator.get_service(RESOURCE_MAP[resource_type][0])
+        manager = self.locator.get_manager(RESOURCE_MAP[resource_type][1])
+
+        self._service_and_manager_map[resource_type] = service, manager
+        return service, manager
+
+    @staticmethod
+    def _set_error_addition_info(
+        resource_type: str, total_count: int, resource_data: dict
+    ) -> dict:
+        additional = {"resource_type": resource_type}
+
+        if resource_type == "inventory.CloudService":
+            additional.update(
+                {
+                    "cloud_service_group": resource_data.get("cloud_service_group"),
+                    "cloud_service_type": resource_data.get("cloud_service_type"),
+                    "provider": resource_data.get("provider"),
+                }
+            )
+
+        if total_count == 1:
+            if resource_type == "inventory.CloudService":
+                additional["resource_id"] = resource_data.get("cloud_service_id")
+            elif resource_type == "inventory.CloudServiceType":
+                additional["resource_id"] = resource_data.get("cloud_service_type_id")
+            elif resource_type == "inventory.Region":
+                additional["resource_id"] = resource_data.get("region_id")
+
+        return additional
+
    @staticmethod
    def _query_with_match_rules(
        resource_data: dict,
        match_rules: dict,
        domain_id: str,
        workspace_id: str,
        resource_manager: ResourceManager,
    ):
        """match resource based on match rules

        Args:
            resource_data (dict): resource data from plugin
            match_rules (list): e.g. {1:['reference.resource_id'], 2:['name']}

        Return:
            match_resource (dict) : resource_id for update (e.g. {'cloud_service_id': 'cloud-svc-abcde12345'})
            total_count (int) : total count of matched resources
        """

        match_resource = None
        total_count = 0

        # Plugin payloads may carry rule orders as string keys; normalize to int.
        match_rules = rule_matcher.dict_key_int_parser(match_rules)

        # Try rules in ascending order; the first single match wins.
        for order in sorted(match_rules.keys()):
            query = rule_matcher.make_query(
                order, match_rules, resource_data, domain_id, workspace_id
            )
            match_resource, total_count = resource_manager.find_resources(query)

            if total_count > 1:
                # NOTE(review): the error is only raised when the resource has
                # a "data" payload; otherwise a >1 match silently falls through
                # to the next rule (or the final return with total_count > 1).
                # Confirm this asymmetry is intentional.
                if data := resource_data.get("data"):
                    raise ERROR_TOO_MANY_MATCH(
                        match_key=match_rules[order],
                        resources=match_resource,
                        more=data,
                    )
            elif total_count == 1 and match_resource:
                return match_resource, total_count

        return match_resource, total_count
diff --git a/src/spaceone/inventory_v2/manager/collection_state_manager.py b/src/spaceone/inventory_v2/manager/collection_state_manager.py
index bbe8208..2b7ef3d 100644
--- a/src/spaceone/inventory_v2/manager/collection_state_manager.py
+++ b/src/spaceone/inventory_v2/manager/collection_state_manager.py
@@ -16,7 +16,7 @@ def __init__(self, *args, **kwargs):
         self.secret_id = self.transaction.get_meta("secret.secret_id")
         self.collection_state_model = CollectionState
 
-    def create_collection_state(self, cloud_service_id: str, domain_id: str) -> None:
+    def create_collection_state(self, asset_id: str, domain_id: str) -> None:
         def _rollback(vo: CollectionState):
             _LOGGER.info(
                 f"[ROLLBACK] Delete collection state: cloud_service_id = {vo.cloud_service_id}, "
@@ -29,7 +29,7 @@ def _rollback(vo: CollectionState):
                 "collector_id": self.collector_id,
                 "job_task_id": self.job_task_id,
                 "secret_id": self.secret_id,
-                "cloud_service_id": cloud_service_id,
+                "asset_id": asset_id,
                 "domain_id": domain_id,
             }
 
diff --git a/src/spaceone/inventory_v2/manager/collector_rule_manager.py b/src/spaceone/inventory_v2/manager/collector_rule_manager.py
index 19a1e22..f45adcc 100644
--- a/src/spaceone/inventory_v2/manager/collector_rule_manager.py
+++ b/src/spaceone/inventory_v2/manager/collector_rule_manager.py
@@ -75,57 +75,55 @@ def list_collector_rules(self, query: dict) -> Tuple[QuerySet, int]:
     def stat_collector_rules(self, query: dict) -> dict:
         return self.collector_rule_model.stat(**query)
 
-    def change_cloud_service_data(
-        self, collector_id: str, domain_id: str, cloud_service_data: dict
+    def change_asset_data(
+        self, collector_id: str, domain_id: str, asset_data: dict
     ) -> dict:
         (
             managed_collector_rule_vos,
             custom_collector_rule_vos,
         ) = self._get_collector_rules(collector_id, domain_id)
 
-        cloud_service_data = self._apply_collector_rule_to_cloud_service_data(
-            cloud_service_data, managed_collector_rule_vos, domain_id
+        cloud_service_data = self._apply_collector_rule_to_asset_data(
+            asset_data, managed_collector_rule_vos, domain_id
         )
 
-        cloud_service_data = self._apply_collector_rule_to_cloud_service_data(
+        cloud_service_data = self._apply_collector_rule_to_asset_data(
             cloud_service_data, custom_collector_rule_vos, domain_id
         )
 
         return cloud_service_data
 
    def _apply_collector_rule_to_asset_data(
        self, asset_data: dict, collector_rule_vos: QuerySet, domain_id: str
    ) -> dict:
        # Evaluate each rule in order; matching rules apply their actions,
        # and a matching rule with stop_processing halts further evaluation.
        for collector_rule_vo in collector_rule_vos:
            is_match = self._change_asset_data_by_rule(asset_data, collector_rule_vo)

            if is_match:
                asset_data = self._change_asset_data_with_actions(
                    asset_data, collector_rule_vo.actions, domain_id
                )

            if is_match and collector_rule_vo.options.stop_processing:
                break

        return asset_data
 
-    def _change_cloud_service_data_with_actions(
-        self, cloud_service_data: dict, actions: dict, domain_id: str
+    def _change_asset_data_with_actions(
+        self, asset_data: dict, actions: dict, domain_id: str
     ) -> dict:
         for action, value in actions.items():
             if action == "change_project":
                 project_info = self._get_project("project_id", value, domain_id)
 
                 if project_info:
-                    cloud_service_data["project_id"] = project_info["project_id"]
-                    cloud_service_data["workspace_id"] = project_info["workspace_id"]
+                    asset_data["project_id"] = project_info["project_id"]
+                    asset_data["workspace_id"] = project_info["workspace_id"]
 
             elif action == "match_project":
                 source = value["source"]
                 target_key = value.get("target", "project_id")
-                target_value = utils.get_dict_value(cloud_service_data, source)
+                target_value = utils.get_dict_value(asset_data, source)
 
                 if target_value:
                     project_info = self._get_project(
@@ -133,31 +131,27 @@ def _change_cloud_service_data_with_actions(
                     )
 
                     if project_info:
-                        cloud_service_data["project_id"] = project_info["project_id"]
-                        cloud_service_data["workspace_id"] = project_info[
-                            "workspace_id"
-                        ]
+                        asset_data["project_id"] = project_info["project_id"]
+                        asset_data["workspace_id"] = project_info["workspace_id"]
 
             elif action == "match_service_account":
                 source = value["source"]
                 target_key = value.get("target", "service_account_id")
-                target_value = utils.get_dict_value(cloud_service_data, source)
+                target_value = utils.get_dict_value(asset_data, source)
                 if target_value:
                     service_account_info = self._get_service_account(
                         target_key, target_value, domain_id
                     )
                     if service_account_info:
-                        cloud_service_data["service_account_id"] = service_account_info[
+                        asset_data["service_account_id"] = service_account_info[
                             "service_account_id"
                         ]
-                        cloud_service_data["project_id"] = service_account_info[
-                            "project_id"
-                        ]
-                        cloud_service_data["workspace_id"] = service_account_info[
+                        asset_data["project_id"] = service_account_info["project_id"]
+                        asset_data["workspace_id"] = service_account_info[
                             "workspace_id"
                         ]
 
-        return cloud_service_data
+        return asset_data
 
     def _get_service_account(
         self, target_key: str, target_value: any, domain_id: str
@@ -223,8 +217,8 @@ def _get_project(self, target_key: str, target_value: str, domain_id: str) -> di
         ] = project_info
         return project_info
 
-    def _change_cloud_service_data_by_rule(
-        self, cloud_service_data: dict, collector_rule_vo: CollectorRule
+    def _change_asset_data_by_rule(
+        self, asset_data: dict, collector_rule_vo: CollectorRule
     ) -> bool:
         conditions_policy = collector_rule_vo.conditions_policy
 
@@ -233,7 +227,7 @@ def _change_cloud_service_data_by_rule(
         else:
             results = list(
                 map(
-                    functools.partial(self._check_condition, cloud_service_data),
+                    functools.partial(self._check_condition, asset_data),
                     collector_rule_vo.conditions,
                 )
             )
@@ -244,33 +238,31 @@ def _change_cloud_service_data_by_rule(
                 return any(results)
 
     @staticmethod
-    def _check_condition(
-        cloud_service_data: dict, condition: CollectorRuleCondition
-    ) -> bool:
-        cloud_service_value = utils.get_dict_value(cloud_service_data, condition.key)
+    def _check_condition(asset_data: dict, condition: CollectorRuleCondition) -> bool:
+        asset_value = utils.get_dict_value(asset_data, condition.key)
         condition_value = condition.value
         operator = condition.operator
 
-        if cloud_service_value is None:
+        if asset_value is None:
             return False
 
         if operator == "eq":
-            if cloud_service_value == condition_value:
+            if asset_value == condition_value:
                 return True
             else:
                 return False
         elif operator == "contain":
-            if cloud_service_value.lower().find(condition_value.lower()) >= 0:
+            if asset_value.lower().find(condition_value.lower()) >= 0:
                 return True
             else:
                 return False
         elif operator == "not":
-            if cloud_service_value != condition_value:
+            if asset_value != condition_value:
                 return True
             else:
                 return False
         elif operator == "not_contain":
-            if cloud_service_value.lower().find(condition_value.lower()) < 0:
+            if asset_value.lower().find(condition_value.lower()) < 0:
                 return True
             else:
                 return False
diff --git a/src/spaceone/inventory_v2/manager/metric_data_manager.py b/src/spaceone/inventory_v2/manager/metric_data_manager.py
new file mode 100644
index 0000000..b4593b5
--- /dev/null
+++ b/src/spaceone/inventory_v2/manager/metric_data_manager.py
@@ -0,0 +1,329 @@
import logging
from typing import Tuple
from datetime import datetime, date
from dateutil.relativedelta import relativedelta

from spaceone.core.model.mongo_model import QuerySet
from spaceone.core.manager import BaseManager
from spaceone.core import utils, cache
from spaceone.inventory_v2.model.metric_data.database import (
    MetricData,
    MonthlyMetricData,
    MetricQueryHistory,
)
from spaceone.inventory_v2.error.metric import (
    ERROR_INVALID_DATE_RANGE,
    ERROR_INVALID_PARAMETER_TYPE,
)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class MetricDataManager(BaseManager):
    def __init__(self, *args, **kwargs):
        """Bind the daily/monthly metric-data models and the query-history model."""
        super().__init__(*args, **kwargs)
        self.metric_data_model = MetricData
        self.monthly_metric_data = MonthlyMetricData
        self.history_model = MetricQueryHistory
+
+    def create_metric_data(self, params: dict) -> MetricData:
+        metric_data_vo: MetricData = self.metric_data_model.create(params)
+        return metric_data_vo
+
+    def create_monthly_metric_data(self, params: dict) -> MonthlyMetricData:
+        monthly_metric_data_vo: MonthlyMetricData = self.monthly_metric_data.create(
+            params
+        )
+        return monthly_metric_data_vo
+
    def delete_metric_data_by_metric_id(self, metric_id: str, domain_id: str):
        """Hard-delete every daily and monthly metric-data document of a metric,
        then drop the related cache entries."""
        _LOGGER.debug(
            f"[delete_metric_data_by_metric_id] Delete all metric data: {metric_id}"
        )
        metric_data_vos = self.metric_data_model.filter(
            metric_id=metric_id, domain_id=domain_id
        )
        metric_data_vos.delete()

        monthly_metric_data_vos = self.monthly_metric_data.filter(
            metric_id=metric_id, domain_id=domain_id
        )
        monthly_metric_data_vos.delete()

        # Invalidate cached analyze results and the query-history throttle key.
        cache.delete_pattern(f"inventory:metric-data:*:{domain_id}:{metric_id}:*")
        cache.delete_pattern(f"inventory:metric-query-history:{domain_id}:{metric_id}")
+
    def filter_metric_data(self, **conditions) -> QuerySet:
        """Keyword-filter over daily metric data (no status filtering applied)."""
        return self.metric_data_model.filter(**conditions)
+
    def filter_monthly_metric_data(self, **conditions) -> QuerySet:
        """Keyword-filter over monthly metric data (no status filtering applied)."""
        return self.monthly_metric_data.filter(**conditions)
+
+    def list_metric_data(self, query: dict, status: str = None) -> Tuple[QuerySet, int]:
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        return self.metric_data_model.query(**query)
+
+    def list_monthly_metric_data(
+        self, query: dict, status: str = None
+    ) -> Tuple[QuerySet, int]:
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        return self.monthly_metric_data.query(**query)
+
+    def stat_metric_data(self, query: dict, status: str = None) -> dict:
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        return self.metric_data_model.stat(**query)
+
+    def stat_monthly_metric_data(self, query: dict, status: str = None) -> dict:
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        return self.monthly_metric_data.stat(**query)
+
+    def analyze_metric_data(
+        self, query: dict, target: str = "SECONDARY_PREFERRED", status: str = None
+    ) -> dict:
+        query["target"] = target
+        query["date_field"] = "created_date"
+        query["date_field_format"] = "%Y-%m-%d"
+
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        _LOGGER.debug(f"[analyze_metric_data] Query: {query}")
+        return self.metric_data_model.analyze(**query)
+
+    def analyze_monthly_metric_data(
+        self, query: dict, target: str = "SECONDARY_PREFERRED", status: str = None
+    ) -> dict:
+        query["target"] = target
+        query["date_field"] = "created_month"
+        query["date_field_format"] = "%Y-%m"
+
+        if status != "IN_PROGRESS":
+            query = self._append_status_filter(query)
+
+        _LOGGER.debug(f"[analyze_monthly_metric_data] Query: {query}")
+        return self.monthly_metric_data.analyze(**query)
+
    def analyze_yearly_metric_data(
        self, query: dict, target: str = "SECONDARY_PREFERRED", status: str = None
    ) -> dict:
        """Yearly analyze query.

        NOTE(review): this runs against the *monthly* collection, bucketing
        monthly documents by created_year — there appears to be no separate
        yearly collection; confirm this is intended.
        """
        query["target"] = target
        query["date_field"] = "created_year"
        query["date_field_format"] = "%Y"

        if status != "IN_PROGRESS":
            query = self._append_status_filter(query)

        _LOGGER.debug(f"[analyze_yearly_metric_data] Query: {query}")
        return self.monthly_metric_data.analyze(**query)
+
    @cache.cacheable(
        key="inventory:metric-data:daily:{domain_id}:{metric_id}:{query_hash}",
        expire=3600 * 24,
    )
    def analyze_metric_data_with_cache(
        self,
        query: dict,
        query_hash: str,
        domain_id: str,
        metric_id: str,
        target: str = "SECONDARY_PREFERRED",
    ) -> dict:
        """Daily analyze with a 24h cache.

        query_hash / domain_id / metric_id exist only to key the cache entry.
        """
        return self.analyze_metric_data(query, target)
+
    @cache.cacheable(
        key="inventory:metric-data:monthly:{domain_id}:{metric_id}:{query_hash}",
        expire=3600 * 24,
    )
    def analyze_monthly_metric_data_with_cache(
        self,
        query: dict,
        query_hash: str,
        domain_id: str,
        metric_id: str,
        target: str = "SECONDARY_PREFERRED",
    ) -> dict:
        """Monthly analyze with a 24h cache.

        query_hash / domain_id / metric_id exist only to key the cache entry.
        """
        return self.analyze_monthly_metric_data(query, target)
+
    @cache.cacheable(
        key="inventory:metric-data:yearly:{domain_id}:{metric_id}:{query_hash}",
        expire=3600 * 24,
    )
    def analyze_yearly_metric_data_with_cache(
        self,
        query: dict,
        query_hash: str,
        domain_id: str,
        metric_id: str,
        target: str = "SECONDARY_PREFERRED",
    ) -> dict:
        """Yearly analyze with a 24h cache.

        query_hash / domain_id / metric_id exist only to key the cache entry.
        """
        return self.analyze_yearly_metric_data(query, target)
+
+    def analyze_metric_data_by_granularity(
+        self, query: dict, domain_id: str, metric_id: str
+    ) -> dict:
+        self._check_date_range(query)
+        granularity = query["granularity"]
+        query_hash = utils.dict_to_hash(query)
+
+        # Save query history to speed up the analysis
+        self._update_metric_query_history(domain_id, metric_id)
+
+        if granularity == "DAILY":
+            response = self.analyze_metric_data_with_cache(
+                query, query_hash, domain_id, metric_id
+            )
+        elif granularity == "MONTHLY":
+            response = self.analyze_monthly_metric_data_with_cache(
+                query, query_hash, domain_id, metric_id
+            )
+        else:
+            response = self.analyze_yearly_metric_data_with_cache(
+                query, query_hash, domain_id, metric_id
+            )
+
+        return response
+
    def list_metric_query_history(self, query: dict) -> Tuple[QuerySet, int]:
        """Run a spaceone query over metric query-history documents."""
        return self.history_model.query(**query)
+
    @cache.cacheable(
        key="inventory:metric-query-history:{domain_id}:{metric_id}",
        expire=600,
    )
    def _update_metric_query_history(self, domain_id: str, metric_id: str):
        """Record that this metric was queried (upsert one history document).

        The 10-minute cache entry throttles writes: repeat queries within the
        window skip this method entirely.
        """

        def _rollback(vo: MetricQueryHistory):
            _LOGGER.info(
                f"[update_metric_query_history._rollback] Delete metric query history: {metric_id}"
            )
            vo.delete()

        history_vos = self.history_model.filter(
            domain_id=domain_id, metric_id=metric_id
        )
        if history_vos.count() == 0:
            history_vo = self.history_model.create(
                {
                    "metric_id": metric_id,
                    "domain_id": domain_id,
                }
            )

            self.transaction.add_rollback(_rollback, history_vo)
        else:
            # Empty update — presumably refreshes the document's updated
            # timestamp; confirm against the model definition.
            history_vos[0].update({})
+
    def _check_date_range(self, query: dict) -> None:
        """Validate start/end/granularity of an analyze query.

        Enforces matching start/end formats, start < end, a maximum window per
        granularity and a retention horizon (DAILY: 1-month window, 1 year
        back; MONTHLY: 12-month window, 3 years back; YEARLY: 3 years back).

        Raises:
            ERROR_INVALID_DATE_RANGE: on any violation.

        NOTE(review): "start"/"end" are read with .get() but used unguarded —
        a missing key raises AttributeError/TypeError here instead of a clean
        validation error; confirm callers always supply both.
        """
        start_str = query.get("start")
        end_str = query.get("end")
        granularity = query.get("granularity")

        start = self._parse_start_time(start_str, granularity)
        end = self._parse_end_time(end_str, granularity)
        now = datetime.utcnow().date()

        # Same string length implies the same accepted date format.
        if len(start_str) != len(end_str):
            raise ERROR_INVALID_DATE_RANGE(
                start=start_str,
                end=end_str,
                reason="Start date and end date must be the same format.",
            )

        if start >= end:
            raise ERROR_INVALID_DATE_RANGE(
                start=start_str,
                end=end_str,
                reason="End date must be greater than start date.",
            )

        if granularity == "DAILY":
            if start + relativedelta(months=1) < end:
                raise ERROR_INVALID_DATE_RANGE(
                    start=start_str,
                    end=end_str,
                    reason="Request up to a maximum of 1 month.",
                )

            if start + relativedelta(months=12) < now.replace(day=1):
                raise ERROR_INVALID_DATE_RANGE(
                    start=start_str,
                    end=end_str,
                    reason="For DAILY, you cannot request data older than 1 year.",
                )

        elif granularity == "MONTHLY":
            if start + relativedelta(months=12) < end:
                raise ERROR_INVALID_DATE_RANGE(
                    start=start_str,
                    end=end_str,
                    reason="Request up to a maximum of 12 months.",
                )

            if start + relativedelta(months=36) < now.replace(day=1):
                raise ERROR_INVALID_DATE_RANGE(
                    start=start_str,
                    end=end_str,
                    reason="For MONTHLY, you cannot request data older than 3 years.",
                )
        elif granularity == "YEARLY":
            if start + relativedelta(years=3) < now.replace(month=1, day=1):
                raise ERROR_INVALID_DATE_RANGE(
                    start=start_str,
                    end=end_str,
                    reason="For YEARLY, you cannot request data older than 3 years.",
                )
+
    def _parse_start_time(self, date_str: str, granularity: str) -> date:
        """Parse the inclusive start of the range (whitespace-tolerant)."""
        return self._convert_date_from_string(date_str.strip(), "start", granularity)
+
+    def _parse_end_time(self, date_str: str, granularity: str) -> date:
+        end = self._convert_date_from_string(date_str.strip(), "end", granularity)
+
+        if granularity == "YEARLY":
+            return end + relativedelta(years=1)
+        elif granularity == "MONTHLY":
+            return end + relativedelta(months=1)
+        else:
+            return end + relativedelta(days=1)
+
+    @staticmethod
+    def _convert_date_from_string(date_str: str, key: str, granularity: str) -> date:
+        if granularity == "YEARLY":
+            date_format = "%Y"
+            date_type = "YYYY"
+        elif granularity == "MONTHLY":
+            if len(date_str) == 4:
+                date_format = "%Y"
+                date_type = "YYYY"
+            else:
+                date_format = "%Y-%m"
+                date_type = "YYYY-MM"
+        else:
+            if len(date_str) == 4:
+                date_format = "%Y"
+                date_type = "YYYY"
+            elif len(date_str) == 7:
+                date_format = "%Y-%m"
+                date_type = "YYYY-MM"
+            else:
+                date_format = "%Y-%m-%d"
+                date_type = "YYYY-MM-DD"
+
+        try:
+            return datetime.strptime(date_str, date_format).date()
+        except Exception as e:
+            raise ERROR_INVALID_PARAMETER_TYPE(key=key, type=date_type)
+
+    @staticmethod
+    def _append_status_filter(query: dict) -> dict:
+        query_filter = query.get("filter", [])
+        query_filter.append({"k": "status", "v": "DONE", "o": "eq"})
+        query["filter"] = query_filter
+        return query
diff --git a/src/spaceone/inventory_v2/manager/metric_example_manager.py b/src/spaceone/inventory_v2/manager/metric_example_manager.py
new file mode 100644
index 0000000..7cfce50
--- /dev/null
+++ b/src/spaceone/inventory_v2/manager/metric_example_manager.py
@@ -0,0 +1,68 @@
import logging
from typing import Tuple

from spaceone.core.model.mongo_model import QuerySet
from spaceone.core.manager import BaseManager
from spaceone.inventory_v2.model.metric_example.database import MetricExample
+
+_LOGGER = logging.getLogger(__name__)
+
+
class MetricExampleManager(BaseManager):
    """CRUD/query manager for per-user metric examples."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Mongo document model for metric examples.
        self.metric_example_model = MetricExample

    def create_metric_example(self, params: dict) -> MetricExample:
        """Create a metric example; registers a rollback that deletes it if
        the transaction fails."""

        def _rollback(vo: MetricExample):
            _LOGGER.info(
                f"[create_metric_example._rollback] "
                f"Delete metric_example: {vo.example_id}"
            )
            vo.delete()

        metric_example_vo: MetricExample = self.metric_example_model.create(params)
        self.transaction.add_rollback(_rollback, metric_example_vo)

        return metric_example_vo

    def update_metric_example_by_vo(
        self, params: dict, metric_example_vo: MetricExample
    ) -> MetricExample:
        """Update a metric example in place; a rollback restoring the previous
        document state is registered first."""

        def _rollback(old_data):
            _LOGGER.info(
                f"[update_metric_example_by_vo._rollback] Revert Data: "
                f'{old_data["example_id"]}'
            )
            metric_example_vo.update(old_data)

        self.transaction.add_rollback(_rollback, metric_example_vo.to_dict())

        return metric_example_vo.update(params)

    @staticmethod
    def delete_metric_example_by_vo(metric_example_vo: MetricExample) -> None:
        """Hard-delete a metric example document."""
        metric_example_vo.delete()

    def get_metric_example(
        self,
        example_id: str,
        domain_id: str,
        user_id: str,
    ) -> MetricExample:
        """Fetch one metric example, scoped to both the domain and owner user."""
        conditions = {
            "example_id": example_id,
            "domain_id": domain_id,
            "user_id": user_id,
        }

        return self.metric_example_model.get(**conditions)

    def filter_metric_examples(self, **conditions) -> QuerySet:
        """Keyword-filter over metric examples."""
        return self.metric_example_model.filter(**conditions)

    def list_metric_examples(self, query: dict) -> Tuple[QuerySet, int]:
        """Run a spaceone query and return (results, total_count)."""
        return self.metric_example_model.query(**query)

    def stat_metric_examples(self, query: dict) -> dict:
        """Run a stat (aggregation) query over metric examples."""
        return self.metric_example_model.stat(**query)
diff --git a/src/spaceone/inventory_v2/manager/metric_manager.py b/src/spaceone/inventory_v2/manager/metric_manager.py
new file mode 100644
index 0000000..440bb29
--- /dev/null
+++ b/src/spaceone/inventory_v2/manager/metric_manager.py
@@ -0,0 +1,711 @@
+import logging
+import copy
+import time
+from typing import Tuple, Union
+from datetime import datetime
+from dateutil.relativedelta import relativedelta
+
+from spaceone.core import config, queue
+from spaceone.core.model.mongo_model import QuerySet
+from spaceone.core.manager import BaseManager
+from spaceone.core import utils, cache
+from spaceone.inventory_v2.error.metric import (
+    ERROR_NOT_SUPPORT_RESOURCE_TYPE,
+    ERROR_METRIC_QUERY_RUN_FAILED,
+    ERROR_WRONG_QUERY_OPTIONS,
+)
+from spaceone.inventory_v2.model.metric.database import Metric
+from spaceone.inventory_v2.manager.managed_resource_manager import (
+    ManagedResourceManager,
+)
+from spaceone.inventory_v2.manager.cloud_service_manager import CloudServiceManager
+from spaceone.inventory_v2.manager.metric_data_manager import MetricDataManager
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class MetricManager(BaseManager):
    def __init__(self, *args, **kwargs):
        """Bind the Metric model and a MetricDataManager for result storage."""
        super().__init__(*args, **kwargs)
        self.metric_model = Metric
        self.metric_data_mgr = MetricDataManager()
+
    def push_task(self, metric_vo: Metric, is_yesterday: bool = False) -> None:
        """Queue an asynchronous run_metric_query job for this metric.

        The task is executed by a BaseWorker that calls
        MetricService.run_metric_query with the metric/domain ids; the current
        request token is forwarded so the job runs with the same identity.
        """
        metric_id = metric_vo.metric_id
        domain_id = metric_vo.domain_id

        task = {
            "name": "run_metric_query",
            "version": "v1",
            "executionEngine": "BaseWorker",
            "stages": [
                {
                    "locator": "SERVICE",
                    "name": "MetricService",
                    "metadata": {
                        "token": self.transaction.get_meta("token"),
                    },
                    "method": "run_metric_query",
                    "params": {
                        "params": {
                            "metric_id": metric_id,
                            "domain_id": domain_id,
                            "is_yesterday": is_yesterday,
                        }
                    },
                }
            ],
        }

        _LOGGER.debug(f"[push_task] run metric({domain_id}) {metric_id}")

        queue.put("inventory_q", utils.dump_json(task))
+
+    def create_metric(self, params: dict) -> Metric:
+        def _rollback(vo: Metric):
+            _LOGGER.info(f"[create_metric._rollback] " f"Delete metric: {vo.metric_id}")
+            vo.delete()
+
+        if params["metric_type"] == "COUNTER":
+            params["date_field"] = params.get("date_field") or "created_at"
+
+        if params.get("metric_id") is None:
+            params["metric_id"] = utils.generate_id("metric")
+
+        params["labels_info"] = self._get_labels_info(params["query_options"])
+
+        metric_vo: Metric = self.metric_model.create(params)
+        self.transaction.add_rollback(_rollback, metric_vo)
+
+        return metric_vo
+
    def update_metric_by_vo(self, params: dict, metric_vo: Metric) -> Metric:
        """Update a metric document in place.

        Changing query_options recomputes labels_info and deletes all metric
        data previously collected for this metric (it was produced by the old
        query). A rollback restoring the old document is registered first.
        """

        def _rollback(old_data):
            _LOGGER.info(
                f"[update_metric_by_vo._rollback] Revert Data: "
                f'{old_data["metric_id"]}'
            )
            metric_vo.update(old_data)

        if "query_options" in params:
            params["labels_info"] = self._get_labels_info(params["query_options"])

        self.transaction.add_rollback(_rollback, metric_vo.to_dict())

        metric_vo = metric_vo.update(params)

        if "query_options" in params:
            self.metric_data_mgr.delete_metric_data_by_metric_id(
                metric_vo.metric_id, metric_vo.domain_id
            )

        return metric_vo
+
+    def delete_metric_by_vo(self, metric_vo: Metric) -> None:
+        metric_id = metric_vo.metric_id
+        domain_id = metric_vo.domain_id
+        metric_vo.delete()
+
+        self.metric_data_mgr.delete_metric_data_by_metric_id(metric_id, domain_id)
+
+    def get_metric(
+        self,
+        metric_id: str,
+        domain_id: str,
+        workspace_id: Union[str, list] = None,
+    ) -> Metric:
+        conditions = {
+            "metric_id": metric_id,
+            "domain_id": domain_id,
+        }
+
+        if workspace_id:
+            conditions["workspace_id"] = workspace_id
+
+        return self.metric_model.get(**conditions)
+
    def filter_metrics(self, **conditions) -> QuerySet:
        """Keyword-filter over metric documents."""
        return self.metric_model.filter(**conditions)
+
    def list_metrics(self, query: dict, domain_id: str) -> Tuple[QuerySet, int]:
        """List metrics. Side effect: syncs managed metric definitions for the
        domain first (throttled by a 5-minute cache)."""
        self.create_managed_metric(domain_id)
        return self.metric_model.query(**query)
+
    def stat_metrics(self, query: dict) -> dict:
        """Run a stat (aggregation) query over metric documents."""
        return self.metric_model.stat(**query)
+
    def run_metric_query(self, metric_vo: Metric, is_yesterday: bool = False) -> None:
        """Execute the metric's analyze query and persist the results.

        Flow: wait for any previous job to finish, mark the metric
        IN_PROGRESS under a fresh metric_job_id, run the analyze query, save
        each result row, drop same-date rows written by older jobs, aggregate
        COUNTER metrics into monthly documents, then mark the metric DONE.
        A failure while saving rolls the new rows back and re-raises.
        """
        # Block until the previous metric job (if any) has finished.
        self._check_metric_status(metric_vo)

        metric_job_id = utils.generate_id("metric-job")
        _LOGGER.debug(
            f"[run_metric_query] Start metric job ({metric_vo.metric_id}): {metric_job_id}"
        )

        self.update_metric_by_vo(
            {"status": "IN_PROGRESS", "metric_job_id": metric_job_id}, metric_vo
        )

        results = self.analyze_resource(metric_vo, is_yesterday=is_yesterday)

        created_at = datetime.utcnow()

        # Yesterday's COUNTER data is stored under yesterday's date bucket.
        if is_yesterday and metric_vo.metric_type == "COUNTER":
            created_at = created_at - relativedelta(days=1)

        try:
            _LOGGER.debug(
                f"[run_metric_query] Save query results ({metric_vo.metric_id}): {len(results)}"
            )
            for result in results:
                self._save_query_result(metric_vo, result, created_at, metric_job_id)
            # Remove same-date rows left over from previous jobs.
            self._delete_changed_metric_data(metric_vo, created_at, metric_job_id)

            if metric_vo.metric_type == "COUNTER":
                self._aggregate_monthly_metric_data(
                    metric_vo, created_at, metric_job_id
                )

            self._delete_changed_monthly_metric_data(
                metric_vo, created_at, metric_job_id
            )
        except Exception as e:
            _LOGGER.error(
                f"[run_metric_query] Failed to save query result: {e}",
                exc_info=True,
            )
            self._rollback_query_results(metric_vo, created_at, metric_job_id)
            raise ERROR_METRIC_QUERY_RUN_FAILED(metric_id=metric_vo.metric_id)

        # Re-read to detect a concurrent job that replaced our job id.
        metric_vo = self.get_metric(metric_vo.metric_id, metric_vo.domain_id)
        if metric_vo.metric_job_id != metric_job_id:
            _LOGGER.debug(
                f"[run_metric_query] Duplicate metric job ({metric_vo.metric_id}): {metric_job_id}"
            )
            self._rollback_query_results(metric_vo, created_at, metric_job_id)
        else:
            self._update_status(metric_vo, created_at, metric_job_id)
            self._delete_invalid_metric_data(metric_vo, metric_job_id)
            self._delete_old_metric_data(metric_vo)
            self._delete_analyze_cache(metric_vo.domain_id, metric_vo.metric_id)

        # NOTE(review): DONE is set even on the duplicate-job path — looks
        # intentional (the newest job owns the flag); confirm.
        self.update_metric_by_vo({"status": "DONE", "is_new": False}, metric_vo)
+
+    def _check_metric_status(self, metric_vo: Metric) -> None:
+        for i in range(200):
+            metric_vo = self.get_metric(metric_vo.metric_id, metric_vo.domain_id)
+            if metric_vo.status == "DONE":
+                return
+
+            time.sleep(3)
+
+        _LOGGER.warning(f"[_check_metric_status] Timeout: {metric_vo.metric_id}")
+        self.update_metric_by_vo({"status": "DONE"}, metric_vo)
+
    def analyze_resource(
        self,
        metric_vo: Metric,
        workspace_id: str = None,
        query_options: dict = None,
        is_yesterday: bool = False,
    ) -> list:
        """Run the metric's analyze query against its resource type.

        Query options are deep-copied before filters are appended, so the
        stored metric definition is never mutated. WORKSPACE-scoped metrics
        and an explicit workspace_id both add workspace filters; COUNTER
        metrics are additionally restricted to a one-day datetime window.

        Raises:
            ERROR_NOT_SUPPORT_RESOURCE_TYPE: for non cloud-service types.
            ERROR_WRONG_QUERY_OPTIONS: if the analyze query fails.
        """
        resource_type = metric_vo.resource_type
        domain_id = metric_vo.domain_id
        metric_type = metric_vo.metric_type
        date_field = metric_vo.date_field
        query = query_options or metric_vo.query_options
        query = copy.deepcopy(query)
        query["filter"] = query.get("filter", [])

        if metric_vo.resource_group == "WORKSPACE":
            query["filter"].append(
                {"k": "workspace_id", "v": metric_vo.workspace_id, "o": "eq"}
            )

        if workspace_id:
            query["filter"].append({"k": "workspace_id", "v": workspace_id, "o": "eq"})

        if metric_type == "COUNTER":
            query = self._append_datetime_filter(
                query, date_field=date_field, is_yesterday=is_yesterday
            )

        try:
            if metric_vo.resource_type == "inventory.CloudService":
                return self._analyze_cloud_service(query, domain_id)
            elif metric_vo.resource_type.startswith("inventory.CloudService:"):
                # Format: "inventory.CloudService:<provider>.<group>.<type>"
                cloud_service_type_key = metric_vo.resource_type.split(":")[-1]
                return self._analyze_cloud_service(
                    query, domain_id, cloud_service_type_key
                )
            else:
                raise ERROR_NOT_SUPPORT_RESOURCE_TYPE(resource_type=resource_type)
        except Exception as e:
            # NOTE(review): this broad except also converts the
            # unsupported-type error raised just above into
            # ERROR_WRONG_QUERY_OPTIONS — confirm that is intended.
            _LOGGER.error(
                f"[analyze_resource] Failed to analyze query: {e}",
                exc_info=True,
            )
            raise ERROR_WRONG_QUERY_OPTIONS(
                query_options=utils.dump_json(metric_vo.query_options)
            )
+
+    @staticmethod
+    def _append_workspace_filter(query: dict, workspace_id: str) -> dict:
+        query["filter"] = query.get("filter", [])
+        query["filter"].append({"k": "workspace_id", "v": workspace_id, "o": "in"})
+        return query
+
    @staticmethod
    def _append_datetime_filter(
        query: dict,
        date_field: str = "created_at",
        is_yesterday: bool = False,
    ) -> dict:
        """Constrain a COUNTER query to one scheduler day: [end - 1 day, end),
        where `end` is today's (or yesterday's) METRIC_SCHEDULE_HOUR UTC.

        NOTE(review): these filters use the long-form "key"/"value" names
        while the rest of the file uses "k"/"v" — presumably both are accepted
        by the query parser; confirm.
        """
        scheduler_hour = config.get_global("METRIC_SCHEDULE_HOUR", 0)
        end = datetime.utcnow().replace(
            hour=scheduler_hour, minute=0, second=0, microsecond=0
        )

        if is_yesterday:
            end = end - relativedelta(days=1)

        start = end - relativedelta(days=1)

        query["filter"] = query.get("filter", [])
        query["filter"].extend(
            [
                {"key": date_field, "value": start, "o": "gte"},
                {"key": date_field, "value": end, "o": "lt"},
            ]
        )
        return query
+
+    @staticmethod
+    def _analyze_cloud_service(
+        query: dict,
+        domain_id: str,
+        cloud_service_type_key: str = None,
+    ) -> list:
+        default_group_by = [
+            "collection_info.service_account_id",
+            "project_id",
+            "workspace_id",
+        ]
+        changed_group_by = []
+        changed_group_by += copy.deepcopy(default_group_by)
+
+        for group_option in query.get("group_by", []):
+            if isinstance(group_option, dict):
+                key = group_option.get("key")
+            else:
+                key = group_option
+
+            if key not in default_group_by:
+                changed_group_by.append(group_option)
+
+        query["group_by"] = changed_group_by
+        query["filter"] = query.get("filter", [])
+        query["filter"].append({"k": "domain_id", "v": domain_id, "o": "eq"})
+
+        if cloud_service_type_key:
+            try:
+                (
+                    provider,
+                    cloud_service_group,
+                    cloud_service_type,
+                ) = cloud_service_type_key.split(".")
+                query["filter"].append({"k": f"provider", "v": provider, "o": "eq"})
+                query["filter"].append(
+                    {"k": f"cloud_service_group", "v": cloud_service_group, "o": "eq"}
+                )
+                query["filter"].append(
+                    {"k": f"cloud_service_type", "v": cloud_service_type, "o": "eq"}
+                )
+            except Exception as e:
+                raise ERROR_NOT_SUPPORT_RESOURCE_TYPE(
+                    resource_type=f"inventory.CloudService:{cloud_service_type_key}"
+                )
+
+        if "select" in query:
+            for group_by_key in ["service_account_id", "project_id", "workspace_id"]:
+                query["select"][group_by_key] = group_by_key
+
+        _LOGGER.debug(f"[_analyze_cloud_service] Analyze Query: {query}")
+        cloud_svc_mgr = CloudServiceManager()
+        response = cloud_svc_mgr.analyze_cloud_services(
+            query, change_filter=True, domain_id=domain_id
+        )
+        return response.get("results", [])
+
    @cache.cacheable(key="inventory:managed-metric:{domain_id}:sync", expire=300)
    def create_managed_metric(self, domain_id: str) -> bool:
        """Sync built-in (managed) metric definitions into this domain.

        Creates missing managed metrics and updates installed ones whose
        version differs. Cached for 5 minutes per domain so repeated list
        calls don't re-run the sync.
        """
        managed_resource_mgr = ManagedResourceManager()

        metric_vos = self.filter_metrics(domain_id=domain_id, is_managed=True)

        # metric_id -> installed version, to detect out-of-date definitions.
        installed_metric_version_map = {}
        for metric_vo in metric_vos:
            installed_metric_version_map[metric_vo.metric_id] = metric_vo.version

        managed_metric_map = managed_resource_mgr.get_managed_metrics()

        for managed_metric_id, managed_metric_info in managed_metric_map.items():
            managed_metric_info["domain_id"] = domain_id
            managed_metric_info["is_managed"] = True
            managed_metric_info["resource_group"] = "DOMAIN"
            managed_metric_info["workspace_id"] = "*"

            if metric_version := installed_metric_version_map.get(managed_metric_id):
                if metric_version != managed_metric_info["version"]:
                    _LOGGER.debug(
                        f"[create_managed_metric] update managed metric: {managed_metric_id}"
                    )
                    metric_vo = self.get_metric(managed_metric_id, domain_id)
                    self.update_metric_by_vo(managed_metric_info, metric_vo)
            else:
                _LOGGER.debug(
                    f"[create_managed_metric] create new managed metric: {managed_metric_id}"
                )
                self.create_metric(managed_metric_info)

        return True
+
+    def _save_query_result(
+        self, metric_vo: Metric, result: dict, created_at: datetime, metric_job_id: str
+    ) -> None:
+        data = {
+            "metric_id": metric_vo.metric_id,
+            "metric_job_id": metric_job_id,
+            "value": result["value"],
+            "unit": metric_vo.unit,
+            "labels": {},
+            "namespace_id": metric_vo.namespace_id,
+            "service_account_id": result.get("service_account_id"),
+            "project_id": result.get("project_id"),
+            "workspace_id": result["workspace_id"],
+            "domain_id": metric_vo.domain_id,
+            "created_year": created_at.strftime("%Y"),
+            "created_month": created_at.strftime("%Y-%m"),
+            "created_date": created_at.strftime("%Y-%m-%d"),
+        }
+
+        for key, value in result.items():
+            if key not in [
+                "service_account_id",
+                "project_id",
+                "workspace_id",
+                "domain_id",
+                "value",
+            ]:
+                data["labels"][key] = value
+
+        self.metric_data_mgr.create_metric_data(data)
+
+        if metric_vo.metric_type == "GAUGE":
+            self.metric_data_mgr.create_monthly_metric_data(data)
+
    def _aggregate_monthly_metric_data(
        self, metric_vo: Metric, created_at: datetime, metric_job_id: str
    ) -> None:
        """Roll this job's daily COUNTER rows up into monthly documents.

        Sums `value` over the current month, grouped by the metric's label
        keys, and writes one monthly document per group.
        """
        domain_id = metric_vo.domain_id
        metric_id = metric_vo.metric_id
        created_month = created_at.strftime("%Y-%m")
        created_year = created_at.strftime("%Y")
        group_by = []

        for label_info in metric_vo.labels_info:
            group_by.append(label_info["key"])

        query = {
            "group_by": group_by,
            "fields": {
                "value": {
                    "key": "value",
                    "operator": "sum",
                }
            },
            "filter": [
                {"k": "metric_id", "v": metric_id, "o": "eq"},
                {"k": "domain_id", "v": domain_id, "o": "eq"},
                {"k": "created_month", "v": created_month, "o": "eq"},
                {"k": "metric_job_id", "v": metric_job_id, "o": "eq"},
            ],
        }

        # status="IN_PROGRESS" so the rows this job just wrote are included.
        response = self.metric_data_mgr.analyze_metric_data(
            query, target="PRIMARY", status="IN_PROGRESS"
        )
        results = response.get("results", [])

        _LOGGER.debug(
            f"[_aggregate_monthly_metric_data] Aggregate query results ({metric_id}): {len(results)}"
        )

        for result in results:
            data = {
                "metric_id": metric_vo.metric_id,
                "metric_job_id": metric_job_id,
                "value": result["value"],
                "unit": metric_vo.unit,
                "labels": {},
                "namespace_id": metric_vo.namespace_id,
                "service_account_id": result.get("service_account_id"),
                "project_id": result.get("project_id"),
                "workspace_id": result["workspace_id"],
                "domain_id": metric_vo.domain_id,
                "created_year": created_year,
                "created_month": created_month,
            }

            # Everything outside the identity/value keys is a label dimension.
            for key, value in result.items():
                if key not in [
                    "service_account_id",
                    "project_id",
                    "workspace_id",
                    "domain_id",
                    "value",
                ]:
                    data["labels"][key] = value

            self.metric_data_mgr.create_monthly_metric_data(data)
+
+    def _delete_changed_metric_data(
+        self, metric_vo: Metric, created_at: datetime, metric_job_id: str
+    ) -> None:
+        domain_id = metric_vo.domain_id
+        metric_id = metric_vo.metric_id
+        created_date = created_at.strftime("%Y-%m-%d")
+
+        query = {
+            "filter": [
+                {"k": "metric_id", "v": metric_id, "o": "eq"},
+                {"k": "domain_id", "v": domain_id, "o": "eq"},
+                {"k": "created_date", "v": created_date, "o": "eq"},
+                {"k": "metric_job_id", "v": metric_job_id, "o": "not"},
+            ]
+        }
+
+        metric_data_vos, total_count = self.metric_data_mgr.list_metric_data(query)
+
+        _LOGGER.debug(
+            f"[_delete_changed_metric_data] delete count: {metric_data_vos.count()}"
+        )
+        metric_data_vos.delete()
+
+    def _delete_changed_monthly_metric_data(
+        self, metric_vo: Metric, created_at: datetime, metric_job_id: str
+    ):
+        domain_id = metric_vo.domain_id
+        metric_id = metric_vo.metric_id
+        created_month = created_at.strftime("%Y-%m")
+
+        query = {
+            "filter": [
+                {"k": "metric_id", "v": metric_id, "o": "eq"},
+                {"k": "domain_id", "v": domain_id, "o": "eq"},
+                {"k": "created_month", "v": created_month, "o": "eq"},
+                {"k": "metric_job_id", "v": metric_job_id, "o": "not"},
+            ]
+        }
+
+        (
+            monthly_metric_data_vos,
+            total_count,
+        ) = self.metric_data_mgr.list_monthly_metric_data(query)
+
+        _LOGGER.debug(
+            f"[_delete_changed_monthly_metric_data] delete count: {monthly_metric_data_vos.count()}"
+        )
+        monthly_metric_data_vos.delete()
+
+    def _rollback_query_results(
+        self, metric_vo: Metric, created_at: datetime, metric_job_id: str
+    ):
+        _LOGGER.warning(
+            f"[_rollback_query_results] Rollback Query Results ({metric_vo.metric_id}): {metric_job_id}"
+        )
+        metric_id = metric_vo.metric_id
+        domain_id = metric_vo.domain_id
+
+        metric_data_vos = self.metric_data_mgr.filter_metric_data(
+            metric_id=metric_id,
+            domain_id=domain_id,
+            created_date=created_at.strftime("%Y-%m-%d"),
+            status="IN_PROGRESS",
+            metric_job_id=metric_job_id,
+        )
+        metric_data_vos.delete()
+
+        monthly_metric_data_vos = self.metric_data_mgr.filter_monthly_metric_data(
+            metric_id=metric_id,
+            domain_id=domain_id,
+            created_month=created_at.strftime("%Y-%m"),
+            status="IN_PROGRESS",
+            metric_job_id=metric_job_id,
+        )
+        monthly_metric_data_vos.delete()
+
+    def _update_status(
+        self, metric_vo: Metric, created_at: datetime, metric_job_id: str
+    ) -> None:
+        _LOGGER.debug(
+            f"[_update_status] Update metric data status ({metric_vo.metric_id}): {metric_job_id}"
+        )
+
+        domain_id = metric_vo.domain_id
+        metric_id = metric_vo.metric_id
+        created_date = created_at.strftime("%Y-%m-%d")
+        created_month = created_at.strftime("%Y-%m")
+
+        metric_data_vos = self.metric_data_mgr.filter_metric_data(
+            metric_id=metric_id,
+            domain_id=domain_id,
+            created_date=created_date,
+            status="IN_PROGRESS",
+            metric_job_id=metric_job_id,
+        )
+        metric_data_vos.update({"status": "DONE"})
+
+        monthly_metric_data_vos = self.metric_data_mgr.filter_monthly_metric_data(
+            metric_id=metric_id,
+            domain_id=domain_id,
+            created_month=created_month,
+            status="IN_PROGRESS",
+            metric_job_id=metric_job_id,
+        )
+        monthly_metric_data_vos.update({"status": "DONE"})
+
+    def _delete_invalid_metric_data(
+        self, metric_vo: Metric, metric_job_id: str
+    ) -> None:
+        domain_id = metric_vo.domain_id
+        metric_id = metric_vo.metric_id
+
+        query = {
+            "filter": [
+                {"k": "metric_id", "v": metric_id, "o": "eq"},
+                {"k": "domain_id", "v": domain_id, "o": "eq"},
+                {"k": "status", "v": "IN_PROGRESS", "o": "eq"},
+                {"k": "metric_job_id", "v": metric_job_id, "o": "not"},
+            ]
+        }
+
+        metric_data_vos, total_count = self.metric_data_mgr.list_metric_data(
+            query, status="IN_PROGRESS"
+        )
+
+        if metric_data_vos.count() > 0:
+            _LOGGER.debug(
+                f"[_delete_invalid_metric_data] delete metric data count: {metric_data_vos.count()}"
+            )
+            metric_data_vos.delete()
+
+        (
+            monthly_metric_data_vos,
+            total_count,
+        ) = self.metric_data_mgr.list_monthly_metric_data(query, status="IN_PROGRESS")
+
+        if monthly_metric_data_vos.count() > 0:
+            _LOGGER.debug(
+                f"[_delete_invalid_metric_data] delete monthly metric data count: {monthly_metric_data_vos.count()}"
+            )
+            monthly_metric_data_vos.delete()
+
+    def _delete_old_metric_data(self, metric_vo: Metric) -> None:
+        now = datetime.utcnow().date()
+        domain_id = metric_vo.domain_id
+        metric_id = metric_vo.metric_id
+        old_created_month = (now - relativedelta(months=12)).strftime("%Y-%m")
+        old_created_year = (now - relativedelta(months=36)).strftime("%Y")
+
+        delete_query = {
+            "filter": [
+                {"k": "created_month", "v": old_created_month, "o": "lt"},
+                {"k": "metric_id", "v": metric_id, "o": "eq"},
+                {"k": "domain_id", "v": domain_id, "o": "eq"},
+            ]
+        }
+
+        metric_data_vos, total_count = self.metric_data_mgr.list_metric_data(
+            delete_query
+        )
+
+        if total_count > 0:
+            _LOGGER.debug(
+                f"[_delete_old_metric_data] delete metric data count: {total_count}"
+            )
+            metric_data_vos.delete()
+
+        monthly_delete_query = {
+            "filter": [
+                {"k": "created_year", "v": old_created_year, "o": "lt"},
+                {"k": "metric_id", "v": metric_id, "o": "eq"},
+                {"k": "domain_id", "v": domain_id, "o": "eq"},
+            ]
+        }
+
+        (
+            monthly_metric_data_vos,
+            total_count,
+        ) = self.metric_data_mgr.list_monthly_metric_data(monthly_delete_query)
+
+        if total_count > 0:
+            _LOGGER.debug(
+                f"[_delete_old_metric_data] delete monthly metric data count: {total_count}"
+            )
+            monthly_metric_data_vos.delete()
+
    @staticmethod
    def _delete_analyze_cache(domain_id: str, metric_id: str) -> None:
        # Invalidate every cached analyze result and the query-history entry
        # for this metric so the next analyze call reads fresh data.
        cache.delete_pattern(f"inventory:metric-data:*:{domain_id}:{metric_id}:*")
        cache.delete_pattern(f"inventory:metric-query-history:{domain_id}:{metric_id}")
+
+    @staticmethod
+    def _get_labels_info(query_options: dict) -> list:
+        query_options = copy.deepcopy(query_options)
+        labels_info = [
+            {
+                "key": "workspace_id",
+                "name": "Workspace",
+                "reference": {
+                    "resource_type": "identity.Workspace",
+                    "reference_key": "workspace_id",
+                },
+            },
+            {
+                "key": "project_id",
+                "name": "Project",
+                "reference": {
+                    "resource_type": "identity.Project",
+                    "reference_key": "project_id",
+                },
+            },
+            {
+                "key": "service_account_id",
+                "name": "Service Account",
+                "search_key": "collection_info.service_account_id",
+                "reference": {
+                    "resource_type": "identity.ServiceAccount",
+                    "reference_key": "service_account_id",
+                },
+            },
+        ]
+        for group_option in query_options.get("group_by", []):
+            if isinstance(group_option, dict):
+                key = group_option.get("key")
+                name = group_option.get("name")
+                label_info = group_option
+            else:
+                key = group_option
+                name = key.rsplit(".", 1)[-1]
+                label_info = {"key": key, "name": name}
+
+            if key not in ["service_account_id", "project_id", "workspace_id"]:
+                label_info["key"] = f"labels.{name}"
+
+            if "search_key" not in label_info:
+                label_info["search_key"] = key
+
+            labels_info.append(label_info)
+
+        return labels_info
diff --git a/src/spaceone/inventory_v2/manager/region_manager.py b/src/spaceone/inventory_v2/manager/region_manager.py
index 365d6c8..9f489df 100644
--- a/src/spaceone/inventory_v2/manager/region_manager.py
+++ b/src/spaceone/inventory_v2/manager/region_manager.py
@@ -4,7 +4,7 @@
 from spaceone.core.model.mongo_model import QuerySet
 from spaceone.core.manager import BaseManager
 from spaceone.inventory_v2.lib.resource_manager import ResourceManager
-from spaceone.inventory_v2.model.region_model import Region
+from spaceone.inventory_v2.model.region.region_model import Region
 
 _LOGGER = logging.getLogger(__name__)
 
diff --git a/src/spaceone/inventory_v2/model/__init__.py b/src/spaceone/inventory_v2/model/__init__.py
index 43c4165..cdd606b 100644
--- a/src/spaceone/inventory_v2/model/__init__.py
+++ b/src/spaceone/inventory_v2/model/__init__.py
@@ -1,6 +1,9 @@
-from spaceone.inventory_v2.model.region_model import Region
+from spaceone.inventory_v2.model.region.region_model import Region
 from spaceone.inventory_v2.model.collector.database import Collector
 from spaceone.inventory_v2.model.collector_rule.database import CollectorRule
 from spaceone.inventory_v2.model.collection_state.database import CollectionState
+from spaceone.inventory_v2.model.metric.database import Metric
+from spaceone.inventory_v2.model.metric_data.database import MetricData
+from spaceone.inventory_v2.model.metric_example.database import MetricExample
 from spaceone.inventory_v2.model.job.database import Job
 from spaceone.inventory_v2.model.job_task.database import JobTask
diff --git a/src/spaceone/inventory_v2/model/asset/__init__.py b/src/spaceone/inventory_v2/model/asset/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/asset/database.py b/src/spaceone/inventory_v2/model/asset/database.py
new file mode 100644
index 0000000..a0ea594
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/asset/database.py
@@ -0,0 +1,183 @@
+from mongoengine import *
+from datetime import datetime
+
+from spaceone.core.model.mongo_model import MongoModel
+
+# from spaceone.inventory_v2.model.reference_resource_model import ReferenceResource
+from spaceone.inventory_v2.model.asset_type.database import AssetType
+
+from spaceone.inventory_v2.error.asset import ERROR_RESOURCE_ALREADY_DELETED
+from spaceone.inventory_v2.model.region.region_model import Region
+
+
class CollectionInfo(EmbeddedDocument):
    """Provenance of the collection run that last touched an asset."""

    # Identifiers of the service account / secret / collector used.
    service_account_id = StringField(max_length=40)
    secret_id = StringField(max_length=40)
    collector_id = StringField(max_length=40)
    # Refreshed automatically on every save.
    last_collected_at = DateTimeField(auto_now=True)

    def to_dict(self):
        """Return this embedded document as a plain dict."""
        return dict(self.to_mongo())
+
+
class Asset(MongoModel):
    """Inventory asset document.

    Assets are soft-deleted: ``delete()`` sets state to DELETED and stamps
    ``deleted_at`` instead of removing the document.  Both ``update()`` and
    ``delete()`` raise ERROR_RESOURCE_ALREADY_DELETED on a DELETED asset.
    """

    asset_id = StringField(max_length=40, generate_id="asset", unique=True)
    # NOTE(review): unique=True combined with default=None will raise a
    # duplicate-key error on the second asset saved without a record id;
    # a sparse unique index may be intended — TODO confirm.
    asset_record_id = StringField(max_length=255, default=None, unique=True)
    name = StringField(default=None, null=True)
    state = StringField(
        max_length=20, choices=("ACTIVE", "DISCONNECTED", "DELETED"), default="ACTIVE"
    )
    account = StringField(max_length=255, default=None, null=True)
    instance_type = StringField(max_length=255, default=None, null=True)
    # max_length removed: it is a string-field option, not valid on FloatField.
    instance_size = FloatField(default=None, null=True)
    # default=list avoids sharing one mutable list object as the default.
    ip_addresses = ListField(StringField(max_length=255), default=list)
    asset_type_id = StringField(max_length=40)
    provider = StringField(max_length=255)
    data = DictField()
    metadata = DictField()
    # reference = EmbeddedDocumentField(ReferenceResource, default={})
    tags = DictField()
    tag_keys = DictField()
    region_code = StringField(max_length=255, default=None, null=True)
    ref_region = StringField(max_length=255, default=None, null=True)
    project_id = StringField(max_length=40)
    workspace_id = StringField(max_length=40)
    domain_id = StringField(max_length=40)
    collection_info = EmbeddedDocumentField(CollectionInfo, default=CollectionInfo)
    created_at = DateTimeField(auto_now_add=True)
    updated_at = DateTimeField(auto_now=True)
    deleted_at = DateTimeField(default=None, null=True)

    meta = {
        "updatable_fields": [
            "name",
            "data",
            "state",
            "account",
            "instance_type",
            "instance_size",
            "ip_addresses",
            "metadata",
            # NOTE(review): "reference" field is commented out above — confirm
            # before the ReferenceResource model is re-enabled.
            "reference",
            "tags",
            "tag_keys",
            "project_id",
            "region_code",
            "asset_type_id",  # was "asset_type": no such field on this model
            "collection_info",
            "updated_at",
            "deleted_at",
        ],
        "minimal_fields": [
            "asset_id",
            "name",
            "asset_type_id",
            "provider",
            # NOTE(review): depends on the commented-out "reference" field.
            "reference.resource_id",
            "region_code",
            "project_id",
        ],
        "change_query_keys": {
            "user_projects": "project_id",
            "ip_address": "ip_addresses",
        },
        "reference_query_keys": {
            "ref_asset_type": {
                "model": AssetType,
                "foreign_key": "ref_asset_type",
            },
            "ref_region": {"model": Region, "foreign_key": "ref_region"},
        },
        "indexes": [
            {
                "fields": ["domain_id", "workspace_id", "state"],
                "name": "COMPOUND_INDEX_FOR_GC_1",
            },
            {
                "fields": ["domain_id", "state", "updated_at"],
                "name": "COMPOUND_INDEX_FOR_GC_2",
            },
            {
                "fields": ["domain_id", "state", "-deleted_at"],
                "name": "COMPOUND_INDEX_FOR_GC_3",
            },
            {
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "state",
                    "reference.resource_id",
                    "provider",
                    "asset_type_id",
                    "asset_id",
                    "account",
                ],
                "name": "COMPOUND_INDEX_FOR_COLLECTOR",
            },
            {
                # "cloud_service_group"/"cloud_service_type" replaced with
                # "asset_type_id": they were leftovers from the cloud-service
                # model and do not exist on Asset.
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "state",
                    "provider",
                    "asset_type_id",
                    "project_id",
                    "region_code",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_1",
            },
            {
                # "ref_cloud_service_type" renamed to match reference_query_keys.
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "state",
                    "ref_asset_type",
                    "project_id",
                    "region_code",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_2",
            },
            {
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "state",
                    "-created_at",
                    "project_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_3",
            },
            {
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "state",
                    "-deleted_at",
                    "project_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_4",
            },
            "reference.resource_id",
            "state",
            "workspace_id",
            "domain_id",
        ],
    }

    def update(self, data):
        """Update the document; refuse updates to a soft-deleted asset."""
        if self.state == "DELETED":
            raise ERROR_RESOURCE_ALREADY_DELETED(
                resource_type="Asset", resource_id=self.asset_id
            )

        return super().update(data)

    def delete(self):
        """Soft-delete: mark DELETED and stamp deleted_at (idempotence is
        rejected — deleting twice raises)."""
        if self.state == "DELETED":
            raise ERROR_RESOURCE_ALREADY_DELETED(
                resource_type="Asset", resource_id=self.asset_id
            )

        self.update({"state": "DELETED", "deleted_at": datetime.utcnow()})
diff --git a/src/spaceone/inventory_v2/model/asset/request.py b/src/spaceone/inventory_v2/model/asset/request.py
new file mode 100644
index 0000000..e36bcea
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/asset/request.py
@@ -0,0 +1,15 @@
+from typing import Union, Literal, List
+from pydantic import BaseModel
+
+__all__ = [
+    "AssetCreateRequest",
+    "AssetUpdateRequest",
+]
+
+
class AssetCreateRequest(BaseModel):
    """Request body for Asset.create (fields not yet defined)."""

    pass


class AssetUpdateRequest(BaseModel):
    """Request body for Asset.update (fields not yet defined)."""

    pass
diff --git a/src/spaceone/inventory_v2/model/asset/response.py b/src/spaceone/inventory_v2/model/asset/response.py
new file mode 100644
index 0000000..e459f63
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/asset/response.py
@@ -0,0 +1,5 @@
+from pydantic import BaseModel
+
+
class AssetResponse(BaseModel):
    """Asset API response body (fields not yet defined)."""

    pass
diff --git a/src/spaceone/inventory_v2/model/asset_group/__init__.py b/src/spaceone/inventory_v2/model/asset_group/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/asset_type/__init__.py b/src/spaceone/inventory_v2/model/asset_type/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/asset_type/database.py b/src/spaceone/inventory_v2/model/asset_type/database.py
new file mode 100644
index 0000000..fa69222
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/asset_type/database.py
@@ -0,0 +1,89 @@
+from mongoengine import *
+
+from spaceone.core.model.mongo_model import MongoModel
+
+
class AssetType(MongoModel):
    """Asset type document.

    ``name`` is unique within (provider, asset_group_id, workspace_id,
    domain_id).
    """

    asset_type_id = StringField(max_length=40, generate_id="asset-type", unique=True)
    name = StringField(
        max_length=255,
        unique_with=["provider", "asset_group_id", "workspace_id", "domain_id"],
    )
    provider = StringField(max_length=255)
    # Fixed typo: was "asset_grou_id", which also left the unique_with
    # constraint above pointing at a nonexistent field.
    asset_group_id = StringField(max_length=255)
    # cloud_service_type_key = StringField(max_length=255)
    # ref_cloud_service_type = StringField(max_length=255)
    service_code = StringField(max_length=255, default=None, null=True)
    # is_primary = BooleanField(default=False)
    # is_major = BooleanField(default=False)
    resource_type = StringField(max_length=255)
    labels = ListField(StringField(max_length=255))
    metadata = DictField()
    tags = DictField()
    workspace_id = StringField(max_length=40)
    domain_id = StringField(max_length=40)
    updated_by = StringField(default=None, null=True)
    created_at = DateTimeField(auto_now_add=True)
    updated_at = DateTimeField(auto_now=True)

    meta = {
        "updatable_fields": [
            # "cloud_service_type_key",
            "service_code",
            # "is_primary",
            # "is_major",
            "resource_type",
            "metadata",
            "labels",
            "tags",
            "updated_by",
            "updated_at",
        ],
        "minimal_fields": [
            "asset_type_id",
            "name",
            "provider",
            "asset_group_id",  # was "group": no such field on this model
            "service_code",
            # "is_primary",
            # "is_major",
            "resource_type",
        ],
        "ordering": ["provider", "asset_group_id", "name"],
        "indexes": [
            {
                "fields": ["domain_id", "-updated_at", "updated_by"],
                "name": "COMPOUND_INDEX_FOR_GC_1",
            },
            {
                "fields": ["domain_id", "workspace_id", "asset_type_id"],
                "name": "COMPOUND_INDEX_FOR_SEARCH_1",
            },
            {
                # "group" replaced with "asset_group_id"; "is_primary" dropped
                # because the field is commented out above.
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "provider",
                    "asset_group_id",
                    "name",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_2",
            },
            # {
            #     "fields": ["domain_id", "workspace_id", "cloud_service_type_key"],
            #     "name": "COMPOUND_INDEX_FOR_SEARCH_3",
            # },
            # {
            #     "fields": ["cloud_service_type_id", "ref_cloud_service_type"],
            #     "name": "COMPOUND_INDEX_FOR_REF_1",
            # },
            # {
            #     "fields": ["labels", "is_primary", "ref_cloud_service_type"],
            #     "name": "COMPOUND_INDEX_FOR_REF_2",
            # },
            # "ref_cloud_service_type",
            "workspace_id",
            "domain_id",
        ],
    }
diff --git a/src/spaceone/inventory_v2/model/collection_state/database.py b/src/spaceone/inventory_v2/model/collection_state/database.py
index 7c2a218..251213c 100644
--- a/src/spaceone/inventory_v2/model/collection_state/database.py
+++ b/src/spaceone/inventory_v2/model/collection_state/database.py
@@ -6,7 +6,7 @@ class CollectionState(MongoModel):
     collector_id = StringField(max_length=40)
     job_task_id = StringField(max_length=40)
     secret_id = StringField(max_length=40)
-    cloud_service_id = StringField(max_length=40)
+    asset_id = StringField(max_length=40)
     disconnected_count = IntField(default=0)
     domain_id = StringField(max_length=40)
     updated_at = DateTimeField(auto_now=True)
@@ -17,7 +17,7 @@ class CollectionState(MongoModel):
             {
                 "fields": [
                     "domain_id",
-                    "cloud_service_id",
+                    "asset_id",
                     "collector_id",
                     "secret_id",
                 ],
@@ -28,7 +28,7 @@ class CollectionState(MongoModel):
                 "name": "COMPOUND_INDEX_FOR_DELETE_1",
             },
             {
-                "fields": ["domain_id", "cloud_service_id"],
+                "fields": ["domain_id", "asset_id"],
                 "name": "COMPOUND_INDEX_FOR_DELETE_2",
             },
             {
@@ -41,6 +41,6 @@ class CollectionState(MongoModel):
                 ],
                 "name": "COMPOUND_INDEX_FOR_DELETE_3",
             },
-            "cloud_service_id",
+            "asset_id",
         ],
     }
diff --git a/src/spaceone/inventory_v2/model/collector/database.py b/src/spaceone/inventory_v2/model/collector/database.py
index 0e085dc..dbb7aeb 100644
--- a/src/spaceone/inventory_v2/model/collector/database.py
+++ b/src/spaceone/inventory_v2/model/collector/database.py
@@ -47,7 +47,6 @@ class Collector(MongoModel):
     collector_id = StringField(max_length=40, generate_id="collector", unique=True)
     name = StringField(max_length=255)
     provider = StringField(max_length=40, default=None, null=True)
-    capability = DictField()
     plugin_info = EmbeddedDocumentField(PluginInfo, default=None, null=True)
     schedule = EmbeddedDocumentField(Scheduled, default=None, null=False)
     secret_filter = EmbeddedDocumentField(SecretFilter, default=None, null=True)
@@ -72,7 +71,6 @@ class Collector(MongoModel):
             "collector_id",
             "name",
             "provider",
-            "capability",
             "plugin_info",
             "resource_group",
         ],
diff --git a/src/spaceone/inventory_v2/model/collector/request.py b/src/spaceone/inventory_v2/model/collector/request.py
index 89cefdb..5e511b2 100644
--- a/src/spaceone/inventory_v2/model/collector/request.py
+++ b/src/spaceone/inventory_v2/model/collector/request.py
@@ -10,7 +10,7 @@
     "CollectorGetRequest",
     "CollectorSearchQueryRequest",
     "CollectorStatQueryRequest",
-    "CollectorCollectRequest"
+    "CollectorCollectRequest",
 ]
 
 ScheduleState = Literal["ENABLED", "DISABLED"]
@@ -20,9 +20,9 @@
 
 class CollectorCreateRequest(BaseModel):
     name: str
-    plugin_info: dict
-    schedule: dict
     provider: Union[str, None] = None
+    plugin_info: dict
+    schedule: Union[dict, None] = None
     secret_filter: Union[dict, None] = None
     tags: Union[dict, None] = None
     resource_group: ResourceGroup
diff --git a/src/spaceone/inventory_v2/model/collector/response.py b/src/spaceone/inventory_v2/model/collector/response.py
index 4ce5b09..0c31422 100644
--- a/src/spaceone/inventory_v2/model/collector/response.py
+++ b/src/spaceone/inventory_v2/model/collector/response.py
@@ -16,7 +16,6 @@ class CollectorResponse(BaseModel):
     collector_id: Union[str, None] = None
     name: Union[str, None] = None
     provider: Union[str, None] = None
-    capability: Union[dict, None] = None
     secret_filter: Union[dict, None] = None
     plugin_info: Union[dict, None] = None
     schedule: Union[dict, None] = None
@@ -32,7 +31,9 @@ def dict(self, *args, **kwargs):
         data = super().dict(*args, **kwargs)
         data["created_at"] = utils.datetime_to_iso8601(data["created_at"])
         data["updated_at"] = utils.datetime_to_iso8601(data["updated_at"])
-        data["last_collected_at"] = utils.datetime_to_iso8601(data.get("last_collected_at"))
+        data["last_collected_at"] = utils.datetime_to_iso8601(
+            data.get("last_collected_at")
+        )
         return data
 
 
diff --git a/src/spaceone/inventory_v2/model/metric/__init__.py b/src/spaceone/inventory_v2/model/metric/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/metric/database.py b/src/spaceone/inventory_v2/model/metric/database.py
new file mode 100644
index 0000000..e762218
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric/database.py
@@ -0,0 +1,64 @@
+from mongoengine import *
+from spaceone.core.model.mongo_model import MongoModel
+
+
class Metric(MongoModel):
    """Metric definition document.

    ``metric_id`` is unique per domain; ``metric_job_id`` identifies the
    aggregation job that last wrote data for this metric.
    """

    metric_id = StringField(max_length=80, unique_with="domain_id")
    metric_job_id = StringField(max_length=40)
    name = StringField(max_length=40)
    # DONE by default; set to IN_PROGRESS while an aggregation job runs —
    # TODO confirm against the manager's run logic.
    status = StringField(max_length=20, choices=["IN_PROGRESS", "DONE"], default="DONE")
    metric_type = StringField(max_length=40, choices=["COUNTER", "GAUGE"])
    resource_type = StringField()
    # NOTE(review): required=True together with default=None is contradictory —
    # required already rejects empty values; confirm the intended default.
    query_options = DictField(required=True, default=None)
    date_field = StringField(default=None)
    unit = StringField(default=None)
    tags = DictField(default=None)
    # One entry per label; shape produced by MetricManager._get_labels_info.
    labels_info = ListField(DictField())
    is_managed = BooleanField(default=False)
    # presumably cleared after the first successful run — verify against caller
    is_new = BooleanField(default=True)
    version = StringField(max_length=40, default=None, null=True)
    plugin_id = StringField(max_length=40, default=None, null=True)
    namespace_id = StringField(max_length=80)
    resource_group = StringField(max_length=40, choices=("DOMAIN", "WORKSPACE"))
    domain_id = StringField(max_length=40)
    workspace_id = StringField(max_length=40)
    created_at = DateTimeField(auto_now_add=True)
    updated_at = DateTimeField(auto_now=True)

    meta = {
        "updatable_fields": [
            "metric_job_id",
            "name",
            "status",
            "query_options",
            "date_field",
            "unit",
            "tags",
            "labels_info",
            "is_new",
            "version",
            "updated_at",
        ],
        "minimal_fields": [
            "metric_id",
            "name",
            "metric_type",
            "resource_type",
            "namespace_id",
        ],
        "ordering": ["namespace_id", "name"],
        "indexes": [
            {
                "fields": [
                    "domain_id",
                    "workspace_id",
                    "namespace_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_1",
            },
            "metric_type",
            "resource_type",
            "is_managed",
            "namespace_id",
        ],
    }
diff --git a/src/spaceone/inventory_v2/model/metric/request.py b/src/spaceone/inventory_v2/model/metric/request.py
new file mode 100644
index 0000000..7a8c1a6
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric/request.py
@@ -0,0 +1,86 @@
+from typing import Union, Literal
+from pydantic import BaseModel
+
+__all__ = [
+    "MetricCreateRequest",
+    "MetricUpdateRequest",
+    "MetricDeleteRequest",
+    "MetricRunRequest",
+    "MetricTestRequest",
+    "MetricGetRequest",
+    "MetricSearchQueryRequest",
+    "MetricStatQueryRequest",
+    "MetricType",
+    "ResourceGroup",
+]
+
+MetricType = Literal["COUNTER", "GAUGE"]
+ResourceGroup = Literal["DOMAIN", "WORKSPACE"]
+
+
class MetricCreateRequest(BaseModel):
    """Request body for Metric.create."""

    metric_id: Union[str, None] = None
    name: str
    metric_type: MetricType
    resource_type: Union[str, None] = None
    query_options: dict
    date_field: Union[str, None] = None
    unit: Union[str, None] = None
    tags: Union[dict, None] = {}
    namespace_id: str
    resource_group: ResourceGroup
    workspace_id: Union[str, None] = None
    domain_id: str


class MetricUpdateRequest(BaseModel):
    """Request body for Metric.update."""

    metric_id: str
    name: Union[str, None] = None
    query_options: Union[dict, None] = None
    date_field: Union[str, None] = None
    unit: Union[str, None] = None
    tags: Union[dict, None] = None
    workspace_id: Union[str, None] = None
    domain_id: str


class MetricDeleteRequest(BaseModel):
    """Request body for Metric.delete."""

    metric_id: str
    workspace_id: Union[str, None] = None
    domain_id: str


class MetricRunRequest(BaseModel):
    """Request body for Metric.run (trigger an aggregation)."""

    metric_id: str
    workspace_id: Union[str, None] = None
    domain_id: str


class MetricTestRequest(BaseModel):
    """Request body for Metric.test (dry-run with override query options)."""

    metric_id: str
    query_options: Union[dict, None] = None
    workspace_id: Union[str, None] = None
    domain_id: str


class MetricGetRequest(BaseModel):
    """Request body for Metric.get."""

    metric_id: str
    workspace_id: Union[str, list, None] = None
    domain_id: str


class MetricSearchQueryRequest(BaseModel):
    """Request body for Metric.list (search query)."""

    query: Union[dict, None] = None
    metric_id: Union[str, None] = None
    metric_type: Union[MetricType, None] = None
    resource_type: Union[str, None] = None
    is_managed: Union[bool, None] = None
    namespace_id: Union[str, None] = None
    workspace_id: Union[str, list, None] = None
    domain_id: str


class MetricStatQueryRequest(BaseModel):
    """Request body for Metric.stat (statistics query)."""

    query: dict
    workspace_id: Union[str, list, None] = None
    domain_id: str
diff --git a/src/spaceone/inventory_v2/model/metric/response.py b/src/spaceone/inventory_v2/model/metric/response.py
new file mode 100644
index 0000000..82e12ce
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric/response.py
@@ -0,0 +1,38 @@
+from datetime import datetime
+from typing import Union, List
+from pydantic import BaseModel
+from spaceone.core import utils
+from spaceone.inventory_v2.model.metric.request import MetricType
+from spaceone.inventory_v2.model.metric.request import ResourceGroup
+
+__all__ = ["MetricResponse", "MetricsResponse"]
+
+
class MetricResponse(BaseModel):
    """Response model for a single Metric.

    All fields are optional so partial projections can be represented.
    """

    metric_id: Union[str, None] = None
    name: Union[str, None] = None
    metric_type: Union[MetricType, None] = None
    resource_type: Union[str, None] = None
    query_options: Union[dict, None] = None
    date_field: Union[str, None] = None
    unit: Union[str, None] = None
    tags: Union[dict, None] = None
    labels_info: Union[List[dict], None] = None
    is_managed: Union[bool, None] = None
    namespace_id: Union[str, None] = None
    resource_group: Union[ResourceGroup, None] = None
    workspace_id: Union[str, None] = None
    domain_id: Union[str, None] = None
    created_at: Union[datetime, None] = None
    updated_at: Union[datetime, None] = None

    def dict(self, *args, **kwargs):
        """Serialize the model, rendering timestamps as ISO-8601 strings.

        Fix: guard key presence so calls using ``include``/``exclude`` that
        drop a timestamp field no longer raise ``KeyError``.
        """
        data = super().dict(*args, **kwargs)
        for field in ("created_at", "updated_at"):
            if field in data:
                data[field] = utils.datetime_to_iso8601(data[field])
        return data
+
+
class MetricsResponse(BaseModel):
    """Paginated list response for Metric.list."""

    results: List[MetricResponse] = []
    total_count: int
diff --git a/src/spaceone/inventory_v2/model/metric_data/__init__.py b/src/spaceone/inventory_v2/model/metric_data/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/metric_data/database.py b/src/spaceone/inventory_v2/model/metric_data/database.py
new file mode 100644
index 0000000..b255a9e
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_data/database.py
@@ -0,0 +1,144 @@
+from mongoengine import *
+from spaceone.core.model.mongo_model import MongoModel
+
+
class MetricData(MongoModel):
    """Daily metric data point produced by a metric collection job."""

    metric_id = StringField(max_length=80)
    metric_job_id = StringField(max_length=40)
    # Sync status of the owning job run; presumably flipped to DONE when the
    # job commits — NOTE(review): inferred from the SYNC_JOB indexes, confirm.
    status = StringField(
        max_length=20, default="IN_PROGRESS", choices=["IN_PROGRESS", "DONE"]
    )
    value = FloatField(default=0)
    unit = StringField(default=None)
    labels = DictField(default=None)
    namespace_id = StringField(max_length=80)
    service_account_id = StringField(max_length=40, default=None, null=True)
    project_id = StringField(max_length=40)
    workspace_id = StringField(max_length=40)
    domain_id = StringField(max_length=40)
    # Denormalized date parts ("YYYY", "YYYY-MM", "YYYY-MM-DD") for range queries.
    created_year = StringField(max_length=4, required=True)
    created_month = StringField(max_length=7, required=True)
    created_date = StringField(max_length=10, required=True)

    meta = {
        # Data points are immutable once written.
        "updatable_fields": [],
        "minimal_fields": [
            "metric_id",
            "value",
            "unit",
            "service_account_id",
            "project_id",
            "workspace_id",
            "created_date",
        ],
        # Maps the auth-injected user_projects filter onto project_id.
        "change_query_keys": {
            "user_projects": "project_id",
        },
        "indexes": [
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "status",
                    "-created_date",
                    "workspace_id",
                    "project_id",
                    "service_account_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_1",
            },
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "created_month",
                    "metric_job_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SYNC_JOB_1",
            },
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "metric_job_id",
                    "status",
                    "-created_date",
                ],
                "name": "COMPOUND_INDEX_FOR_SYNC_JOB_2",
            },
        ],
    }
+
+
class MonthlyMetricData(MongoModel):
    """Monthly rollup of metric data points (keyed by created_month)."""

    metric_id = StringField(max_length=80)
    metric_job_id = StringField(max_length=40)
    status = StringField(
        max_length=20, default="IN_PROGRESS", choices=["IN_PROGRESS", "DONE"]
    )
    value = FloatField(default=0)
    unit = StringField(default=None)
    labels = DictField(default=None)
    # NOTE(review): max_length=40 here vs 80 on the daily MetricData model —
    # confirm which limit is intended.
    namespace_id = StringField(max_length=40)
    service_account_id = StringField(max_length=40)
    project_id = StringField(max_length=40)
    workspace_id = StringField(max_length=40)
    domain_id = StringField(max_length=40)
    created_at = DateTimeField(auto_now_add=True)
    # Denormalized date parts ("YYYY", "YYYY-MM") for range queries.
    created_year = StringField(max_length=4, required=True)
    created_month = StringField(max_length=7, required=True)

    meta = {
        # Rollups are immutable once written.
        "updatable_fields": [],
        # Maps the auth-injected user_projects filter onto project_id.
        "change_query_keys": {
            "user_projects": "project_id",
        },
        "indexes": [
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "status",
                    "-created_month",
                    "workspace_id",
                    "project_id",
                    "service_account_id",
                ],
                "name": "COMPOUND_INDEX_FOR_SEARCH_1",
            },
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "created_year",
                ],
                "name": "COMPOUND_INDEX_FOR_SYNC_JOB_1",
            },
            {
                "fields": [
                    "domain_id",
                    "metric_id",
                    "metric_job_id",
                    "status",
                    "-created_month",
                ],
                "name": "COMPOUND_INDEX_FOR_SYNC_JOB_2",
            },
        ],
    }
+
+
class MetricQueryHistory(MongoModel):
    """Tracks when each metric was last queried (one row per metric/domain)."""

    metric_id = StringField(max_length=80)
    domain_id = StringField(max_length=40)
    # auto_now: refreshed on every save.
    updated_at = DateTimeField(auto_now=True)

    meta = {
        "updatable_fields": ["updated_at"],
        "indexes": [
            {
                "fields": ["domain_id", "metric_id"],
                "name": "COMPOUND_INDEX_FOR_SEARCH",
            },
        ],
    }
diff --git a/src/spaceone/inventory_v2/model/metric_data/request.py b/src/spaceone/inventory_v2/model/metric_data/request.py
new file mode 100644
index 0000000..deae1ee
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_data/request.py
@@ -0,0 +1,33 @@
+from typing import Union, Literal
+from pydantic import BaseModel
+
+__all__ = [
+    "MetricDataSearchQueryRequest",
+    "MetricDataAnalyzeQueryRequest",
+    "MetricDataStatQueryRequest",
+]
+
+
class MetricDataSearchQueryRequest(BaseModel):
    """Search query for MetricData.list, scoped to a single metric."""

    query: Union[dict, None] = None
    metric_id: str
    project_id: Union[str, None] = None
    workspace_id: Union[str, None] = None
    domain_id: str
    # Injected from auth: projects the caller is allowed to read.
    user_projects: Union[list, None] = None
+
+
class MetricDataAnalyzeQueryRequest(BaseModel):
    """Analyze (aggregation) query for MetricData.analyze."""

    query: dict
    metric_id: str
    workspace_id: Union[str, None] = None
    domain_id: str
    # Injected from auth: projects the caller is allowed to read.
    user_projects: Union[list, None] = None
+
+
class MetricDataStatQueryRequest(BaseModel):
    """Stat query for MetricData.stat; metric_id is optional here."""

    query: dict
    metric_id: Union[str, None] = None
    workspace_id: Union[str, None] = None
    domain_id: str
    # Injected from auth: projects the caller is allowed to read.
    user_projects: Union[list, None] = None
diff --git a/src/spaceone/inventory_v2/model/metric_data/response.py b/src/spaceone/inventory_v2/model/metric_data/response.py
new file mode 100644
index 0000000..c082896
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_data/response.py
@@ -0,0 +1,25 @@
+from typing import Union, List
+from pydantic import BaseModel
+
+
+__all__ = ["MetricDataResponse", "MetricDatasResponse"]
+
+
class MetricDataResponse(BaseModel):
    """Response model for a single metric data point."""

    metric_id: Union[str, None] = None
    value: Union[float, None] = None
    unit: Union[str, None] = None
    labels: Union[dict, None] = None
    namespace_id: Union[str, None] = None
    service_account_id: Union[str, None] = None
    project_id: Union[str, None] = None
    workspace_id: Union[str, None] = None
    domain_id: Union[str, None] = None
    # Denormalized date parts, mirroring the database model.
    created_year: Union[str, None] = None
    created_month: Union[str, None] = None
    created_date: Union[str, None] = None
+
+
class MetricDatasResponse(BaseModel):
    """Paginated list response for MetricData.list."""

    results: List[MetricDataResponse] = []
    total_count: int
diff --git a/src/spaceone/inventory_v2/model/metric_example/__init__.py b/src/spaceone/inventory_v2/model/metric_example/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/metric_example/database.py b/src/spaceone/inventory_v2/model/metric_example/database.py
new file mode 100644
index 0000000..3a95c6c
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_example/database.py
@@ -0,0 +1,36 @@
+from mongoengine import *
+from spaceone.core.model.mongo_model import MongoModel
+
+
class MetricExample(MongoModel):
    """Saved per-user example (preset query options) attached to a metric."""

    # generate_id: ids are auto-generated with the "example" prefix.
    example_id = StringField(max_length=40, generate_id="example", unique=True)
    name = StringField(max_length=40)
    options = DictField(default=None)
    tags = DictField(default=None)
    metric_id = StringField(max_length=80)
    namespace_id = StringField(max_length=80)
    # Examples are owned by a single user (see user_id filters in requests).
    user_id = StringField(max_length=40)
    domain_id = StringField(max_length=40)
    created_at = DateTimeField(auto_now_add=True)
    updated_at = DateTimeField(auto_now=True)

    meta = {
        "updatable_fields": [
            "name",
            "options",
            "tags",
        ],
        "minimal_fields": [
            "example_id",
            "name",
            "metric_id",
            "user_id",
        ],
        "ordering": ["name"],
        "indexes": [
            "name",
            "metric_id",
            "user_id",
            "domain_id",
        ],
    }
diff --git a/src/spaceone/inventory_v2/model/metric_example/request.py b/src/spaceone/inventory_v2/model/metric_example/request.py
new file mode 100644
index 0000000..6016870
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_example/request.py
@@ -0,0 +1,58 @@
+from typing import Union, Literal
+from pydantic import BaseModel
+
+__all__ = [
+    "MetricExampleCreateRequest",
+    "MetricExampleUpdateRequest",
+    "MetricExampleDeleteRequest",
+    "MetricExampleGetRequest",
+    "MetricExampleSearchQueryRequest",
+    "MetricExampleStatQueryRequest",
+]
+
+
class MetricExampleCreateRequest(BaseModel):
    """Request model for MetricExample.create."""

    metric_id: Union[str, None] = None
    name: str
    options: dict
    # pydantic deep-copies field defaults, so the mutable {} default is safe.
    tags: Union[dict, None] = {}
    user_id: str
    # str or list: auth may inject a set of accessible workspace ids.
    workspace_id: Union[str, list, None] = None
    domain_id: str
+
+
class MetricExampleUpdateRequest(BaseModel):
    """Request model for MetricExample.update; None fields are unchanged."""

    example_id: str
    name: Union[str, None] = None
    options: Union[dict, None] = None
    tags: Union[dict, None] = None
    user_id: str
    domain_id: str
+
+
class MetricExampleDeleteRequest(BaseModel):
    """Request model for MetricExample.delete (owner-scoped)."""

    example_id: str
    user_id: str
    domain_id: str
+
+
class MetricExampleGetRequest(BaseModel):
    """Request model for MetricExample.get (owner-scoped)."""

    example_id: str
    user_id: str
    domain_id: str
+
+
class MetricExampleSearchQueryRequest(BaseModel):
    """Search query model for MetricExample.list (owner-scoped)."""

    query: Union[dict, None] = None
    example_id: Union[str, None] = None
    name: Union[str, None] = None
    metric_id: Union[str, None] = None
    namespace_id: Union[str, None] = None
    user_id: str
    domain_id: str
+
+
class MetricExampleStatQueryRequest(BaseModel):
    """Stat query model for MetricExample.stat (owner-scoped)."""

    query: dict
    user_id: str
    domain_id: str
diff --git a/src/spaceone/inventory_v2/model/metric_example/response.py b/src/spaceone/inventory_v2/model/metric_example/response.py
new file mode 100644
index 0000000..b564bd1
--- /dev/null
+++ b/src/spaceone/inventory_v2/model/metric_example/response.py
@@ -0,0 +1,30 @@
+from datetime import datetime
+from typing import Union, List
+from pydantic import BaseModel
+from spaceone.core import utils
+
+__all__ = ["MetricExampleResponse", "MetricExamplesResponse"]
+
+
class MetricExampleResponse(BaseModel):
    """Response model for a single MetricExample."""

    example_id: Union[str, None] = None
    name: Union[str, None] = None
    options: Union[dict, None] = None
    tags: Union[dict, None] = None
    metric_id: Union[str, None] = None
    namespace_id: Union[str, None] = None
    user_id: Union[str, None] = None
    domain_id: Union[str, None] = None
    created_at: Union[datetime, None] = None
    updated_at: Union[datetime, None] = None

    def dict(self, *args, **kwargs):
        """Serialize the model, rendering timestamps as ISO-8601 strings.

        Fix: guard key presence so calls using ``include``/``exclude`` that
        drop a timestamp field no longer raise ``KeyError``.
        """
        data = super().dict(*args, **kwargs)
        for field in ("created_at", "updated_at"):
            if field in data:
                data[field] = utils.datetime_to_iso8601(data[field])
        return data
+
+
class MetricExamplesResponse(BaseModel):
    """Paginated list response for MetricExample.list."""

    results: List[MetricExampleResponse] = []
    total_count: int
diff --git a/src/spaceone/inventory_v2/model/region/__init__.py b/src/spaceone/inventory_v2/model/region/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/spaceone/inventory_v2/model/region_model.py b/src/spaceone/inventory_v2/model/region/region_model.py
similarity index 100%
rename from src/spaceone/inventory_v2/model/region_model.py
rename to src/spaceone/inventory_v2/model/region/region_model.py
diff --git a/src/spaceone/inventory_v2/service/asset_service.py b/src/spaceone/inventory_v2/service/asset_service.py
new file mode 100644
index 0000000..5d8b3f8
--- /dev/null
+++ b/src/spaceone/inventory_v2/service/asset_service.py
@@ -0,0 +1,372 @@
import logging
import copy
import pytz
from datetime import datetime
from typing import List, Union, Tuple

from spaceone.core.service import *
from spaceone.core import utils

from spaceone.inventory_v2.manager.asset_manager import AssetManager
from spaceone.inventory_v2.manager.collection_state_manager import (
    CollectionStateManager,
)
from spaceone.inventory_v2.manager.collector_rule_manager import CollectorRuleManager
from spaceone.inventory_v2.manager.identity_manager import IdentityManager
from spaceone.inventory_v2.model.asset.database import Asset
from spaceone.inventory_v2.model.asset.request import *
from spaceone.inventory_v2.model.asset.response import *
from spaceone.inventory_v2.error import *
+
# Fields targeted by keyword search on Asset queries.
# NOTE(review): these are still cloud-service field names (presumably copied
# from the v1 CloudService service) — confirm they match the Asset model.
_KEYWORD_FILTER = [
    "cloud_service_id",
    "name",
    "ip_addresses",
    "cloud_service_group",
    "cloud_service_type",
    "reference.resource_id",
]

_LOGGER = logging.getLogger(__name__)
+
+
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class AssetService(BaseService):
    """Create/update service for Asset resources.

    Requests arrive either directly from users or from the collecting
    pipeline; collector-originated requests carry collector metadata in the
    transaction, which enables the collector-rule data mutation path.
    """

    resource = "Asset"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.asset_mgr = AssetManager()
        self.collector_rule_mgr = CollectorRuleManager()
        self.identity_mgr = IdentityManager()
        # Fix: create_resource/update_resource use self.state_mgr for
        # collection-state bookkeeping, but it was never initialized
        # (AttributeError at runtime).
        self.state_mgr = CollectionStateManager()
        # Collector metadata; all None when the call is user-initiated.
        self.collector_id = self.transaction.get_meta("collector_id")
        self.job_id = self.transaction.get_meta("job_id")
        self.plugin_id = self.transaction.get_meta("plugin_id")
        self.service_account_id = self.transaction.get_meta("secret.service_account_id")

    @transaction(
        # Fix: use the "inventory-v2" service prefix consistently with
        # update() below (was "inventory:").
        permission="inventory-v2:CloudService.write",
        role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
    )
    @convert_model
    def create(self, params: AssetCreateRequest) -> Union[AssetResponse, dict]:
        """
        Args:
            params (dict): {
                'cloud_service_type': 'str',        # required
                'cloud_service_group': 'str',       # required
                'provider': 'str',                  # required
                'name': 'str',
                'account': 'str',
                'instance_type': 'str',
                'instance_size': 'float',
                'ip_addresses': 'list',
                'data': 'dict',                     # required
                'json_data': 'dict',
                'metadata': 'dict',
                'reference': 'dict',
                'tags': 'list or dict',
                'region_code': 'str',
                'project_id': 'str',                # required
                'workspace_id': 'str',              # injected from auth (required)
                'domain_id': 'str'                  # injected from auth (required)
            }

        Returns:
            cloud_service_vo (object)

        """
        asset_vo = self.create_resource(params.dict())
        return AssetResponse(**asset_vo.to_dict())

    def create_resource(self, params: dict) -> Asset:
        """Validate, enrich and persist a new Asset.

        Raises:
            ERROR_REQUIRED_PARAMETER: neither 'data' nor 'json_data' is given.
            ERROR_INVALID_PARAMETER_TYPE: malformed json payloads or a
                non-float 'instance_size'.
        """
        # ch_mgr: ChangeHistoryManager = self.locator.get_manager("ChangeHistoryManager")

        # 'json_data' is an alternative, JSON-string form of 'data'.
        if json_data := params.get("json_data"):
            params["data"] = utils.load_json(json_data)
            if not isinstance(params["data"], dict):
                raise ERROR_INVALID_PARAMETER_TYPE(
                    key="json_data", type=type(params["data"])
                )

            del params["json_data"]
        elif "data" not in params:
            raise ERROR_REQUIRED_PARAMETER(key="data")

        # Same pattern for 'json_metadata' -> 'metadata'.
        if json_metadata := params.get("json_metadata"):
            params["metadata"] = utils.load_json(json_metadata)
            if not isinstance(params["metadata"], dict):
                raise ERROR_INVALID_PARAMETER_TYPE(
                    key="json_metadata", type=type(params["metadata"])
                )

            del params["json_metadata"]

        domain_id = params["domain_id"]
        workspace_id = params["workspace_id"]
        secret_project_id = self.transaction.get_meta("secret.project_id")
        provider = params["provider"]

        if instance_size := params.get("instance_size"):
            if not isinstance(instance_size, float):
                raise ERROR_INVALID_PARAMETER_TYPE(key="instance_size", type="float")

        if "tags" in params:
            params["tags"] = self._convert_tags_to_dict(params["tags"])

        # Change data through Collector Rule
        if self._is_created_by_collector():
            params = self.collector_rule_mgr.change_asset_data(
                self.collector_id, domain_id, params
            )

        # Hash tag keys per provider after collector rules may have changed them.
        if "tags" in params:
            params["tags"], params["tag_keys"] = self._convert_tags_to_hash(
                params["tags"], provider
            )

        # Explicit project_id wins; otherwise fall back to the secret's project.
        if "project_id" in params:
            self.identity_mgr.get_project(params["project_id"], domain_id)
        elif secret_project_id:
            params["project_id"] = secret_project_id

        params["ref_cloud_service_type"] = self._make_cloud_service_type_key(params)

        if "region_code" in params:
            params["ref_region"] = self._make_region_key(
                domain_id, workspace_id, provider, params["region_code"]
            )

        if "metadata" in params:
            params["metadata"] = self._convert_metadata(params["metadata"], provider)

        params["collection_info"] = self._get_collection_info()

        asset_vo = self.asset_mgr.create_asset(params)

        # todo: Create New History
        # Create New History
        # ch_mgr.add_new_history(asset_vo, params)

        # Create Collection State
        self.state_mgr.create_collection_state(asset_vo.asset_id, domain_id)

        return asset_vo

    @transaction(
        permission="inventory-v2:CloudService.write",
        role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
    )
    # Fix: @convert_model was missing — without it the framework passes a
    # plain dict and params.dict() below raises AttributeError.
    @convert_model
    def update(self, params: AssetUpdateRequest) -> Union[AssetResponse, dict]:
        """
        Args:
            params (dict): {
                'asset_id': 'str',      # required
                'name': 'str',
                'account': 'str',
                'instance_type': 'str',
                'instance_size': 'float',
                'ip_addresses': 'list',
                'data': 'dict',
                'json_data': 'dict',
                'metadata': 'dict',
                'reference': 'dict',
                'tags': 'list or dict',
                'region_code': 'str',
                'project_id': 'str',
                'workspace_id': 'str',              # injected from auth (required)
                'domain_id': 'str',                 # injected from auth (required)
                'user_projects': 'list'             # injected from auth
            }

        Returns:
            cloud_service_vo (object)
        """
        asset_vo = self.update_resource(params.dict())
        return AssetResponse(**asset_vo.to_dict())

    # Fix: required key and lookups below used "cloud_service_id", which does
    # not exist on AssetUpdateRequest (the docstring/model use "asset_id").
    @check_required(["asset_id", "workspace_id", "domain_id"])
    def update_resource(self, params: dict) -> Asset:
        """Merge the incoming changes into an existing Asset and persist it."""
        # ch_mgr: ChangeHistoryManager = self.locator.get_manager("ChangeHistoryManager")

        if json_data := params.get("json_data"):
            params["data"] = utils.load_json(json_data)
            if not isinstance(params["data"], dict):
                raise ERROR_INVALID_PARAMETER_TYPE(
                    key="json_data", type=type(params["data"])
                )

            del params["json_data"]

        if json_metadata := params.get("json_metadata"):
            params["metadata"] = utils.load_json(json_metadata)
            if not isinstance(params["metadata"], dict):
                raise ERROR_INVALID_PARAMETER_TYPE(
                    key="json_metadata", type=type(params["metadata"])
                )

            del params["json_metadata"]

        secret_project_id = self.transaction.get_meta("secret.project_id")

        asset_id = params["asset_id"]
        workspace_id = params["workspace_id"]
        user_projects = params.get("user_projects")
        domain_id = params["domain_id"]
        provider = self._get_provider_from_meta()

        # An explicit None must not wipe the stored addresses.
        if "ip_addresses" in params and params["ip_addresses"] is None:
            del params["ip_addresses"]

        if instance_size := params.get("instance_size"):
            if not isinstance(instance_size, float):
                raise ERROR_INVALID_PARAMETER_TYPE(key="instance_size", type="float")

        if "tags" in params:
            params["tags"] = self._convert_tags_to_dict(params["tags"])

        # Change data through Collector Rule
        if self._is_created_by_collector():
            params = self.collector_rule_mgr.change_asset_data(
                self.collector_id, domain_id, params
            )

        asset_vo: Asset = self.asset_mgr.get_asset(
            asset_id, domain_id, workspace_id, user_projects
        )

        if "project_id" in params:
            self.identity_mgr.get_project(params["project_id"], domain_id)
        elif secret_project_id and secret_project_id != asset_vo.project_id:
            params["project_id"] = secret_project_id

        if "region_code" in params:
            params["ref_region"] = self._make_region_key(
                asset_vo.domain_id,
                asset_vo.workspace_id,
                asset_vo.provider,
                params["region_code"],
            )

        old_asset_data = dict(asset_vo.to_dict())

        # Only persist tags/metadata when this provider's slice actually changed.
        if "tags" in params:
            old_tags = old_asset_data.get("tags", {})
            old_tag_keys = old_asset_data.get("tag_keys", {})
            new_tags, new_tag_keys = self._convert_tags_to_hash(
                params["tags"], provider
            )

            if self._is_different_data(new_tags, old_tags, provider):
                old_tags.update(new_tags)
                old_tag_keys.update(new_tag_keys)
                params["tags"] = old_tags
                params["tag_keys"] = old_tag_keys
            else:
                del params["tags"]

        if "metadata" in params:
            old_metadata = old_asset_data.get("metadata", {})
            new_metadata = self._convert_metadata(params["metadata"], provider)

            if self._is_different_data(new_metadata, old_metadata, provider):
                old_metadata.update(new_metadata)
                params["metadata"] = old_metadata
            else:
                del params["metadata"]

        params["collection_info"] = self._get_collection_info()

        params = self.asset_mgr.merge_data(params, old_asset_data)

        asset_vo = self.asset_mgr.update_asset_by_vo(params, asset_vo)

        # todo: Create Update History
        # Create Update History
        # ch_mgr.add_update_history(asset_vo, params, old_asset_data)

        # Update Collection History
        state_vo = self.state_mgr.get_collection_state(asset_id, domain_id)
        if state_vo:
            self.state_mgr.reset_collection_state(state_vo)
        else:
            self.state_mgr.create_collection_state(asset_id, domain_id)

        return asset_vo

    @staticmethod
    def _make_cloud_service_type_key(resource_data: dict) -> str:
        """Build the denormalized cloud-service-type reference key."""
        return (
            f'{resource_data["domain_id"]}.{resource_data["workspace_id"]}.{resource_data["provider"]}.'
            f'{resource_data["cloud_service_group"]}.{resource_data["cloud_service_type"]}'
        )

    @staticmethod
    def _make_region_key(
        domain_id: str, workspace_id: str, provider: str, region_code: str
    ) -> str:
        """Build the denormalized region reference key."""
        return f"{domain_id}.{workspace_id}.{provider}.{region_code}"

    @staticmethod
    def _convert_metadata(metadata: dict, provider: str) -> dict:
        """Namespace the metadata dict under its provider."""
        return {provider: copy.deepcopy(metadata)}

    def _get_collection_info(self) -> dict:
        """Snapshot collector provenance for the stored asset."""
        collector_id = self.transaction.get_meta("collector_id")
        secret_id = self.transaction.get_meta("secret.secret_id")
        service_account_id = self.transaction.get_meta("secret.service_account_id")

        return {
            "collector_id": collector_id,
            "secret_id": secret_id,
            "service_account_id": service_account_id,
            "last_collected_at": datetime.utcnow(),
        }

    @staticmethod
    def _convert_tags_to_dict(tags: Union[list, dict]) -> dict:
        """Normalize list-of-{key,value} or dict tags into a plain dict."""
        if isinstance(tags, list):
            dot_tags = utils.tags_to_dict(tags)
        elif isinstance(tags, dict):
            dot_tags = copy.deepcopy(tags)
        else:
            dot_tags = {}

        return dot_tags

    @staticmethod
    def _convert_tags_to_hash(dot_tags: dict, provider: str) -> Tuple[dict, dict]:
        """Hash tag keys (keys may contain characters invalid in Mongo keys)
        and namespace both tags and key lists under the provider."""
        tag_keys = {provider: list(dot_tags.keys())}

        tags = {provider: {}}
        for key, value in dot_tags.items():
            hashed_key = utils.string_to_hash(key)
            tags[provider][hashed_key] = {"key": key, "value": value}

        return tags, tag_keys

    @staticmethod
    def _is_different_data(new_data: dict, old_data: dict, provider: str) -> bool:
        """True when this provider's slice of the data changed."""
        return new_data[provider] != old_data.get(provider)

    def _get_provider_from_meta(self) -> str:
        """Provider of the current writer: the collector's secret provider,
        or "custom" for direct user edits."""
        if self._is_created_by_collector():
            return self.transaction.get_meta("secret.provider")
        else:
            return "custom"

    def _is_created_by_collector(self) -> bool:
        """True when the request originated from the collecting pipeline.

        Fix: was annotated ``-> str`` and returned the last truthy operand
        of the and-chain; normalize to a real bool.
        """
        return bool(
            self.collector_id
            and self.job_id
            and self.service_account_id
            and self.plugin_id
        )

    @staticmethod
    def _check_timezone(timezone: str) -> None:
        """Raise if *timezone* is not a known IANA timezone name."""
        if timezone not in pytz.all_timezones:
            raise ERROR_INVALID_PARAMETER(key="timezone", reason="Timezone is invalid.")
diff --git a/src/spaceone/inventory_v2/service/collector_service.py b/src/spaceone/inventory_v2/service/collector_service.py
index afd9b9e..e079b40 100644
--- a/src/spaceone/inventory_v2/service/collector_service.py
+++ b/src/spaceone/inventory_v2/service/collector_service.py
@@ -50,10 +50,10 @@ def create(self, params: CollectorCreateRequest) -> Union[CollectorResponse, dic
         Args:
             params (dict): {
                 'name': 'str',              # required
+                'provider': 'str',
                 'plugin_info': 'dict',      # required
                 'schedule': 'dict',
                 'secret_filter': 'dict',
-                'provider': 'str',
                 'tags': 'dict',
                 'resource_group': 'str',    # required
                 'workspace_id': 'str',      # injected from auth
@@ -86,32 +86,28 @@ def create(self, params: CollectorCreateRequest) -> Union[CollectorResponse, dic
         plugin_manager = PluginManager()
         collector_plugin_mgr = CollectorPluginManager()
 
-        create_params = params.dict()
         plugin_info = params.plugin_info
         plugin_id = plugin_info["plugin_id"]
 
         plugin_info_from_repository = self._get_plugin_from_repository(plugin_id)
-        capability = plugin_info_from_repository.get("capability", {})
-        plugin_provider = self._get_plugin_providers(
+        params.provider = self._get_plugin_providers(
             params.provider, plugin_info_from_repository
         )
 
-        create_params["capability"] = capability
-        create_params["provider"] = plugin_provider
-
-        if "secret_filter" in params:
-            if create_params["secret_filter"].get("state") == "ENABLED":
+        if secret_filter := params.secret_filter:
+            if secret_filter.get("state") == "ENABLED":
                 self._validate_secret_filter(
                     identity_mgr,
                     secret_mgr,
-                    create_params["secret_filter"],
-                    plugin_provider,
+                    params.secret_filter,
+                    params.provider,
                     domain_id,
                 )
             else:
-                del create_params["secret_filter"]
+                # todo : test
+                params.secret_filter = None
 
-        collector_vo = self.collector_mgr.create_collector(create_params)
+        collector_vo = self.collector_mgr.create_collector(params.dict())
 
         endpoint, updated_version = plugin_manager.get_endpoint(
             plugin_info["plugin_id"],
@@ -205,7 +201,7 @@ def update(self, params: CollectorUpdateRequest) -> Union[CollectorResponse, dic
     )
     @convert_model
     def update_plugin(
-            self, params: CollectorUpdatePluginRequest
+        self, params: CollectorUpdatePluginRequest
     ) -> Union[CollectorResponse, dict]:
         """Update plugin info of collector
         Args:
@@ -408,7 +404,7 @@ def get(self, params: CollectorGetRequest) -> Union[CollectorResponse, dict]:
     @append_keyword_filter(_KEYWORD_FILTER)
     @convert_model
     def list(
-            self, params: CollectorSearchQueryRequest
+        self, params: CollectorSearchQueryRequest
     ) -> Union[CollectorsResponse, dict]:
         """List collectors
         Args:
@@ -602,15 +598,15 @@ def collect(self, params: CollectorCollectRequest) -> Union[JobResponse, dict]:
         return JobResponse(**job_vo.to_dict())
 
     def _get_tasks(
-            self,
-            params: dict,
-            endpoint: str,
-            collector_id: str,
-            collector_provider: str,
-            plugin_info: dict,
-            secret_filter: dict,
-            domain_id: str,
-            collector_workspace_id: str = None,
+        self,
+        params: dict,
+        endpoint: str,
+        collector_id: str,
+        collector_provider: str,
+        plugin_info: dict,
+        secret_filter: dict,
+        domain_id: str,
+        collector_workspace_id: str = None,
     ) -> list:
         secret_mgr: SecretManager = self.locator.get_manager(SecretManager)
         collector_plugin_mgr: CollectorPluginManager = self.locator.get_manager(
@@ -654,7 +650,7 @@ def _get_tasks(
 
     @staticmethod
     def _check_secrets(
-            secret_mgr: SecretManager, secret_ids: list, provider: str, domain_id: str
+        secret_mgr: SecretManager, secret_ids: list, provider: str, domain_id: str
     ) -> None:
         query = {
             "filter": [
@@ -674,10 +670,10 @@ def _check_secrets(
 
     @staticmethod
     def _check_service_accounts(
-            identity_mgr: IdentityManager,
-            service_account_ids: list,
-            provider: str,
-            domain_id: str,
+        identity_mgr: IdentityManager,
+        service_account_ids: list,
+        provider: str,
+        domain_id: str,
     ) -> None:
         query = {
             "filter": [
@@ -702,10 +698,10 @@ def _check_service_accounts(
 
     @staticmethod
     def _check_schemas(
-            identity_mgr: IdentityManager,
-            schema_ids: list,
-            provider: str,
-            domain_id: str,
+        identity_mgr: IdentityManager,
+        schema_ids: list,
+        provider: str,
+        domain_id: str,
     ) -> None:
         query = {
             "filter": [
@@ -729,12 +725,12 @@ def _check_schemas(
             )
 
     def _validate_secret_filter(
-            self,
-            identity_mgr: IdentityManager,
-            secret_mgr: SecretManager,
-            secret_filter: dict,
-            provider: str,
-            domain_id: str,
+        self,
+        identity_mgr: IdentityManager,
+        secret_mgr: SecretManager,
+        secret_filter: dict,
+        provider: str,
+        domain_id: str,
     ) -> None:
         if "secrets" in secret_filter:
             self._check_secrets(
@@ -770,11 +766,11 @@ def _validate_secret_filter(
             )
 
     def _update_collector_plugin(
-            self,
-            endpoint: str,
-            updated_version: str,
-            plugin_info: dict,
-            collector_vo: Collector,
+        self,
+        endpoint: str,
+        updated_version: str,
+        plugin_info: dict,
+        collector_vo: Collector,
     ) -> Collector:
         collector_plugin_mgr = CollectorPluginManager()
         plugin_response = collector_plugin_mgr.init_plugin(
@@ -804,12 +800,12 @@ def _update_collector_plugin(
         return collector_vo
 
     def _get_secret_ids_from_filter(
-            self,
-            secret_filter: dict,
-            provider: str,
-            domain_id: str,
-            secret_id: str = None,
-            workspace_id: str = None,
+        self,
+        secret_filter: dict,
+        provider: str,
+        domain_id: str,
+        secret_id: str = None,
+        workspace_id: str = None,
     ) -> list:
         secret_manager: SecretManager = self.locator.get_manager(SecretManager)
 
@@ -856,11 +852,11 @@ def _get_plugin_from_repository(plugin_id: str) -> dict:
 
     @staticmethod
     def create_collector_rules_by_metadata(
-            collector_rules: list,
-            collector_id: str,
-            resource_group: str,
-            domain_id: str,
-            workspace_id: str = None,
+        collector_rules: list,
+        collector_id: str,
+        resource_group: str,
+        domain_id: str,
+        workspace_id: str = None,
     ):
         collector_rule_mgr = CollectorRuleManager()
 
@@ -887,10 +883,10 @@ def delete_collector_rules(collector_id: str, domain_id: str) -> None:
 
     @staticmethod
     def _make_secret_filter(
-            secret_filter: dict,
-            provider: str,
-            secret_id: str = None,
-            workspace_id: str = None,
+        secret_filter: dict,
+        provider: str,
+        secret_id: str = None,
+        workspace_id: str = None,
     ) -> list:
         _filter = [{"k": "provider", "v": provider, "o": "eq"}]
 
@@ -916,7 +912,7 @@ def _make_secret_filter(
                 _filter.append({"k": "secret_id", "v": exclude_secrets, "o": "not_in"})
 
             if exclude_service_accounts := secret_filter.get(
-                    "exclude_service_accounts"
+                "exclude_service_accounts"
             ):
                 _filter.append(
                     {
@@ -931,6 +927,22 @@ def _make_secret_filter(
 
         return _filter
 
+    @staticmethod
+    def _convert_plugin_provider_to_categories(plugin_info: dict) -> list:
+        categories = []
+        supported_providers = plugin_info.get("capability", {}).get(
+            "supported_providers", []
+        )
+
+        if supported_providers:
+            # Multi providers
+            categories.extend(supported_providers)
+        elif provider := plugin_info.get("provider"):
+            # Single provider
+            categories.append(provider)
+
+        return categories
+
     @staticmethod
     def _get_plugin_providers(provider: str, plugin_info: dict) -> str:
         supported_providers = plugin_info.get("capability", {}).get(
diff --git a/src/spaceone/inventory_v2/service/metric_data_service.py b/src/spaceone/inventory_v2/service/metric_data_service.py
new file mode 100644
index 0000000..890d1d3
--- /dev/null
+++ b/src/spaceone/inventory_v2/service/metric_data_service.py
@@ -0,0 +1,139 @@
+import logging
+from typing import Union, List
+
+from spaceone.core.service import *
+from spaceone.core.service.utils import *
+from spaceone.core.error import *
+
+from spaceone.inventory_v2.model.metric_data.request import *
+from spaceone.inventory_v2.model.metric_data.response import *
+from spaceone.inventory_v2.manager.metric_manager import MetricManager
+from spaceone.inventory_v2.manager.metric_data_manager import MetricDataManager
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@authentication_handler
+@authorization_handler
+@mutation_handler
+@event_handler
+class MetricDataService(BaseService):
+    resource = "MetricData"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.metric_mgr = MetricManager()
+        self.metric_data_mgr = MetricDataManager()
+
+    @transaction(
+        permission="inventory-v2:MetricData.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @append_query_filter(
+        [
+            "metric_id",
+            "project_id",
+            "workspace_id",
+            "domain_id",
+            "user_projects",
+        ]
+    )
+    @append_keyword_filter(["metric_id", "name"])
+    @set_query_page_limit(1000)
+    @convert_model
+    def list(
+        self, params: MetricDataSearchQueryRequest
+    ) -> Union[MetricDatasResponse, dict]:
+        """List metric data
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.Query)',
+                'metric_id': 'str',             # required
+                'project_id': 'str',
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+                'user_projects': 'list',        # injected from auth
+            }
+
+        Returns:
+            MetricDataResponse:
+        """
+
+        query = params.query or {}
+
+        metric_data_vos, total_count = self.metric_data_mgr.list_metric_data(query)
+
+        metric_datas_info = [
+            metric_data_vo.to_dict() for metric_data_vo in metric_data_vos
+        ]
+        return MetricDatasResponse(results=metric_datas_info, total_count=total_count)
+
+    @transaction(
+        permission="inventory-v2:MetricData.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @append_query_filter(["metric_id", "workspace_id", "domain_id", "user_projects"])
+    @append_keyword_filter(["metric_id", "name"])
+    # @set_query_page_limit(1000)
+    @convert_model
+    def analyze(self, params: MetricDataAnalyzeQueryRequest) -> dict:
+        """Analyze metric data
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.AnalyzeQuery)',    # required
+                'metric_id': 'str',             # required
+                'workspace_id': 'list',         # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+                'user_projects': 'list',        # injected from auth
+            }
+
+        Returns:
+            dict: {
+                'results': 'list',
+                'more': 'bool'
+            }
+        """
+
+        domain_id = params.domain_id
+        metric_id = params.metric_id
+        query = params.query or {}
+        self._check_required(query)
+
+        return self.metric_data_mgr.analyze_metric_data_by_granularity(
+            query, domain_id, metric_id
+        )
+
+    @transaction(
+        permission="inventory-v2:MetricData.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @append_query_filter(["metric_id", "workspace_id", "domain_id", "user_projects"])
+    @append_keyword_filter(["metric_id", "name"])
+    # @set_query_page_limit(1000)
+    @convert_model
+    def stat(self, params: MetricDataStatQueryRequest) -> dict:
+        """
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.StatisticsQuery)', # required
+                'workspace_id': 'list',     # injected from auth
+                'domain_id': 'str',         # injected from auth (required)
+                'user_projects': 'list',    # injected from auth
+            }
+
+        Returns:
+            dict: {
+                'results': 'list',
+                'total_count': 'int'
+            }
+        """
+
+        query = params.query or {}
+
+        return self.metric_data_mgr.stat_metric_data(query)
+
+    @staticmethod
+    def _check_required(query: dict) -> None:
+        for key in ["granularity", "start", "end", "fields"]:
+            if key not in query:
+                raise ERROR_REQUIRED_PARAMETER(key=key)
diff --git a/src/spaceone/inventory_v2/service/metric_example_service.py b/src/spaceone/inventory_v2/service/metric_example_service.py
new file mode 100644
index 0000000..a687e51
--- /dev/null
+++ b/src/spaceone/inventory_v2/service/metric_example_service.py
@@ -0,0 +1,227 @@
+import logging
+from typing import Union
+
+from spaceone.core.service import *
+from spaceone.core.service.utils import *
+
+from spaceone.inventory_v2.model.metric_example.request import *
+from spaceone.inventory_v2.model.metric_example.response import *
+from spaceone.inventory_v2.manager.metric_example_manager import MetricExampleManager
+from spaceone.inventory_v2.manager.metric_manager import MetricManager
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@authentication_handler
+@authorization_handler
+@mutation_handler
+@event_handler
+class MetricExampleService(BaseService):
+    resource = "MetricExample"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.metric_example_mgr = MetricExampleManager()
+        self.metric_mgr = MetricManager()
+
+    @transaction(
+        permission="inventory-v2:MetricExample.write",
+        role_types=["USER"],
+    )
+    @change_value_by_rule("APPEND", "workspace_id", "*")
+    @convert_model
+    def create(
+        self, params: MetricExampleCreateRequest
+    ) -> Union[MetricExampleResponse, dict]:
+        """Create metric example
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'name': 'str',                  # required
+                'options': 'dict',              # required
+                'tags': 'dict',
+                'user_id': 'str',               # injected from auth (required)
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricExampleResponse:
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id, params.domain_id, params.workspace_id
+        )
+
+        params_dict = params.dict()
+        params_dict["namespace_id"] = metric_vo.namespace_id
+
+        metric_example_vo = self.metric_example_mgr.create_metric_example(params_dict)
+        return MetricExampleResponse(**metric_example_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:MetricExample.write",
+        role_types=["USER"],
+    )
+    @convert_model
+    def update(
+        self, params: MetricExampleUpdateRequest
+    ) -> Union[MetricExampleResponse, dict]:
+        """Update metric example
+
+        Args:
+            params (dict): {
+                'example_id': 'str',            # required
+                'name': 'str',
+                'options': 'dict',
+                'tags': 'dict',
+                'user_id': 'str',               # injected from auth (required)
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricExampleResponse:
+        """
+
+        metric_example_vo = self.metric_example_mgr.get_metric_example(
+            params.example_id,
+            params.domain_id,
+            params.user_id,
+        )
+
+        metric_example_vo = self.metric_example_mgr.update_metric_example_by_vo(
+            params.dict(exclude_unset=True), metric_example_vo
+        )
+
+        return MetricExampleResponse(**metric_example_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:MetricExample.write",
+        role_types=["USER"],
+    )
+    @convert_model
+    def delete(self, params: MetricExampleDeleteRequest) -> None:
+        """Delete metric example
+
+        Args:
+            params (dict): {
+                'example_id': 'str',            # required
+                'user_id': 'str',               # injected from auth (required)
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            None
+        """
+
+        metric_example_vo = self.metric_example_mgr.get_metric_example(
+            params.example_id,
+            params.domain_id,
+            params.user_id,
+        )
+
+        self.metric_example_mgr.delete_metric_example_by_vo(metric_example_vo)
+
+    @transaction(
+        permission="inventory-v2:MetricExample.read",
+        role_types=["USER"],
+    )
+    @convert_model
+    def get(
+        self, params: MetricExampleGetRequest
+    ) -> Union[MetricExampleResponse, dict]:
+        """Get metric example
+
+        Args:
+            params (dict): {
+                'example_id': 'str',            # required
+                'user_id': 'str',               # injected from auth (required)
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricExampleResponse:
+        """
+
+        metric_example_vo = self.metric_example_mgr.get_metric_example(
+            params.example_id,
+            params.domain_id,
+            params.user_id,
+        )
+
+        return MetricExampleResponse(**metric_example_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:MetricExample.read",
+        role_types=["USER"],
+    )
+    @append_query_filter(
+        [
+            "example_id",
+            "name",
+            "metric_id",
+            "namespace_id",
+            "user_id",
+            "domain_id",
+        ]
+    )
+    @append_keyword_filter(["example_id", "name"])
+    @convert_model
+    def list(
+        self, params: MetricExampleSearchQueryRequest
+    ) -> Union[MetricExamplesResponse, dict]:
+        """List metric examples
+
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.Query)',
+                'example_id': 'str',
+                'name': 'str',
+                'metric_id': 'str',
+                'namespace_id': 'str',
+                'user_id': 'str',               # injected from auth (required)
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricExamplesResponse:
+        """
+
+        query = params.query or {}
+        metric_example_vos, total_count = self.metric_example_mgr.list_metric_examples(
+            query
+        )
+
+        metric_examples_info = [
+            metric_example_vo.to_dict() for metric_example_vo in metric_example_vos
+        ]
+        return MetricExamplesResponse(
+            results=metric_examples_info, total_count=total_count
+        )
+
+    @transaction(
+        permission="inventory-v2:MetricExample.read",
+        role_types=["USER"],
+    )
+    @append_query_filter(["user_id", "domain_id"])
+    @append_keyword_filter(["example_id", "name"])
+    @convert_model
+    def stat(self, params: MetricExampleStatQueryRequest) -> dict:
+        """
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.StatisticsQuery)', # required
+                'user_id': 'str',           # injected from auth (required)
+                'domain_id': 'str',         # injected from auth (required)
+            }
+
+        Returns:
+            dict: {
+                'results': 'list',
+                'total_count': 'int'
+            }
+        """
+
+        query = params.query or {}
+        return self.metric_example_mgr.stat_metric_examples(query)
diff --git a/src/spaceone/inventory_v2/service/metric_service.py b/src/spaceone/inventory_v2/service/metric_service.py
new file mode 100644
index 0000000..9fdeb9b
--- /dev/null
+++ b/src/spaceone/inventory_v2/service/metric_service.py
@@ -0,0 +1,376 @@
+import logging
+from typing import Union
+
+from spaceone.core.service import *
+from spaceone.core.service.utils import *
+from spaceone.core.error import *
+
+from spaceone.inventory_v2.model.metric.request import *
+from spaceone.inventory_v2.model.metric.response import *
+from spaceone.inventory_v2.manager.metric_manager import MetricManager
+
+# from spaceone.inventory_v2.manager.namespace_manager import NamespaceManager
+from spaceone.inventory_v2.manager.identity_manager import IdentityManager
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@authentication_handler
+@authorization_handler
+@mutation_handler
+@event_handler
+class MetricService(BaseService):
+    resource = "Metric"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.metric_mgr = MetricManager()
+        self.identity_mgr = IdentityManager()
+
+    @transaction(
+        permission="inventory-v2:Metric.write",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"],
+    )
+    @convert_model
+    def create(self, params: MetricCreateRequest) -> Union[MetricResponse, dict]:
+        """Create metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',
+                'name': 'str',                  # required
+                'metric_type': 'str',           # required
+                'resource_type': 'str',
+                'query_options': 'dict',        # required
+                'unit': 'str',
+                'tags': 'dict',
+                'namespace_id': 'str',          # required
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricResponse:
+        """
+
+        if params.resource_group == "WORKSPACE":
+            if not params.workspace_id:
+                raise ERROR_REQUIRED_PARAMETER(key="workspace_id")
+
+            self.identity_mgr.check_workspace(params.workspace_id, params.domain_id)
+        else:
+            params.workspace_id = "*"
+
+        if params.resource_type is None:
+            params.resource_type = self._get_resource_type_from_namespace(
+                params.namespace_id, params.domain_id
+            )
+
+        metric_vo = self.metric_mgr.create_metric(params.dict())
+
+        self.metric_mgr.analyze_resource(metric_vo, params.workspace_id)
+        self.metric_mgr.run_metric_query(metric_vo)
+
+        return MetricResponse(**metric_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:Metric.write",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"],
+    )
+    @convert_model
+    def update(self, params: MetricUpdateRequest) -> Union[MetricResponse, dict]:
+        """Update metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'name': 'str',
+                'query_options': 'dict',
+                'unit': 'str',
+                'tags': 'dict',
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricResponse:
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id,
+            params.domain_id,
+            params.workspace_id,
+        )
+
+        if metric_vo.is_managed:
+            raise ERROR_PERMISSION_DENIED()
+
+        if params.query_options:
+            self.metric_mgr.analyze_resource(
+                metric_vo, params.workspace_id, params.query_options
+            )
+
+        metric_vo = self.metric_mgr.update_metric_by_vo(
+            params.dict(exclude_unset=True), metric_vo
+        )
+
+        self.metric_mgr.run_metric_query(metric_vo)
+
+        return MetricResponse(**metric_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:Metric.write",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"],
+    )
+    @convert_model
+    def delete(self, params: MetricDeleteRequest) -> None:
+        """Delete metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            None
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id,
+            params.domain_id,
+            params.workspace_id,
+        )
+
+        if metric_vo.is_managed:
+            raise ERROR_PERMISSION_DENIED()
+
+        self.metric_mgr.delete_metric_by_vo(metric_vo)
+
+    @transaction(
+        permission="inventory-v2:Metric.write",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"],
+    )
+    @convert_model
+    def run(self, params: MetricRunRequest) -> None:
+        """Run query of metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            None
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id,
+            params.domain_id,
+            params.workspace_id,
+        )
+
+        self.metric_mgr.run_metric_query(metric_vo)
+
+    @transaction(
+        permission="inventory-v2:Metric.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"],
+    )
+    @convert_model
+    def test(self, params: MetricTestRequest) -> dict:
+        """Run query of metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'query_options': 'dict',
+                'workspace_id': 'str',          # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            dict: {
+                'results': 'list',
+                'more': 'bool'
+            }
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id,
+            params.domain_id,
+            params.workspace_id,
+        )
+
+        results = self.metric_mgr.analyze_resource(
+            metric_vo, params.workspace_id, params.query_options
+        )
+
+        return {"results": results, "more": False}
+
+    @transaction(
+        permission="inventory-v2:Metric.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @convert_model
+    def get(self, params: MetricGetRequest) -> Union[MetricResponse, dict]:
+        """Get metric
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',             # required
+                'workspace_id': 'list',         # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricResponse:
+        """
+
+        metric_vo = self.metric_mgr.get_metric(
+            params.metric_id,
+            params.domain_id,
+            params.workspace_id,
+        )
+
+        return MetricResponse(**metric_vo.to_dict())
+
+    @transaction(
+        permission="inventory-v2:Metric.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @change_value_by_rule("APPEND", "workspace_id", "*")
+    @append_query_filter(
+        [
+            "metric_id",
+            "metric_type",
+            "resource_type",
+            "is_managed",
+            "namespace_id",
+            "workspace_id",
+            "domain_id",
+        ]
+    )
+    @append_keyword_filter(["metric_id", "name"])
+    @convert_model
+    def list(self, params: MetricSearchQueryRequest) -> Union[MetricsResponse, dict]:
+        """List metrics
+
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.Query)',
+                'metric_id': 'str',
+                'metric_type': 'str',
+                'resource_type': 'str',
+                'is_managed': 'bool',
+                'workspace_id': 'list',         # injected from auth
+                'domain_id': 'str',             # injected from auth (required)
+            }
+
+        Returns:
+            MetricsResponse:
+        """
+
+        query = params.query or {}
+        metric_vos, total_count = self.metric_mgr.list_metrics(query, params.domain_id)
+
+        metrics_info = [metric_vo.to_dict() for metric_vo in metric_vos]
+        return MetricsResponse(results=metrics_info, total_count=total_count)
+
+    @transaction(
+        permission="inventory-v2:Metric.read",
+        role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"],
+    )
+    @append_query_filter(["workspace_id", "domain_id"])
+    @append_keyword_filter(["metric_id", "name"])
+    @convert_model
+    def stat(self, params: MetricStatQueryRequest) -> dict:
+        """
+        Args:
+            params (dict): {
+                'query': 'dict (spaceone.api.core.v1.StatisticsQuery)', # required
+                'workspace_id': 'list',     # injected from auth
+                'domain_id': 'str',         # injected from auth (required)
+            }
+
+        Returns:
+            dict: {
+                'results': 'list',
+                'total_count': 'int'
+            }
+        """
+
+        query = params.query or {}
+        return self.metric_mgr.stat_metrics(query)
+
+    @transaction()
+    def run_metric_query(self, params: dict) -> None:
+        """Run metric query
+
+        Args:
+            params (dict): {
+                'metric_id': 'str',
+                'domain_id': 'str',
+                'is_yesterday': 'bool'
+            }
+
+        Returns:
+            None
+        """
+
+        metric_id = params["metric_id"]
+        domain_id = params["domain_id"]
+        is_yesterday = params.get("is_yesterday", False)
+
+        metric_vo = self.metric_mgr.get_metric(metric_id, domain_id)
+
+        self.metric_mgr.run_metric_query(metric_vo, is_yesterday=is_yesterday)
+
+    @transaction()
+    def run_all_metric_queries(self, params: dict) -> None:
+        """Run all metric queries
+
+        Args:
+            params (dict): {}
+
+        Returns:
+            None
+        """
+
+        for domain_info in self._get_all_domains_info():
+            domain_id = domain_info["domain_id"]
+            try:
+                self.run_metric_query_by_domain(domain_id)
+            except Exception as e:
+                _LOGGER.error(
+                    f"[run_all_metric_queries] query error ({domain_id}): {e}",
+                    exc_info=True,
+                )
+
+    def run_metric_query_by_domain(self, domain_id: str) -> None:
+        self.metric_mgr.create_managed_metric(domain_id)
+        metric_vos = self.metric_mgr.filter_metrics(domain_id=domain_id)
+
+        for metric_vo in metric_vos:
+            self.metric_mgr.push_task(metric_vo, is_yesterday=True)
+
+    @staticmethod
+    def _get_all_domains_info() -> list:
+        identity_mgr = IdentityManager()
+        response = identity_mgr.list_domains({"only": ["domain_id"]})
+        domains_info = response.get("results", [])
+
+        _LOGGER.debug(f"[_get_all_domains_info] target domains: {len(domains_info)}")
+        return domains_info
+
+    @staticmethod
+    def _get_resource_type_from_namespace(namespace_id: str, domain_id: str) -> str:
+        try:
+            namespace_mgr = NamespaceManager()  # FIXME(review): NamespaceManager import is commented out at module top — this raises NameError, silently masked by the broad except below; uncomment the import once the manager module exists
+            namespace_vo = namespace_mgr.get_namespace(namespace_id, domain_id)
+        except Exception as e:
+            raise ERROR_REQUIRED_PARAMETER(key="resource_type")
+
+        return namespace_vo.resource_type
diff --git a/src/spaceone/inventory_v2/service/region_service.py b/src/spaceone/inventory_v2/service/region_service.py
index 43c3db9..6c0e256 100644
--- a/src/spaceone/inventory_v2/service/region_service.py
+++ b/src/spaceone/inventory_v2/service/region_service.py
@@ -4,7 +4,7 @@
 from spaceone.core import utils
 from spaceone.core.model.mongo_model import QuerySet
 from spaceone.inventory_v2.manager.region_manager import RegionManager
-from spaceone.inventory_v2.model.region_model import Region
+from spaceone.inventory_v2.model.region.region_model import Region
 
 _LOGGER = logging.getLogger(__name__)
 _KEYWORD_FILTER = ["region_id", "name", "region_code"]
@@ -82,9 +82,7 @@ def update_resource(self, params: dict) -> Region:
 
         params["updated_by"] = self.transaction.get_meta("collector_id") or "manual"
 
-        region_vo = self.region_mgr.get_region(
-            params["region_id"], params["domain_id"]
-        )
+        region_vo = self.region_mgr.get_region(params["region_id"], params["domain_id"])
         return self.region_mgr.update_region_by_vo(params, region_vo)
 
     @transaction(
@@ -106,9 +104,7 @@ def delete(self, params: dict) -> None:
 
     @check_required(["region_id", "domain_id"])
     def delete_resource(self, params: dict) -> None:
-        region_vo = self.region_mgr.get_region(
-            params["region_id"], params["domain_id"]
-        )
+        region_vo = self.region_mgr.get_region(params["region_id"], params["domain_id"])
         self.region_mgr.delete_region_by_vo(region_vo)
 
     @transaction(
@@ -129,9 +125,7 @@ def get(self, params: dict) -> Region:
 
         """
 
-        return self.region_mgr.get_region(
-            params["region_id"], params["domain_id"]
-        )
+        return self.region_mgr.get_region(params["region_id"], params["domain_id"])
 
     @transaction(
         permission="inventory:Region.read",
@@ -191,4 +185,4 @@ def stat(self, params: dict) -> dict:
         """
 
         query = params.get("query", {})
-        return self.region_mgr.stat_regions(query)
\ No newline at end of file
+        return self.region_mgr.stat_regions(query)