Fixed pagination issue in Model Testing logs (#1084)

Merged: 2 commits, Nov 27, 2023
3 changes: 1 addition & 2 deletions kairon/api/app/routers/bot/bot.py
@@ -688,8 +688,7 @@ async def model_testing_logs(
     """
     List model testing logs.
     """
-    logs = ModelTestingLogProcessor.get_logs(current_user.get_bot(), log_type, reference_id, start_idx, page_size)
-    row_cnt = mongo_processor.get_row_count(ModelTestingLogs, current_user.get_bot())
+    logs, row_cnt = ModelTestingLogProcessor.get_logs(current_user.get_bot(), log_type, reference_id, start_idx, page_size)
     data = {
         "logs": logs,
         "total": row_cnt
5 changes: 3 additions & 2 deletions kairon/shared/data/processor.py
@@ -3588,7 +3588,6 @@ def get_row_count(document: Document, bot: str, **kwargs):
         :param bot: bot id
         :return: Count of rows
         """
-        query = {"bot": bot}
         if document.__name__ == "AuditLogData":
             query = {
                 "attributes": {
@@ -3598,7 +3597,9 @@ def get_row_count(document: Document, bot: str, **kwargs):
                     }
                 }
             }
-        kwargs.update(__raw__=query)
+            kwargs.update(__raw__=query)
+        else:
+            kwargs['bot'] = bot
         return document.objects(**kwargs).count()

     @staticmethod
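
To make the new branching concrete, here is an illustrative call (the bot id is a placeholder; the instance-style call mirrors how the test processor uses the helper later in this PR): for any document other than AuditLogData, extra keyword filters are passed straight through to objects() with the bot filter added alongside them.

    # Illustrative only: for a non-audit document the call below resolves to
    # ModelTestingLogs.objects(type='common', bot='my_bot').count().
    from kairon.shared.data.processor import MongoProcessor
    from kairon.shared.test.data_objects import ModelTestingLogs

    processor = MongoProcessor()
    total = processor.get_row_count(ModelTestingLogs, "my_bot", type="common")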
21 changes: 15 additions & 6 deletions kairon/shared/test/processor.py
@@ -8,6 +8,7 @@
 from kairon.exceptions import AppException
 from kairon.shared.data.constant import EVENT_STATUS, ModelTestingLogType
 from kairon.shared.data.data_objects import BotSettings
+from kairon.shared.data.processor import MongoProcessor
 from kairon.shared.test.data_objects import ModelTestingLogs


@@ -98,7 +99,6 @@ def is_limit_exceeded(bot: str, raise_exception=True):
         @param raise_exception: Raise exception if event is in progress.
         @return: boolean flag
         """
-        from kairon.shared.utils import Utility

         today = datetime.today()
         initiated_today = today.replace(hour=0, minute=0, second=0)
@@ -127,10 +127,10 @@ def get_logs(bot: str, log_type: str = None, reference_id: str = None, start_idx
         from kairon.shared.utils import Utility

         if not Utility.check_empty_string(log_type) and not Utility.check_empty_string(reference_id):
-            logs = ModelTestingLogProcessor.get_by_id_and_type(reference_id, bot, log_type, start_idx, page_size)
+            logs, row_count = ModelTestingLogProcessor.get_by_id_and_type(reference_id, bot, log_type, start_idx, page_size)
         else:
-            logs = ModelTestingLogProcessor.get_all(bot, start_idx, page_size)
-        return logs
+            logs, row_count = ModelTestingLogProcessor.get_all(bot, start_idx, page_size)
+        return logs, row_count

     @staticmethod
     def get_all(bot: str, start_idx: int = 0, page_size: int = 10):
@@ -141,7 +141,9 @@ def get_all(bot: str, start_idx: int = 0, page_size: int = 10):
         @param start_idx: start index in list field
         @param page_size: number of rows from start index
         """
-        return list(ModelTestingLogs.objects(bot=bot).aggregate([
+        processor = MongoProcessor()
+        kwargs = {'type': 'common'}
+        logs = list(ModelTestingLogs.objects(bot=bot).aggregate([
             {"$set": {"data.type": "$type"}},
             {'$group': {'_id': '$reference_id', 'bot': {'$first': '$bot'}, 'user': {'$first': '$user'},
                         'status': {'$first': '$status'},
@@ -157,6 +159,8 @@ def get_all(bot: str, start_idx: int = 0, page_size: int = 10):
                         'is_augmented': 1
                         }},
             {"$sort": {"start_timestamp": -1}}]))[start_idx:start_idx+page_size]
+        row_count = processor.get_row_count(ModelTestingLogs, bot, **kwargs)
+        return logs, row_count

     @staticmethod
     def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: int = 0, page_size: int = 10):
@@ -170,6 +174,7 @@ def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: in
         @return: list of logs.
         """
         logs = []
+        row_count = 0
         filter_log_type = 'stories' if log_type == 'stories' else 'nlu'
         filtered_data = ModelTestingLogs.objects(reference_id=reference_id, bot=bot, type=filter_log_type)
         if log_type == ModelTestingLogType.stories.value and filtered_data:
@@ -182,6 +187,7 @@ def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: in
             if fail_cnt:
                 logs = json.dumps(logs)
                 logs = json.loads(logs)
+            row_count = fail_cnt
         elif log_type == ModelTestingLogType.nlu.value and filtered_data:
             intent_failures = []
             intent_failure_cnt, intent_success_cnt, intent_total_cnt = 0, 0, 0
@@ -199,6 +205,7 @@ def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: in
             if intent_failure_cnt:
                 logs = json.dumps(logs)
                 logs = json.loads(logs)
+            row_count = intent_failure_cnt
         elif log_type in {ModelTestingLogType.entity_evaluation_with_diet_classifier.value,
                           ModelTestingLogType.entity_evaluation_with_regex_entity_extractor.value} and filtered_data:
             entity_failures = []
@@ -217,6 +224,7 @@ def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: in
             if entity_failure_cnt:
                 logs = json.dumps(logs)
                 logs = json.loads(logs)
+            row_count = entity_failure_cnt
         elif log_type == ModelTestingLogType.response_selection_evaluation.value and filtered_data:
             response_selection_failures = []
             response_selection_failure_cnt, response_selection_success_cnt, response_selection_total_cnt = 0, 0, 0
@@ -237,7 +245,8 @@ def get_by_id_and_type(reference_id: str, bot: str, log_type: str, start_idx: in
             if response_selection_failure_cnt:
                 logs = json.dumps(logs)
                 logs = json.loads(logs)
-        return logs
+            row_count = response_selection_failure_cnt
+        return logs, row_count

     @staticmethod
     def delete_enqueued_event_log(bot: str):
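
Both branches of get_logs now hand back a (logs, row_count) pair, so callers unpack it instead of issuing a second count query. A short usage sketch (bot id, reference id, and page size are placeholders); note that for type-specific queries row_count is the failure count for that log type, which is what the tests below assert on:

    from kairon.shared.test.processor import ModelTestingLogProcessor

    # Summary view: row_count is the number of test-run log documents for the bot,
    # so it can drive pagination controls directly.
    logs, row_count = ModelTestingLogProcessor.get_logs("my_bot", start_idx=0, page_size=10)
    total_pages = (row_count + 10 - 1) // 10  # ceiling division

    # Type-specific view: here row_count reflects the failed-intent count for 'nlu'.
    nlu_logs, nlu_failures = ModelTestingLogProcessor.get_logs("my_bot", "nlu", "some-reference-id")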
38 changes: 20 additions & 18 deletions tests/unit_test/data_processor/model_testing_test.py
@@ -8,11 +8,11 @@
 from rasa.shared.importers.rasa import RasaFileImporter

 from augmentation.paraphrase.paraphrasing import ParaPhrasing
-from kairon.shared.data.data_objects import BotSettings
-from kairon.shared.utils import Utility
 from kairon.exceptions import AppException
+from kairon.shared.data.data_objects import BotSettings
+from kairon.shared.data.processor import MongoProcessor
 from kairon.shared.test.processor import ModelTestingLogProcessor
 from kairon.shared.utils import Utility
 from kairon.test.test_models import ModelTester, TestDataGenerator


@@ -65,7 +65,8 @@ async def test_run_test_on_stories_failure(self):
                 stories_result=result,
                 nlu_result={},
                 event_status='Completed')
-        logs = list(ModelTestingLogProcessor.get_logs('test_bot'))
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot')
+        print(logs)
         assert logs[0]['data'][0]['conversation_accuracy']['success_count'] == 3
         assert logs[0]['data'][0]['conversation_accuracy']['failure_count'] == 2
         assert logs[0]['data'][0]['conversation_accuracy']['total_count'] == 5
@@ -77,7 +78,7 @@ async def test_run_test_on_stories_failure(self):
         assert logs[0].get('end_timestamp')
         assert logs[0].get('status') == 'FAILURE'
         assert logs[0]['event_status'] == 'Completed'
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'stories', logs[0]['reference_id'])
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'stories', logs[0]['reference_id'])
         assert len(logs['errors']) == 2
         assert logs['failure_count'] == 2
         assert logs['success_count'] == 3
@@ -120,7 +121,8 @@ def test_run_test_on_nlu_failure(self):
                 stories_result={},
                 nlu_result=result,
                 event_status='Completed')
-        logs1 = list(ModelTestingLogProcessor.get_logs('test_bot'))
+        logs1, row_count = ModelTestingLogProcessor.get_logs('test_bot')
+        print(logs1)
         assert logs1[0]['data'][0]['intent_evaluation']['success_count'] == 29
         assert logs1[0]['data'][0]['intent_evaluation']['failure_count'] == 23
         assert logs1[0]['data'][0]['intent_evaluation']['total_count'] == 52
@@ -141,46 +143,46 @@ def test_run_test_on_nlu_failure(self):
         assert logs1[0].get('end_timestamp')
         assert logs1[0].get('status') == 'FAILURE'
         assert logs1[0]['event_status'] == 'Completed'
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs1[0]['reference_id'])
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs1[0]['reference_id'])
         assert len(logs['intent_evaluation']['errors']) == 10
         assert logs['intent_evaluation']['failure_count'] == 23
         assert logs['intent_evaluation']['success_count'] == 29
         assert logs['intent_evaluation']['total_count'] == 52
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
                 logs1[0]['reference_id'])
         assert len(logs['entity_evaluation']['errors']) == 2
         assert logs['entity_evaluation']['failure_count'] == 2
         assert logs['entity_evaluation']['success_count'] == 2
         assert logs['entity_evaluation']['total_count'] == 4
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
                 logs1[0]['reference_id'])
         assert len(logs['entity_evaluation']['errors']) == 0
         assert logs['entity_evaluation']['failure_count'] == 0
         assert logs['entity_evaluation']['success_count'] == 0
         assert logs['entity_evaluation']['total_count'] == 0
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation', logs1[0]['reference_id'])
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation', logs1[0]['reference_id'])
         assert len(logs['response_selection_evaluation']['errors']) == 5
         assert logs['response_selection_evaluation']['failure_count'] == 5
         assert logs['response_selection_evaluation']['success_count'] == 0
         assert logs['response_selection_evaluation']['total_count'] == 5
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs1[0]['reference_id'], 10, 15)
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs1[0]['reference_id'], 10, 15)
         assert len(logs['intent_evaluation']['errors']) == 13
         assert logs['intent_evaluation']['failure_count'] == 23
         assert logs['intent_evaluation']['success_count'] == 29
         assert logs['intent_evaluation']['total_count'] == 52
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
                 logs1[0]['reference_id'], 10, 15)
         assert len(logs['entity_evaluation']['errors']) == 0
         assert logs['entity_evaluation']['failure_count'] == 2
         assert logs['entity_evaluation']['success_count'] == 2
         assert logs['entity_evaluation']['total_count'] == 4
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
                 logs1[0]['reference_id'], 10, 15)
         assert len(logs['entity_evaluation']['errors']) == 0
         assert logs['entity_evaluation']['failure_count'] == 0
         assert logs['entity_evaluation']['success_count'] == 0
         assert logs['entity_evaluation']['total_count'] == 0
-        logs = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation',
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation',
                 logs1[0]['reference_id'], 10, 15)
         assert len(logs['response_selection_evaluation']['errors']) == 0
         assert logs['response_selection_evaluation']['failure_count'] == 5
@@ -193,19 +195,19 @@ def test_run_test_on_nlu_failure(self):
                 stories_result={},
                 nlu_result=result,
                 event_status='Completed')
-        logs = ModelTestingLogProcessor.get_logs('test_bot')
-        logs2 = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs[0]['reference_id'])
+        logs, row_count = ModelTestingLogProcessor.get_logs('test_bot')
+        logs2, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'nlu', logs[0]['reference_id'])
         assert len(logs2['intent_evaluation']['errors']) == 10
         assert logs2['intent_evaluation']['failure_count'] == 23
         assert logs2['intent_evaluation']['success_count'] == 29
         assert logs2['intent_evaluation']['total_count'] == 52
-        logs2 = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
+        logs2, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_diet_classifier',
                 logs[0]['reference_id'])
         assert logs2['entity_evaluation'] == {'errors': [], 'failure_count': 0, 'success_count': 0, 'total_count': 0}
-        logs2 = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
+        logs2, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'entity_evaluation_with_regex_entity_extractor',
                 logs[0]['reference_id'])
         assert logs2['entity_evaluation'] == {'errors': [], 'failure_count': 0, 'success_count': 0, 'total_count': 0}
-        logs2 = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation', logs[0]['reference_id'])
+        logs2, row_count = ModelTestingLogProcessor.get_logs('test_bot', 'response_selection_evaluation', logs[0]['reference_id'])
         assert logs2['response_selection_evaluation'] == {'errors': [], 'failure_count': 0,
                 'success_count': 0, 'total_count': 0}
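
If a dedicated regression test for the tuple contract is wanted on top of these updated assertions, a minimal pytest-style sketch could look like the following; it assumes the suite's usual environment/mongomock fixtures are in place and uses a bot id with no stored logs (both assumptions):

    from kairon.shared.test.processor import ModelTestingLogProcessor

    def test_get_logs_always_returns_logs_and_row_count():
        # Even with nothing stored, the processor should return an empty page
        # plus a zero total rather than a bare list.
        logs, row_count = ModelTestingLogProcessor.get_logs("bot_with_no_logs")
        assert logs == []
        assert row_count == 0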

40 changes: 19 additions & 21 deletions tests/unit_test/events/definitions_test.py
@@ -4,6 +4,8 @@
 from io import BytesIO
 from urllib.parse import urljoin

+import mock
+import mongomock
 import pytest
 import responses
 from fastapi import UploadFile
@@ -14,6 +16,7 @@
 from kairon import Utility
 from kairon.events.definitions.data_generator import DataGenerationEvent
 from kairon.events.definitions.data_importer import TrainingDataImporterEvent
+from kairon.events.definitions.faq_importer import FaqDataImporterEvent
 from kairon.events.definitions.history_delete import DeleteHistoryEvent
 from kairon.events.definitions.message_broadcast import MessageBroadcastEvent
 from kairon.events.definitions.model_testing import ModelTestingEvent
@@ -26,21 +29,16 @@
 from kairon.shared.constants import EventClass, EventRequestType
 from kairon.shared.data.constant import EVENT_STATUS, TrainingDataSourceType
 from kairon.shared.data.data_objects import EndPointHistory, Endpoints, BotSettings
+from kairon.shared.data.data_objects import StoryEvents, Rules
 from kairon.shared.data.history_log_processor import HistoryDeletionLogProcessor
 from kairon.shared.data.model_processor import ModelProcessor
+from kairon.shared.data.processor import MongoProcessor
 from kairon.shared.data.training_data_generation_processor import TrainingDataGenerationProcessor
 from kairon.shared.data.utils import DataUtility
 from kairon.shared.importer.processor import DataImporterLogProcessor
 from kairon.shared.multilingual.processor import MultilingualLogProcessor
 from kairon.shared.test.processor import ModelTestingLogProcessor

-from kairon.shared.data.data_objects import StoryEvents, Rules
-from kairon.shared.data.processor import MongoProcessor
-import mock
-import mongomock
-
-from kairon.events.definitions.faq_importer import FaqDataImporterEvent
-

 class TestEventDefinitions:

@@ -460,8 +458,8 @@ def _mock_validation(*args, **kwargs):
             return None
         monkeypatch.setattr(Utility, "is_model_file_exists", _mock_validation)
         ModelTestingEvent(bot, user).validate()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 0
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 0

     @responses.activate
     def test_model_testing_enqueue(self):
@@ -482,8 +480,8 @@ def test_model_testing_enqueue(self):
         body = json.loads(body.decode())
         assert body["data"]['bot'] == bot
         assert body["data"]['user'] == user
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 1
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 1
         assert logs[0]['event_status'] == EVENT_STATUS.ENQUEUED.value
         assert logs[0]['is_augmented'] is True

@@ -496,8 +494,8 @@ def _mock_validation(*args, **kwargs):
         monkeypatch.setattr(Utility, "is_model_file_exists", _mock_validation)
         with pytest.raises(AppException, match='Event already in progress! Check logs.'):
             ModelTestingEvent(bot, user).validate()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 1
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 1
         assert logs[0]['event_status'] == EVENT_STATUS.ENQUEUED.value

     def test_model_testing_presteps_event_limit_reached(self, monkeypatch):
@@ -512,8 +510,8 @@ def _mock_validation(*args, **kwargs):
         bot_settings.save()
         with pytest.raises(AppException, match='Daily limit exceeded.'):
             ModelTestingEvent(bot, user).validate()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 0
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 0

     @responses.activate
     def test_model_testing_enqueue_event_server_failure(self):
@@ -525,8 +523,8 @@ def test_model_testing_enqueue_event_server_failure(self):
         )
         with pytest.raises(AppException, match='Failed to trigger model_testing event: Failed'):
             ModelTestingEvent(bot, user).enqueue()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 0
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 0

     def test_model_testing_enqueue_connection_failure(self, monkeypatch):
         bot = 'test_definitions'
@@ -537,13 +535,13 @@ def _mock_validation(*args, **kwargs):

         monkeypatch.setattr(Utility, "is_model_file_exists", _mock_validation)
         ModelTestingEvent(bot, user).validate()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 0
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 0

         with pytest.raises(AppException, match='Failed to connect to service: *'):
             ModelTestingEvent(bot, user).enqueue()
-        logs = list(ModelTestingLogProcessor.get_logs(bot))
-        assert len(logs) == 0
+        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
+        assert row_count == 0

     def test_delete_history_presteps_validate_endpoint(self):
         bot = 'test_definitions_unmanaged'
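
All of the event-definition tests above now follow one pattern: the enqueue state is checked through the returned row_count rather than len(logs). A small helper capturing that pattern, purely illustrative and not part of the PR:

    from kairon.shared.data.constant import EVENT_STATUS
    from kairon.shared.test.processor import ModelTestingLogProcessor

    def assert_single_enqueued_log(bot: str):
        # One log row expected, and it should still be waiting in the queue.
        logs, row_count = ModelTestingLogProcessor.get_logs(bot)
        assert row_count == 1
        assert logs[0]['event_status'] == EVENT_STATUS.ENQUEUED.value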