do not always warn on map_item error (for example when getting dataset keys which is done A LOT)
dale-wahl committed Sep 13, 2023
1 parent 1725a92 commit b32ddca
Showing 2 changed files with 11 additions and 11 deletions.
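
In short: DataSet.iterate_items() and iterate_mapped_items() gain a warn_unmappable flag (default True), and get_item_keys() now passes warn_unmappable=False so that merely reading a dataset's field names no longer emits a MapItemException warning for every unmappable row. A minimal usage sketch of the new flag, assuming an existing DataSet instance `dataset`, a `processor`, and a hypothetical per-item `process()` helper (none of these names are introduced by the commit):

# Default behaviour is unchanged: unmappable items still trigger
# warn_unmappable_item() before being skipped.
for item in dataset.iterate_items(processor):
    process(item)  # hypothetical per-item work

# Callers that expect unmappable rows as a matter of course (e.g. when only
# the field names are needed, as get_item_keys now does) can mute the warnings:
items = dataset.iterate_items(processor, warn_unmappable=False)
first_item = next(items, {})
keys = list(first_item.keys())
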
3 changes: 0 additions & 3 deletions backend/lib/processor.py
@@ -657,9 +657,6 @@ def get_mapped_item(cls, item):
         Ensure map_item method is compatible with a dataset by checking map_item_method_available first.
         """
-        if not cls.map_item_method_available(item):
-            raise MapItemException("map_item method not available for %s" % item)
-
         mapped_item = cls.map_item(item)
         if not mapped_item:
             raise MapItemException("Unable to map item!")
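
With the availability check removed from get_mapped_item(), the method now simply calls map_item() and raises MapItemException if the result is empty; per its own docstring, callers are expected to check map_item_method_available() first. A rough caller-side sketch under that assumption (`own_processor` and `item` are the names used on the calling side in dataset.py; the surrounding control flow is an assumption, not code from this commit):

# Sketch of the calling pattern, not code from this commit.
if own_processor and own_processor.map_item_method_available(item):
    try:
        item = own_processor.get_mapped_item(item)
    except MapItemException:
        pass  # caller decides whether to warn and/or skip the item
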
19 changes: 11 additions & 8 deletions common/lib/dataset.py
@@ -252,7 +252,7 @@ def get_log_iterator(self):
             logmsg = ":".join(line.split(":")[1:])
             yield (logtime, logmsg)

-    def iterate_items(self, processor=None, bypass_map_item=False):
+    def iterate_items(self, processor=None, bypass_map_item=False, warn_unmappable=True):
         """
         A generator that iterates through a CSV or NDJSON file
@@ -307,7 +307,8 @@ def iterate_items(self, processor=None, bypass_map_item=False):
             try:
                 item = own_processor.get_mapped_item(item)
             except MapItemException as e:
-                self.warn_unmappable_item(i, processor, e)
+                if warn_unmappable:
+                    self.warn_unmappable_item(i, processor, e)
                 continue

             yield item
@@ -325,15 +326,16 @@ def iterate_items(self, processor=None, bypass_map_item=False):
             try:
                 item = own_processor.get_mapped_item(item)
             except MapItemException as e:
-                self.warn_unmappable_item(i, processor, e)
+                if warn_unmappable:
+                    self.warn_unmappable_item(i, processor, e)
                 continue

             yield item

         else:
             raise NotImplementedError("Cannot iterate through %s file" % path.suffix)

-    def iterate_mapped_items(self, processor=None):
+    def iterate_mapped_items(self, processor=None, warn_unmappable=True):
         """
         Wrapper for iterate_items that returns both the original item and the mapped item (or else the same identical item).
         No extension check is performed here as the point is to be able to handle the original object and save as an appropriate
@@ -359,7 +361,8 @@ def iterate_mapped_items(self, processor=None):
             try:
                 mapped_item = own_processor.get_mapped_item(item)
             except MapItemException as e:
-                self.warn_unmappable_item(i, processor, e)
+                if warn_unmappable:
+                    self.warn_unmappable_item(i, processor, e)
                 continue
         else:
             mapped_item = original_item
@@ -382,7 +385,7 @@ def get_item_keys(self, processor=None):
         dataset
         """

-        items = self.iterate_items(processor)
+        items = self.iterate_items(processor, warn_unmappable=False)
         try:
             keys = list(items.__next__().keys())
         except StopIteration:
@@ -1487,10 +1490,10 @@ def warn_unmappable_item(self, item_count, processor=None, error_message=None):
         Log an item that is unable to be mapped and warn administrators.
         :param int item_count: Item index
-        :param Processor processor: Processor calling function
+        :param Processor processor: Processor calling function8
         """
         dataset_error_message = f"MapItemException (item {item_count}): {'is unable to be mapped! Check raw datafile.' if error_message is None else error_message}"
-        if processor is not None:
+        if processor is not None and processor.dataset is not None:
             # Log to dataset that is using map_item
             processor.dataset.log(dataset_error_message)
         else:
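
The extra processor.dataset check in warn_unmappable_item() matters because the method dereferences processor.dataset directly; a purely illustrative sketch of the failure mode the new guard avoids (StubProcessor is a made-up class for the example, not part of 4CAT):

# Illustrative only: a processor that has no dataset attached.
class StubProcessor:
    dataset = None

processor = StubProcessor()

# Old guard: `if processor is not None:` passes, and
# processor.dataset.log(...) then raises AttributeError on None.
# New guard: `if processor is not None and processor.dataset is not None:`
# sends this case to the else branch (not shown in the hunk) instead.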
