Add simple eventbuffer and generate temporal results for #95 #96

Merged · 6 commits · Mar 11, 2024
Changes from 4 commits
11 changes: 9 additions & 2 deletions pynars/NARS/Control/Reasoner.py
@@ -9,7 +9,7 @@
from pynars.Narsese._py.Budget import Budget
from pynars.Narsese._py.Statement import Statement
from pynars.Narsese._py.Task import Belief
from ..DataStructures import Bag, Memory, NarseseChannel, Buffer, Task, Concept
from ..DataStructures import Bag, Memory, NarseseChannel, Buffer, Task, Concept, EventBuffer
from ..InferenceEngine import GeneralEngine, TemporalEngine, VariableEngine
from pynars import Config
from pynars.Config import Enable
@@ -38,6 +38,7 @@ def __init__(self, n_memory, capacity, config='./config.json', nal_rules={1, 2,
        self.memory = Memory(n_memory, global_eval=self.global_eval)
        self.overall_experience = Buffer(capacity)
        self.internal_experience = Buffer(capacity)
        self.event_buffer = EventBuffer(3)
        self.narsese_channel = NarseseChannel(capacity)
        self.perception_channel = Channel(capacity)
        self.channels: List[Channel] = [
@@ -131,11 +132,17 @@ def observe(self, tasks_derived: List[Task]):
        Process Channels/Buffers
        """
        judgement_revised, goal_revised, answers_question, answers_quest = None, None, None, None
        # step 1. Take out an Item from `Channels`, and then put it into the `Overall Experience`
        # step 1. Take out an Item from `Channels`, and then put it into the `Overall Experience` and the `Event Buffer`
        for channel in self.channels:
            task_in: Task = channel.take()
            if task_in is not None:
                self.overall_experience.put(task_in)
                if self.event_buffer.can_task_enter(task_in):
                    self.event_buffer.put(task_in)
                    # when there's a new event, run the temporal chaining
                    temporal_results = self.event_buffer.generate_temporal_sentences()
                    for result in temporal_results:
                        self.overall_experience.put(result)

        # step 2. Take out an Item from the `Internal Experience` (putting it back afterwards), and then put it
        # into the `Overall Experience`
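Note on the change above: every channel task still goes into the overall experience; tasks that qualify as first-order events additionally enter the event buffer, and the temporal sentences produced by the chaining step are fed back into the overall experience as ordinary tasks. Below is a minimal, self-contained sketch of that feedback loop using stub classes — `StubTask`, `StubEventBuffer`, and `observe_step` are illustrative stand-ins, not part of the PyNARS API:

```python
from typing import List, Optional

class StubTask:
    """Illustrative stand-in for a pynars Task: a label plus an occurrence time."""
    def __init__(self, label: str, t: int, is_event: bool = True):
        self.label, self.t_occurrence, self.is_event = label, t, is_event

class StubEventBuffer:
    """Keeps events time-ordered and derives a simple (A &/ B) per ordered pair."""
    def __init__(self, capacity: int):
        self.items: List[StubTask] = []
        self.capacity = capacity

    def can_task_enter(self, task: StubTask) -> bool:
        return task.is_event

    def put(self, task: StubTask) -> None:
        self.items.append(task)
        self.items.sort(key=lambda t: t.t_occurrence)
        if len(self.items) > self.capacity:
            self.items.pop(0)  # drop the oldest event

    def generate_temporal_sentences(self) -> List[StubTask]:
        # stand-in for the induction rules: one derived compound per ordered pair
        return [StubTask(f"({a.label} &/ {b.label})", b.t_occurrence)
                for i, a in enumerate(self.items) for b in self.items[i + 1:]]

def observe_step(channel_tasks: List[Optional[StubTask]],
                 event_buffer: StubEventBuffer,
                 overall_experience: List[StubTask]) -> None:
    """Mirrors step 1 of Reasoner.observe: route channel tasks, feed results back."""
    for task_in in channel_tasks:
        if task_in is None:
            continue
        overall_experience.append(task_in)
        if event_buffer.can_task_enter(task_in):
            event_buffer.put(task_in)
            # derived temporal results re-enter the overall experience as ordinary tasks
            overall_experience.extend(event_buffer.generate_temporal_sentences())

if __name__ == "__main__":
    buf, experience = StubEventBuffer(3), []
    observe_step([StubTask("A", 1), StubTask("B", 2)], buf, experience)
    print([t.label for t in experience])  # ['A', 'B', '(A &/ B)']
```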
75 changes: 73 additions & 2 deletions pynars/NARS/DataStructures/_py/Buffer.py
@@ -1,8 +1,11 @@
from pynars.NAL.Functions import Truth_intersection, Stamp_merge
from pynars.NAL.Inference.TemporalRules import induction_composition, induction_implication
from .Bag import Bag
from pynars.Config import Config
from pynars.Narsese import Item, Task
from pynars.Narsese import Item, Task, TermType, Compound, Interval, Statement
from pynars.NAL.Functions.BudgetFunctions import *
from typing import Callable, Any
from typing import Callable, Any, List


class Buffer(Bag):
    '''
@@ -40,3 +43,71 @@ def __init__(self, capacity: int, n_buckets: int=None, take_in_order: bool=False

    def is_expired(self, put_time, current_time):
        return (current_time - put_time) > self.max_duration


class EventBuffer:
    '''
    This buffer holds first-order events, sorted by time.
    The purpose of this buffer is to generate temporal implication statements, e.g., ((A &/ B) =/> C),
    and compound events, e.g., (A &/ B).

    The operation for generating temporal statements is exhaustive. That means, for generating 3-component
    implication statements like ((A &/ B) =/> C), the algorithm scales as O(n^3) for n elements.

    The oldest events are at the lowest index; the newest events are at the highest index.
    The larger the event's timestamp, the newer it is.
    '''
    def __init__(self, capacity: int):
        self.buffer: List[Task] = []
        self.capacity: int = capacity

    def generate_temporal_sentences(self):
        results: List[Task] = []
        # first event A occurred, then event B occurred, then event C
        for i in range(len(self.buffer)):
            event_A_task = self.buffer[i]
            for j in range(i + 1, len(self.buffer)):
                # create (A &/ B)
                event_B_task = self.buffer[j]
                compound_event_task = induction_composition(event_A_task, event_B_task)
                results.append(compound_event_task)
                for k in range(j + 1, len(self.buffer)):
                    # create ((A &/ B) =/> C)
                    event_C = self.buffer[k]
                    temporal_implication_task = induction_implication(compound_event_task, event_C)
                    results.append(temporal_implication_task)

        return results

    def put(self, event_task_to_insert: Task):
        if not self.can_task_enter(event_task_to_insert):
            print("ERROR! Only events with first-order statements can enter the EventBuffer.")
            return

        if len(self.buffer) == 0:  # if nothing is in the buffer, just insert it
            self.buffer.append(event_task_to_insert)
            return

        newest_event = self.buffer[-1]

        if event_task_to_insert.stamp.t_occurrence >= newest_event.stamp.t_occurrence:
            # if it's at least as new as the newest event, just append it at the end
            self.buffer.append(event_task_to_insert)
        else:
            # otherwise, walk the list to insert it in time order
            for i in range(len(self.buffer)):
                buffer_event = self.buffer[i]
                if event_task_to_insert.stamp.t_occurrence <= buffer_event.stamp.t_occurrence:
                    # the event to insert occurs no later than this one, so it goes here
                    self.buffer.insert(i, event_task_to_insert)
                    break

        if len(self.buffer) > self.capacity:
            # if there are too many events, drop the oldest one
            self.buffer.pop(0)

    def can_task_enter(self, task: Task):
        return task.is_event \
            and task.term.type == TermType.STATEMENT \
            and not task.term.is_higher_order
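
To make the exhaustive chaining concrete, here is a string-level trace of the triple loop in `generate_temporal_sentences` for a full three-event buffer [A, B, C]. It reproduces only the loop structure; the real method calls `induction_composition` / `induction_implication` and returns `Task` objects rather than strings:

```python
from typing import List

def trace_temporal_sentences(events: List[str]) -> List[str]:
    """String-level trace of EventBuffer.generate_temporal_sentences."""
    results: List[str] = []
    for i in range(len(events)):                     # earliest event A
        for j in range(i + 1, len(events)):          # later event B
            compound = f"({events[i]} &/ {events[j]})"
            results.append(compound)                 # (A &/ B)
            for k in range(j + 1, len(events)):      # even later event C
                results.append(f"({compound} =/> {events[k]})")  # ((A &/ B) =/> C)
    return results

print(trace_temporal_sentences(["A", "B", "C"]))
# ['(A &/ B)', '((A &/ B) =/> C)', '(A &/ C)', '(B &/ C)']
```

With the capacity of 3 used in `Reasoner.__init__`, a full buffer yields at most four derived tasks per chaining pass (three compounds and one implication), so the O(n^3) loop stays cheap in practice.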