6 changes: 6 additions & 0 deletions README.txt
@@ -0,0 +1,6 @@
# LOT

This is the intern version of the project; feel free to play around with this repo.

Language of Thought Modelling of Cognitive Algorithms

Hello World
300 changes: 205 additions & 95 deletions cognitive_functions.py
@@ -1,129 +1,169 @@
import primitive_fucntions as pf
from collections import defaultdict
from utils import Stopwatch, Element, ElementSet, Associations, SpaceComplexity

# 1-D

def iterate(S: ElementSet):  # 112233
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()

    # preprocessing (not part of measured cognitive process)
    n = len(items)  # number of elements in the set

    # persistent container: chunks
    sc_obj.startcounttemp()
    chunks = defaultdict(list)
    stopwatch = Stopwatch()

    # select the attribute on which chunking is based
    # graph algo
    bias = find_bias(items, stopwatch)
    for _ in range(n // 2):
        _ = pf.sample(items)

    # non-graph algo
    bias = find_bias(items, stopwatch)
    stopwatch.start()
    for _ in range(n):
        element = pf.sample(items)  # select an element from the set
        sorter = getattr(element, bias)
        chunks[sorter].append(element)
        pf.setminus(items, element)
    stopwatch.stop()

    sc_obj.startcounttemp()
    n_chunks = len(chunks)  # number of chunks
    chunks_set = {tuple(v) for v in chunks.values()}

    sc_obj.startcounttemp()
    result = []
    stopwatch.start()
    for _ in range(n_chunks):
        chunk = pf.sample(chunks_set)
        sc_obj.startcounttemp()
        temp = list(chunk)  # temporary copy of the chunk
        sc_obj.subtemp()
        result = pf.append(result, temp)
        pf.setminus(chunks_set, chunk)
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()

    return (result, time_elapsed, space_used)

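# The SpaceComplexity API (startcounttemp, subtemp, get_max) comes from utils
# and is not shown in this diff. The sketch below illustrates the assumed
# semantics only; the name is suffixed so it cannot shadow the real class.
class _SpaceComplexitySketch:
    """Assumed semantics: each startcounttemp() registers one live container,
    subtemp() releases the most recent one, get_max() reports the peak."""

    def __init__(self):
        self._live = 0
        self._max = 0

    def startcounttemp(self):
        self._live += 1
        self._max = max(self._max, self._live)

    def subtemp(self):
        self._live = max(0, self._live - 1)

    def get_max(self):
        return self._max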
def palindrome(S):
    # 123321
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()

    # preprocessing (not part of measured cognitive process)
    n = len(items) // 2  # number of elements in the basis
    stopwatch = Stopwatch()

    # select the attribute on which chunking is based
    bias = find_bias(items, stopwatch)

    # persistent containers: basis and its mirrored counterpart
    sc_obj.startcounttemp()
    basis = []
    sc_obj.startcounttemp()
    rev = []

    # write_random() fallback implementation:
    def _pick_matching_element(pool, attr, value):
        sc_obj.startcounttemp()
        candidates = [e for e in pool if getattr(e, attr) == value]
        sc_obj.subtemp()
        if not candidates:
            return None
        return pf.sample(set(candidates))

    stopwatch.start()
    while len(items) > n:
        element = pf.sample(items)
        if len(basis) == 0 or not any(getattr(element, bias) == getattr(chosen, bias) for chosen in basis):
            pf.pair(basis, element)
            pf.setminus(items, element)

    for _ in range(n):
        target_type = getattr(basis[n - 1 - _], bias)
        try:
            element = pf.write_random(items, bias, target_type)
        except Exception:
            element = None
        if element is None:
            element = _pick_matching_element(items, bias, target_type)
        if element is None:
            if len(items) == 0:
                break
            element = pf.sample(items)  # no matching element left; degrade gracefully
        pf.pair(rev, element)
        pf.setminus(items, element)

    result = pf.append(basis, rev)
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()

    return (result, time_elapsed, space_used)

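# Toy illustration of the palindrome contract (plain tuples stand in for
# utils.Element; the project's real types are not shown in this diff):
#
#     basis = [("shape", 1), ("shape", 2), ("shape", 3)]
#     rev = list(reversed(basis))          # second half mirrors the basis
#     print([t for _, t in basis + rev])   # [1, 2, 3, 3, 2, 1]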
def alternate(S):
    # 121212
    # preprocessing (not part of measured cognitive process)
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()
    stopwatch = Stopwatch()

    ### subject knows what types of attributes there are
    ### and which type of attribute to select
    sc_obj.startcounttemp()
    result = []

    # select the attribute on which chunking is based
    bias = find_bias(items, stopwatch, two_flag=True)
    stopwatch.start()
    while len(items) > 0:
        element = pf.sample(items)
        if len(result) == 0 or getattr(element, bias) != getattr(result[-1], bias):
            pf.pair(result, element)
            pf.setminus(items, element)
        else:
            # skip and try again
            continue
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()

    return (result, time_elapsed, space_used)

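# Caveat: the rejection loop in alternate() re-samples until it draws an
# element whose bias value differs from result[-1]. If every remaining
# element shares that value (e.g. an unbalanced set), the loop never
# terminates; the same caveat applies to the basis-building loops above.
# A possible guard, sketched against the same names (not part of this diff):
def _can_extend(items, result, bias):
    # True if some remaining element can legally extend the sequence
    if len(result) == 0:
        return len(items) > 0
    return any(getattr(e, bias) != getattr(result[-1], bias) for e in items)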
def chaining(S, associations: dict):
    # preprocessing (not part of measured cognitive process)
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()
    stopwatch = Stopwatch()

    sc_obj.startcounttemp()
    chunks = []

    stopwatch.start()
    while len(items) > 0:
        element = pf.sample(items)
        if element in associations:
            sc_obj.startcounttemp()  # chunk persists inside chunks
            chunk = []
            pf.pair(chunk, element)
            pf.setminus(items, element)
            while True:
                if len(items) == 0:
                    break
                next_element = pf.sample(items)
                if next_element == associations[element]:
                    pf.pair(chunk, next_element)
                    pf.setminus(items, next_element)
                    pf.pair(chunks, chunk)
                    break
        else:
            # element has no association; skip it
            continue
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()

    return (chunks, time_elapsed, space_used)

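# Self-contained toy illustrating the chaining contract (plain strings stand
# in for utils.Element; random sampling is replaced by a deterministic scan
# for brevity; a sketch, not part of this diff):
def toy_chaining(items: set, associations: dict) -> list:
    chunks = []
    heads = [e for e in sorted(items) if e in associations]
    for head in heads:
        partner = associations[head]
        if head in items and partner in items:
            items -= {head, partner}
            chunks.append([head, partner])
    return chunks

# toy_chaining({"A1", "B1", "A2", "B2"}, {"A1": "B1", "A2": "B2"})
# -> [["A1", "B1"], ["A2", "B2"]]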
def seriate(S):
# 123123
@@ -132,55 +172,125 @@ def seriate(S):
# -------- 2-D -------- #

def serial_crossed(S):
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()
    stopwatch = Stopwatch()

    n = len(items) // 2  # number of elements in the basis

    bias = find_bias(items, stopwatch, higher_dim=True)

    sc_obj.startcounttemp()
    result = []

    stopwatch.start()
    while len(items) > n:
        element = pf.sample(items)
        if len(result) == 0 or getattr(element, bias[0]) == getattr(result[-1], bias[0]):
            pf.pair(result, element)
            pf.setminus(items, element)

    for _ in range(n):
        target_type = getattr(result[_], bias[1])
        # try write_random first, then fall back to a direct scan
        try:
            chosen = pf.write_random(items, bias[1], target_type)
        except Exception:
            chosen = None
        if chosen is None:
            sc_obj.startcounttemp()
            candidates = [e for e in items if getattr(e, bias[1]) == target_type]
            sc_obj.subtemp()
            chosen = pf.sample(set(candidates)) if candidates else None
        if chosen is None:
            if len(items) == 0:
                break
            chosen = pf.sample(items)  # no matching element left; degrade gracefully
        pf.pair(result, chosen)
        pf.setminus(items, chosen)
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()
    return (result, time_elapsed, space_used)

def center_embedded(S):
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()
    stopwatch = Stopwatch()

    n = len(items) // 2  # number of elements in the basis
    bias = find_bias(items, stopwatch, higher_dim=True)

    sc_obj.startcounttemp()
    result = []

    stopwatch.start()
    while len(items) > n:  # leave n elements for the mirrored half
        element = pf.sample(items)
        if len(result) == 0 or getattr(element, bias[0]) == getattr(result[-1], bias[0]):
            pf.pair(result, element)
            pf.setminus(items, element)

    for _ in range(n):
        target_type = getattr(result[n - 1 - _], bias[1])
        # try write_random first, then fall back to a direct scan
        try:
            chosen = pf.write_random(items, bias[1], target_type)
        except Exception:
            chosen = None
        if chosen is None:
            sc_obj.startcounttemp()
            candidates = [e for e in items if getattr(e, bias[1]) == target_type]
            sc_obj.subtemp()
            chosen = pf.sample(set(candidates)) if candidates else None
        if chosen is None:
            if len(items) == 0:
                break
            chosen = pf.sample(items)  # no matching element left; degrade gracefully
        pf.pair(result, chosen)
        pf.setminus(items, chosen)
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()
    return (result, time_elapsed, space_used)

def tail_recursive(S):
    items = _items_from_S(S)
    sc_obj = SpaceComplexity()
    stopwatch = Stopwatch()

    bias = find_bias(items, stopwatch, two_flag=True, higher_dim=True)

    sc_obj.startcounttemp()
    result = []

    stopwatch.start()
    while len(items) > 0:
        element = pf.sample(items)
        pf.setminus(items, element)
        target_type = getattr(element, bias[0])
        # try write_random first, then fall back to a direct scan
        try:
            paired_element = pf.write_random(items, bias[0], target_type)
        except Exception:
            paired_element = None
        if paired_element is None:
            sc_obj.startcounttemp()
            candidates = [e for e in items if getattr(e, bias[0]) == target_type]
            sc_obj.subtemp()
            paired_element = pf.sample(set(candidates)) if candidates else None
        if paired_element is None:
            if len(items) == 0:
                break
            paired_element = pf.sample(items)  # no matching element left; degrade gracefully
        result = pf.append(result, pf.merge(element, paired_element))
        pf.setminus(items, paired_element)
    stopwatch.stop()
    time_elapsed = stopwatch.get_elapsed_time()
    space_used = sc_obj.get_max()
    return (result, time_elapsed, space_used)

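# The try/write_random/scan fallback above now appears in four functions
# (palindrome, serial_crossed, center_embedded, tail_recursive). A possible
# shared helper, sketched under the same assumptions as the inline versions
# (pf.write_random may raise or return None; pf.sample takes a set-like
# pool); a sketch, not part of this diff:
def _write_random_or_fallback(items, attr, target_type, sc_obj):
    try:
        chosen = pf.write_random(items, attr, target_type)
    except Exception:
        chosen = None
    if chosen is None:
        sc_obj.startcounttemp()
        candidates = [e for e in items if getattr(e, attr) == target_type]
        sc_obj.subtemp()
        chosen = pf.sample(set(candidates)) if candidates else None
    return chosen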

# ---------------------------------------------------------------------#