In [1]:
#Homework 4
#Ethan Witkowski
In [1]:
import numpy as np
import random as rnd
from statistics import mean
from psychopy import visual, core, event
pygame 1.9.6
Hello from the pygame community. https://www.pygame.org/contribute.html

1

In [2]:
#Randomizes the sequence of conditions across blocks of an experiment
#The first condition of a block never repeats the last condition of the prior block

def exp_randomization(nblocks,nconds):
    block_list = []
    
    #First block: a random permutation of the conditions 1..nconds
    init_condition = np.random.choice(range(1,nconds+1), size=(1,nconds), replace=False)
    block_list.append(init_condition)

    for block in range(nblocks-1):

        while True:

            #Random condition sequence for the current block
            condition_list = np.random.choice(range(1,nconds+1), size=(1,nconds), replace=False)

            prior_block = block_list[-1]
            
            #If the first condition repeats the prior block's last condition, redraw
            if condition_list[0][0] == prior_block[-1][-1]:
                continue

            #Append random condition to list of blocks
            else:
                block_list.append(condition_list)
                break

    return block_list
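As a cross-check, the same constraint can be written with flat 1-D permutations, avoiding the nested [0][0] indexing; this is only a sketch of an equivalent approach (the name exp_randomization_flat is hypothetical, not part of the assignment):

def exp_randomization_flat(nblocks, nconds):
    #First block: a random permutation of conditions 1..nconds
    blocks = [np.random.permutation(np.arange(1, nconds + 1))]
    while len(blocks) < nblocks:
        candidate = np.random.permutation(np.arange(1, nconds + 1))
        #Accept only if the first condition does not repeat the
        #previous block's last condition
        if candidate[0] != blocks[-1][-1]:
            blocks.append(candidate)
    return blocks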
In [3]:
nblocks = input("Please input the number of blocks: ")
nconds = input("Please input the number of conditions: ")
Please input the number of blocks: 3
Please input the number of conditions: 3
In [6]:
nblocks = int(nblocks)
nconds = int(nconds)

exp_conditions = exp_randomization(nblocks,nconds)
print(exp_conditions)
[array([[2, 3, 1]]), array([[3, 1, 2]]), array([[3, 2, 1]])]
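A quick sanity check on the returned blocks, assuming exp_randomization returns block_list as above (each entry a 1-by-nconds array):

#No block should start with the previous block's last condition
for prev, nxt in zip(exp_conditions, exp_conditions[1:]):
    assert nxt[0][0] != prev[0][-1]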

2

In [7]:
#Read in the data file
with open('midterm_catfr_data.txt', 'r') as f:
    full_data = f.readlines()
In [8]:
#Strip whitespace and split each line into its comma-separated fields
for i, observation in enumerate(full_data):
    full_data[i] = observation.strip().split(',')

#Sample (last) parsed row
print(full_data[i])
['R1449T', 'catFR1', '1', '12', '12', 'DOVE', 'Birds', '1']
In [9]:
#list to hold variable values
data = []

for i in range(len(full_data[0])):              
    data.append([])                   #Create new list for each variable

    for sub_list in full_data:
        data[i].append(sub_list[i])   #Append ith element in sub_list to correct variable

#Declare variable names
subjects, experiment, sess, list_number, serial_position, word_pres, category, recall = data

#List of unique categories
unique_category = list(set(category))

#List of unique subjects
unique_subjects = list(set(subjects))
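The same row-to-column transposition can be done in one line with zip; a minimal equivalent sketch (data_alt is a hypothetical name for comparison only):

#zip(*rows) pairs up the ith field of every row, turning rows into columns
data_alt = [list(column) for column in zip(*full_data)]
assert data_alt == data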
In [10]:
#Trial level data np arrays

np_categories = np.array(category)
np_subjects = np.array(subjects)
np_recall = np.array(recall)

#Unique categories/subjects
np_u_categories = np.unique(np_categories)
np_u_subjects = np.unique(np_subjects)
In [11]:
#Pre-allocate a subjects-by-categories array of mean recall values,
#initialized to NaN so any missing subject/category combination stays visible

np_subjects_mean_recall_by_cat = np.full([len(np_u_subjects), len(np_u_categories)], np.nan)
In [12]:
#Obtain each subject's performance recall for each category

for i,u_subjects in enumerate(np_u_subjects):
    
    for j,u_categories in enumerate(np_u_categories):
        
        #Boolean mask over all trials: True where both subject and category match
        subj_and_cat_match = np.logical_and(np_subjects==u_subjects,np_categories==u_categories)
        
        #Index np_recall with the mask: the subject's 0/1 recalls for this category
        recall_index = np_recall[subj_and_cat_match]
        
        #Convert from strings to floats (np.float is deprecated in recent numpy)
        recall_index = recall_index.astype(float)
        
        #The mean of the 0/1 recall values is the subject's recall rate for
        #this category; combinations with no trials are left as NaN
        if len(recall_index) > 0:
            np_subjects_mean_recall_by_cat[i,j] = np.mean(recall_index)
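A spot-check of one cell of the matrix, assuming the first subject has at least one trial in the first category:

#Recompute the first subject/category mean directly and compare
mask = np.logical_and(np_subjects == np_u_subjects[0],
                      np_categories == np_u_categories[0])
assert np.isclose(np_subjects_mean_recall_by_cat[0, 0],
                  np_recall[mask].astype(float).mean())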
In [13]:
#Transpose so each row is a category and each column a subject's mean recall
np_t_subjects_mean_recall_by_cat = np.transpose(np_subjects_mean_recall_by_cat)
np.shape(np_t_subjects_mean_recall_by_cat)
Out[13]:
(26, 186)
In [14]:
#Pre-allocate the category-by-category matrix of correlation coefficients

ncats = len(np_u_categories)
corr_cat_matrix = np.empty([ncats, ncats])
corr_cat_matrix[:] = np.nan
In [15]:
#Fill the matrix with the recall correlation for each pair of categories

for m,cat in enumerate(np_t_subjects_mean_recall_by_cat):

    for n,cat_2 in enumerate(np_t_subjects_mean_recall_by_cat):
        corr_cat_matrix[m,n] = np.corrcoef(cat,cat_2)[1,0]

print(len(corr_cat_matrix))
print(len(corr_cat_matrix[0]))
print(corr_cat_matrix)
26
26
[[ 1.          0.33620532  0.47344002  0.57628744  0.44385186  0.40555807
   0.34773137  0.3012153  -0.02958102  0.39140204  0.37793233  0.44829428
   0.44017189  0.32723223  0.49451358  0.41945275  0.45664382  0.25059239
   0.41820855  0.47016521  0.43204532  0.35789562  0.32501954  0.30467306
   0.33924985  0.42425596]
 [ 0.33620532  1.          0.31899512  0.51651602  0.35514625  0.36644797
   0.42716067  0.27507438 -0.02172599  0.48325054  0.36920256  0.42352439
   0.37505843  0.38715487  0.44214568  0.40236885  0.4638761   0.36101879
   0.49222841  0.35074932  0.47487193  0.39731933  0.22278141  0.43071002
   0.42488471  0.3419053 ]
 [ 0.47344002  0.31899512  1.          0.40527716  0.49067997  0.34963534
   0.39285716  0.177379    0.03746076  0.4406243   0.42250268  0.43289163
   0.42228666  0.31725066  0.34008002  0.34785715  0.4586931   0.35041043
   0.36173086  0.42301312  0.3144575   0.35951562  0.23255939  0.35043107
   0.34507766  0.45759506]
 [ 0.57628744  0.51651602  0.40527716  1.          0.47016072  0.49718649
   0.34162659  0.21059127  0.10666452  0.50754669  0.38329172  0.51270019
   0.44902882  0.37371539  0.49095177  0.35405047  0.48754442  0.33168236
   0.44021906  0.42580337  0.43635995  0.34652381  0.32840947  0.36683628
   0.39441992  0.38170724]
 [ 0.44385186  0.35514625  0.49067997  0.47016072  1.          0.45633865
   0.37618938  0.2951877   0.06517999  0.40150408  0.38548085  0.32989218
   0.564482    0.44900484  0.42572589  0.37523041  0.54705993  0.38735636
   0.40958108  0.42169103  0.41475756  0.35507993  0.33204416  0.3900047
   0.37988848  0.41700677]
 [ 0.40555807  0.36644797  0.34963534  0.49718649  0.45633865  1.
   0.46607236  0.34383497  0.15669772  0.4398363   0.3876267   0.47514801
   0.43919993  0.37875284  0.33616597  0.38181709  0.45011806  0.34902374
   0.4054882   0.41642432  0.45844922  0.47348609  0.24964651  0.4046647
   0.36936625  0.29433596]
 [ 0.34773137  0.42716067  0.39285716  0.34162659  0.37618938  0.46607236
   1.          0.33963767  0.10057694  0.49148873  0.41856581  0.44141034
   0.40017702  0.36453418  0.3980534   0.41675827  0.46737737  0.43745591
   0.44116727  0.34175616  0.34812152  0.38204354  0.33979611  0.56949023
   0.44734686  0.37725506]
 [ 0.3012153   0.27507438  0.177379    0.21059127  0.2951877   0.34383497
   0.33963767  1.          0.14370276  0.23967848  0.18865235  0.14949167
   0.20909858  0.27979639  0.17737122  0.23855989  0.23616107  0.17465397
   0.37587673  0.32397789  0.27963586  0.2149836   0.155632    0.26071862
   0.35672164  0.21028487]
 [-0.02958102 -0.02172599  0.03746076  0.10666452  0.06517999  0.15669772
   0.10057694  0.14370276  1.          0.12884684 -0.01970788 -0.0154581
   0.06816997  0.14476875  0.09080619 -0.01655157 -0.02766082  0.04772994
   0.03772168  0.18770901  0.01069582  0.19976065  0.11202823  0.148186
   0.03005424 -0.0587806 ]
 [ 0.39140204  0.48325054  0.4406243   0.50754669  0.40150408  0.4398363
   0.49148873  0.23967848  0.12884684  1.          0.47019726  0.50960969
   0.40536187  0.49177847  0.39037419  0.45121158  0.59957798  0.42406252
   0.44899236  0.42478112  0.44888547  0.41280475  0.32649194  0.46960816
   0.38240919  0.45133378]
 [ 0.37793233  0.36920256  0.42250268  0.38329172  0.38548085  0.3876267
   0.41856581  0.18865235 -0.01970788  0.47019726  1.          0.34276033
   0.48932317  0.46995025  0.38895762  0.61639207  0.55420142  0.40008599
   0.38921059  0.38732316  0.38440385  0.53969463  0.3195792   0.4569279
   0.34173335  0.36874778]
 [ 0.44829428  0.42352439  0.43289163  0.51270019  0.32989218  0.47514801
   0.44141034  0.14949167 -0.0154581   0.50960969  0.34276033  1.
   0.34806851  0.38588787  0.37020844  0.20727429  0.46818432  0.32655948
   0.43343204  0.34701068  0.35913978  0.28790054  0.25224029  0.3435722
   0.27433844  0.40132481]
 [ 0.44017189  0.37505843  0.42228666  0.44902882  0.564482    0.43919993
   0.40017702  0.20909858  0.06816997  0.40536187  0.48932317  0.34806851
   1.          0.48436195  0.382899    0.41917533  0.50937024  0.40008781
   0.42580919  0.46330196  0.40741774  0.4066558   0.34102969  0.45158042
   0.45633677  0.39571153]
 [ 0.32723223  0.38715487  0.31725066  0.37371539  0.44900484  0.37875284
   0.36453418  0.27979639  0.14476875  0.49177847  0.46995025  0.38588787
   0.48436195  1.          0.45619063  0.41030819  0.52455002  0.4972402
   0.4398541   0.44933431  0.38766664  0.40173284  0.36702232  0.35957653
   0.40409307  0.4505299 ]
 [ 0.49451358  0.44214568  0.34008002  0.49095177  0.42572589  0.33616597
   0.3980534   0.17737122  0.09080619  0.39037419  0.38895762  0.37020844
   0.382899    0.45619063  1.          0.42475193  0.37172879  0.31530718
   0.39833656  0.40923364  0.36860513  0.42672241  0.48506086  0.33609935
   0.33304155  0.34957037]
 [ 0.41945275  0.40236885  0.34785715  0.35405047  0.37523041  0.38181709
   0.41675827  0.23855989 -0.01655157  0.45121158  0.61639207  0.20727429
   0.41917533  0.41030819  0.42475193  1.          0.49757571  0.33667087
   0.32972122  0.42422611  0.47127507  0.59148506  0.34344466  0.41926515
   0.40435689  0.38313535]
 [ 0.45664382  0.4638761   0.4586931   0.48754442  0.54705993  0.45011806
   0.46737737  0.23616107 -0.02766082  0.59957798  0.55420142  0.46818432
   0.50937024  0.52455002  0.37172879  0.49757571  1.          0.49444929
   0.46714762  0.4935554   0.43126671  0.44605661  0.30344844  0.45517352
   0.52322937  0.46061074]
 [ 0.25059239  0.36101879  0.35041043  0.33168236  0.38735636  0.34902374
   0.43745591  0.17465397  0.04772994  0.42406252  0.40008599  0.32655948
   0.40008781  0.4972402   0.31530718  0.33667087  0.49444929  1.
   0.48629386  0.36998495  0.44949485  0.34069537  0.26256781  0.3995991
   0.4592663   0.40825038]
 [ 0.41820855  0.49222841  0.36173086  0.44021906  0.40958108  0.4054882
   0.44116727  0.37587673  0.03772168  0.44899236  0.38921059  0.43343204
   0.42580919  0.4398541   0.39833656  0.32972122  0.46714762  0.48629386
   1.          0.43464871  0.46504086  0.29401901  0.35462974  0.49125439
   0.41647458  0.36960069]
 [ 0.47016521  0.35074932  0.42301312  0.42580337  0.42169103  0.41642432
   0.34175616  0.32397789  0.18770901  0.42478112  0.38732316  0.34701068
   0.46330196  0.44933431  0.40923364  0.42422611  0.4935554   0.36998495
   0.43464871  1.          0.36763549  0.46826066  0.38498466  0.36390778
   0.42677423  0.43252224]
 [ 0.43204532  0.47487193  0.3144575   0.43635995  0.41475756  0.45844922
   0.34812152  0.27963586  0.01069582  0.44888547  0.38440385  0.35913978
   0.40741774  0.38766664  0.36860513  0.47127507  0.43126671  0.44949485
   0.46504086  0.36763549  1.          0.4834961   0.33324447  0.41897687
   0.40481364  0.41025741]
 [ 0.35789562  0.39731933  0.35951562  0.34652381  0.35507993  0.47348609
   0.38204354  0.2149836   0.19976065  0.41280475  0.53969463  0.28790054
   0.4066558   0.40173284  0.42672241  0.59148506  0.44605661  0.34069537
   0.29401901  0.46826066  0.4834961   1.          0.43143194  0.49736915
   0.41904948  0.32995192]
 [ 0.32501954  0.22278141  0.23255939  0.32840947  0.33204416  0.24964651
   0.33979611  0.155632    0.11202823  0.32649194  0.3195792   0.25224029
   0.34102969  0.36702232  0.48506086  0.34344466  0.30344844  0.26256781
   0.35462974  0.38498466  0.33324447  0.43143194  1.          0.38597107
   0.27359943  0.32558325]
 [ 0.30467306  0.43071002  0.35043107  0.36683628  0.3900047   0.4046647
   0.56949023  0.26071862  0.148186    0.46960816  0.4569279   0.3435722
   0.45158042  0.35957653  0.33609935  0.41926515  0.45517352  0.3995991
   0.49125439  0.36390778  0.41897687  0.49736915  0.38597107  1.
   0.43991262  0.34316157]
 [ 0.33924985  0.42488471  0.34507766  0.39441992  0.37988848  0.36936625
   0.44734686  0.35672164  0.03005424  0.38240919  0.34173335  0.27433844
   0.45633677  0.40409307  0.33304155  0.40435689  0.52322937  0.4592663
   0.41647458  0.42677423  0.40481364  0.41904948  0.27359943  0.43991262
   1.          0.44190758]
 [ 0.42425596  0.3419053   0.45759506  0.38170724  0.41700677  0.29433596
   0.37725506  0.21028487 -0.0587806   0.45133378  0.36874778  0.40132481
   0.39571153  0.4505299   0.34957037  0.38313535  0.46061074  0.40825038
   0.36960069  0.43252224  0.41025741  0.32995192  0.32558325  0.34316157
   0.44190758  1.        ]]
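Because np.corrcoef treats each row as a variable, the full category-by-category matrix can also be produced in a single call; a sketch that should agree with the loop above (corr_onecall is a hypothetical name):

#One call: rows of the transposed matrix are the 26 categories
corr_onecall = np.corrcoef(np_t_subjects_mean_recall_by_cat)
assert np.allclose(corr_cat_matrix, corr_onecall)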
In [16]:
#Find the maximum off-diagonal correlation coefficient and its pair of categories

corr_max = 0

for i,corr_array in enumerate(corr_cat_matrix):
    
    for j,corr_value in enumerate(corr_array):
        #The < 0.99999 test excludes the diagonal's self-correlations of 1
        if corr_value > corr_max and corr_value < 0.99999:
            corr_max = corr_value
            max_i_index = i
            max_j_index = j

print("The maximum correlation coefficient is " + str(corr_max))

print("The pair of categories with the highest correlation in recall performance is: ") 
print(str(np_u_categories[max_i_index]) + " and " + str(np_u_categories[max_j_index]))
The maximum correlation coefficient is 0.6163920742941553
The pair of categories with the highest correlation in recall performance is: 
Flowers and KitchenTools
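An equivalent vectorized lookup, masking the diagonal rather than thresholding at 0.99999; a sketch only:

#Replace self-correlations with -inf, then locate the flat argmax
masked = corr_cat_matrix.copy()
np.fill_diagonal(masked, -np.inf)
i_max, j_max = np.unravel_index(np.argmax(masked), masked.shape)
print(np_u_categories[i_max], np_u_categories[j_max])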
In [17]:
#Find the correlation coefficient closest to zero (the weakest relationship)
#and its pair of categories

corr_min = 2

for i,corr_array in enumerate(corr_cat_matrix):
    
    for j,corr_value in enumerate(corr_array):
        if abs(corr_value) < abs(corr_min):
            corr_min = corr_value
            min_i_index = i
            min_j_index = j

print("The minimum correlation coefficient is " + str(corr_min))

print("The pair of categories with the lowest correlation in recall performance is: ") 
print(str(np_u_categories[min_i_index]) + " and " + str(np_u_categories[min_j_index]))
The minimum correlation coefficient is 0.010695817917302466
The pair of categories with the lowest correlation in recall performance is: 
Fabric and Toys
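The closest-to-zero coefficient can likewise be found with argmin over absolute values (the diagonal's 1s can never win); a sketch:

#Flat argmin of |r| gives the weakest pairwise relationship
i_min, j_min = np.unravel_index(np.argmin(np.abs(corr_cat_matrix)),
                                corr_cat_matrix.shape)
print(corr_cat_matrix[i_min, j_min], np_u_categories[i_min], np_u_categories[j_min])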

3

In [2]:
#Read in the word pool
with open('wordpool.txt') as f:
    words = f.read().splitlines()

#Set number of targets
ntargets = input("Please input the number of targets: ")
ntargets = int(ntargets)

#Set number of lures
nlures = input("Please input the number of lures: ")
nlures = int(nlures)
Please input the number of targets: 5
Please input the number of lures: 5
In [19]:
#Randomly select targets and lures

#Randomly shuffle words, choose targets, delete chosen targets
rnd.shuffle(words)
targets = words[:ntargets]
del words[:ntargets]
print(targets)

#Randomly shuffle remaining words, choose lures
rnd.shuffle(words)
lures = words[:nlures]
del words[:nlures]
print(lures)

#Reset the list of words from the file
with open('wordpool.txt') as f:
    words = f.read().splitlines()
['CARRY', 'THEATER', 'CONTAIN', 'LONELY', 'OPEN']
['EMBRACE', 'COLONEL', 'IDLE', 'CORRECT', 'RELEASE']
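The shuffle/slice/delete pattern can also be expressed with random.sample, which draws distinct words without mutating the pool; a sketch (targets_alt and lures_alt are hypothetical names):

#One draw of ntargets + nlures distinct words, then split
draw = rnd.sample(words, ntargets + nlures)
targets_alt, lures_alt = draw[:ntargets], draw[ntargets:]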
In [20]:
#List of randomly selected targets/lures presented during test phase

#Concatenate lists of targets and lures
test_words = targets + lures
print(test_words)

#Randomly shuffle words
rnd.shuffle(test_words)
print(test_words)

#Pre-allocate numpy object array for key responses
responses = np.empty([1,len(test_words)], dtype = "object")

#Pre-allocate numpy object array for scored results
results = np.empty([1,len(test_words)], dtype = "object")
['CARRY', 'THEATER', 'CONTAIN', 'LONELY', 'OPEN', 'EMBRACE', 'COLONEL', 'IDLE', 'CORRECT', 'RELEASE']
['IDLE', 'CARRY', 'OPEN', 'RELEASE', 'COLONEL', 'EMBRACE', 'LONELY', 'CORRECT', 'CONTAIN', 'THEATER']
In [23]:
#Experiment
#Encoding phase

#Create window
win = visual.Window([400,400])

#Show each target word to the participant for 1 second
for target in targets:
    stim = visual.TextStim(win,text=target.rstrip('\n'))
    stim.draw()
    win.flip()
    core.wait(1)
    
#Intermediate phase

#Present instructions; the participant presses 'i' to continue to the test phase
stim = visual.TextStim(win,text='In the test phase, pressing "j" indicates a word shown in the encoding phase. Pressing "k" indicates a word not shown in the encoding phase. Please press "i" to show your understanding.')
stim.draw()
win.flip()
pressedkey = event.waitKeys(keyList = ["i"])
if pressedkey[0] == 'i':
    win.flip()
    core.wait(1)
    
#Test Phase

for i,test_word in enumerate(test_words):
    stim = visual.TextStim(win,text=test_word.rstrip('\n'))
    stim.draw()
    win.flip()
    pressedkey = event.waitKeys(keyList = ["j", "k"])
    pressedkey = "".join(pressedkey)
    
    responses[0][i] = pressedkey
    
    
    if responses[0][i] == "j" and test_words[i] in targets:
        results[0][i] = "hit"
        
    elif responses[0][i] == "j" and test_words[i] in lures:
        results[0][i] = "false alarm"
     
    elif responses[0][i] == "k" and test_words[i] in targets:
        results[0][i] = "miss"
    
    elif responses[0][i] == "k" and test_words[i] in lures:
        results[0][i] = "correct rejection"

win.close()
print(responses)
print(results)


hit_counter = 0
for result in results[0]:
    if result == "hit":
        hit_counter += 1
    
hit_rate = hit_counter/ntargets
print("The hit rate is " + str(hit_rate))

false_alarm_counter = 0
for result_2 in results[0]:
    if result_2 == "false alarm":
        false_alarm_counter += 1

false_alarm_rate = false_alarm_counter/nlures
print("The false alarm rate is " + str(false_alarm_rate))
194.4218 	WARNING 	Monitor specification not found. Creating a temporary one...
[['k' 'k' 'j' 'k' 'k' 'k' 'j' 'k' 'j' 'j']]
[['correct rejection' 'miss' 'hit' 'correct rejection'
  'correct rejection' 'correct rejection' 'hit' 'correct rejection' 'hit'
  'hit']]
The hit rate is 0.8
The false alarm rate is 0.0
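The tallies at the end can be written without explicit counter loops using list.count; a minimal equivalent sketch (results_list is a hypothetical name):

#Count hits and false alarms directly from the scored results
results_list = list(results[0])
print("The hit rate is " + str(results_list.count("hit") / ntargets))
print("The false alarm rate is " + str(results_list.count("false alarm") / nlures))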