# Winter Storm 2015 Modeling Workshop
# Word Learning Model
# ==learner.R==
#
# Defines a parallel word learner with an optional cost or benefit to
# future learning from already-learned words.
#
# learner(vocabulary = per-word thresholds at which words are learned,
#         offset     = how much learning a word changes the difficulty
#                      of all still-unlearned words)
# offset > 0 slows learning, offset < 0 speeds learning.
# =================================================================
# McMurray, B. (2007). Defusing the childhood vocabulary explosion.
# Science, 317(5838), 631-631.
#
# code author: Rachael Richardson
# rachaelr@umd.edu

# Turn on for noisy progress output.
chatter <- TRUE

# Simulate parallel word learning.
#
# Every word accumulates one point of "evidence" per time step; a word
# counts as learned once its accumulated evidence reaches its threshold
# in `vocabulary`.  Each newly learned word shifts the thresholds of all
# still-unlearned words by `offset` (a cost if positive, a benefit if
# negative), so words interact through learning.
#
# Args:
#   vocabulary: numeric vector of learning thresholds, one per word.
#   offset:     amount added to every unlearned word's threshold each
#               time some word is learned (default 0 = independent words).
#
# Returns:
#   Numeric vector: element i is the cumulative number of words known
#   after learning step i.  Preallocated to length(vocabulary) and grown
#   automatically if learning takes more steps; if learning finishes in
#   fewer steps, trailing entries remain 0.  Returns numeric(0) for an
#   empty vocabulary.
learner <- function(vocabulary, offset = 0) {
  total_words <- length(vocabulary)

  # 1 where a word has been learned, 0 otherwise.
  words_learned <- rep(0, total_words)

  # Record of cumulative words known at each learning step.
  words_known_by_step <- rep(0, total_words)

  # Total number of learning steps taken so far.
  timesteps <- 0

  # Learner's initial state: every word starts with zero evidence.
  evidence <- rep(0, total_words)

  # Continue until all words have been learned.
  while (sum(words_learned) < total_words) {
    # Check each still-unlearned word against its (possibly shifted)
    # threshold.  Note: shifts applied here take effect immediately,
    # so words checked later in the same pass see updated thresholds.
    for (j in which(words_learned == 0)) {
      if (evidence[j] >= vocabulary[j]) {
        # Mark word j as learned.
        words_learned[j] <- 1
        # A learned word incurs a cost or benefit to future learning.
        vocabulary[-j] <- vocabulary[-j] + offset
      }
    }

    # Advance the clock BEFORE recording.  The original recorded at
    # index `timesteps` starting from 0; assignment to index 0 is a
    # silent no-op in R, so the first step's record was lost and all
    # later records were shifted by one.
    timesteps <- timesteps + 1
    words_known_by_step[timesteps] <- sum(words_learned)

    if (chatter) {
      cat("epoch", timesteps,
          ": unlearned words: ", sum(words_learned == 0),
          " total progress: ", sum(words_learned), "/", total_words, "\n")
    }

    # Every word accumulates one more point of evidence.
    evidence <- evidence + 1
  }

  words_known_by_step
}