Hello!
I have a module script containing a string of 220k+ lines. I want to use this string to train a perceptron.
Here is my function:
function(pos_tagger, train)
    local done = false
    local training_set = {}
    -- iterate over the training string one line at a time
    local lines = train:gmatch("(.-)[\n\r]")
    while not done do
        local found_end_of_sentence = false
        local sentence = {}
        while not found_end_of_sentence do
            local sent = lines() or ""
            -- each line is "word POS-tag chunk-tag"; only the word and POS tag are kept
            local func = string.gmatch(sent, "[^%s]+")
            local curr_word, tag, chunk_tag = func(), func(), func()
            if curr_word == nil then
                -- blank line: end of the current sentence
                found_end_of_sentence = true
            elseif curr_word == "END_OF_TRAINING_FILE" then
                found_end_of_sentence = true
                done = true
            else
                table.insert(sentence, {curr_word, tag})
            end
        end
        table.insert(training_set, sentence)
    end
    pos_tagger:train(training_set, 8)
end
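For context, the training string is plain text where each line holds "word POS-tag chunk-tag", blank lines separate sentences, and a final END_OF_TRAINING_FILE line marks the end. Here is a hypothetical fragment just to illustrate the format the parser above expects (the specific words and tags are made up for the example):

-- Hypothetical sample of the training string format parsed above
local sample_train = [[
He PRP B-NP
reckons VBZ B-VP
the DT B-NP
deficit NN I-NP
. . O

END_OF_TRAINING_FILE
]]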
But the string is so big that parsing it takes a while, and the script hits the execution timeout. If I call wait() inside the loop, it takes far too long and the data is literally impossible to load in time. How can I ignore the script timeout and continue executing?
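For reference, this is roughly what I mean by using wait() in the loop (a minimal sketch with a hypothetical helper name, not my actual module code). Legacy wait() yields for at least about 0.03 s per call, so calling it once per line over 220k+ lines adds on the order of two hours of waiting by itself:

-- Minimal sketch of the wait()-per-line variant (hypothetical helper name).
-- The parsing mirrors the function above; only the word and POS tag are kept.
local function parse_with_wait(train)
    local training_set = {}
    local sentence = {}
    for sent in train:gmatch("(.-)[\n\r]") do
        local func = sent:gmatch("[^%s]+")
        local curr_word, tag = func(), func()
        if curr_word == "END_OF_TRAINING_FILE" then
            table.insert(training_set, sentence)
            break
        elseif curr_word == nil then
            -- blank line: close the current sentence
            table.insert(training_set, sentence)
            sentence = {}
        else
            table.insert(sentence, {curr_word, tag})
        end
        wait() -- yields at least ~0.03 s on every single line, which is why this never finishes in time
    end
    return training_set
end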