I mean, I've only done some very simplistic things so far:
--- Module table for the chat "AI". All subtables are empty placeholders
-- for future state; only ai.respond is populated so far.
local ai = {
	patterns = {}, -- unused so far
	dataset = {}, -- unused so far
	ml = {}, -- unused so far
	mind = {}, -- unused so far
}
--- Produce a reply for one line of user input.
-- Pipeline: tokenize, score tokens (TF / IDF / TF*IDF), extract
-- subject / question-flag / verb, then hand the collected knowledge
-- to generate_response.
-- @tparam string input raw user text
-- @return whatever generate_response returns for the extracted knowledge
function ai.respond(input : string)
	local tokens = tokenize(input)
	local term_frequency = tokenize_termfrequency(tokens)
	local inverse_document_frequency = tokenize_inversedocumentfrequency(tokens)
	-- NOTE(review): features are computed but not consumed yet — the
	-- TF-IDF scores are not wired into response generation so far.
	local features = tokenize_features(tokens, term_frequency, inverse_document_frequency)
	local subject = determine_subject(input, tokens)
	local is_question = check_ifquestion(input)
	local verb = find_verb(tokens)
	return generate_response({
		question = is_question,
		subject = subject,
		verb = verb,
	})
end
--- Lower-case the input and break it into words on single spaces.
-- NOTE(review): Luau's string.split does not collapse runs of spaces,
-- so double spaces produce empty-string tokens — confirm that's acceptable.
-- @tparam string input raw text
-- @treturn {string} list of lower-cased tokens
function tokenize(input : string)
	local lowered = string.lower(input)
	return string.split(lowered, " ")
end
--- Compute the term frequency of each distinct token:
-- occurrences of the token divided by the total token count.
-- @tparam {string} tokenized token list (duplicates allowed)
-- @treturn {[string]: number} map of token -> relative frequency
function tokenize_termfrequency(tokenized : {})
	local total = #tokenized
	local counts = {}
	for _, token in ipairs(tokenized) do
		counts[token] = (counts[token] or 0) + 1
	end
	local frequencies = {}
	for token, count in pairs(counts) do
		frequencies[token] = count / total
	end
	return frequencies
end
--- Compute an IDF-style weight per distinct token: log(total / count).
-- Frequent tokens get weights near zero, rare ones larger weights.
-- NOTE(review): classic IDF is computed over a corpus of documents;
-- this uses token counts within the single message as a stand-in.
-- @tparam {string} tokenized token list (duplicates allowed)
-- @treturn {[string]: number} map of token -> log(total_tokens / occurrences)
function tokenize_inversedocumentfrequency(tokenized : {})
	local total = #tokenized
	local occurrences = {}
	for _, token in ipairs(tokenized) do
		occurrences[token] = (occurrences[token] or 0) + 1
	end
	local idf = {}
	for token, count in pairs(occurrences) do
		idf[token] = math.log(total / count)
	end
	return idf
end
--- Combine the two weight tables into TF*IDF scores per token.
-- Assumes both tables were built from the same token list, so every
-- token has an entry in each.
-- @tparam {string} tokenized token list
-- @tparam {[string]: number} termfrequency output of tokenize_termfrequency
-- @tparam {[string]: number} inversedocumentfrequency output of tokenize_inversedocumentfrequency
-- @treturn {[string]: number} map of token -> TF * IDF
function tokenize_features(tokenized : {}, termfrequency : {}, inversedocumentfrequency : {})
	local scores = {}
	for index = 1, #tokenized do
		local token = tokenized[index]
		scores[token] = termfrequency[token] * inversedocumentfrequency[token]
	end
	return scores
end
--- Report whether the input looks like a question (contains a "?").
-- Uses a plain-text search: "?" is a pattern quantifier in Lua patterns,
-- and the original bare-pattern search only matched it literally by
-- accident of the matcher's implementation.
-- @tparam string input raw user text
-- @treturn boolean true when the text contains at least one "?"
function check_ifquestion(input : string)
	return string.find(input, "?", 1, true) ~= nil
end
--- Guess the subject of the sentence: the word immediately to the left
-- of the (single) verb.
-- @tparam string input raw user text (original casing)
-- @tparam {string} tokenized lower-cased token list of the same text
-- @treturn string|nil the word before the verb, or nil when no single
--   verb was found or nothing precedes it
function determine_subject(input : string, tokenized : {})
	local verb = find_verb(tokenized)
	if verb == nil then
		-- find_verb returns nil for zero or ambiguous (>1) verbs; the
		-- original passed nil straight into string.find, which errors.
		return nil
	end
	-- Tokens are lower-cased, so search a lower-cased copy of the input;
	-- plain find avoids treating pattern magic characters in the verb
	-- as pattern syntax.
	local l = string.find(string.lower(input), verb, 1, true)
	if l then
		local left_part = string.sub(input, 1, l - 1)
		-- Last word before the verb, ignoring trailing whitespace.
		return string.match(left_part, "(%w+)%s*$")
	end
	return nil
end
--- Scan a token list for words present in the verbs word list.
-- The original annotated the parameter as `string`, but the body
-- iterates it as a table of tokens — fixed here.
-- @tparam {string} tokens lower-cased tokens of one sentence
-- @treturn string|nil the single verb found; nil when none was found or
--   when the sentence is ambiguous (more than one verb)
function find_verb(tokens : {})
	-- require caches the module, so calling this per-sentence is cheap.
	local verbs = require(script.verbs)
	local found_verbs = {}
	for _, word in ipairs(tokens) do
		if table.find(verbs, word) then
			table.insert(found_verbs, word)
		end
	end
	if #found_verbs > 1 then
		-- TODO: pick one (e.g. the first) instead of giving up.
		print("more than one verb")
		return nil
	end
	return found_verbs[1]
end
--- (Work in progress) Pick a sentence structure for the response.
-- Currently only declares the word lists and, like the original stub,
-- returns nothing.
-- @tparam table knowledge parsed question/subject/verb info (unused so far)
function structure_sentence(knowledge: {})
	local sentence_structures = {
		simple = {},
	}
	-- Coordinating conjunctions. The original packed the whole list into
	-- ONE comma-separated string, which per-word lookups (table.find)
	-- could never match, and misspelled "and" as "an".
	local FANBOYS = { "for", "and", "nor", "but", "or", "yet", "so" }
	-- Subordinating conjunctions, one word per entry for the same reason.
	local ABBISAWAWUWU = {
		"as", "because", "before", "if", "since", "although", "when",
		"after", "while", "until", "whether", "unless",
	}
	return
end
--- Final stage of the pipeline: unpack the knowledge table, log it, and
-- return its parts.
-- NOTE(review): despite the old annotation promising response:string,
-- this returns a (question, subject, verb) tuple — kept for callers.
-- @tparam table knowledge fields: question (boolean), subject (string or
--   nil), verb (string or nil)
-- @return question, subject, verb as a tuple
function generate_response(knowledge : {})
	local question = knowledge.question
	local subject = knowledge.subject
	local verb = knowledge.verb
	-- tostring() every field: subject/verb are legitimately nil when the
	-- parser finds nothing, and the original concatenation crashed on nil.
	print("QUESTION: " .. tostring(question) .. ", SUBJECT: " .. tostring(subject) .. ", VERB: " .. tostring(verb))
	return question, subject, verb
end
return ai
I just haven't pieced a lot of it together yet, and I've only been working on this for a few combined hours.
(By "piece together" I mean combining the different features.)