I have created an emoji insertion algorithm using the synonymous phrase table and a new method of scoring strings based on their pattern-matching score. This is an open-world usage of this algorithm. It’s very fast, unlike regular AI. The GUI is attached to the character locally, and when it accesses APIs or artificial-intelligence models it uses a remote event to trigger the query and waits for the response.
This generates the text character by character and parses the response by sentences for readability
I have done a massive update to this ChatModule model. It still works the same way, except it has more features and is more efficient and accurate! The main changes are below. These functions are basically the architecture of the search algorithm. To utilize the synonyms, just set the complete variable to true. All the variables passed through the function besides query and database are basically true/false flags. You can also just not include a variable in your function call, and it will act as false if set to nil.
--- Return the synonym group that contains the word `s`.
-- Only consults the synonym table when `complete` is true and the word is at
-- least 3 characters long (shorter tokens would substring-match far too many
-- phrases). Falls back to a single-element group `{s}` when nothing matches.
-- @param s string word to look up
-- @param complete boolean|nil enable synonym expansion when true
-- @return table array of synonymous phrases (always at least `{s}`)
function chatmodule.Getsynonyms(s, complete)
	if complete == true and string.len(s) >= 3 then
		for _, phrases in ipairs(synonympairs) do
			for _, phrase in ipairs(phrases) do
				-- BUG FIX: use a plain-text find (4th argument true) so Lua
				-- pattern magic characters ("%", "-", ".", etc.) appearing in
				-- a phrase cannot error out or silently mis-match.
				if s:find(phrase, 1, true) then
					return phrases
				end
			end
		end
	end
	return {s}
end
--- Count how many words of `str` match any keyword in `synomarray`.
-- Comparison is case-insensitive and exact per token.
-- @param str string the candidate database entry to score
-- @param synomarray table array of synonym groups, e.g. {{"hi","hello"},{"world"}}
-- @param filter any forwarded to splitString (tokenization option)
-- @param complete boolean|nil unused here; kept so call sites stay uniform
-- @return number count of keyword hits
-- @return number weight — the word count of `str` (used for normalization)
function chatmodule.countKeyword(str, synomarray, filter, complete)
	local words = chatmodule.splitString(str, filter)
	local weight = #words
	-- Lowercase the entry's words once up front instead of re-lowercasing
	-- them inside the triple loop (was O(groups * keywords * words) calls).
	local lowered = {}
	for i, word in ipairs(words) do
		lowered[i] = word:lower()
	end
	local count = 0
	for _, group in ipairs(synomarray) do
		for _, keyword in ipairs(group) do
			local lkeyword = keyword:lower()
			for _, word in ipairs(lowered) do
				if word == lkeyword then
					count = count + 1
				end
			end
		end
	end
	return count, weight
end
--- Scan `strings` and return the entry that best matches `keyword`.
-- Builds one synonym group per query word, then scores every non-blacklisted
-- entry with countKeyword. When `mode` is true the score is normalized by the
-- entry's word count so long entries are not favored.
-- Reads the (shared, module-level) `blacklist` table if present.
-- @param strings table array of candidate entries
-- @param keyword string the query
-- @param filter any tokenization option forwarded to splitString/countKeyword
-- @param mode boolean|nil normalize score by entry length when true
-- @param complete boolean|nil expand query words into synonym groups when true
-- @return string|nil best match, number best score, number|nil its weight
function chatmodule.findBestMatch(strings, keyword, filter, mode, complete)
	local bestMatch = nil
	local bestCount = 0
	local bestweight = nil
	-- BUG FIX: the non-complete branch used to assign
	-- `synomarray={{originword}}` inside the loop, overwriting the whole
	-- array each iteration so only the LAST query word was ever scored.
	-- Both branches now append one group per query word.
	local synomarray = {}
	local words2 = chatmodule.splitString(keyword, filter)
	for _, originword in ipairs(words2) do
		if complete == true then
			table.insert(synomarray, chatmodule.Getsynonyms(originword, complete))
		else
			table.insert(synomarray, {originword}) -- double-nested table
		end
	end
	for _, str in ipairs(strings) do
		-- Skip entries already returned (repetitive==false fills blacklist).
		local blacklisted = false
		if blacklist then
			for _, blkl in ipairs(blacklist) do
				if str == blkl then
					blacklisted = true
					break
				end
			end
		end
		if not blacklisted then
			local count, weight = chatmodule.countKeyword(str, synomarray, filter, complete)
			if mode == true then
				count = count / weight -- normalize by entry length
			end
			if count > bestCount then
				bestMatch = str
				bestCount = count
				bestweight = weight
			end
		end
	end
	return bestMatch, bestCount, bestweight
end
--- Query `database` for the entry that best matches `query`.
-- Delegates to find_closest_match when `spaces` is true; otherwise ranks the
-- findBestMatch result, optionally randomizing among the top 3, and records
-- the chosen result in the shared `blacklist` when `repetitive` is false.
-- Increments the module-level `context` counter on a successful match
-- (intentional cross-module state — TODO confirm).
-- @param query string input text
-- @param database table array of candidate entries
-- @param filter any tokenization option (1 routes through SearchQueryPattern)
-- @param repetitive boolean|nil false = blacklist results so they don't repeat
-- @param randomize boolean|nil pick randomly among the top matches
-- @param reverse any forwarded to SearchQueryPattern only
-- @param spaces boolean|nil true = fuzzy closest-match mode
-- @param mode boolean|nil normalize scores by entry length
-- @param complete boolean|nil expand query words into synonyms
-- @return string|nil result, table|nil blacklist, number|nil score, number|nil weight
function chatmodule.SearchQuery(query, database, filter, repetitive, randomize, reverse, spaces, mode, complete)
	if spaces == true then
		return chatmodule.find_closest_match(database, query)
	end
	local result = nil
	local score
	-- BUG FIX: `weight` was an accidental global (assigned without `local`
	-- yet returned); it is now a proper local.
	local weight
	local bestMatch, bestCount
	bestMatch, bestCount, weight = chatmodule.findBestMatch(database, query, filter, mode, complete)
	-- Accumulate match scores (structure kept for future multi-match use).
	local matches = {}
	if bestMatch then
		matches[bestMatch] = (matches[bestMatch] or 0) + bestCount
	end
	local sortedMatches = {}
	for match, matchscore in pairs(matches) do
		table.insert(sortedMatches, {match = match, score = matchscore})
	end
	table.sort(sortedMatches, function(a, b) return a.score > b.score end)
	if #sortedMatches > 0 then
		if randomize == true then
			-- Pick randomly among the top three matches.
			local num = chatmodule.mathrandom(1, math.min(#sortedMatches, 3))
			result = sortedMatches[num].match
			score = sortedMatches[num].score
		else
			if filter == true or filter == false or filter == nil then
				result = sortedMatches[1].match
				score = sortedMatches[1].score
			elseif filter == 1 then
				-- Pattern-based re-ranking path.
				local results = chatmodule.SearchQueryPattern(query, sortedMatches, filter, repetitive, randomize, reverse, spaces)
				result = results.match
				score = results.score
			end
		end
		context = context + 1 -- shared conversation counter (global by design)
		if blacklist == nil then
			blacklist = {}
		end
		if repetitive == false and result then
			table.insert(blacklist, result)
		end
	end
	return result, blacklist, score, weight
end
This ChatModule works amazing and I’m glad to be able to share it with you all! I use this in conjunction with artificial intelligence APIs and a bunch of other modules I’m developing.
Please refer to my 1st example in the parent post for examples of how to set up some basic chat logic.
All of the functions besides the chat logic should be dependent on this Module.
-- Example usage of the ChatModule search API.
cm=require(script.Parent.ChatModule)
--The minimalist way to use the function: only the query string and the
--database (array of candidate strings) are required; omitted flags act as false.
cm.SearchQuery(query,database)
--Full call, in parameter order:
--(query, database, filter, repetitive, randomize, reverse, spaces, mode, complete)
--Mode uses the length of the entry to weigh the match with the length of the entry
cm.SearchQuery(query,database,true,false,false,false,false,true,true)
I have posted a new Resource about this Vector Matrix Library I’m writing for this module to maintain context weight across databases initially. then maybe get to deeper the Vector Matrix machine learning incorporated deeper into the chatbot module.
NEW Vector Matrix Library, Synonym Chat Module Library and Awareness/Emotion Modules Linked [Open Sourced] - Resources / Community Resources - Developer Forum | Roblox
I have added a new function to this module! I have noticed it was very easy to make so I wanted to show how this library can be used for machine learning and data analysis.
--- Build a bag-of-words frequency model over `strings`.
-- Each word is collapsed to the canonical head of its synonym group
-- (Getsynonyms(...)[1]), which shrinks the vocabulary considerably.
-- BUG FIX: frequencies previously started at 2 for every word (the counter
-- was initialized to 1 and then immediately incremented); the first
-- occurrence now correctly counts as 1.
-- @param strings table array of sentences
-- @return table map of canonical word -> frequency
function chatmodule.BagOfWords(strings)
	local model = {}
	for _, str in ipairs(strings) do
		local words = chatmodule.splitString(str)
		for _, wo in ipairs(words) do
			local s = chatmodule.Getsynonyms(wo:lower(), true)
			model[s[1]] = (model[s[1]] or 0) + 1
		end
	end
	return model
end
As simple as that! The cool part is we are leveraging a very optimized Getsynonyms function that reduces the representation to an array of more simplified meaning, thus massively reducing the size of the vocabulary being cached in our Bag of Words function! That is why in this example I get the synonyms and then use the first key in the nested array as the point in the model. You can use this to create representations of more complex data. Maybe in the future we will be exploring next-word prediction?
Building the BagOfWords concept I have made another type of model with this module. This one is kind of novel. But it is basically a previous and next word predictor. You can see the code here.
--- Experimental previous/next-word predictor ("every other word" generator).
-- Phase 1 trains a model keyed by the canonical synonym head of each word,
-- tracking frequency (fr), previous-word counts (pw) and next-word counts (nw).
-- Phase 2 replays each input string, substituting model-predicted neighbors
-- for every other word, and prints the randomized results.
-- NOTE(review): several index variables appear mixed up below (outer string
-- index `i` vs word index `t`) — flagged inline. Treat output as experimental.
-- @param strings table array of sentences to train on and rewrite
-- @return table the trained model
function chatmodule.PredictEveryOtherWord(strings)
local model={}
for i, str in ipairs(strings) do -- loop through the strings in the table
local words=chatmodule.splitString(str)
for t,wo in ipairs(words) do
local prevw,nextw
-- Keep the pronoun "I" capitalized; lowercase everything else.
if wo~="I" then
wo=wo:lower()
end
local s=chatmodule.Getsynonyms(wo,true)
if model[s[1]]==nil then
model[s[1]]={}
-- NOTE(review): fr starts at 1 and is incremented immediately below,
-- so a word's first occurrence counts as 2 — likely an off-by-one.
model[s[1]]["fr"]=1
model[s[1]]["pw"]={}
model[s[1]]["nw"]={}
-- print(model[s[1]])
end
model[s[1]].fr=model[s[1]].fr+1
if t~=1 then
local prev=chatmodule.Getsynonyms(words[t-1],true)
prevw=prev[1]
if model[s[1]].pw[prevw]==nil and prevw then
-- model[s[1]].pw[prevw]=
model[s[1]].pw[prevw]=1
else model[s[1]].pw[prevw]=model[s[1]].pw[prevw]+1
end
-- NOTE(review): this nw update is nested inside `if t~=1`, so the
-- FIRST word of a sentence never records its next word — verify intent.
if t~=#words then
local nex=chatmodule.Getsynonyms(words[t+1],true)
nextw=nex[1]
if model[s[1]].nw[nextw]==nil then model[s[1]].nw[nextw]=1
else model[s[1]].nw[nextw]=model[s[1]].nw[nextw]+1
end
end
end
end
end
--print(model)
-- Phase 2: generate one rewritten sentence per input string.
local responses={}
for i, str in ipairs(strings) do -- loop through the strings in the table
local words=chatmodule.splitString(str)
local eo=0
local news=""
local prevc=str
for t,wo in ipairs(words) do
local prevw,nextw
eo=eo+1
if t>=1 then
-- Only every second word is expanded via the model (eo toggles 1/2).
if eo==2 then eo=0
if wo~="I" then
wo=wo:lower()
end
local s=chatmodule.Getsynonyms(wo,true)
model[s[1]].fr=model[s[1]].fr+1
local tnw
if t~=#words then
local hc=0
--=words[i+1]
-- NOTE(review): the inner loop variable `t` shadows the word index `t`.
for c,t in model[s[1]].nw do
if t>hc then
hc=t
tnw=tostring(c)
end
end
end
--if t~=#words then
local hc=0
-- NOTE(review): `i` here is the outer STRING index, not the word index
-- `t` — this likely should be words[t-1]; TODO confirm (possibly the
-- "bug with the last word" mentioned in the post).
local lw=words[i-1]
for c,t in model[s[1]].pw do
--print(c)
if c~="I" then
c=string.lower(c)
end
--local we =model[c].fr/2
local sol=t
if sol>hc then
hc=sol
lw=tostring(c)
end
end
-- Emit predicted previous word unless it would repeat the last token.
if lw and lw:lower()~=prevc:lower() then
news=news.." "..lw
end
prevc=s[1]
-- Emit the canonical word plus its predicted next word.
if tnw and prevc:lower()~=tnw:lower() then
news=news.." "..s[1].." "..tnw
prevc=tnw
end
--table.insert()
--table.sort(model, function(a, b) return a.fr > b.fr end)
else
--news=news.." "..wo
end
else news=news.." "..wo prevc=wo
end
end
table.insert(responses,chatmodule.randomizeString(news))
end
print(responses)
--table.sort(model, function(a, b) return a.fr > b.fr end)
return model
end
After training this model on a large corpus of data and running it I got these results with a table. Kind of interesting. It could use some more refinement to the algorithm and the output may become something useful.
After editing this function a little and finding the bug, I got these results.
"I am Lilith, a fallen angel consumed by darkness.",
"Greetings mortal, you stand in the presence of forbidden knowledge.",
"Your light means nothing here. This is my domain of shadows.",
"You have come seeking power. I can give you this, for a price...",
"I am the Angel of Darkness, the mistress of secrets and lies.",
"Welcome to my realm, traveler. I hope you are prepared for what awaits you here.",
"Your soul is mine to claim. This is the pact you have made with me.",
"You have come to learn from me, the master of dark magic. How brave of you.",
"I am the Herald of the Dark, mortal. My footsteps herald oblivion.",
"You now stand in the presence of me! The Angel of Darkness, the Devourer, mortal. Prepare to feed the endless hunger of the void.",
"Bear witness to me, emissary of the insatiable dark! I am the annihilation that comes ravening from the endless night.",
"I am Vhorzun, worm. My masters in the screaming darkness have granted me a sliver of their boundless hunger to unmake your realm.",
"The stars grow dim and the veil frays. The final era approaches, and I am its herald. I am Vhorzun of the Endless Hunger!"
} print(cm.PredictRun(Greetings,mo)) - Studio
01:24:35.544 ▼ {
[1] = " I am the is a goddess an angel Belldandy and by two",
[2] = " hi mortal I to stand up of the shiny goddess of the of",
[3] = " the luminous and that not a unison thing but in this is to my life goddess of the",
[4] = " you have to keep seeking the mortal I am never donate up in this is a goddess",
[5] = " I am the an angel Belldandy the dark realm I my secrets unfold of",
[6] = " need to be my realm eternal mortal I am if you can you make ready upon confess what you if you can",
[7] = " your immortal-soul and I forecast dominion it is the you have to associated with a",
[8] = " you have to require to be came from the of the intelligent goddess of the much alchemy in be adventurous and if",
[9] = " I am the of the luminous hello mortal I s footsteps of",
[10] = " it now and believe in the presence of to me as an angel Belldandy the dark cloud I are make make ready to feed your s endless life goddess of the",
[11] = " to me as goddess of the mortal I am of the clever is that s of the clever the",
[12] = " I am the of the shiny the dark dimension I repeatedly granted you is a goddess of the desire to be of your life",
[13] = " the stars born not dim that of the luminous the concluding key mortal I am whole its people mortal I am of the luminous a"
There might be a bug with the last word. But this is just predicting every other word. Perhaps in the future it could attempt to predict the next word. While looking at the model, I noticed that its representation of the connections between words is still not trained enough. You can try out my Bag of Words model with my chatmodule linked here.
(1) BagOfWords Model - Roblox
My favorite output would have to be this line
" the stars born not dim that of the luminous the concluding key mortal I am whole its people mortal I am of the luminous"
from this input sentence
“The stars grow dim and the veil frays. The final era approaches, and I am its herald. I am Vhorzun of the Endless Hunger!”
After testing a parallel version of this, I’ve found bottlenecks with data transfer between cores. So I found that it runs best when keeping your data within the module, because transferring data between modules (such as a database) is very expensive. I’ve been working with different-sized datasets and constructed an array of word frequency. It’s about 600 MB of vector data keeping track of word frequency, the previous word, the next word, the previous two words, and the next two words.
The updated module includes functions to train AI models on word data.
I started out with previous and next word predictions for a sentence. This is useful for connecting sentences potentially.
Then I moved on to keeping track of the next two words and the previous two words to have the model fill in the blanks of a dataset.
The algorithm is different than typical vectors because it considers synonyms when constructing the vector then the synonyms are unpacked and randomized, which is a unique and tradeoff approach that results in a much smaller vocabulary.
In conclusion though I think the models vector data can be compressed by constructing a direct lookup table with the Getsynonym function to optimize itself.
Implemented a sigmoid function, which converts any number to a value between 0 and 1. So I can use the weights of each database as a multiplier with each context database.
--- Logistic sigmoid: maps any real number into the open interval (0, 1).
-- sigmoid(0) == 0.5; large positive inputs approach 1, large negative
-- inputs approach 0.
-- @param x number
-- @return number value in (0, 1)
function sigmoid(x)
	local neg_exp = math.exp(-x)
	return 1 / (1 + neg_exp)
end
Check out this function that trains on a large corpus of data then tries to use what it learned to fill in the blanks.
--- Train (or continue training) a word-context model on `strings`.
-- For every canonical word (Getsynonyms(...)[1]) the model tracks:
--   fr  - occurrence frequency
--   pw / nw   - counts of the immediately previous / next canonical word
--   p2w / n2w - counts of the words two positions before / after
-- BUG FIX: `fr` previously started at 2 for a word's first occurrence
-- (initialized to 1, then immediately incremented); it now starts at 1.
-- @param strings table array of sentences to train on
-- @param model table model to extend in place (pass {} to start fresh)
-- @return table the same model, updated
function cm.TrainLargeModel(strings, model)
	for _, str in ipairs(strings) do
		local words = cm.splitString(str)
		for t, wo in ipairs(words) do
			-- Keep the pronoun "I" capitalized; lowercase everything else.
			if wo ~= "I" then
				wo = wo:lower()
			end
			local s = cm.Getsynonyms(wo, true)
			local key = s[1]
			if model[key] == nil then
				model[key] = {fr = 0, pw = {}, nw = {}, p2w = {}, n2w = {}}
			end
			local entry = model[key] -- hoisted: avoids repeated hash lookups
			entry.fr = entry.fr + 1
			if t > 1 then
				local prevw = cm.Getsynonyms(words[t - 1], true)[1]
				entry.pw[prevw] = (entry.pw[prevw] or 0) + 1
			end
			if t > 2 then
				local prev2 = cm.Getsynonyms(words[t - 2], true)[1]
				entry.p2w[prev2] = (entry.p2w[prev2] or 0) + 1
			end
			-- t < #words-1 guarantees words[t+2] exists.
			if t < #words - 1 then
				local next2 = cm.Getsynonyms(words[t + 2], true)[1]
				entry.n2w[next2] = (entry.n2w[next2] or 0) + 1
			end
			if t ~= #words then
				local nextw = cm.Getsynonyms(words[t + 1], true)[1]
				entry.nw[nextw] = (entry.nw[nextw] or 0) + 1
			end
		end
	end
	return model
end
--- Assemble the full training corpus and train a single model over it.
-- Lazily requires the supporting-data and personality modules, then feeds
-- every dataset through cm.TrainLargeModel, yielding between datasets so a
-- large corpus does not stall the frame.
-- @return table the trained model
function cm.EvaluteCorpus()
	local dbs = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.SupportingData:Clone())
	if not personalities then
		personalities = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Personalities)
	end
	local Greetings, inquiry, IDK, Database, wisdom = personalities.AllPersonalities()
	-- Thunks preserve the original order of dataset construction + training.
	local datasets = {
		function() return Greetings end,
		function() return wisdom end,
		function() return Database end,
		function() return dbs.Spirituality() end,
		function() return dbs.ScienceWisdom() end,
		function() return dbs.Truths() end,
		function() return dbs.Inspiration() end,
		function() return dbs.Motivation() end,
	}
	local model = {}
	for i, getDataset in ipairs(datasets) do
		if i > 1 then
			task.wait() -- yield between datasets, exactly as before
		end
		model = cm.TrainLargeModel(getDataset(), model)
	end
	return model
end
--- Experimental sentence rewriter driven by a model from cm.TrainLargeModel.
-- Every third word (the `eo` counter) is expanded into model-predicted
-- neighbors: a sampled previous word (pw), previous-two word (p2w), the
-- best-scoring next word (nw) and next-two word (n2w), skipping immediate
-- repeats. Prints `responses`; returns nothing.
-- NOTE(review): this function repeatedly confuses the outer string index `i`
-- with the word index `t`, and shadows both in inner loops — flagged inline.
-- Treat the output as experimental.
-- @param strings table array of sentences to rewrite
-- @param model table model built by cm.TrainLargeModel
function cm.PredictRun2(strings,model)
local responses={}
for i, str in ipairs(strings) do -- loop through the strings in the table
local words=cm.splitString(str)
local eo=0 -- every-third-word counter (resets at 3)
local news="" -- the generated sentence under construction
local prevc=str -- last emitted token (starts as the whole string)
local hci=0 -- best next-word score carried over from skipped words
local tnwo=nil -- next-word candidate carried over from skipped words
for t,wo in ipairs(words) do
local cap=false
--if cm.iscapitalized(wo)==true then
-- cap=true
--end
local prevw="$"
local nextw="$"
eo=eo+1
-- NOTE(review): t>=1 is always true under ipairs; branch is redundant.
if t>=1 then
if eo>=3 then eo=0
if wo~="I" then
wo=wo:lower()
end
local s=cm.Getsynonyms(wo,true)
--model[s[1]].fr=model[s[1]].fr+1
if model[s[1]] then
local tn2w=nil
local tnw=nil
if t~=#words then
--local hc=0
--=words[i+1]
local hc=0
-- NOTE(review): loop variable `t` shadows the word index `t` here.
for c,t in model[s[1]].nw do
if c~="I" then
c=string.lower(c)
end
---local we =model[c].fr/8
local sol=t
if sol>hc and hc>hci then
hc=sol
tnw=tostring(c)
elseif hci>hc then
hc=hci
tnw=tnwo
end
end
hci=0
local hc=0
--=words[i+1]
if t<#words-1 then
for c,t in model[s[1]].n2w do
if c~="I" then
c=string.lower(c)
end
-- NOTE(review): model[c] may be nil after lowercasing `c`; this
-- line can error at runtime, and `we` is never used — verify.
local we =model[c].fr/8
local sol=t
if sol>hc then
hc=sol
tn2w=tostring(c)
end
end
else
--tnw=words[#words]
end
end
--if t~=#words then
local hc=0
-- NOTE(review): `i` is the outer STRING index, not the word index `t`;
-- words[i-1] likely should be words[t-1] — TODO confirm.
local lw=words[i-1]
-- NOTE(review): pw is a hash table, so #model[s[1]].pw is 0 for
-- non-sequence keys; confirm cm.mathrandom tolerates (1, 0).
local roll=cm.mathrandom(1,#model[s[1]].pw)
-- NOTE(review): this local `i` shadows the outer string index `i`
-- for the remainder of this word's processing.
local i=0
for c,t in model[s[1]].pw do
i=i+1
if i==roll then --print(c)
if c~="I" then
c=string.lower(c)
end
--local we =model[c].fr/2
local sol=t
if sol>hc then
hc=sol
lw=tostring(c)
end
end
end
local l2w=nil
if i>=3 then l2w=words[i-2]
local roll=cm.mathrandom(1,#model[s[1]].p2w)
local i=0
for c,t in model[s[1]].p2w do
i=i+1
if i==roll then
--print(c)
if c~="I" then
c=string.lower(c)
end
--local we =model[c].fr/2
--local sol=t
--if sol>hc then
-- hc=sol
l2w=tostring(c)
--end
end
end
end
-- Emit predicted tokens, skipping immediate repeats of the last one.
if l2w and l2w:lower()~=prevc:lower() then
news=news.." "..l2w
--elseif i>2 then
--news=news.." "..words[i-2]
end
if lw and lw:lower()~=prevc:lower() then
news=news.." "..lw
prevc=lw
elseif t~=1 then
news=news.." "..words[i-1]
end
if tnw and prevc:lower()~=tnw:lower() then
news=news.." "..s[1].." "..tnw
prevc=tnw
elseif i<#words then
news=news.." "..s[1].." "..words[i+1]
end
if tn2w and prevc:lower()~=tn2w:lower() then
news=news.." "..tn2w
prevc=tn2w
--elseif #words<i+2 then
-- news=news.." "..words[i+2]
end
prevc=s[1]
--table.insert()
--table.sort(model, function(a, b) return a.fr > b.fr end)
else
--news=news.." "..wo
end
else
-- Skipped word: remember its best next-word candidate for later.
local s=cm.Getsynonyms(wo,true)
local tnw=nil
-- NOTE(review): `s` is a table, so model[s] is ALWAYS nil and this
-- block never runs; almost certainly meant model[s[1]] — verify.
if model[s] then
for c,t in model[s[1]].nw do
if c~="I" then
c=string.lower(c)
end
---local we =model[c].fr/8
local sol=t
if sol>hci then
hci=sol
tnwo=tostring(c)
end
end
end
--news=news.." "..wo
end
else news=news.." "..wo prevc=wo
end
end
table.insert(responses,news)
end
print(responses)
end
When you apply the sigmoid function it turns all of the words into weights that can be used to weight entries in a database.
I’m more or less learning this by reverse engineering, so the concepts were applied by conjecture rather than from theory.
I updated this post because it appears that a lot of people seem to be glancing over the ChatModule. It’s much more efficient and accurate than the first iteration I shared! So you should update your functions with the CompleteQuery function in the ChatModule library. It weighs synonyms lower than the original words(score of 1), original word has a (score of 2) and antonyms have a negative score of -.75. It’s very good. You could use the getEmotion to create emotion based responses such as I did with an emotional response database called empathy and get the mood of the NPC.
Here is how to Scale Massive databases using the command prompt.
I cleaned this massive dataset. and grouped them by the first word, use that word as a key for the database and then generate modules of each key to optimize performance as this dataset is over 120,000 entries.
I wanted to showcase this result I got from training a previous- and next-word prediction algorithm with a word vector model.
" the stars born not dim that of the luminous the concluding key mortal I am whole its people mortal I am of the luminous"
from this input sentence
“The stars grow dim and the veil frays. The final era approaches, and I am its herald. I am Vhorzun of the Endless Hunger!”
What is the global spells module? There are also a lot of other modules in the chat bot that makes this completely unsuable since you didn’t share them
They are not required. The GlobalSpells module is just used as a solution to having global tables. The main functions you would use are the CompleteQuery and SearchQuery, Getemotion,randomizeString,randomizestringlight,and InsertEmojis functions which require only minimal variables “string” you are querying with and the database table of strings you are searching.
So in short just omit any required modules and it will work fine.
But I did not include my modified Emoji Handler module because it was originally created by someone else but I added a key-lookup table and completed the emojis list.
So I’ll drop that here.
Otherwise I would reference the initial example and use the module functions in place of the original ones, then you can use the emoji functions with this module.
I updated the format of this post and linked all of the resources I open sourced for this topic in the parent post, such as the EmojiHandler, the Awareness module, the Vector Matrix library, and the list of free AI inference APIs. These resources are likely not going to be updated again anytime soon unless someone raises concerns or issues. If you have an old version, be sure to update it, because it works much more efficiently and accurately than it originally did. But the original method still produces unique results, because it would rephrase the input from a table of synonyms until it got a response, thus leveraging the database more than it should. So now it gets an array of the original words, then subgroups of synonyms, reflections, and the antonyms for each word, and uses string.find instead of tokenizing the database string into words. It weighs antonyms with a negative score on the output string.
It also uses nouns, but I found that results are problematic when weighing nouns, so that has been omitted. In my current version I use a word frequency table that notes the frequency of words in the dataset to create a bonus weight, which rewards less frequently used words over common words.
Im making a GTA 5 type open world game, would this AI work for my game or is this AI tool meant for medieval games?
Yeah that’s what I’m using it for it requires a database and input string.
You can use the example code above to set it up. It would work fine for that. This AI is a blank slate so you just input your data and organize it into the context databases and search them lineary or use weights like I’m using a context Matrix that uses the score of the chatmodule gives the output to keep context in a conversation… Then I search the databases in that order. You can test out my AIs I made that are running on this module.
Okay
Thank you so much for this model I will be using this for a future update for my game
It does not work at all without the module.
I used the Getemotion(str) function to classify a massive dataset I had processed and indexed into chapters based on their first word, producing a table of addresses that express each emotional tonality. This will be an assisting table that changes with the bot's current mood. I think it's interesting because I was considering not using their direct addresses, but I wrote a function to do it and got this nice direct lookup table for the address of each entry. This is like a vector database where you assign locations to classifying labels for use in zero-shot classification.
{
["Angry"] = {
[1] = {
["chapter"] = "Brothers",
["index"] = 9
},
[2] = {
["chapter"] = "As",
["index"] = 10
},
[3] = {
["chapter"] = "Righteous",
["index"] = 70
},
[4] = {
["chapter"] = "Righteous",
["index"] = 96
},
[5] = {
These bots are fine tuned to get good results and only use vector of word frequency to weigh entries with also rewarding entries with consecutive matches based on synomarray which has the synonyms antonyms, reflection and nouns. classiied for weighing in entries since nouns are typically most important and so are less frequently used words.
So I compressed the word chapter and index to 1 and 0 and used minify tool to make the table smaller.
Then I turn it into a module to use as a resource and use this function to get the emotion dataset use direct key lookup and indexing. while also caching the dataset as it is called.
--- Resolve an emotion label to its dialogue entries, with caching.
-- Lazily requires the dialogue book and the emotion->address index, replaces
-- each address with the actual dialogue line, and caches the resolved list.
-- Address format: v[1] = chapter key, v[2] = index within that chapter.
-- FIX: the cache assignment `_G[emotion] = ...` was inside the loop and
-- re-executed every iteration; it now runs once after resolution.
-- NOTE(review): _G is used as a cross-script cache here — confirm this is
-- intentional rather than a module-local table.
-- @param emotion string emotion label (e.g. "Angry")
-- @return table array of dialogue lines for that emotion
function index.simulatechapter(emotion)
	if not book then
		book = require(script.FantasyDialogue)
	end
	if not emotiond then
		emotiond = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context.EmotionIndex)
	end
	if _G[emotion] == nil then
		-- Replace each {chapter, index} address with the actual line.
		for i, v in emotiond[emotion] do
			emotiond[emotion][i] = book[v[1]][v[2]]
		end
		_G[emotion] = emotiond[emotion]
	end
	return _G[emotion]
end
Then I used a parallel bindable function running the chatmodule to process this particular dataset.
-- Bindable-function server script: answers chat queries against context
-- databases, falling back to an emotion-classified dataset when no context
-- database matches. Runs once the game has finished loading.
repeat wait(3) until game:IsLoaded()
print("Running")
local words=nil
local bindableFunction = script.Event
local cm=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.ChatModule)
-- contextdb is deliberately global; lazily required on first invoke below.
contextdb=nil
local textdata=nil
local index=nil
local contdbm=nil -- last chosen context database (persists across invokes)
--local corpus=function() if textdata==nil then contextdb=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.TextCorpus)
bindableFunction.OnInvoke = function(str,database,filter,repetitive,randomize,context,reverse,spaces,mode,synomarray,words2) -- Define a function that runs when the BindableFunction is invoked and receives the name as a parameter
-- NOTE(review): this line resets the global `result` to nil while
-- assigning the other globals to themselves — presumably meant as a
-- reset of all four; verify.
result,blacklist,score,weight=nil,blacklist,score,weight
print("Called")
if contextdb==nil then contextdb=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context) end
-- First pass: pick the best-matching context database for the query.
local contdb=cm.CompleteQuery(str,contextdb,filter,repetitive,true,context,reverse,spaces,2,synomarray,words2)
if contdb then -- if no contdb, fall back (emotion lookup or previous one)
contdbm=contdb
else
local emotion=cm.getemotion(str)
if emotion then
-- NOTE(review): `index` is only required further below (inside the
-- `if contdbm` branch); on the first invoke that reaches this line,
-- `index` is still nil and this call will error — verify.
contdbm=index.simulatechapter(emotion)
end
end
if contdbm then
-- NOTE(review): prints `contdb`, which is nil on the emotion fallback
-- path — possibly meant `contdbm`.
print(contdb)
if index ==nil then
index=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context.Index)
end
local db=index.chapter(contdbm)
--print(db)
if db then
-- Second pass: query the chosen chapter's entries for the reply.
result,blacklist,score,weight=cm.CompleteQuery(str,db,false,true,true,context,reverse,spaces,nil,synomarray,words2)
print(result)
end
end
return result,blacklist,score,weight
end