Lua Trainable RAG Local Chatbot Library, Local Model Aligns Large Model, Geometry + Worded Math Solver, Math/Geometry Data Generator, Chat Database

After testing a parallel version of this, I found bottlenecks in data transfer between cores. It runs best when you keep your data within the module, because transferring data between modules (such as a separate database module) is very expensive. I've been working with datasets of different sizes and constructed an array of word frequencies. It's roughly 600 MB of vector data tracking word frequency, the previous word, the next word, the previous two words, and the next two words.

The updated module includes functions to train AI models on word data.
I started out with previous- and next-word prediction for a sentence, which is potentially useful for connecting sentences.
Then I moved on to tracking the next two words and the previous two words so the model can fill in the blanks of a dataset.

The algorithm differs from typical word vectors because it considers synonyms when constructing the vector; the synonyms are then unpacked and randomized. It's a tradeoff, but it results in a much smaller vocabulary.

In conclusion, I think the model's vector data can be compressed further by constructing a direct lookup table with the Getsynonyms function so it can optimize itself.
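A rough sketch of that compression idea, using cm.Getsynonyms as it appears in the training code below; the BuildSynonymLookup name and the vocabulary list are hypothetical:

function cm.BuildSynonymLookup(vocabulary)
	-- Sketch: precompute word -> canonical synonym once, so the vector data can be
	-- keyed by canonical form without calling Getsynonyms again at query time.
	local lookup = {}
	for _, word in ipairs(vocabulary) do
		local s = cm.Getsynonyms(word, true)
		lookup[word] = s[1] -- every surface form maps to one canonical entry
	end
	return lookup
end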

1 Like

Implemented a sigmoid function, which converts any number to a value between 0 and 1, so I can use the weight of each database as a multiplier for each context database.

function sigmoid(x)
  return 1 / (1 + math.exp(-x))
end
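For example, a database's raw weight can be squashed into (0, 1) and used as a multiplier on an entry's score (hypothetical values and names):

-- Hypothetical usage: squash a raw database weight into (0, 1) and use it
-- as a multiplier on the score an entry received from that context database.
local databaseWeight = 4 -- made-up raw weight for one database
local entryScore = 3     -- made-up score for one matched entry
local weightedScore = entryScore * sigmoid(databaseWeight) -- about 2.95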

Check out this function that trains on a large corpus of data and then tries to use what it learned to fill in the blanks.

function cm.TrainLargeModel(strings, model)
	for i, str in ipairs(strings) do -- loop through the strings in the table
		local words = cm.splitString(str)
		for t, wo in ipairs(words) do
			if wo ~= "I" then
				wo = wo:lower()
			end
			-- collapse the word to its canonical synonym to keep the vocabulary small
			local s = cm.Getsynonyms(wo, true)

			if model[s[1]] == nil then
				model[s[1]] = {
					fr = 0,   -- word frequency
					pw = {},  -- previous-word counts
					nw = {},  -- next-word counts
					p2w = {}, -- counts for the word two positions back
					n2w = {}, -- counts for the word two positions ahead
				}
			end
			model[s[1]].fr = model[s[1]].fr + 1

			if t ~= 1 then -- previous word
				local prev = cm.Getsynonyms(words[t - 1], true)
				model[s[1]].pw[prev[1]] = (model[s[1]].pw[prev[1]] or 0) + 1
			end
			if t > 2 then -- word two positions back
				local prev2 = cm.Getsynonyms(words[t - 2], true)
				model[s[1]].p2w[prev2[1]] = (model[s[1]].p2w[prev2[1]] or 0) + 1
			end
			if t < #words - 1 then -- word two positions ahead
				local next2 = cm.Getsynonyms(words[t + 2], true)
				model[s[1]].n2w[next2[1]] = (model[s[1]].n2w[next2[1]] or 0) + 1
			end
			if t ~= #words then -- next word
				local nex = cm.Getsynonyms(words[t + 1], true)
				model[s[1]].nw[nex[1]] = (model[s[1]].nw[nex[1]] or 0) + 1
			end
		end
	end

	return model
end

function cm.EvaluteCorpus()
	local dbs=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.SupportingData:Clone())
	if not personalities then personalities=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Personalities) end
	--personalities.AllPersonalities()
	local Greetings,inquiry,IDK,Database,wisdom=personalities.AllPersonalities()
	local model={}
	--model=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.BagOfWords:Clone())

	model=cm.TrainLargeModel(Greetings,model)
	task.wait()
	model=cm.TrainLargeModel(wisdom,model)
	task.wait()
	model=cm.TrainLargeModel(Database,model)
	task.wait()
	model=cm.TrainLargeModel(dbs.Spirituality(),model)
	task.wait()
	model=cm.TrainLargeModel(dbs.ScienceWisdom(),model)
	task.wait()
	model=cm.TrainLargeModel(dbs.Truths(),model)
	task.wait()
	model=cm.TrainLargeModel(dbs.Inspiration(),model)
	task.wait()
	model=cm.TrainLargeModel(dbs.Motivation(),model)
	--dbs.Sprituality()
	return model
end


function cm.PredictRun2(strings, model)
	local responses = {}
	for i, str in ipairs(strings) do -- loop through the strings in the table
		local words = cm.splitString(str)
		local eo = 0        -- counts words since the last predicted slot
		local news = ""     -- the generated sentence
		local prevc = str   -- last token appended, used to avoid repeats
		local hci = 0
		local tnwo = nil    -- best next-word candidate carried over from skipped words

		for t, wo in ipairs(words) do
			eo = eo + 1
			if t >= 1 then
				if eo >= 3 then
					eo = 0
					if wo ~= "I" then
						wo = wo:lower()
					end
					local s = cm.Getsynonyms(wo, true)
					if model[s[1]] then
						local tn2w = nil
						local tnw = nil
						if t ~= #words then
							-- pick the highest-count next word, preferring the carried-over candidate
							local hc = 0
							for c, count in model[s[1]].nw do
								if c ~= "I" then
									c = string.lower(c)
								end
								local sol = count
								if sol > hc and hc > hci then
									hc = sol
									tnw = tostring(c)
								elseif hci > hc then
									hc = hci
									tnw = tnwo
								end
							end
							hci = 0

							-- pick the highest-count word two positions ahead
							local hc = 0
							if t < #words - 1 then
								for c, count in model[s[1]].n2w do
									if c ~= "I" then
										c = string.lower(c)
									end
									local sol = count
									if sol > hc then
										hc = sol
										tn2w = tostring(c)
									end
								end
							end
						end

						-- roll a random previous-word candidate
						local hc = 0
						local lw = words[i - 1]
						local roll = cm.mathrandom(1, #model[s[1]].pw)
						local i = 0
						for c, count in model[s[1]].pw do
							i = i + 1
							if i == roll then
								if c ~= "I" then
									c = string.lower(c)
								end
								local sol = count
								if sol > hc then
									hc = sol
									lw = tostring(c)
								end
							end
						end

						-- roll a random candidate for the word two positions back
						local l2w = nil
						if i >= 3 then
							l2w = words[i - 2]
							local roll = cm.mathrandom(1, #model[s[1]].p2w)
							local i = 0
							for c, count in model[s[1]].p2w do
								i = i + 1
								if i == roll then
									if c ~= "I" then
										c = string.lower(c)
									end
									l2w = tostring(c)
								end
							end
						end

						-- stitch the predicted words around the current word,
						-- skipping any candidate that repeats the previous token
						if l2w and l2w:lower() ~= prevc:lower() then
							news = news.." "..l2w
						end
						if lw and lw:lower() ~= prevc:lower() then
							news = news.." "..lw
							prevc = lw
						elseif t ~= 1 then
							news = news.." "..words[i - 1]
						end
						if tnw and prevc:lower() ~= tnw:lower() then
							news = news.." "..s[1].." "..tnw
							prevc = tnw
						elseif i < #words then
							news = news.." "..s[1].." "..words[i + 1]
						end
						if tn2w and prevc:lower() ~= tn2w:lower() then
							news = news.." "..tn2w
							prevc = tn2w
						end
						prevc = s[1]
					end
				else
					-- between predicted slots: remember the strongest next-word candidate
					local s = cm.Getsynonyms(wo, true)
					if model[s[1]] then
						for c, count in model[s[1]].nw do
							if c ~= "I" then
								c = string.lower(c)
							end
							local sol = count
							if sol > hci then
								hci = sol
								tnwo = tostring(c)
							end
						end
					end
				end
			else
				news = news.." "..wo
				prevc = wo
			end
		end
		table.insert(responses, news)
	end
	print(responses)
	return responses
end

When you apply the sigmoid function, it turns all of the words into weights that can be used to weight entries in a database.
I'm more or less learning this by reverse engineering, so the concepts were applied by conjecture rather than from theory.

1 Like

I updated this post because a lot of people seem to be glancing over the ChatModule. It's much more efficient and accurate than the first iteration I shared, so you should update your functions to use the CompleteQuery function in the ChatModule library. It weighs synonyms lower than the original words (synonyms score 1, the original word scores 2, and antonyms get a negative score of -0.75). It's very good. :slight_smile: You could also use getEmotion to create emotion-based responses, as I did with an emotional response database called Empathy, and to get the mood of the NPC.
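A rough sketch of that weighting (not the actual CompleteQuery implementation; entry, originals, synonyms, and antonyms are assumed inputs):

-- Sketch of the described scoring: original words score 2, synonyms 1,
-- antonyms -0.75, matched against the entry with string.find.
local function scoreEntry(entry, originals, synonyms, antonyms)
	local lowered = entry:lower()
	local score = 0
	for _, word in ipairs(originals) do
		if string.find(lowered, word:lower(), 1, true) then score = score + 2 end
	end
	for _, word in ipairs(synonyms) do
		if string.find(lowered, word:lower(), 1, true) then score = score + 1 end
	end
	for _, word in ipairs(antonyms) do
		if string.find(lowered, word:lower(), 1, true) then score = score - 0.75 end
	end
	return score
end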

1 Like

Here is how to scale massive databases using the command bar.
I cleaned this massive dataset, grouped the entries by their first word, used that word as a key for the database, and then generated a module for each key to optimize performance, since this dataset is over 120,000 entries.
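A minimal sketch of the grouping step (hypothetical helper; entries is the flat list of dataset lines), with each resulting group then written out as its own module keyed by the first word:

-- Sketch: bucket entries by their first word so each bucket can become its
-- own module and only the relevant bucket is loaded for a given key.
local function groupByFirstWord(entries)
	local groups = {}
	for _, entry in ipairs(entries) do
		local firstWord = string.match(entry, "^%s*(%S+)")
		if firstWord then
			groups[firstWord] = groups[firstWord] or {}
			table.insert(groups[firstWord], entry)
		end
	end
	return groups
end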

2 Likes

I wanted to showcase this result I got from training a previous- and next-word prediction algorithm with a word vector model:
" the stars born not dim that of the luminous the concluding key mortal I am whole its people mortal I am of the luminous"
from this input sentence
“The stars grow dim and the veil frays. The final era approaches, and I am its herald. I am Vhorzun of the Endless Hunger!”

What is the GlobalSpells module? There are also a lot of other modules in the chatbot that make this completely unusable, since you didn't share them.

1 Like

They are not required. The GlobalSpells module is just used as a solution for having global tables. The main functions you would use are CompleteQuery, SearchQuery, Getemotion, randomizeString, randomizestringlight, and InsertEmojis, which require only minimal arguments: the string you are querying with and the database table of strings you are searching.
So, in short, just omit any required modules and it will work fine.
I did not include my modified Emoji Handler module because it was originally created by someone else, but I added a key-lookup table and completed the emoji list.
So I'll drop that here.

Otherwise, I would reference the initial example and use the module functions in place of the original ones; then you can use the emoji functions with this module.

I updated the format of this post and linked all of the resources I open-sourced for this topic in the parent post, such as the EmojiHandler, the Awarenessm module, the Vector Matrix library, and a list of free AI inference APIs. These resources are likely not going to be updated again anytime soon unless someone raises concerns or issues. If you have an old version, be sure to update it, because it works much more efficiently and accurately than it originally did. The original method still produces unique results, because it would rephrase the input from a table of synonyms until it got a response, thus leveraging the database more than it should. Now it gets an array of the original words, then subgroups of synonyms, reflections, and antonyms for each word, and it uses string.find instead of tokenizing the database string into words. It weighs antonyms with a negative score on the output string.
It also used nouns, but I found the results problematic when weighing nouns, so that has been omitted. In my current version I use a word frequency table that notes the frequency of words in the dataset to create a bonus weight, which rewards less frequently used words over common words.

I'm making a GTA 5-type open-world game. Would this AI work for my game, or is this AI tool meant for medieval games?

Yeah, that's what I'm using it for; it requires a database and an input string.
You can use the example code above to set it up, and it would work fine for that. This AI is a blank slate, so you just input your data, organize it into the context databases, and search them linearly or use weights. I'm using a context matrix that uses the score the ChatModule gives the output to keep context in a conversation, and then I search the databases in that order. You can test out the AIs I made that are running on this module.

Okay

Thank you so much for this model! I will be using it for a future update to my game.

It does not work at all without the module.

I used the Getemotion(str) function to classify a massive dataset (which I had processed and indexed into chapters based on their first word) into a table of addresses that express each emotional tonality. This will be an assisting table that changes with the bot's current mood. I think it's interesting because I was considering not using their direct addresses, but I wrote a function to do it (sketched below) and got this nice direct lookup table for getting the address of each entry to query it. This is like a vector database, where you assign locations to classifying labels for use in zero-shot classification.
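A minimal sketch of that classification pass, assuming the dataset is already a book[chapter][index] table (as used further below) and that cm.getemotion returns an emotion label or nil:

-- Sketch: classify every entry with cm.getemotion and record its address
-- (chapter + index) under that emotion, producing an index like the one shown below.
local function buildEmotionIndex(book)
	local emotionIndex = {}
	for chapter, entries in pairs(book) do
		for idx, text in ipairs(entries) do
			local emotion = cm.getemotion(text)
			if emotion then
				emotionIndex[emotion] = emotionIndex[emotion] or {}
				table.insert(emotionIndex[emotion], { chapter = chapter, index = idx })
			end
		end
	end
	return emotionIndex
end

The resulting index looks like this: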

 {
   ["Angry"] =  {
      [1] =  {
         ["chapter"] = "Brothers",
         ["index"] = 9
      },
      [2] =  {
         ["chapter"] = "As",
         ["index"] = 10
      },
      [3] =  {
         ["chapter"] = "Righteous",
         ["index"] = 70
      },
      [4] =  {
         ["chapter"] = "Righteous",
         ["index"] = 96
      },
      [5] =  {

These bots are fine-tuned to get good results and only use a vector of word frequency to weigh entries, while also rewarding entries with consecutive matches based on the synomarray, which holds the synonyms, antonyms, reflections, and nouns classified for weighing entries, since nouns are typically the most important, as are less frequently used words.

So I compressed the keys "chapter" and "index" down to the numeric keys 1 and 2 and used a minify tool to make the table smaller.
Then I turned it into a module to use as a resource, and I use this function to get the emotion dataset with direct key lookup and indexing, while also caching the dataset as it is called.


function index.simulatechapter(emotion)
    if not book then
        book = require(script.FantasyDialogue)
    end
    if not emotiond then
        emotiond = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context.EmotionIndex)
    end
    -- in each address, key 1 is the chapter and key 2 is the index
    if _G[emotion] == nil then
        for i, v in emotiond[emotion] do
            -- resolve the compressed address into the actual dialogue entry
            emotiond[emotion][i] = book[v[1]][v[2]]
            _G[emotion] = emotiond[emotion]
        end
    end
    return _G[emotion]
end
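For example, fetching the cached set of entries classified as Angry is then a single keyed lookup (hypothetical usage):

local angryEntries = index.simulatechapter("Angry")
print(#angryEntries) -- number of dialogue entries tagged with that emotion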

Then I used a parallel BindableFunction running the ChatModule to process this particular dataset.

repeat wait(3) until game:IsLoaded()
print("Running")

local bindableFunction = script.Event
local cm = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.ChatModule)

local contextdb = nil -- context database module, required lazily on first invoke
local index = nil     -- chapter index module, required lazily
local contdbm = nil   -- last context database that produced a match

bindableFunction.OnInvoke = function(str, database, filter, repetitive, randomize, context, reverse, spaces, mode, synomarray, words2)
    -- Runs when the BindableFunction is invoked with the query string and search parameters.
    local result, blacklist, score, weight
    print("Called")
    if contextdb == nil then
        contextdb = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context)
    end
    if index == nil then
        index = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context.Index)
    end

    -- First pass: pick a context database for this query.
    local contdb = cm.CompleteQuery(str, contextdb, filter, repetitive, true, context, reverse, spaces, 2, synomarray, words2)
    if contdb then
        contdbm = contdb
    else
        -- if no context matched, keep the last one or fall back to the emotion index
        local emotion = cm.getemotion(str)
        if emotion then
            contdbm = index.simulatechapter(emotion)
        end
    end

    if contdbm then
        print(contdb)
        -- Second pass: search the chosen chapter for the actual response.
        local db = index.chapter(contdbm)
        if db then
            result, blacklist, score, weight = cm.CompleteQuery(str, db, false, true, true, context, reverse, spaces, nil, synomarray, words2)
            print(result)
        end
    end
    return result, blacklist, score, weight
end
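Calling it from another script would look roughly like this (a hypothetical invocation: workerScript, the boolean flags, and the preprocessing variables are placeholders, mirroring the OnInvoke parameters above):

-- Hypothetical caller: preprocess the query, then invoke the worker's BindableFunction.
local cm = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.ChatModule)
local workerScript = game.ServerScriptService.ChatWorker -- assumed location of the script above
local str = "hello there traveler"
local words = cm.splitString(str)
local synomarray = cm.GetSynomArray(words, true, false)
local result, blacklist, score, weight = workerScript.Event:Invoke(
    str, nil, false, true, true, false, false, true, 2, synomarray, words)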

I’ve completed the port of the Eliza Chatbot!

This can be useful for finding better responses from a dataset with the CompleteQuery function. The ELIZA chatbot basically turns a message into a response, often asking the user to elaborate further. It is portrayed as a psychotherapist, was originally written by Joseph Weizenbaum in the 1960s, and uses pattern matching. I ported the function to Luau, and it is simply the function below: you input text and receive a response from the chatbot. I corrected the grammar so it does not look like terminal output, ported the code from an old version of Lua, and improved it so it answers multiple sentences at once.

 function Eliza(text)
    local response = ""
    local user = string.lower(text)
    local userOrig = user

    -- randomly selected replies if no keywords
    local randReplies = {
        "What does that suggest to you?",
        "I see...",
        "I'm not sure I understand you fully.",
        "Can you elaborate on that?",
        "That is quite interesting!",
        "That's so... Please continue...",
        "I understand...", "Well, well... Do go on", 
        "Why are you saying that?",
        "Please explain the background to that remark...", 
        "Could you say that again, in a different way?",
    }
    
    local replies = {
        [" can you"] = {"Perhaps you would like to be able to"},
        [" do you"] = {"Yes, I"},
        [" can i"] = {"Perhaps you don't want to be able to"},
        [" you are"] = {"What makes you think I am"},
        [" you're"] = {"What is your reaction to me being"},
        [" i don't"] = {"Why don't you"},
        [" i feel"] = {"Tell me more about feeling"},
        [" why don't you"] = {"Why would you want me to"},
        [" why can't i"] = {"What makes you think you should be able to"},
        [" are you"] = {"Why are you interested in whether or not I am"},
        [" i can't"] = {"How do you know you can't"},
        [" i am"] = {"How long have you been"},
        [" i'm"] = {"Why are you telling me you're"},
        [" i want"] = {"Why do you want"},
        [" what"] = {"What do you think?"},
        [" how"] = {"What answer would please you the most?"},
        [" who"] = {"How often do you think of such questions?"},
        [" where"] = {"Why did you think of that?"},
        [" when"] = {"What would your best friend say to that question?"},
        [" why"] = {"What is it that you really want to know?"},
        [" perhaps"] = {"You're not very firm on that!"},
        [" drink"] = {"Moderation in all things should be the rule."},
        [" sorry"] = {"Why are you apologizing?",  "Please don't apologize",
                "Apologies are not necessary",
                "What feelings do you have when you apologize",},
        [" dreams"] = {"Why did you bring up the subject of dreams?"},
        [" i like"] = {"Is it good that you like"},
        [" maybe"] = {"Aren't you being a bit tentative?"},
        [" no"] = {"Why are you being negative?"},
        [" your"] = {"Why are you concerned about my"},
        [" always"] = {"Can you think of a specific example?"},
        [" think"] = {"Do you doubt"},
        [" yes"] = {"You seem quite certain. Why is this so?"},
        [" friend"] = {"Why do you bring up the subject of friends?"},
        [" am i"] = {"You are"},
        [" i remember"]= {
                "Do you often think of",
                "What else do you remember?",
                "Why do you recall",
                "What in the present situation reminds you of",
                "What is the connection between me and"
        },    
       
    }

    -- keywords, replies
  

    -- conjugate
    local conjugate = {
        [" i "] = "you",
        [" are "] = "am",
        [" were "] = "was",
        [" you "] = "me",
        [" your "] = "my",
        [" i've "] = "you've",
        [" i'm "] = "you're",
        [" me "] = "you",
        [" am i "] = "you are",
        [" am "] = "are",
    }

    local function createSentences(str)
        local sentences = {} -- create an empty table to store the sentences
        local start = 1 -- initialize the start index of the current sentence
        for i = 1, #str do -- loop through each character in the input string
            local c = str:sub(i, i) -- get the current character
            if c == "!" or c == "?" or c == "." or i==#str then -- check if the current character is a punctuation mark
                local sentence = str:sub(start, i) -- get the current sentence from the start index to the current index
                table.insert(sentences, sentence) -- insert the current sentence into the table
                start = i + 1 -- update the start index to the next character after the punctuation mark
            end
        end
        if sentences[1]==nil then
            return {str}
        end
    -- random replies, no keyword
  
        return sentences -- return the table of sentences
    end
    local function replyRandomly()
        response = randReplies[math.random(#randReplies)]..""
    end
    
local function processSentences(user,response)
    -- find keyword, phrase
    local function processInput()
        
        for keyword, reply in pairs(replies) do
            local d, e = string.find(user, keyword, 1, 1)
            if d then
                -- process keywords
               local chr=reply[math.random(1,#reply)]
                response = response..chr.." "
                if string.byte(string.sub(chr, -1)) < 65 then -- "A"
                    response = response..""; return
                end
                local h = string.len(user) - (d + string.len(keyword))
                if h > 0 then
                    user = string.sub(user, -h)
                end
                for cFrom, cTo in pairs(conjugate) do
                    local f, g = string.find(user, cFrom, 1, 1)
                    if f then
                        local j = string.sub(user, 1, f - 1).." "..cTo
                        local z = string.len(user) - (f - 1) - string.len(cTo)
                        response = response..j..""
                        if z > 2 then
                            local l = string.sub(user, -(z - 2))
                            if not string.find(userOrig, l) then return end
                        end
                        if z > 2 then response = response..string.sub(user, -(z - 2)).."" end
                        if z < 2 then response = response.."" end
                        return 
                    end--if f
                end--for
                response = response..user..""
                return response
            end--if d
        end--for
        replyRandomly()
        return response
    end

    -- main()
    -- accept user input
    if string.sub(user, 1, 3) == "bye" then
        response = "Bye, bye for now. See you again some time."
        return response
    end
    if string.sub(user, 1, 7) == "because" then
        user = string.sub(user, 8)
    end
    user = " "..user.." "
    -- process input, print reply
    processInput()
    response = response..""
    return response
    end
    local responsec=""
    local responses={}
    for i, v in pairs(createSentences(user)) do
        local answ = processSentences(v, " ")
        if responses[answ] == nil then -- skip duplicate replies across sentences
            print(responses)
            responses[answ] = 1
            responsec = responsec..answ
        end
    end
    return responsec
end

Here is an example of input to output from running this function on some input text with the module in the command bar:

cm=require(chatbotloc) print(cm.Eliza(" I remember picking flowers. Do you like picking flowers? Goodbye for now"))

What else do you remember? Why are you interested in whether or not I am like picking flowers? Why are you being negative?

please make ur code less dirty

2 Likes

Potentially! I do a lot of testing and experimentation, so there is some commented-out code. Otherwise, this chatbot library works efficiently; just reference the provided example. The main functions you would use are insertemoji, completequery, or searchquery. CompleteQuery leverages synonyms, reflections, antonyms, and nouns. You can generate a word vector model on your dataset to figure out the most commonly used words and potentially improve your results. The current solution is 1 minus the sigmoid of frequency, so a less frequently used word is weighed higher.
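A minimal sketch of that bonus weight, assuming model is the word-frequency table built earlier (with a fr count per word) and sigmoid is the function above:

-- Rarity bonus: 1 - sigmoid(frequency), so rarely used words weigh more.
local function rarityBonus(model, word)
	local entry = model[word]
	if entry == nil then
		return 0
	end
	return 1 - sigmoid(entry.fr)
end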

1 Like

Is this machine learning yet? :wink:

function neur.ConvalutedContextClassificationModel()
    local contextmodel = {
        --context weight matrix Y,Z are calculated and influence X position.
        ["Greetings"] = {
            ["weights"] = neu.matrix(10, 1, 1),
            --10 table 10 numbers ez
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Awareness"] = neu.matrix(8, 1, 1),
                ["Empathy"] = neu.matrix(7, 1, 1),
                --  ["Search"]=neu.matrix(5,1,.9),
                ["Classify"] = neu.matrix(6, 1, 1),
                ["Support"] = neu.matrix(2, 1, .8),
                ["Database"] = neu.matrix(3, 1, 1),
                --["Bestiary"]=neu.matrix(6,1,1),
                ["Wisdom"] = neu.matrix(4, 1, 1)
                --["Math"]=neu.matrix(0,1,1),
                -- ["Philosophy"]=neu.matrix(1,1,1),
                --["Search"]=neu.matrix(1,1,1),
                --search accuracy
            },
            ["cone"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Support"] = neu.matrix(2, 1, .8),
                --["Empathy"]=neu.matrix(9,1,1),
                --["Awareness"]=neu.matrix(8,1,1),
                ["Classify"] = neu.matrix(6, 1, 1)
            },
            ["disconnect"] = {
              -- "Therapist"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=true                      
            },
            ["entropy"] = ContextBias["smolgan"]
            --decrease weight of go on to other topics.
        },
       ["Emotions"] = {
            ["weights"] = neu.matrix(9, 1, 1),
            ["chain"] = {
                --["Emotions"]=neu.matrix(9,1,1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                --["Math"]=neu.matrix(0,1,1),
                ["Awareness"] = neu.matrix(8, 1, 1),
                ["Empathy"] = neu.matrix(10, 1, 1),
                --["Search"]=neu.matrix(4,1,.9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(3, 1, .8),
                ["Database"] = neu.matrix(6, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(4, 1, 1),
                ["Philosophy"] = neu.matrix(2, 1, 1)
            },
            ["cone"] = {
                --["Empathy"]=neu.matrix(9,1,1),
                ["Wisdom"] = neu.matrix(8, 1, 1)
                --["Wisdom"]=neu.matrix(8,1,1),
            },
            ["disconnect"] = {
                "ScienceWisdom"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=true                      
            },
            ["entropy"] = ContextBias["smolgan"]
        },
      
        ["Empathy"] = {
            ["weights"] = neu.matrix(7, 1, 1),
            ["chain"] = {
                ["Philosophy"] = neu.matrix(10, 1, 1),
                --["Math"]=neu.matrix(0,1,1),
                ["Wisdom"] = neu.matrix(9, 1, 1),
                ["Bestiary"] = neu.matrix(8, 1, 1),
                ["Classify"] = neu.matrix(3, 1, 1),
                ["Emotions"] = neu.matrix(1, 1, 1),
                ["Database"] = neu.matrix(6, 1, 1),
                --["Search"]=neu.matrix(7,1,.9),
                ["Awareness"] = neu.matrix(6, 1, 1),
                ["Greetings"] = neu.matrix(2, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8)
            },
            --["Empathy"]=neu.matrix(,1,1),
            ["entropy"] = ContextBias["gan"],
            ["cone"] = {
                ["Wisdom"] = neu.matrix(9, 1, 1)
            },
            ["disconnect"] = {
                "ScienceWisdom","Emotions","Database"
            },
            ["parameters"] = {
                filter=false,
                complete=true,
                randomize=true                      
            },
        },
        ["Therapist"] = {
            ["weights"] = neu.matrix(8, 1, 1),
            ["chain"] = {
                ["Philosophy"] = neu.matrix(10, 1, 1),
                --["Math"]=neu.matrix(0,1,1),
                ["Wisdom"] = neu.matrix(9, 1, 1),
                ["Bestiary"] = neu.matrix(8, 1, 1),
                ["Classify"] = neu.matrix(3, 1, 1),
                ["Emotions"] = neu.matrix(1, 1, 1),
                ["Database"] = neu.matrix(6, 1, 1),
                --["Search"]=neu.matrix(7,1,.9),
                ["Awareness"] = neu.matrix(6, 1, 1),
                ["Greetings"] = neu.matrix(2, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8)
            },
            --["Empathy"]=neu.matrix(,1,1),
            ["entropy"] = ContextBias["gan"],
            ["cone"] = {
                ["Wisdom"] = neu.matrix(9, 1, 1)
            },
            ["disconnect"] = {
               --"Greetings"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },
        },
        --z is loss/entropy weight/score/
        ["Support"] = {
            ["weights"] = neu.matrix(4, 1, 1),
            --subneuron only
            ["chain"] = {
                ["Emotions"] = neu.matrix(3, 1, 1),
                --["Math"] = neu.matrix(2, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(6, 1, 1),
                ["Empathy"] = neu.matrix(4, 1, 1),
                ["Search"] = neu.matrix(2, 1, .9),
                ["Classify"] = neu.matrix(3, 1, 1),
                --["Support"]=neu.matrix(,1,.8),
                ["Database"] = neu.matrix(8, 1, 1),
                ["Bestiary"] = neu.matrix(9, 1, 1),
                ["Wisdom"] = neu.matrix(6, 1, 1),
                ["Philosophy"] = neu.matrix(3, 1, 1)
            },
            ["cone"] = {
                ["Motivation"] = neu.matrix(4, 1, .8),
                ["Wisdom"] = neu.matrix(3, 1, .8),
                ["Truths"] = neu.matrix(6, 1, .8)
            },
            ["disconnect"] = {
                "ScienceWisdom","Inspiration"
            },
            ["parameters"] = {
                filter=false,
                complete=true,
                randomize=false                     
            },
            ["entropy"] = ContextBias["smolgan"]
        },
        ["Wisdom"] = {
            ["weights"] = neu.matrix(3, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(4, 1, 1),
              --  ["Math"] = neu.matrix(2, 1, 1),
                ["Greetings"] = neu.matrix(4, 1, 1),
                ["Awareness"] = neu.matrix(5, 1, 1),
                ["Empathy"] = neu.matrix(7, 1, 1),
                ["Search"] = neu.matrix(3, 1, .9),
                ["Classify"] = neu.matrix(1, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(8, 1, 1),
                ["Bestiary"] = neu.matrix(9, 1, 1),
                --["Wisdom"]=neu.matrix(4,1,1),
                ["Philosophy"] = neu.matrix(10, 1, 1)
            },
            ["cone"] = {
                ["Support"] = neu.matrix(5, 1, .8)
            },
            ["entropy"] = ContextBias["smolgan"],
            ["disconnect"] = {
                "Inspiration","Truths","Spirituality","Motivation","ScienceWisdom"
               
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },
        }, --subtract y from x
        ["Philosophy"] = {
            ["weights"] = neu.matrix(2, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Greetings"] = neu.matrix(4, 1, 1),
                ["Awareness"] = neu.matrix(5, 1, 1),
                ["Empathy"] = neu.matrix(7, 1, 1),
                ["Classify"] = neu.matrix(1, 1, 1),
                ["Support"] = neu.matrix(6, 1, .8),
                ["Database"] = neu.matrix(8, 1, 1),
                ["Bestiary"] = neu.matrix(9, 1, 1),
                ["Wisdom"] = neu.matrix(10, 1, 1)
            },
            ["cone"] = {
                ["Motivation"] = neu.matrix(4, 1, .8),
                ["Wisdom"] = neu.matrix(3, 1, .8),
                ["Truths"] = neu.matrix(6, 1, .8)
            },
            ["disconnect"] = {
                "Database"
            },
            ["entropy"] = ContextBias["gan"],
            
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },
            
        },
        ["Bestiary"] = {
            ["weights"] = neu.matrix(3, 1, 1),
            ["cone"] = {},
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                --["Math"] = neu.matrix(2, 1, 1),
                ["Greetings"] = neu.matrix(4, 1, 1),
                ["Awareness"] = neu.matrix(9, 1, 1),
                ["Empathy"] = neu.matrix(7, 1, 1),
                --["Search"]=neu.matrix(6,1,.9),
                ["Classify"] = neu.matrix(5, 1, 1),
                ["Support"] = neu.matrix(1, 1, .8),
                ["Database"] = neu.matrix(10, 1, 1),
                --	["Bestiary"]=neu.matrix(3,1,1),
                ["Wisdom"] = neu.matrix(2, 1, 1),
                ["Philosophy"] = neu.matrix(1, 1, 1)
            },
            ["disconnect"] = {
                "Search","Math","Sciencewisdom",
                "Inspiration","Truths","Spirituality","Motivation","Philosophy",
            },
            ["parameters"] = {
                filter=true,
                complete=false,
                randomize=false                      
            },
            ["entropy"] = ContextBias["gan"]
        },
        ["Search"] = {
            ["weights"] = neu.matrix(2, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
               -- ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                --["Search"]=neu.matrix(4,1,.9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {},
            ["entropy"] = ContextBias["gan"],
            ["disconnect"] = {
                "Inspiration","Truths","Spirituality","Motivation","ScienceWisdom","Database","Wisdom","Support"
                ,"Philosophy","Bestiary"
            },
        },
        ["Database"] = {
            ["weights"] = neu.matrix(5, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                ["Support"] = neu.matrix(5, 1, .8)
            },
            ["entropy"] = ContextBias["gan"],
            ["disconnect"] = {
               "ScienceWisdom"
                ,"Philosophy",
                "Inspiration","Truths","Spirituality","Motivation",
                
            },
            ["parameters"] = {
                filter=true,
                complete=false,
                randomize=false                      
            },
        },
        ["Awareness"] = {
            ["weights"] = neu.matrix(5, 1, 1),
            ["cone"] = {
                --["Greetings"]=neu.matrix(1,1,1),
                --["Support"]=neu.matrix(5,1,.8),
                --["Database"]=neu.matrix(7,1,1),
                ["Bestiary"] = neu.matrix(5, 1, 1)
            },
            --["Wisdom"]=neu.matrix(7,1,1),},
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),             
                ["Greetings"] = neu.matrix(7, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5.5, 1, 1),
                ["Wisdom"] = neu.matrix(6.5, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },
            ["entropy"] = ContextBias["gan"]
        },
        ["Math"] = {
            ["weights"] = neu.matrix(4, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Greetings"] = neu.matrix(5, 1, 1), --["Math"]=neu.matrix(0,1,1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(5, 1, 1),
                ["Search"] = neu.matrix(3, 1, .9),
                ["Classify"] = neu.matrix(6, 1, 1),
                ["Support"] = neu.matrix(7, 1, .8),
                ["Database"] = neu.matrix(8, 1, 1),
                ["Bestiary"] = neu.matrix(2, 1, 1),
                ["Wisdom"] = neu.matrix(4, 1, 1),
                ["Philosophy"] = neu.matrix(3, 1, 1)
            }, --["Math"]=neu.matrix(0,1,1),
            ["cone"] = {},
            ["entropy"] = ContextBias["supgan"],
            ["disconnect"] = {
                "Inspiration","Truths","Spirituality","Motivation","ScienceWisdom","Database","Wisdom"
                ,"Philosophy","Bestiary","Emotions","Greetings","Empathy"
            },
        },
        ["Inspiration"] = {
            ["weights"] = neu.matrix(9, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                --["Emotions"]=neu.matrix(9,1,1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                --["Awareness"]=neu.matrix(8,1,1),
                --["Empathy"]=neu.matrix(10,1,1),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Wisdom"] = neu.matrix(4, 1, 1)
            },
            ["entropy"] = ContextBias["smolgan"],
            ["disconnect"] = {
               "ScienceWisdom","Database"
                ,"Bestiary"
            },
            ["query"] = function(str,_,filter, repetitive,randomize,context,reverse,spaces,mode,synomarray,words2,tl)
                -- First, we need to get the ReplicatedStorage service
                if sup == nil then
                    sup = require(sup2)
                end -- object with the given name
            
                local Result, blacklist, score, weight =
                    bind:Invoke(
                        str,
                        sup.Inspiration(),
                        filter,
                        repetitive,
                        randomize,
                        context,
                        reverse,
                        spaces,
                        mode,
                        synomarray,
                        words2
                    )
                --print(Result)
                return Result, blacklist, score, weight
            end
        },
        ["ScienceWisdom"] = {
            ["weights"] = neu.matrix(5, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                ["Support"] = neu.matrix(5, 1, .8)
            },
            ["disconnect"] = {
                "Spirituality","Wisdom"
            },
            ["entropy"] = ContextBias["gan"],
            ["query"] = function(str,_,filter, repetitive,randomize,context,reverse,spaces,mode,synomarray,words2,tl)

                -- First, we need to get the ReplicatedStorage service
                if sup == nil then
                    sup = require(sup2)
                end -- object with the given name
               
                local Result, blacklist, score, weight =
                    bind:Invoke(
                        str,
                        sup.ScienceWisdom(),
                        filter,
                        repetitive,
                        randomize,
                        context,
                        reverse,
                        spaces,
                        mode,
                        synomarray,
                        words2
                    )
                print(Result)
                return Result, blacklist, score, weight
            end
        },
        ["Motivation"] = {
            ["weights"] = neu.matrix(3, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                ["Motivation"] = neu.matrix(4, 1, .8),
                ["Wisdom"] = neu.matrix(3, 1, .8),
                ["Truths"] = neu.matrix(6, 1, .8),
                ["Support"] = neu.matrix(5, 1, .8)
            },
            ["disconnect"] = {
                "ScienceWisdom","Search","Database"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },
            
            ["entropy"] = ContextBias["smolgan"],
            ["query"] = function(
                str,
                _,
               filter,
                repetitive,
                randomize,
                context,
                reverse,
                spaces,
                mode,
                synomarray,
                words2,
                tl)
                -- First, we need to get the ReplicatedStorage service
                if sup == nil then
                    sup = require(sup2)
                end -- object with the given name
               
                local Result, blacklist, score, weight =
                    bind:Invoke(
                        str,
                        sup.Motivation(),
                        filter,
                        repetitive,
                        randomize,
                        context,
                        reverse,
                        spaces,
                        mode,
                        synomarray,
                        words2
                    )
                print(Result)
                return Result, blacklist, score, weight
            end
        },
        --subtract y from
        ["Truths"] = {
            ["weights"] = neu.matrix(3, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                ["Motivation"] = neu.matrix(4, 1, .8),
                ["Wisdom"] = neu.matrix(3, 1, .8),
                ["Truths"] = neu.matrix(6, 1, .8),
                ["Support"] = neu.matrix(5, 1, .8)
            },
            ["disconnect"] = {
                "Bestiary","Database"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },

            ["entropy"] = ContextBias["gan"],
            ["query"] = function(
                str,
                _,
                filter,
                repetitive,
                randomize,
                context,
                reverse,
                spaces,
                mode,
                synomarray,
                words2,
                tl)
                -- First, we need to get the ReplicatedStorage service
                if sup == nil then
                    sup = require(sup2)
                end -- object with the given name
               
                local Result, blacklist, score, weight =
                    bind:Invoke(
                        str,
                        sup.Truths(),
                        filter,
                        repetitive,
                        randomize,
                        context,
                        reverse,
                        spaces,
                        mode,
                        synomarray,
                        words2
                    )
                print(Result)
                return Result, blacklist, score, weight
            end
        },
        ["Spirituality"] = {
            ["weights"] = neu.matrix(3, 1, 1),
            ["chain"] = {
                ["Emotions"] = neu.matrix(9, 1, 1),
                ["Math"] = neu.matrix(0, 1, 1),
                ["Greetings"] = neu.matrix(1, 1, 1),
                ["Awareness"] = neu.matrix(4, 1, 1),
                ["Empathy"] = neu.matrix(2, 1, 1),
                ["Search"] = neu.matrix(4, 1, .9),
                ["Classify"] = neu.matrix(7, 1, 1),
                ["Support"] = neu.matrix(5, 1, .8),
                ["Database"] = neu.matrix(7, 1, 1),
                ["Bestiary"] = neu.matrix(5, 1, 1),
                ["Wisdom"] = neu.matrix(7, 1, 1),
                ["Philosophy"] = neu.matrix(6, 1, 1)
            },
            ["cone"] = {
                ["Motivation"] = neu.matrix(4, 1, .8),
                ["Wisdom"] = neu.matrix(3, 1, .8),
                ["Truths"] = neu.matrix(6, 1, .8)
            },
            ["disconnect"] = {
                "ScienceWisdom","Database"
            },
            ["parameters"] = {
                filter=true,
                complete=true,
                randomize=false                      
            },

            ["entropy"] = ContextBias["smolgan"],
            ["query"] = function(
                str,
                _,
                filter,
                repetitive,
                randomize,
                context,
                reverse,
                spaces,
                mode,
                synomarray,
                words2,
                tl)
                -- First, we need to get the ReplicatedStorage service
                if sup == nil then
                    sup = require(sup2)
                end -- object with the given name
               
                local Result, blacklist, score, weight =
                    bind:Invoke(
                        str,
                        sup.Spirituality(),
                        filter,
                        repetitive,
                        randomize,
                        context,
                        reverse,
                        spaces,
                        mode,
                        synomarray,
                        words2
                    )
                print(Result)
                return Result, blacklist, score, weight
            end
        }
    }

    return contextmodel
end
1 Like

Here is a small dataset of queries to test and evaluate your chatbot!
If you were to test this library, you could use these queries. I accumulated them while testing and modifying the weights, because as a failsafe the bot would query an AI model with them and then save the response.

{
   ["Well met adventurer"] = "",
   ["are you a hero?"] = "",
   ["do you have a best friend?"] = "",
   ["do you have a favorite color?"] = "",
   ["do you have any fears?"] = "",
   ["do you have any hobbies?"] = "",
   ["do you have any hobby?"] = "",
   ["do you have family?"] = "",
   ["do you have friends?"] = "",
   ["do you like animals"] = "",
   ["do you like books"] = "",
   ["do you like butterflies?"] = "",
   ["do you like flying?"] = "",
   ["do you like kittens though?"] = "",
   ["do you like math"] = "",
   ["do you like music?"] = "",
   ["do you like potions"] = "",
   ["do you like reading?"] = "",
   ["do you like to read?"] = "",
   ["do you wanna be friends?"] = "",
   ["Greetings traveler."] = "",
   ["hello fair traveler"] = "",
   ["hey there how are you?"] = "",
   ["hey what is your favorite weather?"] = "",
   ["how are you doing princess?"] = "",
   ["how are you doing today"] = "",
   ["how are you doing?"] = "",
   ["how are you feeling today"] = "",
   ["how are you feeling?"] = "",
   ["how are you princess?"] = "",
   ["how do you like to dress"] = "",
   ["how strong are you?"] = "",
   ["i like it a lot it's very cool"] = "",
   ["i like to have fun and go for walks"] = "",
   ["i like your dress"] = "",
   ["i like your hair"] = "",
   ["i like your outfit"] = "",
   ["i like your wings"] = "",
   ["im doing well thank you"] = "",
   ["im here to be your friend"] = "",
   ["im here to be your best friend"] = "",
   ["in 5 words, tell me which direction is north from here"] = "",
   ["indie what should we do"] = "",
   ["i like your hair"] = "",
   ["tell me about quantum physics"] = "",
   ["tell me about the stars"] = "",
   ["tell me about unicorns"] = "",
   ["tell me something"] = "",
   ["twas a dark and stormy night"] = "",
   ["what are some of the challenges or difficulties that you face as an ai?"] = "",
   ["what are some of the ethical or moral issues that you consider when chatting with humans?"] = "",
   ["what are some of your hobbies and interests?"] = "",
   ["what are some of your hobby and interest ?"] = "",
   ["what are you doing today"] = "",
   ["what are you doing?"] = "",
   ["what are you"] = "",
   ["what are you feeling?"] = "",
   ["what are you hobbies"] = "",
   ["what are you studying?"] = "",
   ["what are you thinking about"] = "",
   ["what are you up to"] = "",
   ["what are your favorite things?"] = "",
   ["what are your hobbie"] = "",
   ["what are your hobbies?"] = "",
   ["what are your hobby"] = "",
   ["what do are your hobbies"] = "",
   ["what do you dislike?"] = "",
   ["what do you do for fun"] = "",
   ["what do you like about dogs"] = "",
   ["what do you like to do for fun"] = "",
   ["what do you like to do on the weekend"] = "",
   ["what do you like to do"] = "",
   ["what do you like to do?"] = "",
   ["what do you like to paint"] = "",
   ["what do you like to read?"] = "",
   ["what do you like to wear"] = "",
   ["what do you like to write?"] = "",
   ["what do you like"] = "",
   ["what do you like to do"] = "",
   ["what do you prefer"] = "",
   ["what do you prefer?"] = "",
   ["what is your enemy"] = "",
   ["what is your favorite color"] = "",
   ["what is your favorite thing to do"] = "",
   ["what is your name?"] = "",
   ["what is your personal style"] = "",
   ["what is your title"] = "",
   ["what should we do friend"] = "",
   ["what should we do"] = "",
   ["what's your favorite color?"] = "",
   ["what's your name"] = "",
   ["whats your favorite book"] = "",
   ["whats your favorite subject?"] = "",
   ["whats your favorite"] = "",
   ["where are you from?"] = "",
   ["where should we go my friend?"] = "",
   ["where should we go"] = "",
   ["who are your heros?"] = "",
   ["who are your friends"] = "",
   ["who is the tin man"] = "",
   ["who is your enemy"] = "",
   ["who is your leader"] = "",
   ["who is your master"] = "",
   ["wow you're smart"] = "",
   ["you don't like the bohermian rhapsody??"] = "",
   ["you like frozen?"] = "",
   ["you're my friend"] = "",
   ["your look so awesome"] = ""
}

I'm using this library to construct a vector database from this massive 244,000-line dataset, using these queries to construct the model. This allows the chatbot to use a massive amount of data by constructing relationships between the data and the queries, since it's too expensive to query the entire 244,000-line dataset.
This feels like training a network.

Here's the code used to generate a lookup table that references the data points in the dataset.
The CompleteQuery function returns the sorted matches, prioritized by accuracy, along with their addresses in the database array.

function cm.GenerateQuerySearch()
	local newdata = {}
	local contextdb = require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.Context.Index.FantasyDialogue)
	-- testdataset is the table of test queries shown above
	for i, v in testdataset do
		newdata[i] = {}
		local words = cm.splitString(i)
		local synomar = cm.GetSynomArray(words, true, false)
		task.wait()
		for t, o in contextdb do
			local Result1, blacklist, score2, weight2, sortedMatches = cm.CompleteQuery(i, o, false, false, true, false, nil, nil, nil, synomar, words)
			if sortedMatches ~= nil then
				for p, o in sortedMatches do
					local a = sortedMatches[p].address
					if newdata[i] == nil then
						newdata[i] = {}
					end
					if newdata[i][t] == nil then
						newdata[i][t] = {}
					end
					newdata[i][t][p] = a -- p is the priority rank of the match
					-- sortedMatches entries look like {match=..., count=..., weight=..., truecount=..., address=...}
				end
			end
		end
		print("Completed "..i)
	end
	print(newdata)
end

This function uses CompleteQuery to get all the possible answers for a query from the database by returning the sorted matches. At runtime, a query is first done against the table of test queries to find the most likely match, and then that match queries only the sample of possible answers mapped to it, which increases the performance of the chatbot on large datasets! With this you can harness exponentially more data and use it as a network: the input queries the test queries, then the possible responses are queried from the dataset, reducing the search size by mapping out solutions and layering the results into a network.
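A minimal sketch of that two-stage lookup at inference time, assuming lookup is the newdata table generated above (keyed by test query), contextdb is the same chapter-indexed module, and CompleteQuery's first return value is the best-matching entry string:

-- Sketch: match the input against the small set of test queries first, then
-- only search the dataset entries whose addresses were precomputed for the
-- best-matching query, instead of scanning all 244,000 lines.
function cm.AnswerWithLookup(str, lookup, contextdb)
	local words = cm.splitString(str)
	local synomar = cm.GetSynomArray(words, true, false)
	-- stage 1: find the closest precomputed test query
	local queries = {}
	for query in pairs(lookup) do
		table.insert(queries, query)
	end
	local bestQuery = cm.CompleteQuery(str, queries, false, false, true, false, nil, nil, nil, synomar, words)
	if bestQuery == nil or lookup[bestQuery] == nil then
		return nil
	end
	-- stage 2: gather only the addressed entries and search that small sample
	local candidates = {}
	for chapterKey, addresses in pairs(lookup[bestQuery]) do
		for _, address in pairs(addresses) do
			table.insert(candidates, contextdb[chapterKey][address])
		end
	end
	return cm.CompleteQuery(str, candidates, false, false, true, false, nil, nil, nil, synomar, words)
end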

If this post is too long, it's because it's taking SEVERAL MINUTES per query to train this model.

1 Like

That’s a pretty cool concept you’re working on. I used to try and brainstorm about the implementation as it would be pretty self-sufficient. Can’t wait to see it finished.

1 Like