Chatbot & LLM Artificial Intelligence Model API Code Documentation FREE (Open Source) and Other Useful APIs

Here are some free inference AI models from Hugging Face and other free APIs you can use for your game.
I’ve been doing research and chose some of the best free models to create this open-source documentation for the community.
Please note: you must get your bearer key from Hugging Face.
The bearer key is free and does not require payment information. Visit Hugging Face and click Deploy -> Inference -> Get access token. All of the examples below also assume that HTTP requests are enabled in your game settings.
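A minimal setup sketch, assuming the token is stored directly in the script; the variable names match what the functions below expect, and the Hugging Face Inference API requires the "Bearer " prefix in the Authorization header:

local HttpService = game:GetService("HttpService")
-- Your free Hugging Face access token, prefixed with "Bearer " (placeholder value shown)
local Bearerkey = "Bearer hf_xxxxxxxxxxxxxxxxxxxxxxxx"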
This first API gets the summary of the top result from Wikipedia.

function SearchWikipedia(searchq)
	local url = "https://en.wikipedia.org/w/rest.php/v1/search/page?q="
	-- Define the query parameters
	-- Make the request and get the response
	local success, response = pcall(function()
		return HttpService:RequestAsync({
			Url = url .. HttpService:UrlEncode(searchq), -- URL-encode the search query
			Method = "GET"
		})
	end)

	-- Check if the request was successful
	if success then
		-- Check if the response status code was 200 (OK)
		if response.StatusCode == 200 then
			-- Parse the response body as JSON
			local data = HttpService:JSONDecode(response.Body)
			local Filter = nil
			-- Walk the result list until an excerpt passes the filter (or the list runs out)
			local index = 0
			local item
			repeat
				index = index + 1
				item = data.pages[index]
				if item then
					Filter = chatmodule.SearchQuery(item.excerpt, badwords, true, true, false)
				end
			until Filter == nil or item == nil
			if item == nil then
				return -- every result was filtered out
			end
			-- Extract the title and text from the item
			local title = item.title
			local excerpt = item.excerpt
			local pattern = "<span class=\"searchmatch\">(.-)</span>"

			-- Strip the <span class="searchmatch"> highlight tags, keeping the text inside them
			local text = excerpt:gsub(pattern, "%1")

			-- Print the title and text to the output
			print(title)
			print(text)
		else
			-- Print the response status code and status message to the output
			print("Error: " .. response.StatusCode .. " " .. response.StatusMessage)
		end
	else
		-- Print the error message to the output
		print("Error: " .. response)
	end
end
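A quick usage sketch; "Roblox" is just an example query, and the function only prints the title and cleaned excerpt of the top unfiltered result:

SearchWikipedia("Roblox")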

Search for a Wikipedia article. I included my filtering module function to keep out dangerous results, but it does not work well in this context. This version returns the article's plain-text introduction extract rather than just the search snippet.

function SearchWikipedia2(searchq)
	local url = "https://en.wikipedia.org/w/rest.php/v1/search/page?q="
	-- Define the query parameters
	-- Make the request and get the response
	local success, response = pcall(function()
		return HttpService:RequestAsync({
			Url = url .. HttpService:UrlEncode(searchq), -- URL-encode the search query
			Method = "GET"
		})
	end)

	-- Check if the request was successful
	if success then
		-- Check if the response status code was 200 (OK)
		if response.StatusCode == 200 then
			-- Parse the response body as JSON
			local data = HttpService:JSONDecode(response.Body)

			-- Walk the result list until an excerpt passes the filter (or the list runs out)
			local Filter = nil
			local index = 0
			local item
			repeat
				index = index + 1
				item = data.pages[index]
				if item then
					Filter = chatmodule.SearchQuery(item.excerpt, badwords, true, true, false)
				end
			until Filter == nil or item == nil
			if item == nil then
				return -- every result was filtered out
			end

			-- Extract the title and text from the item
			local title = item.title
			local excerpt = item.excerpt
			local pattern = "<span class=\"searchmatch\">(.-)</span>"

			-- Strip the <span class="searchmatch"> highlight tags, keeping the text inside them
			local text = excerpt:gsub(pattern, "%1")

			-- Print the title and text to the output
			print(title)
			print(text)

			-- Extract the key from the item
			local key = item.key

			-- Construct the article URL from the key and the base URL
			local base_url = "https://en.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&explaintext&titles="
			local article_url = base_url .. key.."&format=json"

			-- Print the article URL to the output
			--print(article_url)

			-- Make another request to get the article content
			local success, response = pcall(function()
				return HttpService:RequestAsync({
					Url = article_url,
					Method = "GET"
				})
			end)

			-- Check if the request was successful
			if success then
				-- Check if the response status code was 200 (OK)
				if response.StatusCode == 200 then
					-- Parse the response body as JSON

					-- Access the extract property of the JSON object
					local data = HttpService:JSONDecode(response.Body)

					-- Access the pages table of the JSON object
					local pages = data.query.pages

					-- The pages table is keyed by page id, so iterate over it to reach the single entry
					for _, value in pairs(pages) do
						-- Return the plain-text extract of the article introduction
						return value.extract
					end
				else
					-- Print the response status code and status message to the output
					print("Error: " .. response.StatusCode .. " " .. response.StatusMessage)
				end
			else
				-- Print the error message to the output
				print("Error: " .. response)
			end

		end 
	end
end
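Usage sketch; the function returns the plain-text extract (or nil if every result was filtered out or the request failed), so you can feed it to a chatbot reply:

local extract = SearchWikipedia2("Albert Einstein")
if extract then
	print(extract)
end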

Summarize-a-paragraph AI API (facebook/bart-large-cnn summarization model).

function summarization(inputq)
	-- Summarization model: https://huggingface.co/facebook/bart-large-cnn

	-- Define the URL and the headers for the request
	local API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
	local headers = {
		["Authorization"] = Bearerkey,
		--["Content-Type"] = "application/json"
	}

	-- Define the payload for the request
	local payload = {
		inputs = inputq
	}

	-- Encode the payload as a JSON string
	local payloadJSON = HttpService:JSONEncode(payload)

	-- Send the request and get the response
	local success, response = pcall(function()
		return HttpService:RequestAsync({
			Url = API_URL,
			Method = "POST",
			Headers = headers,
			Body = payloadJSON
		})
	end)

	-- Check if the request was successful
	if success then
		-- Decode the response as a JSON table
		local responseJSON = HttpService:JSONDecode(response.Body)

		-- Check if the response has a summary
		if responseJSON[1] and responseJSON[1].summary_text then -- Use [1] to access the first element of the array
			-- Print the summary
			print(responseJSON[1].summary_text) -- Use [1] to access the first element of the array
			return responseJSON[1].summary_text	
		else
			-- Print an error message
			print(response)
			return inputq
		end
	else
		-- Print an error message
		
		print("Request failed: " .. response)
		return inputq
	end

end
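Usage sketch; note the function returns the original text unchanged if the request fails or the model does not return a summary:

local paragraph = "The industrial revolution began in Britain in the late 18th century and transformed manufacturing, transport and daily life, eventually spreading across Europe and North America over the following decades."
print(summarization(paragraph))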

Google Flan-T5 instruction-following chatbot AI.

function Instruct(inputText)
-- Import the HttpService


-- Define the API URL
local API_URL = "https://api-inference.huggingface.co/models/google/flan-t5-base"

-- Define the headers with your authorization key
local headers = {
	["Authorization"] = Bearerkey
}

-- Define a function to query the API with a payload
local function query(payload)
	-- Encode the payload as a JSON string
	local jsonPayload = HttpService:JSONEncode(payload)
	-- Send a POST request to the API URL with the headers and the payload
	local response = HttpService:PostAsync(API_URL, jsonPayload, Enum.HttpContentType.ApplicationJson, false, headers)
	-- Decode the response as a JSON table
	local jsonResponse = HttpService:JSONDecode(response)
	-- Return the JSON table
	return jsonResponse
end

-- Query the API with your input text as the inputs field
local output = query({
	["inputs"] = inputText
})

-- Extract and return the generated text from the first result
local generatedText = output[1].generated_text
return generatedText
end 
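Usage sketch; Flan-T5 responds best to a direct instruction or question:

print(Instruct("Answer the following question: What is the capital of France?"))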

Basic GPT2 sentence completion API.

function GPT2(inputText)
-- Import the HttpService


-- Define the API URL
--local API_URL = "https://api-inference.huggingface.co/models/gpt2"
	local API_URL ="https://api-inference.huggingface.co/models/gpt2-large"
-- Define the headers with your authorization key
local headers = {
	["Authorization"] = Bearerkey
}

-- Define a function to query the API with a payload
local function query(payload)
	-- Encode the payload as a JSON string
	local jsonPayload = HttpService:JSONEncode(payload)
	-- Send a POST request to the API URL with the headers and the payload
	local response = HttpService:PostAsync(API_URL, jsonPayload, Enum.HttpContentType.ApplicationJson, false, headers)
	-- Decode the response as a JSON table
	local jsonResponse = HttpService:JSONDecode(response)
	-- Return the JSON table
	return jsonResponse
end
	
-- Define your input text
	
-- Query the API with your input text as the inputs field
	-- Note: the Hugging Face Inference API usually expects generation options nested under a
	-- "parameters" table; they are left at the top level here as in the original, so the model
	-- may simply fall back to its defaults if they are ignored
	local output = query({
		["inputs"] = inputText,
		["max_length"] = 200, -- This will limit the output to 200 tokens
		["num_return_sequences"] = 1, -- This will generate only one output
		["temperature"] = 2 -- This will increase the randomness of the output
	})

	-- Extract and return the generated text from the first result
	local generatedText = output[1]["generated_text"]
	print(output)
	return generatedText

end
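Usage sketch; GPT-2 simply continues whatever text you give it:

print(GPT2("Once upon a time, in a small village by the sea,"))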

Conversational models. Inputs: the new message string, a table of previous user inputs, a table of generated responses, and a model choice of 1 or 2 (random if nil).

function conversationaldialogue(str,context,responses,model)
	local API_URL
	if model == nil then
		model = math.random(1, 2) -- pick one of the two models at random if none was specified
	end
	if model==1 then
	 API_URL = "https://api-inference.huggingface.co/models/microsoft/GODEL-v1_1-base-seq2seq"
	else
	-- Define the URL and the headers for the request
	 API_URL = "https://api-inference.huggingface.co/models/facebook/blenderbot-400M-distill"
	end
	
	local headers = {
		["Authorization"] = Bearerkey,
		--["Content-Type"] = "application/json"
	}
	table.insert(context, str) -- add the new message to the past user inputs
	-- Define the payload for the request
	local payload = {
		inputs = {
			past_user_inputs = context,
			generated_responses = responses
		}
	}

	-- Encode the payload as a JSON string
	local payloadJSON = HttpService:JSONEncode(payload)

	-- Send the request and get the response
	local success, response = pcall(function()
		return HttpService:RequestAsync({
			Url = API_URL,
			Method = "POST",
			Headers = headers,
			Body = payloadJSON
		})
	end)

	-- Check if the request was successful
	if success then
		-- Decode the response as a JSON table
		local responseJSON = HttpService:JSONDecode(response.Body)

		-- Check if the response has a generated_text
		if responseJSON.generated_text then
			-- Print the generated_text
			return	(responseJSON.generated_text)
		else
			-- Print an error message
			print(response)
			return nil
		end
	else
		-- Print an error message
		print("Request failed: " .. response)
		return nil
	end

end
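A usage sketch showing how the history tables are threaded between turns. The function already appends the new message to the context table, but appending the reply to the responses table is left to the caller:

local context = {}   -- previous user inputs
local responses = {} -- previous generated responses

local reply = conversationaldialogue("Hello there, how are you today?", context, responses, 2)
if reply then
	table.insert(responses, reply)
	print(reply)
end

-- the next turn reuses the same tables so the model keeps the conversation history
local reply2 = conversationaldialogue("What should I build in my game?", context, responses, 2)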

Philosophical quotes API. It keeps to the same theme when given the same input keyword.

function buildquotes(keyword)
	-- Make the request and get the response
	-- url2 is assumed to be defined elsewhere as the quotes endpoint that takes a keyword
	local success, response = pcall(function()
		return HttpService:RequestAsync({
			Url = url2 .. string.lower(keyword), -- Append the keyword to the URL
			Method = "GET"
		})
	end)

	-- Check if the request was successful
	if success then
		-- Check if the response status code was 200 (OK)
		if response.StatusCode == 200 then
			-- Parse the response body as JSON
			local data = HttpService:JSONDecode(response.Body)

			-- Get a random item from the result list
			local item = data[math.random(#data)]

			-- Extract the quote and author from the item
			local quote = item.q
			local author = item.a
			if quote ~= "Too many requests. Obtain an auth key for unlimited access." then
				-- Print the quote and author to the output
				print(quote)
				print("- " .. author)
				return quote
			end
		else
			-- Print the response status code and status message to the output
			print("Error: " .. response.StatusCode .. " " .. response.StatusMessage)
			return ""
		end
	else
		-- Print the error message to the output
		print("Error: " .. response)
		return ""
	end
end
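Usage sketch; url2 is not defined in the snippet above, so this example assumes a keyword-based quotes endpoint (the rate-limit message checked in the code matches the ZenQuotes service, which is used here purely as an illustrative guess):

local url2 = "https://zenquotes.io/api/quotes/" -- assumed endpoint; replace with the one you actually use
print(buildquotes("Wisdom"))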

Zero-shot classification. Input a string and a table of labels (maximum of 10), e.g. labels = {"greetings", "question"}.

function zero_shot_classification (input, labels)
	-- Get the HttpService


	-- Define the URL and the headers for the request
	local API_URL = "https://api-inference.huggingface.co/models/sileod/deberta-v3-base-tasksource-nli"
	local headers = {
		["Authorization"] = Bearerkey, -- Your Hugging Face token
		--["Content-Type"] = "application/json" -- The content type of the request
	}

	-- Define the payload for the request
	local payload = {
		inputs = input, -- The input text to classify
		parameters = {
			candidate_labels = labels -- The candidate labels to choose from
		}
	}

	-- Encode the payload as a JSON string
	local payloadJSON = HttpService:JSONEncode(payload)

	-- Send the request and get the response
	local success, response = pcall(function()
		return HttpService:PostAsync(API_URL, payloadJSON, Enum.HttpContentType.ApplicationJson, false, headers) -- Post the JSON payload to the API URL with the headers
	end)

	-- Check if the request was successful
	if success then
		-- Decode the response as a JSON table
		local responseJSON = HttpService:JSONDecode(response)

		-- Print the scores and labels of the classification
		print("Scores: " .. table.concat(responseJSON.scores, ", "))
		print("Labels: " .. table.concat(responseJSON.labels, ", "))
		return responseJSON.scores,responseJSON.labels
	else
		-- Print an error message
		print("Request failed: " .. response)
	end
end

function get_highest_classifier_value (input, labels)
	-- Call the zero_shot_classification function to get the scores and labels
	local scores, labels = zero_shot_classification (input, labels)
	if scores == nil then
		return nil -- the classification request failed
	end

	-- Initialize the variables to store the highest score and label
	local highest_score = -math.huge -- A very small number
	local highest_label = ""

	-- Loop through the scores and labels arrays
	for i = 1, #scores do
		-- Check if the current score is higher than the highest score
		if scores[i] > highest_score then
			-- Update the highest score and label
			highest_score = scores[i]
			highest_label = labels[i]
		end
	end

	-- Return the highest score and label
	return highest_score, highest_label
end
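Usage sketch, classifying a chat message against a small label set:

local labels = {"greetings", "question", "insult", "compliment"}
local score, label = get_highest_classifier_value("How do I craft a sword in this game?", labels)
print(label, score) -- expected to favor "question"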

Cheesy Chuck Norris-style geek jokes API.

function ExtractjokeTable()
	local jokeURL = "https://geek-jokes.sameerkumar.website/api?format=json"
	local response = HttpService:GetAsync(jokeURL)
	
	local data =HttpService:JSONDecode(response)
	if data.joke then
		-- response successful
		table.insert(jokes,data.joke)
		return data.joke

	end
end
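Usage sketch; the jokes table referenced inside ExtractjokeTable is assumed to be declared near the top of the script, before the function definition, so fetched jokes can be cached:

local jokes = {} -- declared before ExtractjokeTable so it can cache results here

print(ExtractjokeTable()) -- fetches and prints one random geek joke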

Search Open Library for a book.

function search_book (query)
	-- Get the HttpService


	-- Define the URL and the parameter for the request
	local API_URL = "https://openlibrary.org/search.json"
	local param = "q=" .. HttpService:UrlEncode(query) -- Encode the query parameter

	-- Send the request and get the response
	local success, response = pcall(function()
		return HttpService:GetAsync(API_URL .. "?" .. param) -- Append the parameter to the URL
	end)

	-- Check if the request was successful
	if success then
		-- Decode the response as a JSON table
		local responseJSON = HttpService:JSONDecode(response)

		-- Check if the response has any docs
		if responseJSON.num_found > 0 then
			-- Get the first doc from the docs array
			local doc = responseJSON.docs[1]

			-- Print some information about the doc
			print("Title: " .. doc.title)
			print("Author: " .. (doc.author_name and doc.author_name[1] or "Unknown"))
			print("First published year: " .. (doc.first_publish_year or "Unknown"))
			print("Open Library ID: " .. doc.key)
			--print(response)
		else
			-- Print an error message
			print("No results found for query: "..response)
		end
	else
		-- Print an error message
		print("Request failed: " .. response)
	end
end
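Usage sketch; the function prints details of the first matching book:

search_book("The Hobbit")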

If you would like to contribute to this documentation, you can post examples of API usage for these additional APIs. I will post future examples here.
EndlessMedicalAPI API Documentation (lukaszkiljanek) | RapidAPI


After reviewing all of the APIs, I have refined the list to only the completely free ones :slight_smile:

Nutrition by API-Ninjas API (apininjas) | RapidAPI
Planets by API-Ninjas API (apininjas) | RapidAPI
Random Words API (sheharyar566) | RapidAPI
ScrapTik API (scraptik-api-scraptik-api-default) | RapidAPI
Instagram Bulk Profile Scrapper API (thekirtan) | RapidAPI
MoviesDatabase API (SAdrian) | RapidAPI
Word of the day API (jayantur13) | RapidAPI
Astrologer API (gbattaglia) | RapidAPI
Quotes Inspirational Quotes Motivational Quotes API (ipworld) | RapidAPI
apiv1 Web Service | api.chartlyrics.com
100% Success Instagram API - Scalable & Robust API (omer-PLP3AhSTgCW) | RapidAPI
Fun translations API | funtranslations.com
official-joke-api.appspot.com/random_joke


Here’s a video I made of an AI built using some of these APIs :slight_smile:


Here is another video utilizing the Wikipedia API along with my chatbot! Soon I’ll be releasing an open-source architecture to use with my chatmodule and these APIs.
Here is a video of it in action so far! It’s demonstrating some funny behaviors.


Roleplaying Zephyr 7B Luau API Documentation (Free API) System Message - Resources / Community Resources - Developer Forum | Roblox
I have posted new API documentation in a separate post!

Here’s new model documentation for start_of_turn model formats!
This is the model released by Google yesterday, called Gemma.
You can replace the API URL with any of these model endpoints:
API_URL = "https://api-inference.huggingface.co/models/google/gemma-2b-it"
API_URL = "https://api-inference.huggingface.co/models/google/gemma-7b-it"
API_URL = "https://api-inference.huggingface.co/models/google/gemma-2b"
API_URL = "https://api-inference.huggingface.co/models/google/gemma-7b"

local BearerID = "Get your Free API key from Huggingface"
local httpService = game:GetService("HttpService") -- the requests below use this lowercase alias
function cm.Gemma(systemMessage, query, previousConversation)
-- Define the API URL and headers
local API_URL = "https://api-inference.huggingface.co/models/google/gemma-2b-it"
local headers = {Authorization = BearerID}

-- Define a function to query the API with a payload
local function querys(payload)
	-- Use HttpService to make a POST request
	local response = httpService:PostAsync(API_URL, httpService:JSONEncode(payload), Enum.HttpContentType.ApplicationJson, false, headers)
	-- Return the response as a Lua table
	return httpService:JSONDecode(response)
end

-- Define a function to generate a chat response using the API and a chat template
--local function chat(systemMessage, query, previousConversation)
	-- Initialize an empty table to store the conversation history
	local conversation = {}
	-- Chat Start with system message	
	table.insert(conversation, "<start_of_turn>system\n" .. systemMessage .. "<end_of_turn>")
	-- Append the previous conversation to the table, if any	
	if previousConversation then
		local everyother=0 --assume first entry is user input
		for key, line in previousConversation do
			if everyother==0 then
				everyother=1
				table.insert(conversation, "<start_of_turn>user\n" .. line .. "<end_of_turn>")	
			else 
				everyother=0
				table.insert(conversation, "<start_of_turn>model\n" .. line .. "<end_of_turn>")	
			end	
		end
	end
	-- Append the system message to the table, using the chat template
	-- Append the user query to the table, using the chat template
	table.insert(conversation, "<start_of_turn>user\n" .. query .. "<end_of_turn>")
	table.insert(conversation, "<start_of_turn>model\n")--provide a line for the model
	-- Query the API with the conversation table as the input
	local output = querys({inputs = table.concat(conversation, "\n")})
	-- Extract the generated text from the output
	print(output)
	local generatedText = output[1]["generated_text"]
	print(generatedText)
	local lines = {}
	for line in string.gmatch(generatedText, "[^\n]+") do
		table.insert(lines, line)
	end

print(lines)
	-- Loop through the lines in reverse order
	local result = ""
	for i = #lines, 1, -1 do
		-- Check if the line starts with <start_of_turn>model
		if lines[i]=="<start_of_turn>model" then
			-- Collect every line after this tag; those lines are the model's reply
			for i2 = #lines, 1, -1 do
				if i==i2 then break
				else 	
					local line = lines[i2]
					result = line .. "\n" .. result	
				end
			end
			break
			-- Concatenate the line to the result
	
		end
	end
print(result)

	if result then
		result = string.gsub(result, "%s+$", "") -- trim the trailing newline left over from the concatenation above
		-- Check if the result ends with <end_of_turn> (13 characters)
		if string.sub(result, -13) == "<end_of_turn>" then
			-- Remove the <end_of_turn> tag from the result
			result = string.sub(result, 1, -14)
		end

		if previousConversation==nil then
			previousConversation={}
		end
		table.insert(previousConversation,query)
		table.insert(previousConversation,result)
	end
	return result,previousConversation
end

You can test this function in a module using the command line in studio

local cm=require(game.ReplicatedStorage.GlobalSpells.ChatbotAlgorithm.ChatModule) print(cm.Gemma("You are a helpful chatbot", "What is the best way to make eggs for breakfast?", nil))

This returns the result and the updated conversation table (the output below is truncated):

     [1] = "What is the best way to make eggs for breakfast?",
                    [2] = "**How to Make Perfect Hard-Boiled Eggs:**
**Ingredients:**
* 1 large egg
* 1 tablespoon water
* Seasoning (optional)
**Instructions:**
1. **Separate the egg into two portions.** Use a spoon to remove the yolk and place it in a bowl. Keep the membrane with the yolk.
2. **Add the water to a saucepan.** Bring the water to a boil over medium-high heat.
3. **Gently lower```

New code for inference has been posted on this thread! Featuring Mistral 7B, compatible with Zephyr 7B and other similarly formatted models.