Setting Up Ollama (Llama 3.2) for Roblox Studio
Step 1: Install Ollama
Download and install Ollama from the official website (ollama.com), then pull the Llama 3.2 model used by the scripts below, for example with: ollama pull llama3.2
Step 2: Create a Shortcut for Ollama Server
- After installation, create a new Windows shortcut.
- Right-click the shortcut, select Properties, and set the Target field to:
C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe -NoExit -Command "$env:OLLAMA_HOST='0.0.0.0'; ollama serve"
- Apply the changes and close the properties window. Setting OLLAMA_HOST='0.0.0.0' makes Ollama listen on all network interfaces instead of only localhost, so the server can be reached from outside the machine.
Step 3: Configure Router Port Forwarding
- Open CMD and run: ipconfig
- Note your Default Gateway and IPv4 Address.
- Open a web browser and enter the gateway IP to access your router settings.
- Log in and navigate to the Port Forwarding section.
- Forward external port 11434 to your machine's local IPv4 address (the one from ipconfig).
Step 4: Install Cloudflare Warp
- Download and install Cloudflare WARP.
- Enable the 1.1.1.1 setting in WARP.
- In WARP's advanced settings, create an exclude split-tunnel entry for your public IPv4 address (you can find it with a lookup site such as What Is My IP Address).
Step 5: Set Up Duck DNS
- Go to Duck DNS and create an account.
- Create a new domain.
- Set your public IPv4 address as the current IP for the domain.
- Your custom domain now resolves to your public IP; the Ollama port (11434) is still appended when connecting, as in the script below.
Step 6: Configure Roblox Studio
- Open Roblox Studio and create a new game.
- Enable HTTP Requests in Game Settings (Security → Allow HTTP Requests).
- Insert a RemoteEvent named Llama into ReplicatedStorage; both scripts below use it to relay responses to clients (a command-bar snippet for this is shown after the scripts).
- Add the following server Script (for example in ServerScriptService) to forward player chat to Ollama; replace the placeholder in url with your Duck DNS domain:
local url = "http://[your-duck-domain-name].duckdns.org:11434/api/generate"
local HttpService = game:GetService("HttpService")
local Players = game:GetService("Players")
local replicatedStorage = game:GetService("ReplicatedStorage")
local llama = replicatedStorage:WaitForChild("Llama") -- RemoteEvent used to push responses to clients
local chatService = game:GetService("Chat")
local ts = game:GetService("TextService")

-- Configuration values used below. They are assumed to be value objects parented to
-- this Script; adjust the paths (or replace them with hard-coded values) to match your setup.
local systemName = script:WaitForChild("SystemName")           -- StringValue: display name of the bot
local systemColor = script:WaitForChild("SystemColor")         -- Color3Value: name colour sent to clients
local systemChatColor = script:WaitForChild("SystemChatColor") -- StringValue: an Enum.ChatColor name, e.g. "Red"
local systemObject = script:WaitForChild("SystemObject")       -- ObjectValue: character or part the bubble chat comes from
local function httpPost(plr, msg)
	local requestData = {
		model = "llama3.2",
		system = "ChatBot", -- system prompt
		prompt = plr.DisplayName .. ": " .. msg,
		stream = false,
		format = "json", -- ask for a JSON reply; the decoding below expects a "response" field inside it
		options = {
			-- Ollama reads sampling settings from "options"; num_predict caps the response length
			num_predict = math.random(200, 400),
			temperature = 0.7,
		},
	}
	local jsonData = HttpService:JSONEncode(requestData)
	local success, rawResponse = pcall(function()
		return HttpService:PostAsync(url, jsonData, Enum.HttpContentType.ApplicationJson)
	end)
	if success then
		-- The reply is a single JSON object whose "response" field holds the model's text,
		-- which (because of format = "json") is itself a JSON string to decode again.
		local decodedResponse = HttpService:JSONDecode(rawResponse)
		local decodedResult = HttpService:JSONDecode(decodedResponse["response"])
		print(decodedResult)
		local textResponse = decodedResult["response"]
		-- Remove stray closing braces
		textResponse = textResponse:gsub("%}", "")
		-- Wrap *text* in bold rich-text tags
		textResponse = textResponse:gsub("%*(.-)%*", "<b>%1</b>")
		-- Wrap //text// in italic rich-text tags
		textResponse = textResponse:gsub("//(.-)//", "<i>%1</i>")
		-- Remove any leftover asterisks and backslashes
		textResponse = textResponse:gsub("%*", "")
		textResponse = textResponse:gsub("%\\", "")
		-- Turn newlines into line breaks
		textResponse = textResponse:gsub("\n", "<br />")
		if textResponse ~= "" then
			local filteredData = ts:FilterStringAsync(systemName.Value .. ": " .. textResponse, plr.UserId, Enum.TextFilterContext.PublicChat)
			for _, vv in pairs(Players:GetPlayers()) do
				llama:FireClient(vv, filteredData:GetNonChatStringForUserAsync(vv.UserId), systemColor.Value)
			end
			chatService:Chat(systemObject.Value, filteredData:GetNonChatStringForBroadcastAsync(), Enum.ChatColor[systemChatColor.Value])
		end
		return decodedResult
	else
		warn("HTTP request failed: " .. tostring(rawResponse))
	end
end
Players.PlayerAdded:Connect(function(plr)
	plr.Chatted:Connect(function(message)
		httpPost(plr, message)
	end)
end)
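For reference on the decoding step above: a non-streaming reply from /api/generate is a single JSON object whose response field carries the model's text, and with format = "json" that text is itself a JSON string, which is why the script decodes twice. A minimal sketch of the unpacking, using a hand-written stand-in for the raw body (the field values are illustrative, not real output):

local HttpService = game:GetService("HttpService")
-- Stand-in for rawResponse; a real reply also carries "model", "created_at", timing fields, etc.
local rawResponse = '{"model":"llama3.2","response":"{\\"response\\":\\"Hello there!\\"}","done":true}'
local outer = HttpService:JSONDecode(rawResponse)    -- the Ollama envelope
local inner = HttpService:JSONDecode(outer.response) -- the model's own JSON text
print(inner.response) --> Hello there!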
Then add the following LocalScript (for example in StarterPlayer → StarterPlayerScripts) so responses show up in the chat window:
local replicatedStorage = game:GetService("ReplicatedStorage")
local textChatService = game:GetService("TextChatService")
local llama = replicatedStorage:WaitForChild("Llama") -- same RemoteEvent the server fires

llama.OnClientEvent:Connect(function(responseData, color)
	local chatChannel = textChatService.TextChannels:FindFirstChild("RBXGeneral")
	if chatChannel then
		-- Rich-text font tag that tints the sender's name with the colour sent by the server
		local newCol = "<font color='rgb(" .. math.floor(color.R * 255) .. "," .. math.floor(color.G * 255) .. "," .. math.floor(color.B * 255) .. ")'>"
		local nameColon = string.find(responseData, ":") or 0
		local message = newCol .. string.sub(responseData, 1, nameColon) .. "</font>" .. string.sub(responseData, nameColon + 1, -1)
		chatChannel:DisplaySystemMessage(message)
	end
end)
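Both scripts expect a RemoteEvent named Llama directly under ReplicatedStorage (the server fires it, the LocalScript listens on it). You can insert one from the Explorer, or run this one-off convenience snippet in the Studio command bar:

-- Creates the RemoteEvent the two scripts reference, if it does not exist yet.
local replicatedStorage = game:GetService("ReplicatedStorage")
if not replicatedStorage:FindFirstChild("Llama") then
	local llama = Instance.new("RemoteEvent")
	llama.Name = "Llama"
	llama.Parent = replicatedStorage
end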
Now your Roblox game can interact with Llama 3.2 through Ollama!
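If responses do not come through, a quick way to separate networking problems from script problems is to send a single request from the Studio command bar and inspect the raw output. A minimal sketch, assuming the Duck DNS domain from Step 5 (replace the hostname with your own):

-- One-off connectivity test; HTTP Requests must be enabled in Game Settings.
local HttpService = game:GetService("HttpService")
local body = HttpService:JSONEncode({
	model = "llama3.2",
	prompt = "Say hello in one short sentence.",
	stream = false,
})
local ok, result = pcall(function()
	return HttpService:PostAsync("http://your-domain.duckdns.org:11434/api/generate", body, Enum.HttpContentType.ApplicationJson)
end)
print(ok, result)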