How Can I Upscale This To 20x20?

So I made a simple CNN (in Lua, not Luau). It works great on a 5x5 grid, but I've been having trouble upscaling it to 20x20. I already have corresponding training data; I just can't seem to get it right. Any help would be appreciated.
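For reference, the main thing that has to change with the grid is the length of the flattened pooling output, which the fully connected weights must match. A minimal sketch of that arithmetic, using the same floor((n - size) / stride + 1) formula as the layer code further down (the helper name is made up for illustration):

-- Hypothetical helper: how many values the FC layer has to accept
local function flattened_size(grid, filter_size, pool_size, stride)
    local conv_dim = math.floor((grid - filter_size) / stride + 1)
    local pool_dim = math.floor((conv_dim - pool_size) / stride + 1)
    return pool_dim * pool_dim
end

print(flattened_size(5, 2, 2, 1))  --> 9   (5x5 grid)
print(flattened_size(20, 2, 2, 1)) --> 324 (20x20 grid)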

local helper = require "modules/helper"
local dkjson = require "modules/dkjson"

local conv_layer = require "layers/Conv"
local fully_connected = require "layers/FullyConnected"
local max_pooling = require "layers/MaxPooling"

-- Activation Function (ReLU)
local function relu(x)
    return math.max(0, x)
end

-- Softmax Function (inputs shifted by their max for numerical stability,
-- so math.exp can't overflow on large logits)
local function softmax(input)
    local max_in = -math.huge
    for i = 1, #input do
        max_in = math.max(max_in, input[i])
    end
    local sum_exp = 0
    for i = 1, #input do
        sum_exp = sum_exp + math.exp(input[i] - max_in)
    end
    local output = {}
    for i = 1, #input do
        output[i] = math.exp(input[i] - max_in) / sum_exp
    end
    return output
end

-- Loss Function (Cross-Entropy)
local function cross_entropy_loss(output, target)
    local loss = 0
    for i = 1, #output do
        loss = loss - target[i] * math.log(output[i] + 1e-10) -- Add small constant to avoid log(0)
    end
    return loss
end

-- Backpropagation and Training (Basic Gradient Descent)
local function train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
    -- Forward pass
    local conv_out = conv_layer(input, filter, bias_conv, stride)
    local pool_out = max_pooling(conv_out, pool_size, stride)

    -- Flatten pooling output for fully connected layer
    local flattened_input = {}
    for i = 1, #pool_out do
        for j = 1, #pool_out[i] do
            table.insert(flattened_input, pool_out[i][j])
        end
    end

    local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
    for i = 1, #fc_out do
        fc_out[i] = relu(fc_out[i])
    end

    local output = softmax(fc_out)
    local loss = cross_entropy_loss(output, target)

    -- Backward pass (Gradient Descent Update)
    local d_output = {}
    for i = 1, #output do
        d_output[i] = output[i] - target[i]
    end

    -- Update Fully Connected Layer weights and biases
    for i = 1, #weights_fc do
        for j = 1, #weights_fc[i] do
            weights_fc[i][j] = weights_fc[i][j] - learning_rate * d_output[i] * (flattened_input[j] or 0)
        end
        bias_fc[i] = bias_fc[i] - learning_rate * d_output[i]
    end

    -- Update Convolutional Layer filter and bias (crude heuristic: uses only
    -- d_output[1] and the raw input rather than a true convolution gradient)
    for i = 1, #filter do
        for j = 1, #filter[i] do
            filter[i][j] = filter[i][j] - learning_rate * d_output[1] * (input[i] and input[i][j] or 0)
        end
    end
    -- bias_conv is a plain number (passed by value), so the caller must pick up
    -- the updated value from the return below for the update to take effect
    bias_conv = bias_conv - learning_rate * d_output[1]

    return loss, bias_conv
end
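Note that the filter update in train is only a rough heuristic: it multiplies d_output[1] by the raw input instead of backpropagating through the pooling and convolution. A fuller filter gradient would accumulate over every output position; a minimal sketch, assuming d_conv holds the loss gradient with respect to conv_out (which the code above does not compute):

-- Sketch only: d_conv is assumed, not produced by the code above
local function update_filter(filter, input, d_conv, learning_rate, stride)
    for k = 1, #filter do
        for l = 1, #filter[k] do
            local grad = 0
            for i = 1, #d_conv do
                for j = 1, #d_conv[i] do
                    -- each filter tap accumulates over the input patches it touched
                    grad = grad + d_conv[i][j] * input[(i - 1) * stride + k][(j - 1) * stride + l]
                end
            end
            filter[k][l] = filter[k][l] - learning_rate * grad
        end
    end
end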

local function generateWeightsFC(rows, cols)
    local weights_fc = {}
    local value = 0.1

    for i = 1, rows do
        weights_fc[i] = {}
        for j = 1, cols do
            weights_fc[i][j] = value
            value = value + 0.1
        end
    end

    return weights_fc
end
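A side note on generateWeightsFC: the values climb steadily (0.1, 0.2, ...), so with many columns the later weights get large and the softmax saturates early in training. Small random weights are the usual starting point; a sketch (the function name is made up):

-- Hypothetical alternative: small random weights centered on zero
local function generateRandomWeightsFC(rows, cols, scale)
    scale = scale or 0.1
    local weights_fc = {}
    for i = 1, rows do
        weights_fc[i] = {}
        for j = 1, cols do
            weights_fc[i][j] = (math.random() * 2 - 1) * scale -- uniform in [-scale, scale]
        end
    end
    return weights_fc
end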

-- Saving the model
local function save_model(filename, filter, bias_conv, weights_fc, bias_fc)
    local file = assert(io.open(filename, "w"))
    local data = {
        filter = filter,
        bias_conv = bias_conv,
        weights_fc = weights_fc,
        bias_fc = bias_fc
    }
    file:write(dkjson.encode(data))
    file:close()
end

-- Loading the model
local function load_model(filename)
    local file = io.open(filename, "r")
    if not file then
        return nil -- no saved model yet
    end
    local data = dkjson.decode(file:read("*a"))
    file:close()
    return data.filter, data.bias_conv, data.weights_fc, data.bias_fc
end

-- Example of 5x5 grids where digits 0 to 9 are drawn
local datasets = require "dataset"

local targets = {}

for i = 1, 10 do
    targets[i] = {}

    for j = 1, 10 do
        targets[i][j] = 0
    end
    targets[i][i] = 1
end

local filter = {
    { 1, 0 },
    { 0, -1 }
}

local weights_fc = generateWeightsFC(10, 5) -- note: cols should match the flattened pool output (3 * 3 = 9 here); with 5, the remaining features are ignored

local bias_fc = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 }

local bias_conv = 0.0
local learning_rate = 0.01
local pool_size = 2
local stride = 1

do -- resume from a saved model when one exists
    local f, bc, w, b = load_model("model.json")
    if f then
        filter, bias_conv, weights_fc, bias_fc = f, bc, w, b
    end
end

local function epoch_train(epochs)
    -- Training loop
    for epoch = 1, epochs do
        local total_loss = 0
        for i = 1, #datasets do
            local input = datasets[i]
            local target = targets[i]
            local loss
            loss, bias_conv = train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
            total_loss = total_loss + loss
        end
        if epoch % 1000 == 0 then
            print("Epoch:", epoch, "Loss:", total_loss / #datasets)
        end
    end

    save_model("model.json", filter, bias_conv, weights_fc, bias_fc)
end
epoch_train(10000)

-- Testing the model on a new input
local test_input = datasets[5 + 1] -- Testing with digit "5"

local conv_out = conv_layer(test_input, filter, bias_conv, stride)
local pool_out = max_pooling(conv_out, pool_size, stride)

-- Flatten pooling output for fully connected layer
local flattened_input = {}
for i = 1, #pool_out do
    for j = 1, #pool_out[i] do
        table.insert(flattened_input, pool_out[i][j])
    end
end

local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
for i = 1, #fc_out do
    fc_out[i] = relu(fc_out[i])
end

local output = softmax(fc_out)

-- Output the predicted class
local max_val = -math.huge
local predicted_class = 1

for i = 1, #output do
    print(i - 1, output[i])

    if output[i] > max_val then
        max_val = output[i]
        predicted_class = i
    end
end

print("Predicted class:", predicted_class - 1, max_val)

Updated it for Luau:

local HttpService = game:GetService('HttpService')
local Datastore_Service = game:GetService('DataStoreService')

local Datastore = Datastore_Service:GetDataStore('Files')

local modules = script.modules
local layers = script.layers
local datasets = script.datasets

local helper = require(modules.helper)

local conv_layer = require(layers.ConvolutionLayer)
local fully_connected = require(layers.FullyConnectedLayer)
local max_pooling = require(layers.PoolingLayer)

local huge = math.huge
local exp = math.exp
local log = math.log

-- Activation Function (ReLU)
local function relu(x)
	return math.max(0, x)
end

-- Softmax Function (inputs shifted by their max for numerical stability,
-- so exp can't overflow on large logits)
local function softmax(input)
	local max_in = -huge
	for i = 1, #input do
		max_in = math.max(max_in, input[i])
	end
	local sum_exp = 0
	for i = 1, #input do
		sum_exp = sum_exp + exp(input[i] - max_in)
	end
	local output = {}
	for i = 1, #input do
		output[i] = exp(input[i] - max_in) / sum_exp
	end
	return output
end

-- Loss Function (Cross-Entropy)
local function cross_entropy_loss(output, target)
	local loss = 0
	for i = 1, #output do
		loss = loss - target[i] * log(output[i] + 1e-10) -- Add small constant to avoid log(0)
	end
	return loss
end

-- Backpropagation and Training (Basic Gradient Descent)
local function train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
	-- Forward pass
	local conv_out = conv_layer(input, filter, bias_conv, stride)
	local pool_out = max_pooling(conv_out, pool_size, stride)

	-- Flatten pooling output for fully connected layer
	local flattened_input = {}
	for i = 1, #pool_out do
		for j = 1, #pool_out[i] do
			table.insert(flattened_input, pool_out[i][j])
		end
	end

	local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
	for i = 1, #fc_out do
		fc_out[i] = relu(fc_out[i])
	end

	local output = softmax(fc_out)
	local loss = cross_entropy_loss(output, target)

	-- Backward pass (Gradient Descent Update)
	local d_output = {}
	for i = 1, #output do
		d_output[i] = output[i] - target[i]
	end

	-- Update Fully Connected Layer weights and biases
	for i = 1, #weights_fc do
		for j = 1, #weights_fc[i] do
			weights_fc[i][j] = weights_fc[i][j] - learning_rate * d_output[i] * (flattened_input[j] or 0)
		end
		bias_fc[i] = bias_fc[i] - learning_rate * d_output[i]
	end

	-- Update Convolutional Layer filter and bias (crude heuristic: uses only
	-- d_output[1] and the raw input rather than a true convolution gradient)
	for i = 1, #filter do
		for j = 1, #filter[i] do
			filter[i][j] = filter[i][j] - learning_rate * d_output[1] * (input[i] and input[i][j] or 0)
		end
	end
	-- bias_conv is a plain number (passed by value), so the caller must pick up
	-- the updated value from the return below for the update to take effect
	bias_conv = bias_conv - learning_rate * d_output[1]

	return loss, bias_conv
end

local function generateWeightsFC(rows, cols)
	local weights_fc = {}
	local value = 0.1

	for i = 1, rows do
		weights_fc[i] = {}
		for j = 1, cols do
			weights_fc[i][j] = value
			value = value + 0.1
		end
	end

	return weights_fc
end

-- Saving the model
local function save_model(filename, filter, bias_conv, weights_fc, bias_fc)
	local data = {
		filter = filter,
		bias_conv = bias_conv,
		weights_fc = weights_fc,
		bias_fc = bias_fc
	}
	
	local all_files = Datastore:GetAsync('allFiles') or {} -- first save: nothing stored yet
	
	if not table.find(all_files, filename) then
		table.insert(all_files, filename)
		
		Datastore:SetAsync('allFiles', all_files)
	end
	
	Datastore:SetAsync(filename, data)
end

-- Loading the model
local function load_model(filename)
	local data = Datastore:GetAsync(filename)
	if not data then
		return nil -- nothing saved under this key yet
	end
	return data.filter, data.bias_conv, data.weights_fc, data.bias_fc
end
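Both of these assume the DataStore calls succeed. In practice GetAsync/SetAsync can throw (rate limits, outages), so wrapping them in pcall is the usual pattern; a sketch for the save path (the wrapper name is made up):

-- Sketch: guard the DataStore write so a failed save doesn't kill the script
local function try_save(filename, data)
	local ok, err = pcall(function()
		Datastore:SetAsync(filename, data)
	end)
	if not ok then
		warn("save_model failed:", err)
	end
	return ok
end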

-- Example of 5x5 grids where digits 0 to 9 are drawn
local dataset = require(datasets['5x5'])

local targets = {}

for i = 1, 10 do
	targets[i] = {}

	for j = 1, 10 do
		targets[i][j] = 0
	end
	targets[i][i] = 1
end

local filter = {
	{ 1, 0 },
	{ 0, -1 }
}

local weights_fc = generateWeightsFC(10, 5)

local bias_fc = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 }

local bias_conv = 0.0
local learning_rate = 0.01
local pool_size = 2
local stride = 1

do -- resume from a saved model when one exists
	local f, bc, w, b = load_model("model")
	if f then
		filter, bias_conv, weights_fc, bias_fc = f, bc, w, b
	end
end

local function epoch_train(epochs)
	-- Training loop
	for epoch = 1, epochs do
		local total_loss = 0
		
		for i = 1, #dataset do
			local input = dataset[i]
			local target = targets[i]
			local loss
			loss, bias_conv = train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
			total_loss = total_loss + loss
		end
		
		if epoch % 1000 == 0 then
			print("Epoch:", epoch, "Loss:", total_loss / #dataset)
			task.wait()
		end
	end

	save_model("model", filter, bias_conv, weights_fc, bias_fc)
end
--epoch_train(10000)

-- Testing the model on a new input
local test_input = dataset[5 + 1] -- Testing with digit "5"

local conv_out = conv_layer(test_input, filter, bias_conv, stride)
local pool_out = max_pooling(conv_out, pool_size, stride)

-- Flatten pooling output for fully connected layer
local flattened_input = {}
for i = 1, #pool_out do
	for j = 1, #pool_out[i] do
		table.insert(flattened_input, pool_out[i][j])
	end
end

local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
for i = 1, #fc_out do
	fc_out[i] = relu(fc_out[i])
end

local output = softmax(fc_out)

-- Output the predicted class
local max_val = -huge
local predicted_class = 1

for i = 1, #output do
	print(i - 1, output[i])

	if output[i] > max_val then
		max_val = output[i]
		predicted_class = i
	end
end

print("Predicted class:", predicted_class - 1, max_val)

I got it working:

local HttpService = game:GetService('HttpService')
local Datastore_Service = game:GetService('DataStoreService')

local Datastore = Datastore_Service:GetDataStore('Files')

local modules = script.modules
local layers = script.layers
local datasets = script.datasets

local helper = require(modules.helper)

local conv_layer = require(layers.ConvolutionLayer)
local fully_connected = require(layers.FullyConnectedLayer)
local max_pooling = require(layers.PoolingLayer)

local huge = math.huge
local exp = math.exp
local log = math.log
local randomseed = math.randomseed

randomseed(os.clock() + tick()) -- not really needed (math.random is never used below), but it's there

-- Activation Function (ReLU)
local function relu(x)
	return math.max(0, x)
end

-- Softmax Function (inputs shifted by their max for numerical stability,
-- so exp can't overflow on large logits)
local function softmax(input)
	local max_in = -huge
	for i = 1, #input do
		max_in = math.max(max_in, input[i])
	end
	local sum_exp = 0
	for i = 1, #input do
		sum_exp = sum_exp + exp(input[i] - max_in)
	end
	local output = {}
	for i = 1, #input do
		output[i] = exp(input[i] - max_in) / sum_exp
	end
	return output
end

-- Loss Function (Cross-Entropy)
local function cross_entropy_loss(output, target)
	local loss = 0
	for i = 1, #output do
		loss = loss - target[i] * log(output[i] + 1e-10) -- Add small constant to avoid log(0)
	end
	return loss
end

-- Backpropagation and Training (Basic Gradient Descent)
local function train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
	-- Forward pass
	local conv_out = conv_layer(input, filter, bias_conv, stride)
	local pool_out = max_pooling(conv_out, pool_size, stride)

	-- Flatten pooling output for fully connected layer
	local flattened_input = {}
	for i = 1, #pool_out do
		for j = 1, #pool_out[i] do
			table.insert(flattened_input, pool_out[i][j])
		end
	end

	local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
	for i = 1, #fc_out do
		fc_out[i] = relu(fc_out[i])
	end

	local output = softmax(fc_out)
	local loss = cross_entropy_loss(output, target)

	-- Backward pass (Gradient Descent Update)
	local d_output = {}
	for i = 1, #output do
		d_output[i] = output[i] - target[i]
	end

	-- Update Fully Connected Layer weights and biases
	for i = 1, #weights_fc do
		for j = 1, #weights_fc[i] do
			weights_fc[i][j] = weights_fc[i][j] - learning_rate * d_output[i] * (flattened_input[j] or 0)
		end
		bias_fc[i] = bias_fc[i] - learning_rate * d_output[i]
	end

	-- Update Convolutional Layer filter and bias (crude heuristic: uses only
	-- d_output[1] and the raw input rather than a true convolution gradient)
	for i = 1, #filter do
		for j = 1, #filter[i] do
			filter[i][j] = filter[i][j] - learning_rate * d_output[1] * (input[i] and input[i][j] or 0)
		end
	end
	-- bias_conv is a plain number (passed by value), so the caller must pick up
	-- the updated value from the return below for the update to take effect
	bias_conv = bias_conv - learning_rate * d_output[1]

	return loss, bias_conv
end

local function generateWeightsFC(rows, cols)
	local weights_fc = {}
	local value = 0.1

	for i = 1, rows do
		weights_fc[i] = {}
		for j = 1, cols do
			weights_fc[i][j] = value
			value = value + 0.1
		end
	end

	return weights_fc
end

-- Saving the model
local function save_model(filename, filter, bias_conv, weights_fc, bias_fc)
	local data = {
		filter = filter,
		bias_conv = bias_conv,
		weights_fc = weights_fc,
		bias_fc = bias_fc
	}

	local all_files = Datastore:GetAsync('allFiles') or {} -- first save: nothing stored yet

	if not table.find(all_files, filename) then
		table.insert(all_files, filename)

		Datastore:SetAsync('allFiles', all_files)
	end

	Datastore:SetAsync(filename, data)
end

-- Loading the model
local function load_model(filename)
	local data = Datastore:GetAsync(filename)
	if not data then
		return nil -- nothing saved under this key yet
	end
	return data.filter, data.bias_conv, data.weights_fc, data.bias_fc
end

local function makeTarget(number)
	local t = {}
	
	for i = 1, 10 do
		t[i] = 0
	end
	
	t[number] = 1
	
	return t
end
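One thing to keep straight with makeTarget: its argument is the 1-based class index, not the digit itself. For example:

-- makeTarget(6) is the one-hot target for digit "5" (digit d lives at index d + 1)
local t = makeTarget(6) --> { 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 }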

-- 20x20 grids where digits 0 to 9 are drawn
local dataset = require(datasets['20x20'])

local targets = {} -- kept from the earlier version; training below builds targets with makeTarget instead

for i = 1, 10 do
	targets[i] = {}

	for j = 1, 10 do
		targets[i][j] = 0
	end
	targets[i][i] = 1
end

local filter = {
	{ 1, 0 },
	{ 0, -1 }
}

local weights_fc = generateWeightsFC(10, 5) -- note: for 20x20 with these settings the flattened pool output is 18 * 18 = 324, so cols should be 324; with 5, almost all features are ignored

local bias_fc = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 }

local bias_conv = 0.0
local learning_rate = 0.01
local pool_size = 2
local stride = 1

--filter, bias_conv, weights_fc, bias_fc = load_model("model")

-- the 20x20 dataset is keyed by digit strings ("0".."9"), so # can't count it; walk it manually
local count = 0

for _ in pairs(dataset) do
	count += 1
end

local function epoch_train(epochs)
	local last

	for epoch = 1, epochs do
		local total_loss = 0

		for i = 1, count do
			local input = dataset[tostring(i - 1)]
			local target = makeTarget(i)
			local loss
			loss, bias_conv = train(input, filter, bias_conv, weights_fc, bias_fc, target, learning_rate, pool_size, stride)
			total_loss = total_loss + loss
			last = total_loss -- keep the final epoch's total loss for the return value
		end

		if epoch % 100 == 0 then
			print("Epoch:", epoch, "Loss:", total_loss / count)
		end
		
		if epoch % 10 == 0 then task.wait() end -- Prevent timeout
	end

	save_model("model", filter, bias_conv, weights_fc, bias_fc)
	return last / count
end
epoch_train(1000)

-- Testing the model on a new input
local test_input = dataset["5"] -- Testing with digit "5" (keys here are digit strings, not 1-based indices)

local conv_out = conv_layer(test_input, filter, bias_conv, stride)
local pool_out = max_pooling(conv_out, pool_size, stride)

-- Flatten pooling output for fully connected layer
local flattened_input = {}
for i = 1, #pool_out do
	for j = 1, #pool_out[i] do
		table.insert(flattened_input, pool_out[i][j])
	end
end

local fc_out = fully_connected(flattened_input, weights_fc, bias_fc)
for i = 1, #fc_out do
	fc_out[i] = relu(fc_out[i])
end

local output = softmax(fc_out)

-- Output the predicted class
local max_val = -huge
local predicted_class = 1

for i = 1, #output do
	print(i - 1, output[i])

	if output[i] > max_val then
		max_val = output[i]
		predicted_class = i
	end
end

print("Predicted class:", predicted_class - 1, max_val)

For anyone who wants to use this:

Fully Connected Layer
local function fully_connected(input, weights, bias)
	local output = {}
	for i = 1, #weights do
		local sum = 0
		for j = 1, #weights[i] do
			sum = sum + (input[j] or 0) * weights[i][j]
		end
		output[i] = sum + bias[i]
	end
	return output
end

return fully_connected
Convolution Layer
local function conv_layer(input, filter, bias, stride)
	local filter_size = #filter
	local input_height = #input
	local input_width = #input[1]
	local output_height = math.floor((input_height - filter_size) / stride + 1)
	local output_width = math.floor((input_width - filter_size) / stride + 1)

	local output = {}
	for i = 1, output_height do
		output[i] = {}
		for j = 1, output_width do
			local sum = 0
			for k = 1, filter_size do
				for l = 1, filter_size do
					sum = sum + input[(i - 1) * stride + k][(j - 1) * stride + l] * filter[k][l]
				end
			end
			output[i][j] = sum + bias
		end
	end
	return output
end

return conv_layer
Pooling Layer
local function max_pooling(input, pool_size, stride)
	local input_height = #input
	local input_width = #input[1]
	local output_height = math.floor((input_height - pool_size) / stride + 1)
	local output_width = math.floor((input_width - pool_size) / stride + 1)

	local output = {}
	for i = 1, output_height do
		output[i] = {}
		for j = 1, output_width do
			local max_val = -math.huge
			for k = 1, pool_size do
				for l = 1, pool_size do
					max_val = math.max(max_val, input[(i - 1) * stride + k][(j - 1) * stride + l])
				end
			end
			output[i][j] = max_val
		end
	end
	return output
end

return max_pooling
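A quick standalone check of the shapes these layers produce for a 20x20 input (require paths as in the plain-Lua version; assumes the same 2x2 filter, pool_size 2 and stride 1 as the main script):

local conv_layer = require "layers/Conv"
local max_pooling = require "layers/MaxPooling"

-- 20x20 grid of zeros, just to probe the output sizes
local input = {}
for i = 1, 20 do
	input[i] = {}
	for j = 1, 20 do input[i][j] = 0 end
end

local conv_out = conv_layer(input, { { 1, 0 }, { 0, -1 } }, 0.0, 1)
local pool_out = max_pooling(conv_out, 2, 1)

print(#conv_out, #conv_out[1]) --> 19  19
print(#pool_out, #pool_out[1]) --> 18  18
-- so the fully connected layer needs generateWeightsFC(10, 18 * 18), i.e. 10 x 324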

Modules

Compressor
-- https://devforum.roblox.com/t/text-compression/163637/37
-- converted from Luau to Lua

local dictionary = {}

-- save builtin libraries
local char = string.char
local insert = table.insert
local gsub = string.gsub
local pow = math.pow
local sub = string.sub
local concat = table.concat
local rep = string.rep
local clone = table.clone
local gmatch = string.gmatch
local byte = string.byte
local match = string.match

local uniqueMarker = "CMP_"

do -- populate dictionary
	local length = 0
	for i = 32, 127 do
		if i ~= 34 and i ~= 92 then
			local c = char(i)
			dictionary[c], dictionary[length] = length, c
			length = length + 1
		end
	end
end

local escapemap_126, escapemap_127 = {}, {}
local unescapemap_126, unescapemap_127 = {}, {}

local blacklisted_126 = { 34, 92, 126, 127 }
for i = 128, 180 do
	insert(blacklisted_126, i)
end

do -- Populate escape map
	-- represents the numbers 1-31, 34, 92, 126 and 127 (35 characters)
	-- https://devforum.roblox.com/t/text-compression/163637/5
	for i = 1, 31 + #blacklisted_126 do
		local b = blacklisted_126[i - 31]
		local s = i + 31

		-- Note: 126 and 127 are magic numbers
		local c = char(b or i)
		local e = char(s + (s >= 34 and 1 or 0) + (s >= 92 and 1 or 0))

		escapemap_126[c] = e
		unescapemap_126[e] = c
	end

	for i = 1, 255 - 181 do
		local c = char(i + 180)
		local s = i + 34
		local e = char(s + (s >= 92 and 1 or 0))

		escapemap_127[c] = e
		unescapemap_127[e] = c
	end
end

local function escape(s)
	-- escape the control characters 0-31, double quote 34, backslash 92 and DEL 127 (34 chars)
	-- escape characters 128-180 (53 chars)
	return gsub(gsub(s, '[%c"\\\127-\180]', function(c)
		return "\126" .. escapemap_126[c]
	end), '[\181-\255]', function(c)
		return "\127" .. escapemap_127[c]
	end)
end

local function unescape(s)
	return gsub(gsub(s, "\127(.)", function(e)
		return unescapemap_127[e]
	end), "\126(.)", function(e)
		return unescapemap_126[e]
	end)
end

local b92Cache = {}
local function tobase92(n)
	local value = b92Cache[n]
	if value then
		return value
	end

	local c = n
	value = ""
	repeat
		local remainder = n % 92
		value = dictionary[remainder] .. value
		n = (n - remainder) / 92
	until n == 0

	b92Cache[c] = value
	return value
end

local b10Cache = {}
local function tobase10(value)
	local n = b10Cache[value]
	if n then
		return n
	end

	n = 0
	for i = 1, #value do
		n = n + pow(92, i - 1) * dictionary[sub(value, -i, -i)]
	end

	b10Cache[value] = n
	return n
end

local function compress(text)
	local dictionaryCopy = clone(dictionary)
	local key, sequence, size = "", {}, #dictionaryCopy
	local width, spans, span = 1, {}, 0
	local function listkey(k)
		local value = dictionaryCopy[k]
		if not value then
			print(byte(k)) -- debug: character missing from the dictionary (escape() should prevent this)
		end
		value = tobase92(value)
		local valueLength = #value
		if valueLength > width then
			width, span, spans[width] = valueLength, 0, span
		end
		insert(sequence, rep(" ", width - valueLength) .. value)
		span = span + 1
	end
	text = escape(text)
	for i = 1, #text do
		local c = sub(text, i, i)
		local new = key .. c
		if dictionaryCopy[new] then
			key = new
		else
			listkey(key)
			key = c
			size = size + 1
			dictionaryCopy[new], dictionaryCopy[size] = size, new
		end
	end
	listkey(key)
	spans[width] = span
	return uniqueMarker .. concat(spans, ",") .. "|" .. concat(sequence)
end

local function decompress(text)
	if not (string.sub(text, 1, #uniqueMarker) == uniqueMarker) then
		error("Data does not contain the unique compression marker.")
	end
	text = string.sub(text, #uniqueMarker + 1)

	local dictionaryCopy = clone(dictionary)
	local sequence, spans, content = {}, match(text, "(.-)|(.*)")
	local groups, start = {}, 1
	for span in gmatch(spans, "%d+") do
		local width = #groups + 1
		groups[width] = sub(content, start, start + span * width - 1)
		start = start + span * width
	end
	local previous

	for width, group in pairs(groups) do
		for value in gmatch(group, rep(".", width)) do
			local entry = dictionaryCopy[tobase10(value)]
			if previous then
				if entry then
					insert(dictionaryCopy, previous .. sub(entry, 1, 1))
				else
					entry = previous .. sub(previous, 1, 1)
					insert(dictionaryCopy, entry)
				end
				insert(sequence, entry)
			else
				sequence[1] = entry
			end
			previous = entry
		end
	end
	return unescape(concat(sequence))
end

return { compress = compress, decompress = decompress, uniqueMarker = uniqueMarker }
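A round-trip usage sketch (dkjson on the plain-Lua side, as in the main script; the Compressor require path is assumed to sit next to the other modules):

local dkjson = require "modules/dkjson"
local Compressor = require "modules/Compressor"

local blob = dkjson.encode({ digit = 5, grid = "20x20" })
local packed = Compressor.compress(blob)
assert(Compressor.decompress(packed) == blob) -- lossless round trip
print(#blob, "->", #packed)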
Helper
local helper = {}

function helper.getChildren(folderPath)
	-- Lists folder contents via `ls` (plain Lua only; Roblox has no io library)
	local command = 'ls "' .. folderPath .. '"'

	-- Execute the command and capture the output
	local handle = assert(io.popen(command))
	local result = handle:read("*a")
	handle:close()

	-- Split the result by newline to create a table of files/folders
	local files = {}
	for file in result:gmatch("[^\r\n]+") do
		table.insert(files, file)
	end

	return files
end

function helper.removeExtension(filename)
	return filename:match("(.+)%..+")
end

function helper.iterate(t, f)
	if type(t) == "table" then
		local new_tbl = {}

		for _, v in pairs(t) do
			table.insert(new_tbl, f(v))
		end

		return new_tbl
	else
		return f(t)
	end
end

function helper.createTensor(rows, cols)
	local tensor = {}
	for i = 1, rows do
		tensor[i] = {}
		for j = 1, cols do
			tensor[i][j] = 0
		end
	end
	return tensor
end

return helper

Datasets

5x5
local dataset = {
	-- 0
	{
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 0, 1 },
		{ 1, 0, 0, 0, 1 },
		{ 1, 0, 0, 0, 1 },
		{ 0, 1, 1, 1, 0 }
	},

	-- 1
	{
		{ 0, 0, 1, 0, 0 },
		{ 0, 1, 1, 0, 0 },
		{ 0, 0, 1, 0, 0 },
		{ 0, 0, 1, 0, 0 },
		{ 1, 1, 1, 1, 1 }
	},

	-- 2
	{
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 1, 0 },
		{ 0, 0, 1, 0, 0 },
		{ 0, 1, 0, 0, 0 },
		{ 1, 1, 1, 1, 1 }
	},

	-- 3
	{
		{ 0, 1, 1, 1, 0 },
		{ 0, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 },
		{ 0, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 }
	},

	-- 4
	{
		{ 0, 0, 1, 0, 0 },
		{ 0, 1, 1, 0, 0 },
		{ 1, 0, 1, 0, 0 },
		{ 1, 1, 1, 1, 0 },
		{ 0, 0, 1, 0, 0 }
	},

	-- 5
	{
		{ 1, 1, 1, 1, 1 },
		{ 1, 0, 0, 0, 0 },
		{ 1, 1, 1, 1, 0 },
		{ 0, 0, 0, 1, 0 },
		{ 1, 1, 1, 1, 0 }
	},

	-- 6
	{
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 0, 0 },
		{ 1, 1, 1, 1, 0 },
		{ 1, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 }
	},

	-- 7
	{
		{ 1, 1, 1, 1, 1 },
		{ 0, 0, 1, 0, 0 },
		{ 0, 0, 1, 0, 0 },
		{ 0, 0, 1, 0, 0 },
		{ 0, 0, 1, 0, 0 }
	},

	-- 8
	{
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 }
	},

	-- 9
	{
		{ 0, 1, 1, 1, 0 },
		{ 1, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 1 },
		{ 0, 0, 0, 1, 0 },
		{ 0, 1, 1, 1, 0 }
	}
}

return dataset
20x20
local HttpService = game:GetService('HttpService')

local parent_folder = script:FindFirstAncestorWhichIsA('Script')

local Compressor = require(parent_folder.modules.Compressor)

local data = require(script.Data)

local decompressed = Compressor.decompress(data)
local dataset = HttpService:JSONDecode(decompressed)

return dataset
Data for 20x20
local compressed = [[CMP_1,655|{!  @ 0!$ : [ [ 0 ,!+!-!,!/!.!1!0!3!2!- ] ,!*!4!:!5!;!= 0!7!9!<!B , 1!D!F!E!E!B!@!C!2!H!F!+!I!O!;!K!>!/!N!C!W!4!T!X!R!L!^!0![!=!Q!_!P!S!8!]!d!U!f!5!b!.!Y!e!V!g!A!3!j!q!l!Z!h!:!v!k!G!s!<!|!i!>!n!r!q!p#!!{!`!M#-#+### !N#$#&!z!w!}!J#6#8#;!m#:#7#?!,#'#@!w#B#<#< ]!7!$ 1!'!)#C#F!6#>#O#G#Q#N!e#E!_#*!L#W!c#/#Z#T#]#Y!k#[!o!R#4#9!t#1!j#g#0#,!|#l#=#i#;#p!y#r#)#O#d#]#?#y#1#{#`#X#x$!!U#t!1#'#3#2$+$*#$#|#R#q#U$$#v$0#c$%$2!m#I @ 2#L$4$8#m$?#_$>$5#h$A#8$/$-# #V$%$*#j$6#r$M!}#p$)#R$S$7$R#^!a$W#%$K$C$]$G$[!r#b$B$#$d$a$4$Q$8$T$'#S$i$,$J$n#N$I#g$/$A$v$D#A$b$w$: 3$=$F$^%#$O%%$e$y#D$L$p#C#[#o$Y$E$U%0#u$2$m!?${#,%*$_#a$r#F$g%1$f%3%@$X%?$@#s%B$Z%;!]%:%5%=#@%E$(%8$j$h%'#5%J%)#A$: 4%!%X%$%^%&%`%(%U$1#z%f%_#}%g%a%i%l%k%9#n%N%o#.%6%.%<%L%j#e%t%+$N%w%|!u%H#P%;$t$3$s&'%2$ %W#.&+&(&.&*%q%d!a$: 5%]&3%Q&,%)$x$k#`&&%P%4%x$c$q%C&-%^%u%#&H&G%{&K$C%6%n%y$p&@&$$N#4&S&<%r%V$?$'&X&F%c$5&]%8&_&P&!&?&2%b&A&8&4 ,!$ 6&7&i%e&k&T%2%z&:!f&O%7&:&S&j$X#U$H&#%F%s&;$W&y'%&C$w&L',&N%b&J'.%K%,&h&Q#k'$&z'3& &`'&';&Z$&%p%}%<%S%h&q!Z$: 7&p&s&^$0'L%-&c$D'N'4&r&C&g!Q%?'R#e'<&u&e#7&W'P'Z%T']'B'_&)'f&y&X')'9%G'k$x'k$z%W&|'T':'M'a'v&w'Q$: 8'J'F&t%O&D'?#w$I'E'>'b'[&d&Q((&Y(*'^&!'='c$`'s'q(&'8%I'h(9'l%A&x&#'#()&}(B(0't(/(G&9$#$`&E(G'D'*'6($(.&8'Y'5( #H&m @ 9'}(S'w'O'y'!$b&g(R&f&B(F(-'A(1!x(D(C(2%v(j(Q(q(L'&('(4%>'g(w&0(#(z(y(U(J) '@(|'f)%%G)'(](`'M ] }]]
return compressed
