Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions core/src/commonMain/kotlin/sk/ai/net/Tensor.kt
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,8 @@ interface Tensor {
fun sigmoid(): Tensor

fun ln(): Tensor

/**
 * Collapses the dimensions from [startDim] through [endDim] (inclusive)
 * into a single dimension; all other dimensions are preserved.
 *
 * Negative indices count from the last dimension, so the defaults
 * (startDim = 1, endDim = -1) keep the first (batch) axis and flatten
 * everything after it, e.g. (N, C, H, W) -> (N, C*H*W).
 * Element values and their order are unchanged.
 */
fun flatten(startDim: Int = 1, endDim: Int = -1): Tensor
}

fun Shape.toRanges(): Array<IntRange> {
Expand Down
17 changes: 17 additions & 0 deletions core/src/commonMain/kotlin/sk/ai/net/impl/DoublesTensor.kt
Original file line number Diff line number Diff line change
Expand Up @@ -378,6 +378,23 @@ data class DoublesTensor(override val shape: Shape, val elements: DoubleArray) :
override fun ln(): Tensor =
DoublesTensor(shape, elements.map { kotlin.math.ln(it) }.toDoubleArray())

/**
 * Collapses dimensions [startDim]..[endDim] (inclusive) into one dimension.
 *
 * Negative indices count from the last dimension (PyTorch-style). If the
 * requested end dimension lies beyond the current rank, size-1 axes are
 * prepended (treating the tensor as lacking a batch dimension) until it fits.
 * The element buffer is copied unchanged: flatten is a pure reshape.
 *
 * @throws IllegalArgumentException if the normalized range is empty or
 *         out of bounds (e.g. startDim > endDim, or an index more negative
 *         than the rank). The previous behavior silently inserted a spurious
 *         size-1 dimension for an inverted range.
 */
override fun flatten(startDim: Int, endDim: Int): Tensor {
    val dims = shape.dimensions.toMutableList()

    // Normalize negative indices relative to the current rank.
    var s = if (startDim < 0) dims.size + startDim else startDim
    var e = if (endDim < 0) dims.size + endDim else endDim

    // Handle tensors without a batch dimension by prepending size-1 axes
    // until the requested end dimension exists.
    while (dims.size <= e) {
        dims.add(0, 1)
        s += 1
        e += 1
    }

    // Reject inverted or out-of-range spans instead of producing a wrong shape.
    require(s in 0..e) {
        "Invalid flatten range: startDim=$startDim, endDim=$endDim for shape $shape"
    }

    // Collapse dims[s..e] into one axis; the total element count is unchanged,
    // so the flat buffer can be reused as-is.
    val flatSize = dims.subList(s, e + 1).fold(1) { acc, v -> acc * v }
    val newDims = dims.take(s) + flatSize + dims.drop(e + 1)
    return DoublesTensor(Shape(*newDims.toIntArray()), elements.copyOf())
}

fun computeStrides(dimensions: IntArray): IntArray {
val strides = IntArray(dimensions.size) { 1 }
for (i in dimensions.lastIndex - 1 downTo 0) {
Expand Down
16 changes: 16 additions & 0 deletions core/src/commonMain/kotlin/sk/ai/net/nn/Flatten.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
package sk.ai.net.nn

import sk.ai.net.Tensor

/**
 * Module that collapses a contiguous range of tensor dimensions into one.
 *
 * With the defaults (startDim = 1, endDim = -1) it keeps the first (batch)
 * axis and flattens everything after it, e.g. (N, C, H, W) -> (N, C*H*W).
 * Delegates the actual reshape to [Tensor.flatten].
 */
class Flatten(
    private val startDim: Int = 1,
    private val endDim: Int = -1,
    override val name: String = "Flatten"
) : Module() {

    // Flatten is a leaf module: it owns no sub-modules.
    override val modules: List<Module> = emptyList()

    override fun forward(input: Tensor): Tensor = input.flatten(startDim, endDim)
}
48 changes: 48 additions & 0 deletions core/src/commonTest/kotlin/sk/ai/net/nn/FlattenTest.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
package sk.ai.net.nn

import sk.ai.net.Shape
import sk.ai.net.impl.DoublesTensor
import kotlin.test.Test
import kotlin.test.assertEquals

/**
 * Tests for [Flatten]: collapsing a contiguous dimension range of a tensor
 * while preserving the remaining axes and the element buffer.
 */
class FlattenTest {

    @Test
    fun flatten_basic() {
        // Default Flatten keeps the batch dimension and collapses the rest.
        val flatten = Flatten()
        val input = DoublesTensor(Shape(2, 1, 28, 28), DoubleArray(2 * 1 * 28 * 28))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(2, 784), output.shape)
    }

    @Test
    fun flatten_with_custom_start_dim() {
        // startDim = 0 collapses ALL dimensions, including the batch axis.
        // (The previous version passed startDim = 1, which is the default
        // and therefore exercised nothing "custom".)
        val flatten = Flatten(startDim = 0)
        val input = DoublesTensor(Shape(2, 3, 4), DoubleArray(2 * 3 * 4))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(24), output.shape)
    }

    @Test
    fun flatten_with_custom_end_dim() {
        // endDim = 2 collapses dims 1..2 and leaves the trailing axis intact.
        val flatten = Flatten(startDim = 1, endDim = 2)
        val input = DoublesTensor(Shape(2, 3, 4, 5), DoubleArray(2 * 3 * 4 * 5))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(2, 12, 5), output.shape)
    }

    @Test
    fun flatten_single_sample() {
        val flatten = Flatten()
        val input = DoublesTensor(Shape(1, 3, 3), DoubleArray(1 * 3 * 3))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(1, 9), output.shape)
    }

    @Test
    fun flatten_preserve_batch_dim() {
        val flatten = Flatten()
        val input = DoublesTensor(Shape(10, 5, 2, 2), DoubleArray(10 * 5 * 2 * 2))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(10, 20), output.shape)
    }

    @Test
    fun flatten_size_one_batch_dim() {
        // Renamed from flatten_no_batch_dim: the input DOES carry a (size-1)
        // batch axis; the check is that it survives while the rest collapse.
        // The element count now matches the shape (1*3*4, was 3*4).
        val flatten = Flatten()
        val input = DoublesTensor(Shape(1, 3, 4), DoubleArray(1 * 3 * 4))
        val output = flatten.forward(input) as DoublesTensor
        assertEquals(Shape(1, 12), output.shape)
    }

    @Test
    fun flatten_preserves_elements() {
        // Flatten is a pure reshape: values and their order must not change.
        val data = DoubleArray(2 * 3) { it.toDouble() }
        val flatten = Flatten(startDim = 0)
        val output = flatten.forward(DoublesTensor(Shape(2, 3), data)) as DoublesTensor
        assertEquals(Shape(6), output.shape)
        assertEquals(data.toList(), output.elements.toList())
    }
}