
Automatic differentiation library enabling compile-time optimization of mathematical expressions, featuring multi-module architecture, KSP-based code generation, and computational graph visualization through Graphviz.
A Kotlin multiplatform automatic differentiation library inspired by micrograd by Andrej Karpathy, featuring compile-time optimization through KSP (Kotlin Symbol Processing).
import org.mikrograd.diff.BackpropNode
import org.mikrograd.diff.div
import org.mikrograd.diff.plus
typealias Value = BackpropNode
val a = Value(-4.0)
val b = Value(2.0)
var c = a + b
var d = a * b + b.pow(3.0)
c += c + 1
c += 1.0 + c + (-a)
d += d * 2 + (b + a).relu()
d += d * 3.0 + (b - a).relu()
val e = c - d
val f = e.pow(2.0)
var g = f / 2
g += 10.0 / f
println("$g") // prints 24.7041, the outcome of this forward pass
g.backward()
println("${a.grad}") // prints 138.8338, i.e. the numerical value of dg/da
println("${b.grad}") // prints 645.5773, i.e. the numerical value of dg/db
// Compile-time optimized inference
@Mikrograd(mode = ComputationMode.INFERENCE)
fun optimizedInference() {
3.0 * 4.0 + (7.0 + 3.0)
}
// Compile-time optimized training with gradients
@Mikrograd(mode = ComputationMode.TRAINING)
fun optimizedTraining() {
3.0 * 4.0 + (7.0 + 3.0)
}

The library includes Graphviz integration for visualizing computational graphs:
fun main() {
val nn = Neuron(2)
val x = listOf(Value(1.0), Value(-2.0))
val y = nn(x)
drawDot(y).toFile("neuron.dot")
}

Add the following to your build.gradle.kts:
dependencies {
implementation("org.mikrograd:mikrograd:latest.version")
ksp("org.mikrograd:mikrograd-processor:latest.version")
}

Build the project with:
./gradlew build

Project structure:
miKrograd/
├── miKrograd/ # Core automatic differentiation library
├── miKrograd-annotations/ # KSP annotations
├── miKrograd-processor/ # KSP code generation processor
├── samples/ # Example implementations
└── docs/ # Architecture documentation (arc42)
MIT
A Kotlin multiplatform automatic differentiation library inspired by micrograd by Andrej Karpathy, featuring compile-time optimization through KSP (Kotlin Symbol Processing).
import org.mikrograd.diff.BackpropNode
import org.mikrograd.diff.div
import org.mikrograd.diff.plus
typealias Value = BackpropNode
val a = Value(-4.0)
val b = Value(2.0)
var c = a + b
var d = a * b + b.pow(3.0)
c += c + 1
c += 1.0 + c + (-a)
d += d * 2 + (b + a).relu()
d += d * 3.0 + (b - a).relu()
val e = c - d
val f = e.pow(2.0)
var g = f / 2
g += 10.0 / f
println("$g") // prints 24.7041, the outcome of this forward pass
g.backward()
println("${a.grad}") // prints 138.8338, i.e. the numerical value of dg/da
println("${b.grad}") // prints 645.5773, i.e. the numerical value of dg/db
// Compile-time optimized inference
@Mikrograd(mode = ComputationMode.INFERENCE)
fun optimizedInference() {
3.0 * 4.0 + (7.0 + 3.0)
}
// Compile-time optimized training with gradients
@Mikrograd(mode = ComputationMode.TRAINING)
fun optimizedTraining() {
3.0 * 4.0 + (7.0 + 3.0)
}

The library includes Graphviz integration for visualizing computational graphs:
fun main() {
val nn = Neuron(2)
val x = listOf(Value(1.0), Value(-2.0))
val y = nn(x)
drawDot(y).toFile("neuron.dot")
}

Add the following to your build.gradle.kts:
dependencies {
implementation("org.mikrograd:mikrograd:latest.version")
ksp("org.mikrograd:mikrograd-processor:latest.version")
}

Build the project with:
./gradlew build

Project structure:
miKrograd/
├── miKrograd/ # Core automatic differentiation library
├── miKrograd-annotations/ # KSP annotations
├── miKrograd-processor/ # KSP code generation processor
├── samples/ # Example implementations
└── docs/ # Architecture documentation (arc42)
MIT