feat: 初始化Go张量库基础实现

- 添加Tensor数据结构定义,包含数据、梯度和反向传播相关字段
- 实现基本张量操作:加法、减法、乘法、矩阵乘法和数乘
- 实现张量创建函数:全零、全一、单位矩阵
- 实现反向传播机制和梯度计算
- 添加.gitignore文件以忽略vendor目录
- 配置go模块依赖和版本信息
```
This commit is contained in:
kingecg 2025-12-30 22:49:34 +08:00
commit 2da5bc6ece
4 changed files with 311 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
vendor

5
go.mod Normal file
View File

@ -0,0 +1,5 @@
module git.kingecg.top/kingecg/gotensor
go 1.25.1
require git.kingecg.top/kingecg/gomatrix v0.0.0-20251230141944-2ff4dcfb0fcd // indirect

2
go.sum Normal file
View File

@ -0,0 +1,2 @@
git.kingecg.top/kingecg/gomatrix v0.0.0-20251230141944-2ff4dcfb0fcd h1:vn3LW38hQPGig0iqofIaIMYXVp3Uqb5QX6eH5B5lVxU=
git.kingecg.top/kingecg/gomatrix v0.0.0-20251230141944-2ff4dcfb0fcd/go.mod h1:CHH1HkVvXrpsb+uDrsoyjx0lTwQ3oSSMbIRJmwvO6z8=

303
tensor.go Normal file
View File

@ -0,0 +1,303 @@
package gotensor
import "git.kingecg.top/kingecg/gomatrix"
// Fixed capacities for the per-tensor autograd bookkeeping arrays
// (see the Prevs and Args fields on Tensor).
const (
Max_Prevs = 10 // maximum number of parent tensors an op result can record
Max_Args = 10 // maximum number of extra op arguments recorded for backprop
)
// Arg is the constraint describing the value kinds intended to be stored
// as recorded operation arguments. Tensor.Args is declared as [Max_Args]any
// rather than using this constraint directly, so it is documentation-only
// at present.
type Arg interface {
int | float64 | []int
}
// Tensor pairs a gomatrix.Matrix of values with its gradient and the
// bookkeeping needed to run reverse-mode autodiff over a graph of ops.
type Tensor struct {
Data *gomatrix.Matrix // underlying values
Grad *gomatrix.Matrix // accumulated gradient; same shape as Data when set
Prevs [Max_Prevs]*Tensor // parent tensors this value was computed from
Num_Prevs int // number of valid entries in Prevs
Args [Max_Args]any // must meet Arg — extra op arguments (e.g. the Scale factor)
Op string // operation name; used for backpropagation/debugging
backwardFunc func() // distributes this tensor's Grad into its parents' Grads
}
// NewTensor builds a tensor from raw values and a shape. Its gradient
// matrix is allocated eagerly and starts at all zeros.
func NewTensor(data []float64, shape []int) (*Tensor, error) {
	values, err := gomatrix.NewMatrix(data, shape)
	if err != nil {
		return nil, err
	}
	gradient, err := gomatrix.NewZeros(shape)
	if err != nil {
		return nil, err
	}
	t := &Tensor{Data: values, Grad: gradient, Op: ""}
	return t, nil
}
// NewZeros returns a tensor of the given shape with every element zero.
// Its gradient is likewise allocated as zeros.
func NewZeros(shape []int) (*Tensor, error) {
	var out Tensor
	out.Op = "zeros"

	data, err := gomatrix.NewZeros(shape)
	if err != nil {
		return nil, err
	}
	out.Data = data

	if out.Grad, err = gomatrix.NewZeros(shape); err != nil {
		return nil, err
	}
	return &out, nil
}
// NewOnes returns a tensor of the given shape with every element one.
// The gradient starts at zero.
func NewOnes(shape []int) (*Tensor, error) {
	data, err := gomatrix.NewOnes(shape)
	if err != nil {
		return nil, err
	}
	grad, gerr := gomatrix.NewZeros(shape)
	if gerr != nil {
		return nil, gerr
	}
	return &Tensor{Data: data, Grad: grad, Op: "ones"}, nil
}
// NewIdentity returns the size×size identity tensor with a zero gradient.
func NewIdentity(size int) (*Tensor, error) {
	eye, err := gomatrix.NewIdentity(size)
	if err != nil {
		return nil, err
	}
	zero, err := gomatrix.NewZeros([]int{size, size})
	if err != nil {
		return nil, err
	}
	return &Tensor{Data: eye, Grad: zero, Op: "identity"}, nil
}
// Add returns the element-wise sum t + other and registers the backward
// pass. Since d(out)/dt = 1 and d(out)/d(other) = 1, each parent simply
// accumulates the output gradient.
//
// Returns an error if the shapes are incompatible or the gradient matrix
// cannot be allocated.
func (t *Tensor) Add(other *Tensor) (*Tensor, error) {
	result, err := t.Data.Add(other.Data)
	if err != nil {
		return nil, err
	}
	// BUG FIX: the output previously had a nil Grad, so any backward pass
	// through it silently dropped gradients (the accumulate errors were
	// discarded). Allocate it as zeros up front, like the constructors do.
	grad, err := gomatrix.NewZeros(result.Shape())
	if err != nil {
		return nil, err
	}
	output := &Tensor{
		Data: result,
		Grad: grad,
		Op:   "add",
	}
	// Record the dependency edges for backpropagation.
	output.Prevs[0] = t
	output.Prevs[1] = other
	output.Num_Prevs = 2
	output.backwardFunc = func() {
		if t.Grad != nil {
			// Gradient w.r.t. t equals the output gradient.
			if g, err := t.Grad.Add(output.Grad); err == nil {
				t.Grad = g
			}
		}
		if other.Grad != nil {
			// Gradient w.r.t. other equals the output gradient.
			if g, err := other.Grad.Add(output.Grad); err == nil {
				other.Grad = g
			}
		}
	}
	return output, nil
}
// Subtract returns the element-wise difference t - other and registers the
// backward pass: d(out)/dt = 1 and d(out)/d(other) = -1.
//
// Returns an error if the shapes are incompatible or the gradient matrix
// cannot be allocated.
func (t *Tensor) Subtract(other *Tensor) (*Tensor, error) {
	result, err := t.Data.Subtract(other.Data)
	if err != nil {
		return nil, err
	}
	// BUG FIX: allocate the output gradient eagerly; it was previously nil,
	// which made the backward closure a silent no-op for chained results.
	grad, err := gomatrix.NewZeros(result.Shape())
	if err != nil {
		return nil, err
	}
	output := &Tensor{
		Data: result,
		Grad: grad,
		Op:   "sub",
	}
	output.Prevs[0] = t
	output.Prevs[1] = other
	output.Num_Prevs = 2
	output.backwardFunc = func() {
		if t.Grad != nil {
			// Gradient w.r.t. t equals the output gradient.
			if g, err := t.Grad.Add(output.Grad); err == nil {
				t.Grad = g
			}
		}
		if other.Grad != nil {
			// Gradient w.r.t. other is the negated output gradient.
			negGrad := output.Grad.Scale(-1.0)
			if g, err := other.Grad.Add(negGrad); err == nil {
				other.Grad = g
			}
		}
	}
	return output, nil
}
// Multiply returns the element-wise (Hadamard) product t * other and
// registers the backward pass: d(out)/dt = other.Data and
// d(out)/d(other) = t.Data, each multiplied element-wise by the output
// gradient.
//
// Returns an error if the shapes are incompatible or the gradient matrix
// cannot be allocated.
func (t *Tensor) Multiply(other *Tensor) (*Tensor, error) {
	result, err := t.Data.Multiply(other.Data)
	if err != nil {
		return nil, err
	}
	// BUG FIX: allocate the output gradient eagerly; it was previously nil,
	// which made the backward closure a silent no-op for chained results.
	grad, err := gomatrix.NewZeros(result.Shape())
	if err != nil {
		return nil, err
	}
	output := &Tensor{
		Data: result,
		Grad: grad,
		Op:   "mul",
	}
	output.Prevs[0] = t
	output.Prevs[1] = other
	output.Num_Prevs = 2
	output.backwardFunc = func() {
		if t.Grad != nil {
			// d/dt = output.Grad ⊙ other.Data
			if scaled, err := output.Grad.Multiply(other.Data); err == nil {
				if g, err := t.Grad.Add(scaled); err == nil {
					t.Grad = g
				}
			}
		}
		if other.Grad != nil {
			// d/d(other) = output.Grad ⊙ t.Data
			if scaled, err := output.Grad.Multiply(t.Data); err == nil {
				if g, err := other.Grad.Add(scaled); err == nil {
					other.Grad = g
				}
			}
		}
	}
	return output, nil
}
// MatMul returns the matrix product t × other and registers the backward
// pass using the standard matrix-calculus identities:
//
//	d/dt      = output.Grad × other.Dataᵀ
//	d/d(other) = t.Dataᵀ × output.Grad
//
// Returns an error if the shapes are incompatible or the gradient matrix
// cannot be allocated.
func (t *Tensor) MatMul(other *Tensor) (*Tensor, error) {
	result, err := t.Data.MatMul(other.Data)
	if err != nil {
		return nil, err
	}
	// BUG FIX: allocate the output gradient eagerly; it was previously nil,
	// which made the backward closure a silent no-op for chained results.
	grad, err := gomatrix.NewZeros(result.Shape())
	if err != nil {
		return nil, err
	}
	output := &Tensor{
		Data: result,
		Grad: grad,
		Op:   "matmul",
	}
	output.Prevs[0] = t
	output.Prevs[1] = other
	output.Num_Prevs = 2
	output.backwardFunc = func() {
		if t.Grad != nil {
			// d/dt = output.Grad × other.Dataᵀ
			if otherT, err := other.Data.Transpose(); err == nil {
				if prod, err := output.Grad.MatMul(otherT); err == nil {
					if g, err := t.Grad.Add(prod); err == nil {
						t.Grad = g
					}
				}
			}
		}
		if other.Grad != nil {
			// d/d(other) = t.Dataᵀ × output.Grad
			if tT, err := t.Data.Transpose(); err == nil {
				if prod, err := tT.MatMul(output.Grad); err == nil {
					if g, err := other.Grad.Add(prod); err == nil {
						other.Grad = g
					}
				}
			}
		}
	}
	return output, nil
}
// Scale returns t multiplied by the scalar factor and registers the
// backward pass: d(out)/dt = factor.
//
// The factor is recorded in Args[0] alongside the dependency edge.
func (t *Tensor) Scale(factor float64) *Tensor {
	result := t.Data.Scale(factor)
	// BUG FIX: the output previously had a nil Grad, so backward through a
	// scaled result silently dropped gradients. The signature has no error
	// return, so an allocation failure leaves Grad nil — matching the old
	// behavior in that (unlikely) case.
	grad, _ := gomatrix.NewZeros(result.Shape())
	output := &Tensor{
		Data: result,
		Grad: grad,
		Op:   "scale",
	}
	output.Prevs[0] = t
	output.Num_Prevs = 1
	output.Args[0] = factor
	output.backwardFunc = func() {
		if t.Grad != nil {
			// d/dt = output.Grad * factor
			scaledGrad := output.Grad.Scale(factor)
			if g, err := t.Grad.Add(scaledGrad); err == nil {
				t.Grad = g
			}
		}
	}
	return output
}
// ZeroGrad resets the accumulated gradient to all zeros, leaving Data
// untouched. A tensor with no gradient matrix is left alone.
func (t *Tensor) ZeroGrad() {
	if t.Grad == nil {
		return
	}
	// Replace the gradient with a fresh zero matrix of the same shape.
	fresh, _ := gomatrix.NewZeros(t.Grad.Shape())
	t.Grad = fresh
}
// Shape reports the dimensions of the underlying data matrix.
func (t *Tensor) Shape() []int {
	dims := t.Data.Shape()
	return dims
}
// Size reports the total number of elements in the underlying data matrix.
func (t *Tensor) Size() int {
	n := t.Data.Size()
	return n
}
// Get returns the element at the given indices, delegating to the
// underlying data matrix (which reports out-of-range errors).
func (t *Tensor) Get(indices ...int) (float64, error) {
	v, err := t.Data.Get(indices...)
	return v, err
}
// Set stores value at the given indices, delegating to the underlying
// data matrix (which reports out-of-range errors).
func (t *Tensor) Set(value float64, indices ...int) error {
	err := t.Data.Set(value, indices...)
	return err
}
// String renders the tensor's data via the underlying matrix, satisfying
// fmt.Stringer.
func (t *Tensor) String() string {
	s := t.Data.String()
	return s
}
// Backward runs reverse-mode autodiff from t through every tensor that
// contributed to it.
//
// BUG FIX: the previous implementation only invoked t's own backward
// function, so gradients never flowed past the immediate parents of t.
// This version builds a topological order of the graph reachable through
// Prevs and runs each node's backward function only after all of that
// node's consumers, so every gradient is fully accumulated before being
// propagated further (correct for shared sub-expressions / DAGs).
//
// If t has no gradient matrix yet, it is seeded with ones (dt/dt = 1),
// the usual convention for the root of a backward pass. A caller may
// instead pre-set t.Grad to use a custom seed.
// NOTE(review): if t.Grad is present but still all zeros (its initial
// value), zeros will propagate — seed it before calling.
func (t *Tensor) Backward() {
	if t.Grad == nil {
		if seed, err := gomatrix.NewOnes(t.Data.Shape()); err == nil {
			t.Grad = seed
		}
	}
	// Post-order DFS: parents are appended before their consumers, and the
	// visited set guards against revisiting shared nodes.
	visited := make(map[*Tensor]bool)
	var order []*Tensor
	var build func(n *Tensor)
	build = func(n *Tensor) {
		if n == nil || visited[n] {
			return
		}
		visited[n] = true
		for i := 0; i < n.Num_Prevs; i++ {
			build(n.Prevs[i])
		}
		order = append(order, n)
	}
	build(t)
	// Walk in reverse (consumers before producers) so each backwardFunc
	// sees its node's fully accumulated gradient.
	for i := len(order) - 1; i >= 0; i-- {
		if fn := order[i].backwardFunc; fn != nil {
			fn()
		}
	}
}