Skip to content

Commit 3b2d3f2

Browse files
committed
Forward pass working; backward pass not working yet
1 parent fb72ba0 commit 3b2d3f2

4 files changed

Lines changed: 62 additions & 8 deletions

File tree

.vscode/c_cpp_properties.json

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
{
2+
"configurations": [
3+
{
4+
"name": "Linux",
5+
"includePath": [
6+
"${workspaceFolder}/**"
7+
],
8+
"defines": [],
9+
"compilerPath": "/usr/bin/gcc",
10+
"cStandard": "c17",
11+
"cppStandard": "gnu++17",
12+
"intelliSenseMode": "linux-gcc-x64"
13+
}
14+
],
15+
"version": 4
16+
}

include/TensorSANN/activations/ReLU.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
namespace TensorSANN {
99

10-
class ReLU : Layer{
10+
class ReLU : public Layer{
1111
public:
1212
Tensor forward(const Tensor &input) override;
1313

include/TensorSANN/activations/Softmax.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
namespace TensorSANN {
99

10-
class Softmax : Layer{
10+
class Softmax : public Layer{
1111
public:
1212

1313
Tensor forward(const Tensor &input) override;

src/model_main.cpp

Lines changed: 44 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,55 @@
11
#include <iostream>
2+
#include <vector>
3+
#include <memory>
4+
25
#include "TensorSANN/utils/Tensor.hpp"
36
#include "TensorSANN/layers/DenseLayer.hpp"
7+
#include "TensorSANN/layers/Layer.hpp"
48

59
#include "TensorSANN/activations/Softmax.hpp"
610
#include "TensorSANN/activations/ReLU.hpp"
711
#include "TensorSANN/optimizers/SGD.hpp"
812

913
int main() {
1014
std::cout << "MODEL MAIN CPP ==========" << std::endl;
11-
TensorSANN::SGD optimizer = TensorSANN::SGD(0.01f);
12-
15+
1316

1417
std::vector<size_t> shape2 = {4,2}; // 2x3 tensor
1518
std::vector<float> data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0}; // Initialize with values
1619
TensorSANN::Tensor input_tensor(shape2, data);
1720

1821
TensorSANN::DenseLayer dense1(4, 2);
22+
TensorSANN::ReLU relu;
23+
TensorSANN::DenseLayer dense2(2, 1);
24+
TensorSANN::Softmax smax;
25+
26+
std::vector<std::shared_ptr<TensorSANN::Layer>> layers;
27+
layers.push_back(std::make_shared<TensorSANN::DenseLayer>(4, 2));
28+
layers.push_back(std::make_shared<TensorSANN::ReLU>());
29+
layers.push_back(std::make_shared<TensorSANN::DenseLayer>(2, 1));
30+
layers.push_back(std::make_shared<TensorSANN::Softmax>());
31+
32+
TensorSANN::Tensor fwd_op = input_tensor.transpose();
33+
34+
// fwd
35+
for (auto &layer : layers){
36+
fwd_op = layer->forward(fwd_op);
37+
std::cout << "LOOP FWD==" << fwd_op.to_string() << std::endl;
38+
}
39+
40+
TensorSANN::SGD optimizer = TensorSANN::SGD(0.01f);
41+
42+
// calculate loss
43+
TensorSANN::Tensor loss_grad = fwd_op - 1;
44+
45+
// bkwd
46+
for (int i = layers.size(); i >= 0; --i){
47+
std::cout << i << std::endl;
48+
layers[i]->backward(loss_grad);
49+
}
50+
51+
52+
1953
const TensorSANN::Tensor d1w = dense1.weights();
2054
const TensorSANN::Tensor d1b = dense1.biases();
2155

@@ -25,14 +59,18 @@ int main() {
2559

2660
TensorSANN::Tensor f1 = dense1.forward(input_tensor.transpose());
2761
std::cout << f1.to_string() << std::endl;
62+
63+
64+
TensorSANN::Tensor r1 = relu.forward(f1);
65+
std::cout << r1.to_string() << std::endl;
66+
2867

29-
TensorSANN::Softmax smax;
3068

31-
TensorSANN::Tensor f2 = smax.forward(f1);
69+
TensorSANN::Tensor f2 = dense2.forward(r1);
3270
std::cout << f2.to_string() << std::endl;
3371

34-
TensorSANN::Tensor b2 = smax.backward(f2);
35-
std::cout << b2.to_string() << std::endl;
72+
TensorSANN::Tensor s1 = smax.forward(f2);
73+
std::cout << (s1 - 1).to_string() << std::endl;
3674

3775
return 0;
3876

0 commit comments

Comments
 (0)