From 639d0d924287595cae1c0a9b23741f8bc10016f2 Mon Sep 17 00:00:00 2001
From: that-ar-guy
Date: Fri, 7 Feb 2025 21:42:32 +0530
Subject: [PATCH 1/3] index updated

---
 docs/algorithms/deep-learning/neural-networks/index.md | 10 +++++++++-
 .../neural-networks/recurrent-neural-network.md        |  0
 2 files changed, 9 insertions(+), 1 deletion(-)
 create mode 100644 docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md

diff --git a/docs/algorithms/deep-learning/neural-networks/index.md b/docs/algorithms/deep-learning/neural-networks/index.md
index ef29ecba..26adbe63 100644
--- a/docs/algorithms/deep-learning/neural-networks/index.md
+++ b/docs/algorithms/deep-learning/neural-networks/index.md
@@ -12,5 +12,13 @@
-
+
+
+    Recurrent Neural Network
+
+    A deep learning model designed for sequential data processing.
+
+    📅 2025-01-10 | ⏱️ 3 mins
+
+
diff --git a/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md b/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
new file mode 100644
index 00000000..e69de29b

From 05de8782fb4a48205c8020a1ca8301b2a36c2f89 Mon Sep 17 00:00:00 2001
From: that-ar-guy
Date: Fri, 7 Feb 2025 21:43:58 +0530
Subject: [PATCH 2/3] added rnn md file

---
 .../recurrent-neural-network.md | 126 ++++++++++++++++++
 1 file changed, 126 insertions(+)

diff --git a/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md b/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
index e69de29b..b7394911 100644
--- a/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
+++ b/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
@@ -0,0 +1,126 @@

# 🧪 Recurrent Neural Network (RNN)

## 🎯 Objective
Recurrent Neural Networks (RNNs) are a class of artificial neural networks designed to process sequential data. Unlike feedforward networks, RNNs have connections that allow information to persist, making them suitable for tasks such as speech recognition, text generation, and time-series forecasting.

## 📚 Prerequisites
- Understanding of basic neural networks and deep learning
- Knowledge of activation functions and backpropagation
- Familiarity with sequence-based data processing
- Libraries: NumPy, TensorFlow, PyTorch

---

## 🧬 Inputs
- A sequence of data points such as text, speech signals, or time-series data.
- Example: A sentence represented as a sequence of word embeddings for NLP tasks.

## 🎎 Outputs
- Predicted sequence values or classifications.
- Example: Next word prediction in a sentence or stock price forecasting.

---

## 🍩 RNN Architecture
- RNNs maintain a **hidden state** that updates with each time step.
- At each step, the hidden state is computed as follows (a short code sketch of this update appears after the list):

    $$ h_t = f(W_h h_{t-1} + W_x x_t + b) $$

- Variants of RNNs include **LSTMs (Long Short-Term Memory)** and **GRUs (Gated Recurrent Units)**, which help mitigate the vanishing gradient problem.
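
To make that update concrete, the following is a minimal NumPy sketch of the recurrence, with `tanh` standing in for the activation `f`; the function name, shapes, and random weights are illustrative assumptions rather than anything fixed above.

```python
import numpy as np

def rnn_forward(x_seq, W_h, W_x, b, h0):
    """Apply h_t = tanh(W_h h_{t-1} + W_x x_t + b) over a whole sequence."""
    h = h0
    states = []
    for x_t in x_seq:                      # iterate over time steps
        h = np.tanh(W_h @ h + W_x @ x_t + b)
        states.append(h)
    return np.stack(states)                # (seq_len, hidden_size)

# Toy dimensions: 5 time steps, 10 input features, 20 hidden units
rng = np.random.default_rng(0)
x_seq = rng.normal(size=(5, 10))
W_h = rng.normal(scale=0.1, size=(20, 20))
W_x = rng.normal(scale=0.1, size=(20, 10))
b = np.zeros(20)
h0 = np.zeros(20)

states = rnn_forward(x_seq, W_h, W_x, b, h0)
print(states.shape)  # (5, 20): one hidden state per time step
```

Framework layers such as `nn.RNN`, used in the implementation further down, run exactly this loop with batching and gradient support added.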

## 🏅 Training Process
- The model is trained using **Backpropagation Through Time (BPTT)**; a truncated-BPTT sketch follows this list.
- Uses optimizers like **Adam** or **SGD**.
- Typical hyperparameters:
    - Learning rate: 0.001
    - Batch size: 64
    - Epochs: 30
    - Loss function: Cross-entropy for classification tasks, MSE for regression tasks.
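
For long sequences, BPTT is typically truncated: the sequence is processed in fixed-size chunks, the hidden state is detached between chunks so that gradients stop at the boundary, and gradients are clipped. The snippet below is a rough sketch of that pattern; the chunk length, clipping threshold, layer sizes, and random data are assumptions made purely for illustration.

```python
import torch
import torch.nn as nn
import torch.optim as optim

rnn = nn.RNN(input_size=10, hidden_size=20, batch_first=True)
head = nn.Linear(20, 1)
params = list(rnn.parameters()) + list(head.parameters())
optimizer = optim.Adam(params, lr=0.001)
criterion = nn.MSELoss()

inputs = torch.randn(8, 100, 10)    # (batch, long sequence, features)
targets = torch.randn(8, 100, 1)

hidden = torch.zeros(1, 8, 20)
for start in range(0, 100, 20):     # process the sequence in 20-step chunks
    chunk_x = inputs[:, start:start + 20]
    chunk_y = targets[:, start:start + 20]

    optimizer.zero_grad()
    out, hidden = rnn(chunk_x, hidden)
    loss = criterion(head(out), chunk_y)
    loss.backward()
    nn.utils.clip_grad_norm_(params, max_norm=1.0)  # tame exploding gradients
    optimizer.step()

    hidden = hidden.detach()        # stop gradients at the chunk boundary
    print(f"steps {start}-{start + 20}: loss {loss.item():.4f}")
```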

## 📊 Evaluation Metrics
- Accuracy (for classification)
- Perplexity (for language models; computed in the sketch after this list)
- Mean Squared Error (MSE) (for regression tasks)
- BLEU Score (for sequence-to-sequence models)
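
Perplexity is the exponential of the average cross-entropy per token, so it can be computed directly from a language model's loss. Here is a small sketch in which random logits and token ids stand in for real model output; the vocabulary size and tensor shapes are made up for the example.

```python
import torch
import torch.nn.functional as F

vocab_size = 50
logits = torch.randn(4, 12, vocab_size)            # (batch, time, vocab) model scores
targets = torch.randint(0, vocab_size, (4, 12))    # true token ids

# Cross-entropy expects (N, vocab) vs (N,), so merge the batch and time dimensions
ce = F.cross_entropy(logits.reshape(-1, vocab_size), targets.reshape(-1))
perplexity = torch.exp(ce)                          # perplexity = exp(mean cross-entropy)
print(f"cross-entropy {ce.item():.3f} -> perplexity {perplexity.item():.2f}")
```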

---

## 💻 Code Implementation
```python
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim

# Define RNN Model
class RNN(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super(RNN, self).__init__()
        self.hidden_size = hidden_size
        self.rnn = nn.RNN(input_size, hidden_size, batch_first=True)
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x, hidden):
        out, hidden = self.rnn(x, hidden)
        out = self.fc(out[:, -1, :])
        return out, hidden

# Model Training
input_size = 10   # Number of input features
hidden_size = 20  # Number of hidden neurons
output_size = 1   # Output dimension

model = RNN(input_size, hidden_size, output_size)
criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Sample Training Loop
for epoch in range(10):
    optimizer.zero_grad()
    inputs = torch.randn(32, 5, input_size)   # (batch_size, seq_length, input_size)
    hidden = torch.zeros(1, 32, hidden_size)  # Initial hidden state
    outputs, hidden = model(inputs, hidden)
    loss = criterion(outputs, torch.randn(32, output_size))
    loss.backward()
    optimizer.step()
    print(f"Epoch {epoch+1}, Loss: {loss.item()}")
```

## 🔍 Understanding the Code
- **Model Definition:**
    - The `RNN` class defines a simple recurrent neural network with an input layer, a recurrent layer, and a fully connected output layer.
- **Forward Pass:**
    - Takes an input sequence, processes it through the RNN layer, and generates an output.
- **Training Loop:**
    - Uses randomly generated data for demonstration.
    - Optimizes weights using the Adam optimizer and mean squared error loss.
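
As a usage note, once the loop above has run, the same `model` can produce a prediction for a new sequence. The short sketch below assumes the code block above has been executed and feeds the model one fresh random sequence.

```python
# Reuses `model`, `input_size`, and `hidden_size` from the code block above
model.eval()
with torch.no_grad():
    new_seq = torch.randn(1, 5, input_size)   # a single 5-step sequence
    h0 = torch.zeros(1, 1, hidden_size)       # fresh hidden state for batch size 1
    prediction, _ = model(new_seq, h0)

print(prediction.shape)  # torch.Size([1, 1]): one predicted value for the sequence
```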

---

## 🌟 Advantages
- Effective for sequential data modeling.
- Capable of handling variable-length inputs.
- Works well for applications like text generation and speech recognition.

## ⚠️ Limitations
- Struggles with long-range dependencies due to vanishing gradients.
- Training can be slow due to sequential computations.
- Alternatives like **LSTMs and GRUs** are preferred for longer sequences.
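
To show how small that switch is in practice, here is a sketch of the same architecture with `nn.LSTM` in place of `nn.RNN`; the `LSTMModel` name and the sizes are illustrative, and the only structural difference is that an LSTM carries a cell state alongside the hidden state.

```python
import torch
import torch.nn as nn

class LSTMModel(nn.Module):
    """Same shape as the RNN above, but with a gated nn.LSTM core."""
    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.lstm = nn.LSTM(input_size, hidden_size, batch_first=True)
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x, state=None):
        # `state` is a (hidden, cell) tuple; passing None lets PyTorch create zeros
        out, state = self.lstm(x, state)
        return self.fc(out[:, -1, :]), state

lstm_model = LSTMModel(input_size=10, hidden_size=20, output_size=1)
outputs, (h_n, c_n) = lstm_model(torch.randn(32, 5, 10))
print(outputs.shape, h_n.shape, c_n.shape)
# torch.Size([32, 1]) torch.Size([1, 32, 20]) torch.Size([1, 32, 20])
```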

## 🚀 Applications

### Natural Language Processing (NLP)
- Text prediction
- Sentiment analysis
- Machine translation

### Time-Series Forecasting
- Stock price prediction
- Weather forecasting
- Healthcare monitoring (e.g., ECG signals)

---


From 6e1bdff099855249e35e15d31b62d1764a2d4529 Mon Sep 17 00:00:00 2001
From: that-ar-guy
Date: Sat, 8 Feb 2025 18:52:31 +0530
Subject: [PATCH 3/3] line removed

---
 .../neural-networks/recurrent-neural-network.md | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md b/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
index b7394911..c456ab1c 100644
--- a/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
+++ b/docs/algorithms/deep-learning/neural-networks/recurrent-neural-network.md
@@ -119,8 +119,4 @@ for epoch in range(10):
 ### Time-Series Forecasting
 - Stock price prediction
 - Weather forecasting
-- Healthcare monitoring (e.g., ECG signals)
-
----
-
-
+- Healthcare monitoring (e.g., ECG signals)
\ No newline at end of file