help@rskworld.in +91 93305 39277
RSK World
  • Home
  • Development
    • Web Development
    • Mobile Apps
    • Software
    • Games
    • Project
  • Technologies
    • Data Science
    • AI Development
    • Cloud Development
    • Blockchain
    • Cyber Security
    • Dev Tools
    • Testing Tools
  • About
  • Contact

Theme Settings

Color Scheme
Display Options
Font Size
100%
Back to Project
RSK World
tensorflow-deeplearning
/
scripts
RSK World
tensorflow-deeplearning
Deep learning with TensorFlow and Keras
scripts
  • clean_cache.py — 3.9 KB
  • cleanup.bat — 715 B
  • cleanup.sh — 725 B
  • generate_data.py — 816 B
  • generate_data_standalone.py — 5.9 KB
transformers.py | generate_data_standalone.py
src/transformers.py
Raw Download
Find: Go to:
"""
Transformer Models with TensorFlow
Author: RSK World
Website: https://rskworld.in
Email: help@rskworld.in
Phone: +91 93305 39277

This module demonstrates Transformer architecture implementation.
"""

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Model
import numpy as np
import math

def positional_encoding(position, d_model):
    """
    Build the sinusoidal positional-encoding matrix used by transformers.

    Args:
        position: Maximum sequence position (number of rows).
        d_model: Embedding dimension (number of columns).

    Returns:
        A float32 tensor of shape (1, position, d_model) holding the
        sine/cosine positional encodings.
    """
    positions = np.arange(position)[:, np.newaxis]   # (position, 1)
    dims = np.arange(d_model)[np.newaxis, :]         # (1, d_model)

    # Each even/odd dimension pair shares one frequency: 10000^(2i/d_model).
    angles = positions / np.power(10000, (2 * (dims // 2)) / np.float32(d_model))

    # Even dimensions take sine, odd dimensions take cosine.
    angles[:, 0::2] = np.sin(angles[:, 0::2])
    angles[:, 1::2] = np.cos(angles[:, 1::2])

    # Leading batch axis so the encoding broadcasts over batched inputs.
    return tf.cast(angles[np.newaxis, ...], dtype=tf.float32)

class MultiHeadAttention(layers.Layer):
    """
    Multi-head attention layer (scaled dot-product attention over
    several parallel heads, as in "Attention Is All You Need").
    Author: RSK World - https://rskworld.in
    """

    def __init__(self, d_model, num_heads, **kwargs):
        super(MultiHeadAttention, self).__init__(**kwargs)
        self.num_heads = num_heads
        self.d_model = d_model

        # d_model must split evenly across the heads.
        assert d_model % self.num_heads == 0
        self.depth = d_model // self.num_heads

        # Learned projections for queries, keys and values.
        self.wq = layers.Dense(d_model)
        self.wk = layers.Dense(d_model)
        self.wv = layers.Dense(d_model)

        # Output projection applied after the heads are re-concatenated.
        self.dense = layers.Dense(d_model)

    def split_heads(self, x, batch_size):
        """Reshape (batch, seq, d_model) -> (batch, num_heads, seq, depth)."""
        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, v, k, q, mask=None):
        batch_size = tf.shape(q)[0]

        # Project the inputs, then fan each one out across the heads.
        q = self.split_heads(self.wq(q), batch_size)
        k = self.split_heads(self.wk(k), batch_size)
        v = self.split_heads(self.wv(v), batch_size)

        attended, attention_weights = self.scaled_dot_product_attention(
            q, k, v, mask
        )

        # Undo the head split: back to (batch, seq, d_model).
        attended = tf.transpose(attended, perm=[0, 2, 1, 3])
        merged = tf.reshape(attended, (batch_size, -1, self.d_model))

        return self.dense(merged), attention_weights

    def scaled_dot_product_attention(self, q, k, v, mask):
        """Compute softmax(q·kᵀ/√dk [+ mask]) · v and the attention weights."""
        scores = tf.matmul(q, k, transpose_b=True)

        # Scale by sqrt of key dimension to keep logits well-conditioned.
        dk = tf.cast(tf.shape(k)[-1], tf.float32)
        logits = scores / tf.math.sqrt(dk)

        # Masked positions get a large negative logit (~zero after softmax).
        if mask is not None:
            logits += (mask * -1e9)

        attention_weights = tf.nn.softmax(logits, axis=-1)
        return tf.matmul(attention_weights, v), attention_weights

def point_wise_feed_forward_network(d_model, dff):
    """
    Build the position-wise feed-forward sub-layer of a transformer block.

    Args:
        d_model: Model dimension (output width).
        dff: Hidden (expansion) dimension.

    Returns:
        A two-layer keras.Sequential: Dense(dff, relu) -> Dense(d_model).
    """
    expand = layers.Dense(dff, activation='relu')
    project = layers.Dense(d_model)
    return keras.Sequential([expand, project])

class EncoderLayer(layers.Layer):
    """
    Transformer encoder layer: self-attention followed by a position-wise
    feed-forward network, each wrapped in dropout + residual connection +
    layer normalization.
    Author: RSK World - https://rskworld.in
    """

    def __init__(self, d_model, num_heads, dff, rate=0.1, **kwargs):
        super(EncoderLayer, self).__init__(**kwargs)

        # The two sub-layers.
        self.mha = MultiHeadAttention(d_model, num_heads)
        self.ffn = point_wise_feed_forward_network(d_model, dff)

        # One LayerNorm + Dropout pair per sub-layer.
        self.layernorm1 = layers.LayerNormalization(epsilon=1e-6)
        self.layernorm2 = layers.LayerNormalization(epsilon=1e-6)
        self.dropout1 = layers.Dropout(rate)
        self.dropout2 = layers.Dropout(rate)

    def call(self, x, training, mask=None):
        # Sub-layer 1: self-attention (q = k = v = x) with residual add.
        attn_output, _ = self.mha(x, x, x, mask)
        out1 = self.layernorm1(x + self.dropout1(attn_output, training=training))

        # Sub-layer 2: position-wise feed-forward with residual add.
        ffn_output = self.ffn(out1)
        return self.layernorm2(out1 + self.dropout2(ffn_output, training=training))

class DecoderLayer(layers.Layer):
    """
    Transformer decoder layer: masked self-attention, encoder-decoder
    cross-attention, then a feed-forward network — each with dropout,
    a residual connection and layer normalization.
    Author: RSK World - https://rskworld.in
    """

    def __init__(self, d_model, num_heads, dff, rate=0.1, **kwargs):
        super(DecoderLayer, self).__init__(**kwargs)

        self.mha1 = MultiHeadAttention(d_model, num_heads)  # masked self-attention
        self.mha2 = MultiHeadAttention(d_model, num_heads)  # cross-attention
        self.ffn = point_wise_feed_forward_network(d_model, dff)

        self.layernorm1 = layers.LayerNormalization(epsilon=1e-6)
        self.layernorm2 = layers.LayerNormalization(epsilon=1e-6)
        self.layernorm3 = layers.LayerNormalization(epsilon=1e-6)

        self.dropout1 = layers.Dropout(rate)
        self.dropout2 = layers.Dropout(rate)
        self.dropout3 = layers.Dropout(rate)

    def call(self, x, enc_output, training, look_ahead_mask=None, padding_mask=None):
        # Sub-layer 1: masked self-attention over the decoder input.
        attn1, attn_weights_block1 = self.mha1(x, x, x, look_ahead_mask)
        out1 = self.layernorm1(self.dropout1(attn1, training=training) + x)

        # Sub-layer 2: attend to the encoder output (queries come from out1;
        # keys/values come from enc_output).
        attn2, attn_weights_block2 = self.mha2(enc_output, enc_output, out1, padding_mask)
        out2 = self.layernorm2(self.dropout2(attn2, training=training) + out1)

        # Sub-layer 3: position-wise feed-forward network.
        ffn_output = self.dropout3(self.ffn(out2), training=training)
        out3 = self.layernorm3(ffn_output + out2)

        return out3, attn_weights_block1, attn_weights_block2

def create_transformer_encoder(num_layers, d_model, num_heads, dff, input_vocab_size, maximum_position_encoding, rate=0.1):
    """
    Create a transformer encoder as a Keras functional model.

    Args:
        num_layers: Number of encoder layers
        d_model: Model dimension
        num_heads: Number of attention heads
        dff: Feed-forward dimension
        input_vocab_size: Vocabulary size
        maximum_position_encoding: Maximum position encoding
        rate: Dropout rate

    Returns:
        Encoder model
    """
    inputs = keras.Input(shape=(None,))

    # Token embeddings, scaled by sqrt(d_model) as in the original paper.
    x = layers.Embedding(input_vocab_size, d_model)(inputs)
    x *= tf.math.sqrt(tf.cast(d_model, tf.float32))

    # Add the (truncated) precomputed positional encodings.
    pos_encoding = positional_encoding(maximum_position_encoding, d_model)
    x += pos_encoding[:, :tf.shape(x)[1], :]
    x = layers.Dropout(rate)(x)

    for i in range(num_layers):
        # BUG FIX: the original passed training=True here, baking training
        # mode into the graph so dropout stayed active even at inference
        # (model(x, training=False)). Omitting the argument lets Keras
        # propagate the caller's training flag to each layer.
        x = EncoderLayer(d_model, num_heads, dff, rate)(x)

    return Model(inputs, x, name='transformer_encoder')

def example_usage():
    """
    Demonstrate building and running a small transformer encoder.

    Returns:
        The constructed encoder model.
    """
    # Build a compact 2-layer encoder for demonstration purposes.
    config = dict(
        num_layers=2,
        d_model=128,
        num_heads=8,
        dff=512,
        input_vocab_size=10000,
        maximum_position_encoding=1000,
        rate=0.1,
    )
    encoder = create_transformer_encoder(**config)

    print("Transformer Encoder Model:")
    encoder.summary()

    # Smoke test: push a batch of random token ids through the model.
    sample_input = tf.random.uniform((32, 100), minval=0, maxval=10000, dtype=tf.int32)
    sample_output = encoder(sample_input, training=False)

    print(f"\nInput shape: {sample_input.shape}")
    print(f"Output shape: {sample_output.shape}")

    return encoder

# Script entry point: print a banner, then build and summarize the demo encoder.
if __name__ == '__main__':
    print("Transformer Models with TensorFlow")
    print("Author: RSK World - https://rskworld.in")
    encoder = example_usage()
241 lines • 7.8 KB
python
scripts/generate_data_standalone.py
Raw Download
Find: Go to:
"""
Standalone Data Generation Script (No TensorFlow Required)
Author: RSK World
Website: https://rskworld.in
Email: help@rskworld.in
Phone: +91 93305 39277

This script generates data without requiring TensorFlow installation.
"""

import numpy as np
import pandas as pd
import os
import json

def generate_classification_data(n_samples=1000, n_features=20, n_classes=3, noise=0.1):
    """
    Generate a synthetic classification dataset.

    Labels are derived from the sum of the first half of each row's
    features (thresholds at -2 and 2), after which a `noise` fraction of
    labels is replaced by uniformly random classes.

    Args:
        n_samples: Number of rows.
        n_features: Number of feature columns.
        n_classes: Number of distinct class labels.
        noise: Probability that a label is replaced by a random class.

    Returns:
        Tuple (X, y): float32 matrix of shape (n_samples, n_features)
        and integer label vector of shape (n_samples,).
    """
    np.random.seed(42)  # fixed seed so the dataset is reproducible
    X = np.random.randn(n_samples, n_features)

    # Vectorized replacement for the original per-sample loop:
    # sum < -2 -> 0, -2 <= sum < 2 -> 1, sum >= 2 -> 2 (np.digitize
    # with bins [-2, 2] implements exactly these half-open intervals).
    feature_sums = X[:, :n_features // 2].sum(axis=1)
    y = np.digitize(feature_sums, [-2, 2])

    # Flip a `noise` fraction of labels to random classes.
    noise_mask = np.random.random(n_samples) < noise
    y[noise_mask] = np.random.randint(0, n_classes, np.sum(noise_mask))

    return X.astype('float32'), y

def generate_regression_data(n_samples=1000, n_features=10, noise=0.1):
    """
    Generate a synthetic regression dataset: a linear signal, a mild
    quadratic term, and additive Gaussian noise.

    Returns:
        Tuple (X, y) of float32 arrays with shapes
        (n_samples, n_features) and (n_samples,).
    """
    np.random.seed(42)  # reproducible output
    X = np.random.randn(n_samples, n_features)
    weights = np.random.randn(n_features)

    # Linear component + small quadratic term + noise.
    y = X @ weights
    y = y + 0.1 * (X ** 2).sum(axis=1)
    y = y + noise * np.random.randn(n_samples)

    return X.astype('float32'), y.astype('float32')

def generate_image_data(n_samples=100, img_size=(28, 28), n_classes=10):
    """
    Generate a synthetic image dataset.

    Each image combines a striped background (horizontal stripes for even
    labels, vertical for odd), an inverted disc whose radius grows with
    the label, and Gaussian pixel noise clipped to [0, 1].

    Returns:
        Tuple (images, labels) with shapes (n_samples, *img_size) and
        (n_samples,); images are float32.
    """
    np.random.seed(42)  # reproducible images
    height, width = img_size
    images, labels = [], []

    for idx in range(n_samples):
        label = idx % n_classes
        labels.append(label)

        # Striped background: horizontal for even labels, vertical for odd.
        img = np.zeros(img_size, dtype='float32')
        if label % 2 == 0:
            img[0:height:3, :] = 0.8
        else:
            img[:, 0:width:3] = 0.8

        # Invert pixels inside a disc whose radius depends on the label.
        cy, cx = height // 2, width // 2
        radius = 5 + label
        rows, cols = np.ogrid[:height, :width]
        disc = (cols - cx) ** 2 + (rows - cy) ** 2 <= radius ** 2
        img[disc] = 1.0 - img[disc]

        # Add Gaussian noise in place (keeps float32), then clamp to [0, 1].
        img += np.random.randn(height, width) * 0.1
        img = np.clip(img, 0, 1)
        images.append(img)

    return np.array(images), np.array(labels)

def generate_sequence_data(n_samples=1000, sequence_length=50, n_features=10, n_classes=3):
    """
    Generate synthetic multivariate sequence data.

    Feature 0 carries a linear trend whose slope grows with the label;
    class 1 additionally receives a sinusoidal component on feature 1.

    Returns:
        Tuple (X, y): float32 array of shape
        (n_samples, sequence_length, n_features) and integer label vector.
    """
    np.random.seed(42)  # reproducible sequences
    sequences, labels = [], []

    for idx in range(n_samples):
        label = idx % n_classes
        labels.append(label)

        seq = np.random.randn(sequence_length, n_features)

        # Class-dependent linear trend on the first feature.
        seq[:, 0] += np.linspace(0, label * 0.5, sequence_length)

        # Class 1 also gets a periodic signal on the second feature.
        if label == 1:
            seq[:, 1] += 0.5 * np.sin(np.linspace(0, 4 * np.pi, sequence_length))

        sequences.append(seq)

    return np.array(sequences).astype('float32'), np.array(labels)

def generate_tabular_data(n_samples=1000, save_path='./data/synthetic_tabular.csv'):
    """
    Generate a synthetic tabular (loan-application) dataset and save it as CSV.

    Args:
        n_samples: Number of rows.
        save_path: Destination CSV path; missing parent directories are created.

    Returns:
        The generated pandas DataFrame (also written to `save_path`).
    """
    np.random.seed(42)  # reproducible table

    data = {
        'age': np.random.randint(18, 80, n_samples),
        'income': np.random.normal(50000, 15000, n_samples),
        'education_years': np.random.randint(8, 20, n_samples),
        'experience_years': np.random.randint(0, 40, n_samples),
        'city_size': np.random.choice(['Small', 'Medium', 'Large'], n_samples),
        'has_car': np.random.choice([0, 1], n_samples),
        'has_house': np.random.choice([0, 1], n_samples),
        'credit_score': np.random.randint(300, 850, n_samples),
        'loan_amount': np.random.normal(100000, 50000, n_samples),
        'interest_rate': np.random.normal(5.5, 2.0, n_samples),
    }

    # Rule-based target: approved only when all three criteria hold.
    data['loan_approved'] = (
        (data['credit_score'] > 650).astype(int) &
        (data['income'] > 40000).astype(int) &
        (data['loan_amount'] < 200000).astype(int)
    )

    df = pd.DataFrame(data)

    # BUG FIX: os.makedirs('') raises FileNotFoundError when save_path has
    # no directory component (e.g. 'table.csv'); only create a directory
    # when there is actually one to create.
    parent = os.path.dirname(save_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    df.to_csv(save_path, index=False)
    return df

def save_data(X, y, name='dataset', data_dir='./data'):
    """
    Persist a feature/label pair as .npy files plus a JSON metadata sidecar.

    Writes `<name>_X.npy`, `<name>_y.npy` and `<name>_metadata.json`
    into `data_dir` (created if missing), then prints a confirmation.
    """
    os.makedirs(data_dir, exist_ok=True)

    features_path = os.path.join(data_dir, f'{name}_X.npy')
    labels_path = os.path.join(data_dir, f'{name}_y.npy')
    np.save(features_path, X)
    np.save(labels_path, y)

    # Record shapes/dtypes so the arrays can be inspected without loading.
    metadata = {
        'name': name,
        'X_shape': list(X.shape),
        'y_shape': list(y.shape),
        'X_dtype': str(X.dtype),
        'y_dtype': str(y.dtype),
        'n_samples': X.shape[0],
    }
    with open(os.path.join(data_dir, f'{name}_metadata.json'), 'w') as fh:
        json.dump(metadata, fh, indent=2)

    print(f"Saved {name}: X{X.shape}, y{y.shape}")

def main():
    """Generate and save every sample dataset, printing progress as it goes."""
    banner = "=" * 60
    print(banner)
    print("Generating Sample Data")
    print("Author: RSK World - https://rskworld.in")
    print(banner)
    print()

    data_dir = './data'
    os.makedirs(data_dir, exist_ok=True)

    # Array-style datasets: (announcement, generator thunk, save name).
    array_datasets = [
        ("1. Classification data...", lambda: generate_classification_data(1000, 20, 3), 'classification'),
        ("2. Regression data...", lambda: generate_regression_data(1000, 10), 'regression'),
        ("3. Image data...", lambda: generate_image_data(200, (28, 28), 10), 'images'),
        ("4. Sequence data...", lambda: generate_sequence_data(500, 50, 10, 3), 'sequences'),
    ]
    for announcement, make, name in array_datasets:
        print(announcement)
        X, y = make()
        save_data(X, y, name, data_dir)

    # The tabular dataset saves itself as CSV.
    print("5. Tabular data...")
    df = generate_tabular_data(1000, os.path.join(data_dir, 'tabular.csv'))
    print(f"Saved tabular: {df.shape}")

    print("\n" + banner)
    print("All data generated successfully!")
    print(f"Data saved in: {data_dir}")
    print(banner)

# Script entry point: generate and save every sample dataset.
if __name__ == '__main__':
    main()
183 lines • 5.9 KB
python

About RSK World

Founded by Molla Samser, with Designer & Tester Rima Khatun, RSK World is your one-stop destination for free programming resources, source code, and development tools.

Founder: Molla Samser
Designer & Tester: Rima Khatun

Development

  • Game Development
  • Web Development
  • Mobile Development
  • AI Development
  • Development Tools

Legal

  • Terms & Conditions
  • Privacy Policy
  • Disclaimer

Contact Info

Nutanhat, Mongolkote
Purba Burdwan, West Bengal
India, 713147

+91 93305 39277

hello@rskworld.in
support@rskworld.in

© 2026 RSK World. All rights reserved.

Content used for educational purposes only. View Disclaimer