help@rskworld.in +91 93305 39277
RSK World
  • Home
  • Development
    • Web Development
    • Mobile Apps
    • Software
    • Games
    • Project
  • Technologies
    • Data Science
    • AI Development
    • Cloud Development
    • Blockchain
    • Cyber Security
    • Dev Tools
    • Testing Tools
  • About
  • Contact

Theme Settings

Color Scheme
Display Options
Font Size
100%
Back to Project
RSK World
tensorflow-deeplearning
/
src
RSK World
tensorflow-deeplearning
Deep learning with TensorFlow and Keras
src
  • utils
  • __init__.py — 330 B
  • autoencoders.py — 8 KB
  • cnns.py — 6.7 KB
  • custom_layers.py — 8.3 KB
  • data_generator.py — 14.2 KB
  • data_preprocessing.py — 9.9 KB
  • gans.py — 7 KB
  • model_deployment.py — 8.7 KB
  • model_evaluation.py — 10.5 KB
  • model_training.py — 10.1 KB
  • neural_networks.py — 4.7 KB
  • rnns.py — 6.8 KB
  • transfer_learning.py — 5.4 KB
  • transformers.py — 7.8 KB
  • visualization.py — 9.6 KB
clean_cache.py | model_deployment.py
scripts/clean_cache.py
Raw Download
Find: Go to:
"""
Clean __pycache__ folders and temporary files
Author: RSK World
Website: https://rskworld.in
Email: help@rskworld.in
Phone: +91 93305 39277

This script removes __pycache__ folders and .pyc files from the project.
"""

import os
import shutil
import sys

def remove_pycache(root_dir='.'):
    """
    Remove all __pycache__ directories and .pyc/.pyo/.pyd files under root_dir.

    Skips virtual environments and common ignore directories by exact
    path-component name (the previous substring check wrongly skipped any
    path merely containing "env", e.g. "environment/").

    Args:
        root_dir: Root directory to search (default: current directory)
    """
    # Directory names (exact component match) that should never be cleaned.
    skip_names = {'venv', 'env', '.git', 'node_modules'}

    removed_count = 0
    removed_size = 0

    print("=" * 60)
    print("Cleaning __pycache__ folders and .pyc files")
    print("Author: RSK World - https://rskworld.in")
    print("=" * 60)
    print()

    for root, dirs, files in os.walk(root_dir):
        # Remove any __pycache__ directory at this level before walking on.
        if '__pycache__' in dirs:
            pycache_path = os.path.join(root, '__pycache__')
            try:
                # Total the contained file sizes so the summary can report
                # how much space was reclaimed.
                size = sum(
                    os.path.getsize(os.path.join(dirpath, filename))
                    for dirpath, _, filenames in os.walk(pycache_path)
                    for filename in filenames
                )
                shutil.rmtree(pycache_path)
                removed_count += 1
                removed_size += size
                print(f"Removed: {pycache_path}")
            except OSError as e:
                print(f"Error removing {pycache_path}: {e}")

        # Prune in place so os.walk does not descend into skipped trees
        # (or into the __pycache__ directory just deleted above).
        dirs[:] = [d for d in dirs if d not in skip_names and d != '__pycache__']

        # Remove stray compiled-bytecode files outside __pycache__.
        for file in files:
            if file.endswith(('.pyc', '.pyo', '.pyd')):
                file_path = os.path.join(root, file)
                try:
                    size = os.path.getsize(file_path)
                    os.remove(file_path)
                    removed_count += 1
                    removed_size += size
                    print(f"Removed: {file_path}")
                except OSError as e:
                    print(f"Error removing {file_path}: {e}")

    print()
    print("=" * 60)
    print("Cleanup complete!")
    print(f"Removed {removed_count} items")
    print(f"Freed {removed_size / 1024:.2f} KB")
    print("=" * 60)

def clean_data_temp_files(data_dir='./data'):
    """
    Delete temporary files from the data directory, keeping its structure.

    Files ending in .tmp, .temp, or .log are removed; everything else is
    left untouched. Missing directories are reported, not treated as errors.

    Args:
        data_dir: Data directory path
    """
    if not os.path.exists(data_dir):
        print(f"Data directory not found: {data_dir}")
        return

    print("\nCleaning temporary files from data directory...")

    removed = 0
    for current_root, _dirnames, filenames in os.walk(data_dir):
        for name in filenames:
            if not name.endswith(('.tmp', '.temp', '.log')):
                continue
            target = os.path.join(current_root, name)
            try:
                os.remove(target)
                removed += 1
                print(f"Removed: {target}")
            except Exception as err:
                print(f"Error removing {target}: {err}")

    if removed > 0:
        print(f"Removed {removed} temporary files from data directory")
    else:
        print("No temporary files found in data directory")

def main():
    """Parse command-line options and run the requested cleanup passes."""
    import argparse

    parser = argparse.ArgumentParser(description='Clean __pycache__ and temporary files')
    parser.add_argument('--data', action='store_true', help='Also clean data directory temp files')
    parser.add_argument('--root', default='.', help='Root directory to clean (default: current directory)')

    args = parser.parse_args()

    # Always clean bytecode caches under the requested root.
    remove_pycache(args.root)

    # Resolve the data directory relative to --root so both passes operate
    # on the same tree. With the default root '.' this is the same './data'
    # path as before, so default behavior is unchanged.
    if args.data:
        clean_data_temp_files(os.path.join(args.root, 'data'))

if __name__ == '__main__':
    main()
124 lines•3.9 KB
python
src/model_deployment.py
Raw Download
Find: Go to:
"""
Model Deployment and Serving with TensorFlow
Author: RSK World
Website: https://rskworld.in
Email: help@rskworld.in
Phone: +91 93305 39277

This module demonstrates model saving, loading, and deployment strategies.
"""

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import numpy as np
import os
import json

def save_model_complete(model, model_dir='./saved_models'):
    """
    Save a trained Keras model in multiple formats for different
    deployment scenarios (TF Serving, plain Keras, weights-only, JSON).

    Args:
        model: Keras model to save
        model_dir: Directory to save models

    Returns:
        Tuple of (SavedModel dir, H5 file, weights file, architecture JSON file).
    """
    os.makedirs(model_dir, exist_ok=True)

    # 1. SavedModel format -- the recommended layout for TF tooling.
    sm_dir = os.path.join(model_dir, 'savedmodel')
    model.save(sm_dir, save_format='tf')
    print(f"Model saved as SavedModel: {sm_dir}")

    # 2. Single-file HDF5 format.
    h5_file = os.path.join(model_dir, 'model.h5')
    model.save(h5_file, save_format='h5')
    print(f"Model saved as H5: {h5_file}")

    # 3. Weights only; the architecture must be rebuilt separately to load.
    weights_file = os.path.join(model_dir, 'weights.h5')
    model.save_weights(weights_file)
    print(f"Weights saved: {weights_file}")

    # 4. Architecture-only description, pretty-printed as JSON.
    arch_file = os.path.join(model_dir, 'model_architecture.json')
    with open(arch_file, 'w') as f:
        json.dump(json.loads(model.to_json()), f, indent=2)
    print(f"Model architecture saved: {arch_file}")

    return sm_dir, h5_file, weights_file, arch_file

def load_model_from_savedmodel(model_path):
    """
    Restore a Keras model previously exported in SavedModel format.

    Args:
        model_path: Path to SavedModel directory

    Returns:
        Loaded Keras model
    """
    restored = keras.models.load_model(model_path)
    print(f"Model loaded from: {model_path}")
    return restored

def load_model_from_h5(h5_path):
    """
    Restore a Keras model from a single-file HDF5 archive.

    Args:
        h5_path: Path to H5 file

    Returns:
        Loaded Keras model
    """
    restored = keras.models.load_model(h5_path)
    print(f"Model loaded from: {h5_path}")
    return restored

def convert_to_tflite(model, tflite_path='./model.tflite', quantize=False):
    """
    Convert a Keras model to TensorFlow Lite for mobile/edge deployment.

    Args:
        model: Keras model to convert
        tflite_path: Path to save TFLite model
        quantize: Whether to apply default post-training quantization

    Returns:
        Path to the written TFLite model file
    """
    converter = tf.lite.TFLiteConverter.from_keras_model(model)

    # Optimize.DEFAULT enables the standard post-training quantization pass.
    if quantize:
        converter.optimizations = [tf.lite.Optimize.DEFAULT]

    flatbuffer = converter.convert()
    with open(tflite_path, 'wb') as out:
        out.write(flatbuffer)

    print(f"TFLite model saved: {tflite_path}")
    return tflite_path

def convert_to_tensorflow_js(model, js_dir='./tfjs_model'):
    """
    Convert a Keras model to TensorFlow.js format for web deployment.

    Requires the optional `tensorflowjs` package; if it is missing, a
    warning is printed and the ImportError is re-raised to the caller.

    Args:
        model: Keras model to convert
        js_dir: Directory to save TensorFlow.js model

    Raises:
        ImportError: if `tensorflowjs` is not installed.
    """
    try:
        import tensorflowjs as tfjs
        os.makedirs(js_dir, exist_ok=True)
        tfjs.converters.save_keras_model(model, js_dir)
        print(f"TensorFlow.js model saved: {js_dir}")
    except ImportError:
        print("Warning: tensorflowjs not installed. Install it with: pip install tensorflowjs")
        raise

def create_tf_serving_model(model, serving_dir='./serving_model'):
    """
    Export a model in the directory layout expected by TensorFlow Serving.

    TF Serving requires each export to live in a numeric version
    subdirectory; this helper writes version "1".

    Args:
        model: Keras model to prepare
        serving_dir: Directory to save serving model
    """
    # makedirs on the version dir creates serving_dir as well.
    version_dir = os.path.join(serving_dir, '1')
    os.makedirs(version_dir, exist_ok=True)

    model.save(version_dir, save_format='tf')
    print(f"Model prepared for TF Serving: {serving_dir}")

def create_prediction_function(model):
    """
    Wrap a trained model in a plain callable for simpler deployment code.

    Args:
        model: Trained Keras model

    Returns:
        A function mapping input data (list or array) to model predictions.
    """
    def predict(input_data):
        """
        Run the wrapped model on input_data and return its predictions.

        Args:
            input_data: Input data (numpy array or list)

        Returns:
            Predictions
        """
        # Lists are converted to arrays; arrays pass through untouched.
        batch = np.array(input_data) if isinstance(input_data, list) else input_data
        return model.predict(batch, verbose=0)

    return predict

def create_rest_api_wrapper(model, model_name='tensorflow_model'):
    """
    Create a REST API wrapper template for model serving.

    NOTE(review): the `model` argument is currently unused -- the generated
    code loads the model from './saved_models/savedmodel' at startup.

    Args:
        model: Trained Keras model
        model_name: Name of the model (interpolated into the template)

    Returns:
        Flask app code template (as string)
    """
    # Doubled braces ({{ }}) render as literal braces in this f-string;
    # only {model_name} is interpolated into the template.
    flask_code = f"""
# Flask REST API for {model_name}
# Author: RSK World - https://rskworld.in

from flask import Flask, request, jsonify
import numpy as np
import tensorflow as tf
from tensorflow import keras

app = Flask(__name__)

# Load model
model = keras.models.load_model('./saved_models/savedmodel')

@app.route('/predict', methods=['POST'])
def predict():
    try:
        # Get input data
        data = request.json
        input_data = np.array(data['input'])
        
        # Make prediction
        predictions = model.predict(input_data, verbose=0)
        
        # Return results
        return jsonify({{
            'success': True,
            'predictions': predictions.tolist()
        }})
    except Exception as e:
        return jsonify({{
            'success': False,
            'error': str(e)
        }}), 400

@app.route('/health', methods=['GET'])
def health():
    return jsonify({{'status': 'healthy'}})

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
"""
    
    return flask_code

def benchmark_model(model, test_data, batch_sizes=(1, 8, 16, 32, 64)):
    """
    Benchmark model inference throughput at several batch sizes.

    Fixes vs. the previous version: the default batch-size list is now an
    immutable tuple (mutable default arguments are shared across calls);
    `import time` is hoisted out of the loop; timing uses the monotonic
    `time.perf_counter`; and samples/sec is computed from the number of
    samples actually run, which may be fewer than batch_size*10 when
    `test_data` is small.

    Args:
        model: Keras model to benchmark
        test_data: Test data for benchmarking (sliceable, e.g. ndarray)
        batch_sizes: Batch sizes to test (default: 1, 8, 16, 32, 64)

    Returns:
        Dict mapping batch size -> {'time': seconds, 'samples_per_second': float}
    """
    import time

    results = {}

    for batch_size in batch_sizes:
        # Warm up so one-time graph tracing / lazy init is excluded.
        _ = model.predict(test_data[:batch_size], verbose=0)

        batch = test_data[:batch_size * 10]
        n_samples = len(batch)  # may be < batch_size*10 for small test_data

        start_time = time.perf_counter()
        _ = model.predict(batch, batch_size=batch_size, verbose=0)
        elapsed_time = time.perf_counter() - start_time

        results[batch_size] = {
            'time': elapsed_time,
            # Guard against a sub-resolution elapsed time of exactly zero.
            'samples_per_second': n_samples / elapsed_time if elapsed_time > 0 else float('inf')
        }

    return results

def example_usage():
    """
    Demonstrate the deployment helpers end to end on a small dense model.

    Builds and briefly trains a toy classifier on random data, saves it in
    every supported format, converts it to TFLite, smoke-tests the
    prediction wrapper, and prints benchmark numbers.

    Returns:
        The trained demo model.
    """
    # Build and compile a small MNIST-shaped classifier.
    model = keras.Sequential([
        layers.Dense(128, activation='relu', input_shape=(784,)),
        layers.Dense(64, activation='relu'),
        layers.Dense(10, activation='softmax')
    ])
    model.compile(
        optimizer='adam',
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy']
    )

    # Fit briefly on random data just so the weights are "trained".
    X_train = np.random.randn(1000, 784).astype('float32')
    y_train = np.random.randint(0, 10, 1000)
    model.fit(X_train, y_train, epochs=5, verbose=0)

    # Persist in every supported format.
    savedmodel_path, h5_path, weights_path, json_path = save_model_complete(model)

    # Produce the mobile-deployment artifact.
    tflite_path = convert_to_tflite(model, quantize=False)

    # Wrap and smoke-test the prediction function.
    predict_fn = create_prediction_function(model)
    test_input = np.random.randn(1, 784).astype('float32')
    predictions = predict_fn(test_input)
    print(f"\nPredictions shape: {predictions.shape}")

    # Measure throughput across batch sizes.
    test_data = np.random.randn(100, 784).astype('float32')
    benchmark_results = benchmark_model(model, test_data)
    print("\nBenchmark Results:")
    for batch_size, result in benchmark_results.items():
        print(f"Batch size {batch_size}: {result['samples_per_second']:.2f} samples/sec")

    return model

if __name__ == '__main__':
    # Run the end-to-end demo only when executed as a script, not on import.
    print("Model Deployment and Serving with TensorFlow")
    print("Author: RSK World - https://rskworld.in")
    model = example_usage()
308 lines•8.7 KB
python

About RSK World

Founded by Molla Samser, with Designer & Tester Rima Khatun, RSK World is your one-stop destination for free programming resources, source code, and development tools.

Founder: Molla Samser
Designer & Tester: Rima Khatun

Development

  • Game Development
  • Web Development
  • Mobile Development
  • AI Development
  • Development Tools

Legal

  • Terms & Conditions
  • Privacy Policy
  • Disclaimer

Contact Info

Nutanhat, Mongolkote
Purba Burdwan, West Bengal
India, 713147

+91 93305 39277

hello@rskworld.in
support@rskworld.in

© 2026 RSK World. All rights reserved.

Content used for educational purposes only. View Disclaimer