help@rskworld.in +91 93305 39277
RSK World
  • Home
  • Development
    • Web Development
    • Mobile Apps
    • Software
    • Games
    • Project
  • Technologies
    • Data Science
    • AI Development
    • Cloud Development
    • Blockchain
    • Cyber Security
    • Dev Tools
    • Testing Tools
  • About
  • Contact

Theme Settings

Color Scheme
Display Options
Font Size
100%
Back to Project
RSK World
pytorch-neuralnetworks
/
utils
RSK World
pytorch-neuralnetworks
Neural networks with PyTorch
utils
  • __init__.py (362 B)
  • hyperparameter_tuning.py (6.5 KB)
  • tensorboard_logger.py (3.4 KB)
hyperparameter_tuning.py
utils/hyperparameter_tuning.py
Raw Download
Find: Go to:
"""
Hyperparameter Tuning Utilities - PyTorch Neural Networks
Project: PyTorch Neural Networks
Author: RSK World
Website: https://rskworld.in
Email: help@rskworld.in
Phone: +91 93305 39277
Description: Utilities for hyperparameter tuning
"""

import json
import random
from itertools import product

import torch
import torch.nn as nn
import torch.optim as optim

from training.trainer import Trainer
from training.metrics import evaluate_model


class GridSearch:
    """
    Exhaustive grid search over a hyperparameter grid.

    Every combination in the Cartesian product of the grid values is
    trained and scored on the validation set; the best-scoring
    configuration is reported.

    Project: PyTorch Neural Networks
    Author: RSK World
    Website: https://rskworld.in
    """

    def __init__(self, param_grid, model_class, train_loader, val_loader, device):
        """
        Set up the search.

        Args:
            param_grid: Dict mapping parameter name -> list of candidate values
            model_class: Class instantiated as ``model_class(**params)``
            train_loader: Training data loader
            val_loader: Validation data loader
            device: Device the models are trained on
        """
        self.param_grid = param_grid
        self.model_class = model_class
        self.train_loader = train_loader
        self.val_loader = val_loader
        self.device = device
        # One entry per evaluated configuration (params, score, history).
        self.results = []

    def search(self, epochs=10, verbose=True):
        """
        Train and evaluate every configuration in the grid.

        Args:
            epochs: Number of training epochs per configuration
            verbose: Whether to print progress

        Returns:
            Tuple of (best_params, best_score, all results)
        """
        names = list(self.param_grid.keys())
        combos = list(product(*self.param_grid.values()))

        best_params, best_score = None, float('-inf')

        for idx, combo in enumerate(combos):
            config = dict(zip(names, combo))

            if verbose:
                print(f"\n[{idx+1}/{len(combos)}] Testing: {config}")

            # Fresh model per configuration so runs don't share weights.
            model = self.model_class(**config).to(self.device)

            # 'lr' is consumed both as a model kwarg and as the optimizer
            # learning rate; 0.001 is the fallback when absent.
            optimizer = optim.Adam(model.parameters(), lr=config.get('lr', 0.001))
            criterion = nn.CrossEntropyLoss()

            trainer = Trainer(model, criterion, optimizer, self.device)
            history = trainer.train(self.train_loader, self.val_loader, epochs=epochs)

            # Validation accuracy is the selection metric.
            metrics = evaluate_model(model, self.val_loader, self.device, criterion)
            score = metrics['accuracy']

            self.results.append({
                'params': config,
                'score': score,
                'history': history,
            })

            if verbose:
                print(f"Score: {score:.4f}")

            if score > best_score:
                best_score, best_params = score, config

        return best_params, best_score, self.results

    def save_results(self, filepath):
        """Write all search results to *filepath* as JSON."""
        with open(filepath, 'w') as f:
            # default=str stringifies anything JSON can't encode natively.
            json.dump(self.results, f, indent=2, default=str)
        print(f"Results saved to {filepath}")


class RandomSearch:
    """
    Random search for hyperparameter tuning.

    Samples ``n_iter`` configurations from the given distributions,
    trains each one, and reports the configuration with the best
    validation accuracy.

    Project: PyTorch Neural Networks
    Author: RSK World
    Website: https://rskworld.in
    """

    def __init__(self, param_distributions, model_class, train_loader, val_loader, device, n_iter=10):
        """
        Initialize random search.

        Args:
            param_distributions: Dict mapping parameter name to either a
                list (discrete choices), a 2-tuple (uniform low/high
                range), or a fixed value used verbatim
            model_class: Class instantiated as ``model_class(**params)``
            train_loader: Training data loader
            val_loader: Validation data loader
            device: Device to run on
            n_iter: Number of random configurations to try
        """
        self.param_distributions = param_distributions
        self.model_class = model_class
        self.train_loader = train_loader
        self.val_loader = val_loader
        self.device = device
        self.n_iter = n_iter
        # One entry per evaluated configuration (params, score, history).
        self.results = []

    def _sample_params(self):
        """
        Sample one configuration from the parameter distributions.

        Returns:
            Dict with one concrete value per parameter.
        """
        # BUGFIX: this previously called np.random.choice/np.random.uniform,
        # but numpy was never imported, so any list or tuple distribution
        # raised NameError. The stdlib `random` module provides the same
        # sampling semantics here.
        params = {}
        for key, dist in self.param_distributions.items():
            if isinstance(dist, list):
                params[key] = random.choice(dist)
            elif isinstance(dist, tuple) and len(dist) == 2:
                # 2-tuple means uniform sampling in [low, high].
                params[key] = random.uniform(dist[0], dist[1])
            else:
                # Anything else is treated as a fixed value.
                params[key] = dist
        return params

    def search(self, epochs=10, verbose=True):
        """
        Perform random search.

        Args:
            epochs: Number of training epochs per configuration
            verbose: Whether to print progress

        Returns:
            Tuple of (best_params, best_score, all results)
        """
        best_score = float('-inf')
        best_params = None

        for i in range(self.n_iter):
            params = self._sample_params()

            if verbose:
                print(f"\n[{i+1}/{self.n_iter}] Testing: {params}")

            # Fresh model per trial; 'lr' doubles as a model kwarg and
            # the optimizer learning rate (default 0.001 when absent).
            model = self.model_class(**params).to(self.device)
            optimizer = optim.Adam(model.parameters(), lr=params.get('lr', 0.001))
            criterion = nn.CrossEntropyLoss()

            # Train
            trainer = Trainer(model, criterion, optimizer, self.device)
            history = trainer.train(self.train_loader, self.val_loader, epochs=epochs)

            # Validation accuracy is the selection metric.
            results = evaluate_model(model, self.val_loader, self.device, criterion)
            score = results['accuracy']

            self.results.append({
                'params': params,
                'score': score,
                'history': history
            })

            if verbose:
                print(f"Score: {score:.4f}")

            if score > best_score:
                best_score = score
                best_params = params

        return best_params, best_score, self.results

202 lines•6.5 KB
python

About RSK World

Founded by Molla Samser, with Designer & Tester Rima Khatun, RSK World is your one-stop destination for free programming resources, source code, and development tools.

Founder: Molla Samser
Designer & Tester: Rima Khatun

Development

  • Game Development
  • Web Development
  • Mobile Development
  • AI Development
  • Development Tools

Legal

  • Terms & Conditions
  • Privacy Policy
  • Disclaimer

Contact Info

Nutanhat, Mongolkote
Purba Burdwan, West Bengal
India, 713147

+91 93305 39277

hello@rskworld.in
support@rskworld.in

© 2026 RSK World. All rights reserved.

Content used for educational purposes only. View Disclaimer