easyvvuq.sampling.simplex_stochastic_collocation


THE SIMPLEX STOCHASTIC COLLOCATION SAMPLER OF JEROEN WITTEVEEN (1980-2015)

Source:

Witteveen, J. A. S., & Iaccarino, G. (2013). Simplex stochastic collocation with ENO-type stencil selection for robust uncertainty quantification. Journal of Computational Physics, 239, 1-21.

Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016). Simplex-stochastic collocation method with improved scalability. Journal of Computational Physics, 310, 301-328.
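
A minimal usage sketch is given below (assumptions: EasyVVUQ and chaospy are available, and the parameter names "x1" and "x2" are illustrative only; in a full UQ workflow the sampler is normally driven through an EasyVVUQ campaign together with the corresponding SSC analysis element rather than called by hand):

    import chaospy as cp
    from easyvvuq.sampling.simplex_stochastic_collocation import SSCSampler

    # two uniformly distributed inputs (illustrative names)
    vary = {
        "x1": cp.Uniform(0.0, 1.0),
        "x2": cp.Uniform(-1.0, 1.0),
    }

    sampler = SSCSampler(vary=vary, max_polynomial_order=3)

    # the initial grid holds the 2 ** n_xi hypercube corners plus the midpoint,
    # connected by a Delaunay triangulation of simplex elements
    print(sampler.n_samples)     # 5 points for two inputs
    print(sampler.n_elements)    # number of simplices in the triangulation

    # drawing a sample yields one dict of input values per code run
    run = next(sampler)          # e.g. {'x1': 0.0, 'x2': -1.0}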

   1"""
   2-------------------------------------------------------------------------
   3THE SIMPLEX STOCHASTIC COLLOCATION SAMPLER OF JEROEN WITTEVEEN (1980-2015)
   4-------------------------------------------------------------------------
   5
   6Source:
   7
   8Witteveen, J. A. S., & Iaccarino, G. (2013).
   9Simplex stochastic collocation with ENO-type stencil selection for robust
  10uncertainty quantification. Journal of Computational Physics, 239, 1-21.
  11
  12Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016).
  13Simplex-stochastic collocation method with improved scalability.
  14Journal of Computational Physics, 310, 301-328.
  15
  16"""
  17
  18from .base import BaseSamplingElement, Vary
  19# import chaospy as cp
  20import numpy as np
  21import pickle
  22from itertools import product, combinations
  23# import logging
  24from scipy.spatial import Delaunay
  25from scipy.special import factorial
  26import multiprocessing as mp
  27
  28
  29__author__ = "Wouter Edeling"
  30__copyright__ = """
  31
  32    Copyright 2018 Robin A. Richardson, David W. Wright
  33
  34    This file is part of EasyVVUQ
  35
  36    EasyVVUQ is free software: you can redistribute it and/or modify
  37    it under the terms of the Lesser GNU General Public License as published by
  38    the Free Software Foundation, either version 3 of the License, or
  39    (at your option) any later version.
  40
  41    EasyVVUQ is distributed in the hope that it will be useful,
  42    but WITHOUT ANY WARRANTY; without even the implied warranty of
  43    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  44    Lesser GNU General Public License for more details.
  45
  46    You should have received a copy of the Lesser GNU General Public License
  47    along with this program.  If not, see <https://www.gnu.org/licenses/>.
  48
  49"""
  50__license__ = "LGPL"
  51
  52
  53class SSCSampler(BaseSamplingElement, sampler_name="ssc_sampler"):
  54    """
  55    Simplex Stochastic Collocation sampler
  56    """
  57
  58    def __init__(self, vary=None, max_polynomial_order=4):
  59        """
  60        Create an SSC sampler object.
  61
  62        Parameters
  63        ----------
  64        vary: dict or None
  65            keys = parameters to be sampled, values = distributions.
  66        max_polynomial_order : int, optional
  67            The maximum polynomial order, default is 4.
  68        Returns
  69        -------
  70        None.
  71
  72        """
  73        # number of random inputs
  74        self.n_xi = len(vary)
  75        # vary dictionary of Chaospy input distribution
  76        self.vary = Vary(vary)
  77        # initial Delaunay triangulation
  78        self.tri = self.init_grid()
  79        # code sample counter
  80        self.count = 0
  81        self._n_samples = self.tri.points.shape[0]
  82        self.set_pmax_cutoff(max_polynomial_order)
  83
  84    def init_grid(self):
  85        """
  86        Create the initial n_xi-dimensional Delaunay discretization
  87
  88
  89        Returns
  90        -------
  91        tri : scipy.spatial.qhull.Delaunay
   92            Initial triangulation of 2 ** n_xi + 1 points.
  93
  94        """
  95
   96        # create initial hypercube points through the n_xi-dimensional
   97        # Cartesian product of the lower and upper limits of the
   98        # chaospy input distributions
  99        corners = [[param.lower[0], param.upper[0]] for param in self.vary.get_values()]
 100        self.corners = corners
 101        xi_k_jl = np.array(list(product(*corners)))
 102
 103        # add a point in the middle of the hypercube
 104        xi_k_jl = np.append(xi_k_jl, np.mean(xi_k_jl, axis=0).reshape([1, -1]), 0)
 105
 106        if self.n_xi > 1:
 107            # Create initial Delaunay discretization
 108            """
 109            NOTE: FOUND AN ERROR IN THE 'incremental' OPTION. IN A 3D CASE IT USES
 110            4 VERTICES TO MAKE A SQUARE IN A PLANE, NOT A 3D SIMPLEX. TURNED IT OFF.
 111            CONSEQUENCE: I NEED TO RE-MAKE A NEW 'Delaunay' OBJECT EVERYTIME THE GRID
 112            IS REFINED.
 113            """
 114            # tri = Delaunay(xi_k_jl, incremental=True)
 115            tri = Delaunay(xi_k_jl)
 116
 117        else:
 118            tri = Tri1D(xi_k_jl)
 119
 120        return tri
 121
 122    def find_pmax(self, n_s):
 123        """
 124        Finds the maximum polynomial stencil order that can be sustained
 125        given the current number of code evaluations. The stencil size
  126        required for polynomial order p is given by:
 127
 128        stencil size = (n_xi + p)! / (n_xi!p!)
 129
 130        where n_xi is the number of random inputs. This formula is used
 131        to find p.
 132
 133        Parameters
 134        ----------
 135        n_xi : int
 136            Number of random parameters.
 137        n_s : int
 138            Number of code samples.
 139
 140        Returns
 141        -------
 142        int
 143            The highest polynomial order that can be sustained given
 144            the number of code evaluations.
 145
 146        """
 147        p = 1
 148        stencil_size = factorial(self.n_xi + p) / (factorial(self.n_xi) * factorial(p))
 149        while stencil_size <= n_s:
 150            p += 1
 151            stencil_size = factorial(self.n_xi + p) / (factorial(self.n_xi) * factorial(p))
 152
 153        return p - 1
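
    # Illustrative example (not part of the original module): with n_xi = 2
    # random inputs, polynomial order p = 2 needs (2 + 2)! / (2! 2!) = 6
    # stencil points, so find_pmax(6) returns 2, while find_pmax(5) still
    # returns 1 because only the 3-point first-order stencil fits.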
 154
 155    def set_pmax_cutoff(self, pmax_cutoff):
 156        """
  157        Set the maximum allowed polynomial order of the surrogate.
  158
  159        Parameters
  160        ----------
  161        pmax_cutoff : int
 162            The max polynomial order.
 163
 164        Returns
 165        -------
 166        None.
 167
 168        """
 169
 170        self.pmax_cutoff = pmax_cutoff
 171        self.i_norm_le_pj = self.compute_i_norm_le_pj(pmax_cutoff)
 172
 173    def compute_vol(self):
 174        """
 175        Use analytic formula to compute the volume of all simplices.
 176
 177        https://en.wikipedia.org/wiki/Simplex#Volume
 178
 179        Returns
 180        -------
 181        vols : array, size (n_e,)
 182            The volumes of the n_e simplices.
 183
 184        """
 185        n_e = self.tri.nsimplex
 186        vols = np.zeros(n_e)
 187        for j in range(n_e):
 188            xi_k_jl = self.tri.points[self.tri.simplices[j]]
 189            D = np.zeros([self.n_xi, self.n_xi])
 190
 191            for i in range(self.n_xi):
 192                D[:, i] = xi_k_jl[i, :] - xi_k_jl[-1, :]
 193
 194            det_D = np.linalg.det(D)
 195            if det_D == 0:
 196                print('Warning: zero determinant in volume calculation.')
 197            vols[j] = 1. / factorial(self.n_xi) * np.abs(det_D)
 198
 199        return vols
 200
 201    def compute_xi_center_j(self):
 202        """
 203        Compute the center of all simplex elements.
 204
 205        Returns
 206        -------
  207        xi_center_j : array, shape (n_e, n_xi)
 208            The cell centers of the n_e simplices.
 209
 210        """
 211        # number of simplex elements
 212        n_e = self.tri.nsimplex
 213        xi_center_j = np.zeros([n_e, self.n_xi])
 214        for j in range(n_e):
 215            xi_center_j[j, :] = 1 / (self.n_xi + 1) * \
 216                np.sum(self.tri.points[self.tri.simplices[j]], 0)
 217
 218        return xi_center_j
 219
 220    def compute_sub_simplex_vertices(self, simplex_idx):
 221        """
  222        Compute the vertices of the sub-simplex. The sub-simplex is contained
  223        in the larger simplex. The larger simplex is refined by randomly
  224        placing a sample within the sub-simplex.
 225
 226        Parameters
 227        ----------
 228        simplex_idx : int
 229            The index of the simplex
 230
 231        Returns
 232        -------
 233        xi_sub_jl : array, size (n_xi + 1, n_xi)
 234            The vertices of the sub simplex.
 235
 236        """
 237        xi_k_jl = self.tri.points[self.tri.simplices[simplex_idx]]
 238        xi_sub_jl = np.zeros([self.n_xi + 1, self.n_xi])
 239        for l in range(self.n_xi + 1):
 240            idx = np.delete(range(self.n_xi + 1), l)
 241            xi_sub_jl[l, :] = np.sum(xi_k_jl[idx], 0)
 242        xi_sub_jl = xi_sub_jl / self.n_xi
 243
 244        return xi_sub_jl
 245
 246    def sample_inputs(self, n_mc):
 247        """
 248        Draw n_mc random values from the input distribution.
 249
 250        Parameters
 251        ----------
 252        n_mc : int
 253            The number of Monte Carlo samples.
 254
 255        Returns
 256        -------
 257        xi : array, shape(n_mc, n_xi)
 258            n_mc random samples from the n_xi input distributions.
 259
 260        """
 261        xi = np.zeros([n_mc, self.n_xi])
 262        for i, param in enumerate(self.vary.get_values()):
 263            xi[:, i] = param.sample(n_mc)
 264        return xi
 265
 266    def compute_probability(self):
 267        """
 268        Compute the probability Omega_j for all simplex elements;
 269
 270        Omega_j = int p(xi) dxi,
 271
 272        with integration separately over each simplex using Monte Carlo
 273        sampling.
 274
 275        Returns
 276        -------
 277        prob_j : array, size (n_e,)
 278            The probabilities of each simplex.
 279
 280        """
 281        n_e = self.tri.nsimplex
 282
 283        print('Computing simplex probabilities...')
 284        prob_j = np.zeros(n_e)
 285
 286        # number of MC samples
 287        n_mc = np.min([10 ** (self.n_xi + 3), 10 ** 7])
 288
 289        xi = self.sample_inputs(n_mc)
 290
  291        # find the simplex indices of xi
  292        idx = self.tri.find_simplex(xi)
  293
  294        # compute simplex probabilities
 295        for i in range(n_mc):
 296            prob_j[idx[i]] += 1
 297
 298        prob_j = prob_j / np.double(n_mc)
 299        print('done, used ' + str(n_mc) + ' samples.')
 300        zero_idx = (prob_j == 0).nonzero()[0].size
 301        print('% of simplices with no samples = ' + str((100.0 * zero_idx) / n_e))
 302
 303        return prob_j
 304
 305    def compute_i_norm_le_pj(self, p_max):
 306        """
 307        Compute the multi-index set {i | |i| = i_1 + ... + i_{n_xi} <= p},
 308        for p = 1,...,p_max
 309
 310        Parameters
 311        ----------
 312        p_max : int
 313            The max polynomial order of the index set.
 314
 315        Returns
 316        -------
 317        i_norm_le_pj : dict
  318            The N + 1 multi-indices of the local basis, per polynomial order p.
 319
 320        """
 321        i_norm_le_pj = {}
 322
 323        for p in range(1, p_max + 1):
  324            # max(i_1, i_2, ..., i_{n_xi}) <= p
  325            multi_idx = np.array(list(product(range(p + 1), repeat=self.n_xi)))
  326
  327            # keep only the indices with i_1 + ... + i_{n_xi} <= p
 328            idx = np.where(np.sum(multi_idx, axis=1) <= p)[0]
 329            multi_idx = multi_idx[idx]
 330
 331            i_norm_le_pj[p] = multi_idx
 332
 333        return i_norm_le_pj
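
    # Illustrative example (not part of the original module): for n_xi = 2,
    # i_norm_le_pj[1] = {(0, 0), (0, 1), (1, 0)} and
    # i_norm_le_pj[2] = {(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (2, 0)},
    # i.e. the 3 and 6 monomial exponents of the first- and second-order bases.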
 334
 335    def compute_Psi(self, xi_Sj, pmax):
 336        """
 337        Compute the Vandermonde matrix Psi, given N + 1 points xi from the
 338        j-th interpolation stencil, and a multi-index set of polynomial
  339        orders |i| = i_1 + ... + i_{n_xi} <= polynomial order.
 340
 341        Parameters
 342        ----------
 343        xi_Sj : array, shape (N + 1, n_xi)
 344            The simplex n_xi-dimensional points of the j-th interpolation
 345            stencil S_j.
 346        pmax : int
 347            The max polynomial order of the local stencil.
 348
 349        Returns
 350        -------
 351        Psi : array, shape (N + 1, N + 1)
 352            The Vandermonde interpolation matrix consisting of monomials
  353            xi_1 ** i_1 * ... * xi_{n_xi} ** i_{n_xi}.
 354
 355        """
 356        Np1 = xi_Sj.shape[0]
 357
 358        Psi = np.ones([Np1, Np1])
 359
 360        for row in range(Np1):
 361            for col in range(Np1):
 362                for j in range(self.n_xi):
  363                    # compute monomial xi_1 ** i_1 * ... * xi_{n_xi} ** i_{n_xi}
 364                    Psi[row, col] *= xi_Sj[row][j] ** self.i_norm_le_pj[pmax][col][j]
 365
 366        return Psi
 367
 368    def w_j(self, xi, c_jl, pmax):
 369        """
 370        Compute the surrogate local interpolation at point xi.
 371
 372        # TODO: right now this assumes a scalar output. Modify the
 373        code for vector-valued outputs.
 374
 375        Parameters
 376        ----------
 377        xi : array, shape (n_xi,)
 378            A point inside the stochastic input domain.
 379        c_jl : array, shape (N + 1,)
 380            The interpolation coefficients of the j-th stencil, with
 381            l = 0, ..., N.
 382        pmax : int
 383            The max polynomial order of the local stencil.
 384
 385        Returns
 386        -------
 387        w_j_at_xi : float
 388            The surrogate prediction at xi.
 389
 390        """
 391
 392        # number of points in the j-th interpolation stencil
 393        Np1 = c_jl.shape[0]
 394
 395        # vector of interpolation monomials
 396        Psi_xi = np.ones([Np1, 1])
 397        for i in range(Np1):
 398            for j in range(self.n_xi):
  399                # raise xi to the powers given by the multi-index entries
 400                Psi_xi[i] *= xi[j] ** self.i_norm_le_pj[pmax][i][j]
 401
 402        # surrogate prediction
 403        w_j_at_xi = np.sum(c_jl * Psi_xi)
 404
 405        return w_j_at_xi
 406
 407    def check_LEC(self, p_j, v, S_j, n_mc, max_jobs=4):
 408        """
  409        Check the Local Extremum Conserving property of all simplex elements.
 410
 411        Parameters
 412        ----------
 413        p_j : array, shape (n_e,)
 414            The polynomial order of each element.
 415        v : array, shape (N + 1,)
 416            The (scalar) code outputs. #TODO:modify when vectors are allowed
 417        S_j : array, shape (n_e, n_s)
  418            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
  419            ordered from the closest to the furthest neighbour. The first
  420            n_xi + 1 indices belong to the j-th simplex itself.
 421        n_mc : int
 422            The number of Monte Carlo samples to use in checking the LEC
 423            conditions.
 424        max_jobs : int
  425            The number of LEC checks (one per element) that can be run in
 426            parallel.
 427
 428        Returns
 429        -------
 430        None.
 431
 432        """
 433
 434        n_e = self.tri.nsimplex
 435        print('Checking LEC condition of ' + str(n_e) + ' stencils...')
 436
  437        # lists that will hold the check_LEC_j processes and their result queues
 438        jobs = []
 439        queues = []
 440
 441        running_jobs = 0
 442        j = 0
 443        n_jobs = n_e
 444
 445        el_idx = {}
 446
 447        # r = np.array(list(product([0.25, 0.75], repeat=self.n_xi)))
 448
 449        while n_jobs > 0:
 450
 451            # check how many processes are still alive
 452            for p in jobs:
 453                if p.is_alive() == False:
 454                    jobs.remove(p)
 455
 456            # number of processes still running
 457            running_jobs = len(jobs)
 458
 459            # re-fill jobs with max max_jobs processes
 460            while running_jobs < max_jobs and n_jobs > 0:
 461                queue = mp.Queue()
 462                prcs = mp.Process(target=self.check_LEC_j,
 463                                  args=(p_j[j], v, S_j[j, :], n_mc, queue))
 464                prcs.start()
 465                running_jobs += 1
 466                n_jobs -= 1
 467                j += 1
 468                jobs.append(prcs)
 469                queues.append(queue)
 470
 471        # retrieve results
 472        for j in range(n_e):
 473            # jobs[j].join()
 474            tmp = queues[j].get()
 475            p_j[j] = tmp['p_j[j]']
 476            el_idx[j] = tmp['el_idx_j']
 477
 478    #    singular_idx = (p_j == -99).nonzero()[0]
 479    #
 480    #    jobs = []
 481    #    queues = []
 482    #    n_jobs = singular_idx.size
 483    #
 484    #    if singular_idx.size > 0:
 485    #        nnS_j = compute_stencil_j(tri)
 486    #
 487    #    k = 0
 488    #    while n_jobs > 0:
 489    #
 490    #        #check how many processes are still alive
 491    #        for p in jobs:
 492    #            if p.is_alive() == False:
 493    #                jobs.remove(p)
 494    #
 495    #        #number of processes still running
 496    #        running_jobs = len(jobs)
 497    #
 498    #        while running_jobs < max_jobs and n_jobs > 0:
 499    #            #print 'Re-computing S_j for j = ' + str(j)
 500    #            queue = mp.Queue()
 501    #            #S_j[j,:], p_j[j], el_idx[j] = \
 502    #            j = singular_idx[k]
 503    #            k += 1
 504    #            prcs = mp.Process(target=non_singular_stencil_j, \
 505    #            args = (tri, p_j, S_j, j, nnS_j, i_norm_le_pj, el_idx, queue,))
 506    #            prcs.start()
 507    #            jobs.append(prcs)
 508    #            queues.append(queue)
 509    #
 510    #            running_jobs += 1
 511    #            n_jobs -= 1
 512    #
 513    #    #retrieve results
 514    #    idx = 0
 515    #    for j in singular_idx:
 516    #        #jobs[idx].join()
 517    #        tmp = queues[idx].get()
 518    #        S_j[j,:] = tmp['S_j[j,:]']
 519    #        p_j[j] = tmp['p_j[j]']
 520    #        el_idx[j] = tmp['el_idx[j]']
 521    #        idx += 1
 522
 523        print('done.')
 524        return {'p_j': p_j, 'S_j': S_j, 'el_idx': el_idx}
 525
 526    def find_simplices(self, S_j):
 527        """
 528        Find the simplex element indices that are in point stencil S_j.
 529
 530        Parameters
 531        ----------
 532        S_j : array, shape (N + 1,)
 533            The interpolation stencil of the j-th simplex element, expressed
 534            as the indices of the simplex points.
 535
 536        Returns
 537        -------
 538        idx : array
 539            The element indices of stencil S_j.
 540
 541        """
 542        n_e = self.tri.nsimplex
 543        # if the overlap between element i and S_j = n_xi + 1, element i
 544        # is in S_j
 545        idx = [np.in1d(self.tri.simplices[i], S_j).nonzero()[0].size for i in range(n_e)]
 546        idx = (np.array(idx) == self.n_xi + 1).nonzero()[0]
 547
 548        return idx
 549
 550    def check_LEC_j(self, p_j, v, S_j, n_mc, queue):
 551        """
  552        Check the LEC condition of the j-th interpolation stencil.
 553
 554        Parameters
 555        ----------
 556        p_j : int
 557            The polynomial order of the j-th stencil.
 558        v : array
 559            The code samples.
 560        S_j : array, shape (N + 1,)
 561            The interpolation stencil of the j-th simplex element, expressed
 562            as the indices of the simplex points.
 563        n_mc : int
 564            The number of Monte Carlo samples to use in checking the LEC
 565            conditions.
 566        queue : multiprocessing queue object
 567            Used to store the results.
 568
 569        Returns
 570        -------
  571        None. The results (polynomial order and element indices) are stored
  572        in the queue.
 573
 574        """
 575        n_xi = self.n_xi
 576        # n_e = self.tri.nsimplex
 577        N = v[0, :].size
 578
 579        # the number of points in S_j
 580        Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
 581        # select the vertices of stencil S_j
 582        xi_Sj = self.tri.points[S_j[0:Np1_j]]
 583        # find the corresponding indices of v
 584        v_Sj = v[S_j[0:Np1_j], :]
 585
 586        # the element indices of the simplices in stencil S_j
 587        el_idx_j = self.find_simplices(S_j[0:Np1_j])
 588
 589        # compute sample matrix
 590        Psi = self.compute_Psi(xi_Sj, p_j)
 591
 592        # check if Psi is well poised
 593        # det_Psi = np.linalg.det(Psi)
 594        # if det_Psi == 0:
 595        #    #print 'Warning: determinant Psi is zero.'
 596        #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
 597        #    #return an error code
 598        #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
 599
 600        # compute the coefficients c_jl
 601        # c_jl = np.linalg.solve(Psi, v_Sj)
 602        c_jl = DAFSILAS(Psi, v_Sj)
 603
 604        # check the LEC condition for all simplices in the STENCIL S_j
 605        k = 0
 606        LEC_checked = False
 607
 608        while LEC_checked == False and p_j != 1:
 609            # sample the simplices in stencil S_j
 610            xi_samples = self.sample_simplex(n_mc, self.tri.points[self.tri.simplices[el_idx_j[k]]])
 611
  612            # function values at the vertices of the k-th simplex in S_j
 613            v_min = np.min(v[self.tri.simplices[el_idx_j[k]]])
 614            v_max = np.max(v[self.tri.simplices[el_idx_j[k]]])
 615
 616            # compute interpolation values at MC sample points
 617            w_j_at_xi = np.zeros([n_mc, N])
 618            for i in range(n_mc):
 619                w_j_at_xi[i, :] = self.w_j(xi_samples[i], c_jl, p_j)
 620
 621            k += 1
 622
 623            # if LEC is violated in any of the simplices
 624            # TODO: make work for vector-valued outputs
 625            eps = 0
 626            if (w_j_at_xi.min() <= v_min - eps or w_j_at_xi.max() >= v_max + eps) and p_j > 1:
 627
 628                p_j -= 1
 629
 630                # the new number of points in S_j
 631                Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
 632                # select the new vertices of stencil S_j
 633                xi_Sj = self.tri.points[S_j[0:Np1_j]]
 634                # find the new corresponding indices of v
 635                v_Sj = v[S_j[0:Np1_j], :]
 636                # the new element indices of the simplices in stencil S_j
 637                el_idx_j = self.find_simplices(S_j[0:Np1_j])
 638                k = 0
 639
 640                if p_j == 1:
 641                    return queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})
 642
 643                # recompute sample matrix
 644                Psi = self.compute_Psi(xi_Sj, p_j)
 645
 646                # check if Psi is well poised
 647                # det_Psi = np.linalg.det(Psi)
 648                # if det_Psi == 0:
 649                #    #print 'Warning: determinant Psi is zero.'
 650                #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
 651                #    #return an error code
 652                #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
 653
 654                # compute the coefficients c_jl
 655                # c_jl = np.linalg.solve(Psi, v_Sj)
 656                c_jl = DAFSILAS(Psi, v_Sj, False)
 657
 658            if k == el_idx_j.size:
 659                LEC_checked = True
 660
 661        queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})
 662
 663    def compute_stencil_j(self):
 664        """
 665        Compute the nearest neighbor stencils of all simplex elements. The
  666        distances to all points are measured with respect to the cell center
 667        of each element.
 668
 669        Returns
 670        -------
 671        S_j : array, shape (n_e, n_s)
  672            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
  673            ordered from the closest to the furthest neighbour. The first
  674            n_xi + 1 indices belong to the j-th simplex itself.
 675
 676        """
 677
 678        n_e = self.tri.nsimplex
 679        n_s = self.tri.npoints
 680        n_xi = self.n_xi
 681        S_j = np.zeros([n_e, n_s])
 682        # compute the center of each element
 683        xi_center_j = self.compute_xi_center_j()
 684
 685        for j in range(n_e):
 686            # the number of points in S_j
 687            # Np1_j = factorial(n_xi + p_j[j])/(factorial(n_xi)*factorial(p_j[j]))
 688            # k = {1,...,n_s}\{k_j0, ..., k_jn_xi}
 689            idx = np.delete(range(n_s), self.tri.simplices[j])
 690            # store the vertex indices of the element itself
 691            S_j[j, 0:n_xi + 1] = np.copy(self.tri.simplices[j])
 692            # loop over the set k
 693            dist = np.zeros(n_s - n_xi - 1)
 694            for i in range(n_s - n_xi - 1):
 695                # ||xi_k - xi_center_j||
 696                dist[i] = np.linalg.norm(self.tri.points[idx[i]] - xi_center_j[j, :])
  697            # store the indices of the remaining points, sorted by their
  698            # distance to the center of simplex j
 699            S_j[j, n_xi + 1:] = idx[np.argsort(dist)]
 700
 701        return S_j.astype('int')
 702
 703    def compute_ENO_stencil(self, p_j, S_j, el_idx, max_jobs=4):
 704        """
 705        Compute the Essentially Non-Oscillatory stencils. The idea behind ENO
 706        stencils is to have higher degree interpolation stencils up to a thin
 707        layer of simplices containing the discontinuity. For a given simplex,
 708        its ENO stencil is created by locating all the nearest-neighbor
  709            stencils that contain element j, and subsequently selecting the one
  710            with the highest polynomial order p_j. This leads to a Delaunay
 711        triangulation which captures the discontinuity better than its
 712        nearest-neighbor counterpart.
 713
 714        Parameters
 715        ----------
 716        p_j : array, shape (n_e,)
 717            The polynomial order of each simplex element.
 718        S_j : array, shape (n_e, n_s)
  719            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
  720            ordered from the closest to the furthest neighbour. The first
  721            n_xi + 1 indices belong to the j-th simplex itself.
 722        el_idx : dict
 723            The element indices for each interpolation stencil.
  724            el_idx[2] gives the element indices of the 3rd interpolation
 725            stencil. The number of elements is determined by the local
 726            polynomial order.
 727        max_jobs : int, optional
 728            The number of ENO stencils that are computed in parallel.
 729            The default is 4.
 730
 731        Returns
 732        -------
 733        ENO_S_j : array, shape (n_e, n_s)
 734            The ENO stencils for each element.
 735        p_j : array, shape (n_e,)
 736            The new polynomial order of each element.
 737        el_idx : dict
 738            The new element indices for each interpolation stencil.
 739
 740        """
 741
 742        n_e = self.tri.nsimplex
 743        n_s = self.tri.npoints
 744
 745        # array to store the ENO stencils
 746        ENO_S_j = np.zeros([n_e, n_s]).astype('int')
 747        # the center of each simplex
 748        xi_centers = self.compute_xi_center_j()
 749
 750        jobs = []
 751        queues = []
 752        running_jobs = 0
 753        j = 0
 754        n_jobs = n_e
 755
 756        print('Computing ENO stencils...')
 757
 758        while n_jobs > 0:
 759
 760            # check how many processes are still alive
 761            for p in jobs:
 762                if p.is_alive() == False:
 763                    jobs.remove(p)
 764
 765            # number of processes still running
 766            running_jobs = len(jobs)
 767
 768            # compute the ENO stencils (in parallel)
 769            while running_jobs < max_jobs and n_jobs > 0:
 770                queue = mp.Queue()
 771                prcs = mp.Process(target=self.compute_ENO_stencil_j,
 772                                  args=(p_j, S_j, xi_centers, j, el_idx, queue))
 773
 774                # print check_LEC_j(tri, p_j, v, S_j, n_mc, j)
 775                jobs.append(prcs)
 776                queues.append(queue)
 777                prcs.start()
 778                n_jobs -= 1
 779                running_jobs += 1
 780                j += 1
 781
 782        # retrieve results
 783        for j in range(n_e):
 784            tmp = queues[j].get()
 785            ENO_S_j[j, :] = tmp['ENO_S_j']
 786            p_j[j] = tmp['p_j_new']
 787            el_idx[j] = tmp['el_idx[j]']
 788
 789        print('done.')
 790
 791        return ENO_S_j, p_j, el_idx
 792
 793    def compute_ENO_stencil_j(self, p_j, S_j, xi_centers, j, el_idx, queue):
 794        """
 795        Compute the ENO stencil of the j-th element.
 796
 797        Parameters
 798        ----------
 799        p_j : array, shape (n_e,)
 800            The polynomial order of each simplex element.
 801        S_j : array, shape (n_e, n_s)
  802            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
  803            ordered from the closest to the furthest neighbour. The first
  804            n_xi + 1 indices belong to the j-th simplex itself.
  805        xi_centers : array, shape (n_e, n_xi)
 806            The center of each simplex.
 807        j : int
 808            The index of the current simplex.
 809        el_idx : dict
 810            The element indices for each interpolation stencil.
  811            el_idx[2] gives the element indices of the 3rd interpolation
 812            stencil. The number of elements is determined by the local
 813            polynomial order.
 814        queue : multiprocessing queue object
 815            Used to store the results.
 816
 817        Returns
 818        -------
  819        None. The results (polynomial order and element indices) are stored
  820        in the queue.
 821
 822        """
 823        n_e = self.tri.nsimplex
 824
 825        # set the stencil to the nearest neighbor stencil
 826        ENO_S_j = np.copy(S_j[j, :])
 827        p_j_new = np.copy(p_j[j])
 828
  829        # loop to find alternative candidate stencils S_ji with p_j > 1 that contain element j
 830        idx = (p_j == 1).nonzero()[0]
 831        all_with_pj_gt_1 = np.delete(range(n_e), idx)
 832
 833        for i in all_with_pj_gt_1:
 834            # found a candidate stencil S_ji
 835            if np.in1d(j, el_idx[i]):
 836                # the candidate stencil has a higher polynomial degree: accept
 837                if p_j[i] > p_j_new:
 838                    ENO_S_j = np.copy(S_j[i, :])
 839                    p_j_new = np.copy(p_j[i])
 840                    el_idx[j] = np.copy(el_idx[i])
 841                # the candidate stencil has the same polynomial degree: accept
  842                # the one with smallest avg Euclidean distance to the cell center
 843                elif p_j_new == p_j[i]:
 844
 845                    dist_i = np.linalg.norm(xi_centers[el_idx[i]] - xi_centers[j], 2, axis=1)
 846
 847                    dist_j = np.linalg.norm(xi_centers[el_idx[j]] - xi_centers[j], 2, axis=1)
 848
 849                    # if the new distance is smaller than the old one: accept
 850                    if np.sum(dist_i) < np.sum(dist_j):
 851                        ENO_S_j = np.copy(S_j[i, :])
 852                        el_idx[j] = np.copy(el_idx[i])
 853
 854        queue.put({'ENO_S_j': ENO_S_j, 'p_j_new': p_j_new, 'el_idx[j]': np.copy(el_idx[j])})
 855
 856    def sample_simplex(self, n_mc, xi_k_jl, check=False):
 857        """
 858        Use an analytical map from n_mc uniformly distributed points in the
 859        n_xi-dimensional hypercube, to uniformly distributed points in the
 860        target simplex described by the nodes xi_k_jl.
 861
 862        Derivation: Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016).
 863        Simplex-stochastic collocation method with improved scalability.
 864        Journal of Computational Physics, 310, 301-328.
 865
 866        Parameters
 867        ----------
 868        n_mc : int
 869            The number of Monte Carlo samples.
 870        xi_k_jl : array, shape (n_xi + 1, n_xi)
 871            The nodes of the target simplex.
 872        check : bool, optional
  873            Check whether the random samples all lie inside the
  874            target simplex. The default is False.
 875
 876        Returns
 877        -------
 878        P : array, shape (n_mc, n_xi)
 879            Uniformly distributed points inside the target simplex.
 880
 881        """
 882
 883        n_xi = self.n_xi
 884        P = np.zeros([n_mc, n_xi])
 885        for k in range(n_mc):
 886            # random points inside the hypercube
 887            r = np.random.rand(n_xi)
 888
  889            # the first term of the map is xi_k_j0
 890            sample = np.copy(xi_k_jl[0])
 891            for i in range(1, n_xi + 1):
 892                prod_r = 1.
 893                # compute the product of r-terms: prod(r_{n_xi-j+1}^{1/(n_xi-j+1)})
 894                for j in range(1, i + 1):
 895                    prod_r *= r[n_xi - j]**(1. / (n_xi - j + 1))
 896                # compute the ith term of the sum: prod_r*(\xi_i-\xi_{i-1})
 897                sample += prod_r * (xi_k_jl[i] - xi_k_jl[i - 1])
 898            P[k, :] = sample
 899
 900        # check if any of the samples are outside the simplex
 901        if check:
 902            outside_simplex = 0
 903            avg = np.sum(xi_k_jl, 0) / (n_xi + 1.)
 904            el = self.tri.find_simplex(avg)
 905            for i in range(n_mc):
 906                if self.tri.find_simplex(P[i, :]) != el:
 907                    outside_simplex += 1
 908            print('Number of samples outside target simplex = ' + str(outside_simplex))
 909
 910        return P
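
    # Illustrative note (not part of the original module): for n_xi = 2 the map
    # above reduces to
    #     xi = xi_k_j0 + sqrt(r_1) * (xi_k_j1 - xi_k_j0)
    #                  + sqrt(r_1) * r_0 * (xi_k_j2 - xi_k_j1),
    # with r_0, r_1 uniform on [0, 1]. The vertex weights are non-negative and
    # sum to one, so every sample is a convex combination of the simplex nodes.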
 911
 912    def sample_simplex_edge(self, simplex_idx, refined_edges):
 913        """
 914        Refine the longest edge of a simplex.
 915
  916        # TODO: this is fine for 2D, but does it make sense in higher dims?
 917
 918        Parameters
 919        ----------
 920        simplex_idx : int
 921            The index of the simplex.
 922        refined_edges : list
 923            Contains the pairs of the point indices, corresponding to edges that
 924            have been refined in the current iteration. Simplices share edges,
 925            and this list is used to prevent refining the same edge twice within
  926            the same iteration of the SSC algorithm.
 927
 928        Returns
 929        -------
 930        xi_new : array, shape (n_xi,)
 931            The newly added point (along the longest edge).
 932        refined_edges : list
 933            The updated refined_edges list.
 934        already_refined : bool
  935            True if the edge has already been refined.
 936
 937        """
 938
 939        # the point indices of the simplex selected for refinement
 940        simplex_point_idx = self.tri.simplices[simplex_idx]
 941        # the points of the simplex selected for refinement
 942        xi_k_jl = self.tri.points[simplex_point_idx]
 943
 944        # find the indices of all edges, i.e. the combination of all possible
 945        # 2 distinct elements from range(n_xi + 1)
 946        comb = list(combinations(range(self.n_xi + 1), 2))
 947
 948        # compute all edge lengths, select the largest
 949        edge_lengths = np.zeros(len(comb))
 950        for i in range(len(comb)):
 951            edge_lengths[i] = np.linalg.norm(xi_k_jl[comb[i][1], :] - xi_k_jl[comb[i][0], :])
 952        idx = np.argmax(edge_lengths)
 953
 954        # if there are 2 or more edge lengths that are the same, select the 1st
 955        if idx.size > 1:
 956            idx = idx[0]
 957
 958        # edge points
 959        xi_0 = xi_k_jl[comb[idx][0], :]
 960        xi_1 = xi_k_jl[comb[idx][1], :]
 961
 962        # simplices share edges, make sure it was not already refined during
 963        # this iteration
 964        current_edge = np.array([simplex_point_idx[comb[idx][0]],
 965                                 simplex_point_idx[comb[idx][1]]])
 966        already_refined = False
 967
 968        for edge in refined_edges:
 969            if set(edge) == set(current_edge):
 970                already_refined = True
 971
 972        if not already_refined:
 973            refined_edges.append(current_edge)
 974
 975        # place random sample at +/- 10% of edge center
 976        b = 0.6
 977        a = 0.4
 978        U = np.random.rand() * (b - a) + a
 979        xi_new = xi_0 + U * (xi_1 - xi_0)
 980
 981        return xi_new, refined_edges, already_refined
 982
 983    def compute_surplus_k(self, xi_k_jref, S_j, p_j, v, v_k_jref):
 984        """
  985        Compute the hierarchical surplus at xi_k_jref (the refinement location),
  986        defined as the difference between the (old) surrogate prediction and
  987        the new code sample at the refinement location.
 988
 989        Parameters
 990        ----------
 991        xi_k_jref : array, shape (n_xi,)
 992            The refinement location.
 993        S_j : array, shape (n_e, n_s)
 994            The indices of all nearest neighbours points of each simplex j=1,..,n_e,
 995            ordered from closest to the neighbour that furthest away. The first
 996            n_xi + 1 indeces belong to the j-th simplex itself.
 997        p_j : array, shape (n_e,)
 998            The polynomial order of each simplex element.
 999        v : array, shape (N + 1,)
1000            The (scalar) code outputs. #TODO:modify when vectors are allowed
1001        v_k_jref : float #TODO:modify when vectors are allowed
 1002            The code sample at the refinement location.
1003
1004        Returns
1005        -------
1006        surplus : float #TODO:modify when vectors are allowed
 1007            The hierarchical surplus.
1008
1009        """
1010
1011        w_k_jref = self.surrogate(xi_k_jref, S_j, p_j, v)
1012
 1013        # compute the hierarchical surplus between the old interpolation and the new v value
1014        surplus = w_k_jref - v_k_jref
1015
1016        return surplus
1017
1018    def surrogate(self, xi, S_j, p_j, v):
1019        """
1020        Evaluate the SSC surrogate at xi.
1021
1022        Parameters
1023        ----------
1024        xi : array, shape (n_xi,)
1025            The location in the input space at which to evaluate the surrogate.
1026        S_j : array, shape (n_e, n_s)
1027            The indices of all nearest neighbours points of each simplex j=1,..,n_e,
1028            ordered from closest to the neighbour that furthest away. The first
1029            n_xi + 1 indeces belong to the j-th simplex itself.
1030        p_j : array, shape (n_e,)
1031            The polynomial order of each simplex element.
1032        v : array, shape (N + 1,)
1033            The (scalar) code outputs. #TODO:modify when vectors are allowed
1034
1035        Returns
1036        -------
 1037        w_j : float
 1038            The surrogate prediction at xi.
1039        """
1040        n_xi = self.n_xi
1041        idx = int(self.tri.find_simplex(xi))
1042
1043        # the number of points in S_j
1044        Np1_j = int(factorial(n_xi + p_j[idx]) / (factorial(n_xi) * factorial(p_j[idx])))
1045        # the vertices of the stencil S_j
1046        xi_Sj = self.tri.points[S_j[idx, 0:Np1_j]]
1047        # find the corresponding indices of v
1048        v_Sj = v[S_j[idx, 0:Np1_j], :]
1049        # compute sample matrix
1050        Psi = self.compute_Psi(xi_Sj, p_j[idx])
1051
1052    #    #check if Psi is well poised, at this point all stencils should be well-poised
1053    #    det_Psi = np.linalg.det(Psi)
1054    #    if det_Psi == 0:
1055    #        print 'Error, det(Psi)=0 in compute_surplus_k() method, should not be possible'
1056
1057        # compute the coefficients c_jl
1058        # c_jl = np.linalg.solve(Psi, v_Sj)
1059        c_jl = DAFSILAS(Psi, v_Sj, False)
1060
1061        # compute the interpolation on the old grid
1062        w_j = self.w_j(xi, c_jl, p_j[idx])
1063
1064        return w_j
1065
1066    def compute_eps_bar_j(self, p_j, prob_j):
1067        """
 1068        Compute the geometric refinement measure eps_bar_j for all elements.
 1069        Elements with the largest values will be selected for refinement.
1070
1071        Parameters
1072        ----------
1073        p_j : array, shape (n_e,)
1074            The polynomial order of each simplex element.
1075        prob_j : array, shape (n_e,)
1076            The probability of each simplex element.
1077
1078        Returns
1079        -------
1080        eps_bar_j : array, shape (n_e,)
1081            The geometric refinement measure.
1082        vol_j : array, shape (n_e,)
1083            The volume of each simplex element.
1084
1085        """
1086
1087        n_e = self.tri.nsimplex
1088        eps_bar_j = np.zeros(n_e)
1089        n_xi = self.tri.ndim
1090        vol_j = self.compute_vol()
1091        vol = np.sum(vol_j)
1092
1093        for j in range(n_e):
1094            O_j = (p_j[j] + 1.0) / n_xi
1095            eps_bar_j[j] = prob_j[j] * (vol_j[j] / vol) ** (2 * O_j)
1096
1097        return eps_bar_j, vol_j
1098
1099    def find_boundary_simplices(self):
1100        """
 1101        Find the simplices that are on the boundary of the hypercube.
1102
1103        Returns
1104        -------
1105        idx : array
1106            Indices of the boundary simplices.
1107
1108        """
1109
1110        idx = (self.tri.neighbors == -1).nonzero()[0]
1111        return idx
1112
1113    def update_Delaunay(self, new_points):
1114        """
1115        Update the Delaunay triangulation with P new points.
1116
1117        Parameters
1118        ----------
1119        new_points : array, shape (P, n_xi)
1120            P new n_xi-dimensional points.
1121
1122        Returns
1123        -------
1124        None.
1125
1126        """
1127
1128        xi_k_jl = np.append(self.tri.points, new_points, 0)
1129        if self.n_xi > 1:
1130            self.tri = Delaunay(xi_k_jl)
1131        else:
1132            self.tri = Tri1D(xi_k_jl)
1133
1134        self._n_samples = self.tri.npoints
1135
1136    def get_Delaunay(self):
1137        """
1138        Return the SciPy Delaunay triangulation.
1139        """
1140        return self.tri
1141
1142    def is_finite(self):
1143        return True
1144
1145    @property
1146    def n_samples(self):
1147        """
 1148        Returns the number of code samples.
1149        """
1150        return self._n_samples
1151
1152    @property
1153    def n_dimensions(self):
1154        """
1155        Returns the number of uncertain random variables
1156        """
1157        return self.n_xi
1158
1159    @property
1160    def n_elements(self):
1161        """
1162        Returns the number of simplex elements
1163        """
1164        return self.tri.nsimplex
1165
1166    def __next__(self):
1167        if self.count < self._n_samples:
1168            run_dict = {}
1169            i_par = 0
1170            for param_name in self.vary.get_keys():
1171                run_dict[param_name] = self.tri.points[self.count][i_par]
1172                i_par += 1
1173            self.count += 1
1174            return run_dict
1175        else:
1176            raise StopIteration
1177
1178    def save_state(self, filename):
1179        """
1180        Save the state of the sampler to a pickle file.
1181
1182        Parameters
1183        ----------
1184        filename : string
1185            File name.
1186
1187        Returns
1188        -------
1189        None.
1190
1191        """
1192        print("Saving sampler state to %s" % filename)
1193        with open(filename, 'wb') as fp:
1194            pickle.dump(self.__dict__, fp)
1195
1196    def load_state(self, filename):
1197        """
1198        Load the state of the sampler from a pickle file.
1199
1200        Parameters
1201        ----------
1202        filename : string
1203            File name.
1204
1205        Returns
1206        -------
1207        None.
1208
1209        """
1210        print("Loading sampler state from %s" % filename)
1211        with open(filename, 'rb') as fp:
1212            self.__dict__ = pickle.load(fp)
1213
1214
1215def DAFSILAS(A, b, print_message=False):
1216    """
1217    Direct Algorithm For Solving Ill-conditioned Linear Algebraic Systems,
1218
 1219    Solves the linear system Ax = b when A is ill-conditioned.
1220
1221    Solves for x in the non-null subspace of the solution as described in
1222    the reference below. This method utilizes Gauss–Jordan elimination with
1223    complete pivoting to identify the null subspace of a (almost) singular
 1224    complete pivoting to identify the null subspace of an (almost) singular
1225
 1226    Xue, X. J., Kozaczek, K. J., Kurtz, S. K., & Kurtz, D. S. (2000). A direct
1227    algorithm for solving ill-conditioned linear algebraic systems.
1228    Adv. X-Ray Anal, 42.
1229    """
1230
1231    # The matrix A' as defined in Xue
1232    b = b.reshape(b.size)
1233    Ap = np.zeros([A.shape[0], 2 * A.shape[0] + 1])
1234    Ap[:, 0:A.shape[0]] = np.copy(A)
1235    Ap[:, A.shape[0]] = np.copy(b)
1236    Ap[:, A.shape[0] + 1:] = np.eye(A.shape[0])
1237    n, m = Ap.shape
1238
1239    # permutation matrix
1240    P = np.eye(n)
1241
1242    # the ill-condition control parameter
1243    # epsilon = np.finfo(np.float64).eps
1244    epsilon = 10**-14
1245
1246    for i in range(n - 1):
1247        # calc sub matrix Ai
1248        Ai = np.copy(Ap[i:n, i:n])
1249
1250        # find the complete pivot in sub matrix Ai
1251        api = np.max(np.abs(Ai))
1252
1253        if api == 0:
1254            break
1255
1256        # find the location of the complete pivot in Ai
1257        row, col = np.unravel_index(np.abs(Ai).argmax(), Ai.shape)
1258
1259        # interchange rows and columns to exchange position of api and aii
1260        tmp = np.copy(Ap[i, :])
1261        Ap[i, :] = np.copy(Ap[i + row, :])
1262        Ap[i + row, :] = tmp
1263
1264        tmp = np.copy(Ap[:, i])
1265        Ap[:, i] = np.copy(Ap[:, i + col])
1266        Ap[:, i + col] = tmp
1267
1268        # Also interchange the entries in b
1269        # tmp = A[i, n]
1270        # A[i, n] = A[i+col, n]Ap[i+1+j, i:m]
1271        # A[i+col, n] = tmp
1272
 1273        # keep track of column switches via a series of permutation matrices P =
1274        # P1*P2*...*Pi*...*Pn ==> at each iteration x = P*xi
1275        Pi = np.eye(n)
1276        tmp = np.copy(Pi[i, :])
1277        Pi[i, :] = np.copy(Pi[i + col, :])
1278        Pi[i + col, :] = tmp
1279        P = np.dot(P, Pi)
1280
1281        # Calculate multipliers
1282        if Ai[row, col] < 0:
1283            api = api * -1.  # sign is important in multipliers
1284
1285        M = Ap[i + 1:n, i] / np.double(api)
1286
1287        # start row reduction
1288        for j in range(M.size):
1289            Ap[i + 1 + j, i:m] = Ap[i + 1 + j, i:m] - M[j] * Ap[i, i:m]
1290
1291    # the largest complete pivot
1292    eta = np.max(np.abs(np.diag(Ap))) * 1.0
 1293    # test if |aii / eta| <= epsilon
1294    idx = (np.abs(np.diag(Ap) / eta) <= epsilon).nonzero()[0]
1295
1296    # Perform zeroing operation if necessary
1297    if idx.size > 0:
1298        nullity = idx.size
1299        Arange = Ap[0:n - nullity, 0:n - nullity]
1300        if print_message:
1301            print('Matrix is ill-conditioned, performing zeroing operation')
1302            print('nullity = ' + str(nullity) + ', rank = ' + str(n - nullity) +
1303                  ', cond(A) = ' + str(np.linalg.cond(A)) +
1304                  ', cond(Arange) = ' + str(np.linalg.cond(Arange)))
1305
1306        # ajj = 1, aij = 0 for j = i...n
1307        Ap[idx[0]:n, idx[0]:n] = np.eye(nullity)
1308        # bj = 0
1309        Ap[idx[0]:n, n] = 0
1310        # ejj = 1, eij = 0
1311        Ap[idx[0]:n, idx[0] + n + 1:m] = np.eye(nullity)
1312
1313    # Back substitution
1314    for i in range(n, 0, -1):
1315        Ai = Ap[0:i, :]
1316
1317        # Calculate multipliers
1318        M = Ai[0:i - 1, i - 1] / np.double(Ai[i - 1, i - 1])
1319
1320        # start back substitution
1321        for j in range(M.size):
1322            Ai[j, :] = Ai[j, :] - M[j] * Ai[i - 1, :]
1323
1324        # store result in A
1325        Ap[0:i, :] = Ai
1326
1327    # turn A into eye(n)
1328    D = (1. / np.diag(Ap)).reshape([n, 1])
1329    Ap = np.multiply(D, Ap)
1330    # Calculated solution
1331    return np.dot(P, Ap[:, n]).reshape([n, 1])
1332
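# A quick sanity check one could run (not part of the original module): for a
# well-conditioned system DAFSILAS is expected to reproduce the standard dense
# solve. The helper name and the test system below are illustrative only.
def _dafsilas_sanity_check():
    A = np.array([[4.0, 1.0],
                  [2.0, 3.0]])
    b = np.array([1.0, 2.0])
    x = DAFSILAS(A, b)                      # returned with shape (2, 1)
    assert np.allclose(x.flatten(), np.linalg.solve(A, b))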
1333
1334class Tri1D:
1335    """
1336    1D "triangulation" that mimics the following SciPy Delaunay properties:
1337        * ndim
1338        * points
1339        * npoints
1340        * nsimplex
1341        * simplices
 1342        * neighbors
1343        * the find_simplex subroutine
1344    """
1345
1346    def __init__(self, points):
1347        """
 1348        Create a 1D Triangulation object that can be used by the SSCSampler
1349        in the same way as the SciPy Delaunay triangulation.
1350
1351        Parameters
1352        ----------
1353        points : array, shape (n_s, )
1354            A 1D array of nodes.
1355
1356        Returns
1357        -------
1358        None.
1359
1360        """
1361        self.ndim = 1
1362        self.points = points
1363        self.npoints = points.size
1364        self.nsimplex = points.size - 1
1365
1366        points_sorted = np.sort(self.points.reshape(self.npoints))
1367        self.simplices = np.zeros([self.nsimplex, 2])
1368        for i in range(self.nsimplex):
1369            self.simplices[i, 0] = (self.points == points_sorted[i]).nonzero()[0]
1370            self.simplices[i, 1] = (self.points == points_sorted[i + 1]).nonzero()[0]
1371        self.simplices = self.simplices.astype('int')
1372
1373        self.neighbors = np.zeros([self.nsimplex, 2])
1374        for i in range(self.nsimplex):
1375            self.neighbors[i, 0] = i - 1
1376            self.neighbors[i, 1] = i + 1
1377        self.neighbors[-1, 1] = -1
1378        self.neighbors = self.neighbors.astype('int')
1379
1380    def find_simplex(self, xi):
1381        """
1382        Find the simplex indices of nodes xi
1383
1384        Parameters
1385        ----------
1386        xi : array, shape (S,)
 1387            An array of S 1D points.
1388
1389        Returns
1390        -------
1391        array
1392            An array containing the simplex indices of points xi.
1393
1394        """
1395        Idx = np.zeros(xi.shape[0])
1396        points_sorted = np.sort(self.points.reshape(self.npoints))
1397
1398        for i in range(xi.shape[0]):
1399            idx = (points_sorted < xi[i]).argmin() - 1
 1400            # if xi[i] <= the leftmost node, idx will be -1
1401            if idx == -1:
1402                Idx[i] = 0
1403            else:
1404                Idx[i] = idx
1405
1406        return Idx.astype('int')
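
# A small illustration of the 1D fallback triangulation (not part of the
# original module; the node values and helper name are illustrative only).
# Four sorted nodes give three "simplices" (intervals), and find_simplex
# returns the interval index of each query point.
def _tri1d_example():
    nodes = np.array([0.0, 0.3, 0.7, 1.0])
    tri1d = Tri1D(nodes)
    print(tri1d.nsimplex)                                  # 3 intervals
    print(tri1d.find_simplex(np.array([0.1, 0.5, 0.9])))   # [0 1 2]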
class SSCSampler(easyvvuq.sampling.base.BaseSamplingElement):
  54class SSCSampler(BaseSamplingElement, sampler_name="ssc_sampler"):
  55    """
  56    Simplex Stochastic Collocation sampler
  57    """
  58
  59    def __init__(self, vary=None, max_polynomial_order=4):
  60        """
  61        Create an SSC sampler object.
  62
  63        Parameters
  64        ----------
  65        vary: dict or None
  66            keys = parameters to be sampled, values = distributions.
  67        max_polynomial_order : int, optional
  68            The maximum polynomial order, default is 4.
  69        Returns
  70        -------
  71        None.
  72
  73        """
  74        # number of random inputs
  75        self.n_xi = len(vary)
  76        # vary dictionary of Chaospy input distribution
  77        self.vary = Vary(vary)
  78        # initial Delaunay triangulation
  79        self.tri = self.init_grid()
  80        # code sample counter
  81        self.count = 0
  82        self._n_samples = self.tri.points.shape[0]
  83        self.set_pmax_cutoff(max_polynomial_order)
  84
  85    def init_grid(self):
  86        """
  87        Create the initial n_xi-dimensional Delaunay discretization
  88
  89
  90        Returns
  91        -------
  92        tri : scipy.spatial.qhull.Delaunay
  93            Initial triagulation of 2 ** n_xi + 1 points.
  94
  95        """
  96
  97        # create inital hypercube points through n_xi dimensional
  98        # carthesian product of the lower and upper limits of the
  99        # chasopy input distributions
 100        corners = [[param.lower[0], param.upper[0]] for param in self.vary.get_values()]
 101        self.corners = corners
 102        xi_k_jl = np.array(list(product(*corners)))
 103
 104        # add a point in the middle of the hypercube
 105        xi_k_jl = np.append(xi_k_jl, np.mean(xi_k_jl, axis=0).reshape([1, -1]), 0)
 106
 107        if self.n_xi > 1:
 108            # Create initial Delaunay discretization
 109            """
 110            NOTE: FOUND AN ERROR IN THE 'incremental' OPTION. IN A 3D CASE IT USES
 111            4 VERTICES TO MAKE A SQUARE IN A PLANE, NOT A 3D SIMPLEX. TURNED IT OFF.
 112            CONSEQUENCE: I NEED TO RE-MAKE A NEW 'Delaunay' OBJECT EVERYTIME THE GRID
 113            IS REFINED.
 114            """
 115            # tri = Delaunay(xi_k_jl, incremental=True)
 116            tri = Delaunay(xi_k_jl)
 117
 118        else:
 119            tri = Tri1D(xi_k_jl)
 120
 121        return tri
 122
 123    def find_pmax(self, n_s):
 124        """
 125        Finds the maximum polynomial stencil order that can be sustained
 126        given the current number of code evaluations. The stencil size
 127        required for polynonial order p is given by;
 128
 129        stencil size = (n_xi + p)! / (n_xi!p!)
 130
 131        where n_xi is the number of random inputs. This formula is used
 132        to find p.
 133
 134        Parameters
 135        ----------
 136        n_xi : int
 137            Number of random parameters.
 138        n_s : int
 139            Number of code samples.
 140
 141        Returns
 142        -------
 143        int
 144            The highest polynomial order that can be sustained given
 145            the number of code evaluations.
 146
 147        """
 148        p = 1
 149        stencil_size = factorial(self.n_xi + p) / (factorial(self.n_xi) * factorial(p))
 150        while stencil_size <= n_s:
 151            p += 1
 152            stencil_size = factorial(self.n_xi + p) / (factorial(self.n_xi) * factorial(p))
 153
 154        return p - 1
 155
 156    def set_pmax_cutoff(self, pmax_cutoff):
 157        """
 158        Set the maximum allowed polynomial value of the surrogate.
 159
 160        Parameters
 161        ----------
 162        p_max_cutoff : int
 163            The max polynomial order.
 164
 165        Returns
 166        -------
 167        None.
 168
 169        """
 170
 171        self.pmax_cutoff = pmax_cutoff
 172        self.i_norm_le_pj = self.compute_i_norm_le_pj(pmax_cutoff)
 173
 174    def compute_vol(self):
 175        """
 176        Use analytic formula to compute the volume of all simplices.
 177
 178        https://en.wikipedia.org/wiki/Simplex#Volume
 179
 180        Returns
 181        -------
 182        vols : array, size (n_e,)
 183            The volumes of the n_e simplices.
 184
 185        """
 186        n_e = self.tri.nsimplex
 187        vols = np.zeros(n_e)
 188        for j in range(n_e):
 189            xi_k_jl = self.tri.points[self.tri.simplices[j]]
 190            D = np.zeros([self.n_xi, self.n_xi])
 191
 192            for i in range(self.n_xi):
 193                D[:, i] = xi_k_jl[i, :] - xi_k_jl[-1, :]
 194
 195            det_D = np.linalg.det(D)
 196            if det_D == 0:
 197                print('Warning: zero determinant in volume calculation.')
 198            vols[j] = 1. / factorial(self.n_xi) * np.abs(det_D)
 199
 200        return vols
 201
 202    def compute_xi_center_j(self):
 203        """
 204        Compute the center of all simplex elements.
 205
 206        Returns
 207        -------
 208        xi_center_j : array, size (n_e,)
 209            The cell centers of the n_e simplices.
 210
 211        """
 212        # number of simplex elements
 213        n_e = self.tri.nsimplex
 214        xi_center_j = np.zeros([n_e, self.n_xi])
 215        for j in range(n_e):
 216            xi_center_j[j, :] = 1 / (self.n_xi + 1) * \
 217                np.sum(self.tri.points[self.tri.simplices[j]], 0)
 218
 219        return xi_center_j
 220
 221    def compute_sub_simplex_vertices(self, simplex_idx):
 222        """
 223        Compute the vertices of the sub-simplex. The sub simplex is contained
 224        in the larger simplex. The larger simplex is refined by randomly
 225        placing a sample within the sub simplex.
 226
 227        Parameters
 228        ----------
 229        simplex_idx : int
 230            The index of the simplex
 231
 232        Returns
 233        -------
 234        xi_sub_jl : array, size (n_xi + 1, n_xi)
 235            The vertices of the sub simplex.
 236
 237        """
 238        xi_k_jl = self.tri.points[self.tri.simplices[simplex_idx]]
 239        xi_sub_jl = np.zeros([self.n_xi + 1, self.n_xi])
 240        for l in range(self.n_xi + 1):
 241            idx = np.delete(range(self.n_xi + 1), l)
 242            xi_sub_jl[l, :] = np.sum(xi_k_jl[idx], 0)
 243        xi_sub_jl = xi_sub_jl / self.n_xi
 244
 245        return xi_sub_jl
 246
 247    def sample_inputs(self, n_mc):
 248        """
 249        Draw n_mc random values from the input distribution.
 250
 251        Parameters
 252        ----------
 253        n_mc : int
 254            The number of Monte Carlo samples.
 255
 256        Returns
 257        -------
 258        xi : array, shape(n_mc, n_xi)
 259            n_mc random samples from the n_xi input distributions.
 260
 261        """
 262        xi = np.zeros([n_mc, self.n_xi])
 263        for i, param in enumerate(self.vary.get_values()):
 264            xi[:, i] = param.sample(n_mc)
 265        return xi
 266
 267    def compute_probability(self):
 268        """
 269        Compute the probability Omega_j for all simplex elements;
 270
 271        Omega_j = int p(xi) dxi,
 272
 273        with integration separately over each simplex using Monte Carlo
 274        sampling.
 275
 276        Returns
 277        -------
 278        prob_j : array, size (n_e,)
 279            The probabilities of each simplex.
 280
 281        """
 282        n_e = self.tri.nsimplex
 283
 284        print('Computing simplex probabilities...')
 285        prob_j = np.zeros(n_e)
 286
 287        # number of MC samples
 288        n_mc = np.min([10 ** (self.n_xi + 3), 10 ** 7])
 289
 290        xi = self.sample_inputs(n_mc)
 291
 292        # find the simplex indices of xi
 293        idx = self.tri.find_simplex(xi)
 294
 295        # compute simplex probabilities
 296        for i in range(n_mc):
 297            prob_j[idx[i]] += 1
 298
 299        prob_j = prob_j / np.double(n_mc)
 300        print('done, used ' + str(n_mc) + ' samples.')
 301        zero_idx = (prob_j == 0).nonzero()[0].size
 302        print('% of simplices with no samples = ' + str((100.0 * zero_idx) / n_e))
 303
 304        return prob_j
 305
 306    def compute_i_norm_le_pj(self, p_max):
 307        """
 308        Compute the multi-index set {i | |i| = i_1 + ... + i_{n_xi} <= p},
 309        for p = 1,...,p_max
 310
 311        Parameters
 312        ----------
 313        p_max : int
 314            The max polynomial order of the index set.
 315
 316        Returns
 317        -------
 318        i_norm_le_pj : dict
 319            The Np + 1 multi indices per polynomial order.
 320
 321        """
 322        i_norm_le_pj = {}
 323
 324        for p in range(1, p_max + 1):
 325            # max(i_1, i_2, ..., i_{n_xi}) <= p
 326            multi_idx = np.array(list(product(range(p + 1), repeat=self.n_xi)))
 327
 328            # i_1 + ... + i_{n_xi} <= p
 329            idx = np.where(np.sum(multi_idx, axis=1) <= p)[0]
 330            multi_idx = multi_idx[idx]
 331
 332            i_norm_le_pj[p] = multi_idx
 333
 334        return i_norm_le_pj
 335
 336    def compute_Psi(self, xi_Sj, pmax):
 337        """
 338        Compute the Vandermonde matrix Psi, given N + 1 points xi from the
 339        j-th interpolation stencil, and a multi-index set of polynomial
 340        orders |i| = i_1 + ... + i_n_xi <= polynomial order.
 341
 342        Parameters
 343        ----------
 344        xi_Sj : array, shape (N + 1, n_xi)
 345            The simplex n_xi-dimensional points of the j-th interpolation
 346            stencil S_j.
 347        pmax : int
 348            The max polynomial order of the local stencil.
 349
 350        Returns
 351        -------
 352        Psi : array, shape (N + 1, N + 1)
 353            The Vandermonde interpolation matrix consisting of monomials
 354            xi_1 ** i_1 * ... * xi_{n_xi} ** i_{n_xi}.
 355
 356        """
 357        Np1 = xi_Sj.shape[0]
 358
 359        Psi = np.ones([Np1, Np1])
 360
 361        for row in range(Np1):
 362            for col in range(Np1):
 363                for j in range(self.n_xi):
 364                    # compute monomial xi_1 ** i_1 * ... * xi_{n_xi} ** i_{n_xi}
 365                    Psi[row, col] *= xi_Sj[row][j] ** self.i_norm_le_pj[pmax][col][j]
 366
 367        return Psi
 368
 369    def w_j(self, xi, c_jl, pmax):
 370        """
 371        Compute the surrogate local interpolation at point xi.
 372
 373        # TODO: right now this assumes a scalar output. Modify the
 374        code for vector-valued outputs.
 375
 376        Parameters
 377        ----------
 378        xi : array, shape (n_xi,)
 379            A point inside the stochastic input domain.
 380        c_jl : array, shape (N + 1,)
 381            The interpolation coefficients of the j-th stencil, with
 382            l = 0, ..., N.
 383        pmax : int
 384            The max polynomial order of the local stencil.
 385
 386        Returns
 387        -------
 388        w_j_at_xi : float
 389            The surrogate prediction at xi.
 390
 391        """
 392
 393        # number of points in the j-th interpolation stencil
 394        Np1 = c_jl.shape[0]
 395
 396        # vector of interpolation monomials
 397        Psi_xi = np.ones([Np1, 1])
 398        for i in range(Np1):
 399            for j in range(self.n_xi):
 400                # raise xi[j] to the power of the corresponding multi-index entry
 401                Psi_xi[i] *= xi[j] ** self.i_norm_le_pj[pmax][i][j]
 402
 403        # surrogate prediction
 404        w_j_at_xi = np.sum(c_jl * Psi_xi)
 405
 406        return w_j_at_xi
 407
 408    def check_LEC(self, p_j, v, S_j, n_mc, max_jobs=4):
 409        """
 410        Check the Local Extremum Conserving property of all simplex elements.
 411
 412        Parameters
 413        ----------
 414        p_j : array, shape (n_e,)
 415            The polynomial order of each element.
 416        v : array, shape (N + 1,)
 417            The (scalar) code outputs. #TODO:modify when vectors are allowed
 418        S_j : array, shape (n_e, n_s)
 419            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 420            ordered from the closest to the neighbour that is furthest away. The first
 421            n_xi + 1 indices belong to the j-th simplex itself.
 422        n_mc : int
 423            The number of Monte Carlo samples to use in checking the LEC
 424            conditions.
 425        max_jobs : int
 426            The number of LEC checks (one per element) that can be run in
 427            parallel.
 428
 429        Returns
 430        -------
 431        dict
 432            The updated polynomial orders p_j, the stencils S_j and the element indices el_idx of each stencil.
 433        """
 434
 435        n_e = self.tri.nsimplex
 436        print('Checking LEC condition of ' + str(n_e) + ' stencils...')
 437
 438        # multi-processing pool which will hold the check_LEC_j jobs
 439        jobs = []
 440        queues = []
 441
 442        running_jobs = 0
 443        j = 0
 444        n_jobs = n_e
 445
 446        el_idx = {}
 447
 448        # r = np.array(list(product([0.25, 0.75], repeat=self.n_xi)))
 449
 450        while n_jobs > 0:
 451
 452            # check how many processes are still alive
 453            for p in jobs:
 454                if p.is_alive() == False:
 455                    jobs.remove(p)
 456
 457            # number of processes still running
 458            running_jobs = len(jobs)
 459
 460            # re-fill jobs with max max_jobs processes
 461            while running_jobs < max_jobs and n_jobs > 0:
 462                queue = mp.Queue()
 463                prcs = mp.Process(target=self.check_LEC_j,
 464                                  args=(p_j[j], v, S_j[j, :], n_mc, queue))
 465                prcs.start()
 466                running_jobs += 1
 467                n_jobs -= 1
 468                j += 1
 469                jobs.append(prcs)
 470                queues.append(queue)
 471
 472        # retrieve results
 473        for j in range(n_e):
 474            # jobs[j].join()
 475            tmp = queues[j].get()
 476            p_j[j] = tmp['p_j[j]']
 477            el_idx[j] = tmp['el_idx_j']
 478
 479    #    singular_idx = (p_j == -99).nonzero()[0]
 480    #
 481    #    jobs = []
 482    #    queues = []
 483    #    n_jobs = singular_idx.size
 484    #
 485    #    if singular_idx.size > 0:
 486    #        nnS_j = compute_stencil_j(tri)
 487    #
 488    #    k = 0
 489    #    while n_jobs > 0:
 490    #
 491    #        #check how many processes are still alive
 492    #        for p in jobs:
 493    #            if p.is_alive() == False:
 494    #                jobs.remove(p)
 495    #
 496    #        #number of processes still running
 497    #        running_jobs = len(jobs)
 498    #
 499    #        while running_jobs < max_jobs and n_jobs > 0:
 500    #            #print 'Re-computing S_j for j = ' + str(j)
 501    #            queue = mp.Queue()
 502    #            #S_j[j,:], p_j[j], el_idx[j] = \
 503    #            j = singular_idx[k]
 504    #            k += 1
 505    #            prcs = mp.Process(target=non_singular_stencil_j, \
 506    #            args = (tri, p_j, S_j, j, nnS_j, i_norm_le_pj, el_idx, queue,))
 507    #            prcs.start()
 508    #            jobs.append(prcs)
 509    #            queues.append(queue)
 510    #
 511    #            running_jobs += 1
 512    #            n_jobs -= 1
 513    #
 514    #    #retrieve results
 515    #    idx = 0
 516    #    for j in singular_idx:
 517    #        #jobs[idx].join()
 518    #        tmp = queues[idx].get()
 519    #        S_j[j,:] = tmp['S_j[j,:]']
 520    #        p_j[j] = tmp['p_j[j]']
 521    #        el_idx[j] = tmp['el_idx[j]']
 522    #        idx += 1
 523
 524        print('done.')
 525        return {'p_j': p_j, 'S_j': S_j, 'el_idx': el_idx}
 526
 527    def find_simplices(self, S_j):
 528        """
 529        Find the simplex element indices that are in point stencil S_j.
 530
 531        Parameters
 532        ----------
 533        S_j : array, shape (N + 1,)
 534            The interpolation stencil of the j-th simplex element, expressed
 535            as the indices of the simplex points.
 536
 537        Returns
 538        -------
 539        idx : array
 540            The element indices of stencil S_j.
 541
 542        """
 543        n_e = self.tri.nsimplex
 544        # if the overlap between element i and S_j = n_xi + 1, element i
 545        # is in S_j
 546        idx = [np.in1d(self.tri.simplices[i], S_j).nonzero()[0].size for i in range(n_e)]
 547        idx = (np.array(idx) == self.n_xi + 1).nonzero()[0]
 548
 549        return idx
 550
 551    def check_LEC_j(self, p_j, v, S_j, n_mc, queue):
 552        """
 553        Check the LEC condition of the j-th interpolation stencil.
 554
 555        Parameters
 556        ----------
 557        p_j : int
 558            The polynomial order of the j-th stencil.
 559        v : array
 560            The code samples.
 561        S_j : array, shape (N + 1,)
 562            The interpolation stencil of the j-th simplex element, expressed
 563            as the indices of the simplex points.
 564        n_mc : int
 565            The number of Monte Carlo samples to use in checking the LEC
 566            conditions.
 567        queue : multiprocessing queue object
 568            Used to store the results.
 569
 570        Returns
 571        -------
 572        None. The results (the polynomial order and the element indices) are
 573        stored in the queue.
 574
 575        """
 576        n_xi = self.n_xi
 577        # n_e = self.tri.nsimplex
 578        N = v[0, :].size
 579
 580        # the number of points in S_j
 581        Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
 582        # select the vertices of stencil S_j
 583        xi_Sj = self.tri.points[S_j[0:Np1_j]]
 584        # find the corresponding indices of v
 585        v_Sj = v[S_j[0:Np1_j], :]
 586
 587        # the element indices of the simplices in stencil S_j
 588        el_idx_j = self.find_simplices(S_j[0:Np1_j])
 589
 590        # compute sample matrix
 591        Psi = self.compute_Psi(xi_Sj, p_j)
 592
 593        # check if Psi is well poised
 594        # det_Psi = np.linalg.det(Psi)
 595        # if det_Psi == 0:
 596        #    #print 'Warning: determinant Psi is zero.'
 597        #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
 598        #    #return an error code
 599        #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
 600
 601        # compute the coefficients c_jl
 602        # c_jl = np.linalg.solve(Psi, v_Sj)
 603        c_jl = DAFSILAS(Psi, v_Sj)
 604
 605        # check the LEC condition for all simplices in the STENCIL S_j
 606        k = 0
 607        LEC_checked = False
 608
 609        while LEC_checked == False and p_j != 1:
 610            # sample the simplices in stencil S_j
 611            xi_samples = self.sample_simplex(n_mc, self.tri.points[self.tri.simplices[el_idx_j[k]]])
 612
 613            # function values at the edges of the k-th simplex in S_j
 614            v_min = np.min(v[self.tri.simplices[el_idx_j[k]]])
 615            v_max = np.max(v[self.tri.simplices[el_idx_j[k]]])
 616
 617            # compute interpolation values at MC sample points
 618            w_j_at_xi = np.zeros([n_mc, N])
 619            for i in range(n_mc):
 620                w_j_at_xi[i, :] = self.w_j(xi_samples[i], c_jl, p_j)
 621
 622            k += 1
 623
 624            # if LEC is violated in any of the simplices
 625            # TODO: make work for vector-valued outputs
 626            eps = 0
 627            if (w_j_at_xi.min() <= v_min - eps or w_j_at_xi.max() >= v_max + eps) and p_j > 1:
 628
 629                p_j -= 1
 630
 631                # the new number of points in S_j
 632                Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
 633                # select the new vertices of stencil S_j
 634                xi_Sj = self.tri.points[S_j[0:Np1_j]]
 635                # find the new corresponding indices of v
 636                v_Sj = v[S_j[0:Np1_j], :]
 637                # the new element indices of the simplices in stencil S_j
 638                el_idx_j = self.find_simplices(S_j[0:Np1_j])
 639                k = 0
 640
 641                if p_j == 1:
 642                    return queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})
 643
 644                # recompute sample matrix
 645                Psi = self.compute_Psi(xi_Sj, p_j)
 646
 647                # check if Psi is well poised
 648                # det_Psi = np.linalg.det(Psi)
 649                # if det_Psi == 0:
 650                #    #print 'Warning: determinant Psi is zero.'
 651                #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
 652                #    #return an error code
 653                #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
 654
 655                # compute the coefficients c_jl
 656                # c_jl = np.linalg.solve(Psi, v_Sj)
 657                c_jl = DAFSILAS(Psi, v_Sj, False)
 658
 659            if k == el_idx_j.size:
 660                LEC_checked = True
 661
 662        queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})
 663
 664    def compute_stencil_j(self):
 665        """
 666        Compute the nearest neighbor stencils of all simplex elements. The
 667        distances to all points are measured with respect to the cell center
 668        of each element.
 669
 670        Returns
 671        -------
 672        S_j : array, shape (n_e, n_s)
 673            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 674            ordered from the closest to the neighbour that is furthest away. The first
 675            n_xi + 1 indices belong to the j-th simplex itself.
 676
 677        """
 678
 679        n_e = self.tri.nsimplex
 680        n_s = self.tri.npoints
 681        n_xi = self.n_xi
 682        S_j = np.zeros([n_e, n_s])
 683        # compute the center of each element
 684        xi_center_j = self.compute_xi_center_j()
 685
 686        for j in range(n_e):
 687            # the number of points in S_j
 688            # Np1_j = factorial(n_xi + p_j[j])/(factorial(n_xi)*factorial(p_j[j]))
 689            # k = {1,...,n_s}\{k_j0, ..., k_jn_xi}
 690            idx = np.delete(range(n_s), self.tri.simplices[j])
 691            # store the vertex indices of the element itself
 692            S_j[j, 0:n_xi + 1] = np.copy(self.tri.simplices[j])
 693            # loop over the set k
 694            dist = np.zeros(n_s - n_xi - 1)
 695            for i in range(n_s - n_xi - 1):
 696                # ||xi_k - xi_center_j||
 697                dist[i] = np.linalg.norm(self.tri.points[idx[i]] - xi_center_j[j, :])
 698            # store the indices of the points, sorted based on distance wrt
 699            # the center of simplex j (all remaining point indices are stored).
 700            S_j[j, n_xi + 1:] = idx[np.argsort(dist)]
 701
 702        return S_j.astype('int')
 703
 704    def compute_ENO_stencil(self, p_j, S_j, el_idx, max_jobs=4):
 705        """
 706        Compute the Essentially Non-Oscillatory stencils. The idea behind ENO
 707        stencils is to have higher degree interpolation stencils up to a thin
 708        layer of simplices containing the discontinuity. For a given simplex,
 709        its ENO stencil is created by locating all the nearest-neighbor
 710        stencils that contain element j , and subsequently selecting the one
 711        with the highest polynomial order p_j . This leads to a Delaunay
 712        stencils that contain element j, and subsequently selecting the one
 713        with the highest polynomial order p_j. This leads to a Delaunay
 714
 715        Parameters
 716        ----------
 717        p_j : array, shape (n_e,)
 718            The polynomial order of each simplex element.
 719        S_j : array, shape (n_e, n_s)
 720            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 721            ordered from the closest to the neighbour that is furthest away. The first
 722            n_xi + 1 indices belong to the j-th simplex itself.
 723        el_idx : dict
 724            The element indices for each interpolation stencil.
 725            el_idx[2] gives the elements indices of the 3rd interpolation
 726            stencil. The number of elements is determined by the local
 727            polynomial order.
 728        max_jobs : int, optional
 729            The number of ENO stencils that are computed in parallel.
 730            The default is 4.
 731
 732        Returns
 733        -------
 734        ENO_S_j : array, shape (n_e, n_s)
 735            The ENO stencils for each element.
 736        p_j : array, shape (n_e,)
 737            The new polynomial order of each element.
 738        el_idx : dict
 739            The new element indices for each interpolation stencil.
 740
 741        """
 742
 743        n_e = self.tri.nsimplex
 744        n_s = self.tri.npoints
 745
 746        # array to store the ENO stencils
 747        ENO_S_j = np.zeros([n_e, n_s]).astype('int')
 748        # the center of each simplex
 749        xi_centers = self.compute_xi_center_j()
 750
 751        jobs = []
 752        queues = []
 753        running_jobs = 0
 754        j = 0
 755        n_jobs = n_e
 756
 757        print('Computing ENO stencils...')
 758
 759        while n_jobs > 0:
 760
 761            # check how many processes are still alive
 762            for p in jobs:
 763                if p.is_alive() == False:
 764                    jobs.remove(p)
 765
 766            # number of processes still running
 767            running_jobs = len(jobs)
 768
 769            # compute the ENO stencils (in parallel)
 770            while running_jobs < max_jobs and n_jobs > 0:
 771                queue = mp.Queue()
 772                prcs = mp.Process(target=self.compute_ENO_stencil_j,
 773                                  args=(p_j, S_j, xi_centers, j, el_idx, queue))
 774
 775                # print check_LEC_j(tri, p_j, v, S_j, n_mc, j)
 776                jobs.append(prcs)
 777                queues.append(queue)
 778                prcs.start()
 779                n_jobs -= 1
 780                running_jobs += 1
 781                j += 1
 782
 783        # retrieve results
 784        for j in range(n_e):
 785            tmp = queues[j].get()
 786            ENO_S_j[j, :] = tmp['ENO_S_j']
 787            p_j[j] = tmp['p_j_new']
 788            el_idx[j] = tmp['el_idx[j]']
 789
 790        print('done.')
 791
 792        return ENO_S_j, p_j, el_idx
 793
 794    def compute_ENO_stencil_j(self, p_j, S_j, xi_centers, j, el_idx, queue):
 795        """
 796        Compute the ENO stencil of the j-th element.
 797
 798        Parameters
 799        ----------
 800        p_j : array, shape (n_e,)
 801            The polynomial order of each simplex element.
 802        S_j : array, shape (n_e, n_s)
 803            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 804            ordered from the closest to the neighbour that is furthest away. The first
 805            n_xi + 1 indices belong to the j-th simplex itself.
 806        xi_centers : array, shape (n_e, n_xi)
 807            The center of each simplex.
 808        j : int
 809            The index of the current simplex.
 810        el_idx : dict
 811            The element indices for each interpolation stencil.
 812            el_idx[2] gives the elements indices of the 3rd interpolation
 813            stencil. The number of elements is determined by the local
 814            polynomial order.
 815        queue : multiprocessing queue object
 816            Used to store the results.
 817
 818        Returns
 819        -------
 820        None. The results (the polynomial order and the element indices) are
 821        stored in the queue.
 822
 823        """
 824        n_e = self.tri.nsimplex
 825
 826        # set the stencil to the nearest neighbor stencil
 827        ENO_S_j = np.copy(S_j[j, :])
 828        p_j_new = np.copy(p_j[j])
 829
 830        # loop to find alternative candidate stencils S_ji with p_j>1 that contain k_jl
 831        idx = (p_j == 1).nonzero()[0]
 832        all_with_pj_gt_1 = np.delete(range(n_e), idx)
 833
 834        for i in all_with_pj_gt_1:
 835            # found a candidate stencil S_ji
 836            if np.in1d(j, el_idx[i]):
 837                # the candidate stencil has a higher polynomial degree: accept
 838                if p_j[i] > p_j_new:
 839                    ENO_S_j = np.copy(S_j[i, :])
 840                    p_j_new = np.copy(p_j[i])
 841                    el_idx[j] = np.copy(el_idx[i])
 842                # the candidate stencil has the same polynomial degree: accept
 843                # the one with the smallest avg Euclidean distance to the cell center
 844                elif p_j_new == p_j[i]:
 845
 846                    dist_i = np.linalg.norm(xi_centers[el_idx[i]] - xi_centers[j], 2, axis=1)
 847
 848                    dist_j = np.linalg.norm(xi_centers[el_idx[j]] - xi_centers[j], 2, axis=1)
 849
 850                    # if the new distance is smaller than the old one: accept
 851                    if np.sum(dist_i) < np.sum(dist_j):
 852                        ENO_S_j = np.copy(S_j[i, :])
 853                        el_idx[j] = np.copy(el_idx[i])
 854
 855        queue.put({'ENO_S_j': ENO_S_j, 'p_j_new': p_j_new, 'el_idx[j]': np.copy(el_idx[j])})
 856
 857    def sample_simplex(self, n_mc, xi_k_jl, check=False):
 858        """
 859        Use an analytical map from n_mc uniformly distributed points in the
 860        n_xi-dimensional hypercube, to uniformly distributed points in the
 861        target simplex described by the nodes xi_k_jl.
 862
 863        Derivation: Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016).
 864        Simplex-stochastic collocation method with improved scalability.
 865        Journal of Computational Physics, 310, 301-328.
 866
 867        Parameters
 868        ----------
 869        n_mc : int
 870            The number of Monte Carlo samples.
 871        xi_k_jl : array, shape (n_xi + 1, n_xi)
 872            The nodes of the target simplex.
 873        check : bool, optional
 874            Check whether the random samples actually all lie inside the
 875            target simplex. The default is False.
 876
 877        Returns
 878        -------
 879        P : array, shape (n_mc, n_xi)
 880            Uniformly distributed points inside the target simplex.
 881
 882        """
 883
 884        n_xi = self.n_xi
 885        P = np.zeros([n_mc, n_xi])
 886        for k in range(n_mc):
 887            # random points inside the hypercube
 888            r = np.random.rand(n_xi)
 889
 890            # the first term of the map is xi_k_j0
 891            sample = np.copy(xi_k_jl[0])
 892            for i in range(1, n_xi + 1):
 893                prod_r = 1.
 894                # compute the product of r-terms: prod(r_{n_xi-j+1}^{1/(n_xi-j+1)})
 895                for j in range(1, i + 1):
 896                    prod_r *= r[n_xi - j]**(1. / (n_xi - j + 1))
 897                # compute the ith term of the sum: prod_r*(\xi_i-\xi_{i-1})
 898                sample += prod_r * (xi_k_jl[i] - xi_k_jl[i - 1])
 899            P[k, :] = sample
 900
 901        # check if any of the samples are outside the simplex
 902        if check:
 903            outside_simplex = 0
 904            avg = np.sum(xi_k_jl, 0) / (n_xi + 1.)
 905            el = self.tri.find_simplex(avg)
 906            for i in range(n_mc):
 907                if self.tri.find_simplex(P[i, :]) != el:
 908                    outside_simplex += 1
 909            print('Number of samples outside target simplex = ' + str(outside_simplex))
 910
 911        return P
 912
 913    def sample_simplex_edge(self, simplex_idx, refined_edges):
 914        """
 915        Refine the longest edge of a simplex.
 916
 917        # TODO: this is fine for 2D, but does it make sense in higher dims?
 918
 919        Parameters
 920        ----------
 921        simplex_idx : int
 922            The index of the simplex.
 923        refined_edges : list
 924            Contains the pairs of the point indices, corresponding to edges that
 925            have been refined in the current iteration. Simplices share edges,
 926            and this list is used to prevent refining the same edge twice within
 927            the same iteration of the SSC algorithm.
 928
 929        Returns
 930        -------
 931        xi_new : array, shape (n_xi,)
 932            The newly added point (along the longest edge).
 933        refined_edges : list
 934            The updated refined_edges list.
 935        already_refined : bool
 936            Returns True if the edge already has been refined.
 937
 938        """
 939
 940        # the point indices of the simplex selected for refinement
 941        simplex_point_idx = self.tri.simplices[simplex_idx]
 942        # the points of the simplex selected for refinement
 943        xi_k_jl = self.tri.points[simplex_point_idx]
 944
 945        # find the indices of all edges, i.e. the combination of all possible
 946        # 2 distinct elements from range(n_xi + 1)
 947        comb = list(combinations(range(self.n_xi + 1), 2))
 948
 949        # compute all edge lengths, select the largest
 950        edge_lengths = np.zeros(len(comb))
 951        for i in range(len(comb)):
 952            edge_lengths[i] = np.linalg.norm(xi_k_jl[comb[i][1], :] - xi_k_jl[comb[i][0], :])
 953        idx = np.argmax(edge_lengths)
 954
 955        # if 2 or more edge lengths are equal, np.argmax already returns the index
 956        # of the first occurrence, so no further tie-breaking is required
 957
 958
 959        # edge points
 960        xi_0 = xi_k_jl[comb[idx][0], :]
 961        xi_1 = xi_k_jl[comb[idx][1], :]
 962
 963        # simplices share edges, make sure it was not already refined during
 964        # this iteration
 965        current_edge = np.array([simplex_point_idx[comb[idx][0]],
 966                                 simplex_point_idx[comb[idx][1]]])
 967        already_refined = False
 968
 969        for edge in refined_edges:
 970            if set(edge) == set(current_edge):
 971                already_refined = True
 972
 973        if not already_refined:
 974            refined_edges.append(current_edge)
 975
 976        # place random sample at +/- 10% of edge center
 977        b = 0.6
 978        a = 0.4
 979        U = np.random.rand() * (b - a) + a
 980        xi_new = xi_0 + U * (xi_1 - xi_0)
 981
 982        return xi_new, refined_edges, already_refined
 983
 984    def compute_surplus_k(self, xi_k_jref, S_j, p_j, v, v_k_jref):
 985        """
 986        Compute the hierarchical surplus at xi_k_jref (the refinement location),
 987        defined as the difference between the new code sample and the (old)
 988        surrogate prediction at the refinement location.
 989
 990        Parameters
 991        ----------
 992        xi_k_jref : array, shape (n_xi,)
 993            The refinement location.
 994        S_j : array, shape (n_e, n_s)
 995            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 996            ordered from the closest to the neighbour that is furthest away. The first
 997            n_xi + 1 indices belong to the j-th simplex itself.
 998        p_j : array, shape (n_e,)
 999            The polynomial order of each simplex element.
1000        v : array, shape (N + 1,)
1001            The (scalar) code outputs. #TODO:modify when vectors are allowed
1002        v_k_jref : float #TODO:modify when vectors are allowed
1003            The code sample at the refinement location.
1004
1005        Returns
1006        -------
1007        surplus : float #TODO:modify when vectors are allowed
1008            The hierarchical surplus.
1009
1010        """
1011
1012        w_k_jref = self.surrogate(xi_k_jref, S_j, p_j, v)
1013
1014        # compute the hierarchical surplus between the old interpolation and the new v value
1015        surplus = w_k_jref - v_k_jref
1016
1017        return surplus
1018
1019    def surrogate(self, xi, S_j, p_j, v):
1020        """
1021        Evaluate the SSC surrogate at xi.
1022
1023        Parameters
1024        ----------
1025        xi : array, shape (n_xi,)
1026            The location in the input space at which to evaluate the surrogate.
1027        S_j : array, shape (n_e, n_s)
1028            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
1029            ordered from the closest to the neighbour that is furthest away. The first
1030            n_xi + 1 indices belong to the j-th simplex itself.
1031        p_j : array, shape (n_e,)
1032            The polynomial order of each simplex element.
1033        v : array, shape (N + 1,)
1034            The (scalar) code outputs. #TODO:modify when vectors are allowed
1035
1036        Returns
1037        -------
1038        w_j : float
1039            The surrogate prediction at xi.
1040        """
1041        n_xi = self.n_xi
1042        idx = int(self.tri.find_simplex(xi))
1043
1044        # the number of points in S_j
1045        Np1_j = int(factorial(n_xi + p_j[idx]) / (factorial(n_xi) * factorial(p_j[idx])))
1046        # the vertices of the stencil S_j
1047        xi_Sj = self.tri.points[S_j[idx, 0:Np1_j]]
1048        # find the corresponding indices of v
1049        v_Sj = v[S_j[idx, 0:Np1_j], :]
1050        # compute sample matrix
1051        Psi = self.compute_Psi(xi_Sj, p_j[idx])
1052
1053    #    #check if Psi is well poised, at this point all stencils should be well-poised
1054    #    det_Psi = np.linalg.det(Psi)
1055    #    if det_Psi == 0:
1056    #        print 'Error, det(Psi)=0 in compute_surplus_k() method, should not be possible'
1057
1058        # compute the coefficients c_jl
1059        # c_jl = np.linalg.solve(Psi, v_Sj)
1060        c_jl = DAFSILAS(Psi, v_Sj, False)
1061
1062        # compute the interpolation on the old grid
1063        w_j = self.w_j(xi, c_jl, p_j[idx])
1064
1065        return w_j
1066
1067    def compute_eps_bar_j(self, p_j, prob_j):
1068        """
1069        Compute the geometric refinement measure \bar{\\eps}_j for all elements.
1070        Elements with the largest values will be selected for refinement.
1071
1072        Parameters
1073        ----------
1074        p_j : array, shape (n_e,)
1075            The polynomial order of each simplex element.
1076        prob_j : array, shape (n_e,)
1077            The probability of each simplex element.
1078
1079        Returns
1080        -------
1081        eps_bar_j : array, shape (n_e,)
1082            The geometric refinement measure.
1083        vol_j : array, shape (n_e,)
1084            The volume of each simplex element.
1085
1086        """
1087
1088        n_e = self.tri.nsimplex
1089        eps_bar_j = np.zeros(n_e)
1090        n_xi = self.tri.ndim
1091        vol_j = self.compute_vol()
1092        vol = np.sum(vol_j)
1093
1094        for j in range(n_e):
1095            O_j = (p_j[j] + 1.0) / n_xi
1096            eps_bar_j[j] = prob_j[j] * (vol_j[j] / vol) ** (2 * O_j)
1097
1098        return eps_bar_j, vol_j
1099
1100    def find_boundary_simplices(self):
1101        """
1102        Find the simplices that are on the boundary of the hypercube.
1103
1104        Returns
1105        -------
1106        idx : array
1107            Indices of the boundary simplices.
1108
1109        """
1110
1111        idx = (self.tri.neighbors == -1).nonzero()[0]
1112        return idx
1113
1114    def update_Delaunay(self, new_points):
1115        """
1116        Update the Delaunay triangulation with P new points.
1117
1118        Parameters
1119        ----------
1120        new_points : array, shape (P, n_xi)
1121            P new n_xi-dimensional points.
1122
1123        Returns
1124        -------
1125        None.
1126
1127        """
1128
1129        xi_k_jl = np.append(self.tri.points, new_points, 0)
1130        if self.n_xi > 1:
1131            self.tri = Delaunay(xi_k_jl)
1132        else:
1133            self.tri = Tri1D(xi_k_jl)
1134
1135        self._n_samples = self.tri.npoints
1136
1137    def get_Delaunay(self):
1138        """
1139        Return the SciPy Delaunay triangulation.
1140        """
1141        return self.tri
1142
1143    def is_finite(self):
1144        return True
1145
1146    @property
1147    def n_samples(self):
1148        """
1149        Returns the number of code samples.
1150        """
1151        return self._n_samples
1152
1153    @property
1154    def n_dimensions(self):
1155        """
1156        Returns the number of uncertain random variables
1157        """
1158        return self.n_xi
1159
1160    @property
1161    def n_elements(self):
1162        """
1163        Returns the number of simplex elements
1164        """
1165        return self.tri.nsimplex
1166
1167    def __next__(self):
1168        if self.count < self._n_samples:
1169            run_dict = {}
1170            i_par = 0
1171            for param_name in self.vary.get_keys():
1172                run_dict[param_name] = self.tri.points[self.count][i_par]
1173                i_par += 1
1174            self.count += 1
1175            return run_dict
1176        else:
1177            raise StopIteration
1178
1179    def save_state(self, filename):
1180        """
1181        Save the state of the sampler to a pickle file.
1182
1183        Parameters
1184        ----------
1185        filename : string
1186            File name.
1187
1188        Returns
1189        -------
1190        None.
1191
1192        """
1193        print("Saving sampler state to %s" % filename)
1194        with open(filename, 'wb') as fp:
1195            pickle.dump(self.__dict__, fp)
1196
1197    def load_state(self, filename):
1198        """
1199        Load the state of the sampler from a pickle file.
1200
1201        Parameters
1202        ----------
1203        filename : string
1204            File name.
1205
1206        Returns
1207        -------
1208        None.
1209
1210        """
1211        print("Loading sampler state from %s" % filename)
1212        with open(filename, 'rb') as fp:
1213            self.__dict__ = pickle.load(fp)

Simplex Stochastic Collocation sampler

SSCSampler(vary=None, max_polynomial_order=4)

Create an SSC sampler object.

Parameters
  • vary (dict or None): keys = parameters to be sampled, values = distributions.
  • max_polynomial_order (int, optional): The maximum polynomial order, default is 4.
Returns
  • None.
Instance attributes
  • n_xi
  • vary
  • tri
  • count
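As a quick orientation, a minimal construction sketch (the input names "x1"/"x2" and the uniform distributions are placeholder assumptions, not part of the module):

    import chaospy as cp
    from easyvvuq.sampling.simplex_stochastic_collocation import SSCSampler

    # two uniform inputs; the initial grid is the 2 ** n_xi hypercube corners plus the centre
    vary = {"x1": cp.Uniform(0.0, 1.0), "x2": cp.Uniform(-1.0, 1.0)}
    sampler = SSCSampler(vary=vary, max_polynomial_order=3)

    print(sampler.n_samples)    # 5 = 2 ** 2 corners + 1 centre point
    first_run = next(sampler)   # a dict mapping each input name to its coordinate for this grid point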
def init_grid(self):

Create the initial n_xi-dimensional Delaunay discretization

Returns
  • tri (scipy.spatial.qhull.Delaunay): Initial triangulation of 2 ** n_xi + 1 points.
def find_pmax(self, n_s):

Finds the maximum polynomial stencil order that can be sustained given the current number of code evaluations. The stencil size required for polynomial order p is given by:

stencil size = (n_xi + p)! / (n_xi!p!)

where n_xi is the number of random inputs. This formula is used to find p.

Parameters
  • n_xi (int): Number of random inputs (taken from the sampler, not passed as an argument).
  • n_s (int): Number of code samples.
Returns
  • int: The highest polynomial order that can be sustained given the number of code evaluations.
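A small worked example of the formula above, with hypothetical numbers: for n_xi = 2 the stencil sizes are 3, 6 and 10 for p = 1, 2 and 3, so n_s = 7 code samples sustain at most p = 2.

    from scipy.special import factorial

    n_xi, n_s, p = 2, 7, 1
    while factorial(n_xi + p) / (factorial(n_xi) * factorial(p)) <= n_s:
        p += 1
    print(p - 1)   # 2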
def set_pmax_cutoff(self, pmax_cutoff):

Set the maximum allowed polynomial order of the surrogate.

Parameters
  • pmax_cutoff (int): The max polynomial order.
Returns
  • None.
def compute_vol(self):

Use analytic formula to compute the volume of all simplices.

https://en.wikipedia.org/wiki/Simplex#Volume

Returns
  • vols (array, size (n_e,)): The volumes of the n_e simplices.
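As a sanity check of the formula V_j = |det(D)| / n_xi! (a standalone sketch, not module code), the unit right triangle in two dimensions has volume 1/2:

    import numpy as np
    from scipy.special import factorial

    vertices = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])   # one 2-D simplex
    D = (vertices[:-1] - vertices[-1]).T   # columns are vertex i minus the last vertex
    vol = np.abs(np.linalg.det(D)) / factorial(2)
    print(vol)   # 0.5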
def compute_xi_center_j(self):

Compute the center of all simplex elements.

Returns
  • xi_center_j (array, shape (n_e, n_xi)): The cell centers of the n_e simplices.
def compute_sub_simplex_vertices(self, simplex_idx):

Compute the vertices of the sub-simplex. The sub simplex is contained in the larger simplex. The larger simplex is refined by randomly placing a sample within the sub simplex.

Parameters
  • simplex_idx (int): The index of the simplex
Returns
  • xi_sub_jl (array, size (n_xi + 1, n_xi)): The vertices of the sub simplex.
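For intuition (an illustration, not module code): with n_xi = 2 each sub-simplex vertex is the mean of the other two element vertices, so the sub-simplex is the medial triangle spanned by the edge midpoints.

    import numpy as np

    xi_k_jl = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])   # element vertices
    # vertex l of the sub-simplex = sum of all vertices except l, divided by n_xi
    xi_sub_jl = np.array([xi_k_jl[[1, 2]].mean(0),
                          xi_k_jl[[0, 2]].mean(0),
                          xi_k_jl[[0, 1]].mean(0)])
    print(xi_sub_jl)   # the edge midpoints [0.5, 0.5], [0.0, 0.5] and [0.5, 0.0]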
def sample_inputs(self, n_mc):

Draw n_mc random values from the input distribution.

Parameters
  • n_mc (int): The number of Monte Carlo samples.
Returns
  • xi (array, shape(n_mc, n_xi)): n_mc random samples from the n_xi input distributions.
def compute_probability(self):

Compute the probability Omega_j for all simplex elements;

Omega_j = int p(xi) dxi,

with integration separately over each simplex using Monte Carlo sampling.

Returns
  • prob_j (array, size (n_e,)): The probabilities of each simplex.
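In other words, Omega_j is estimated as N_j / n_mc, where N_j counts the Monte Carlo input samples whose find_simplex index equals j; for uniformly distributed inputs this estimate tends to vol_j / sum_k vol_k as n_mc grows.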
def compute_i_norm_le_pj(self, p_max):

Compute the multi-index set {i | |i| = i_1 + ... + i_{n_xi} <= p}, for p = 1,...,p_max

Parameters
  • p_max (int): The max polynomial order of the index set.
Returns
  • i_norm_le_pj (dict): The Np + 1 multi indices per polynomial order.
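A small sketch of the index set for n_xi = 2 and p = 2 (illustrative values only):

    import numpy as np
    from itertools import product

    p, n_xi = 2, 2
    multi_idx = np.array(list(product(range(p + 1), repeat=n_xi)))
    multi_idx = multi_idx[multi_idx.sum(axis=1) <= p]
    print(multi_idx)   # (0,0), (0,1), (0,2), (1,0), (1,1), (2,0): 6 = (2 + 2)! / (2! 2!) indices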
def compute_Psi(self, xi_Sj, pmax):

Compute the Vandermonde matrix Psi, given N + 1 points xi from the j-th interpolation stencil, and a multi-index set of polynomial orders |i| = i_1 + ... + i_n_xi <= polynomial order.

Parameters
  • xi_Sj (array, shape (N + 1, n_xi)): The simplex n_xi-dimensional points of the j-th interpolation stencil S_j.
  • pmax (int): The max polynomial order of the local stencil.
Returns
  • Psi (array, shape (N + 1, N + 1)): The Vandermonde interpolation matrix consisting of monomials xi_1 ** i_1 * ... * xi_{n_xi} ** i_{n_xi}.
def w_j(self, xi, c_jl, pmax):

Compute the surrogate local interpolation at point xi.

TODO: right now this assumes a scalar output. Modify the code for vector-valued outputs.

Parameters
  • xi (array, shape (n_xi,)): A point inside the stochastic input domain.
  • c_jl (array, shape (N + 1,)): The interpolation coefficients of the j-th stencil, with l = 0, ..., N.
  • pmax (int): The max polynomial order of the local stencil.
Returns
  • w_j_at_xi (float): The surrogate prediction at xi.
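Taken together, compute_Psi and w_j implement local polynomial interpolation: the coefficients solve Psi c_jl = v_Sj and the surrogate is w_j(xi) = sum_l c_jl[l] * xi_1 ** i_l1 * ... * xi_{n_xi} ** i_l{n_xi}. A minimal one-dimensional sketch, using np.linalg.solve in place of the module's DAFSILAS solver:

    import numpy as np

    xi_Sj = np.array([0.0, 1.0])    # stencil points (n_xi = 1, p = 1)
    v_Sj = np.array([2.0, 5.0])     # hypothetical code outputs at those points
    Psi = np.array([[xi ** 0, xi ** 1] for xi in xi_Sj])   # monomials 1 and xi
    c_jl = np.linalg.solve(Psi, v_Sj)
    w = c_jl[0] + c_jl[1] * 0.5     # surrogate prediction at xi = 0.5
    print(w)   # 3.5, the linear interpolant between the two samples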
def check_LEC(self, p_j, v, S_j, n_mc, max_jobs=4):

Check the Local Extremum Conserving (LEC) property of all simplex elements.

Parameters
  • p_j (array, shape (n_e,)): The polynomial order of each element.
  • v (array, shape (N + 1,)): The (scalar) code outputs. #TODO:modify when vectors are allowed
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
  • n_mc (int): The number of Monte Carlo samples to use in checking the LEC condition.
  • max_jobs (int): The number of LEC checks (one per element) that can be run in parallel.
Returns
  • dict: The (possibly reduced) polynomial orders 'p_j', the stencils 'S_j' and the element indices 'el_idx' of each interpolation stencil.
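A minimal, self-contained sketch of the bounded process pool used by check_LEC (and again by compute_ENO_stencil below). The worker function is a hypothetical stand-in for check_LEC_j; the pattern keeps at most max_jobs processes alive and collects one result per job from its queue.

    import multiprocessing as mp

    def worker(j, queue):
        # hypothetical stand-in for check_LEC_j: push a result dict for job j
        queue.put({'job': j, 'result': j ** 2})

    if __name__ == '__main__':
        n_jobs, max_jobs = 8, 4
        jobs, queues, j, remaining = [], [], 0, n_jobs
        while remaining > 0:
            # drop processes that have finished
            jobs = [p for p in jobs if p.is_alive()]
            # top the pool back up to at most max_jobs running processes
            while len(jobs) < max_jobs and remaining > 0:
                queue = mp.Queue()
                prcs = mp.Process(target=worker, args=(j, queue))
                prcs.start()
                jobs.append(prcs)
                queues.append(queue)
                remaining -= 1
                j += 1
        # queue.get() blocks until the corresponding worker has pushed its result
        results = [queues[k].get() for k in range(n_jobs)]
        print(results)

Compared with the listing above, the finished processes are filtered with a list comprehension instead of being removed from the list while iterating over it.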
def find_simplices(self, S_j):
527    def find_simplices(self, S_j):
528        """
529        Find the simplex element indices that are in point stencil S_j.
530
531        Parameters
532        ----------
533        S_j : array, shape (N + 1,)
534            The interpolation stencil of the j-th simplex element, expressed
535            as the indices of the simplex points.
536
537        Returns
538        -------
539        idx : array
540            The element indices of stencil S_j.
541
542        """
543        n_e = self.tri.nsimplex
544        # if the overlap between element i and S_j = n_xi + 1, element i
545        # is in S_j
546        idx = [np.in1d(self.tri.simplices[i], S_j).nonzero()[0].size for i in range(n_e)]
547        idx = (np.array(idx) == self.n_xi + 1).nonzero()[0]
548
549        return idx

Find the simplex element indices that are in point stencil S_j.

Parameters
  • S_j (array, shape (N + 1,)): The interpolation stencil of the j-th simplex element, expressed as the indices of the simplex points.
Returns
  • idx (array): The element indices of stencil S_j.
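The membership test used by find_simplices can be reproduced on any SciPy Delaunay triangulation. In the illustration below (not part of the sampler), a simplex is counted as belonging to the stencil when all of its n_xi + 1 vertex indices appear in S_j.

    import numpy as np
    from scipy.spatial import Delaunay

    # unit square plus its centre: four triangles meeting in point 4
    points = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.], [0.5, 0.5]])
    tri = Delaunay(points)
    n_xi = tri.ndim

    S_j = np.array([0, 1, 4, 2])   # a hypothetical point stencil
    # a simplex lies in S_j if all n_xi + 1 of its vertices are in the stencil
    overlap = [np.in1d(tri.simplices[i], S_j).sum() for i in range(tri.nsimplex)]
    idx = (np.array(overlap) == n_xi + 1).nonzero()[0]
    print('simplices fully contained in S_j:', idx)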
def check_LEC_j(self, p_j, v, S_j, n_mc, queue):
551    def check_LEC_j(self, p_j, v, S_j, n_mc, queue):
552        """
553        Check the LEC condition of the j-th interpolation stencil.
554
555        Parameters
556        ----------
557        p_j : int
558            The polynomial order of the j-th stencil.
559        v : array
560            The code samples.
561        S_j : array, shape (N + 1,)
562            The interpolation stencil of the j-th simplex element, expressed
563            as the indices of the simplex points.
564        n_mc : int
565            The number of Monte Carlo samples to use in checking the LEC
566            conditions.
567        queue : multiprocessing queue object
568            Used to store the results.
569
570        Returns
571        -------
572        None. The results (the possibly reduced polynomial order and the
573        element indices of the stencil) are stored in the queue.
574
575        """
576        n_xi = self.n_xi
577        # n_e = self.tri.nsimplex
578        N = v[0, :].size
579
580        # the number of points in S_j
581        Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
582        # select the vertices of stencil S_j
583        xi_Sj = self.tri.points[S_j[0:Np1_j]]
584        # find the corresponding indices of v
585        v_Sj = v[S_j[0:Np1_j], :]
586
587        # the element indices of the simplices in stencil S_j
588        el_idx_j = self.find_simplices(S_j[0:Np1_j])
589
590        # compute sample matrix
591        Psi = self.compute_Psi(xi_Sj, p_j)
592
593        # check if Psi is well poised
594        # det_Psi = np.linalg.det(Psi)
595        # if det_Psi == 0:
596        #    #print 'Warning: determinant Psi is zero.'
597        #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
598        #    #return an error code
599        #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
600
601        # compute the coefficients c_jl
602        # c_jl = np.linalg.solve(Psi, v_Sj)
603        c_jl = DAFSILAS(Psi, v_Sj)
604
605        # check the LEC condition for all simplices in the STENCIL S_j
606        k = 0
607        LEC_checked = False
608
609        while not LEC_checked and p_j != 1:
610            # sample the simplices in stencil S_j
611            xi_samples = self.sample_simplex(n_mc, self.tri.points[self.tri.simplices[el_idx_j[k]]])
612
613            # function values at the vertices of the k-th simplex in S_j
614            v_min = np.min(v[self.tri.simplices[el_idx_j[k]]])
615            v_max = np.max(v[self.tri.simplices[el_idx_j[k]]])
616
617            # compute interpolation values at MC sample points
618            w_j_at_xi = np.zeros([n_mc, N])
619            for i in range(n_mc):
620                w_j_at_xi[i, :] = self.w_j(xi_samples[i], c_jl, p_j)
621
622            k += 1
623
624            # if LEC is violated in any of the simplices
625            # TODO: make work for vector-valued outputs
626            eps = 0
627            if (w_j_at_xi.min() <= v_min - eps or w_j_at_xi.max() >= v_max + eps) and p_j > 1:
628
629                p_j -= 1
630
631                # the new number of points in S_j
632                Np1_j = int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))
633                # select the new vertices of stencil S_j
634                xi_Sj = self.tri.points[S_j[0:Np1_j]]
635                # find the new corresponding indices of v
636                v_Sj = v[S_j[0:Np1_j], :]
637                # the new element indices of the simplices in stencil S_j
638                el_idx_j = self.find_simplices(S_j[0:Np1_j])
639                k = 0
640
641                if p_j == 1:
642                    return queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})
643
644                # recompute sample matrix
645                Psi = self.compute_Psi(xi_Sj, p_j)
646
647                # check if Psi is well poised
648                # det_Psi = np.linalg.det(Psi)
649                # if det_Psi == 0:
650                #    #print 'Warning: determinant Psi is zero.'
651                #    #print 'Reducing local p_j from ' + str(p_j[j]) + ' to a lower value.'
652                #    #return an error code
653                #    return queue.put({'p_j[j]':-99, 'el_idx_j':el_idx_j})
654
655                # compute the coefficients c_jl
656                # c_jl = np.linalg.solve(Psi, v_Sj)
657                c_jl = DAFSILAS(Psi, v_Sj, False)
658
659            if k == el_idx_j.size:
660                LEC_checked = True
661
662        queue.put({'p_j[j]': p_j, 'el_idx_j': el_idx_j})

Check the LEC condition of the j-th interpolation stencil.

Parameters
  • p_j (int): The polynomial order of the j-th stencil.
  • v (array): The code samples.
  • S_j (array, shape (N + 1,)): The interpolation stencil of the j-th simplex element, expressed as the indices of the simplex points.
  • n_mc (int): The number of Monte Carlo samples to use in checking the LEC conditions.
  • queue (multiprocessing queue object): Used to store the results.
Returns
  • None. The results (the possibly reduced polynomial order and the element indices of the stencil) are stored in the queue.
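The stencil size used throughout check_LEC_j follows from the number of monomials of total degree at most p_j in n_xi variables, Np1_j = (n_xi + p_j)! / (n_xi! p_j!). The short snippet below tabulates it for a two-dimensional input space.

    # number of stencil points for a local polynomial of order p_j in n_xi
    # dimensions: Np1 = (n_xi + p_j)! / (n_xi! * p_j!)
    from scipy.special import factorial

    def stencil_size(n_xi, p_j):
        return int(factorial(n_xi + p_j) / (factorial(n_xi) * factorial(p_j)))

    for p_j in range(1, 5):
        print('n_xi = 2, p_j =', p_j, '->', stencil_size(2, p_j), 'points')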
def compute_stencil_j(self):
664    def compute_stencil_j(self):
665        """
666        Compute the nearest neighbor stencils of all simplex elements. The
667        distances to all points are measured with respect to the cell center
668        of each element.
669
670        Returns
671        -------
672        S_j : array, shape (n_e, n_s)
673            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
674            ordered from closest to furthest away. The first
675            n_xi + 1 indices belong to the j-th simplex itself.
676
677        """
678
679        n_e = self.tri.nsimplex
680        n_s = self.tri.npoints
681        n_xi = self.n_xi
682        S_j = np.zeros([n_e, n_s])
683        # compute the center of each element
684        xi_center_j = self.compute_xi_center_j()
685
686        for j in range(n_e):
687            # the number of points in S_j
688            # Np1_j = factorial(n_xi + p_j[j])/(factorial(n_xi)*factorial(p_j[j]))
689            # k = {1,...,n_s}\{k_j0, ..., k_jn_xi}
690            idx = np.delete(range(n_s), self.tri.simplices[j])
691            # store the vertex indices of the element itself
692            S_j[j, 0:n_xi + 1] = np.copy(self.tri.simplices[j])
693            # loop over the set k
694            dist = np.zeros(n_s - n_xi - 1)
695            for i in range(n_s - n_xi - 1):
696                # ||xi_k - xi_center_j||
697                dist[i] = np.linalg.norm(self.tri.points[idx[i]] - xi_center_j[j, :])
698            # store the indices of the points, sorted based on distance wrt
699            # simplex center j. Store only the amount of indices allowed by p_j.
700            S_j[j, n_xi + 1:] = idx[np.argsort(dist)]
701
702        return S_j.astype('int')

Compute the nearest neighbor stencils of all simplex elements. The distances to all points are measured with respect to the cell center of each element.

Returns
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
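The construction can be mimicked for a single simplex of a plain SciPy triangulation: its own vertices come first, followed by all remaining points sorted by distance to the cell centre (illustration only, with random points).

    import numpy as np
    from scipy.spatial import Delaunay

    np.random.seed(42)
    tri = Delaunay(np.random.rand(10, 2))

    j = 0                                            # first simplex
    own = tri.simplices[j]                           # its n_xi + 1 vertex indices
    center = tri.points[own].mean(axis=0)            # cell centre of simplex j
    others = np.delete(np.arange(tri.npoints), own)  # all remaining point indices
    dist = np.linalg.norm(tri.points[others] - center, axis=1)
    S_j = np.concatenate([own, others[np.argsort(dist)]])
    print('nearest-neighbour stencil of simplex 0:', S_j)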
def compute_ENO_stencil(self, p_j, S_j, el_idx, max_jobs=4):
704    def compute_ENO_stencil(self, p_j, S_j, el_idx, max_jobs=4):
705        """
706        Compute the Essentially Non-Oscillatory stencils. The idea behind ENO
707        stencils is to have higher degree interpolation stencils up to a thin
708        layer of simplices containing the discontinuity. For a given simplex,
709        its ENO stencil is created by locating all the nearest-neighbor
710        stencils that contain element j , and subsequently selecting the one
711        with the highest polynomial order p_j . This leads to a Delaunay
712        triangulation which captures the discontinuity better than its
713        nearest-neighbor counterpart.
714
715        Parameters
716        ----------
717        p_j : array, shape (n_e,)
718            The polynomial order of each simplex element.
719        S_j : array, shape (n_e, n_s)
720            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
721            ordered from closest to furthest away. The first
722            n_xi + 1 indices belong to the j-th simplex itself.
723        el_idx : dict
724            The element indices for each interpolation stencil.
725            el_idx[2] gives the element indices of the 3rd interpolation
726            stencil. The number of elements is determined by the local
727            polynomial order.
728        max_jobs : int, optional
729            The number of ENO stencils that are computed in parallel.
730            The default is 4.
731
732        Returns
733        -------
734        ENO_S_j : array, shape (n_e, n_s)
735            The ENO stencils for each element.
736        p_j : array, shape (n_e,)
737            The new polynomial order of each element.
738        el_idx : dict
739            The new element indices for each interpolation stencil.
740
741        """
742
743        n_e = self.tri.nsimplex
744        n_s = self.tri.npoints
745
746        # array to store the ENO stencils
747        ENO_S_j = np.zeros([n_e, n_s]).astype('int')
748        # the center of each simplex
749        xi_centers = self.compute_xi_center_j()
750
751        jobs = []
752        queues = []
753        running_jobs = 0
754        j = 0
755        n_jobs = n_e
756
757        print('Computing ENO stencils...')
758
759        while n_jobs > 0:
760
761            # check how many processes are still alive
762            for p in jobs:
763                if not p.is_alive():
764                    jobs.remove(p)
765
766            # number of processes still running
767            running_jobs = len(jobs)
768
769            # compute the ENO stencils (in parallel)
770            while running_jobs < max_jobs and n_jobs > 0:
771                queue = mp.Queue()
772                prcs = mp.Process(target=self.compute_ENO_stencil_j,
773                                  args=(p_j, S_j, xi_centers, j, el_idx, queue))
774
775                # print check_LEC_j(tri, p_j, v, S_j, n_mc, j)
776                jobs.append(prcs)
777                queues.append(queue)
778                prcs.start()
779                n_jobs -= 1
780                running_jobs += 1
781                j += 1
782
783        # retrieve results
784        for j in range(n_e):
785            tmp = queues[j].get()
786            ENO_S_j[j, :] = tmp['ENO_S_j']
787            p_j[j] = tmp['p_j_new']
788            el_idx[j] = tmp['el_idx[j]']
789
790        print('done.')
791
792        return ENO_S_j, p_j, el_idx

Compute the Essentially Non-Oscillatory stencils. The idea behind ENO stencils is to have higher degree interpolation stencils up to a thin layer of simplices containing the discontinuity. For a given simplex, its ENO stencil is created by locating all the nearest-neighbor stencils that contain element j , and subsequently selecting the one with the highest polynomial order p_j . This leads to a Delaunay triangulation which captures the discontinuity better than its nearest-neighbor counterpart.

Parameters
  • p_j (array, shape (n_e,)): The polynomial order of each simplex element.
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
  • el_idx (dict): The element indices for each interpolation stencil. el_idx[2] gives the element indices of the 3rd interpolation stencil. The number of elements is determined by the local polynomial order.
  • max_jobs (int, optional): The number of ENO stencils that are computed in parallel. The default is 4.
Returns
  • ENO_S_j (array, shape (n_e, n_s)): The ENO stencils for each element.
  • p_j (array, shape (n_e,)): The new polynomial order of each element.
  • el_idx (dict): The new element indices for each interpolation stencil.
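The selection rule for a single element j can be written down in a few lines. The arrays below are hypothetical; the rule is: among candidate stencils that contain j, adopt the one with the highest order, breaking ties in favour of the stencil whose elements lie closest to the centre of j.

    import numpy as np

    j = 0
    p_j = np.array([2, 3, 3, 1])              # order of each candidate stencil
    el_idx = {0: np.array([0, 1]),            # elements covered by each stencil
              1: np.array([0, 2]),
              2: np.array([0, 3]),
              3: np.array([3])}
    centers = np.array([[0.2, 0.2], [0.5, 0.2], [0.2, 0.5], [0.8, 0.8]])

    best, best_p = j, p_j[j]
    for i in range(len(p_j)):
        if j in el_idx[i] and p_j[i] > 1:
            d_i = np.linalg.norm(centers[el_idx[i]] - centers[j], axis=1).sum()
            d_b = np.linalg.norm(centers[el_idx[best]] - centers[j], axis=1).sum()
            if p_j[i] > best_p or (p_j[i] == best_p and d_i < d_b):
                best, best_p = i, p_j[i]
    print('element', j, 'adopts stencil', best, 'with order', best_p)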
def compute_ENO_stencil_j(self, p_j, S_j, xi_centers, j, el_idx, queue):
794    def compute_ENO_stencil_j(self, p_j, S_j, xi_centers, j, el_idx, queue):
795        """
796        Compute the ENO stencil of the j-th element.
797
798        Parameters
799        ----------
800        p_j : array, shape (n_e,)
801            The polynomial order of each simplex element.
802        S_j : array, shape (n_e, n_s)
803            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
804            ordered from closest to furthest away. The first
805            n_xi + 1 indices belong to the j-th simplex itself.
806        xi_centers : array, shape (n_e, n_xi)
807            The center of each simplex.
808        j : int
809            The index of the current simplex.
810        el_idx : dict
811            The element indices for each interpolation stencil.
812            el_idx[2] gives the element indices of the 3rd interpolation
813            stencil. The number of elements is determined by the local
814            polynomial order.
815        queue : multiprocessing queue object
816            Used to store the results.
817
818        Returns
819        -------
820        None. The results (the ENO stencil, polynomial order and element
821        indices) are stored in the queue.
822
823        """
824        n_e = self.tri.nsimplex
825
826        # set the stencil to the nearest neighbor stencil
827        ENO_S_j = np.copy(S_j[j, :])
828        p_j_new = np.copy(p_j[j])
829
830        # loop to find alternative candidate stencils S_ji with p_j>1 that contain k_jl
831        idx = (p_j == 1).nonzero()[0]
832        all_with_pj_gt_1 = np.delete(range(n_e), idx)
833
834        for i in all_with_pj_gt_1:
835            # found a candidate stencil S_ji
836            if np.in1d(j, el_idx[i]):
837                # the candidate stencil has a higher polynomial degree: accept
838                if p_j[i] > p_j_new:
839                    ENO_S_j = np.copy(S_j[i, :])
840                    p_j_new = np.copy(p_j[i])
841                    el_idx[j] = np.copy(el_idx[i])
842                # the candidate stencil has the same polynomial degree: accept
843                # the one with smallest avg Euclidian distance to the cell center
844                elif p_j_new == p_j[i]:
845
846                    dist_i = np.linalg.norm(xi_centers[el_idx[i]] - xi_centers[j], 2, axis=1)
847
848                    dist_j = np.linalg.norm(xi_centers[el_idx[j]] - xi_centers[j], 2, axis=1)
849
850                    # if the new distance is smaller than the old one: accept
851                    if np.sum(dist_i) < np.sum(dist_j):
852                        ENO_S_j = np.copy(S_j[i, :])
853                        el_idx[j] = np.copy(el_idx[i])
854
855        queue.put({'ENO_S_j': ENO_S_j, 'p_j_new': p_j_new, 'el_idx[j]': np.copy(el_idx[j])})

Compute the ENO stencil of the j-th element.

Parameters
  • p_j (array, shape (n_e,)): The polynomial order of each simplex element.
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
  • xi_centers (array, shape (n_e, n_xi)): The center of each simplex.
  • j (int): The index of the current simplex.
  • el_idx (dict): The element indices for each interpolation stencil. el_idx[2] gives the element indices of the 3rd interpolation stencil. The number of elements is determined by the local polynomial order.
  • queue (multiprocessing queue object): Used to store the results.
Returns
  • None. The results (the ENO stencil, polynomial order and element indices) are stored in the queue.
def sample_simplex(self, n_mc, xi_k_jl, check=False):
857    def sample_simplex(self, n_mc, xi_k_jl, check=False):
858        """
859        Use an analytical map from n_mc uniformly distributed points in the
860        n_xi-dimensional hypercube, to uniformly distributed points in the
861        target simplex described by the nodes xi_k_jl.
862
863        Derivation: Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016).
864        Simplex-stochastic collocation method with improved scalability.
865        Journal of Computational Physics, 310, 301-328.
866
867        Parameters
868        ----------
869        n_mc : int
870            The number of Monte Carlo samples.
871        xi_k_jl : array, shape (n_xi + 1, n_xi)
872            The nodes of the target simplex.
873        check : bool, optional
874            If True, check whether the random samples all lie inside the
875            target simplex. The default is False.
876
877        Returns
878        -------
879        P : array, shape (n_mc, n_xi)
880            Uniformly distributed points inside the target simplex.
881
882        """
883
884        n_xi = self.n_xi
885        P = np.zeros([n_mc, n_xi])
886        for k in range(n_mc):
887            # random points inside the hypercube
888            r = np.random.rand(n_xi)
889
890            # the first term of the map is \xi_k_j0
891            sample = np.copy(xi_k_jl[0])
892            for i in range(1, n_xi + 1):
893                prod_r = 1.
894                # compute the product of r-terms: prod(r_{n_xi-j+1}^{1/(n_xi-j+1)})
895                for j in range(1, i + 1):
896                    prod_r *= r[n_xi - j]**(1. / (n_xi - j + 1))
897                # compute the ith term of the sum: prod_r*(\xi_i-\xi_{i-1})
898                sample += prod_r * (xi_k_jl[i] - xi_k_jl[i - 1])
899            P[k, :] = sample
900
901        # check if any of the samples are outside the simplex
902        if check:
903            outside_simplex = 0
904            avg = np.sum(xi_k_jl, 0) / (n_xi + 1.)
905            el = self.tri.find_simplex(avg)
906            for i in range(n_mc):
907                if self.tri.find_simplex(P[i, :]) != el:
908                    outside_simplex += 1
909            print('Number of samples outside target simplex = ' + str(outside_simplex))
910
911        return P

Use an analytical map from n_mc uniformly distributed points in the n_xi-dimensional hypercube, to uniformly distributed points in the target simplex described by the nodes xi_k_jl.

Derivation: Edeling, W. N., Dwight, R. P., & Cinnella, P. (2016). Simplex-stochastic collocation method with improved scalability. Journal of Computational Physics, 310, 301-328.

Parameters
  • n_mc (int): The number of Monte Carlo samples.
  • xi_k_jl (array, shape (n_xi + 1, n_xi)): The nodes of the target simplex.
  • check (bool, optional): If True, check whether the random samples all lie inside the target simplex. The default is False.
Returns
  • P (array, shape (n_mc, n_xi)): Uniformly distributed points inside the target simplex.
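The map can be exercised on a single two-dimensional simplex and checked with SciPy's find_simplex; the snippet below is a stand-alone version of the loop above for a fixed triangle.

    import numpy as np
    from scipy.spatial import Delaunay

    n_xi, n_mc = 2, 1000
    xi_k_jl = np.array([[0., 0.], [1., 0.], [0., 1.]])   # nodes of the target simplex
    P = np.zeros([n_mc, n_xi])
    for k in range(n_mc):
        r = np.random.rand(n_xi)
        sample = np.copy(xi_k_jl[0])
        for i in range(1, n_xi + 1):
            prod_r = 1.
            for j in range(1, i + 1):
                prod_r *= r[n_xi - j] ** (1. / (n_xi - j + 1))
            sample += prod_r * (xi_k_jl[i] - xi_k_jl[i - 1])
        P[k, :] = sample

    # every sample should fall inside the triangle (find_simplex returns -1 outside)
    tri = Delaunay(xi_k_jl)
    print('samples outside the simplex:', np.sum(tri.find_simplex(P) == -1))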
def sample_simplex_edge(self, simplex_idx, refined_edges):
913    def sample_simplex_edge(self, simplex_idx, refined_edges):
914        """
915        Refine the longest edge of a simplex.
916
917        # TODO: this is fine in 2D, but does this make sense in higher dims?
918
919        Parameters
920        ----------
921        simplex_idx : int
922            The index of the simplex.
923        refined_edges : list
924            Contains the pairs of the point indices, corresponding to edges that
925            have been refined in the current iteration. Simplices share edges,
926            and this list is used to prevent refining the same edge twice within
927            the same iteration of the SSC algorithm.
928
929        Returns
930        -------
931        xi_new : array, shape (n_xi,)
932            The newly added point (along the longest edge).
933        refined_edges : list
934            The updated refined_edges list.
935        already_refined : bool
936            True if the edge has already been refined.
937
938        """
939
940        # the point indices of the simplex selected for refinement
941        simplex_point_idx = self.tri.simplices[simplex_idx]
942        # the points of the simplex selected for refinement
943        xi_k_jl = self.tri.points[simplex_point_idx]
944
945        # find the indices of all edges, i.e. the combination of all possible
946        # 2 distinct elements from range(n_xi + 1)
947        comb = list(combinations(range(self.n_xi + 1), 2))
948
949        # compute all edge lengths, select the largest
950        edge_lengths = np.zeros(len(comb))
951        for i in range(len(comb)):
952            edge_lengths[i] = np.linalg.norm(xi_k_jl[comb[i][1], :] - xi_k_jl[comb[i][0], :])
953        idx = np.argmax(edge_lengths)
954
955        # if there are 2 or more edge lengths that are the same, select the 1st
956        if idx.size > 1:
957            idx = idx[0]
958
959        # edge points
960        xi_0 = xi_k_jl[comb[idx][0], :]
961        xi_1 = xi_k_jl[comb[idx][1], :]
962
963        # simplices share edges, make sure it was not already refined during
964        # this iteration
965        current_edge = np.array([simplex_point_idx[comb[idx][0]],
966                                 simplex_point_idx[comb[idx][1]]])
967        already_refined = False
968
969        for edge in refined_edges:
970            if set(edge) == set(current_edge):
971                already_refined = True
972
973        if not already_refined:
974            refined_edges.append(current_edge)
975
976        # place random sample at +/- 10% of edge center
977        b = 0.6
978        a = 0.4
979        U = np.random.rand() * (b - a) + a
980        xi_new = xi_0 + U * (xi_1 - xi_0)
981
982        return xi_new, refined_edges, already_refined

Refine the longest edge of a simplex.

TODO: this is fine in 2D, but does this make sense in higher dims?

Parameters
  • simplex_idx (int): The index of the simplex.
  • refined_edges (list): Contains the pairs of point indices corresponding to edges that have been refined in the current iteration. Simplices share edges, and this list is used to prevent refining the same edge twice within the same iteration of the SSC algorithm.
Returns
  • xi_new (array, shape (n_xi,)): The newly added point (along the longest edge).
  • refined_edges (list): The updated refined_edges list.
  • already_refined (bool): True if the edge has already been refined.
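The refinement step itself is short: enumerate all vertex pairs of the simplex, take the longest edge and place the new point at a random position between 40% and 60% along it (stand-alone illustration for one 2-D simplex).

    import numpy as np
    from itertools import combinations

    xi_k_jl = np.array([[0., 0.], [1., 0.], [0., 2.]])   # one 2-D simplex
    comb = list(combinations(range(xi_k_jl.shape[0]), 2))
    lengths = [np.linalg.norm(xi_k_jl[b] - xi_k_jl[a]) for a, b in comb]
    a, b = comb[int(np.argmax(lengths))]                 # longest edge
    U = 0.4 + 0.2 * np.random.rand()                     # uniform in [0.4, 0.6]
    xi_new = xi_k_jl[a] + U * (xi_k_jl[b] - xi_k_jl[a])
    print('refining edge', (a, b), 'new point:', xi_new)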
def compute_surplus_k(self, xi_k_jref, S_j, p_j, v, v_k_jref):
 984    def compute_surplus_k(self, xi_k_jref, S_j, p_j, v, v_k_jref):
 985        """
 986        Compute the hierarchical surplus at xi_k_jref (the refinement location),
 987        defined as the difference between the (old) surrogate prediction and
 988        the new code sample at the refinement location.
 989
 990        Parameters
 991        ----------
 992        xi_k_jref : array, shape (n_xi,)
 993            The refinement location.
 994        S_j : array, shape (n_e, n_s)
 995            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
 996            ordered from closest to furthest away. The first
 997            n_xi + 1 indices belong to the j-th simplex itself.
 998        p_j : array, shape (n_e,)
 999            The polynomial order of each simplex element.
1000        v : array, shape (N + 1,)
1001            The (scalar) code outputs. #TODO:modify when vectors are allowed
1002        v_k_jref : float #TODO:modify when vectors are allowed
1003            The code prediction at the refinement location.
1004
1005        Returns
1006        -------
1007        surplus : float #TODO:modify when vectors are allowed
1008            The hierarchical surplus.
1009
1010        """
1011
1012        w_k_jref = self.surrogate(xi_k_jref, S_j, p_j, v)
1013
1014        # compute the hierarchical surplus between old interpolation and new v value
1015        surplus = w_k_jref - v_k_jref
1016
1017        return surplus

Compute the hierarchical surplus at xi_k_jref (the refinement location), defined as the difference between the (old) surrogate prediction and the new code sample at the refinement location.

Parameters
  • xi_k_jref (array, shape (n_xi,)): The refinement location.
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
  • p_j (array, shape (n_e,)): The polynomial order of each simplex element.
  • v (array, shape (N + 1,)): The (scalar) code outputs. #TODO:modify when vectors are allowed
  • v_k_jref : float #TODO (modify when vectors are allowed): The code prediction at the refinement location.
Returns
  • surplus : float #TODO (modify when vectors are allowed): The hierarchical surplus.
def surrogate(self, xi, S_j, p_j, v):
1019    def surrogate(self, xi, S_j, p_j, v):
1020        """
1021        Evaluate the SSC surrogate at xi.
1022
1023        Parameters
1024        ----------
1025        xi : array, shape (n_xi,)
1026            The location in the input space at which to evaluate the surrogate.
1027        S_j : array, shape (n_e, n_s)
1028            The indices of all nearest-neighbour points of each simplex j=1,..,n_e,
1029            ordered from closest to furthest away. The first
1030            n_xi + 1 indices belong to the j-th simplex itself.
1031        p_j : array, shape (n_e,)
1032            The polynomial order of each simplex element.
1033        v : array, shape (N + 1,)
1034            The (scalar) code outputs. #TODO:modify when vectors are allowed
1035
1036        Returns
1037        -------
1038        w_j : array
1039            The surrogate prediction at xi.
1040        """
1041        n_xi = self.n_xi
1042        idx = int(self.tri.find_simplex(xi))
1043
1044        # the number of points in S_j
1045        Np1_j = int(factorial(n_xi + p_j[idx]) / (factorial(n_xi) * factorial(p_j[idx])))
1046        # the vertices of the stencil S_j
1047        xi_Sj = self.tri.points[S_j[idx, 0:Np1_j]]
1048        # find the corresponding indices of v
1049        v_Sj = v[S_j[idx, 0:Np1_j], :]
1050        # compute sample matrix
1051        Psi = self.compute_Psi(xi_Sj, p_j[idx])
1052
1053    #    #check if Psi is well poised, at this point all stencils should be well-poised
1054    #    det_Psi = np.linalg.det(Psi)
1055    #    if det_Psi == 0:
1056    #        print 'Error, det(Psi)=0 in compute_surplus_k() method, should not be possible'
1057
1058        # compute the coefficients c_jl
1059        # c_jl = np.linalg.solve(Psi, v_Sj)
1060        c_jl = DAFSILAS(Psi, v_Sj, False)
1061
1062        # compute the interpolation on the old grid
1063        w_j = self.w_j(xi, c_jl, p_j[idx])
1064
1065        return w_j

Evaluate the SSC surrogate at xi.

Parameters
  • xi (array, shape (n_xi,)): The location in the input space at which to evaluate the surrogate.
  • S_j (array, shape (n_e, n_s)): The indices of all nearest-neighbour points of each simplex j=1,..,n_e, ordered from closest to furthest away. The first n_xi + 1 indices belong to the j-th simplex itself.
  • p_j (array, shape (n_e,)): The polynomial order of each simplex element.
  • v (array, shape (N + 1,)): The (scalar) code outputs. #TODO:modify when vectors are allowed
Returns
  • w_j (array): The surrogate prediction at xi.
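A hedged end-to-end sketch of how the surrogate might be queried directly. It assumes chaospy is installed, that the initial grid spans the unit hypercube, and it fixes p_j = 1 everywhere (piecewise-linear interpolation) so that no LEC check is needed; the toy model below is an arbitrary choice for illustration.

    import numpy as np
    import chaospy as cp
    from easyvvuq.sampling.simplex_stochastic_collocation import SSCSampler

    vary = {'x1': cp.Uniform(0, 1), 'x2': cp.Uniform(0, 1)}
    sampler = SSCSampler(vary=vary)

    # evaluate a toy "code" at the current sampling points, shape (n_s, 1)
    points = sampler.get_Delaunay().points
    v = np.sin(np.pi * points[:, 0:1]) * points[:, 1:2]

    # nearest-neighbour stencils and a first-order (linear) surrogate evaluation
    S_j = sampler.compute_stencil_j()
    p_j = np.ones(sampler.n_elements, dtype=int)
    print(sampler.surrogate(np.array([0.3, 0.7]), S_j, p_j, v))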
def compute_eps_bar_j(self, p_j, prob_j):
1067    def compute_eps_bar_j(self, p_j, prob_j):
1068        """
1069        Compute the geometric refinement measure eps_bar_j for all elements.
1070        Elements with the largest values will be selected for refinement.
1071
1072        Parameters
1073        ----------
1074        p_j : array, shape (n_e,)
1075            The polynomial order of each simplex element.
1076        prob_j : array, shape (n_e,)
1077            The probability of each simplex element.
1078
1079        Returns
1080        -------
1081        eps_bar_j : array, shape (n_e,)
1082            The geometric refinement measure.
1083        vol_j : array, shape (n_e,)
1084            The volume of each simplex element.
1085
1086        """
1087
1088        n_e = self.tri.nsimplex
1089        eps_bar_j = np.zeros(n_e)
1090        n_xi = self.tri.ndim
1091        vol_j = self.compute_vol()
1092        vol = np.sum(vol_j)
1093
1094        for j in range(n_e):
1095            O_j = (p_j[j] + 1.0) / n_xi
1096            eps_bar_j[j] = prob_j[j] * (vol_j[j] / vol) ** (2 * O_j)
1097
1098        return eps_bar_j, vol_j

Compute the geometric refinement measure eps_bar_j for all elements. Elements with the largest values will be selected for refinement.

Parameters
  • p_j (array, shape (n_e,)): The polynomial order of each simplex element.
  • prob_j (array, shape (n_e,)): The probability of each simplex element.
Returns
  • eps_bar_j (array, shape (n_e,)): The geometric refinement measure.
  • vol_j (array, shape (n_e,)): The volume of each simplex element.
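The measure combines the element probability with a volume term, eps_bar_j = prob_j * (vol_j / vol)^(2 * O_j) with O_j = (p_j + 1) / n_xi, so large, probable, low-order elements are refined first. A small numerical example with made-up values:

    import numpy as np

    n_xi = 2
    p_j = np.array([1, 2, 3])            # polynomial order per element
    prob_j = np.array([0.2, 0.5, 0.3])   # probability per element
    vol_j = np.array([0.25, 0.5, 0.25])  # volume per element
    O_j = (p_j + 1.0) / n_xi
    eps_bar_j = prob_j * (vol_j / vol_j.sum()) ** (2 * O_j)
    print('eps_bar_j =', eps_bar_j, '-> refine element', np.argmax(eps_bar_j))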
def find_boundary_simplices(self):
1100    def find_boundary_simplices(self):
1101        """
1102        Find the simplices that are on the boundary of the hypercube.
1103
1104        Returns
1105        -------
1106        idx : array
1107            Indices of the boundary simplices.
1108
1109        """
1110
1111        idx = (self.tri.neighbors == -1).nonzero()[0]
1112        return idx

Find the simplices that are on the boundary of the hypercube.

Returns
  • idx (array): Indices of the boundary simplices.
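SciPy marks a missing neighbour with -1, so the same test works on any Delaunay triangulation (illustration with random points):

    import numpy as np
    from scipy.spatial import Delaunay

    np.random.seed(0)
    tri = Delaunay(np.random.rand(12, 2))
    # rows of tri.neighbors containing -1 belong to simplices on the hull
    boundary = np.unique((tri.neighbors == -1).nonzero()[0])
    print('boundary simplices:', boundary)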
def update_Delaunay(self, new_points):
1114    def update_Delaunay(self, new_points):
1115        """
1116        Update the Delaunay triangulation with P new points.
1117
1118        Parameters
1119        ----------
1120        new_points : array, shape (P, n_xi)
1121            P new n_xi-dimensional points.
1122
1123        Returns
1124        -------
1125        None.
1126
1127        """
1128
1129        xi_k_jl = np.append(self.tri.points, new_points, 0)
1130        if self.n_xi > 1:
1131            self.tri = Delaunay(xi_k_jl)
1132        else:
1133            self.tri = Tri1D(xi_k_jl)
1134
1135        self._n_samples = self.tri.npoints

Update the Delaunay triangulation with P new points.

Parameters
  • new_points (array, shape (P, n_xi)): P new n_xi-dimensional points.
Returns
  • None.
def get_Delaunay(self):
1137    def get_Delaunay(self):
1138        """
1139        Return the SciPy Delaunay triangulation.
1140        """
1141        return self.tri

Return the SciPy Delaunay triangulation.

def is_finite(self):
1143    def is_finite(self):
1144        return True
n_samples
1146    @property
1147    def n_samples(self):
1148        """
1149        Returns the number of code samples.
1150        """
1151        return self._n_samples

Returns the number of code samples.

n_dimensions
1153    @property
1154    def n_dimensions(self):
1155        """
1156        Returns the number of uncertain random variables
1157        """
1158        return self.n_xi

Returns the number of uncertain random variables

n_elements
1160    @property
1161    def n_elements(self):
1162        """
1163        Returns the number of simplex elements
1164        """
1165        return self.tri.nsimplex

Returns the number of simplex elements

def save_state(self, filename):
1179    def save_state(self, filename):
1180        """
1181        Save the state of the sampler to a pickle file.
1182
1183        Parameters
1184        ----------
1185        filename : string
1186            File name.
1187
1188        Returns
1189        -------
1190        None.
1191
1192        """
1193        print("Saving sampler state to %s" % filename)
1194        with open(filename, 'wb') as fp:
1195            pickle.dump(self.__dict__, fp)

Save the state of the sampler to a pickle file.

Parameters
  • filename (string): File name.
Returns
  • None.
def load_state(self, filename):
1197    def load_state(self, filename):
1198        """
1199        Load the state of the sampler from a pickle file.
1200
1201        Parameters
1202        ----------
1203        filename : string
1204            File name.
1205
1206        Returns
1207        -------
1208        None.
1209
1210        """
1211        print("Loading sampler state from %s" % filename)
1212        with open(filename, 'rb') as fp:
1213            self.__dict__ = pickle.load(fp)

Load the state of the sampler from a pickle file.

Parameters
  • filename (string): File name.
Returns
  • None.
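A short round trip through a pickle file (hedged sketch: it assumes chaospy is installed and uses an arbitrary file name chosen for this example):

    import chaospy as cp
    from easyvvuq.sampling.simplex_stochastic_collocation import SSCSampler

    vary = {'x1': cp.Uniform(0, 1), 'x2': cp.Uniform(0, 1)}
    sampler = SSCSampler(vary=vary)
    sampler.save_state('ssc_state.pickle')

    # later, or in a fresh process: restore the triangulation and counters
    restored = SSCSampler(vary=vary)
    restored.load_state('ssc_state.pickle')
    print(restored.n_samples, restored.n_elements)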
sampler_name = 'ssc_sampler'
def DAFSILAS(A, b, print_message=False):
1216def DAFSILAS(A, b, print_message=False):
1217    """
1218    Direct Algorithm For Solving Ill-conditioned Linear Algebraic Systems.
1219
1220    Solves the linear system Ax = b when A is ill-conditioned.
1221
1222    Solves for x in the non-null subspace of the solution as described in
1223    the reference below. This method utilizes Gauss–Jordan elimination with
1224    complete pivoting to identify the null subspace of a (almost) singular
1225    matrix.
1226
1227    X. J. Xue, Kozaczek, K. J., Kurtzl, S. K., & Kurtz, D. S. (2000). A direct
1228    algorithm for solving ill-conditioned linear algebraic systems.
1229    Adv. X-Ray Anal, 42.
1230    """
1231
1232    # The matrix A' as defined in Xue
1233    b = b.reshape(b.size)
1234    Ap = np.zeros([A.shape[0], 2 * A.shape[0] + 1])
1235    Ap[:, 0:A.shape[0]] = np.copy(A)
1236    Ap[:, A.shape[0]] = np.copy(b)
1237    Ap[:, A.shape[0] + 1:] = np.eye(A.shape[0])
1238    n, m = Ap.shape
1239
1240    # permutation matrix
1241    P = np.eye(n)
1242
1243    # the ill-condition control parameter
1244    # epsilon = np.finfo(np.float64).eps
1245    epsilon = 10**-14
1246
1247    for i in range(n - 1):
1248        # calc sub matrix Ai
1249        Ai = np.copy(Ap[i:n, i:n])
1250
1251        # find the complete pivot in sub matrix Ai
1252        api = np.max(np.abs(Ai))
1253
1254        if api == 0:
1255            break
1256
1257        # find the location of the complete pivot in Ai
1258        row, col = np.unravel_index(np.abs(Ai).argmax(), Ai.shape)
1259
1260        # interchange rows and columns to exchange position of api and aii
1261        tmp = np.copy(Ap[i, :])
1262        Ap[i, :] = np.copy(Ap[i + row, :])
1263        Ap[i + row, :] = tmp
1264
1265        tmp = np.copy(Ap[:, i])
1266        Ap[:, i] = np.copy(Ap[:, i + col])
1267        Ap[:, i + col] = tmp
1268
1269        # Also interchange the entries in b
1270        # tmp = A[i, n]
1271        # A[i, n] = A[i+col, n]Ap[i+1+j, i:m]
1272        # A[i+col, n] = tmp
1273
1274        # keep track of column switches via a series of permutation matrices P =
1275        # P1*P2*...*Pi*...*Pn ==> at each iteration x = P*xi
1276        Pi = np.eye(n)
1277        tmp = np.copy(Pi[i, :])
1278        Pi[i, :] = np.copy(Pi[i + col, :])
1279        Pi[i + col, :] = tmp
1280        P = np.dot(P, Pi)
1281
1282        # Calculate multipliers
1283        if Ai[row, col] < 0:
1284            api = api * -1.  # sign is important in multipliers
1285
1286        M = Ap[i + 1:n, i] / np.double(api)
1287
1288        # start row reduction
1289        for j in range(M.size):
1290            Ap[i + 1 + j, i:m] = Ap[i + 1 + j, i:m] - M[j] * Ap[i, i:m]
1291
1292    # the largest complete pivot
1293    eta = np.max(np.abs(np.diag(Ap))) * 1.0
1294    # test if |aii/nc| <= epsilon
1295    idx = (np.abs(np.diag(Ap) / eta) <= epsilon).nonzero()[0]
1296
1297    # Perform zeroing operation if necessary
1298    if idx.size > 0:
1299        nullity = idx.size
1300        Arange = Ap[0:n - nullity, 0:n - nullity]
1301        if print_message:
1302            print('Matrix is ill-conditioned, performing zeroing operation')
1303            print('nullity = ' + str(nullity) + ', rank = ' + str(n - nullity) +
1304                  ', cond(A) = ' + str(np.linalg.cond(A)) +
1305                  ', cond(Arange) = ' + str(np.linalg.cond(Arange)))
1306
1307        # ajj = 1, aij = 0 for j = i...n
1308        Ap[idx[0]:n, idx[0]:n] = np.eye(nullity)
1309        # bj = 0
1310        Ap[idx[0]:n, n] = 0
1311        # ejj = 1, eij = 0
1312        Ap[idx[0]:n, idx[0] + n + 1:m] = np.eye(nullity)
1313
1314    # Back substitution
1315    for i in range(n, 0, -1):
1316        Ai = Ap[0:i, :]
1317
1318        # Calculate multipliers
1319        M = Ai[0:i - 1, i - 1] / np.double(Ai[i - 1, i - 1])
1320
1321        # start back substitution
1322        for j in range(M.size):
1323            Ai[j, :] = Ai[j, :] - M[j] * Ai[i - 1, :]
1324
1325        # store result in A
1326        Ap[0:i, :] = Ai
1327
1328    # turn A into eye(n)
1329    D = (1. / np.diag(Ap)).reshape([n, 1])
1330    Ap = np.multiply(D, Ap)
1331    # Calculated solution
1332    return np.dot(P, Ap[:, n]).reshape([n, 1])

Direct Algorithm For Solving Ill-conditioned Linear Algebraic Systems.

Solves the linear system Ax = b when A is ill-conditioned.

Solves for x in the non-null subspace of the solution as described in the reference below. This method utilizes Gauss–Jordan elimination with complete pivoting to identify the null subspace of a (almost) singular matrix.

X. J. Xue, Kozaczek, K. J., Kurtzl, S. K., & Kurtz, D. S. (2000). A direct algorithm for solving ill-conditioned linear algebraic systems. Adv. X-Ray Anal, 42.
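A small illustration of what DAFSILAS is for: the third row of A duplicates the first, so the matrix is rank-deficient and np.linalg.solve would typically raise, while DAFSILAS still returns a solution in the non-null subspace (set print_message=True to report the detected nullity and rank).

    import numpy as np
    from easyvvuq.sampling.simplex_stochastic_collocation import DAFSILAS

    A = np.array([[2., 1., 0.],
                  [1., 3., 1.],
                  [2., 1., 0.]])        # rank-deficient: row 3 equals row 1
    b = np.array([[1.], [2.], [1.]])    # consistent right-hand side
    x = DAFSILAS(A, b)
    print('x =', x.ravel())
    print('A @ x =', (A @ x).ravel(), ' b =', b.ravel())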

class Tri1D:
1335class Tri1D:
1336    """
1337    1D "triangulation" that mimics the following SciPy Delaunay properties:
1338        * ndim
1339        * points
1340        * npoints
1341        * nsimplex
1342        * simplices
1343        * neighbors
1344        * the find_simplex subroutine
1345    """
1346
1347    def __init__(self, points):
1348        """
1349        Create a 1D Triangulation object that can be used by the SSCSampler
1350        in the same way as the SciPy Delaunay triangulation.
1351
1352        Parameters
1353        ----------
1354        points : array, shape (n_s, )
1355            A 1D array of nodes.
1356
1357        Returns
1358        -------
1359        None.
1360
1361        """
1362        self.ndim = 1
1363        self.points = points
1364        self.npoints = points.size
1365        self.nsimplex = points.size - 1
1366
1367        points_sorted = np.sort(self.points.reshape(self.npoints))
1368        self.simplices = np.zeros([self.nsimplex, 2])
1369        for i in range(self.nsimplex):
1370            self.simplices[i, 0] = (self.points == points_sorted[i]).nonzero()[0]
1371            self.simplices[i, 1] = (self.points == points_sorted[i + 1]).nonzero()[0]
1372        self.simplices = self.simplices.astype('int')
1373
1374        self.neighbors = np.zeros([self.nsimplex, 2])
1375        for i in range(self.nsimplex):
1376            self.neighbors[i, 0] = i - 1
1377            self.neighbors[i, 1] = i + 1
1378        self.neighbors[-1, 1] = -1
1379        self.neighbors = self.neighbors.astype('int')
1380
1381    def find_simplex(self, xi):
1382        """
1383        Find the simplex indices of nodes xi
1384
1385        Parameters
1386        ----------
1387        xi : array, shape (S,)
1388            An array of S 1D points.
1389
1390        Returns
1391        -------
1392        array
1393            An array containing the simplex indices of points xi.
1394
1395        """
1396        Idx = np.zeros(xi.shape[0])
1397        points_sorted = np.sort(self.points.reshape(self.npoints))
1398
1399        for i in range(xi.shape[0]):
1400            idx = (points_sorted < xi[i]).argmin() - 1
1401            # if xi = 0 idx will be -1
1402            if idx == -1:
1403                Idx[i] = 0
1404            else:
1405                Idx[i] = idx
1406
1407        return Idx.astype('int')

1D "triangulation" that mimics the following SciPy Delaunay properties: * ndim * points * npoints * nsimplex * simplices * neighbours * the find_simplex subroutine

Tri1D(points)
1347    def __init__(self, points):
1348        """
1349        Create a 1D Triangulation object that can be used by the SSCSampler
1350        in the same way as the SciPy Delaunay triangulation.
1351
1352        Parameters
1353        ----------
1354        points : array, shape (n_s, )
1355            A 1D array of nodes.
1356
1357        Returns
1358        -------
1359        None.
1360
1361        """
1362        self.ndim = 1
1363        self.points = points
1364        self.npoints = points.size
1365        self.nsimplex = points.size - 1
1366
1367        points_sorted = np.sort(self.points.reshape(self.npoints))
1368        self.simplices = np.zeros([self.nsimplex, 2])
1369        for i in range(self.nsimplex):
1370            self.simplices[i, 0] = (self.points == points_sorted[i]).nonzero()[0]
1371            self.simplices[i, 1] = (self.points == points_sorted[i + 1]).nonzero()[0]
1372        self.simplices = self.simplices.astype('int')
1373
1374        self.neighbors = np.zeros([self.nsimplex, 2])
1375        for i in range(self.nsimplex):
1376            self.neighbors[i, 0] = i - 1
1377            self.neighbors[i, 1] = i + 1
1378        self.neighbors[-1, 1] = -1
1379        self.neighbors = self.neighbors.astype('int')

Create a 1D Triangulation object that can be used by the SSCSampler in the same way as the SciPy Delaunay triangulation.

Parameters
  • points (array, shape (n_s, )): A 1D array of nodes.
Returns
  • None.
ndim
points
npoints
nsimplex
simplices
neighbors
def find_simplex(self, xi):
1381    def find_simplex(self, xi):
1382        """
1383        Find the simplex indices of nodes xi
1384
1385        Parameters
1386        ----------
1387        xi : array, shape (S,)
1388            An array of S 1D points.
1389
1390        Returns
1391        -------
1392        array
1393            An array containing the simplex indices of points xi.
1394
1395        """
1396        Idx = np.zeros(xi.shape[0])
1397        points_sorted = np.sort(self.points.reshape(self.npoints))
1398
1399        for i in range(xi.shape[0]):
1400            idx = (points_sorted < xi[i]).argmin() - 1
1401            # if xi = 0 idx will be -1
1402            if idx == -1:
1403                Idx[i] = 0
1404            else:
1405                Idx[i] = idx
1406
1407        return Idx.astype('int')

Find the simplex indices of nodes xi

Parameters
  • xi (array, shape (S,)): An array of S 1D points.
Returns
  • array: An array containing the simplex indices of points xi.
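A minimal Tri1D usage example: five 1-D nodes give four "simplices" (intervals), and find_simplex returns the interval index of each query point.

    import numpy as np
    from easyvvuq.sampling.simplex_stochastic_collocation import Tri1D

    points = np.array([0.0, 0.25, 0.5, 0.75, 1.0]).reshape([5, 1])
    tri = Tri1D(points)
    print(tri.nsimplex, tri.simplices.tolist())          # 4 intervals
    print(tri.find_simplex(np.array([[0.1], [0.6], [0.99]])))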