From 2f86d84193cb4825b547db809ff106972c2d4b4c Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 13 Jun 2023 08:58:47 +0200 Subject: [PATCH 001/167] add getting sgtarted in the docs, and add some docstrings examples of code --- pyPLNmodels/models.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 91b01402..0515e053 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -2248,12 +2248,19 @@ class PlnPCA(_model): Examples -------- - >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> from pyPLNmodels import PlnPCA, get_real_count_data, get_simulation_parameters, sample_pln >>> counts, labels = get_real_count_data(return_labels = True) >>> data = {"counts": counts} >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5]) >>> print(pca) >>> pca.viz(colors = labels) + + >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) + >>> counts = sample_pln(plnparam) + >>> data = {"counts": plnparam.counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> plnpca = PlnPCA.from_formula("counts ~ 0 + cov", data = data, rank = 5) + >>> plnpca.fit() + >>> print(plnpca) """ def __init__( @@ -2490,18 +2497,13 @@ class PlnPCA(_model): def _get_max_components(self) -> int: """ - Get the maximum number of components. - - Returns - ------- - int - The maximum number of components. + Get the maximum number of components possible by the model. """ return self._rank def _pring_beginning_message(self): """ - Print the beginning message. + Print the beginning message when fitted. """ print("-" * NB_CHARACTERS_FOR_NICE_PLOT) print(f"Fitting a PlnPCAcollection model with {self._rank} components") -- GitLab From e3fdcdac79773ef8d1ae4bf031683f05965220e5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 13 Jun 2023 12:06:38 +0200 Subject: [PATCH 002/167] add plot_expected_vs_true. Works only for Pln, not yet PlnPCA. change defaults of get_simulation_parameters, add a add_const = True by default. --- pyPLNmodels/_utils.py | 33 ++++++++++++++++++++++++++------- pyPLNmodels/models.py | 28 ++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 11 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index e0179bdb..c4f8a73e 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -462,7 +462,7 @@ def _get_simulation_components(dim: int, rank: int) -> torch.Tensor: def _get_simulation_coef_cov_offsets( - n_samples: int, nb_cov: int, dim: int + n_samples: int, nb_cov: int, dim: int, add_const: bool ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """ Get offsets, covariance coefficients with right shapes. @@ -472,9 +472,14 @@ def _get_simulation_coef_cov_offsets( n_samples : int Number of samples. nb_cov : int - Number of covariates. If 0, covariates will be None. + Number of covariates. If 0, covariates will be None, + unless add_const is True. + If add_const is True, then there will be nb_cov+1 + covariates as the intercept can be seen as a covariates. dim : int Dimension required of the data. + add_const : bool, optional + If True, will add a vector of ones in the covariates. 
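+        For instance (an illustrative sketch of the expected shapes, not an
+        exhaustive contract): with ``n_samples=100``, ``nb_cov=2``, ``dim=25``
+        and ``add_const=True``, the returned tensors should satisfy::
+
+            coef.shape       == (3, 25)   # nb_cov + 1 rows because of the intercept
+            covariates.shape == (100, 3)
+            offsets.shape    == (100, 25)
+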
Returns ------- @@ -484,7 +489,10 @@ def _get_simulation_coef_cov_offsets( prev_state = torch.random.get_rng_state() torch.random.manual_seed(0) if nb_cov == 0: - covariates = None + if add_const is True: + covariates = torch.ones(n_samples, 1) + else: + covariates = None else: covariates = torch.randint( low=-1, @@ -493,7 +501,12 @@ def _get_simulation_coef_cov_offsets( dtype=torch.float64, device="cpu", ) - coef = torch.randn(nb_cov, dim, device="cpu") + if add_const is True: + covariates = torch.cat((covariates, torch.ones(n_samples, 1)), axis=1) + if covariates is None: + coef = None + else: + coef = torch.randn(covariates.shape[1], dim, device="cpu") offsets = torch.randint( low=0, high=2, size=(n_samples, dim), dtype=torch.float64, device="cpu" ) @@ -607,7 +620,7 @@ def _check_two_dimensions_are_equal( def get_simulation_parameters( - n_samples: int = 100, dim: int = 25, nb_cov: int = 1, rank: int = 5 + n_samples: int = 100, dim: int = 25, nb_cov: int = 1, rank: int = 5, add_const=True ) -> PlnParameters: """ Generate simulation parameters for a Poisson-lognormal model. @@ -619,9 +632,13 @@ def get_simulation_parameters( dim : int, optional The dimension of the data, by default 25. nb_cov : int, optional - The number of covariates, by default 1. + The number of covariates, by default 1. If add_const is True, + then there will be nb_cov+1 covariates as the intercept can be seen + as a covariates. rank : int, optional The rank of the data components, by default 5. + add_const : bool, optional + If True, will add a vector of ones in the covariates. Returns ------- @@ -629,7 +646,9 @@ def get_simulation_parameters( The generated simulation parameters. """ - coef, covariates, offsets = _get_simulation_coef_cov_offsets(n_samples, nb_cov, dim) + coef, covariates, offsets = _get_simulation_coef_cov_offsets( + n_samples, nb_cov, dim, add_const + ) components = _get_simulation_components(dim, rank) return PlnParameters(components, coef, covariates, offsets) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 0515e053..335d7ae9 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1248,6 +1248,14 @@ class _model(ABC): ax = plt.gca() predictions = self._counts_predictions().ravel().detach() sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors, ax=ax) + max_y = int(torch.max(self.counts.ravel()).item()) + y = np.linspace(0, max_y, max_y) + ax.plot(y, y) + ax.set_yscale("log") + ax.set_xscale("log") + ax.set_ylabel("Predicted values") + ax.set_xlabel("Counts") + ax.legend() return ax @@ -1317,10 +1325,7 @@ class Pln(_model): def _counts_predictions(self): return torch.exp( - self._offsets - + self._covariates @ self._coef - + self._latent_mean - + 1 / 2 * self._latent_sqrt_var**2 + self._offsets + self._latent_mean + 1 / 2 * self._latent_sqrt_var**2 ) def _smart_init_latent_parameters(self): @@ -2403,6 +2408,21 @@ class PlnPCA(_model): """ return self._latent_sqrt_var**2 + def _counts_predictions(self): + # covariance_a_posteriori = self.latent_mean.unsqueeze(1)*self._components.unsqueeze(0) + # covariance_a_posteriori = covariance_a_posteriori @ self._components.T.unsqueeze(0) + covariance_a_posteriori = torch.sum( + (self._components**2).unsqueeze(0) + * (self.latent_sqrt_var**2).unsqueeze(1), + axis=2, + ) + # print('latent_var', self.latent_variables) + # print('cov:', covariance_a_posteriori) + # x + return torch.exp( + self._offsets + self.latent_variables + 1 / 2 * covariance_a_posteriori + ) + @latent_mean.setter @_array2tensor def latent_mean(self, 
latent_mean: torch.Tensor): -- GitLab From 1135888d5c98b07df6d03be709bc7e3cad3d7758 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 13 Jun 2023 15:52:57 +0200 Subject: [PATCH 003/167] add the plot_expected_vs_true funciton, and works fine for Pln and PlnPCA --- pyPLNmodels/models.py | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 335d7ae9..b4c57420 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -733,7 +733,7 @@ class _model(ABC): axes : numpy.ndarray, optional The axes to plot on. If None, a new figure will be created. Defaults to None. """ - print("Likelihood:", -self.loglike) + print("Likelihood:", self.loglike) if self._fitted is False: nb_axes = 1 else: @@ -1242,15 +1242,34 @@ class _model(ABC): return "" def plot_expected_vs_true(self, ax=None, colors=None): + """ + Plot the predicted value of the counts against the counts. + + Parameters + ---------- + ax : Optional[Any], optional + The matplotlib axis to use. If None, the current axis is used, by default None. + + colors : Optional[Any], optional + The colors to use for plotting, by default None. + + Returns + ------- + Any + The matplotlib axis. + """ if self._fitted is None: raise RuntimeError("Please fit the model before.") if ax is None: ax = plt.gca() predictions = self._counts_predictions().ravel().detach() - sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors, ax=ax) + print("colors:", np.array(colors).ravel().shape) + print("pred", predictions.shape) + colors = np.repeat(np.array(colors), repeats=self.dim) + sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors.ravel(), ax=ax) max_y = int(torch.max(self.counts.ravel()).item()) y = np.linspace(0, max_y, max_y) - ax.plot(y, y) + ax.plot(y, y, c="red") ax.set_yscale("log") ax.set_xscale("log") ax.set_ylabel("Predicted values") @@ -2409,18 +2428,16 @@ class PlnPCA(_model): return self._latent_sqrt_var**2 def _counts_predictions(self): - # covariance_a_posteriori = self.latent_mean.unsqueeze(1)*self._components.unsqueeze(0) - # covariance_a_posteriori = covariance_a_posteriori @ self._components.T.unsqueeze(0) covariance_a_posteriori = torch.sum( (self._components**2).unsqueeze(0) * (self.latent_sqrt_var**2).unsqueeze(1), axis=2, ) - # print('latent_var', self.latent_variables) - # print('cov:', covariance_a_posteriori) - # x return torch.exp( - self._offsets + self.latent_variables + 1 / 2 * covariance_a_posteriori + self._offsets + + self.covariates @ self.coef + + self.latent_variables + + 1 / 2 * covariance_a_posteriori ) @latent_mean.setter -- GitLab From 662204d4f019be271e7624588bd9aae790817bb5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 13 Jun 2023 15:55:25 +0200 Subject: [PATCH 004/167] add the plot_expected_vs_true inside the useful properties --- pyPLNmodels/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index b4c57420..ed4708ed 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1163,7 +1163,7 @@ class _model(ABC): str The string representation of the useful methods. 
""" - return ".show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()" + return ".show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()" def sigma(self): """ -- GitLab From bb61ac16bc8f448369c68d43dc9fd40d72c6f6fa Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 13 Jun 2023 15:55:56 +0200 Subject: [PATCH 005/167] test for pln.plot_expected_vs_true. Should not pass. --- tests/test_viz.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_viz.py b/tests/test_viz.py index 13510d49..da9f05b4 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -81,3 +81,9 @@ def test_fail_plot_pca_correlation_graph_without_names(pln): @filter_models(["Pln", "PlnPCA"]) def test_plot_pca_correlation_graph_without_names(pln): pln.plot_pca_correlation_graph([f"var_{i}" for i in range(3)], [0, 1, 2]) + + +@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_sim_pln"]) +@filter_models(["Pln", "PlnPCA"]) +def test_expected_vs_true(pln): + pln.plot_expected_vs_true() -- GitLab From 5c8c2dc75e25f7125e4e36a4685c6afaba4c69f6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 20 Jun 2023 17:52:20 +0200 Subject: [PATCH 006/167] add decorator for docstrings (example and params). --- pyPLNmodels/models.py | 55 +++++++++++++++++++++++++------------------ 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index ed4708ed..f2fe18ac 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod import warnings import os from typing import Optional, Dict, List, Type, Any, Iterable +import textwrap import pandas as pd import torch @@ -54,7 +55,7 @@ class _model(ABC): Base class for all the Pln models. Should be inherited. """ - _WINDOW = 15 + _WINDOW: int = 15 _counts: torch.Tensor _covariates: torch.Tensor _offsets: torch.Tensor @@ -66,6 +67,7 @@ class _model(ABC): def __init__( self, counts: torch.Tensor, + *, covariates: Optional[torch.Tensor] = None, offsets: Optional[torch.Tensor] = None, offsets_formula: str = "logsum", @@ -2263,6 +2265,24 @@ class PlnPCAcollection: return ".BIC, .AIC, .loglikes" +def add_doc(parent_class, *, params=None, example=None): + def wrapper(fun): + doc = getattr(parent_class, fun.__name__).__doc__ + if doc is None: + doc = "" + doc = textwrap.dedent(doc).rstrip(" \n\r") + if params is not None: + doc += textwrap.dedent(params.rstrip(" \n\r")) + if example is not None: + doc += "\n\nExamples" + doc += "\n--------" + doc += textwrap.dedent(example) + fun.__doc__ = doc + return fun + + return wrapper + + # Here, setting the value for each key in _dict_parameters class PlnPCA(_model): _NAME: str = "PlnPCA" @@ -2287,9 +2307,20 @@ class PlnPCA(_model): >>> print(plnpca) """ + @add_doc( + _model, + params=""" + rank : int, optional + The rank of the approximation, by default 5. + """, + example=""" + totopassword mange des pates. + """, + ) def __init__( self, counts: torch.Tensor, + *, covariates: Optional[torch.Tensor] = None, offsets: Optional[torch.Tensor] = None, offsets_formula: str = "logsum", @@ -2298,28 +2329,6 @@ class PlnPCA(_model): take_log_offsets: bool = False, add_const: bool = True, ): - """ - Initialize the PlnPCA object. 
- - Parameters - ---------- - counts : torch.Tensor - The counts tensor. - covariates : torch.Tensor, optional - The covariates tensor, by default None. - offsets : torch.Tensor, optional - The offsets tensor, by default None. - offsets_formula : str, optional - The offsets formula, by default "logsum". - rank : int, optional - The rank of the approximation, by default 5. - dict_initialization : Dict[str, torch.Tensor], optional - The dictionary for initialization, by default None. - take_log_offsets : bool, optional - Whether to take the log of offsets. Defaults to False. - add_const: bool, optional - Whether to add a column of one in the covariates. Defaults to True. - """ self._rank = rank super().__init__( counts=counts, -- GitLab From 61e234b6d1c195827f0f2a1a2fdef95497ab730a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 22 Jun 2023 19:54:39 +0200 Subject: [PATCH 007/167] added keywords only and docs inherited with _add_doc (add examples and see_also) --- pyPLNmodels/_utils.py | 32 +++- pyPLNmodels/models.py | 352 ++++++++++++++++++++++++++++-------------- 2 files changed, 266 insertions(+), 118 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index c4f8a73e..ab09750a 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -1,6 +1,10 @@ import os import math import warnings +import textwrap +from typing import Optional, Dict, Any, Union, Tuple, List +import pkg_resources + import numpy as np import pandas as pd import torch @@ -9,8 +13,6 @@ from matplotlib import transforms from matplotlib.patches import Ellipse import matplotlib.pyplot as plt from patsy import dmatrices -from typing import Optional, Dict, Any, Union, Tuple, List -import pkg_resources torch.set_default_dtype(torch.float64) @@ -993,3 +995,29 @@ def _handle_data( ) _check_data_shape(counts, covariates, offsets) return counts, covariates, offsets, column_counts + + +def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also=None): + def wrapper(fun): + doc = getattr(parent_class, fun.__name__).__doc__ + if doc is None: + doc = "" + doc = textwrap.dedent(doc).rstrip(" \n\r") + if params is not None: + doc += textwrap.dedent(params.rstrip(" \n\r")) + if returns is not None: + doc += "\n\nReturns" + doc += "\n-------" + doc += textwrap.dedent(returns) + if see_also is not None: + doc += "\n\nSee also" + doc += "\n--------" + doc += textwrap.dedent(see_also) + if example is not None: + doc += "\n\nExamples" + doc += "\n--------" + doc += textwrap.dedent(example) + fun.__doc__ = doc + return fun + + return wrapper diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index f2fe18ac..52e4fcb7 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3,7 +3,6 @@ from abc import ABC, abstractmethod import warnings import os from typing import Optional, Dict, List, Type, Any, Iterable -import textwrap import pandas as pd import torch @@ -30,6 +29,7 @@ from ._utils import ( _get_dict_initialization, _array2tensor, _handle_data, + _add_doc, ) from ._initialization import ( @@ -82,17 +82,17 @@ class _model(ABC): ---------- counts : torch.Tensor The count data. - covariates : torch.Tensor, optional + covariates : torch.Tensor, optional(keyword-only) The covariate data. Defaults to None. - offsets : torch.Tensor, optional + offsets : torch.Tensor, optional(keyword-only) The offsets data. Defaults to None. - offsets_formula : str, optional + offsets_formula : str, optional(keyword-only) The formula for offsets. 
Defaults to "logsum". - dict_initialization : dict, optional + dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. - take_log_offsets : bool, optional + take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. - add_const: bool, optional + add_const: bool, optional(keyword-only) Whether to add a column of one in the covariates. Defaults to True. """ ( @@ -113,13 +113,13 @@ class _model(ABC): cls, formula: str, data: dict, + *, offsets_formula: str = "logsum", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, ): """ Create a model instance from a formula and data. - See also :func:`~pyPLNmodels.PlnPCAcollection.__init__` Parameters ---------- @@ -127,26 +127,21 @@ class _model(ABC): The formula. data : dict The data dictionary. - offsets_formula : str, optional + offsets_formula : str, optional(keyword-only) The formula for offsets. Defaults to "logsum". - dict_initialization : dict, optional + dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. - take_log_offsets : bool, optional + take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. - - Returns - ------- - _model - The initialized _model instance. """ counts, covariates, offsets = _extract_data_from_formula(formula, data) return cls( counts, - covariates, - offsets, - offsets_formula, - dict_initialization, - take_log_offsets, + covariates=covariates, + offsets=offsets, + offsets_formula=offsets_formula, + dict_initialization=dict_initialization, + take_log_offsets=take_log_offsets, add_const=False, ) @@ -341,6 +336,7 @@ class _model(ABC): def fit( self, nb_max_iteration: int = 50000, + *, lr: float = 0.01, class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, tol: float = 1e-3, @@ -348,30 +344,22 @@ class _model(ABC): verbose: bool = False, ): """ - Fit the model. + Fit the model. The lower tol, the more accurate the model. Parameters ---------- nb_max_iteration : int, optional The maximum number of iterations. Defaults to 50000. - lr : float, optional + lr : float, optional(keyword-only) The learning rate. Defaults to 0.01. class_optimizer : torch.optim.Optimizer, optional The optimizer class. Defaults to torch.optim.Rprop. - tol : float, optional + tol : float, optional(keyword-only) The tolerance for convergence. Defaults to 1e-3. - do_smart_init : bool, optional + do_smart_init : bool, optional(keyword-only) Whether to perform smart initialization. Defaults to True. - verbose : bool, optional + verbose : bool, optional(keyword-only) Whether to print training progress. Defaults to False. - .. 
code-block:: python - Examples - -------- - >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> pln = Pln(counts,add_const = True) - >>> pln.fit() - >>> print(pln) """ self._pring_beginning_message() self._beginning_time = time.time() @@ -1303,6 +1291,106 @@ class Pln(_model): """ + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts= get_real_count_data() + >>> pln = Pln(counts, add_const = True) + >>> print(pln) + """, + returns=""" + Pln + """, + see_also=""" + :func:`pyPLNmodels.Pln.from_formula` + """, + ) + def __init__( + self, + counts: torch.Tensor, + *, + covariates: Optional[torch.Tensor] = None, + offsets: Optional[torch.Tensor] = None, + offsets_formula: str = "logsum", + dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + take_log_offsets: bool = False, + add_const: bool = True, + ): + super().__init__( + counts=counts, + covariates=covariates, + offsets=offsets, + offsets_formula=offsets_formula, + dict_initialization=dict_initialization, + take_log_offsets=take_log_offsets, + add_const=add_const, + ) + + @classmethod + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data = data) + """, + returns=""" + Pln + """, + see_also=""" + :class:`pyPLNmodels.Pln` + :func:`pyPLNmodels.Pln.__init__` + """, + ) + def from_formula( + cls, + formula: str, + data: Any, + offsets_formula: str = "logsum", + dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + ): + counts, covariates, offsets = _extract_data_from_formula(formula, data) + return cls( + counts, + covariates=covariates, + offsets=offsets, + offsets_formula=offsets_formula, + dict_initialization=dict_initialization, + take_log_offsets=take_log_offsets, + add_const=add_const, + ) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() + >>> print(pln) + """, + ) + def fit( + self, + nb_max_iteration: int = 50000, + *, + lr: float = 0.01, + class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, + tol: float = 1e-3, + do_smart_init: bool = True, + verbose: bool = False, + ): + super().fit( + nb_max_iteration, + lr=lr, + class_optimizer=class_optimizer, + tol=tol, + do_smart_init=do_smart_init, + verbose=verbose, + ) + @property def _description(self): """ @@ -1539,6 +1627,7 @@ class PlnPCAcollection: def __init__( self, counts: torch.Tensor, + *, covariates: Optional[torch.Tensor] = None, offsets: Optional[torch.Tensor] = None, offsets_formula: str = "logsum", @@ -1554,20 +1643,27 @@ class PlnPCAcollection: ---------- counts : torch.Tensor The counts. - covariates : torch.Tensor, optional + covariates : torch.Tensor, optional(keyword-only) The covariates, by default None. - offsets : torch.Tensor, optional + offsets : torch.Tensor, optional(keyword-only) The offsets, by default None. - offsets_formula : str, optional + offsets_formula : str, optional(keyword-only) The formula for offsets, by default "logsum". - ranks : Iterable[int], optional + ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). - dict_of_dict_initialization : dict, optional + dict_of_dict_initialization : dict, optional(keyword-only) The dictionary of initialization, by default None. 
- take_log_offsets : bool, optional + take_log_offsets : bool, optional(keyword-only) Whether to take the logarithm of offsets, by default False. - add_const: bool, optional + add_const: bool, optional(keyword-only) Whether to add a column of one in the covariates. Defaults to True. + Returns + ------- + A collection where item q corresponds to a PlnPCA object with rank q. + See also + -------- + :class:`~pyPLNmodels.PlnPCA` + :func:`~pyPLNmodels.PlnPCAcollection.from_formula` """ self._dict_models = {} ( @@ -1600,13 +1696,14 @@ class PlnPCAcollection: The formula. data : dict The data dictionary. - offsets_formula : str, optional + offsets_formula : str, optional(keyword-only) The formula for offsets, by default "logsum". - ranks : Iterable[int], optional + Overriden if data["offsets"] is not None. + ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). - dict_of_dict_initialization : dict, optional + dict_of_dict_initialization : dict, optional(keyword-only) The dictionary of initialization, by default None. - take_log_offsets : bool, optional + take_log_offsets : bool, optional(keyword-only) Whether to take the logarithm of offsets, by default False. Returns ------- @@ -1618,16 +1715,20 @@ class PlnPCAcollection: >>> counts = get_real_count_data() >>> data = {"counts": counts} >>> pca_col = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,6]) + See also + -------- + :class:`~pyPLNmodels.PlnPCA` + :func:`~pyPLNmodels.PlnPCAcollection.__init__` """ counts, covariates, offsets = _extract_data_from_formula(formula, data) return cls( counts, - covariates, - offsets, - offsets_formula, - ranks, - dict_of_dict_initialization, - take_log_offsets, + covariates=covariates, + offsets=offsets, + offsets_formula=offsets_formula, + ranks=ranks, + dict_of_dict_initialization=dict_of_dict_initialization, + take_log_offsets=take_log_offsets, add_const=False, ) @@ -1868,7 +1969,8 @@ class PlnPCAcollection: def fit( self, - nb_max_iteration: int = 100000, + nb_max_iteration: int = 50000, + *, lr: float = 0.01, class_optimizer: Type[torch.optim.Optimizer] = torch.optim.Rprop, tol: float = 1e-3, @@ -1876,21 +1978,21 @@ class PlnPCAcollection: verbose: bool = False, ): """ - Fit the PlnPCAcollection. + Fit each model in the PlnPCAcollection. Parameters ---------- nb_max_iteration : int, optional - The maximum number of iterations, by default 100000. - lr : float, optional + The maximum number of iterations, by default 50000. + lr : float, optional(keyword-only) The learning rate, by default 0.01. - class_optimizer : Type[torch.optim.Optimizer], optional + class_optimizer : Type[torch.optim.Optimizer], optional(keyword-only) The optimizer class, by default torch.optim.Rprop. - tol : float, optional + tol : float, optional(keyword-only) The tolerance, by default 1e-3. - do_smart_init : bool, optional + do_smart_init : bool, optional(keyword-only) Whether to do smart initialization, by default True. - verbose : bool, optional + verbose : bool, optional(keyword-only) Whether to print verbose output, by default False. 
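+        Examples
+        --------
+        A minimal usage sketch (the dataset and the ranks below are only
+        illustrative; any valid count matrix and list of ranks should work):
+
+        >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data
+        >>> counts = get_real_count_data()
+        >>> pcas = PlnPCAcollection(counts, ranks = [3, 4], add_const = True)
+        >>> pcas.fit(tol = 1e-3)
+        >>> print(pcas)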
""" self._pring_beginning_message() @@ -2265,30 +2367,10 @@ class PlnPCAcollection: return ".BIC, .AIC, .loglikes" -def add_doc(parent_class, *, params=None, example=None): - def wrapper(fun): - doc = getattr(parent_class, fun.__name__).__doc__ - if doc is None: - doc = "" - doc = textwrap.dedent(doc).rstrip(" \n\r") - if params is not None: - doc += textwrap.dedent(params.rstrip(" \n\r")) - if example is not None: - doc += "\n\nExamples" - doc += "\n--------" - doc += textwrap.dedent(example) - fun.__doc__ = doc - return fun - - return wrapper - - # Here, setting the value for each key in _dict_parameters class PlnPCA(_model): - _NAME: str = "PlnPCA" - _components: torch.Tensor """ - PlnPCA class. + PlnPCA object where the covariance has low rank. Examples -------- @@ -2305,17 +2387,33 @@ class PlnPCA(_model): >>> plnpca = PlnPCA.from_formula("counts ~ 0 + cov", data = data, rank = 5) >>> plnpca.fit() >>> print(plnpca) + + See also + -------- + :class:`pyPLNmodels.Pln` """ - @add_doc( + _NAME: str = "PlnPCA" + _components: torch.Tensor + + @_add_doc( _model, params=""" - rank : int, optional + rank : int, optional(keyword-only) The rank of the approximation, by default 5. """, example=""" - totopassword mange des pates. - """, + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts= get_real_count_data() + >>> pca = PlnPCA(counts, add_const = True) + >>> print(pca) + """, + returns=""" + PlnPCA + """, + see_also=""" + :func:`pyPLNmodels.PlnPCA.from_formula` + """, ) def __init__( self, @@ -2341,6 +2439,26 @@ class PlnPCA(_model): ) @classmethod + @_add_doc( + _model, + params=""" + rank : int, optional(keyword-only) + The rank of the approximation, by default 5. + """, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5) + """, + returns=""" + PlnPCA + """, + see_also=""" + :class:`pyPLNmodels.Pln` + :func:`pyPLNmodels.PlnPCA.__init__` + """, + ) def from_formula( cls, formula: str, @@ -2349,44 +2467,46 @@ class PlnPCA(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, ): - """ - Create a PlnPCA object from a formula. - - Parameters - ---------- - formula : str - The formula. - data : Any - The data. - rank : int, optional - The rank of the approximation, by default 5. - offsets_formula : str, optional - The offsets formula, by default "logsum". - dict_initialization : Dict[str, torch.Tensor], optional - The dictionary for initialization, by default None. - - Returns - ------- - PlnPCA - The created PlnPCA object. 
- Examples - -------- - >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5) - """ counts, covariates, offsets = _extract_data_from_formula(formula, data) return cls( counts, - covariates, - offsets, - offsets_formula, - rank, - dict_initialization, + covariates=covariates, + offsets=offsets, + offsets_formula=offsets_formula, + rank=rank, + dict_initialization=dict_initialization, add_const=False, ) + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> plnpca = PlnPCA(counts,add_const = True, rank = 6) + >>> plnpca.fit() + >>> print(plnpca) + """, + ) + def fit( + self, + nb_max_iteration: int = 50000, + *, + lr: float = 0.01, + class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, + tol: float = 1e-3, + do_smart_init: bool = True, + verbose: bool = False, + ): + super().fit( + nb_max_iteration, + lr=lr, + class_optimizer=class_optimizer, + tol=tol, + do_smart_init=do_smart_init, + verbose=verbose, + ) + def _check_if_rank_is_too_high(self): """ Check if the rank is too high and issue a warning if necessary. -- GitLab From 2be156e5f9fedbdde3a34158a7d25e1eda3d587a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 23 Jun 2023 13:17:33 +0200 Subject: [PATCH 008/167] minor chagnes. --- tests/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d87255cf..1713f658 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -53,9 +53,8 @@ def add_list_of_fixture_to_dict( RANK = 8 -RANKS = [2, 6] +RANKS = [2, 6, 8] instances = [] -# dict_fixtures_models = [] def convenient_PlnPCA(*args, **kwargs): -- GitLab From 5bff03fbae38331be4d97bdbcee9fe1586efbaea Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 23 Jun 2023 13:17:59 +0200 Subject: [PATCH 009/167] add examples in docstrings. --- pyPLNmodels/_utils.py | 45 +++++++++++++++++++++++++++++-- pyPLNmodels/models.py | 62 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 94 insertions(+), 13 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index ab09750a..b5d910a4 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -742,7 +742,7 @@ def get_real_count_data( def load_model(path_of_directory: str) -> Dict[str, Any]: """ - Load model from the given directory for future initialization. + Load Pln or PlnPCA model (that has previously been saved) from the given directory for future initialization. Parameters ---------- @@ -753,7 +753,26 @@ def load_model(path_of_directory: str) -> Dict[str, Any]: ------- Dict[str, Any] A dictionary containing the loaded model. 
- + Examples + -------- + >>> from pyPLNmodels import PlnPCA, Pln, get_real_count_data, load_model + >>> counts= get_real_count_data() + >>> pca = PlnPCA(counts, add_const = True) + >>> pca.fit() + >>> pca.save() + >>> dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") + >>> loaded_pca = PlnPCA(counts, add_const = True, dict_initialization = dict_init) + >>> print(loaded_pca) + + >>> pln = Pln(counts, add_const = True) + >>> pln.fit() + >>> pln.save() + >>> dict_init = load_model("Pln_nbcov_1_dim_200") + >>> loaded_pln = Pln(counts, add_const = True, dict_initialization = dict_init) + >>> print(loaded_pln) + See also + -------- + :func:`~pyPLNmodels.load_plnpcacollection` """ working_dir = os.getcwd() os.chdir(path_of_directory) @@ -779,6 +798,13 @@ def load_pln(path_of_directory: str) -> Dict[str, Any]: return load_model(path_of_directory) +def load_plnpca(path_of_directory: str) -> Dict[str, Any]: + """ + Alias for :func:`~pyPLNmodels._utils.load_model`. + """ + return load_model(path_of_directory) + + def load_plnpcacollection( path_of_directory: str, ranks: Optional[List[int]] = None ) -> Dict[int, Dict[str, Any]]: @@ -802,6 +828,20 @@ def load_plnpcacollection( ValueError If an invalid model name is encountered and the rank cannot be determined. + Examples + -------- + >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, load_plnpcacollection + >>> counts = get_real_count_data() + >>> pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6]) + >>> pcas.fit() + >>> pcas.save() + >>> dict_init = load_plnpcacollection("PlnPCAcollection_nbcov_1_dim_200") + >>> loaded_pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6], dict_of_dict_initialization = dict_init) + >>> print(loaded_pcas) + + See also + -------- + :func:`~pyPLNmodels.load_model` """ working_dir = os.getcwd() os.chdir(path_of_directory) @@ -819,6 +859,7 @@ def load_plnpcacollection( datas = {} for rank in ranks: datas[rank] = load_model(f"PlnPCA_rank_{rank}") + datas["ranks"] = ranks os.chdir(working_dir) return datas diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 52e4fcb7..4bfc626e 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -964,6 +964,7 @@ class _model(ABC): """ if path is None: path = f"./{self._directory_name}" + print("paht:", path) os.makedirs(path, exist_ok=True) for key, value in self._dict_parameters.items(): filename = f"{path}/{key}.csv" @@ -1279,14 +1280,18 @@ class Pln(_model): -------- >>> from pyPLNmodels import Pln, get_real_count_data >>> counts, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(counts,add_const = True, rank = 5]) + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() >>> print(pln) >>> pln.viz(colors = labels) >>> from pyPLNmodels import Pln, get_simulation_parameters, sample_pln >>> param = get_simulation_parameters() >>> counts = sample_pln(param) - >>> data = {} + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data) + >>> pln.fit() + >>> print(pln) """ @@ -1348,8 +1353,10 @@ class Pln(_model): cls, formula: str, data: Any, + *, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + take_log_offsets: bool = False, ): counts, covariates, offsets = _extract_data_from_formula(formula, data) return cls( @@ -1359,7 +1366,7 @@ class Pln(_model): offsets_formula=offsets_formula, dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, - add_const=add_const, + add_const=False, ) @_add_doc( @@ -1621,6 +1628,30 @@ 
class Pln(_model): class PlnPCAcollection: + """ + A collection where item q corresponds to a PlnPCA object with rank q. + + Examples + -------- + >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, get_simulation_parameters, sample_pln + >>> counts, labels = get_real_count_data(return_labels = True) + >>> data = {"counts": counts} + >>> pcas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]]) + >>> print(pcas) + >>> pcas.show() + + >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) + >>> counts = sample_pln(plnparam) + >>> data = {"counts": plnparam.counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> plnpcas = PlnPCAcollection.from_formula("counts ~ 0 + cov", data = data, ranks = [5,8,12]) + >>> plnpcas.fit() + >>> print(plnpcas) + >>> pcas.show() + See also + -------- + :class:`~pyPLNmodels.PlnPCA` + """ + _NAME = "PlnPCAcollection" _dict_models: dict @@ -1659,11 +1690,11 @@ class PlnPCAcollection: Whether to add a column of one in the covariates. Defaults to True. Returns ------- - A collection where item q corresponds to a PlnPCA object with rank q. + PlnPCAcollection See also -------- :class:`~pyPLNmodels.PlnPCA` - :func:`~pyPLNmodels.PlnPCAcollection.from_formula` + :meth:`~pyPLNmodels.PlnPCAcollection.from_formula` """ self._dict_models = {} ( @@ -1903,6 +1934,15 @@ class PlnPCAcollection: "Please instantiate with either a list " "of integers or an integer." ) + if dict_of_dict_initialization is not None: + if ranks != dict_of_dict_initialization["ranks"]: + msg = ( + "The given ranks in the dict_initialization are loaded but" + " you should fit the model once again or instantiate the" + " model with the ranks loaded." + ) + warnings.warn(msg) + elif isinstance(ranks, (int, np.integer)): dict_initialization = _get_dict_initialization( ranks, dict_of_dict_initialization @@ -2000,11 +2040,11 @@ class PlnPCAcollection: model = self[self.ranks[i]] model.fit( nb_max_iteration, - lr, - class_optimizer, - tol, - do_smart_init, - verbose, + lr=lr, + class_optimizer=class_optimizer, + tol=tol, + do_smart_init=do_smart_init, + verbose=verbose, ) if i < len(self.values()) - 1: next_model = self[self.ranks[i + 1]] @@ -2613,7 +2653,7 @@ class PlnPCA(_model): str The directory name. """ - return f"{self._NAME}_nbcov_{self.nb_cov}_rank_{self._rank}" + return f"{super()._directory_name}_rank_{self._rank}" @property def covariates(self) -> torch.Tensor: -- GitLab From 338707021440296f4db28b9a234abe5be3690fa8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 23 Jun 2023 13:18:55 +0200 Subject: [PATCH 010/167] add titles for html and function load_plnpca. --- docs/source/conf.py | 1 + docs/source/load.rst | 5 ++++- docs/source/pln.rst | 3 +++ docs/source/plnpca.rst | 3 +++ docs/source/plnpcacollection.rst | 3 +++ 5 files changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f3a032af..091a047b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -68,3 +68,4 @@ exclude_patterns = [] # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] +html_title = "pyPLNmodels" diff --git a/docs/source/load.rst b/docs/source/load.rst index 6a3ea50c..a3e8a23b 100644 --- a/docs/source/load.rst +++ b/docs/source/load.rst @@ -1,3 +1,6 @@ -.. 
autofunction:: pyPLNmodels.load_pln +Load saved models +================= .. autofunction:: pyPLNmodels.load_model .. autofunction:: pyPLNmodels.load_plnpcacollection +.. autofunction:: pyPLNmodels.load_pln +.. autofunction:: pyPLNmodels.load_plnpca diff --git a/docs/source/pln.rst b/docs/source/pln.rst index bb4bf654..15b03926 100644 --- a/docs/source/pln.rst +++ b/docs/source/pln.rst @@ -1,3 +1,6 @@ +Pln +=== + .. autoclass:: pyPLNmodels.Pln :members: :inherited-members: diff --git a/docs/source/plnpca.rst b/docs/source/plnpca.rst index 1badbc4e..ac521272 100644 --- a/docs/source/plnpca.rst +++ b/docs/source/plnpca.rst @@ -1,3 +1,6 @@ +PlnPCA +====== + .. autoclass:: pyPLNmodels.PlnPCA :members: :inherited-members: diff --git a/docs/source/plnpcacollection.rst b/docs/source/plnpcacollection.rst index 7871a724..ef84ac05 100644 --- a/docs/source/plnpcacollection.rst +++ b/docs/source/plnpcacollection.rst @@ -1,3 +1,6 @@ +PlnPCAcollection +================ + .. autoclass:: pyPLNmodels.PlnPCAcollection :members: :show-inheritance: -- GitLab From 1bf37eb9724f143caa21e715913d31709f442ffe Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 23 Jun 2023 14:25:58 +0200 Subject: [PATCH 011/167] tests should pass. --- pyPLNmodels/_utils.py | 74 +++++++++++++++++++++++++------------------ pyPLNmodels/models.py | 18 ++++++----- tests/conftest.py | 14 ++++++-- tests/import_data.py | 4 +-- tests/test_common.py | 4 +++ 5 files changed, 72 insertions(+), 42 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index b5d910a4..26189a9c 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -95,7 +95,7 @@ def _sigmoid(tens: torch.Tensor) -> torch.Tensor: return 1 / (1 + torch.exp(-tens)) -def sample_pln(pln_param, seed: int = None, return_latent=False) -> torch.Tensor: +def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Tensor: """ Sample from the Poisson Log-Normal (Pln) model. @@ -104,9 +104,9 @@ def sample_pln(pln_param, seed: int = None, return_latent=False) -> torch.Tensor pln_param : PlnParameters object parameters of the model, containing the coeficient, the covariates, the components and the offsets. - seed : int or None, optional + seed : int or None, optional(keyword-only) Random seed for reproducibility. Default is None. - return_latent : bool, optional + return_latent : bool, optional(keyword-only) If True will return also the latent variables. Default is False. Returns @@ -517,21 +517,21 @@ def _get_simulation_coef_cov_offsets( class PlnParameters: - def __init__(self, components, coef, covariates, offsets, coef_inflation=None): + def __init__(self, *, components, coef, covariates, offsets, coef_inflation=None): """ Instantiate all the needed parameters to sample from the PLN model. Parameters ---------- - components : torch.Tensor + components : torch.Tensor(keyword-only) Components of size (p, rank) - coef : torch.Tensor + coef : torch.Tensor(keyword-only) Coefficient of size (d, p) - covariates : torch.Tensor or None + covariates : torch.Tensor or None(keyword-only) Covariates, size (n, d) or None - offsets : torch.Tensor + offsets : torch.Tensor(keyword-only) Offset, size (n, p) - _coef_inflation : torch.Tensor or None, optional + _coef_inflation : torch.Tensor or None, optional(keyword-only) Coefficient for zero-inflation model, size (d, p) or None. Default is None. 
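+        Examples
+        --------
+        A minimal sketch with hand-built tensors (the sizes are only
+        illustrative; any shapes compatible with the description above work):
+
+        >>> import torch
+        >>> from pyPLNmodels import PlnParameters
+        >>> components = torch.randn(25, 5)
+        >>> coef = torch.randn(2, 25)
+        >>> covariates = torch.randn(100, 2)
+        >>> offsets = torch.zeros(100, 25)
+        >>> pln_param = PlnParameters(components = components, coef = coef, covariates = covariates, offsets = offsets)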
""" @@ -540,9 +540,10 @@ class PlnParameters: self.covariates = _format_data(covariates) self.offsets = _format_data(offsets) self.coef_inflation = _format_data(coef_inflation) - _check_two_dimensions_are_equal( - "components", "coef", self.components.shape[0], self.coef.shape[1], 0, 1 - ) + if self.coef is not None: + _check_two_dimensions_are_equal( + "components", "coef", self.components.shape[0], self.coef.shape[1], 0, 1 + ) if self.offsets is not None: _check_two_dimensions_are_equal( "components", @@ -622,24 +623,29 @@ def _check_two_dimensions_are_equal( def get_simulation_parameters( - n_samples: int = 100, dim: int = 25, nb_cov: int = 1, rank: int = 5, add_const=True + *, + n_samples: int = 100, + dim: int = 25, + nb_cov: int = 1, + rank: int = 5, + add_const: bool = True, ) -> PlnParameters: """ Generate simulation parameters for a Poisson-lognormal model. Parameters ---------- - n_samples : int, optional + n_samples : int, optional(keyword-only) The number of samples, by default 100. - dim : int, optional + dim : int, optional(keyword-only) The dimension of the data, by default 25. - nb_cov : int, optional + nb_cov : int, optional(keyword-only) The number of covariates, by default 1. If add_const is True, then there will be nb_cov+1 covariates as the intercept can be seen as a covariates. - rank : int, optional + rank : int, optional(keyword-only) The rank of the data components, by default 5. - add_const : bool, optional + add_const : bool, optional(keyword-only) If True, will add a vector of ones in the covariates. Returns @@ -652,15 +658,19 @@ def get_simulation_parameters( n_samples, nb_cov, dim, add_const ) components = _get_simulation_components(dim, rank) - return PlnParameters(components, coef, covariates, offsets) + return PlnParameters( + components=components, coef=coef, covariates=covariates, offsets=offsets + ) def get_simulated_count_data( + *, n_samples: int = 100, dim: int = 25, rank: int = 5, nb_cov: int = 1, return_true_param: bool = False, + add_const: bool = True, seed: int = 0, ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """ @@ -668,17 +678,19 @@ def get_simulated_count_data( Parameters ---------- - n_samples : int, optional + n_samples : int, optional(keyword-only) Number of samples, by default 100. - dim : int, optional + dim : int, optional(keyword-only) Dimension, by default 25. - rank : int, optional + rank : int, optional(keyword-only) Rank of the covariance matrix, by default 5. - nb_cov : int, optional + add_const : bool, optional(keyword-only) + If True, will add a vector of ones. Default is True + nb_cov : int, optional(keyword-only) Number of covariates, by default 1. - return_true_param : bool, optional + return_true_param : bool, optional(keyword-only) Whether to return the true parameters of the model, by default False. - seed : int, optional + seed : int, optional(keyword-only) Seed value for random number generation, by default 0. Returns @@ -686,7 +698,9 @@ def get_simulated_count_data( Tuple[torch.Tensor, torch.Tensor, torch.Tensor] Tuple containing counts, covariates, and offsets. 
""" - pln_param = get_simulation_parameters(n_samples, dim, nb_cov, rank) + pln_param = get_simulation_parameters( + n_samples=n_samples, dim=dim, nb_cov=nb_cov, rank=rank, add_const=add_const + ) counts = sample_pln(pln_param, seed=seed, return_latent=False) if return_true_param is True: return ( @@ -700,18 +714,18 @@ def get_simulated_count_data( def get_real_count_data( - n_samples: int = 469, dim: int = 200, return_labels: bool = False + *, n_samples: int = 469, dim: int = 200, return_labels: bool = False ) -> np.ndarray: """ Get real count data from the scMARK dataset. Parameters ---------- - n_samples : int, optional + n_samples : int, optional(keyword-only) Number of samples, by default max_samples. - dim : int, optional + dim : int, optional(keyword-only) Dimension, by default max_dim. - return_labels: bool, optional + return_labels: bool, optional(keyword-only) If True, will return the labels of the count data Returns ------- diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 4bfc626e..67e67ab4 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1254,10 +1254,11 @@ class _model(ABC): if ax is None: ax = plt.gca() predictions = self._counts_predictions().ravel().detach() - print("colors:", np.array(colors).ravel().shape) - print("pred", predictions.shape) - colors = np.repeat(np.array(colors), repeats=self.dim) - sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors.ravel(), ax=ax) + print("pred shpae", predictions.shape) + print("counts shape", self.counts.ravel().shape) + if colors is not None: + colors = np.repeat(np.array(colors), repeats=self.dim).ravel() + sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors, ax=ax) max_y = int(torch.max(self.counts.ravel()).item()) y = np.linspace(0, max_y, max_y) ax.plot(y, y, c="red") @@ -2602,11 +2603,12 @@ class PlnPCA(_model): * (self.latent_sqrt_var**2).unsqueeze(1), axis=2, ) + if self.covariates is not None: + XB = self.covariates @ self.coef + else: + XB = 0 return torch.exp( - self._offsets - + self.covariates @ self.coef - + self.latent_variables - + 1 / 2 * covariance_a_posteriori + self._offsets + XB + self.latent_variables + 1 / 2 * covariance_a_posteriori ) @latent_mean.setter diff --git a/tests/conftest.py b/tests/conftest.py index 1713f658..f983df5e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -118,7 +118,12 @@ dict_fixtures = {} @pytest.fixture(params=params) def simulated_pln_0cov_array(request): cls = request.param - pln = cls(counts_sim_0cov, covariates_sim_0cov, offsets_sim_0cov, add_const=False) + pln = cls( + counts_sim_0cov, + covariates=covariates_sim_0cov, + offsets=offsets_sim_0cov, + add_const=False, + ) return pln @@ -126,7 +131,12 @@ def simulated_pln_0cov_array(request): @cache def simulated_fitted_pln_0cov_array(request): cls = request.param - pln = cls(counts_sim_0cov, covariates_sim_0cov, offsets_sim_0cov, add_const=False) + pln = cls( + counts_sim_0cov, + covariates=covariates_sim_0cov, + offsets=offsets_sim_0cov, + add_const=False, + ) pln.fit() return pln diff --git a/tests/import_data.py b/tests/import_data.py index 03930286..154f5644 100644 --- a/tests/import_data.py +++ b/tests/import_data.py @@ -12,14 +12,14 @@ from pyPLNmodels import ( offsets_sim_0cov, true_covariance_0cov, true_coef_0cov, -) = get_simulated_count_data(return_true_param=True, nb_cov=0) +) = get_simulated_count_data(return_true_param=True, nb_cov=0, add_const=False) ( counts_sim_2cov, covariates_sim_2cov, offsets_sim_2cov, true_covariance_2cov, true_coef_2cov, -) = 
get_simulated_count_data(return_true_param=True, nb_cov=2) +) = get_simulated_count_data(return_true_param=True, nb_cov=2, add_const=False) data_sim_0cov = { "counts": counts_sim_0cov, diff --git a/tests/test_common.py b/tests/test_common.py index ccdd7b92..38711fef 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -46,6 +46,10 @@ def test_find_right_covariance(simulated_fitted_any_pln): true_covariance = true_sim_0cov["Sigma"] elif simulated_fitted_any_pln.nb_cov == 2: true_covariance = true_sim_2cov["Sigma"] + else: + raise ValueError( + f"Not the right numbers of covariance({simulated_fitted_any_pln.nb_cov})" + ) mse_covariance = MSE(simulated_fitted_any_pln.covariance - true_covariance) assert mse_covariance < 0.05 -- GitLab From 695663af08fa2b06ff837398ac974d42df926797 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 25 Jun 2023 22:49:36 +0200 Subject: [PATCH 012/167] add scripts that tests the examples in the docstrings. Running examples works fine but it does not throw an error when the script fails. --- .gitlab-ci.yml | 12 ++++++++ tests/create_example_files.py | 53 +++++++++++++++++++++++++++++++++++ tests/test_examples.sh | 5 ++++ 3 files changed, 70 insertions(+) create mode 100644 tests/create_example_files.py create mode 100755 tests/test_examples.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 6bccf9e4..d0f6a915 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -21,6 +21,18 @@ tests: - pip install . - cd tests - pytest +examples: + stage: checks + image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + before_script: + pip install '.[tests]' + script: + - pip install . + - cd tests + - python create_example_files.py + - ./test_examples.sh + + build_package: stage: build diff --git a/tests/create_example_files.py b/tests/create_example_files.py new file mode 100644 index 00000000..8469f58d --- /dev/null +++ b/tests/create_example_files.py @@ -0,0 +1,53 @@ +import ast +import os + + +def get_lines(filename): + with open(f"../pyPLNmodels/{filename}.py") as file: + lines = [line.rstrip() for line in file] + return lines + + +def get_examples(lines): + examples = [] + in_example = False + example = [] + for line in lines: + line = line.lstrip() + if len(line) > 3: + if line[0:3] == ">>>": + in_example = True + example.append(line[4:]) + else: + if in_example is True: + examples.append(example) + example = [] + in_example = False + return examples + + +def write_examples(examples, prefix_filename): + for i in range(len(examples)): + example = examples[i] + nb_example = str(i + 1) + example_filename = f"examples/{prefix_filename}_example_{nb_example}.py" + try: + os.remove(example_filename) + except FileNotFoundError: + pass + with open(example_filename, "a") as the_file: + for line in example: + the_file.write(line + "\n") + + +def filename_to_example_file(filename): + lines = get_lines(filename) + examples = get_examples(lines) + write_examples(examples, filename) + + +# filename_to_example_file("models") +os.makedirs("examples", exist_ok=True) +filename_to_example_file("_utils") +filename_to_example_file("models") +filename_to_example_file("elbos") diff --git a/tests/test_examples.sh b/tests/test_examples.sh new file mode 100755 index 00000000..b8a32f31 --- /dev/null +++ b/tests/test_examples.sh @@ -0,0 +1,5 @@ +search_dir="examples" +for entry in "$search_dir"/* +do + python "$entry" +done -- GitLab From e85ce4b1bee1993cffa7e4b9aebc7c204f1a3ccb Mon Sep 17 00:00:00 2001 From: bastien-mva 
<bastien.batardiere@gmail.com> Date: Sun, 25 Jun 2023 22:56:13 +0200 Subject: [PATCH 013/167] print statement remove --- pyPLNmodels/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 67e67ab4..9e84b16d 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -964,7 +964,6 @@ class _model(ABC): """ if path is None: path = f"./{self._directory_name}" - print("paht:", path) os.makedirs(path, exist_ok=True) for key, value in self._dict_parameters.items(): filename = f"{path}/{key}.csv" -- GitLab From 2d019c35bd15949960388f104febbe2d3137a11c Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 25 Jun 2023 23:08:43 +0200 Subject: [PATCH 014/167] began to move the loading functions in the loading module. --- pyPLNmodels/_utils.py | 124 ---------------------------------------- pyPLNmodels/load.py | 128 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 128 insertions(+), 124 deletions(-) create mode 100644 pyPLNmodels/load.py diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 26189a9c..5945f5ed 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -754,130 +754,6 @@ def get_real_count_data( return counts, labels -def load_model(path_of_directory: str) -> Dict[str, Any]: - """ - Load Pln or PlnPCA model (that has previously been saved) from the given directory for future initialization. - - Parameters - ---------- - path_of_directory : str - The path to the directory containing the model. - - Returns - ------- - Dict[str, Any] - A dictionary containing the loaded model. - Examples - -------- - >>> from pyPLNmodels import PlnPCA, Pln, get_real_count_data, load_model - >>> counts= get_real_count_data() - >>> pca = PlnPCA(counts, add_const = True) - >>> pca.fit() - >>> pca.save() - >>> dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") - >>> loaded_pca = PlnPCA(counts, add_const = True, dict_initialization = dict_init) - >>> print(loaded_pca) - - >>> pln = Pln(counts, add_const = True) - >>> pln.fit() - >>> pln.save() - >>> dict_init = load_model("Pln_nbcov_1_dim_200") - >>> loaded_pln = Pln(counts, add_const = True, dict_initialization = dict_init) - >>> print(loaded_pln) - See also - -------- - :func:`~pyPLNmodels.load_plnpcacollection` - """ - working_dir = os.getcwd() - os.chdir(path_of_directory) - all_files = os.listdir() - data = {} - for filename in all_files: - if filename.endswith(".csv"): - parameter = filename[:-4] - try: - data[parameter] = pd.read_csv(filename, header=None).values - except pd.errors.EmptyDataError: - print( - f"Can't load {parameter} since empty. Standard initialization will be performed for this parameter" - ) - os.chdir(working_dir) - return data - - -def load_pln(path_of_directory: str) -> Dict[str, Any]: - """ - Alias for :func:`~pyPLNmodels._utils.load_model`. - """ - return load_model(path_of_directory) - - -def load_plnpca(path_of_directory: str) -> Dict[str, Any]: - """ - Alias for :func:`~pyPLNmodels._utils.load_model`. - """ - return load_model(path_of_directory) - - -def load_plnpcacollection( - path_of_directory: str, ranks: Optional[List[int]] = None -) -> Dict[int, Dict[str, Any]]: - """ - Load PlnPCAcollection models from the given directory. - - Parameters - ---------- - path_of_directory : str - The path to the directory containing the PlnPCAcollection models. - ranks : List[int], optional - A List of ranks specifying which models to load. If None, all models in the directory will be loaded. 
- - Returns - ------- - Dict[int, Dict[str, Any]] - A dictionary containing the loaded PlnPCAcollection models, with ranks as keys. - - Raises - ------ - ValueError - If an invalid model name is encountered and the rank cannot be determined. - - Examples - -------- - >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, load_plnpcacollection - >>> counts = get_real_count_data() - >>> pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6]) - >>> pcas.fit() - >>> pcas.save() - >>> dict_init = load_plnpcacollection("PlnPCAcollection_nbcov_1_dim_200") - >>> loaded_pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6], dict_of_dict_initialization = dict_init) - >>> print(loaded_pcas) - - See also - -------- - :func:`~pyPLNmodels.load_model` - """ - working_dir = os.getcwd() - os.chdir(path_of_directory) - if ranks is None: - dirnames = os.listdir() - ranks = [] - for dirname in dirnames: - try: - rank = int(dirname[-1]) - except ValueError: - raise ValueError( - f"Can't load the model {dirname}. End of {dirname} should be an int" - ) - ranks.append(rank) - datas = {} - for rank in ranks: - datas[rank] = load_model(f"PlnPCA_rank_{rank}") - datas["ranks"] = ranks - os.chdir(working_dir) - return datas - - def _check_right_rank(data: Dict[str, Any], rank: int) -> None: """ Check if the rank of the given data matches the specified rank. diff --git a/pyPLNmodels/load.py b/pyPLNmodels/load.py new file mode 100644 index 00000000..4b59d5e8 --- /dev/null +++ b/pyPLNmodels/load.py @@ -0,0 +1,128 @@ +import os +from typing import Optional, Dict, Any, List + +import pandas as pd + + +def load_model(path_of_directory: str) -> Dict[str, Any]: + """ + Load Pln or PlnPCA model (that has previously been saved) from the given directory for future initialization. + + Parameters + ---------- + path_of_directory : str + The path to the directory containing the model. + + Returns + ------- + Dict[str, Any] + A dictionary containing the loaded model. + Examples + -------- + >>> from pyPLNmodels import PlnPCA, Pln, get_real_count_data, load_model + >>> counts= get_real_count_data() + >>> pca = PlnPCA(counts, add_const = True) + >>> pca.fit() + >>> pca.save() + >>> dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") + >>> loaded_pca = PlnPCA(counts, add_const = True, dict_initialization = dict_init) + >>> print(loaded_pca) + + >>> pln = Pln(counts, add_const = True) + >>> pln.fit() + >>> pln.save() + >>> dict_init = load_model("Pln_nbcov_1_dim_200") + >>> loaded_pln = Pln(counts, add_const = True, dict_initialization = dict_init) + >>> print(loaded_pln) + See also + -------- + :func:`~pyPLNmodels.load_plnpcacollection` + """ + working_dir = os.getcwd() + os.chdir(path_of_directory) + all_files = os.listdir() + data = {} + for filename in all_files: + if filename.endswith(".csv"): + parameter = filename[:-4] + try: + data[parameter] = pd.read_csv(filename, header=None).values + except pd.errors.EmptyDataError: + print( + f"Can't load {parameter} since empty. Standard initialization will be performed for this parameter" + ) + os.chdir(working_dir) + return data + + +def load_pln(path_of_directory: str) -> Dict[str, Any]: + """ + Alias for :func:`~pyPLNmodels._utils.load_model`. + """ + return load_model(path_of_directory) + + +def load_plnpca(path_of_directory: str) -> Dict[str, Any]: + """ + Alias for :func:`~pyPLNmodels._utils.load_model`. 
+ """ + return load_model(path_of_directory) + + +def load_plnpcacollection( + path_of_directory: str, ranks: Optional[List[int]] = None +) -> Dict[int, Dict[str, Any]]: + """ + Load PlnPCAcollection models from the given directory. + + Parameters + ---------- + path_of_directory : str + The path to the directory containing the PlnPCAcollection models. + ranks : List[int], optional + A List of ranks specifying which models to load. If None, all models in the directory will be loaded. + + Returns + ------- + Dict[int, Dict[str, Any]] + A dictionary containing the loaded PlnPCAcollection models, with ranks as keys. + + Raises + ------ + ValueError + If an invalid model name is encountered and the rank cannot be determined. + + Examples + -------- + >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, load_plnpcacollection + >>> counts = get_real_count_data() + >>> pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6]) + >>> pcas.fit() + >>> pcas.save() + >>> dict_init = load_plnpcacollection("PlnPCAcollection_nbcov_1_dim_200") + >>> loaded_pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6], dict_of_dict_initialization = dict_init) + >>> print(loaded_pcas) + + See also + -------- + :func:`~pyPLNmodels.load_model` + """ + working_dir = os.getcwd() + os.chdir(path_of_directory) + if ranks is None: + dirnames = os.listdir() + ranks = [] + for dirname in dirnames: + try: + rank = int(dirname[-1]) + except ValueError: + raise ValueError( + f"Can't load the model {dirname}. End of {dirname} should be an int" + ) + ranks.append(rank) + datas = {} + for rank in ranks: + datas[rank] = load_model(f"PlnPCA_rank_{rank}") + datas["ranks"] = ranks + os.chdir(working_dir) + return datas -- GitLab From 9c060b6b6c14d9428f06446b55be854c5c0069e4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 08:21:51 +0200 Subject: [PATCH 015/167] change import file for load_pln etc. --- pyPLNmodels/__init__.py | 4 +--- pyPLNmodels/load.py | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index 992fcf19..2076e34f 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -4,13 +4,11 @@ from .elbos import profiled_elbo_pln, elbo_plnpca, elbo_pln from ._utils import ( get_simulated_count_data, get_real_count_data, - load_model, - load_plnpcacollection, - load_pln, sample_pln, get_simulation_parameters, PlnParameters, ) +from load import load_model, load_plnpcacollection, load_pln from ._initialization import log_posterior diff --git a/pyPLNmodels/load.py b/pyPLNmodels/load.py index 4b59d5e8..26871ae8 100644 --- a/pyPLNmodels/load.py +++ b/pyPLNmodels/load.py @@ -57,14 +57,14 @@ def load_model(path_of_directory: str) -> Dict[str, Any]: def load_pln(path_of_directory: str) -> Dict[str, Any]: """ - Alias for :func:`~pyPLNmodels._utils.load_model`. + Alias for :func:`~pyPLNmodels.load.load_model`. """ return load_model(path_of_directory) def load_plnpca(path_of_directory: str) -> Dict[str, Any]: """ - Alias for :func:`~pyPLNmodels._utils.load_model`. + Alias for :func:`~pyPLNmodels.load.load_model`. """ return load_model(path_of_directory) -- GitLab From 1c112b4cff42fc08253ca2157d77faedd5a17253 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 08:55:39 +0200 Subject: [PATCH 016/167] add docstrings and examples and wrote test for viz. 
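
Besides the docstring examples and the new viz tests, this patch also repairs the import introduced in the previous commit: inside the package, "from load import ..." is an absolute import, so Python looks for a top-level module named load instead of the package-local load.py and the import fails; the module has to be imported relatively. A minimal illustration of the two forms (module and function names as in this series):

    # pyPLNmodels/__init__.py
    # from load import load_model, load_plnpcacollection, load_pln   # absolute import: looks for a top-level 'load' module and fails
    from .load import load_model, load_plnpcacollection, load_pln    # relative import: resolves pyPLNmodels/load.py
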
--- pyPLNmodels/__init__.py | 2 +- pyPLNmodels/models.py | 52 +++++++++++++++++++++++++++++------ tests/create_example_files.py | 1 + tests/test_viz.py | 12 +++++++- 4 files changed, 57 insertions(+), 10 deletions(-) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index 2076e34f..b379b8d2 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -8,7 +8,7 @@ from ._utils import ( get_simulation_parameters, PlnParameters, ) -from load import load_model, load_plnpcacollection, load_pln +from .load import load_model, load_plnpcacollection, load_pln from ._initialization import log_posterior diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 9e84b16d..9c04cc0e 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1247,14 +1247,13 @@ class _model(ABC): ------- Any The matplotlib axis. + >>> """ if self._fitted is None: raise RuntimeError("Please fit the model before.") if ax is None: ax = plt.gca() predictions = self._counts_predictions().ravel().detach() - print("pred shpae", predictions.shape) - print("counts shape", self.counts.ravel().shape) if colors is not None: colors = np.repeat(np.array(colors), repeats=self.dim).ravel() sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors, ax=ax) @@ -1302,6 +1301,7 @@ class Pln(_model): >>> from pyPLNmodels import Pln, get_real_count_data >>> counts= get_real_count_data() >>> pln = Pln(counts, add_const = True) + >>> pln.fit() >>> print(pln) """, returns=""" @@ -1398,6 +1398,23 @@ class Pln(_model): verbose=verbose, ) + @_add_doc( + _model, + example=""" + >>> import matplotlib.pyplot as plt + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() + >>> pln.plot_expected_vs_true() + >>> plt.show() + >>> pln.plot_expected_vs_true(colors = labels) + >>> plt.show() + """, + ) + def plot_expected_vs_true(self, ax=None, colors=None): + super().plot_expected_vs_true(ax=ax, colors=colors) + @property def _description(self): """ @@ -1636,13 +1653,14 @@ class PlnPCAcollection: >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, get_simulation_parameters, sample_pln >>> counts, labels = get_real_count_data(return_labels = True) >>> data = {"counts": counts} - >>> pcas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]]) - >>> print(pcas) - >>> pcas.show() + >>> plncas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]) + >>> plncas.fit() + >>> print(plncas) + >>> plncas.show() >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) >>> counts = sample_pln(plnparam) - >>> data = {"counts": plnparam.counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> data = {"counts":counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} >>> plnpcas = PlnPCAcollection.from_formula("counts ~ 0 + cov", data = data, ranks = [5,8,12]) >>> plnpcas.fit() >>> print(plnpcas) @@ -2417,13 +2435,14 @@ class PlnPCA(_model): >>> from pyPLNmodels import PlnPCA, get_real_count_data, get_simulation_parameters, sample_pln >>> counts, labels = get_real_count_data(return_labels = True) >>> data = {"counts": counts} - >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5]) + >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5) + >>> pca.fit() >>> print(pca) >>> pca.viz(colors = labels) >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, 
rank = 8) >>> counts = sample_pln(plnparam) - >>> data = {"counts": plnparam.counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> data = {"counts": counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} >>> plnpca = PlnPCA.from_formula("counts ~ 0 + cov", data = data, rank = 5) >>> plnpca.fit() >>> print(plnpca) @@ -2547,6 +2566,23 @@ class PlnPCA(_model): verbose=verbose, ) + @_add_doc( + _model, + example=""" + >>> import matplotlib.pyplot as plt + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts, labels = get_real_count_data(return_labels = True) + >>> plnpca = Pln(counts,add_const = True) + >>> plnpca.fit() + >>> plnpca.plot_expected_vs_true() + >>> plt.show() + >>> plnpca.plot_expected_vs_true(colors = labels) + >>> plt.show() + """, + ) + def plot_expected_vs_true(self, ax=None, colors=None): + super().plot_expected_vs_true(ax=ax, colors=colors) + def _check_if_rank_is_too_high(self): """ Check if the rank is too high and issue a warning if necessary. diff --git a/tests/create_example_files.py b/tests/create_example_files.py index 8469f58d..b575159f 100644 --- a/tests/create_example_files.py +++ b/tests/create_example_files.py @@ -51,3 +51,4 @@ os.makedirs("examples", exist_ok=True) filename_to_example_file("_utils") filename_to_example_file("models") filename_to_example_file("elbos") +filename_to_example_file("load") diff --git a/tests/test_viz.py b/tests/test_viz.py index da9f05b4..4cab97f9 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -83,7 +83,17 @@ def test_plot_pca_correlation_graph_without_names(pln): pln.plot_pca_correlation_graph([f"var_{i}" for i in range(3)], [0, 1, 2]) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_sim_pln"]) +@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) @filter_models(["Pln", "PlnPCA"]) def test_expected_vs_true(pln): pln.plot_expected_vs_true() + fig, ax = plt.figure() + pln.plot_expected_vs_true(ax=ax) + + +@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_real_pln"]) +@filter_models(["Pln", "PlnPCA"]) +def test_expected_vs_true_labels(pln): + pln.plot_expected_vs_true(colors=labels_real) + fig, ax = plt.subplots() + pln.plot_expected_vs_true(ax=ax, colors=labels_real) -- GitLab From 0b0cc5345522b4f62970e4476190a21b34387a9d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 09:23:42 +0200 Subject: [PATCH 017/167] began to aggregate docstrings for fitting and added docstrings for methods --- pyPLNmodels/models.py | 189 ++++++++++++++++++++++++++++++++---------- 1 file changed, 147 insertions(+), 42 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 9c04cc0e..d89a9bfe 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -49,6 +49,21 @@ else: NB_CHARACTERS_FOR_NICE_PLOT = 70 +str_fit_plnpca_formula = ( + ">>> from pyPLNmodels import PlnPCA, get_real_count_data" + ">>> counts = get_real_count_data()" + ">>> data = {'counts': counts}" + ">>> plnpca = PlnPCA.from_formula('counts ~ 1', data = data)" + ">>> plnpca.fit()" +) +str_fit_pln_formula = ( + ">>> from pyPLNmodels import PlnPCA, get_real_count_data" + ">>> counts = get_real_count_data()" + ">>> data = {'counts': counts}" + ">>> plnpca = PlnPCA.from_formula('counts ~ 1', data = data)" + ">>> plnpca.fit()" +) + class _model(ABC): """ @@ -174,17 +189,17 @@ class _model(ABC): """ return self._fitted - def viz(self, ax=None, colors=None, show_cov: bool = False): + def viz(self, *, ax=None, colors=None, 
show_cov: bool = False): """ Visualize the latent variables with a classic PCA. Parameters ---------- - ax : Optional[Any], optional + ax : Optional[Any], optional(keyword-only) The matplotlib axis to use. If None, the current axis is used, by default None. - colors : Optional[Any], optional + colors : Optional[Any], optional(keyword-only) The colors to use for plotting, by default None. - show_cov: bool, optional + show_cov: bool, optional(keyword-only) If True, will display ellipses with right covariances. Default is False. Raises ------ @@ -414,15 +429,6 @@ class _model(ABC): ------ ValueError If the number of components asked is greater than the number of dimensions. - Examples - -------- - >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) - >>> pln.fit() - >>> pca_proj = pln.pca_projected_latent_variables() - >>> print(pca_proj.shape) """ pca = self.sk_PCA(n_components=n_components) return pca.transform(self.latent_variables.cpu()) @@ -1415,6 +1421,70 @@ class Pln(_model): def plot_expected_vs_true(self, ax=None, colors=None): super().plot_expected_vs_true(ax=ax, colors=colors) + @_add_doc( + _model, + example=""" + >>> import matplotlib.pyplot as plt + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() + >>> pln.viz() + >>> plt.show() + >>> pln.viz(colors = labels) + >>> plt.show() + >>> pln.viz(show_cov = True) + >>> plt.show() + """, + ) + def viz(self, ax=None, colors=None, show_cov: bool = False): + super().plot_expected_vs_true(ax=ax, colors=colors, show_cov=show_cov) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> pln.fit() + >>> pca_proj = pln.pca_projected_latent_variables() + >>> print(pca_proj.shape) + """, + ) + def pca_projected_latent_variables(self, n_components: Optional[int] = None): + super().pca_projected_latent_variables(n_components=n_components) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> pln.fit() + >>> pln.scatter_pca_matrix(n_components = 5) + """, + ) + def scatter_pca_matrix(self, n_components=None, color=None): + super().scatter_pca_matrix(n_components=n_components, color=color) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> pln.fit() + >>> pln.plot_pca_correlation_graph(["a","b"], indices_of_variables = [4,8]) + """, + ) + def plot_pca_correlation_graph(self, variables_names, indices_of_variables=None): + super().plot_pca_correlation_graph( + variables_names=variables_names, indices_of_variables=indices_of_variables + ) + @property def _description(self): """ @@ -2583,6 +2653,70 @@ class PlnPCA(_model): def plot_expected_vs_true(self, ax=None, colors=None): super().plot_expected_vs_true(ax=ax, colors=colors) + @_add_doc( + _model, + example=""" + >>> import matplotlib.pyplot as plt + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts, labels = 
get_real_count_data(return_labels = True) + >>> plnpca = PlnPCA(counts,add_const = True) + >>> plnpca.fit() + >>> plnpca.viz() + >>> plt.show() + >>> plnpca.viz(colors = labels) + >>> plt.show() + >>> plnpca.viz(show_cov = True) + >>> plt.show() + """, + ) + def viz(self, ax=None, colors=None, show_cov: bool = False): + super().plot_expected_vs_true(ax=ax, colors=colors, show_cov=show_cov) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> plnpca.fit() + >>> pca_proj = plnpca.pca_projected_latent_variables() + >>> print(pca_proj.shape) + """, + ) + def pca_projected_latent_variables(self, n_components: Optional[int] = None): + super().pca_projected_latent_variables(n_components=n_components) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> plnpca.fit() + >>> plnpca.scatter_pca_matrix(n_components = 5) + """, + ) + def scatter_pca_matrix(self, n_components=None, color=None): + super().scatter_pca_matrix(n_components=n_components, color=color) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> plnpca.fit() + >>> plnpca.plot_pca_correlation_graph(["a","b"], indices_of_variables = [4,8]) + """, + ) + def plot_pca_correlation_graph(self, variables_names, indices_of_variables=None): + super().plot_pca_correlation_graph( + variables_names=variables_names, indices_of_variables=indices_of_variables + ) + def _check_if_rank_is_too_high(self): """ Check if the rank is too high and issue a warning if necessary. @@ -2943,35 +3077,6 @@ class PlnPCA(_model): """ return torch.linalg.qr(self._components, "reduced")[0] - def pca_projected_latent_variables( - self, n_components: Optional[int] = None - ) -> np.ndarray: - """ - Perform PCA on projected latent variables. - - Parameters - ---------- - n_components : Optional[int] - Number of components to keep. Defaults to None. - - Returns - ------- - np.ndarray - The transformed projected latent variables. - Raises - ------ - ValueError - If the number of components asked is greater than the number of dimensions. 
- """ - if n_components is None: - n_components = self._get_max_components() - if n_components > self.rank: - raise ValueError( - f"You ask more components ({n_components}) than maximum rank ({self.rank})" - ) - pca = PCA(n_components=n_components) - return pca.fit_transform(self.latent_variables.cpu()) - @property def components(self) -> torch.Tensor: """ -- GitLab From d6e66a85897ab632099dee1370d532523415e1d9 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 17:15:35 +0200 Subject: [PATCH 018/167] add some docstrings --- pyPLNmodels/__init__.py | 3 +- pyPLNmodels/models.py | 73 ++++++++++++++++++++--------------------- 2 files changed, 37 insertions(+), 39 deletions(-) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index b379b8d2..e785b288 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -8,7 +8,7 @@ from ._utils import ( get_simulation_parameters, PlnParameters, ) -from .load import load_model, load_plnpcacollection, load_pln +from .load import load_model, load_plnpcacollection, load_pln, load_plnpca from ._initialization import log_posterior @@ -24,6 +24,7 @@ __all__ = ( "load_model", "load_plnpcacollection", "load_pln", + "load_plnpca", "sample_pln", "log_posterior", "get_simulation_parameters", diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index d89a9bfe..335fa96b 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -49,21 +49,6 @@ else: NB_CHARACTERS_FOR_NICE_PLOT = 70 -str_fit_plnpca_formula = ( - ">>> from pyPLNmodels import PlnPCA, get_real_count_data" - ">>> counts = get_real_count_data()" - ">>> data = {'counts': counts}" - ">>> plnpca = PlnPCA.from_formula('counts ~ 1', data = data)" - ">>> plnpca.fit()" -) -str_fit_pln_formula = ( - ">>> from pyPLNmodels import PlnPCA, get_real_count_data" - ">>> counts = get_real_count_data()" - ">>> data = {'counts': counts}" - ">>> plnpca = PlnPCA.from_formula('counts ~ 1', data = data)" - ">>> plnpca.fit()" -) - class _model(ABC): """ @@ -878,7 +863,7 @@ class _model(ABC): Returns ------- torch.Tensor or None - The latent mean or None. + The latent mean or None if it has not yet been initialized. """ return self._cpu_attribute_or_none("_latent_mean") @@ -1297,8 +1282,6 @@ class Pln(_model): >>> pln = Pln.from_formula("counts ~ 1", data) >>> pln.fit() >>> print(pln) - - """ @_add_doc( @@ -1579,6 +1562,15 @@ class Pln(_model): ------- torch.Tensor The computed ELBO. + Examples + -------- + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() + >>> elbo = pln.compute_elbo() + >>> print("elbo", elbo) + >>> print("loglike/n", pln.loglike/pln.n_samples) """ return profiled_elbo_pln( self._counts, @@ -1643,16 +1635,18 @@ class Pln(_model): """ print(f"Fitting a Pln model with {self._description}") + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(counts,add_const = True) + >>> pln.fit() + >>> print(pln.latent_variables.shape) + """, + ) @property def latent_variables(self): - """ - Property representing the latent variables. - - Returns - ------- - torch.Tensor - The latent variables. - """ return self.latent_mean.detach() @property @@ -1830,10 +1824,10 @@ class PlnPCAcollection: The created PlnPCAcollection instance. 
Examples -------- - >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pca_col = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,6]) + >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pca_col = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,6]) See also -------- :class:`~pyPLNmodels.PlnPCA` @@ -2730,16 +2724,19 @@ class PlnPCA(_model): warnings.warn(warning_string) self._rank = self.dim + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> plnpca.fit() + >>> print(plnpca.latent_mean.shape) + """, + ) @property def latent_mean(self) -> torch.Tensor: - """ - Property representing the latent mean. - - Returns - ------- - torch.Tensor - The latent mean tensor. - """ return self._cpu_attribute_or_none("_latent_mean") @property -- GitLab From 03b7a82df9556ba38b59d75ad6e6c3a79a11a552 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 19:44:22 +0200 Subject: [PATCH 019/167] @property was placed before @_add_doc --- pyPLNmodels/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 335fa96b..92ffae7e 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1635,6 +1635,7 @@ class Pln(_model): """ print(f"Fitting a Pln model with {self._description}") + @property @_add_doc( _model, example=""" @@ -1645,7 +1646,6 @@ class Pln(_model): >>> print(pln.latent_variables.shape) """, ) - @property def latent_variables(self): return self.latent_mean.detach() @@ -2724,6 +2724,7 @@ class PlnPCA(_model): warnings.warn(warning_string) self._rank = self.dim + @property @_add_doc( _model, example=""" @@ -2735,7 +2736,6 @@ class PlnPCA(_model): >>> print(plnpca.latent_mean.shape) """, ) - @property def latent_mean(self) -> torch.Tensor: return self._cpu_attribute_or_none("_latent_mean") -- GitLab From 47316411dfdbc6f642bc1de7f10ada21d00b2fda Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 26 Jun 2023 23:41:28 +0200 Subject: [PATCH 020/167] add docstrings Union[tensor, np, pd], and fix bug in test_viz --- pyPLNmodels/_utils.py | 44 +++++++----- pyPLNmodels/models.py | 155 +++++++++++++++++++++++------------------- tests/test_viz.py | 2 +- 3 files changed, 113 insertions(+), 88 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 5945f5ed..50683989 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -232,7 +232,9 @@ def _raise_wrong_dimension_error( raise ValueError(msg) -def _format_data(data: pd.DataFrame) -> torch.Tensor or None: +def _format_data( + data: Union[torch.Tensor, np.ndarray, pd.DataFrame] +) -> torch.Tensor or None: """ Transforms the data in a torch.tensor if the input is an array, and None if the input is None. Raises an error if the input is not an array or None. 
@@ -266,9 +268,9 @@ def _format_data(data: pd.DataFrame) -> torch.Tensor or None: def _format_model_param( - counts: torch.Tensor, - covariates: torch.Tensor, - offsets: torch.Tensor, + counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], + covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame], + offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets_formula: str, take_log_offsets: bool, add_const: bool, @@ -278,11 +280,11 @@ def _format_model_param( Parameters ---------- - counts : torch.Tensor or None, shape (n, ) + counts : Union[torch.Tensor, np.ndarray, pd.DataFrame], shape (n, ) Count data. - covariates : torch.Tensor or None, shape (n, d) or None + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, d) or None Covariate data. - offsets : torch.Tensor or None, shape (n, ) or None + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, ) or None Offset data. offsets_formula : str Formula for calculating offsets. @@ -517,21 +519,29 @@ def _get_simulation_coef_cov_offsets( class PlnParameters: - def __init__(self, *, components, coef, covariates, offsets, coef_inflation=None): + def __init__( + self, + *, + components: Union[torch.Tensor, np.ndarray, pd.DataFrame], + coef: Union[torch.Tensor, np.ndarray, pd.DataFrame], + covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame], + offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], + coef_inflation=None, + ): """ Instantiate all the needed parameters to sample from the PLN model. Parameters ---------- - components : torch.Tensor(keyword-only) + components : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Components of size (p, rank) - coef : torch.Tensor(keyword-only) + coef : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Coefficient of size (d, p) - covariates : torch.Tensor or None(keyword-only) + covariates : : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None(keyword-only) Covariates, size (n, d) or None - offsets : torch.Tensor(keyword-only) + offsets : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Offset, size (n, p) - _coef_inflation : torch.Tensor or None, optional(keyword-only) + _coef_inflation : : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, optional(keyword-only) Coefficient for zero-inflation model, size (d, p) or None. Default is None. """ @@ -778,7 +788,9 @@ def _check_right_rank(data: Dict[str, Any], rank: int) -> None: ) -def _extract_data_from_formula(formula: str, data: Dict[str, Any]) -> Tuple: +def _extract_data_from_formula( + formula: str, data: Dict[str, : Union[torch.Tensor, np.ndarray, pd.DataFrame]] +) -> Tuple: """ Extract data from the given formula and data dictionary. @@ -847,8 +859,8 @@ def _get_dict_initialization( def _to_tensor( - obj: Union[np.ndarray, torch.Tensor, pd.DataFrame, None] -) -> Union[torch.Tensor, None]: + obj: Union[np.ndarray, torch.Tensor, pd.DataFrame] +) -> Union[torch.Tensor]: """ Convert an object to a PyTorch tensor. 
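
The Union[torch.Tensor, np.ndarray, pd.DataFrame] hints added above make explicit that _format_data converts any of the three array types to a tensor, so the public constructors accept them interchangeably. A minimal sketch of what that means for the user (simulated counts; variable names and shapes are illustrative only, not taken from the package's test data):

>>> import numpy as np
>>> import pandas as pd
>>> import torch
>>> from pyPLNmodels import Pln
>>> counts_np = np.random.poisson(1.0, size=(30, 10))  # illustrative simulated counts
>>> pln_from_numpy = Pln(counts_np, add_const = True)
>>> pln_from_pandas = Pln(pd.DataFrame(counts_np), add_const = True)
>>> pln_from_torch = Pln(torch.from_numpy(counts_np), add_const = True)
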
diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 92ffae7e..f4389312 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -2,7 +2,7 @@ import time from abc import ABC, abstractmethod import warnings import os -from typing import Optional, Dict, List, Type, Any, Iterable +from typing import Optional, Dict, List, Type, Any, Iterable, Union import pandas as pd import torch @@ -66,10 +66,10 @@ class _model(ABC): def __init__( self, - counts: torch.Tensor, + counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Optional[torch.Tensor] = None, - offsets: Optional[torch.Tensor] = None, + covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, @@ -80,14 +80,15 @@ class _model(ABC): Parameters ---------- - counts : torch.Tensor + counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] The count data. - covariates : torch.Tensor, optional(keyword-only) + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The covariate data. Defaults to None. - offsets : torch.Tensor, optional(keyword-only) + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". + The formula for offsets. Defaults to "logsum". Overriden if + offsets is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -112,7 +113,7 @@ class _model(ABC): def from_formula( cls, formula: str, - data: dict, + data: dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, offsets_formula: str = "logsum", dict_initialization: Optional[dict] = None, @@ -126,7 +127,8 @@ class _model(ABC): formula : str The formula. data : dict - The data dictionary. + The data dictionary. Each value can be either a torch.Tensor, + a np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) The formula for offsets. Defaults to "logsum". dict_initialization : dict, optional(keyword-only) @@ -180,11 +182,11 @@ class _model(ABC): Parameters ---------- - ax : Optional[Any], optional(keyword-only) + ax : Optional[matplotlib.axes.Axes], optional(keyword-only) The matplotlib axis to use. If None, the current axis is used, by default None. - colors : Optional[Any], optional(keyword-only) + colors : Optional[np.ndarray], optional(keyword-only) The colors to use for plotting, by default None. - show_cov: bool, optional(keyword-only) + show_cov: bool, Optional(keyword-only) If True, will display ellipses with right covariances. Default is False. Raises ------ @@ -468,8 +470,8 @@ class _model(ABC): If not specified, the maximum number of components will be used. Defaults to None. - color (str, optional): The name of the variable used for color coding the scatter plot. - If not specified, the scatter plot will not be color-coded. + color (str, np.ndarray): An array with one label for each + sample in the counts property of the object. Defaults to None. Raises ------ @@ -707,7 +709,9 @@ class _model(ABC): def show(self, axes=None): """ - Show plots. + Show 3 plots. The first one is the covariance of the model. + The second one is the stopping criterion with the runtime in abscisse. + The third one is the elbo. 
Parameters ---------- @@ -881,13 +885,13 @@ class _model(ABC): @latent_mean.setter @_array2tensor - def latent_mean(self, latent_mean): + def latent_mean(self, latent_mean: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the latent mean property. Parameters ---------- - latent_mean : torch.Tensor + latent_mean : Union[torch.Tensor, np.ndarray, pd.DataFrame] The latent mean. Raises @@ -903,13 +907,15 @@ class _model(ABC): @latent_sqrt_var.setter @_array2tensor - def latent_sqrt_var(self, latent_sqrt_var): + def latent_sqrt_var( + self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame] + ): """ Setter for the latent variance property. Parameters ---------- - latent_sqrt_var : torch.Tensor + latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame] The latent variance. Raises @@ -1005,13 +1011,13 @@ class _model(ABC): @counts.setter @_array2tensor - def counts(self, counts): + def counts(self, counts: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the counts property. Parameters ---------- - counts : torch.Tensor + counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] The counts. Raises @@ -1029,13 +1035,13 @@ class _model(ABC): @offsets.setter @_array2tensor - def offsets(self, offsets): + def offsets(self, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the offsets property. Parameters ---------- - offsets : torch.Tensor + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame] The offsets. Raises @@ -1051,13 +1057,13 @@ class _model(ABC): @covariates.setter @_array2tensor - def covariates(self, covariates): + def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the covariates property. Parameters ---------- - covariates : torch.Tensor + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] The covariates. Raises @@ -1070,13 +1076,13 @@ class _model(ABC): @coef.setter @_array2tensor - def coef(self, coef): + def coef(self, coef: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the coef property. Parameters ---------- - coef : torch.Tensor or None + coef : Union[torch.Tensor, np.ndarray, pd.DataFrame] The coefficients. Raises @@ -1157,13 +1163,13 @@ class _model(ABC): """ return self.covariance - def predict(self, covariates=None): + def predict(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None): """ Method for making predictions. Parameters ---------- - covariates : torch.Tensor, optional + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional The covariates, by default None. Returns @@ -1174,16 +1180,16 @@ class _model(ABC): Raises ------ AttributeError - If there are no covariates in the model. + If there are no covariates in the model but some are provided. RuntimeError If the shape of the covariates is incorrect. Notes ----- - If `covariates` is not provided and there are no covariates in the model, None is returned. + If there are covariates in the model, then the mean covariates @ coef is returned. - If `covariates` is provided, it should have the shape `(_, nb_cov)`, where `nb_cov` is the number of covariates. - The predicted values are obtained by multiplying the covariates by the coefficients. - """ if covariates is not None and self.nb_cov == 0: raise AttributeError("No covariates in the model, can't predict") @@ -1228,7 +1234,7 @@ class _model(ABC): Parameters ---------- - ax : Optional[Any], optional + ax : Optional[matplotlib.axes.Axes], optional The matplotlib axis to use. 
If None, the current axis is used, by default None. colors : Optional[Any], optional @@ -1236,7 +1242,7 @@ class _model(ABC): Returns ------- - Any + matplotlib.axes.Axes The matplotlib axis. >>> """ @@ -1261,8 +1267,6 @@ class _model(ABC): # need to do a good init for M and S class Pln(_model): - _NAME = "Pln" - coef: torch.Tensor """ Pln class. @@ -1284,6 +1288,9 @@ class Pln(_model): >>> print(pln) """ + _NAME = "Pln" + coef: torch.Tensor + @_add_doc( _model, example=""" @@ -1302,10 +1309,10 @@ class Pln(_model): ) def __init__( self, - counts: torch.Tensor, + counts: Optional[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Optional[torch.Tensor] = None, - offsets: Optional[torch.Tensor] = None, + covariates: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, + offsets: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, @@ -1341,7 +1348,7 @@ class Pln(_model): def from_formula( cls, formula: str, - data: Any, + data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -1421,7 +1428,7 @@ class Pln(_model): """, ) def viz(self, ax=None, colors=None, show_cov: bool = False): - super().plot_expected_vs_true(ax=ax, colors=colors, show_cov=show_cov) + super().viz(ax=ax, colors=colors, show_cov=show_cov) @_add_doc( _model, @@ -1499,14 +1506,14 @@ class Pln(_model): return None @coef.setter - def coef(self, coef): + def coef(self, coef: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the coef property. Parameters ---------- - coef : torch.Tensor - The coefficients. + coef : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The regression coefficients of the gaussian latent variables. """ def _counts_predictions(self): @@ -1698,7 +1705,8 @@ class Pln(_model): @covariance.setter def covariance(self, covariance): """ - Setter for the covariance property. + Setter for the covariance property. Only here for completeness, since + this function does nothing Parameters ---------- @@ -1710,7 +1718,7 @@ class Pln(_model): class PlnPCAcollection: """ - A collection where item q corresponds to a PlnPCA object with rank q. + A collection where value q corresponds to a PlnPCA object with rank q. Examples -------- @@ -1739,10 +1747,10 @@ class PlnPCAcollection: def __init__( self, - counts: torch.Tensor, + counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Optional[torch.Tensor] = None, - offsets: Optional[torch.Tensor] = None, + covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, + offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets_formula: str = "logsum", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, @@ -1754,11 +1762,11 @@ class PlnPCAcollection: Parameters ---------- - counts : torch.Tensor + counts :Union[torch.Tensor, np.ndarray, pd.DataFrame] The counts. - covariates : torch.Tensor, optional(keyword-only) + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The covariates, by default None. - offsets : torch.Tensor, optional(keyword-only) + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets, by default None. offsets_formula : str, optional(keyword-only) The formula for offsets, by default "logsum". 
@@ -1794,7 +1802,8 @@ class PlnPCAcollection: def from_formula( cls, formula: str, - data: dict, + data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], + *, offsets_formula: str = "logsum", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, @@ -1808,7 +1817,8 @@ class PlnPCAcollection: formula : str The formula. data : dict - The data dictionary. + The data dictionary. Each value can be either + a torch.Tensor, np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) The formula for offsets, by default "logsum". Overriden if data["offsets"] is not None. @@ -1919,13 +1929,13 @@ class PlnPCAcollection: @counts.setter @_array2tensor - def counts(self, counts: torch.Tensor): + def counts(self, counts: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the counts property. Parameters ---------- - counts : torch.Tensor + counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] The counts. """ for model in self.values(): @@ -1933,13 +1943,13 @@ class PlnPCAcollection: @coef.setter @_array2tensor - def coef(self, coef: torch.Tensor): + def coef(self, coef: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the coef property. Parameters ---------- - coef : torch.Tensor + coef : Union[torch.Tensor, np.ndarray, pd.DataFrame] The coefficients. """ for model in self.values(): @@ -1947,13 +1957,13 @@ class PlnPCAcollection: @covariates.setter @_array2tensor - def covariates(self, covariates: torch.Tensor): + def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the covariates property. Parameters ---------- - covariates : torch.Tensor + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] The covariates. """ for model in self.values(): @@ -1973,13 +1983,13 @@ class PlnPCAcollection: @offsets.setter @_array2tensor - def offsets(self, offsets: torch.Tensor): + def offsets(self, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the offsets property. Parameters ---------- - offsets : torch.Tensor + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame] The offsets. """ for model in self.values(): @@ -2474,7 +2484,7 @@ class PlnPCAcollection: str The string representation of the useful methods. 
""" - return ".show(), .best_model()" + return ".show(), .best_model(), .keys(), .items(), .values()" @property def _useful_properties_string(self) -> str: @@ -2540,10 +2550,10 @@ class PlnPCA(_model): ) def __init__( self, - counts: torch.Tensor, + counts: Optional[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Optional[torch.Tensor] = None, - offsets: Optional[torch.Tensor] = None, + covariates: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, + offsets: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets_formula: str = "logsum", rank: int = 5, dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -2585,7 +2595,8 @@ class PlnPCA(_model): def from_formula( cls, formula: str, - data: Any, + data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], + *, rank: int = 5, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -2663,7 +2674,7 @@ class PlnPCA(_model): >>> plt.show() """, ) - def viz(self, ax=None, colors=None, show_cov: bool = False): + def viz(self, ax: matplotlib.axes.Axes = None, colors=None, show_cov: bool = False): super().plot_expected_vs_true(ax=ax, colors=colors, show_cov=show_cov) @_add_doc( @@ -2706,7 +2717,9 @@ class PlnPCA(_model): >>> plnpca.plot_pca_correlation_graph(["a","b"], indices_of_variables = [4,8]) """, ) - def plot_pca_correlation_graph(self, variables_names, indices_of_variables=None): + def plot_pca_correlation_graph( + self, variables_names: List[str], indices_of_variables=None + ): super().plot_pca_correlation_graph( variables_names=variables_names, indices_of_variables=indices_of_variables ) @@ -2837,13 +2850,13 @@ class PlnPCA(_model): @covariates.setter @_array2tensor - def covariates(self, covariates: torch.Tensor): + def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ Setter for the covariates. Parameters ---------- - covariates : torch.Tensor + covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] The covariates tensor. """ _check_data_shape(self.counts, covariates, self.offsets) diff --git a/tests/test_viz.py b/tests/test_viz.py index 4cab97f9..3cafe93f 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -87,7 +87,7 @@ def test_plot_pca_correlation_graph_without_names(pln): @filter_models(["Pln", "PlnPCA"]) def test_expected_vs_true(pln): pln.plot_expected_vs_true() - fig, ax = plt.figure() + fig, ax = plt.subplots() pln.plot_expected_vs_true(ax=ax) -- GitLab From c7c9cbbeb5efa258a6eddc782ab4958e5cdf3cc0 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 08:35:38 +0200 Subject: [PATCH 021/167] bug in type hints and viz definition. --- pyPLNmodels/_utils.py | 2 +- pyPLNmodels/models.py | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 50683989..2e7f3ce3 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -789,7 +789,7 @@ def _check_right_rank(data: Dict[str, Any], rank: int) -> None: def _extract_data_from_formula( - formula: str, data: Dict[str, : Union[torch.Tensor, np.ndarray, pd.DataFrame]] + formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]] ) -> Tuple: """ Extract data from the given formula and data dictionary. 
diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index f4389312..ac0ed77a 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -12,6 +12,7 @@ import matplotlib.pyplot as plt from sklearn.decomposition import PCA import plotly.express as px from mlxtend.plotting import plot_pca_correlation_graph +import matplotlib from ._closed_forms import ( _closed_formula_coef, @@ -1309,10 +1310,10 @@ class Pln(_model): ) def __init__( self, - counts: Optional[torch.Tensor, np.ndarray, pd.DataFrame], + counts: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - covariates: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, - offsets: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, + covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, @@ -1348,7 +1349,7 @@ class Pln(_model): def from_formula( cls, formula: str, - data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], + data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -1802,7 +1803,7 @@ class PlnPCAcollection: def from_formula( cls, formula: str, - data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], + data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, offsets_formula: str = "logsum", ranks: Iterable[int] = range(3, 5), @@ -2550,10 +2551,10 @@ class PlnPCA(_model): ) def __init__( self, - counts: Optional[torch.Tensor, np.ndarray, pd.DataFrame], + counts: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - covariates: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, - offsets: Optional[torch.Tensor, np.ndarray, pd.DataFrame] = None, + covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", rank: int = 5, dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -2595,7 +2596,7 @@ class PlnPCA(_model): def from_formula( cls, formula: str, - data: Dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], + data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, rank: int = 5, offsets_formula: str = "logsum", @@ -2675,7 +2676,7 @@ class PlnPCA(_model): """, ) def viz(self, ax: matplotlib.axes.Axes = None, colors=None, show_cov: bool = False): - super().plot_expected_vs_true(ax=ax, colors=colors, show_cov=show_cov) + super().viz(ax=ax, colors=colors, show_cov=show_cov) @_add_doc( _model, -- GitLab From ed26e61a21c8fca147dda44febc02c4be51a744a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 10:07:55 +0200 Subject: [PATCH 022/167] add docstrings in plnparam class --- pyPLNmodels/_utils.py | 77 ++++++++++++++++++++++++++++++++++--------- tests/test_viz.py | 2 ++ 2 files changed, 63 insertions(+), 16 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 2e7f3ce3..d8d027ac 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -545,37 +545,47 @@ class PlnParameters: Coefficient for zero-inflation model, size (d, p) or None. Default is None. 
""" - self.components = _format_data(components) - self.coef = _format_data(coef) - self.covariates = _format_data(covariates) - self.offsets = _format_data(offsets) - self.coef_inflation = _format_data(coef_inflation) - if self.coef is not None: + self._components = _format_data(components) + self._coef = _format_data(coef) + self._covariates = _format_data(covariates) + self._offsets = _format_data(offsets) + self._coef_inflation = _format_data(coef_inflation) + if self._coef is not None: _check_two_dimensions_are_equal( - "components", "coef", self.components.shape[0], self.coef.shape[1], 0, 1 + "components", + "coef", + self._components.shape[0], + self._coef.shape[1], + 0, + 1, ) - if self.offsets is not None: + if self._offsets is not None: _check_two_dimensions_are_equal( "components", "offsets", - self.components.shape[0], - self.offsets.shape[1], + self._components.shape[0], + self._offsets.shape[1], 0, 1, ) - if self.covariates is not None: + if self._covariates is not None: _check_two_dimensions_are_equal( "offsets", "covariates", - self.offsets.shape[0], - self.covariates.shape[0], + self._offsets.shape[0], + self._covariates.shape[0], 0, 0, ) _check_two_dimensions_are_equal( - "covariates", "coef", self.covariates.shape[1], self.coef.shape[0], 1, 0 + "covariates", + "coef", + self._covariates.shape[1], + self._coef.shape[0], + 1, + 0, ) - for array in [self.components, self.coef, self.covariates, self.offsets]: + for array in [self._components, self._coef, self._covariates, self._offsets]: if array is not None: if len(array.shape) != 2: raise RuntimeError( @@ -587,7 +597,42 @@ class PlnParameters: """ Covariance of the model. """ - return self.components @ self.components.T + return self._components @ self._components.T + + @property + def components(self): + """ + Components of the model. + """ + return self._components + + @property + def offsets(self): + """ + Data offsets. + """ + return self._offsets + + @property + def coef(self): + """ + Coef of the model. + """ + return self.coef + + @property + def covariates(self): + """ + Data covariates. + """ + return self.covariates + + @property + def coef_inflation(self): + """ + Inflation coefficient of the model. + """ + return self._coef_inflation def _check_two_dimensions_are_equal( diff --git a/tests/test_viz.py b/tests/test_viz.py index 3cafe93f..be24fcf1 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -60,6 +60,8 @@ def test_viz_pcacol(plnpca): colors = np.random.randint(low=0, high=2, size=n_samples) model.viz(colors=colors) plt.show() + model.viz(show_cov=True) + plt.show() @pytest.mark.parametrize("pln", dict_fixtures["real_fitted_pln_intercept_array"]) -- GitLab From 7f0e53d0dace96ca1f0d3294a282bf5803a68d51 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 11:39:01 +0200 Subject: [PATCH 023/167] remove tests for debugging the publish_package ci. --- .gitlab-ci.yml | 60 +++++++++++++++++++++++++------------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d0f6a915..d51311ec 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,36 +1,36 @@ -stages: - - checks - - build - - publish +# stages: +# - checks +# - build +# - publish -black: - stage: checks - image: registry.gitlab.com/pipeline-components/black:latest - script: - - black --check --verbose -- . 
- tags: - - docker +# black: +# stage: checks +# image: registry.gitlab.com/pipeline-components/black:latest +# script: +# - black --check --verbose -- . +# tags: +# - docker -tests: - stage: checks - image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" - before_script: - pip install '.[tests]' - script: - - pip install . - - cd tests - - pytest -examples: - stage: checks - image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" - before_script: - pip install '.[tests]' - script: - - pip install . - - cd tests - - python create_example_files.py - - ./test_examples.sh +# tests: +# stage: checks +# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" +# before_script: +# pip install '.[tests]' +# script: +# - pip install . +# - cd tests +# - pytest +# examples: +# stage: checks +# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" +# before_script: +# pip install '.[tests]' +# script: +# - pip install . +# - cd tests +# - python create_example_files.py +# - ./test_examples.sh -- GitLab From 715c96d44feb33abc0830274261b7a732df5aa35 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 11:40:24 +0200 Subject: [PATCH 024/167] bug in the ci --- .gitlab-ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d51311ec..05199a35 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,8 +1,8 @@ -# stages: -# - checks -# - build -# - publish +stages: + - checks + - build + - publish # black: # stage: checks -- GitLab From af8c718d82e3ace9f24696b742b0604f6f27a6c9 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 11:41:24 +0200 Subject: [PATCH 025/167] not only on tags for debugging. --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 05199a35..be352639 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -57,8 +57,8 @@ publish_package: - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker - only: - - tags + # only: + # - tags pages: stage: publish -- GitLab From e8e53c2699b2c519df454b87a28e17b034451024 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 11:43:37 +0200 Subject: [PATCH 026/167] debug. 
--- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index be352639..c3935dbe 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -54,6 +54,7 @@ publish_package: before_script: - pip install twine script: + - echo ${pypln_token} - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker -- GitLab From 26804397d30f8321d64f6c647ba9d38a75e903e2 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 13:59:10 +0200 Subject: [PATCH 027/167] small change to try cicd --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c3935dbe..2425e78f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,6 +4,7 @@ stages: - build - publish +# # black: # stage: checks # image: registry.gitlab.com/pipeline-components/black:latest -- GitLab From 6895580667d30c746f942188fbe00bc22af84061 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 14:04:08 +0200 Subject: [PATCH 028/167] right ci --- .gitlab-ci.yml | 58 +++++++++++++++++++++++++------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2425e78f..c21b0e65 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,34 +4,34 @@ stages: - build - publish -# -# black: -# stage: checks -# image: registry.gitlab.com/pipeline-components/black:latest -# script: -# - black --check --verbose -- . -# tags: -# - docker -# tests: -# stage: checks -# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" -# before_script: -# pip install '.[tests]' -# script: -# - pip install . -# - cd tests -# - pytest -# examples: -# stage: checks -# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" -# before_script: -# pip install '.[tests]' -# script: -# - pip install . -# - cd tests -# - python create_example_files.py -# - ./test_examples.sh +black: + stage: checks + image: registry.gitlab.com/pipeline-components/black:latest + script: + - black --check --verbose -- . + tags: + - docker + +tests: + stage: checks + image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + before_script: + pip install '.[tests]' + script: + - pip install . + - cd tests + - pytest +examples: + stage: checks + image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + before_script: + pip install '.[tests]' + script: + - pip install . 
+ - cd tests + - python create_example_files.py + - ./test_examples.sh @@ -59,8 +59,8 @@ publish_package: - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker - # only: - # - tags + only: + - tags pages: stage: publish -- GitLab From a9695db5c58dbd5b1c6ab63bda47bb42512022e7 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 27 Jun 2023 14:58:13 +0200 Subject: [PATCH 029/167] small docstrings for PlnPCAcollection --- pyPLNmodels/models.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index ac0ed77a..e0fd0444 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1726,10 +1726,12 @@ class PlnPCAcollection: >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, get_simulation_parameters, sample_pln >>> counts, labels = get_real_count_data(return_labels = True) >>> data = {"counts": counts} - >>> plncas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]) - >>> plncas.fit() - >>> print(plncas) - >>> plncas.show() + >>> plnpcas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]) + >>> plnpcas.fit() + >>> print(plnpcas) + >>> plnpcas.show() + >>> print(plnpcas.best_model()) + >>> print(plnpcas[5]) >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) >>> counts = sample_pln(plnparam) -- GitLab From d216bd108ea139ff492f3c7621ee64b98a5023e5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:20:33 +0200 Subject: [PATCH 030/167] update README for JOSS. --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 6ca8d043..9a8a2e18 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,9 @@ > The Poisson lognormal model and variants can be used for analysis of mutivariate count data. > This package implements -> efficient algorithms to fit such models. +> efficient algorithms extracting meaningful data from difficult to interpret +> and complex count data. It has been built to scale on large datasets even +> though it has memory limitations. <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From eff91f372eaa26a11087a77a5d5a011f2d981851 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:30:05 +0200 Subject: [PATCH 031/167] test for README. --- README.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 9a8a2e18..fe7f0650 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,13 @@ # PLNmodels: Poisson lognormal models -> The Poisson lognormal model and variants can be used for analysis of mutivariate count data. -> This package implements -> efficient algorithms extracting meaningful data from difficult to interpret -> and complex count data. It has been built to scale on large datasets even -> though it has memory limitations. +The Poisson lognormal model and variants can be used for analysis of mutivariate count data. +This package implements +efficient algorithms extracting meaningful data from difficult to interpret +and complex count data. It has been built to scale on large datasets even +though it has memory limitations. 
Possible fields of applications are +- test +- test bis <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From 4322d781ad253f62fc78fc3c7499d55190efe108 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:31:09 +0200 Subject: [PATCH 032/167] other test --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index fe7f0650..fd5b82a9 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ # PLNmodels: Poisson lognormal models -The Poisson lognormal model and variants can be used for analysis of mutivariate count data. -This package implements -efficient algorithms extracting meaningful data from difficult to interpret -and complex count data. It has been built to scale on large datasets even -though it has memory limitations. Possible fields of applications are -- test -- test bis +> The Poisson lognormal model and variants can be used for analysis of mutivariate count data. +> This package implements +> efficient algorithms extracting meaningful data from difficult to interpret +> and complex count data. It has been built to scale on large datasets even +> though it has memory limitations. Possible fields of applications are +> - test +> - test bis <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From 5fb53379211501f6c61870d7f6d9ed0037e99493 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:32:10 +0200 Subject: [PATCH 033/167] test on README joss --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index fd5b82a9..12664535 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,6 @@ > efficient algorithms extracting meaningful data from difficult to interpret > and complex count data. It has been built to scale on large datasets even > though it has memory limitations. Possible fields of applications are -> - test -> - test bis <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From 846bef1c36cefaec567d0af577ceea3351c0cf49 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:49:32 +0200 Subject: [PATCH 034/167] change to README --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 12664535..b38549bb 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,10 @@ > The Poisson lognormal model and variants can be used for analysis of mutivariate count data. > This package implements > efficient algorithms extracting meaningful data from difficult to interpret -> and complex count data. It has been built to scale on large datasets even -> though it has memory limitations. Possible fields of applications are +> and complex multivariate count data. It has been built to scale on large datasets even +> though it has memory limitations. Possible fields of applications include +> - Genomics (number of times a gene is expressed in a cell) +> - Ecology (number of individuals of some species in a specific site) <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. 
See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From e22511f7faba480d6dcacc1e1887c68a6251ff72 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:50:59 +0200 Subject: [PATCH 035/167] typo in the README. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b38549bb..411029ef 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyp ## Installation -**PLNmodels** is available on +**pyPLNmodels** is available on [pypi](https://pypi.org/project/pyPLNmodels/). The development version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). -- GitLab From dda16e27490492d7588c365b7ca0f30e1c3d0038 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:52:45 +0200 Subject: [PATCH 036/167] add index.html in the .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 89d5095d..c73ed815 100644 --- a/.gitignore +++ b/.gitignore @@ -151,3 +151,4 @@ test.py ## directories that outputs when running the tests tests/Pln* slides/ +index.html -- GitLab From 996f7af5da8358404a2c57884901da154ef41f64 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 16:55:46 +0200 Subject: [PATCH 037/167] add transform functionality in the README. --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 411029ef..93e01027 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ > though it has memory limitations. Possible fields of applications include > - Genomics (number of times a gene is expressed in a cell) > - Ecology (number of individuals of some species in a specific site) +> One main functionality is to normalize the data to obtain more valueable data. <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> @@ -44,6 +45,7 @@ oaks = load_oaks() pln = Pln.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True) pln.fit() print(pln) +transformed_data = pln.transform() ``` @@ -53,6 +55,7 @@ print(pln) pca = PlnPCAcollection.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) pca.fit() print(pca) +transformed_data = pln.transform() ``` -- GitLab From 484319576ca83e759235cc8d2be8cc2651dd1312 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 17:16:02 +0200 Subject: [PATCH 038/167] add docs for transform variables. --- pyPLNmodels/models.py | 55 +++++++++++++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 15 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index e0fd0444..7176bbd3 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1476,6 +1476,25 @@ class Pln(_model): variables_names=variables_names, indices_of_variables=indices_of_variables ) + @_add_doc( + _model, + returns=""" + torch.Tensor + The transformed counts (latent variables of the model). 
+ """, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> counts = get_real_count_data() + >>> data = {"counts": counts} + >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> pln.fit() + >>> transformed_counts = pln.transform() + >>> print(transformed_counts.shape) + """, + ) + def transform(self): + return super().transform() + @property def _description(self): """ @@ -1671,12 +1690,7 @@ class Pln(_model): def transform(self): """ - Method for transforming the model. - - Returns - ------- - torch.Tensor - The transformed model. + Method for transforming the counts. Can be seen as a normalization of the counts. """ return self.latent_variables @@ -2542,6 +2556,7 @@ class PlnPCA(_model): >>> from pyPLNmodels import PlnPCA, get_real_count_data >>> counts= get_real_count_data() >>> pca = PlnPCA(counts, add_const = True) + >>> pca.fit() >>> print(pca) """, returns=""" @@ -3124,20 +3139,30 @@ class PlnPCA(_model): ) self._components = components - def transform(self, project: bool = True) -> torch.Tensor: - """ - Transform the model. - + @_add_doc( + _model, + params=""" Parameters ---------- project : bool, optional Whether to project the latent variables, by default True. - - Returns - ------- + """, + returns=""" torch.Tensor - The transformed model. - """ + The transformed counts (latent variables of the model). + """, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> counts= get_real_count_data() + >>> pca = PlnPCA(counts, add_const = True) + >>> pca.fit() + >>> transformed_counts_low_dim = pca.transform() + >>> transformed_counts_high_dim = pca.transform(project = False) + >>> print(transformed_counts_low_dim.shape) + >>> print(transformed_counts_high_dim.shape) + """, + ) + def transform(self, project: bool = True) -> torch.Tensor: if project is True: return self.projected_latent_variables return self.latent_variables -- GitLab From bb63c878c339a6c5a0f7aea3ea3957c7ccac7b34 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 17:21:22 +0200 Subject: [PATCH 039/167] add other functionalities of the package. --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 93e01027..2398c85b 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,9 @@ > though it has memory limitations. Possible fields of applications include > - Genomics (number of times a gene is expressed in a cell) > - Ecology (number of individuals of some species in a specific site) -> One main functionality is to normalize the data to obtain more valueable data. +> One main functionality is to normalize the count data to obtain more valuable +> data. It also analyse the significance of each variable and their correlation as well as the weight of +> covariates (if available). <!-- accompanied with a set of --> <!-- > functions for visualization and diagnostic. See [this deck of --> <!-- > slides](https://pln-team.github.io/slideshow/) for a --> -- GitLab From 3794206fe67cd196618cbdaf060b6e37df072698 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 28 Jun 2023 18:01:24 +0200 Subject: [PATCH 040/167] changed individuals of species to species abundances. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2398c85b..9401cfe6 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ > and complex multivariate count data. 
It has been built to scale on large datasets even > though it has memory limitations. Possible fields of applications include > - Genomics (number of times a gene is expressed in a cell) -> - Ecology (number of individuals of some species in a specific site) +> - Ecology (species abundances) > One main functionality is to normalize the count data to obtain more valuable > data. It also analyse the significance of each variable and their correlation as well as the weight of > covariates (if available). -- GitLab From 7bbdea9eca7f0771d624bb5ce5cb410c51e6de39 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 12:04:29 +0200 Subject: [PATCH 041/167] add qq_plots on Pln. does not work. --- pyPLNmodels/models.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 7176bbd3..091afe0b 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -13,6 +13,7 @@ from sklearn.decomposition import PCA import plotly.express as px from mlxtend.plotting import plot_pca_correlation_graph import matplotlib +from scipy import stats from ._closed_forms import ( _closed_formula_coef, @@ -400,6 +401,21 @@ class _model(ABC): self._update_closed_forms() return loss + def transform(self): + """ + Method for transforming the counts. Can be seen as a normalization of the counts. + """ + return self.latent_variables + + def qq_plots(self): + centered_latent = self.latent_variables - torch.mean( + self.latent_variables, axis=0 + ) + chol = torch.linalg.cholesky(torch.inverse(self.covariance)) + residus = torch.matmul(centered_latent.unsqueeze(1), chol.unsqueeze(0)) + stats.probplot(residus.ravel(), plot=plt) + plt.show() + def pca_projected_latent_variables(self, n_components: Optional[int] = None): """ Perform PCA on the latent variables and project them onto a lower-dimensional space. @@ -1688,12 +1704,6 @@ class Pln(_model): """ return self.dim * (self.dim + self.nb_cov) - def transform(self): - """ - Method for transforming the counts. Can be seen as a normalization of the counts. 
- """ - return self.latent_variables - @property def covariance(self): """ -- GitLab From f2fcb191a172475e550d7407b0bfee7ca12f3a2c Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 15:00:58 +0200 Subject: [PATCH 042/167] changed all counts occurences for endog --- .gitignore | 2 + pyPLNmodels/_initialization.py | 38 ++-- pyPLNmodels/_utils.py | 98 +++++----- pyPLNmodels/elbos.py | 48 ++--- pyPLNmodels/load.py | 16 +- pyPLNmodels/models.py | 320 ++++++++++++++++----------------- pyPLNmodels/oaks.py | 8 +- tests/conftest.py | 38 ++-- tests/import_data.py | 14 +- tests/test_common.py | 10 +- tests/test_pln_full.py | 2 +- tests/test_plnpcacollection.py | 2 +- tests/test_setters.py | 20 +-- 13 files changed, 308 insertions(+), 308 deletions(-) diff --git a/.gitignore b/.gitignore index c73ed815..fb7059f0 100644 --- a/.gitignore +++ b/.gitignore @@ -152,3 +152,5 @@ test.py tests/Pln* slides/ index.html + +tests/examples/*.py diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index 2510b963..ea22106b 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -10,16 +10,16 @@ else: def _init_covariance( - counts: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor + endog: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor ) -> torch.Tensor: """ - Initialization for the covariance for the Pln model. Take the log of counts - (careful when counts=0), and computes the Maximum Likelihood + Initialization for the covariance for the Pln model. Take the log of endog + (careful when endog=0), and computes the Maximum Likelihood Estimator in the gaussian case. Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) @@ -33,15 +33,15 @@ def _init_covariance( torch.Tensor Covariance matrix of size (p,p) """ - log_y = torch.log(counts + (counts == 0) * math.exp(-2)) + log_y = torch.log(endog + (endog == 0) * math.exp(-2)) log_y_centered = log_y - torch.mean(log_y, axis=0) - n_samples = counts.shape[0] + n_samples = endog.shape[0] sigma_hat = 1 / (n_samples - 1) * (log_y_centered.T) @ log_y_centered return sigma_hat def _init_components( - counts: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor, rank: int + endog: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor, rank: int ) -> torch.Tensor: """ Initialization for components for the Pln model. 
Get a first guess for covariance @@ -49,7 +49,7 @@ def _init_components( Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) @@ -65,13 +65,13 @@ def _init_components( torch.Tensor Initialization of components of size (p,rank) """ - sigma_hat = _init_covariance(counts, covariates, coef).detach() + sigma_hat = _init_covariance(endog, covariates, coef).detach() components = _components_from_covariance(sigma_hat, rank) return components def _init_latent_mean( - counts: torch.Tensor, + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor, coef: torch.Tensor, @@ -86,7 +86,7 @@ def _init_latent_mean( Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) @@ -109,7 +109,7 @@ def _init_latent_mean( torch.Tensor The initialized latent mean with size (n,rank) """ - mode = torch.randn(counts.shape[0], components.shape[1], device=DEVICE) + mode = torch.randn(endog.shape[0], components.shape[1], device=DEVICE) mode.requires_grad_(True) optimizer = torch.optim.Rprop([mode], lr=lr) crit = 2 * eps @@ -117,7 +117,7 @@ def _init_latent_mean( keep_condition = True i = 0 while i < n_iter_max and keep_condition: - batch_loss = log_posterior(counts, covariates, offsets, mode, components, coef) + batch_loss = log_posterior(endog, covariates, offsets, mode, components, coef) loss = -torch.mean(batch_loss) loss.backward() optimizer.step() @@ -155,14 +155,14 @@ def _components_from_covariance(covariance: torch.Tensor, rank: int) -> torch.Te def _init_coef( - counts: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor ) -> torch.Tensor: """ Initialize the coefficient for the Pln model using Poisson regression model. 
Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (n, p) covariates : torch.Tensor Covariates, size (n, d) @@ -178,12 +178,12 @@ def _init_coef( return None poiss_reg = _PoissonReg() - poiss_reg.fit(counts, covariates, offsets) + poiss_reg.fit(endog, covariates, offsets) return poiss_reg.beta def log_posterior( - counts: torch.Tensor, + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor, posterior_mean: torch.Tensor, @@ -195,7 +195,7 @@ def log_posterior( Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (batch_size, p) covariates : torch.Tensor or None Covariates, size (batch_size, d) or (d) @@ -229,7 +229,7 @@ def log_posterior( - 1 / 2 * torch.norm(posterior_mean, dim=-1) ** 2 ) second_term = torch.sum( - -torch.exp(log_lambda) + log_lambda * counts - _log_stirling(counts), axis=-1 + -torch.exp(log_lambda) + log_lambda * endog - _log_stirling(endog), axis=-1 ) return first_term + second_term diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index d8d027ac..ba6b2a39 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -112,7 +112,7 @@ def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Ten Returns ------- tuple[torch.Tensor, torch.Tensor, torch.Tensor] if return_latent is True - Tuple containing counts (torch.Tensor), gaussian (torch.Tensor), and ksi (torch.Tensor) + Tuple containing endog (torch.Tensor), gaussian (torch.Tensor), and ksi (torch.Tensor) torch.Tensor if return_latent is False See also :func:`~pyPLNmodels.PlnParameters` @@ -143,12 +143,12 @@ def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Ten else: ksi = 0 - counts = (1 - ksi) * torch.poisson(parameter) + endog = (1 - ksi) * torch.poisson(parameter) torch.random.set_rng_state(prev_state) if return_latent is True: - return counts, gaussian, ksi - return counts + return endog, gaussian, ksi + return endog def _log_stirling(integer: torch.Tensor) -> torch.Tensor: @@ -176,13 +176,13 @@ def _trunc_log(tens: torch.Tensor, eps: float = 1e-16) -> torch.Tensor: return torch.log(integer) -def _get_offsets_from_sum_of_counts(counts: torch.Tensor) -> torch.Tensor: +def _get_offsets_from_sum_of_endog(endog: torch.Tensor) -> torch.Tensor: """ - Compute offsets from the sum of counts. + Compute offsets from the sum of endog. Parameters ---------- - counts : torch.Tensor + endog : torch.Tensor Samples with size (n, p) Returns @@ -190,8 +190,8 @@ def _get_offsets_from_sum_of_counts(counts: torch.Tensor) -> torch.Tensor: torch.Tensor Offsets of size (n, p) """ - sum_of_counts = torch.sum(counts, axis=1) - return sum_of_counts.repeat((counts.shape[1], 1)).T + sum_of_endog = torch.sum(endog, axis=1) + return sum_of_endog.repeat((endog.shape[1], 1)).T def _raise_wrong_dimension_error( @@ -268,7 +268,7 @@ def _format_data( def _format_model_param( - counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], + endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets_formula: str, @@ -280,7 +280,7 @@ def _format_model_param( Parameters ---------- - counts : Union[torch.Tensor, np.ndarray, pd.DataFrame], shape (n, ) + endog : Union[torch.Tensor, np.ndarray, pd.DataFrame], shape (n, ) Count data. covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, d) or None Covariate data. 
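For readers following this rename, the offsets handling that `_get_offsets_from_sum_of_endog` and `_format_model_param` document above (and in the next hunk) boils down to the short sketch below. This is illustrative standalone code, not the package's implementation; the helper names `sketch_offsets` and `sketch_add_const` are invented for the example.

```
# Sketch of the offsets / intercept logic described in the surrounding docstrings.
# Assumes a (n, p) count matrix; mirrors the behaviour, not the actual package API.
from typing import Optional
import torch

def sketch_offsets(endog: torch.Tensor, offsets_formula: str = "logsum") -> torch.Tensor:
    if offsets_formula == "logsum":
        # each sample's offset is the log of its total count, repeated over the p columns
        row_sums = torch.sum(endog, axis=1)
        return torch.log(row_sums.repeat((endog.shape[1], 1)).T)
    # "zero": no offsets at all
    return torch.zeros_like(endog, dtype=torch.float64)

def sketch_add_const(covariates: Optional[torch.Tensor], n_samples: int) -> torch.Tensor:
    # add_const=True appends (or creates) an intercept column of ones
    ones = torch.ones(n_samples, 1)
    if covariates is None:
        return ones
    return torch.cat((covariates, ones), dim=1)
```

For example, `sketch_offsets(torch.ones(3, 4))` returns a 3 x 4 tensor filled with log(4), which is what the "logsum" default produces for constant counts.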
@@ -300,29 +300,29 @@ def _format_model_param( Raises ------ ValueError - If counts has negative values or offsets_formula is not None and not "logsum" or "zero" + If endog has negative values or offsets_formula is not None and not "logsum" or "zero" """ - counts = _format_data(counts) - if torch.min(counts) < 0: + endog = _format_data(endog) + if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") covariates = _format_data(covariates) if add_const is True: if covariates is None: - covariates = torch.ones(counts.shape[0], 1) + covariates = torch.ones(endog.shape[0], 1) else: if _has_null_variance(covariates) is False: covariates = torch.concat( - (covariates, torch.ones(counts.shape[0]).unsqueeze(1)), dim=1 + (covariates, torch.ones(endog.shape[0]).unsqueeze(1)), dim=1 ) if offsets is None: if offsets_formula == "logsum": - print("Setting the offsets as the log of the sum of counts") + print("Setting the offsets as the log of the sum of endog") offsets = ( - torch.log(_get_offsets_from_sum_of_counts(counts)).double().to(DEVICE) + torch.log(_get_offsets_from_sum_of_endog(endog)).double().to(DEVICE) ) elif offsets_formula == "zero": print("Setting the offsets to zero") - offsets = torch.zeros(counts.shape, device=DEVICE) + offsets = torch.zeros(endog.shape, device=DEVICE) else: raise ValueError( 'Wrong offsets_formula. Expected either "zero" or "logsum", got {offsets_formula}' @@ -331,7 +331,7 @@ def _format_model_param( offsets = _format_data(offsets).to(DEVICE) if take_log_offsets is True: offsets = torch.log(offsets) - return counts, covariates, offsets + return endog, covariates, offsets def _has_null_variance(tensor: torch.Tensor) -> bool: @@ -352,27 +352,27 @@ def _has_null_variance(tensor: torch.Tensor) -> bool: def _check_data_shape( - counts: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor ) -> None: """ Check if the shape of the input data is valid. Parameters ---------- - counts : torch.Tensor, shape (n, p) + endog : torch.Tensor, shape (n, p) Count data. covariates : torch.Tensor or None, shape (n, d) or None Covariate data. offsets : torch.Tensor or None, shape (n, p) or None Offset data. """ - n_counts, p_counts = counts.shape + n_endog, p_endog = endog.shape n_offsets, p_offsets = offsets.shape - _check_two_dimensions_are_equal("counts", "offsets", n_counts, n_offsets, 0, 0) + _check_two_dimensions_are_equal("endog", "offsets", n_endog, n_offsets, 0, 0) if covariates is not None: n_cov, _ = covariates.shape - _check_two_dimensions_are_equal("counts", "covariates", n_counts, n_cov, 0, 0) - _check_two_dimensions_are_equal("counts", "offsets", p_counts, p_offsets, 1, 1) + _check_two_dimensions_are_equal("endog", "covariates", n_endog, n_cov, 0, 0) + _check_two_dimensions_are_equal("endog", "offsets", p_endog, p_offsets, 1, 1) def _nice_string_of_dict(dictionnary: dict) -> str: @@ -751,21 +751,21 @@ def get_simulated_count_data( Returns ------- Tuple[torch.Tensor, torch.Tensor, torch.Tensor] - Tuple containing counts, covariates, and offsets. + Tuple containing endog, covariates, and offsets. 
""" pln_param = get_simulation_parameters( n_samples=n_samples, dim=dim, nb_cov=nb_cov, rank=rank, add_const=add_const ) - counts = sample_pln(pln_param, seed=seed, return_latent=False) + endog = sample_pln(pln_param, seed=seed, return_latent=False) if return_true_param is True: return ( - counts, + endog, pln_param.covariates, pln_param.offsets, pln_param.covariance, pln_param.coef, ) - return pln_param.counts, pln_param.cov, pln_param.offsets + return pln_param.endog, pln_param.cov, pln_param.offsets def get_real_count_data( @@ -799,14 +799,14 @@ def get_real_count_data( f"\nTaking the whole max_dim variables. Requested:dim={dim}, returned:{max_dim}" ) dim = max_dim - counts_stream = pkg_resources.resource_stream(__name__, "data/scRT/counts.csv") - counts = pd.read_csv(counts_stream).values[:n_samples, :dim] - print(f"Returning dataset of size {counts.shape}") + endog_stream = pkg_resources.resource_stream(__name__, "data/scRT/endog.csv") + endog = pd.read_csv(endog_stream).values[:n_samples, :dim] + print(f"Returning dataset of size {endog.shape}") if return_labels is False: - return counts + return endog labels_stream = pkg_resources.resource_stream(__name__, "data/scRT/labels.csv") labels = np.array(pd.read_csv(labels_stream).values[:n_samples].squeeze()) - return counts, labels + return endog, labels def _check_right_rank(data: Dict[str, Any], rank: int) -> None: @@ -849,16 +849,16 @@ def _extract_data_from_formula( Returns ------- Tuple - A tuple containing the extracted counts, covariates, and offsets. + A tuple containing the extracted endog, covariates, and offsets. """ dmatrix = dmatrices(formula, data=data) - counts = dmatrix[0] + endog = dmatrix[0] covariates = dmatrix[1] if covariates.size == 0: covariates = None offsets = data.get("offsets", None) - return counts, covariates, offsets + return endog, covariates, offsets def _is_dict_of_dict(dictionary: Dict[Any, Any]) -> bool: @@ -946,7 +946,7 @@ def _array2tensor(func): def _handle_data( - counts, + endog, covariates, offsets, offsets_formula: str, @@ -958,7 +958,7 @@ def _handle_data( Parameters ---------- - counts : The counts data. If a DataFrame is provided, the column names are stored for later use. + endog : The endog data. If a DataFrame is provided, the column names are stored for later use. covariates : The covariates data. offsets : The offsets data. offsets_formula : The formula used for offsets. @@ -967,22 +967,22 @@ def _handle_data( Returns ------- - tuple: A tuple containing the processed counts, covariates, offsets, and column counts (if available). + tuple: A tuple containing the processed endog, covariates, offsets, and column endog (if available). Raises ------ - ValueError: If the shapes of counts, covariates, and offsets do not match. + ValueError: If the shapes of endog, covariates, and offsets do not match. 
""" - if isinstance(counts, pd.DataFrame): - column_counts = counts.columns + if isinstance(endog, pd.DataFrame): + column_endog = endog.columns else: - column_counts = None + column_endog = None - counts, covariates, offsets = _format_model_param( - counts, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, covariates, offsets = _format_model_param( + endog, covariates, offsets, offsets_formula, take_log_offsets, add_const ) - _check_data_shape(counts, covariates, offsets) - return counts, covariates, offsets, column_counts + _check_data_shape(endog, covariates, offsets) + return endog, covariates, offsets, column_endog def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also=None): diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index a2fa1b2f..b0298a6a 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -6,7 +6,7 @@ from typing import Optional def elbo_pln( - counts: torch.Tensor, + endog: torch.Tensor, offsets: torch.Tensor, covariates: Optional[torch.Tensor], latent_mean: torch.Tensor, @@ -19,7 +19,7 @@ def elbo_pln( Parameters: ---------- - counts : torch.Tensor + endog : torch.Tensor Counts with size (n, p). offsets : torch.Tensor Offset with size (n, p). @@ -39,11 +39,11 @@ def elbo_pln( torch.Tensor The ELBO (Evidence Lower Bound), of size one. """ - n_samples, dim = counts.shape + n_samples, dim = endog.shape s_rond_s = torch.square(latent_sqrt_var) offsets_plus_m = offsets + latent_mean if covariates is None: - XB = torch.zeros_like(counts) + XB = torch.zeros_like(endog) else: XB = covariates @ coef m_minus_xb = latent_mean - XB @@ -52,18 +52,18 @@ def elbo_pln( ) elbo = -0.5 * n_samples * torch.logdet(covariance) elbo += torch.sum( - counts * offsets_plus_m + endog * offsets_plus_m - 0.5 * torch.exp(offsets_plus_m + s_rond_s) + 0.5 * torch.log(s_rond_s) ) elbo -= 0.5 * torch.trace(torch.inverse(covariance) @ d_plus_minus_xb2) - elbo -= torch.sum(_log_stirling(counts)) + elbo -= torch.sum(_log_stirling(endog)) elbo += 0.5 * n_samples * dim return elbo / n_samples def profiled_elbo_pln( - counts: torch.Tensor, + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor, latent_mean: torch.Tensor, @@ -76,7 +76,7 @@ def profiled_elbo_pln( Parameters: ---------- - counts : torch.Tensor + endog : torch.Tensor Counts with size (n, p). covariates : torch.Tensor Covariates with size (n, d). @@ -92,7 +92,7 @@ def profiled_elbo_pln( torch.Tensor The ELBO (Evidence Lower Bound) with size 1. """ - n_samples, _ = counts.shape + n_samples, _ = endog.shape s_squared = torch.square(latent_sqrt_var) offsets_plus_mean = offsets + latent_mean closed_coef = _closed_formula_coef(covariates, latent_mean) @@ -101,16 +101,16 @@ def profiled_elbo_pln( ) elbo = -0.5 * n_samples * torch.logdet(closed_covariance) elbo += torch.sum( - counts * offsets_plus_mean + endog * offsets_plus_mean - torch.exp(offsets_plus_mean + s_squared / 2) + 0.5 * torch.log(s_squared) ) - elbo -= torch.sum(_log_stirling(counts)) + elbo -= torch.sum(_log_stirling(endog)) return elbo / n_samples def elbo_plnpca( - counts: torch.Tensor, + endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor, latent_mean: torch.Tensor, @@ -124,7 +124,7 @@ def elbo_plnpca( Parameters: ---------- - counts : torch.Tensor + endog : torch.Tensor Counts with size (n, p). covariates : torch.Tensor Covariates with size (n, d). @@ -145,7 +145,7 @@ def elbo_plnpca( torch.Tensor The ELBO (Evidence Lower Bound) with size 1, with a gradient. 
""" - n_samples = counts.shape[0] + n_samples = endog.shape[0] rank = components.shape[1] if covariates is None: XB = 0 @@ -153,7 +153,7 @@ def elbo_plnpca( XB = covariates @ coef log_intensity = offsets + XB + latent_mean @ components.T s_squared = torch.square(latent_sqrt_var) - counts_log_intensity = torch.sum(counts * log_intensity) + endog_log_intensity = torch.sum(endog * log_intensity) minus_intensity_plus_s_squared_cct = torch.sum( -torch.exp(log_intensity + 0.5 * s_squared @ (components * components).T) ) @@ -161,20 +161,20 @@ def elbo_plnpca( mm_plus_s_squared = -0.5 * torch.sum( torch.square(latent_mean) + torch.square(latent_sqrt_var) ) - log_stirling_counts = torch.sum(_log_stirling(counts)) + log_stirling_endog = torch.sum(_log_stirling(endog)) return ( - counts_log_intensity + endog_log_intensity + minus_intensity_plus_s_squared_cct + minus_logs_squared + mm_plus_s_squared - - log_stirling_counts + - log_stirling_endog + 0.5 * n_samples * rank ) / n_samples ## should rename some variables so that is is clearer when we see the formula def elbo_zi_pln( - counts, + endog, covariates, offsets, latent_mean, @@ -189,7 +189,7 @@ def elbo_zi_pln( See the doc for more details on the computation. Args: - counts: torch.tensor. Counts with size (n,p) + endog: torch.tensor. Counts with size (n,p) 0: torch.tensor. Offset, size (n,p) covariates: torch.tensor. Covariates, size (n,d) latent_mean: torch.tensor. Variational parameter with size (n,p) @@ -204,8 +204,8 @@ def elbo_zi_pln( if torch.norm(pi * dirac - pi) > 0.0001: print("Bug") return False - n_samples = counts.shape[0] - dim = counts.shape[1] + n_samples = endog.shape[0] + dim = endog.shape[1] s_rond_s = torch.square(latent_sqrt_var) offsets_plus_m = offsets + latent_mean m_minus_xb = latent_mean - covariates @ coef @@ -213,9 +213,9 @@ def elbo_zi_pln( elbo = torch.sum( (1 - pi) * ( - counts @ offsets_plus_m + endog @ offsets_plus_m - torch.exp(offsets_plus_m + s_rond_s / 2) - - _log_stirling(counts), + - _log_stirling(endog), ) + pi ) diff --git a/pyPLNmodels/load.py b/pyPLNmodels/load.py index 26871ae8..6abeffd1 100644 --- a/pyPLNmodels/load.py +++ b/pyPLNmodels/load.py @@ -20,19 +20,19 @@ def load_model(path_of_directory: str) -> Dict[str, Any]: Examples -------- >>> from pyPLNmodels import PlnPCA, Pln, get_real_count_data, load_model - >>> counts= get_real_count_data() - >>> pca = PlnPCA(counts, add_const = True) + >>> endog= get_real_count_data() + >>> pca = PlnPCA(endog, add_const = True) >>> pca.fit() >>> pca.save() >>> dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") - >>> loaded_pca = PlnPCA(counts, add_const = True, dict_initialization = dict_init) + >>> loaded_pca = PlnPCA(endog, add_const = True, dict_initialization = dict_init) >>> print(loaded_pca) - >>> pln = Pln(counts, add_const = True) + >>> pln = Pln(endog, add_const = True) >>> pln.fit() >>> pln.save() >>> dict_init = load_model("Pln_nbcov_1_dim_200") - >>> loaded_pln = Pln(counts, add_const = True, dict_initialization = dict_init) + >>> loaded_pln = Pln(endog, add_const = True, dict_initialization = dict_init) >>> print(loaded_pln) See also -------- @@ -95,12 +95,12 @@ def load_plnpcacollection( Examples -------- >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, load_plnpcacollection - >>> counts = get_real_count_data() - >>> pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6]) + >>> endog = get_real_count_data() + >>> pcas = PlnPCAcollection(endog, add_const = True, ranks = [4,5,6]) >>> pcas.fit() >>> pcas.save() >>> 
dict_init = load_plnpcacollection("PlnPCAcollection_nbcov_1_dim_200") - >>> loaded_pcas = PlnPCAcollection(counts, add_const = True, ranks = [4,5,6], dict_of_dict_initialization = dict_init) + >>> loaded_pcas = PlnPCAcollection(endog, add_const = True, ranks = [4,5,6], dict_of_dict_initialization = dict_init) >>> print(loaded_pcas) See also diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 091afe0b..62351a03 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -58,7 +58,7 @@ class _model(ABC): """ _WINDOW: int = 15 - _counts: torch.Tensor + _endog: torch.Tensor _covariates: torch.Tensor _offsets: torch.Tensor _coef: torch.Tensor @@ -68,7 +68,7 @@ class _model(ABC): def __init__( self, - counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], + endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, @@ -82,7 +82,7 @@ class _model(ABC): Parameters ---------- - counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] + endog : Union[torch.Tensor, np.ndarray, pd.DataFrame] The count data. covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The covariate data. Defaults to None. @@ -99,12 +99,12 @@ class _model(ABC): Whether to add a column of one in the covariates. Defaults to True. """ ( - self._counts, + self._endog, self._covariates, self._offsets, - self.column_counts, + self.column_endog, ) = _handle_data( - counts, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, covariates, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False self._plotargs = _PlotArgs(self._WINDOW) @@ -138,9 +138,9 @@ class _model(ABC): take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. """ - counts, covariates, offsets = _extract_data_from_formula(formula, data) + endog, covariates, offsets = _extract_data_from_formula(formula, data) return cls( - counts, + endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -231,26 +231,26 @@ class _model(ABC): @property def n_samples(self) -> int: """ - The number of samples, i.e. the first dimension of the counts. + The number of samples, i.e. the first dimension of the endog. Returns ------- int The number of samples. """ - return self._counts.shape[0] + return self._endog.shape[0] @property def dim(self) -> int: """ - The second dimension of the counts. + The second dimension of the endog. Returns ------- int - The second dimension of the counts. + The second dimension of the endog. """ - return self._counts.shape[1] + return self._endog.shape[1] @property def nb_cov(self) -> int: @@ -270,7 +270,7 @@ class _model(ABC): """ Initialize coefficients smartly. """ - self._coef = _init_coef(self._counts, self._covariates, self._offsets) + self._coef = _init_coef(self._endog, self._covariates, self._offsets) def _random_init_coef(self): """ @@ -403,7 +403,7 @@ class _model(ABC): def transform(self): """ - Method for transforming the counts. Can be seen as a normalization of the counts. + Method for transforming the endog. Can be seen as a normalization of the endog. """ return self.latent_variables @@ -488,7 +488,7 @@ class _model(ABC): Defaults to None. color (str, np.ndarray): An array with one label for each - sample in the counts property of the object. + sample in the endog property of the object. Defaults to None. 
Raises ------ @@ -530,12 +530,12 @@ class _model(ABC): A list of variable names to visualize. indices_of_variables : Optional[List[int]], optional A list of indices corresponding to the variables. - If None, indices are determined based on `column_counts`, by default None + If None, indices are determined based on `column_endog`, by default None Raises ------ ValueError - If `indices_of_variables` is None and `column_counts` is not set. + If `indices_of_variables` is None and `column_endog` is not set. ValueError If the length of `indices_of_variables` is different from the length of `variables_names`. @@ -544,16 +544,16 @@ class _model(ABC): None """ if indices_of_variables is None: - if self.column_counts is None: + if self.column_endog is None: raise ValueError( "No names have been given to the column of " - "counts. Please set the column_counts to the" + "endog. Please set the column_endog to the" "needed names or instantiate a new model with" "a pd.DataFrame with appropriate column names" ) indices_of_variables = [] for variables_name in variables_names: - index = self.column_counts.get_loc(variables_name) + index = self.column_endog.get_loc(variables_name) indices_of_variables.append(index) else: if len(indices_of_variables) != len(variables_names): @@ -835,7 +835,7 @@ class _model(ABC): The dictionary of data. """ return { - "counts": self.counts, + "endog": self.endog, "covariates": self.covariates, "offsets": self.offsets, } @@ -991,16 +991,16 @@ class _model(ABC): ) @property - def counts(self): + def endog(self): """ - Property representing the counts. + Property representing the endog. Returns ------- torch.Tensor or None - The counts or None. + The endog or None. """ - return self._cpu_attribute_or_none("_counts") + return self._cpu_attribute_or_none("_endog") @property def offsets(self): @@ -1026,29 +1026,29 @@ class _model(ABC): """ return self._cpu_attribute_or_none("_covariates") - @counts.setter + @endog.setter @_array2tensor - def counts(self, counts: Union[torch.Tensor, np.ndarray, pd.DataFrame]): + def endog(self, endog: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ - Setter for the counts property. + Setter for the endog property. Parameters ---------- - counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The counts. + endog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The endog. Raises ------ ValueError - If the shape of the counts is incorrect or if the input is negative. + If the shape of the endog is incorrect or if the input is negative. """ - if self.counts.shape != counts.shape: + if self.endog.shape != endog.shape: raise ValueError( - f"Wrong shape for the counts. Expected {self.counts.shape}, got {counts.shape}" + f"Wrong shape for the endog. Expected {self.endog.shape}, got {endog.shape}" ) - if torch.min(counts) < 0: + if torch.min(endog) < 0: raise ValueError("Input should be non-negative only.") - self._counts = counts + self._endog = endog @offsets.setter @_array2tensor @@ -1086,9 +1086,9 @@ class _model(ABC): Raises ------ ValueError - If the shape of the covariates or counts is incorrect. + If the shape of the covariates or endog is incorrect. """ - _check_data_shape(self.counts, covariates, self.offsets) + _check_data_shape(self.endog, covariates, self.offsets) self._covariates = covariates @coef.setter @@ -1247,7 +1247,7 @@ class _model(ABC): def plot_expected_vs_true(self, ax=None, colors=None): """ - Plot the predicted value of the counts against the counts. + Plot the predicted value of the endog against the endog. 
Parameters ---------- @@ -1267,11 +1267,11 @@ class _model(ABC): raise RuntimeError("Please fit the model before.") if ax is None: ax = plt.gca() - predictions = self._counts_predictions().ravel().detach() + predictions = self._endog_predictions().ravel().detach() if colors is not None: colors = np.repeat(np.array(colors), repeats=self.dim).ravel() - sns.scatterplot(x=self.counts.ravel(), y=predictions, hue=colors, ax=ax) - max_y = int(torch.max(self.counts.ravel()).item()) + sns.scatterplot(x=self.endog.ravel(), y=predictions, hue=colors, ax=ax) + max_y = int(torch.max(self.endog.ravel()).item()) y = np.linspace(0, max_y, max_y) ax.plot(y, y, c="red") ax.set_yscale("log") @@ -1290,17 +1290,17 @@ class Pln(_model): Examples -------- >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> print(pln) >>> pln.viz(colors = labels) >>> from pyPLNmodels import Pln, get_simulation_parameters, sample_pln >>> param = get_simulation_parameters() - >>> counts = sample_pln(param) - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data) + >>> endog = sample_pln(param) + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data) >>> pln.fit() >>> print(pln) """ @@ -1312,8 +1312,8 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts= get_real_count_data() - >>> pln = Pln(counts, add_const = True) + >>> endog= get_real_count_data() + >>> pln = Pln(endog, add_const = True) >>> pln.fit() >>> print(pln) """, @@ -1326,7 +1326,7 @@ class Pln(_model): ) def __init__( self, - counts: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], + endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, @@ -1336,7 +1336,7 @@ class Pln(_model): add_const: bool = True, ): super().__init__( - counts=counts, + endog=endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -1350,9 +1350,9 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data = data) """, returns=""" Pln @@ -1371,9 +1371,9 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): - counts, covariates, offsets = _extract_data_from_formula(formula, data) + endog, covariates, offsets = _extract_data_from_formula(formula, data) return cls( - counts, + endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -1386,8 +1386,8 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> pln = Pln(counts,add_const = True) + >>> endog = get_real_count_data() + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> print(pln) """, @@ -1416,8 +1416,8 @@ class Pln(_model): example=""" >>> import matplotlib.pyplot as plt >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - 
>>> pln = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> pln.plot_expected_vs_true() >>> plt.show() @@ -1433,8 +1433,8 @@ class Pln(_model): example=""" >>> import matplotlib.pyplot as plt >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> pln.viz() >>> plt.show() @@ -1451,9 +1451,9 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data = data) >>> pln.fit() >>> pca_proj = pln.pca_projected_latent_variables() >>> print(pca_proj.shape) @@ -1466,9 +1466,9 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data = data) >>> pln.fit() >>> pln.scatter_pca_matrix(n_components = 5) """, @@ -1480,9 +1480,9 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data = data) >>> pln.fit() >>> pln.plot_pca_correlation_graph(["a","b"], indices_of_variables = [4,8]) """, @@ -1496,16 +1496,16 @@ class Pln(_model): _model, returns=""" torch.Tensor - The transformed counts (latent variables of the model). + The transformed endog (latent variables of the model). """, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pln = Pln.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pln = Pln.from_formula("endog ~ 1", data = data) >>> pln.fit() - >>> transformed_counts = pln.transform() - >>> print(transformed_counts.shape) + >>> transformed_endog = pln.transform() + >>> print(transformed_endog.shape) """, ) def transform(self): @@ -1552,7 +1552,7 @@ class Pln(_model): The regression coefficients of the gaussian latent variables. 
""" - def _counts_predictions(self): + def _endog_predictions(self): return torch.exp( self._offsets + self._latent_mean + 1 / 2 * self._latent_sqrt_var**2 ) @@ -1608,15 +1608,15 @@ class Pln(_model): Examples -------- >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> elbo = pln.compute_elbo() >>> print("elbo", elbo) >>> print("loglike/n", pln.loglike/pln.n_samples) """ return profiled_elbo_pln( - self._counts, + self._endog, self._covariates, self._offsets, self._latent_mean, @@ -1683,8 +1683,8 @@ class Pln(_model): _model, example=""" >>> from pyPLNmodels import Pln, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) >>> pln.fit() >>> print(pln.latent_variables.shape) """, @@ -1748,9 +1748,9 @@ class PlnPCAcollection: Examples -------- >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data, get_simulation_parameters, sample_pln - >>> counts, labels = get_real_count_data(return_labels = True) - >>> data = {"counts": counts} - >>> plnpcas = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,8, 12]) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> data = {"endog": endog} + >>> plnpcas = PlnPCAcollection.from_formula("endog ~ 1", data = data, ranks = [5,8, 12]) >>> plnpcas.fit() >>> print(plnpcas) >>> plnpcas.show() @@ -1758,9 +1758,9 @@ class PlnPCAcollection: >>> print(plnpcas[5]) >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) - >>> counts = sample_pln(plnparam) - >>> data = {"counts":counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} - >>> plnpcas = PlnPCAcollection.from_formula("counts ~ 0 + cov", data = data, ranks = [5,8,12]) + >>> endog = sample_pln(plnparam) + >>> data = {"endog":endog, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> plnpcas = PlnPCAcollection.from_formula("endog ~ 0 + cov", data = data, ranks = [5,8,12]) >>> plnpcas.fit() >>> print(plnpcas) >>> pcas.show() @@ -1774,7 +1774,7 @@ class PlnPCAcollection: def __init__( self, - counts: Union[torch.Tensor, np.ndarray, pd.DataFrame], + endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, @@ -1789,8 +1789,8 @@ class PlnPCAcollection: Parameters ---------- - counts :Union[torch.Tensor, np.ndarray, pd.DataFrame] - The counts. + endog :Union[torch.Tensor, np.ndarray, pd.DataFrame] + The endog. covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The covariates, by default None. 
offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) @@ -1815,12 +1815,12 @@ class PlnPCAcollection: """ self._dict_models = {} ( - self._counts, + self._endog, self._covariates, self._offsets, - self.column_counts, + self.column_endog, ) = _handle_data( - counts, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, covariates, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False self._init_models(ranks, dict_of_dict_initialization) @@ -1862,17 +1862,17 @@ class PlnPCAcollection: Examples -------- >>> from pyPLNmodels import PlnPCAcollection, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pca_col = PlnPCAcollection.from_formula("counts ~ 1", data = data, ranks = [5,6]) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pca_col = PlnPCAcollection.from_formula("endog ~ 1", data = data, ranks = [5,6]) See also -------- :class:`~pyPLNmodels.PlnPCA` :func:`~pyPLNmodels.PlnPCAcollection.__init__` """ - counts, covariates, offsets = _extract_data_from_formula(formula, data) + endog, covariates, offsets = _extract_data_from_formula(formula, data) return cls( - counts, + endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -1895,16 +1895,16 @@ class PlnPCAcollection: return self[self.ranks[0]].covariates @property - def counts(self) -> torch.Tensor: + def endog(self) -> torch.Tensor: """ - Property representing the counts. + Property representing the endog. Returns ------- torch.Tensor - The counts. + The endog. """ - return self[self.ranks[0]].counts + return self[self.ranks[0]].endog @property def coef(self) -> Dict[int, torch.Tensor]: @@ -1954,19 +1954,19 @@ class PlnPCAcollection: """ return {model.rank: model.latent_sqrt_var for model in self.values()} - @counts.setter + @endog.setter @_array2tensor - def counts(self, counts: Union[torch.Tensor, np.ndarray, pd.DataFrame]): + def endog(self, endog: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ - Setter for the counts property. + Setter for the endog property. Parameters ---------- - counts : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The counts. + endog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The endog. 
""" for model in self.values(): - model.counts = counts + model.endog = endog @coef.setter @_array2tensor @@ -2042,7 +2042,7 @@ class PlnPCAcollection: rank, dict_of_dict_initialization ) self._dict_models[rank] = PlnPCA( - counts=self._counts, + endog=self._endog, covariates=self._covariates, offsets=self._offsets, rank=rank, @@ -2067,7 +2067,7 @@ class PlnPCAcollection: ranks, dict_of_dict_initialization ) self._dict_models[rank] = PlnPCA( - self._counts, + self._endog, self._covariates, self._offsets, ranks, @@ -2534,17 +2534,17 @@ class PlnPCA(_model): Examples -------- >>> from pyPLNmodels import PlnPCA, get_real_count_data, get_simulation_parameters, sample_pln - >>> counts, labels = get_real_count_data(return_labels = True) - >>> data = {"counts": counts} - >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> data = {"endog": endog} + >>> pca = PlnPCA.from_formula("endog ~ 1", data = data, rank = 5) >>> pca.fit() >>> print(pca) >>> pca.viz(colors = labels) >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) - >>> counts = sample_pln(plnparam) - >>> data = {"counts": counts, "cov": plnparam.covariates, "offsets": plnparam.offsets} - >>> plnpca = PlnPCA.from_formula("counts ~ 0 + cov", data = data, rank = 5) + >>> endog = sample_pln(plnparam) + >>> data = {"endog": endog, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> plnpca = PlnPCA.from_formula("endog ~ 0 + cov", data = data, rank = 5) >>> plnpca.fit() >>> print(plnpca) @@ -2564,8 +2564,8 @@ class PlnPCA(_model): """, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts= get_real_count_data() - >>> pca = PlnPCA(counts, add_const = True) + >>> endog= get_real_count_data() + >>> pca = PlnPCA(endog, add_const = True) >>> pca.fit() >>> print(pca) """, @@ -2578,7 +2578,7 @@ class PlnPCA(_model): ) def __init__( self, - counts: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], + endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, @@ -2590,7 +2590,7 @@ class PlnPCA(_model): ): self._rank = rank super().__init__( - counts=counts, + endog=endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -2608,9 +2608,9 @@ class PlnPCA(_model): """, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> pca = PlnPCA.from_formula("counts ~ 1", data = data, rank = 5) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> pca = PlnPCA.from_formula("endog ~ 1", data = data, rank = 5) """, returns=""" PlnPCA @@ -2629,9 +2629,9 @@ class PlnPCA(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, ): - counts, covariates, offsets = _extract_data_from_formula(formula, data) + endog, covariates, offsets = _extract_data_from_formula(formula, data) return cls( - counts, + endog, covariates=covariates, offsets=offsets, offsets_formula=offsets_formula, @@ -2644,8 +2644,8 @@ class PlnPCA(_model): _model, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> plnpca = PlnPCA(counts,add_const = True, rank = 6) + >>> endog = get_real_count_data() + >>> plnpca = PlnPCA(endog,add_const = True, rank = 6) >>> 
plnpca.fit() >>> print(plnpca) """, @@ -2674,8 +2674,8 @@ class PlnPCA(_model): example=""" >>> import matplotlib.pyplot as plt >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> plnpca = Pln(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> plnpca = Pln(endog,add_const = True) >>> plnpca.fit() >>> plnpca.plot_expected_vs_true() >>> plt.show() @@ -2691,8 +2691,8 @@ class PlnPCA(_model): example=""" >>> import matplotlib.pyplot as plt >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts, labels = get_real_count_data(return_labels = True) - >>> plnpca = PlnPCA(counts,add_const = True) + >>> endog, labels = get_real_count_data(return_labels = True) + >>> plnpca = PlnPCA(endog,add_const = True) >>> plnpca.fit() >>> plnpca.viz() >>> plt.show() @@ -2709,9 +2709,9 @@ class PlnPCA(_model): _model, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> plnpca = PlnPCA.from_formula("endog ~ 1", data = data) >>> plnpca.fit() >>> pca_proj = plnpca.pca_projected_latent_variables() >>> print(pca_proj.shape) @@ -2724,9 +2724,9 @@ class PlnPCA(_model): _model, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> plnpca = PlnPCA.from_formula("endog ~ 1", data = data) >>> plnpca.fit() >>> plnpca.scatter_pca_matrix(n_components = 5) """, @@ -2738,9 +2738,9 @@ class PlnPCA(_model): _model, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> plnpca = PlnPCA.from_formula("endog ~ 1", data = data) >>> plnpca.fit() >>> plnpca.plot_pca_correlation_graph(["a","b"], indices_of_variables = [4,8]) """, @@ -2770,9 +2770,9 @@ class PlnPCA(_model): _model, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts = get_real_count_data() - >>> data = {"counts": counts} - >>> plnpca = PlnPCA.from_formula("counts ~ 1", data = data) + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> plnpca = PlnPCA.from_formula("endog ~ 1", data = data) >>> plnpca.fit() >>> print(plnpca.latent_mean.shape) """, @@ -2804,7 +2804,7 @@ class PlnPCA(_model): """ return self._latent_sqrt_var**2 - def _counts_predictions(self): + def _endog_predictions(self): covariance_a_posteriori = torch.sum( (self._components**2).unsqueeze(0) * (self.latent_sqrt_var**2).unsqueeze(1), @@ -2887,7 +2887,7 @@ class PlnPCA(_model): covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] The covariates tensor. 
""" - _check_data_shape(self.counts, covariates, self.offsets) + _check_data_shape(self.endog, covariates, self.offsets) self._covariates = covariates print("Setting coef to initialization") self._smart_init_coef() @@ -2943,7 +2943,7 @@ class PlnPCA(_model): super()._smart_init_coef() if not hasattr(self, "_components"): self._components = _init_components( - self._counts, self._covariates, self._coef, self._rank + self._endog, self._covariates, self._coef, self._rank ) def _random_init_model_parameters(self): @@ -2969,7 +2969,7 @@ class PlnPCA(_model): if not hasattr(self, "_latent_mean"): self._latent_mean = ( _init_latent_mean( - self._counts, + self._endog, self._covariates, self._offsets, self._coef, @@ -3007,7 +3007,7 @@ class PlnPCA(_model): The ELBO value. """ return elbo_plnpca( - self._counts, + self._endog, self._covariates, self._offsets, self._latent_mean, @@ -3159,17 +3159,17 @@ class PlnPCA(_model): """, returns=""" torch.Tensor - The transformed counts (latent variables of the model). + The transformed endog (latent variables of the model). """, example=""" >>> from pyPLNmodels import PlnPCA, get_real_count_data - >>> counts= get_real_count_data() - >>> pca = PlnPCA(counts, add_const = True) + >>> endog= get_real_count_data() + >>> pca = PlnPCA(endog, add_const = True) >>> pca.fit() - >>> transformed_counts_low_dim = pca.transform() - >>> transformed_counts_high_dim = pca.transform(project = False) - >>> print(transformed_counts_low_dim.shape) - >>> print(transformed_counts_high_dim.shape) + >>> transformed_endog_low_dim = pca.transform() + >>> transformed_endog_high_dim = pca.transform(project = False) + >>> print(transformed_endog_low_dim.shape) + >>> print(transformed_endog_high_dim.shape) """, ) def transform(self, project: bool = True) -> torch.Tensor: @@ -3199,13 +3199,13 @@ class ZIPln(Pln): super()._smart_init_model_parameters() if not hasattr(self, "_covariance"): self._covariance = _init_covariance( - self._counts, self._covariates, self._coef + self._endog, self._covariates, self._coef ) if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) def _random_init_latent_parameters(self): - self._dirac = self._counts == 0 + self._dirac = self._endog == 0 self._latent_mean = torch.randn(self.n_samples, self.dim) self._latent_sqrt_var = torch.randn(self.n_samples, self.dim) self._pi = ( @@ -3215,7 +3215,7 @@ class ZIPln(Pln): def compute_elbo(self): return elbo_zi_pln( - self._counts, + self._endog, self._covariates, self._offsets, self._latent_mean, diff --git a/pyPLNmodels/oaks.py b/pyPLNmodels/oaks.py index 6676d572..c2e126ba 100644 --- a/pyPLNmodels/oaks.py +++ b/pyPLNmodels/oaks.py @@ -15,7 +15,7 @@ def load_oaks(): For each sample, 3 additional covariates (tree, dist2ground, orientation) are known. The data is provided as dictionary with the following keys - counts a 114 x 116 np.array of integer (counts) + endog a 114 x 116 np.array of integer (endog) offsets a 114 x 116 np.array of integer (offsets) tree a 114 x 1 vector of character for the tree status with respect to the pathogen (susceptible, intermediate or resistant) dist2ground a 114 x 1 vector encoding the distance of the sampled leaf to the base of the ground @@ -30,16 +30,16 @@ def load_oaks(): Pathogen Erysiphe alphitoides . Microb Ecol 72, 870–880 (2016). 
doi:10.1007/s00248-016-0777-x """ - counts_stream = pkg_resources.resource_stream(__name__, "data/oaks/counts.csv") + endog_stream = pkg_resources.resource_stream(__name__, "data/oaks/counts.csv") offsets_stream = pkg_resources.resource_stream(__name__, "data/oaks/offsets.csv") covariates_stream = pkg_resources.resource_stream( __name__, "data/oaks/covariates.csv" ) - counts = pd.read_csv(counts_stream) + endog = pd.read_csv(endog_stream) offsets = pd.read_csv(offsets_stream) covariates = pd.read_csv(covariates_stream) oaks = { - "counts": counts.to_numpy(), + "endog": endog.to_numpy(), "offsets": offsets.to_numpy(), "tree": covariates.tree.to_numpy(), "dist2ground": covariates.distTOground.to_numpy(), diff --git a/tests/conftest.py b/tests/conftest.py index f983df5e..e7a23ddd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,17 +25,17 @@ from tests.import_data import ( ) -counts_sim_0cov = data_sim_0cov["counts"] +endog_sim_0cov = data_sim_0cov["endog"] covariates_sim_0cov = data_sim_0cov["covariates"] offsets_sim_0cov = data_sim_0cov["offsets"] -counts_sim_2cov = data_sim_2cov["counts"] +endog_sim_2cov = data_sim_2cov["endog"] covariates_sim_2cov = data_sim_2cov["covariates"] offsets_sim_2cov = data_sim_2cov["offsets"] -counts_real = data_real["counts"] -counts_real = pd.DataFrame(counts_real) -counts_real.columns = [f"var_{i}" for i in range(counts_real.shape[1])] +endog_real = data_real["endog"] +endog_real = pd.DataFrame(endog_real) +endog_real.columns = [f"var_{i}" for i in range(endog_real.shape[1])] def add_fixture_to_dict(my_dict, string_fixture): @@ -119,7 +119,7 @@ dict_fixtures = {} def simulated_pln_0cov_array(request): cls = request.param pln = cls( - counts_sim_0cov, + endog_sim_0cov, covariates=covariates_sim_0cov, offsets=offsets_sim_0cov, add_const=False, @@ -132,7 +132,7 @@ def simulated_pln_0cov_array(request): def simulated_fitted_pln_0cov_array(request): cls = request.param pln = cls( - counts_sim_0cov, + endog_sim_0cov, covariates=covariates_sim_0cov, offsets=offsets_sim_0cov, add_const=False, @@ -144,7 +144,7 @@ def simulated_fitted_pln_0cov_array(request): @pytest.fixture(params=params) def simulated_pln_0cov_formula(request): cls = request.param - pln = cls("counts ~ 0", data_sim_0cov) + pln = cls("endog ~ 0", data_sim_0cov) return pln @@ -152,7 +152,7 @@ def simulated_pln_0cov_formula(request): @cache def simulated_fitted_pln_0cov_formula(request): cls = request.param - pln = cls("counts ~ 0", data_sim_0cov) + pln = cls("endog ~ 0", data_sim_0cov) pln.fit() return pln @@ -162,7 +162,7 @@ def simulated_loaded_pln_0cov_formula(simulated_fitted_pln_0cov_formula): simulated_fitted_pln_0cov_formula.save() return generate_new_model( simulated_fitted_pln_0cov_formula, - "counts ~ 0", + "endog ~ 0", data_sim_0cov, ) @@ -172,7 +172,7 @@ def simulated_loaded_pln_0cov_array(simulated_fitted_pln_0cov_array): simulated_fitted_pln_0cov_array.save() return generate_new_model( simulated_fitted_pln_0cov_array, - counts_sim_0cov, + endog_sim_0cov, covariates=covariates_sim_0cov, offsets=offsets_sim_0cov, add_const=False, @@ -217,7 +217,7 @@ dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_pln_0cov", sim_p def simulated_pln_2cov_array(request): cls = request.param pln_full = cls( - counts_sim_2cov, + endog_sim_2cov, covariates=covariates_sim_2cov, offsets=offsets_sim_2cov, add_const=False, @@ -235,7 +235,7 @@ def simulated_fitted_pln_2cov_array(simulated_pln_2cov_array): @cache def simulated_pln_2cov_formula(request): cls = request.param - pln_full = 
cls("counts ~ 0 + covariates", data_sim_2cov) + pln_full = cls("endog ~ 0 + covariates", data_sim_2cov) return pln_full @@ -250,7 +250,7 @@ def simulated_loaded_pln_2cov_formula(simulated_fitted_pln_2cov_formula): simulated_fitted_pln_2cov_formula.save() return generate_new_model( simulated_fitted_pln_2cov_formula, - "counts ~0 + covariates", + "endog ~0 + covariates", data_sim_2cov, ) @@ -260,7 +260,7 @@ def simulated_loaded_pln_2cov_array(simulated_fitted_pln_2cov_array): simulated_fitted_pln_2cov_array.save() return generate_new_model( simulated_fitted_pln_2cov_array, - counts_sim_2cov, + endog_sim_2cov, covariates=covariates_sim_2cov, offsets=offsets_sim_2cov, add_const=False, @@ -303,7 +303,7 @@ dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_pln_2cov", sim_p @cache def real_pln_intercept_array(request): cls = request.param - pln_full = cls(counts_real, add_const=True) + pln_full = cls(endog_real, add_const=True) return pln_full @@ -317,7 +317,7 @@ def real_fitted_pln_intercept_array(real_pln_intercept_array): @cache def real_pln_intercept_formula(request): cls = request.param - pln_full = cls("counts ~ 1", data_real) + pln_full = cls("endog ~ 1", data_real) return pln_full @@ -331,7 +331,7 @@ def real_fitted_pln_intercept_formula(real_pln_intercept_formula): def real_loaded_pln_intercept_formula(real_fitted_pln_intercept_formula): real_fitted_pln_intercept_formula.save() return generate_new_model( - real_fitted_pln_intercept_formula, "counts ~ 1", data=data_real + real_fitted_pln_intercept_formula, "endog ~ 1", data=data_real ) @@ -340,7 +340,7 @@ def real_loaded_pln_intercept_array(real_fitted_pln_intercept_array): real_fitted_pln_intercept_array.save() return generate_new_model( real_fitted_pln_intercept_array, - counts_real, + endog_real, add_const=True, ) diff --git a/tests/import_data.py b/tests/import_data.py index 154f5644..c44353bb 100644 --- a/tests/import_data.py +++ b/tests/import_data.py @@ -7,14 +7,14 @@ from pyPLNmodels import ( ( - counts_sim_0cov, + endog_sim_0cov, covariates_sim_0cov, offsets_sim_0cov, true_covariance_0cov, true_coef_0cov, ) = get_simulated_count_data(return_true_param=True, nb_cov=0, add_const=False) ( - counts_sim_2cov, + endog_sim_2cov, covariates_sim_2cov, offsets_sim_2cov, true_covariance_2cov, @@ -22,7 +22,7 @@ from pyPLNmodels import ( ) = get_simulated_count_data(return_true_param=True, nb_cov=2, add_const=False) data_sim_0cov = { - "counts": counts_sim_0cov, + "endog": endog_sim_0cov, "covariates": covariates_sim_0cov, "offsets": offsets_sim_0cov, } @@ -31,11 +31,9 @@ true_sim_2cov = {"Sigma": true_covariance_2cov, "beta": true_coef_2cov} data_sim_2cov = { - "counts": counts_sim_2cov, + "endog": endog_sim_2cov, "covariates": covariates_sim_2cov, "offsets": offsets_sim_2cov, } -counts_real, labels_real = get_real_count_data( - return_labels=True, n_samples=100, dim=50 -) -data_real = {"counts": counts_real} +endog_real, labels_real = get_real_count_data(return_labels=True, n_samples=100, dim=50) +data_real = {"endog": endog_real} diff --git a/tests/test_common.py b/tests/test_common.py index 38711fef..5904857d 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -6,7 +6,7 @@ import pytest from tests.conftest import dict_fixtures from tests.utils import MSE, filter_models -from tests.import_data import true_sim_0cov, true_sim_2cov, counts_real +from tests.import_data import true_sim_0cov, true_sim_2cov, endog_real @pytest.mark.parametrize("any_pln", dict_fixtures["loaded_and_fitted_pln"]) @@ -60,8 +60,8 @@ def 
test_find_right_covariance(simulated_fitted_any_pln): @filter_models(["Pln", "PlnPCA"]) def test_right_covariance_shape(real_fitted_and_loaded_pln): assert real_fitted_and_loaded_pln.covariance.shape == ( - counts_real.shape[1], - counts_real.shape[1], + endog_real.shape[1], + endog_real.shape[1], ) @@ -81,9 +81,9 @@ def test_find_right_coef(simulated_fitted_any_pln): @pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) @filter_models(["Pln", "PlnPCA"]) def test_fail_count_setter(pln): - wrong_counts = torch.randint(size=(10, 5), low=0, high=10) + wrong_endog = torch.randint(size=(10, 5), low=0, high=10) with pytest.raises(Exception): - pln.counts = wrong_counts + pln.endog = wrong_endog @pytest.mark.parametrize("instance", dict_fixtures["instances"]) diff --git a/tests/test_pln_full.py b/tests/test_pln_full.py index db0185f1..2d61befd 100644 --- a/tests/test_pln_full.py +++ b/tests/test_pln_full.py @@ -14,4 +14,4 @@ def test_number_of_iterations_pln_full(fitted_pln): @pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) @filter_models(["Pln"]) def test_latent_var_full(pln): - assert pln.transform().shape == pln.counts.shape + assert pln.transform().shape == pln.endog.shape diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index ac77b442..6634f2d2 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -33,7 +33,7 @@ def test_number_of_iterations_plnpca(fitted_pln): @pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) @filter_models(["PlnPCA"]) def test_latent_var_pca(plnpca): - assert plnpca.transform(project=False).shape == plnpca.counts.shape + assert plnpca.transform(project=False).shape == plnpca.endog.shape assert plnpca.transform().shape == (plnpca.n_samples, plnpca.rank) diff --git a/tests/test_setters.py b/tests/test_setters.py index d5716fc2..6814e842 100644 --- a/tests/test_setters.py +++ b/tests/test_setters.py @@ -8,7 +8,7 @@ from tests.utils import MSE, filter_models @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_torch(pln): - pln.counts = pln.counts + pln.endog = pln.endog pln.covariates = pln.covariates pln.offsets = pln.offsets pln.fit() @@ -27,13 +27,13 @@ def test_parameters_setter_with_torch(pln): @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_numpy(pln): - np_counts = pln.counts.numpy() + np_endog = pln.endog.numpy() if pln.covariates is not None: np_covariates = pln.covariates.numpy() else: np_covariates = None np_offsets = pln.offsets.numpy() - pln.counts = np_counts + pln.endog = np_endog pln.covariates = np_covariates pln.offsets = np_offsets pln.fit() @@ -58,13 +58,13 @@ def test_parameters_setter_with_numpy(pln): @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_pandas(pln): - pd_counts = pd.DataFrame(pln.counts.numpy()) + pd_endog = pd.DataFrame(pln.endog.numpy()) if pln.covariates is not None: pd_covariates = pd.DataFrame(pln.covariates.numpy()) else: pd_covariates = None pd_offsets = pd.DataFrame(pln.offsets.numpy()) - pln.counts = pd_counts + pln.endog = pd_endog pln.covariates = pd_covariates pln.offsets = pd_offsets pln.fit() @@ -90,17 +90,17 @@ def test_parameters_setter_with_pandas(pln): @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_fail_data_setter_with_torch(pln): with pytest.raises(ValueError): - pln.counts = pln.counts - 100 + pln.endog = pln.endog - 100 - n, p = pln.counts.shape + n, p = pln.endog.shape if 
pln.covariates is None: d = 0 else: d = pln.covariates.shape[-1] with pytest.raises(ValueError): - pln.counts = torch.zeros(n + 1, p) + pln.endog = torch.zeros(n + 1, p) with pytest.raises(ValueError): - pln.counts = torch.zeros(n, p + 1) + pln.endog = torch.zeros(n, p + 1) with pytest.raises(ValueError): pln.covariates = torch.zeros(n + 1, d) @@ -116,7 +116,7 @@ def test_fail_data_setter_with_torch(pln): @filter_models(["Pln", "PlnPCA"]) def test_fail_parameters_setter_with_torch(pln): n, dim_latent = pln.latent_mean.shape - dim = pln.counts.shape[1] + dim = pln.endog.shape[1] with pytest.raises(ValueError): pln.latent_mean = torch.zeros(n + 1, dim_latent) -- GitLab From 672eef3e486a6181279f8d81466be3ac50b8a76e Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 16:19:20 +0200 Subject: [PATCH 043/167] changed the occurences of covariates by exog. --- pyPLNmodels/_closed_forms.py | 24 ++-- pyPLNmodels/_initialization.py | 56 ++++----- pyPLNmodels/_utils.py | 138 +++++++++++---------- pyPLNmodels/elbos.py | 32 ++--- pyPLNmodels/models.py | 212 ++++++++++++++++----------------- pyPLNmodels/oaks.py | 14 +-- tests/conftest.py | 18 +-- tests/import_data.py | 8 +- tests/test_common.py | 2 +- tests/test_setters.py | 28 ++--- 10 files changed, 260 insertions(+), 272 deletions(-) diff --git a/pyPLNmodels/_closed_forms.py b/pyPLNmodels/_closed_forms.py index ff99eae3..b57e7850 100644 --- a/pyPLNmodels/_closed_forms.py +++ b/pyPLNmodels/_closed_forms.py @@ -4,7 +4,7 @@ import torch # pylint:disable=[C0114] def _closed_formula_covariance( - covariates: torch.Tensor, + exog: torch.Tensor, latent_mean: torch.Tensor, latent_sqrt_var: torch.Tensor, coef: torch.Tensor, @@ -15,7 +15,7 @@ def _closed_formula_covariance( Parameters: ---------- - covariates : torch.Tensor + exog : torch.Tensor Covariates with size (n, d). latent_mean : torch.Tensor Variational parameter with size (n, p). @@ -31,10 +31,10 @@ def _closed_formula_covariance( torch.Tensor The closed-form covariance with size (p, p). """ - if covariates is None: + if exog is None: XB = 0 else: - XB = covariates @ coef + XB = exog @ coef m_minus_xb = latent_mean - XB closed = m_minus_xb.T @ m_minus_xb + torch.diag( torch.sum(torch.square(latent_sqrt_var), dim=0) @@ -43,14 +43,14 @@ def _closed_formula_covariance( def _closed_formula_coef( - covariates: torch.Tensor, latent_mean: torch.Tensor + exog: torch.Tensor, latent_mean: torch.Tensor ) -> Optional[torch.Tensor]: """ Compute the closed-form coef for the M step of the Pln model. Parameters: ---------- - covariates : torch.Tensor + exog : torch.Tensor Covariates with size (n, d). latent_mean : torch.Tensor Variational parameter with size (n, p). @@ -58,11 +58,11 @@ def _closed_formula_coef( Returns: ------- Optional[torch.Tensor] - The closed-form coef with size (d, p) or None if covariates is None. + The closed-form coef with size (d, p) or None if exog is None. """ - if covariates is None: + if exog is None: return None - return torch.inverse(covariates.T @ covariates) @ covariates.T @ latent_mean + return torch.inverse(exog.T @ exog) @ exog.T @ latent_mean def _closed_formula_pi( @@ -70,7 +70,7 @@ def _closed_formula_pi( latent_mean: torch.Tensor, latent_sqrt_var: torch.Tensor, dirac: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, _coef_inflation: torch.Tensor, ) -> torch.Tensor: """ @@ -86,7 +86,7 @@ def _closed_formula_pi( Variational parameter with size (n, p). dirac : torch.Tensor Dirac tensor. 
- covariates : torch.Tensor + exog : torch.Tensor Covariates with size (n, d). _coef_inflation : torch.Tensor Inflation coefficient tensor. @@ -97,4 +97,4 @@ def _closed_formula_pi( The closed-form pi with the same size as dirac. """ poiss_param = torch.exp(offsets + latent_mean + 0.5 * torch.square(latent_sqrt_var)) - return torch._sigmoid(poiss_param + torch.mm(covariates, _coef_inflation)) * dirac + return torch._sigmoid(poiss_param + torch.mm(exog, _coef_inflation)) * dirac diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index ea22106b..e0c3f47e 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -10,7 +10,7 @@ else: def _init_covariance( - endog: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor + endog: torch.Tensor, exog: torch.Tensor, coef: torch.Tensor ) -> torch.Tensor: """ Initialization for the covariance for the Pln model. Take the log of endog @@ -23,7 +23,7 @@ def _init_covariance( Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) - covariates : torch.Tensor + exog : torch.Tensor Covariates, size (n,d) coef : torch.Tensor Coefficient of size (d,p) @@ -41,7 +41,7 @@ def _init_covariance( def _init_components( - endog: torch.Tensor, covariates: torch.Tensor, coef: torch.Tensor, rank: int + endog: torch.Tensor, exog: torch.Tensor, coef: torch.Tensor, rank: int ) -> torch.Tensor: """ Initialization for components for the Pln model. Get a first guess for covariance @@ -53,7 +53,7 @@ def _init_components( Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) - covariates : torch.Tensor + exog : torch.Tensor Covariates, size (n,d) coef : torch.Tensor Coefficient of size (d,p) @@ -65,14 +65,14 @@ def _init_components( torch.Tensor Initialization of components of size (p,rank) """ - sigma_hat = _init_covariance(endog, covariates, coef).detach() + sigma_hat = _init_covariance(endog, exog, coef).detach() components = _components_from_covariance(sigma_hat, rank) return components def _init_latent_mean( endog: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, offsets: torch.Tensor, coef: torch.Tensor, components: torch.Tensor, @@ -90,7 +90,7 @@ def _init_latent_mean( Samples with size (n,p) offsets : torch.Tensor Offset, size (n,p) - covariates : torch.Tensor + exog : torch.Tensor Covariates, size (n,d) coef : torch.Tensor Coefficient of size (d,p) @@ -117,7 +117,7 @@ def _init_latent_mean( keep_condition = True i = 0 while i < n_iter_max and keep_condition: - batch_loss = log_posterior(endog, covariates, offsets, mode, components, coef) + batch_loss = log_posterior(endog, exog, offsets, mode, components, coef) loss = -torch.mean(batch_loss) loss.backward() optimizer.step() @@ -155,7 +155,7 @@ def _components_from_covariance(covariance: torch.Tensor, rank: int) -> torch.Te def _init_coef( - endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor + endog: torch.Tensor, exog: torch.Tensor, offsets: torch.Tensor ) -> torch.Tensor: """ Initialize the coefficient for the Pln model using Poisson regression model. @@ -164,7 +164,7 @@ def _init_coef( ---------- endog : torch.Tensor Samples with size (n, p) - covariates : torch.Tensor + exog : torch.Tensor Covariates, size (n, d) offsets : torch.Tensor Offset, size (n, p) @@ -172,19 +172,19 @@ def _init_coef( Returns ------- torch.Tensor or None - Coefficient of size (d, p) or None if covariates is None. + Coefficient of size (d, p) or None if exog is None. 
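The coefficient produced here by Poisson regression is only a starting point; for the full Pln model it is later replaced by the closed form from _closed_forms.py earlier in this patch. Both closed forms fit in a few lines of plain torch, as in the sketch below (arbitrary shapes, for illustration only), mirroring coef = (X^T X)^{-1} X^T M and covariance = ((M - XB)^T (M - XB) + diag(sum_i s_i^2)) / n:

    >>> import torch
    >>> n, d, p = 50, 2, 10
    >>> exog = torch.randn(n, d)                 # X
    >>> latent_mean = torch.randn(n, p)          # M
    >>> latent_sqrt_var = torch.randn(n, p)      # S
    >>> coef = torch.inverse(exog.T @ exog) @ exog.T @ latent_mean
    >>> m_minus_xb = latent_mean - exog @ coef
    >>> covariance = (m_minus_xb.T @ m_minus_xb + torch.diag(torch.sum(latent_sqrt_var**2, dim=0))) / n
    >>> print(coef.shape, covariance.shape)      # (d, p) and (p, p)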
""" - if covariates is None: + if exog is None: return None poiss_reg = _PoissonReg() - poiss_reg.fit(endog, covariates, offsets) + poiss_reg.fit(endog, exog, offsets) return poiss_reg.beta def log_posterior( endog: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, offsets: torch.Tensor, posterior_mean: torch.Tensor, components: torch.Tensor, @@ -197,7 +197,7 @@ def log_posterior( ---------- endog : torch.Tensor Samples with size (batch_size, p) - covariates : torch.Tensor or None + exog : torch.Tensor or None Covariates, size (batch_size, d) or (d) offsets : torch.Tensor Offset, size (batch_size, p) @@ -218,10 +218,10 @@ def log_posterior( components.unsqueeze(0), posterior_mean.unsqueeze(2) ).squeeze() - if covariates is None: + if exog is None: XB = 0 else: - XB = torch.matmul(covariates, coef) + XB = torch.matmul(exog, coef) log_lambda = offsets + components_posterior_mean + XB first_term = ( @@ -245,7 +245,7 @@ class _PoissonReg: Methods ------- - fit(Y, covariates, O, Niter_max=300, tol=0.001, lr=0.005, verbose=False) + fit(Y, exog, O, Niter_max=300, tol=0.001, lr=0.005, verbose=False) Fit the Poisson regression model to the given data. """ @@ -256,7 +256,7 @@ class _PoissonReg: def fit( self, Y: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, offsets: torch.Tensor, Niter_max: int = 300, tol: float = 0.001, @@ -270,8 +270,8 @@ class _PoissonReg: ---------- Y : torch.Tensor The dependent variable of shape (n_samples, n_features). - covariates : torch.Tensor - The covariates of shape (n_samples, n_covariates). + exog : torch.Tensor + The exog of shape (n_samples, n_exog). offsets : torch.Tensor The offset term of shape (n_samples, n_features). Niter_max : int, optional @@ -285,13 +285,13 @@ class _PoissonReg: """ beta = torch.rand( - (covariates.shape[1], Y.shape[1]), device=DEVICE, requires_grad=True + (exog.shape[1], Y.shape[1]), device=DEVICE, requires_grad=True ) optimizer = torch.optim.Rprop([beta], lr=lr) i = 0 grad_norm = 2 * tol # Criterion while i < Niter_max and grad_norm > tol: - loss = -compute_poissreg_log_like(Y, offsets, covariates, beta) + loss = -compute_poissreg_log_like(Y, offsets, exog, beta) loss.backward() optimizer.step() grad_norm = torch.norm(beta.grad) @@ -309,7 +309,7 @@ class _PoissonReg: def compute_poissreg_log_like( - Y: torch.Tensor, O: torch.Tensor, covariates: torch.Tensor, beta: torch.Tensor + Y: torch.Tensor, O: torch.Tensor, exog: torch.Tensor, beta: torch.Tensor ) -> torch.Tensor: """ Compute the log likelihood of a Poisson regression model. @@ -320,10 +320,10 @@ def compute_poissreg_log_like( The dependent variable of shape (n_samples, n_features). O : torch.Tensor The offset term of shape (n_samples, n_features). - covariates : torch.Tensor - The covariates of shape (n_samples, n_covariates). + exog : torch.Tensor + The exog of shape (n_samples, n_exog). beta : torch.Tensor - The regression coefficients of shape (n_covariates, n_features). + The regression coefficients of shape (n_exog, n_features). Returns ------- @@ -331,5 +331,5 @@ def compute_poissreg_log_like( The log likelihood of the Poisson regression model. 
""" - XB = torch.matmul(covariates.unsqueeze(1), beta.unsqueeze(0)).squeeze() + XB = torch.matmul(exog.unsqueeze(1), beta.unsqueeze(0)).squeeze() return torch.sum(-torch.exp(O + XB) + torch.multiply(Y, O + XB)) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index ba6b2a39..805c9dca 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -102,7 +102,7 @@ def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Ten Parameters ---------- pln_param : PlnParameters object - parameters of the model, containing the coeficient, the covariates, + parameters of the model, containing the coeficient, the exog, the components and the offsets. seed : int or None, optional(keyword-only) Random seed for reproducibility. Default is None. @@ -124,10 +124,10 @@ def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Ten n_samples = pln_param.offsets.shape[0] rank = pln_param.components.shape[1] - if pln_param.covariates is None: + if pln_param.exog is None: XB = 0 else: - XB = torch.matmul(pln_param.covariates, pln_param.coef) + XB = torch.matmul(pln_param.exog, pln_param.coef) gaussian = ( torch.mm(torch.randn(n_samples, rank, device=DEVICE), pln_param.components.T) @@ -136,9 +136,7 @@ def sample_pln(pln_param, *, seed: int = None, return_latent=False) -> torch.Ten parameter = torch.exp(pln_param.offsets + gaussian) if pln_param.coef_inflation is not None: print("ZIPln is sampled") - zero_inflated_mean = torch.matmul( - pln_param.covariates, pln_param.coef_inflation - ) + zero_inflated_mean = torch.matmul(pln_param.exog, pln_param.coef_inflation) ksi = torch.bernoulli(1 / (1 + torch.exp(-zero_inflated_mean))) else: ksi = 0 @@ -269,7 +267,7 @@ def _format_data( def _format_model_param( endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], - covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame], + exog: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets_formula: str, take_log_offsets: bool, @@ -282,7 +280,7 @@ def _format_model_param( ---------- endog : Union[torch.Tensor, np.ndarray, pd.DataFrame], shape (n, ) Count data. - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, d) or None + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, d) or None Covariate data. offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, shape (n, ) or None Offset data. @@ -291,7 +289,7 @@ def _format_model_param( take_log_offsets : bool Flag indicating whether to take the logarithm of offsets. add_const: bool - Whether to add a column of one in the covariates. + Whether to add a column of one in the exog. 
Returns ------- Tuple[torch.Tensor, torch.Tensor, torch.Tensor] @@ -305,14 +303,14 @@ def _format_model_param( endog = _format_data(endog) if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") - covariates = _format_data(covariates) + exog = _format_data(exog) if add_const is True: - if covariates is None: - covariates = torch.ones(endog.shape[0], 1) + if exog is None: + exog = torch.ones(endog.shape[0], 1) else: - if _has_null_variance(covariates) is False: - covariates = torch.concat( - (covariates, torch.ones(endog.shape[0]).unsqueeze(1)), dim=1 + if _has_null_variance(exog) is False: + exog = torch.concat( + (exog, torch.ones(endog.shape[0]).unsqueeze(1)), dim=1 ) if offsets is None: if offsets_formula == "logsum": @@ -331,7 +329,7 @@ def _format_model_param( offsets = _format_data(offsets).to(DEVICE) if take_log_offsets is True: offsets = torch.log(offsets) - return endog, covariates, offsets + return endog, exog, offsets def _has_null_variance(tensor: torch.Tensor) -> bool: @@ -352,7 +350,7 @@ def _has_null_variance(tensor: torch.Tensor) -> bool: def _check_data_shape( - endog: torch.Tensor, covariates: torch.Tensor, offsets: torch.Tensor + endog: torch.Tensor, exog: torch.Tensor, offsets: torch.Tensor ) -> None: """ Check if the shape of the input data is valid. @@ -361,7 +359,7 @@ def _check_data_shape( ---------- endog : torch.Tensor, shape (n, p) Count data. - covariates : torch.Tensor or None, shape (n, d) or None + exog : torch.Tensor or None, shape (n, d) or None Covariate data. offsets : torch.Tensor or None, shape (n, p) or None Offset data. @@ -369,9 +367,9 @@ def _check_data_shape( n_endog, p_endog = endog.shape n_offsets, p_offsets = offsets.shape _check_two_dimensions_are_equal("endog", "offsets", n_endog, n_offsets, 0, 0) - if covariates is not None: - n_cov, _ = covariates.shape - _check_two_dimensions_are_equal("endog", "covariates", n_endog, n_cov, 0, 0) + if exog is not None: + n_cov, _ = exog.shape + _check_two_dimensions_are_equal("endog", "exog", n_endog, n_cov, 0, 0) _check_two_dimensions_are_equal("endog", "offsets", p_endog, p_offsets, 1, 1) @@ -476,29 +474,29 @@ def _get_simulation_coef_cov_offsets( n_samples : int Number of samples. nb_cov : int - Number of covariates. If 0, covariates will be None, + Number of exog. If 0, exog will be None, unless add_const is True. If add_const is True, then there will be nb_cov+1 - covariates as the intercept can be seen as a covariates. + exog as the intercept can be seen as a exog. dim : int Dimension required of the data. add_const : bool, optional - If True, will add a vector of ones in the covariates. + If True, will add a vector of ones in the exog. Returns ------- Tuple[torch.Tensor, torch.Tensor, torch.Tensor] - Tuple containing offsets, covariates, and coefficients. + Tuple containing offsets, exog, and coefficients. 
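With add_const handled here, the shape of the simulated design matrix depends on both nb_cov and add_const. The public wrapper get_simulation_parameters forwards both arguments, so the behaviour can be checked directly; arguments are passed by keyword, and the shapes in the comments follow from the code above:

    >>> from pyPLNmodels import get_simulation_parameters
    >>> param = get_simulation_parameters(n_samples = 20, dim = 10, nb_cov = 2, add_const = True)
    >>> print(param.exog.shape)      # (20, 3): two simulated covariates plus the intercept column
    >>> param = get_simulation_parameters(n_samples = 20, dim = 10, nb_cov = 0, add_const = True)
    >>> print(param.exog.shape)      # (20, 1): intercept only
    >>> param = get_simulation_parameters(n_samples = 20, dim = 10, nb_cov = 0, add_const = False)
    >>> print(param.exog)            # None: no design matrix at all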
""" prev_state = torch.random.get_rng_state() torch.random.manual_seed(0) if nb_cov == 0: if add_const is True: - covariates = torch.ones(n_samples, 1) + exog = torch.ones(n_samples, 1) else: - covariates = None + exog = None else: - covariates = torch.randint( + exog = torch.randint( low=-1, high=2, size=(n_samples, nb_cov), @@ -506,16 +504,16 @@ def _get_simulation_coef_cov_offsets( device="cpu", ) if add_const is True: - covariates = torch.cat((covariates, torch.ones(n_samples, 1)), axis=1) - if covariates is None: + exog = torch.cat((exog, torch.ones(n_samples, 1)), axis=1) + if exog is None: coef = None else: - coef = torch.randn(covariates.shape[1], dim, device="cpu") + coef = torch.randn(exog.shape[1], dim, device="cpu") offsets = torch.randint( low=0, high=2, size=(n_samples, dim), dtype=torch.float64, device="cpu" ) torch.random.set_rng_state(prev_state) - return coef, covariates, offsets + return coef, exog, offsets class PlnParameters: @@ -524,7 +522,7 @@ class PlnParameters: *, components: Union[torch.Tensor, np.ndarray, pd.DataFrame], coef: Union[torch.Tensor, np.ndarray, pd.DataFrame], - covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame], + exog: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], coef_inflation=None, ): @@ -537,7 +535,7 @@ class PlnParameters: Components of size (p, rank) coef : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Coefficient of size (d, p) - covariates : : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None(keyword-only) + exog : : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None(keyword-only) Covariates, size (n, d) or None offsets : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Offset, size (n, p) @@ -547,7 +545,7 @@ class PlnParameters: """ self._components = _format_data(components) self._coef = _format_data(coef) - self._covariates = _format_data(covariates) + self._exog = _format_data(exog) self._offsets = _format_data(offsets) self._coef_inflation = _format_data(coef_inflation) if self._coef is not None: @@ -568,24 +566,24 @@ class PlnParameters: 0, 1, ) - if self._covariates is not None: + if self._exog is not None: _check_two_dimensions_are_equal( "offsets", - "covariates", + "exog", self._offsets.shape[0], - self._covariates.shape[0], + self._exog.shape[0], 0, 0, ) _check_two_dimensions_are_equal( - "covariates", + "exog", "coef", - self._covariates.shape[1], + self._exog.shape[1], self._coef.shape[0], 1, 0, ) - for array in [self._components, self._coef, self._covariates, self._offsets]: + for array in [self._components, self._coef, self._exog, self._offsets]: if array is not None: if len(array.shape) != 2: raise RuntimeError( @@ -618,14 +616,14 @@ class PlnParameters: """ Coef of the model. """ - return self.coef + return self._coef @property - def covariates(self): + def exog(self): """ - Data covariates. + Data exog. """ - return self.covariates + return self._exog @property def coef_inflation(self): @@ -695,13 +693,13 @@ def get_simulation_parameters( dim : int, optional(keyword-only) The dimension of the data, by default 25. nb_cov : int, optional(keyword-only) - The number of covariates, by default 1. If add_const is True, - then there will be nb_cov+1 covariates as the intercept can be seen - as a covariates. + The number of exog, by default 1. If add_const is True, + then there will be nb_cov+1 exog as the intercept can be seen + as a exog. rank : int, optional(keyword-only) The rank of the data components, by default 5. 
add_const : bool, optional(keyword-only) - If True, will add a vector of ones in the covariates. + If True, will add a vector of ones in the exog. Returns ------- @@ -709,13 +707,11 @@ def get_simulation_parameters( The generated simulation parameters. """ - coef, covariates, offsets = _get_simulation_coef_cov_offsets( + coef, exog, offsets = _get_simulation_coef_cov_offsets( n_samples, nb_cov, dim, add_const ) components = _get_simulation_components(dim, rank) - return PlnParameters( - components=components, coef=coef, covariates=covariates, offsets=offsets - ) + return PlnParameters(components=components, coef=coef, exog=exog, offsets=offsets) def get_simulated_count_data( @@ -742,7 +738,7 @@ def get_simulated_count_data( add_const : bool, optional(keyword-only) If True, will add a vector of ones. Default is True nb_cov : int, optional(keyword-only) - Number of covariates, by default 1. + Number of exog, by default 1. return_true_param : bool, optional(keyword-only) Whether to return the true parameters of the model, by default False. seed : int, optional(keyword-only) @@ -751,7 +747,7 @@ def get_simulated_count_data( Returns ------- Tuple[torch.Tensor, torch.Tensor, torch.Tensor] - Tuple containing endog, covariates, and offsets. + Tuple containing endog, exog, and offsets. """ pln_param = get_simulation_parameters( n_samples=n_samples, dim=dim, nb_cov=nb_cov, rank=rank, add_const=add_const @@ -760,7 +756,7 @@ def get_simulated_count_data( if return_true_param is True: return ( endog, - pln_param.covariates, + pln_param.exog, pln_param.offsets, pln_param.covariance, pln_param.coef, @@ -799,7 +795,7 @@ def get_real_count_data( f"\nTaking the whole max_dim variables. Requested:dim={dim}, returned:{max_dim}" ) dim = max_dim - endog_stream = pkg_resources.resource_stream(__name__, "data/scRT/endog.csv") + endog_stream = pkg_resources.resource_stream(__name__, "data/scRT/counts.csv") endog = pd.read_csv(endog_stream).values[:n_samples, :dim] print(f"Returning dataset of size {endog.shape}") if return_labels is False: @@ -849,16 +845,16 @@ def _extract_data_from_formula( Returns ------- Tuple - A tuple containing the extracted endog, covariates, and offsets. + A tuple containing the extracted endog, exog, and offsets. """ dmatrix = dmatrices(formula, data=data) endog = dmatrix[0] - covariates = dmatrix[1] - if covariates.size == 0: - covariates = None + exog = dmatrix[1] + if exog.size == 0: + exog = None offsets = data.get("offsets", None) - return endog, covariates, offsets + return endog, exog, offsets def _is_dict_of_dict(dictionary: Dict[Any, Any]) -> bool: @@ -947,7 +943,7 @@ def _array2tensor(func): def _handle_data( endog, - covariates, + exog, offsets, offsets_formula: str, take_log_offsets: bool, @@ -959,30 +955,30 @@ def _handle_data( Parameters ---------- endog : The endog data. If a DataFrame is provided, the column names are stored for later use. - covariates : The covariates data. + exog : The exog data. offsets : The offsets data. offsets_formula : The formula used for offsets. take_log_offsets : Indicates whether to take the logarithm of the offsets. - add_const : Indicates whether to add a constant column to the covariates. + add_const : Indicates whether to add a constant column to the exog. Returns ------- - tuple: A tuple containing the processed endog, covariates, offsets, and column endog (if available). + tuple: A tuple containing the processed endog, exog, offsets, and column endog (if available). 
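_extract_data_from_formula delegates all formula handling to patsy; the sketch below (with made-up variable names) shows the two design matrices it gets back, and that offsets are simply looked up in the data dictionary:

    >>> import numpy as np
    >>> from patsy import dmatrices
    >>> rng = np.random.default_rng(0)
    >>> data = {"endog": rng.poisson(1, size=(10, 4)), "cov": rng.normal(size=(10, 2))}
    >>> endog, exog = dmatrices("endog ~ 0 + cov", data=data)
    >>> print(endog.shape, exog.shape)           # (10, 4) (10, 2)
    >>> print(data.get("offsets", None))         # None here, since no offsets were provided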
Raises ------ - ValueError: If the shapes of endog, covariates, and offsets do not match. + ValueError: If the shapes of endog, exog, and offsets do not match. """ if isinstance(endog, pd.DataFrame): column_endog = endog.columns else: column_endog = None - endog, covariates, offsets = _format_model_param( - endog, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, exog, offsets = _format_model_param( + endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) - _check_data_shape(endog, covariates, offsets) - return endog, covariates, offsets, column_endog + _check_data_shape(endog, exog, offsets) + return endog, exog, offsets, column_endog def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also=None): diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index b0298a6a..6dcda361 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -8,7 +8,7 @@ from typing import Optional def elbo_pln( endog: torch.Tensor, offsets: torch.Tensor, - covariates: Optional[torch.Tensor], + exog: Optional[torch.Tensor], latent_mean: torch.Tensor, latent_sqrt_var: torch.Tensor, covariance: torch.Tensor, @@ -23,7 +23,7 @@ def elbo_pln( Counts with size (n, p). offsets : torch.Tensor Offset with size (n, p). - covariates : torch.Tensor, optional + exog : torch.Tensor, optional Covariates with size (n, d). latent_mean : torch.Tensor Variational parameter with size (n, p). @@ -42,10 +42,10 @@ def elbo_pln( n_samples, dim = endog.shape s_rond_s = torch.square(latent_sqrt_var) offsets_plus_m = offsets + latent_mean - if covariates is None: + if exog is None: XB = torch.zeros_like(endog) else: - XB = covariates @ coef + XB = exog @ coef m_minus_xb = latent_mean - XB d_plus_minus_xb2 = ( torch.diag(torch.sum(s_rond_s, dim=0)) + m_minus_xb.T @ m_minus_xb @@ -64,7 +64,7 @@ def elbo_pln( def profiled_elbo_pln( endog: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, offsets: torch.Tensor, latent_mean: torch.Tensor, latent_sqrt_var: torch.Tensor, @@ -78,7 +78,7 @@ def profiled_elbo_pln( ---------- endog : torch.Tensor Counts with size (n, p). - covariates : torch.Tensor + exog : torch.Tensor Covariates with size (n, d). offsets : torch.Tensor Offset with size (n, p). @@ -95,9 +95,9 @@ def profiled_elbo_pln( n_samples, _ = endog.shape s_squared = torch.square(latent_sqrt_var) offsets_plus_mean = offsets + latent_mean - closed_coef = _closed_formula_coef(covariates, latent_mean) + closed_coef = _closed_formula_coef(exog, latent_mean) closed_covariance = _closed_formula_covariance( - covariates, latent_mean, latent_sqrt_var, closed_coef, n_samples + exog, latent_mean, latent_sqrt_var, closed_coef, n_samples ) elbo = -0.5 * n_samples * torch.logdet(closed_covariance) elbo += torch.sum( @@ -111,7 +111,7 @@ def profiled_elbo_pln( def elbo_plnpca( endog: torch.Tensor, - covariates: torch.Tensor, + exog: torch.Tensor, offsets: torch.Tensor, latent_mean: torch.Tensor, latent_sqrt_var: torch.Tensor, @@ -126,7 +126,7 @@ def elbo_plnpca( ---------- endog : torch.Tensor Counts with size (n, p). - covariates : torch.Tensor + exog : torch.Tensor Covariates with size (n, d). offsets : torch.Tensor Offset with size (n, p). 
@@ -147,10 +147,10 @@ def elbo_plnpca( """ n_samples = endog.shape[0] rank = components.shape[1] - if covariates is None: + if exog is None: XB = 0 else: - XB = covariates @ coef + XB = exog @ coef log_intensity = offsets + XB + latent_mean @ components.T s_squared = torch.square(latent_sqrt_var) endog_log_intensity = torch.sum(endog * log_intensity) @@ -175,7 +175,7 @@ def elbo_plnpca( ## should rename some variables so that is is clearer when we see the formula def elbo_zi_pln( endog, - covariates, + exog, offsets, latent_mean, latent_sqrt_var, @@ -191,7 +191,7 @@ def elbo_zi_pln( Args: endog: torch.tensor. Counts with size (n,p) 0: torch.tensor. Offset, size (n,p) - covariates: torch.tensor. Covariates, size (n,d) + exog: torch.tensor. Covariates, size (n,d) latent_mean: torch.tensor. Variational parameter with size (n,p) latent_sqrt_var: torch.tensor. Variational parameter with size (n,p) pi: torch.tensor. Variational parameter with size (n,p) @@ -208,8 +208,8 @@ def elbo_zi_pln( dim = endog.shape[1] s_rond_s = torch.square(latent_sqrt_var) offsets_plus_m = offsets + latent_mean - m_minus_xb = latent_mean - covariates @ coef - x_coef_inflation = covariates @ _coef_inflation + m_minus_xb = latent_mean - exog @ coef + x_coef_inflation = exog @ _coef_inflation elbo = torch.sum( (1 - pi) * ( diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 62351a03..db42daeb 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -59,7 +59,7 @@ class _model(ABC): _WINDOW: int = 15 _endog: torch.Tensor - _covariates: torch.Tensor + _exog: torch.Tensor _offsets: torch.Tensor _coef: torch.Tensor _beginning_time: float @@ -70,7 +70,7 @@ class _model(ABC): self, endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", dict_initialization: Optional[dict] = None, @@ -84,7 +84,7 @@ class _model(ABC): ---------- endog : Union[torch.Tensor, np.ndarray, pd.DataFrame] The count data. - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The covariate data. Defaults to None. offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. @@ -96,15 +96,15 @@ class _model(ABC): take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. add_const: bool, optional(keyword-only) - Whether to add a column of one in the covariates. Defaults to True. + Whether to add a column of one in the exog. Defaults to True. """ ( self._endog, - self._covariates, + self._exog, self._offsets, self.column_endog, ) = _handle_data( - endog, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False self._plotargs = _PlotArgs(self._WINDOW) @@ -138,10 +138,10 @@ class _model(ABC): take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. 
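Since add_const defaults to True, the array interface and the formula interface should end up with the same intercept-only design; a duplicate intercept is avoided by the null-variance check in _format_model_param. A small sketch of that equivalence:

    >>> from pyPLNmodels import Pln, get_real_count_data
    >>> endog = get_real_count_data()
    >>> via_arrays = Pln(endog, add_const = True)
    >>> via_formula = Pln.from_formula("endog ~ 1", data = {"endog": endog})
    >>> print(via_arrays.nb_cov, via_formula.nb_cov)   # both should report a single intercept column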
""" - endog, covariates, offsets = _extract_data_from_formula(formula, data) + endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, dict_initialization=dict_initialization, @@ -255,22 +255,22 @@ class _model(ABC): @property def nb_cov(self) -> int: """ - The number of covariates. + The number of exog. Returns ------- int - The number of covariates. + The number of exog. """ - if self.covariates is None: + if self.exog is None: return 0 - return self.covariates.shape[1] + return self.exog.shape[1] def _smart_init_coef(self): """ Initialize coefficients smartly. """ - self._coef = _init_coef(self._endog, self._covariates, self._offsets) + self._coef = _init_coef(self._endog, self._exog, self._offsets) def _random_init_coef(self): """ @@ -836,7 +836,7 @@ class _model(ABC): """ return { "endog": self.endog, - "covariates": self.covariates, + "exog": self.exog, "offsets": self.offsets, } @@ -1015,16 +1015,16 @@ class _model(ABC): return self._cpu_attribute_or_none("_offsets") @property - def covariates(self): + def exog(self): """ - Property representing the covariates. + Property representing the exog. Returns ------- torch.Tensor or None - The covariates or None. + The exog or None. """ - return self._cpu_attribute_or_none("_covariates") + return self._cpu_attribute_or_none("_exog") @endog.setter @_array2tensor @@ -1072,24 +1072,24 @@ class _model(ABC): ) self._offsets = offsets - @covariates.setter + @exog.setter @_array2tensor - def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): + def exog(self, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ - Setter for the covariates property. + Setter for the exog property. Parameters ---------- - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The covariates. + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The exog. Raises ------ ValueError - If the shape of the covariates or endog is incorrect. + If the shape of the exog or endog is incorrect. """ - _check_data_shape(self.endog, covariates, self.offsets) - self._covariates = covariates + _check_data_shape(self.endog, exog, self.offsets) + self._exog = exog @coef.setter @_array2tensor @@ -1180,14 +1180,14 @@ class _model(ABC): """ return self.covariance - def predict(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None): + def predict(self, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None): """ Method for making predictions. Parameters ---------- - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional - The covariates, by default None. + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional + The exog, by default None. Returns ------- @@ -1197,29 +1197,29 @@ class _model(ABC): Raises ------ AttributeError - If there are no covariates in the model but some are provided. + If there are no exog in the model but some are provided. RuntimeError - If the shape of the covariates is incorrect. + If the shape of the exog is incorrect. Notes ----- - - If `covariates` is not provided and there are no covariates in the model, None is returned. - If there are covariates in the model, then the mean covariates @ coef is returned. - - If `covariates` is provided, it should have the shape `(_, nb_cov)`, where `nb_cov` is the number of covariates. - - The predicted values are obtained by multiplying the covariates by the coefficients. 
- """ - if covariates is not None and self.nb_cov == 0: - raise AttributeError("No covariates in the model, can't predict") - if covariates is None: - if self.covariates is None: - print("No covariates in the model.") + - If `exog` is not provided and there are no exog in the model, None is returned. + If there are exog in the model, then the mean exog @ coef is returned. + - If `exog` is provided, it should have the shape `(_, nb_cov)`, where `nb_cov` is the number of exog. + - The predicted values are obtained by multiplying the exog by the coefficients. + """ + if exog is not None and self.nb_cov == 0: + raise AttributeError("No exog in the model, can't predict") + if exog is None: + if self.exog is None: + print("No exog in the model.") return None - return self.covariates @ self.coef - if covariates.shape[-1] != self.nb_cov: - error_string = f"X has wrong shape ({covariates.shape}). Should" + return self.exog @ self.coef + if exog.shape[-1] != self.nb_cov: + error_string = f"X has wrong shape ({exog.shape}). Should" error_string += f" be ({self.n_samples, self.nb_cov})." raise RuntimeError(error_string) - return covariates @ self.coef + return exog @ self.coef @property def _directory_name(self): @@ -1328,7 +1328,7 @@ class Pln(_model): self, endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, @@ -1337,7 +1337,7 @@ class Pln(_model): ): super().__init__( endog=endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, dict_initialization=dict_initialization, @@ -1371,10 +1371,10 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): - endog, covariates, offsets = _extract_data_from_formula(formula, data) + endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, dict_initialization=dict_initialization, @@ -1533,11 +1533,7 @@ class Pln(_model): torch.Tensor or None The coefficients or None. """ - if ( - hasattr(self, "_latent_mean") - and hasattr(self, "_covariates") - and self.nb_cov > 0 - ): + if hasattr(self, "_latent_mean") and hasattr(self, "_exog") and self.nb_cov > 0: return self._coef.detach().cpu() return None @@ -1617,7 +1613,7 @@ class Pln(_model): """ return profiled_elbo_pln( self._endog, - self._covariates, + self._exog, self._offsets, self._latent_mean, self._latent_sqrt_var, @@ -1645,7 +1641,7 @@ class Pln(_model): torch.Tensor The coefficients. """ - return _closed_formula_coef(self._covariates, self._latent_mean) + return _closed_formula_coef(self._exog, self._latent_mean) @property def _covariance(self): @@ -1658,7 +1654,7 @@ class Pln(_model): The covariance matrix or None. 
""" return _closed_formula_covariance( - self._covariates, + self._exog, self._latent_mean, self._latent_sqrt_var, self._coef, @@ -1717,7 +1713,7 @@ class Pln(_model): if all( hasattr(self, attr) for attr in [ - "_covariates", + "_exog", "_latent_mean", "_latent_sqrt_var", "_coef", @@ -1759,7 +1755,7 @@ class PlnPCAcollection: >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) >>> endog = sample_pln(plnparam) - >>> data = {"endog":endog, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> data = {"endog":endog, "cov": plnparam.exog, "offsets": plnparam.offsets} >>> plnpcas = PlnPCAcollection.from_formula("endog ~ 0 + cov", data = data, ranks = [5,8,12]) >>> plnpcas.fit() >>> print(plnpcas) @@ -1776,7 +1772,7 @@ class PlnPCAcollection: self, endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, - covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, + exog: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets_formula: str = "logsum", ranks: Iterable[int] = range(3, 5), @@ -1791,8 +1787,8 @@ class PlnPCAcollection: ---------- endog :Union[torch.Tensor, np.ndarray, pd.DataFrame] The endog. - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) - The covariates, by default None. + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) + The exog, by default None. offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets, by default None. offsets_formula : str, optional(keyword-only) @@ -1804,7 +1800,7 @@ class PlnPCAcollection: take_log_offsets : bool, optional(keyword-only) Whether to take the logarithm of offsets, by default False. add_const: bool, optional(keyword-only) - Whether to add a column of one in the covariates. Defaults to True. + Whether to add a column of one in the exog. Defaults to True. Returns ------- PlnPCAcollection @@ -1816,11 +1812,11 @@ class PlnPCAcollection: self._dict_models = {} ( self._endog, - self._covariates, + self._exog, self._offsets, self.column_endog, ) = _handle_data( - endog, covariates, offsets, offsets_formula, take_log_offsets, add_const + endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False self._init_models(ranks, dict_of_dict_initialization) @@ -1870,10 +1866,10 @@ class PlnPCAcollection: :class:`~pyPLNmodels.PlnPCA` :func:`~pyPLNmodels.PlnPCAcollection.__init__` """ - endog, covariates, offsets = _extract_data_from_formula(formula, data) + endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, ranks=ranks, @@ -1883,16 +1879,16 @@ class PlnPCAcollection: ) @property - def covariates(self) -> torch.Tensor: + def exog(self) -> torch.Tensor: """ - Property representing the covariates. + Property representing the exog. Returns ------- torch.Tensor - The covariates. + The exog. """ - return self[self.ranks[0]].covariates + return self[self.ranks[0]].exog @property def endog(self) -> torch.Tensor: @@ -1982,19 +1978,19 @@ class PlnPCAcollection: for model in self.values(): model.coef = coef - @covariates.setter + @exog.setter @_array2tensor - def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): + def exog(self, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ - Setter for the covariates property. + Setter for the exog property. 
Parameters ---------- - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The covariates. + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The exog. """ for model in self.values(): - model.covariates = covariates + model.exog = exog @property def offsets(self) -> torch.Tensor: @@ -2043,7 +2039,7 @@ class PlnPCAcollection: ) self._dict_models[rank] = PlnPCA( endog=self._endog, - covariates=self._covariates, + exog=self._exog, offsets=self._offsets, rank=rank, dict_initialization=dict_initialization, @@ -2068,7 +2064,7 @@ class PlnPCAcollection: ) self._dict_models[rank] = PlnPCA( self._endog, - self._covariates, + self._exog, self._offsets, ranks, dict_initialization, @@ -2117,12 +2113,12 @@ class PlnPCAcollection: @property def nb_cov(self) -> int: """ - Property representing the number of covariates. + Property representing the number of exog. Returns ------- int - The number of covariates. + The number of exog. """ return self[self.ranks[0]].nb_cov @@ -2543,7 +2539,7 @@ class PlnPCA(_model): >>> plnparam = get_simulation_parameters(n_samples =100, dim = 60, nb_cov = 2, rank = 8) >>> endog = sample_pln(plnparam) - >>> data = {"endog": endog, "cov": plnparam.covariates, "offsets": plnparam.offsets} + >>> data = {"endog": endog, "cov": plnparam.exog, "offsets": plnparam.offsets} >>> plnpca = PlnPCA.from_formula("endog ~ 0 + cov", data = data, rank = 5) >>> plnpca.fit() >>> print(plnpca) @@ -2580,7 +2576,7 @@ class PlnPCA(_model): self, endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - covariates: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets_formula: str = "logsum", rank: int = 5, @@ -2591,7 +2587,7 @@ class PlnPCA(_model): self._rank = rank super().__init__( endog=endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, dict_initialization=dict_initialization, @@ -2629,10 +2625,10 @@ class PlnPCA(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, ): - endog, covariates, offsets = _extract_data_from_formula(formula, data) + endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( endog, - covariates=covariates, + exog=exog, offsets=offsets, offsets_formula=offsets_formula, rank=rank, @@ -2810,8 +2806,8 @@ class PlnPCA(_model): * (self.latent_sqrt_var**2).unsqueeze(1), axis=2, ) - if self.covariates is not None: - XB = self.covariates @ self.coef + if self.exog is not None: + XB = self.exog @ self.coef else: XB = 0 return torch.exp( @@ -2865,30 +2861,30 @@ class PlnPCA(_model): return f"{super()._directory_name}_rank_{self._rank}" @property - def covariates(self) -> torch.Tensor: + def exog(self) -> torch.Tensor: """ - Property representing the covariates. + Property representing the exog. Returns ------- torch.Tensor - The covariates tensor. + The exog tensor. """ - return self._cpu_attribute_or_none("_covariates") + return self._cpu_attribute_or_none("_exog") - @covariates.setter + @exog.setter @_array2tensor - def covariates(self, covariates: Union[torch.Tensor, np.ndarray, pd.DataFrame]): + def exog(self, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame]): """ - Setter for the covariates. + Setter for the exog. Parameters ---------- - covariates : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The covariates tensor. 
+ exog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The exog tensor. """ - _check_data_shape(self.endog, covariates, self.offsets) - self._covariates = covariates + _check_data_shape(self.endog, exog, self.offsets) + self._exog = exog print("Setting coef to initialization") self._smart_init_coef() @@ -2943,7 +2939,7 @@ class PlnPCA(_model): super()._smart_init_coef() if not hasattr(self, "_components"): self._components = _init_components( - self._endog, self._covariates, self._coef, self._rank + self._endog, self._exog, self._coef, self._rank ) def _random_init_model_parameters(self): @@ -2970,7 +2966,7 @@ class PlnPCA(_model): self._latent_mean = ( _init_latent_mean( self._endog, - self._covariates, + self._exog, self._offsets, self._coef, self._components, @@ -3008,7 +3004,7 @@ class PlnPCA(_model): """ return elbo_plnpca( self._endog, - self._covariates, + self._exog, self._offsets, self._latent_mean, self._latent_sqrt_var, @@ -3198,9 +3194,7 @@ class ZIPln(Pln): def _smart_init_model_parameters(self): super()._smart_init_model_parameters() if not hasattr(self, "_covariance"): - self._covariance = _init_covariance( - self._endog, self._covariates, self._coef - ) + self._covariance = _init_covariance(self._endog, self._exog, self._coef) if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) @@ -3216,7 +3210,7 @@ class ZIPln(Pln): def compute_elbo(self): return elbo_zi_pln( self._endog, - self._covariates, + self._exog, self._offsets, self._latent_mean, self._latent_sqrt_var, @@ -3232,9 +3226,9 @@ class ZIPln(Pln): return [self._latent_mean, self._latent_sqrt_var, self._coef_inflation] def _update_closed_forms(self): - self._coef = _closed_formula_coef(self._covariates, self._latent_mean) + self._coef = _closed_formula_coef(self._exog, self._latent_mean) self._covariance = _closed_formula_covariance( - self._covariates, + self._exog, self._latent_mean, self._latent_sqrt_var, self._coef, @@ -3245,7 +3239,7 @@ class ZIPln(Pln): self._latent_mean, self._latent_sqrt_var, self._dirac, - self._covariates, + self._exog, self._coef_inflation, ) diff --git a/pyPLNmodels/oaks.py b/pyPLNmodels/oaks.py index c2e126ba..bfdbad97 100644 --- a/pyPLNmodels/oaks.py +++ b/pyPLNmodels/oaks.py @@ -12,7 +12,7 @@ def load_oaks(): found in each sample, which depend on the technology used for either bacteria (16S) or fungi (ITS1). - For each sample, 3 additional covariates (tree, dist2ground, orientation) are known. + For each sample, 3 additional exog (tree, dist2ground, orientation) are known. 
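The dictionary documented below can be passed straight to the formula interface once the keys are renamed; the call here is only one possible way to use it (take_log_offsets is an arbitrary but reasonable choice, since the offsets are stored as raw integer counts):

    >>> from pyPLNmodels import Pln
    >>> from pyPLNmodels.oaks import load_oaks
    >>> oaks = load_oaks()
    >>> pln = Pln.from_formula("endog ~ 1 + tree", data = oaks, take_log_offsets = True)
    >>> pln.fit()
    >>> print(pln)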
The data is provided as dictionary with the following keys endog a 114 x 116 np.array of integer (endog) @@ -32,17 +32,15 @@ def load_oaks(): """ endog_stream = pkg_resources.resource_stream(__name__, "data/oaks/counts.csv") offsets_stream = pkg_resources.resource_stream(__name__, "data/oaks/offsets.csv") - covariates_stream = pkg_resources.resource_stream( - __name__, "data/oaks/covariates.csv" - ) + exog_stream = pkg_resources.resource_stream(__name__, "data/oaks/covariates.csv") endog = pd.read_csv(endog_stream) offsets = pd.read_csv(offsets_stream) - covariates = pd.read_csv(covariates_stream) + exog = pd.read_csv(exog_stream) oaks = { "endog": endog.to_numpy(), "offsets": offsets.to_numpy(), - "tree": covariates.tree.to_numpy(), - "dist2ground": covariates.distTOground.to_numpy(), - "orientation": covariates.orientation.to_numpy(), + "tree": exog.tree.to_numpy(), + "dist2ground": exog.distTOground.to_numpy(), + "orientation": exog.orientation.to_numpy(), } return oaks diff --git a/tests/conftest.py b/tests/conftest.py index e7a23ddd..3a072f20 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,11 +26,11 @@ from tests.import_data import ( endog_sim_0cov = data_sim_0cov["endog"] -covariates_sim_0cov = data_sim_0cov["covariates"] +exog_sim_0cov = data_sim_0cov["exog"] offsets_sim_0cov = data_sim_0cov["offsets"] endog_sim_2cov = data_sim_2cov["endog"] -covariates_sim_2cov = data_sim_2cov["covariates"] +exog_sim_2cov = data_sim_2cov["exog"] offsets_sim_2cov = data_sim_2cov["offsets"] endog_real = data_real["endog"] @@ -120,7 +120,7 @@ def simulated_pln_0cov_array(request): cls = request.param pln = cls( endog_sim_0cov, - covariates=covariates_sim_0cov, + exog=exog_sim_0cov, offsets=offsets_sim_0cov, add_const=False, ) @@ -133,7 +133,7 @@ def simulated_fitted_pln_0cov_array(request): cls = request.param pln = cls( endog_sim_0cov, - covariates=covariates_sim_0cov, + exog=exog_sim_0cov, offsets=offsets_sim_0cov, add_const=False, ) @@ -173,7 +173,7 @@ def simulated_loaded_pln_0cov_array(simulated_fitted_pln_0cov_array): return generate_new_model( simulated_fitted_pln_0cov_array, endog_sim_0cov, - covariates=covariates_sim_0cov, + exog=exog_sim_0cov, offsets=offsets_sim_0cov, add_const=False, ) @@ -218,7 +218,7 @@ def simulated_pln_2cov_array(request): cls = request.param pln_full = cls( endog_sim_2cov, - covariates=covariates_sim_2cov, + exog=exog_sim_2cov, offsets=offsets_sim_2cov, add_const=False, ) @@ -235,7 +235,7 @@ def simulated_fitted_pln_2cov_array(simulated_pln_2cov_array): @cache def simulated_pln_2cov_formula(request): cls = request.param - pln_full = cls("endog ~ 0 + covariates", data_sim_2cov) + pln_full = cls("endog ~ 0 + exog", data_sim_2cov) return pln_full @@ -250,7 +250,7 @@ def simulated_loaded_pln_2cov_formula(simulated_fitted_pln_2cov_formula): simulated_fitted_pln_2cov_formula.save() return generate_new_model( simulated_fitted_pln_2cov_formula, - "endog ~0 + covariates", + "endog ~0 + exog", data_sim_2cov, ) @@ -261,7 +261,7 @@ def simulated_loaded_pln_2cov_array(simulated_fitted_pln_2cov_array): return generate_new_model( simulated_fitted_pln_2cov_array, endog_sim_2cov, - covariates=covariates_sim_2cov, + exog=exog_sim_2cov, offsets=offsets_sim_2cov, add_const=False, ) diff --git a/tests/import_data.py b/tests/import_data.py index c44353bb..9ef5ef7e 100644 --- a/tests/import_data.py +++ b/tests/import_data.py @@ -8,14 +8,14 @@ from pyPLNmodels import ( ( endog_sim_0cov, - covariates_sim_0cov, + exog_sim_0cov, offsets_sim_0cov, true_covariance_0cov, 
true_coef_0cov, ) = get_simulated_count_data(return_true_param=True, nb_cov=0, add_const=False) ( endog_sim_2cov, - covariates_sim_2cov, + exog_sim_2cov, offsets_sim_2cov, true_covariance_2cov, true_coef_2cov, @@ -23,7 +23,7 @@ from pyPLNmodels import ( data_sim_0cov = { "endog": endog_sim_0cov, - "covariates": covariates_sim_0cov, + "exog": exog_sim_0cov, "offsets": offsets_sim_0cov, } true_sim_0cov = {"Sigma": true_covariance_0cov, "beta": true_coef_0cov} @@ -32,7 +32,7 @@ true_sim_2cov = {"Sigma": true_covariance_2cov, "beta": true_coef_2cov} data_sim_2cov = { "endog": endog_sim_2cov, - "covariates": covariates_sim_2cov, + "exog": exog_sim_2cov, "offsets": offsets_sim_2cov, } endog_real, labels_real = get_real_count_data(return_labels=True, n_samples=100, dim=50) diff --git a/tests/test_common.py b/tests/test_common.py index 5904857d..b1a6837c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -98,7 +98,7 @@ def test__print_end_of_fitting_message(instance): @pytest.mark.parametrize("pln", dict_fixtures["fitted_pln"]) @filter_models(["Pln", "PlnPCA"]) -def test_fail_wrong_covariates_prediction(pln): +def test_fail_wrong_exog_prediction(pln): X = torch.randn(pln.n_samples, pln.nb_cov + 1) with pytest.raises(Exception): pln.predict(X) diff --git a/tests/test_setters.py b/tests/test_setters.py index 6814e842..828989e8 100644 --- a/tests/test_setters.py +++ b/tests/test_setters.py @@ -9,7 +9,7 @@ from tests.utils import MSE, filter_models @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_torch(pln): pln.endog = pln.endog - pln.covariates = pln.covariates + pln.exog = pln.exog pln.offsets = pln.offsets pln.fit() @@ -28,13 +28,13 @@ def test_parameters_setter_with_torch(pln): @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_numpy(pln): np_endog = pln.endog.numpy() - if pln.covariates is not None: - np_covariates = pln.covariates.numpy() + if pln.exog is not None: + np_exog = pln.exog.numpy() else: - np_covariates = None + np_exog = None np_offsets = pln.offsets.numpy() pln.endog = np_endog - pln.covariates = np_covariates + pln.exog = np_exog pln.offsets = np_offsets pln.fit() @@ -59,13 +59,13 @@ def test_parameters_setter_with_numpy(pln): @pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) def test_data_setter_with_pandas(pln): pd_endog = pd.DataFrame(pln.endog.numpy()) - if pln.covariates is not None: - pd_covariates = pd.DataFrame(pln.covariates.numpy()) + if pln.exog is not None: + pd_exog = pd.DataFrame(pln.exog.numpy()) else: - pd_covariates = None + pd_exog = None pd_offsets = pd.DataFrame(pln.offsets.numpy()) pln.endog = pd_endog - pln.covariates = pd_covariates + pln.exog = pd_exog pln.offsets = pd_offsets pln.fit() @@ -93,17 +93,17 @@ def test_fail_data_setter_with_torch(pln): pln.endog = pln.endog - 100 n, p = pln.endog.shape - if pln.covariates is None: + if pln.exog is None: d = 0 else: - d = pln.covariates.shape[-1] + d = pln.exog.shape[-1] with pytest.raises(ValueError): pln.endog = torch.zeros(n + 1, p) with pytest.raises(ValueError): pln.endog = torch.zeros(n, p + 1) with pytest.raises(ValueError): - pln.covariates = torch.zeros(n + 1, d) + pln.exog = torch.zeros(n + 1, d) with pytest.raises(ValueError): pln.offsets = torch.zeros(n + 1, p) @@ -137,10 +137,10 @@ def test_fail_parameters_setter_with_torch(pln): with pytest.raises(ValueError): pln.components = torch.zeros(dim + 1, dim_latent) - if pln.covariates is None: + if pln.exog is None: d = 0 else: - d = pln.covariates.shape[-1] + d = 
pln.exog.shape[-1] with pytest.raises(ValueError): pln.coef = torch.zeros(d + 1, dim) -- GitLab From 9840eda7cd521d35f1fcafd6116a64ecf034fede Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 17:10:19 +0200 Subject: [PATCH 044/167] began to add tests for the readme file. --- ..._files.py => create_docstrings_example.py} | 34 ++++++++++++------- tests/create_readme_examples.py | 2 ++ tests/test_docstrings_example.sh | 5 +++ ...st_examples.sh => test_readme_examples.sh} | 2 +- 4 files changed, 29 insertions(+), 14 deletions(-) rename tests/{create_example_files.py => create_docstrings_example.py} (50%) create mode 100644 tests/create_readme_examples.py create mode 100755 tests/test_docstrings_example.sh rename tests/{test_examples.sh => test_readme_examples.sh} (65%) mode change 100755 => 100644 diff --git a/tests/create_example_files.py b/tests/create_docstrings_example.py similarity index 50% rename from tests/create_example_files.py rename to tests/create_docstrings_example.py index b575159f..6ebf37d7 100644 --- a/tests/create_example_files.py +++ b/tests/create_docstrings_example.py @@ -2,13 +2,13 @@ import ast import os -def get_lines(filename): - with open(f"../pyPLNmodels/{filename}.py") as file: +def get_lines(filename, filetype=".py"): + with open(f"../pyPLNmodels/{filename}{filetype}") as file: lines = [line.rstrip() for line in file] return lines -def get_examples(lines): +def get_examples_docstring(lines): examples = [] in_example = False example = [] @@ -26,11 +26,11 @@ def get_examples(lines): return examples -def write_examples(examples, prefix_filename): +def write_examples(examples, filename, dirname): for i in range(len(examples)): example = examples[i] nb_example = str(i + 1) - example_filename = f"examples/{prefix_filename}_example_{nb_example}.py" + example_filename = f"{dirname}/{prefix_filename}_example_{nb_example}.py" try: os.remove(example_filename) except FileNotFoundError: @@ -40,15 +40,23 @@ def write_examples(examples, prefix_filename): the_file.write(line + "\n") -def filename_to_example_file(filename): +def filename_to_docstring_example_file(filename, dirname): lines = get_lines(filename) - examples = get_examples(lines) - write_examples(examples, filename) + examples = get_examples_docstring(lines) + write_examples(examples, filename, dirname) + + +def filename_to_readme_example_file(dirname): + lines = get_lines("README", filetype=".md") + examples = get_examples_docstring(lines) + write_examples(examples, "readme") # filename_to_example_file("models") -os.makedirs("examples", exist_ok=True) -filename_to_example_file("_utils") -filename_to_example_file("models") -filename_to_example_file("elbos") -filename_to_example_file("load") +os.makedirs("docstrings_examples", exist_ok=True) +filename_to_docstring_example_file("_utils", "docstrings") +filename_to_docstring_example_file("models", "docstrings") +filename_to_docstring_example_file("elbos", "docstrings") +filename_to_docstring_example_file("load", "docstrings") + +filename_to_readme_example_file("docstrings") diff --git a/tests/create_readme_examples.py b/tests/create_readme_examples.py new file mode 100644 index 00000000..c27d00ef --- /dev/null +++ b/tests/create_readme_examples.py @@ -0,0 +1,2 @@ +import ast +import os diff --git a/tests/test_docstrings_example.sh b/tests/test_docstrings_example.sh new file mode 100755 index 00000000..a11209ac --- /dev/null +++ b/tests/test_docstrings_example.sh @@ -0,0 +1,5 @@ +search_dir="docstrings_examples" +for entry in 
"$search_dir"/* +do + python "$entry" +done diff --git a/tests/test_examples.sh b/tests/test_readme_examples.sh old mode 100755 new mode 100644 similarity index 65% rename from tests/test_examples.sh rename to tests/test_readme_examples.sh index b8a32f31..4af8b766 --- a/tests/test_examples.sh +++ b/tests/test_readme_examples.sh @@ -1,4 +1,4 @@ -search_dir="examples" +search_dir="readme_examples" for entry in "$search_dir"/* do python "$entry" -- GitLab From 7cdfe9b687e3e4ae07386ac47a26b9e800d6f9b4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:01:19 +0200 Subject: [PATCH 045/167] rewrite how we create the tests for docstrings (and added for the readme). Now they will al be performed by pytest. --- .gitignore | 3 +- ... create_readme_and_docstrings_examples.py} | 36 ++++++++++--------- tests/create_readme_examples.py | 2 -- tests/test_docstrings_example.sh | 5 --- tests/test_readme_examples.sh | 5 --- 5 files changed, 22 insertions(+), 29 deletions(-) rename tests/{create_docstrings_example.py => create_readme_and_docstrings_examples.py} (56%) delete mode 100644 tests/create_readme_examples.py delete mode 100755 tests/test_docstrings_example.sh delete mode 100644 tests/test_readme_examples.sh diff --git a/.gitignore b/.gitignore index fb7059f0..aaf6fd98 100644 --- a/.gitignore +++ b/.gitignore @@ -153,4 +153,5 @@ tests/Pln* slides/ index.html -tests/examples/*.py +tests/test_models* +tests/test_load* diff --git a/tests/create_docstrings_example.py b/tests/create_readme_and_docstrings_examples.py similarity index 56% rename from tests/create_docstrings_example.py rename to tests/create_readme_and_docstrings_examples.py index 6ebf37d7..a1a8a20a 100644 --- a/tests/create_docstrings_example.py +++ b/tests/create_readme_and_docstrings_examples.py @@ -2,8 +2,12 @@ import ast import os -def get_lines(filename, filetype=".py"): - with open(f"../pyPLNmodels/{filename}{filetype}") as file: +dir_docstrings = "docstrings_examples" +dir_readme = "readme_examples" + + +def get_lines(path_to_file, filename, filetype=".py"): + with open(f"{path_to_file}{filename}{filetype}") as file: lines = [line.rstrip() for line in file] return lines @@ -26,11 +30,11 @@ def get_examples_docstring(lines): return examples -def write_examples(examples, filename, dirname): +def write_examples(examples, filename): for i in range(len(examples)): example = examples[i] nb_example = str(i + 1) - example_filename = f"{dirname}/{prefix_filename}_example_{nb_example}.py" + example_filename = f"test_{filename}_example_{nb_example}.py" try: os.remove(example_filename) except FileNotFoundError: @@ -41,22 +45,22 @@ def write_examples(examples, filename, dirname): def filename_to_docstring_example_file(filename, dirname): - lines = get_lines(filename) + lines = get_lines("../pyPLNmodels/", filename) examples = get_examples_docstring(lines) - write_examples(examples, filename, dirname) + write_examples(examples, filename) -def filename_to_readme_example_file(dirname): - lines = get_lines("README", filetype=".md") - examples = get_examples_docstring(lines) +def filename_to_readme_example_file(): + lines = get_lines("../", "README", filetype=".md") + examples = get_examples_readme(lines) write_examples(examples, "readme") -# filename_to_example_file("models") -os.makedirs("docstrings_examples", exist_ok=True) -filename_to_docstring_example_file("_utils", "docstrings") -filename_to_docstring_example_file("models", "docstrings") -filename_to_docstring_example_file("elbos", "docstrings") 
-filename_to_docstring_example_file("load", "docstrings") +# os.makedirs(dir_readme, exist_ok=True) +# filename_to_readme_example_file(dir_readme) -filename_to_readme_example_file("docstrings") +os.makedirs("docstrings_examples", exist_ok=True) +filename_to_docstring_example_file("_utils", dir_docstrings) +filename_to_docstring_example_file("models", dir_docstrings) +filename_to_docstring_example_file("elbos", dir_docstrings) +filename_to_docstring_example_file("load", dir_docstrings) diff --git a/tests/create_readme_examples.py b/tests/create_readme_examples.py deleted file mode 100644 index c27d00ef..00000000 --- a/tests/create_readme_examples.py +++ /dev/null @@ -1,2 +0,0 @@ -import ast -import os diff --git a/tests/test_docstrings_example.sh b/tests/test_docstrings_example.sh deleted file mode 100755 index a11209ac..00000000 --- a/tests/test_docstrings_example.sh +++ /dev/null @@ -1,5 +0,0 @@ -search_dir="docstrings_examples" -for entry in "$search_dir"/* -do - python "$entry" -done diff --git a/tests/test_readme_examples.sh b/tests/test_readme_examples.sh deleted file mode 100644 index 4af8b766..00000000 --- a/tests/test_readme_examples.sh +++ /dev/null @@ -1,5 +0,0 @@ -search_dir="readme_examples" -for entry in "$search_dir"/* -do - python "$entry" -done -- GitLab From 5aac26014e342bc1116b1674c05fcadf47473593 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:03:44 +0200 Subject: [PATCH 046/167] renaming. --- ...cstrings_examples.py => create_readme_and_docstrings_tests.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{create_readme_and_docstrings_examples.py => create_readme_and_docstrings_tests.py} (100%) diff --git a/tests/create_readme_and_docstrings_examples.py b/tests/create_readme_and_docstrings_tests.py similarity index 100% rename from tests/create_readme_and_docstrings_examples.py rename to tests/create_readme_and_docstrings_tests.py -- GitLab From 7207ee4857f8a92ae7546a04410d3221eeb9f380 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:04:00 +0200 Subject: [PATCH 047/167] rewrite the ci with appropriate names and remove the examples job. --- .gitlab-ci.yml | 44 +++++++++++++++++--------------------------- 1 file changed, 17 insertions(+), 27 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2425e78f..13a9c1dc 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,35 +4,25 @@ stages: - build - publish -# -# black: -# stage: checks -# image: registry.gitlab.com/pipeline-components/black:latest -# script: -# - black --check --verbose -- . -# tags: -# - docker -# tests: -# stage: checks -# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" -# before_script: -# pip install '.[tests]' -# script: -# - pip install . -# - cd tests -# - pytest -# examples: -# stage: checks -# image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" -# before_script: -# pip install '.[tests]' -# script: -# - pip install . -# - cd tests -# - python create_example_files.py -# - ./test_examples.sh +black: + stage: checks + image: registry.gitlab.com/pipeline-components/black:latest + script: + - black --check --verbose -- . + tags: + - docker +tests: + stage: checks + image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + before_script: + pip install '.[tests]' + script: + - pip install . + - cd tests + - python create_readme_and_docstrings_test.py + - pytest . 
build_package: -- GitLab From e30fe36c291ab9a7f26c19573201a914f789248d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:21:53 +0200 Subject: [PATCH 048/167] create the right examples. Does not take the pip install pyPLNmodels --- tests/create_readme_and_docstrings_tests.py | 24 +++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/tests/create_readme_and_docstrings_tests.py b/tests/create_readme_and_docstrings_tests.py index a1a8a20a..56e6cdf7 100644 --- a/tests/create_readme_and_docstrings_tests.py +++ b/tests/create_readme_and_docstrings_tests.py @@ -30,6 +30,26 @@ def get_examples_docstring(lines): return examples +def get_examples_readme(lines): + examples = [] + example = [] + in_example = False + for line in lines: + line = line.lstrip() + if len(line) > 2: + if line[0:3] == "```": + if in_example is False: + in_example = True + else: + in_example = False + examples.append(example) + example = [] + elif in_example is True: + example.append(line) + examples.pop(0) # The first is pip install pyPLNmodels which is not python code. + return examples + + def write_examples(examples, filename): for i in range(len(examples)): example = examples[i] @@ -56,8 +76,8 @@ def filename_to_readme_example_file(): write_examples(examples, "readme") -# os.makedirs(dir_readme, exist_ok=True) -# filename_to_readme_example_file(dir_readme) +os.makedirs(dir_readme, exist_ok=True) +filename_to_readme_example_file() os.makedirs("docstrings_examples", exist_ok=True) filename_to_docstring_example_file("_utils", dir_docstrings) -- GitLab From e7760d71752ea9cdf97245f0e67b14b48467a27a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:22:43 +0200 Subject: [PATCH 049/167] add tests/test_readme in .gitignore since they are auto-generated. --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index aaf6fd98..a0185fcb 100644 --- a/.gitignore +++ b/.gitignore @@ -155,3 +155,4 @@ index.html tests/test_models* tests/test_load* +tests/test_readme* -- GitLab From aa89da92184b1134d3f326b1c4b23351f4279ea4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:25:27 +0200 Subject: [PATCH 050/167] wrong ci. --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 13a9c1dc..9da1a847 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -49,8 +49,8 @@ publish_package: - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker - # only: - # - tags + only: + - tags pages: stage: publish -- GitLab From a8dd8d31f1b6b91f4733eac7fc4ff391915a04d5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:26:24 +0200 Subject: [PATCH 051/167] forgot s in tests --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9da1a847..d8c20b0f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -21,7 +21,7 @@ tests: script: - pip install . - cd tests - - python create_readme_and_docstrings_test.py + - python create_readme_and_docstrings_tests.py - pytest . 
-- GitLab From 756015dae220bd92c36354b0c15175cb4b332ba5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:46:22 +0200 Subject: [PATCH 052/167] rewrite counts for oaks instead of endog --- pyPLNmodels/oaks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/oaks.py b/pyPLNmodels/oaks.py index bfdbad97..ab59a3fd 100644 --- a/pyPLNmodels/oaks.py +++ b/pyPLNmodels/oaks.py @@ -37,7 +37,7 @@ def load_oaks(): offsets = pd.read_csv(offsets_stream) exog = pd.read_csv(exog_stream) oaks = { - "endog": endog.to_numpy(), + "counts": endog.to_numpy(), "offsets": offsets.to_numpy(), "tree": exog.tree.to_numpy(), "dist2ground": exog.distTOground.to_numpy(), -- GitLab From fa95906b69d8b06714701b4d995cabfd26e47169 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 20:46:40 +0200 Subject: [PATCH 053/167] fix bugs since it was output more than one file but it is not a bunch of cells. --- tests/create_readme_and_docstrings_tests.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/create_readme_and_docstrings_tests.py b/tests/create_readme_and_docstrings_tests.py index 56e6cdf7..d9f27aeb 100644 --- a/tests/create_readme_and_docstrings_tests.py +++ b/tests/create_readme_and_docstrings_tests.py @@ -30,8 +30,7 @@ def get_examples_docstring(lines): return examples -def get_examples_readme(lines): - examples = [] +def get_example_readme(lines): example = [] in_example = False for line in lines: @@ -42,12 +41,10 @@ def get_examples_readme(lines): in_example = True else: in_example = False - examples.append(example) - example = [] elif in_example is True: example.append(line) - examples.pop(0) # The first is pip install pyPLNmodels which is not python code. - return examples + example.pop(0) # The first is pip install pyPLNmodels which is not python code. + return [example] def write_examples(examples, filename): @@ -72,7 +69,7 @@ def filename_to_docstring_example_file(filename, dirname): def filename_to_readme_example_file(): lines = get_lines("../", "README", filetype=".md") - examples = get_examples_readme(lines) + examples = get_example_readme(lines) write_examples(examples, "readme") -- GitLab From c5f70d4d084a72c4fecfeba06586a20a8d5944de Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 23:10:37 +0200 Subject: [PATCH 054/167] add the notebook in the Getting started. Should be automated but it needs an image of jupyter notebook and a little preprocessing to remove the pip install line. --- Getting_started.py | 139 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 139 insertions(+) create mode 100644 Getting_started.py diff --git a/Getting_started.py b/Getting_started.py new file mode 100644 index 00000000..4fcc9552 --- /dev/null +++ b/Getting_started.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# coding: utf-8 + + +# ## pyPLNmodels + +# We assume the data comes from a PLN model: $ \text{counts} \sim \mathcal P(\exp(\text{Z}))$, where $Z$ are some unknown latent variables. +# +# +# The goal of the package is to retrieve the latent variables $Z$ given the counts. To do so, one can instantiate a Pln or PlnPCA model, fit it and then extract the latent variables. 
+ +# ### Import the needed functions + +from pyPLNmodels import ( + get_real_count_data, + get_simulated_count_data, + load_model, + Pln, + PlnPCA, + PlnPCAcollection, +) +import matplotlib.pyplot as plt + + +# ### Load the data + +counts, labels = get_real_count_data(return_labels=True) # np.ndarray + + +# ### PLN model + +pln = Pln(counts, add_const=True) +pln.fit() + + +print(pln) + + +# #### Once fitted, we can extract multiple variables: + +gaussian = pln.latent_variables +print(gaussian.shape) + + +model_param = pln.model_parameters +print(model_param["coef"].shape) +print(model_param["covariance"].shape) + + +# ### PlnPCA model + +pca = PlnPCA(counts, add_const=True, rank=5) +pca.fit() + + +print(pca) + + +print(pca.latent_variables.shape) + + +print(pca.model_parameters["components"].shape) +print(pca.model_parameters["coef"].shape) + + +# ### One can save the model in order to load it back after: + +pca.save() +dict_init = load_model("PlnPCA_nbcov_1_rank_5") +loaded_pca = PlnPCA(counts, add_const=True, dict_initialization=dict_init) +print(loaded_pca) + + +# ### One can fit multiple PCA and choose the best rank with BIC or AIC criterion + +pca_col = PlnPCAcollection(counts, add_const=True, ranks=[5, 15, 25, 40, 50]) +pca_col.fit() + + +pca_col.show() + + +print(pca_col) + + +# ### One can extract the best model found (according to AIC or BIC criterion). + +# #### AIC best model + +print(pca_col.best_model(criterion="AIC")) + + +# #### BIC best model + +print(pca_col.best_model(criterion="BIC")) + + +# #### Visualization of the individuals (sites) with PCA on the latent variables. + +pln.viz(colors=labels) +plt.show() + + +best_pca = pca_col.best_model() +best_pca.viz(colors=labels) +plt.show() + + +# ### What would give a PCA on the log normalize data ? + +from sklearn.decomposition import PCA +import numpy as np +import seaborn as sns + + +sk_pca = PCA(n_components=2) +pca_log_counts = sk_pca.fit_transform(np.log(counts + (counts == 0))) +sns.scatterplot(x=pca_log_counts[:, 0], y=pca_log_counts[:, 1], hue=labels) + + +# ### Visualization of the variables + +pln.plot_pca_correlation_graph(["var_1", "var_2"], indices_of_variables=[0, 1]) +plt.show() + + +best_pca.plot_pca_correlation_graph(["var_1", "var_2"], indices_of_variables=[0, 1]) +plt.show() + + +# ### Visualization of each components of the PCA +# + +pln.scatter_pca_matrix(color=labels, n_components=5) +plt.show() + + +best_pca.scatter_pca_matrix(color=labels, n_components=6) +plt.show() -- GitLab From 9ae89155e9fd150ebcc6b7f85245e6904806d743 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 23:11:57 +0200 Subject: [PATCH 055/167] renaming --- Getting_started.py => tests/test_getting_started.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Getting_started.py => tests/test_getting_started.py (100%) diff --git a/Getting_started.py b/tests/test_getting_started.py similarity index 100% rename from Getting_started.py rename to tests/test_getting_started.py -- GitLab From 3b09353ef2753b485ed29b036c59a9be0f18543a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 23:19:24 +0200 Subject: [PATCH 056/167] fix bugs in tests. 
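
The docstring examples are corrected (PlnPCA instead of Pln in the
plot_expected_vs_true example, plnpcas.show() instead of pcas.show()) and
pca_projected_latent_variables now returns the projection instead of
discarding it. A minimal usage sketch of what the fix enables, added here for
illustration only; that the returned object has a .shape attribute of
(n_samples, n_components) is an assumption, not checked here:

    from pyPLNmodels import Pln, get_real_count_data

    endog, labels = get_real_count_data(return_labels=True)
    pln = Pln(endog, add_const=True)
    pln.fit()
    # Before this fix the call returned None; now the projection can be reused.
    proj = pln.pca_projected_latent_variables(n_components=2)
    print(proj.shape)  # assumed: (n_samples, 2)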
--- pyPLNmodels/models.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index db42daeb..492c9106 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1460,7 +1460,7 @@ class Pln(_model): """, ) def pca_projected_latent_variables(self, n_components: Optional[int] = None): - super().pca_projected_latent_variables(n_components=n_components) + return super().pca_projected_latent_variables(n_components=n_components) @_add_doc( _model, @@ -1759,7 +1759,7 @@ class PlnPCAcollection: >>> plnpcas = PlnPCAcollection.from_formula("endog ~ 0 + cov", data = data, ranks = [5,8,12]) >>> plnpcas.fit() >>> print(plnpcas) - >>> pcas.show() + >>> plnpcas.show() See also -------- :class:`~pyPLNmodels.PlnPCA` @@ -2671,7 +2671,7 @@ class PlnPCA(_model): >>> import matplotlib.pyplot as plt >>> from pyPLNmodels import PlnPCA, get_real_count_data >>> endog, labels = get_real_count_data(return_labels = True) - >>> plnpca = Pln(endog,add_const = True) + >>> plnpca = PlnPCA(endog,add_const = True) >>> plnpca.fit() >>> plnpca.plot_expected_vs_true() >>> plt.show() @@ -2714,7 +2714,7 @@ class PlnPCA(_model): """, ) def pca_projected_latent_variables(self, n_components: Optional[int] = None): - super().pca_projected_latent_variables(n_components=n_components) + return super().pca_projected_latent_variables(n_components=n_components) @_add_doc( _model, -- GitLab From ef7903ff8c9d2342ca107f5f1dea450ac3ca334b Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 29 Jun 2023 23:47:41 +0200 Subject: [PATCH 057/167] fix notebook --- Getting_started.ipynb | 5812 +++++++++++++++++---------------- pyPLNmodels/load.py | 7 +- tests/test_getting_started.py | 4 +- 3 files changed, 2919 insertions(+), 2904 deletions(-) diff --git a/Getting_started.ipynb b/Getting_started.ipynb index 28c863c3..f5f30e8d 100644 --- a/Getting_started.ipynb +++ b/Getting_started.ipynb @@ -74,7 +74,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "id": "5a8e0140", "metadata": {}, "outputs": [], @@ -93,7 +93,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "id": "cee623e0", "metadata": {}, "outputs": [ @@ -119,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "id": "ef9f6c70", "metadata": {}, "outputs": [ @@ -127,7 +127,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Setting the offsets as the log of the sum of counts\n", + "Setting the offsets as the log of the sum of endog\n", "Fitting a Pln model with full covariance model.\n", "Initialization ...\n", "Initialization finished\n", @@ -142,7 +142,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "id": "ac28955b", "metadata": {}, "outputs": [ @@ -158,7 +158,7 @@ "* Useful properties\n", " .latent_variables, .model_parameters, .latent_parameters, .optim_parameters\n", "* Useful methods\n", - " .show(), .coef() .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()\n", + " .show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()\n", "* Additional properties for Pln\n", " None\n", "* Additional methods for Pln\n", @@ -180,7 +180,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "id": "182e24a2", "metadata": {}, "outputs": [ @@ -199,7 +199,7 @@ }, 
{ "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "id": "b8e7b983", "metadata": {}, "outputs": [ @@ -228,7 +228,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "id": "0a4fe7a4", "metadata": { "scrolled": true @@ -238,12 +238,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "Setting the offsets as the log of the sum of counts\n", + "Setting the offsets as the log of the sum of endog\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 5 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 2902 iterations\n" + "Tolerance 0.001 reached in 4204 iterations\n" ] } ], @@ -254,7 +254,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "id": "93db535a", "metadata": {}, "outputs": [ @@ -265,12 +265,12 @@ "A multivariate Poisson Lognormal with 5 principal component. \n", "======================================================================\n", " Loglike Dimension Nb param BIC AIC\n", - " -264802.03 200 1190 268461 265992\n", + " -264724.62 200 1190 268384 265914\n", "======================================================================\n", "* Useful properties\n", " .latent_variables, .model_parameters, .latent_parameters, .optim_parameters\n", "* Useful methods\n", - " .show(), .coef() .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()\n", + " .show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()\n", "* Additional properties for PlnPCA\n", " .projected_latent_variables\n", "* Additional methods for PlnPCA\n", @@ -284,7 +284,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "id": "c8321820", "metadata": {}, "outputs": [ @@ -302,7 +302,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "id": "b3e7db70", "metadata": {}, "outputs": [ @@ -330,7 +330,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 13, "id": "f81ccaf7", "metadata": {}, "outputs": [ @@ -338,16 +338,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Setting the offsets as the log of the sum of counts\n", + "Setting the offsets as the log of the sum of endog\n", "A multivariate Poisson Lognormal with 5 principal component. 
\n", "======================================================================\n", " Loglike Dimension Nb param BIC AIC\n", - " -264800.09 200 1190 268459 265990\n", + " -264723.8 200 1190 268383 265913\n", "======================================================================\n", "* Useful properties\n", " .latent_variables, .model_parameters, .latent_parameters, .optim_parameters\n", "* Useful methods\n", - " .show(), .coef() .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()\n", + " .show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()\n", "* Additional properties for PlnPCA\n", " .projected_latent_variables\n", "* Additional methods for PlnPCA\n", @@ -357,7 +357,7 @@ ], "source": [ "pca.save()\n", - "dict_init = load_model(\"PlnPCA_nbcov_1_rank_5\")\n", + "dict_init = load_model(\"PlnPCA_nbcov_1_dim_200_rank_5\")\n", "loaded_pca = PlnPCA(counts, add_const = True, dict_initialization= dict_init)\n", "print(loaded_pca)" ] @@ -372,7 +372,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 14, "id": "084290d5", "metadata": {}, "outputs": [ @@ -380,36 +380,36 @@ "name": "stdout", "output_type": "stream", "text": [ - "Setting the offsets as the log of the sum of counts\n", + "Setting the offsets as the log of the sum of endog\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 5 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 2871 iterations\n", + "Tolerance 0.001 reached in 2849 iterations\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 15 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 6305 iterations\n", + "Tolerance 0.001 reached in 6459 iterations\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 25 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 5134 iterations\n", + "Tolerance 0.001 reached in 5989 iterations\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 40 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 4695 iterations\n", + "Tolerance 0.001 reached in 3915 iterations\n", "----------------------------------------------------------------------\n", "Fitting a PlnPCAcollection model with 50 components\n", "Initialization ...\n", "Initialization finished\n", - "Tolerance 0.001 reached in 1660 iterations\n", + "Tolerance 0.001 reached in 1717 iterations\n", "======================================================================\n", "\n", "DONE!\n", - " Best model(lower BIC): 40\n", + " Best model(lower BIC): 25\n", " \n", " Best model(lower AIC): 50\n", " \n", @@ -425,13 +425,13 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 15, "id": "65f42831", "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkIAAAGdCAYAAAD+JxxnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAACCTElEQVR4nO3deVyU1f7A8c8wMOyLqAgIiPuupamZuVQmmpVrmVpZWWZipaV2vUt1+9W1PVtsvaVWmktqmZllCuaCWZa71y3cQTMFZF/m/P44MDBsgg7MA3zfr9d58cwzZ545cND5cp5zzteklFIIIYQQQtRBLs5ugBBCCCGEs0ggJIQQQog6SwIhIYQQQtRZEggJIYQQos6SQEgIIYQQdZYEQkIIIYSosyQQEkIIIUSdJYGQEEIIIeosV2c3wMisViunT5/G19cXk8nk7OYIIYQQogKUUly8eJHQ0FBcXMof85FAqBynT58mPDzc2c0QQgghxGU4ceIEYWFh5daRQKgcvr6+gP5B+vn5Obk1QgghhKiIlJQUwsPDbZ/j5ZFAqBwFt8P8/PwkEBJCiBouOxvefFMfP/44WCzObU+dVw0dUpFpLSZJulq2lJQU/P39SU5OlkBICCFquLQ08PHRx6mp4O3t3PbUeVXYIZX5/JYRISGEEHWCqyuMG1d4LJzMIB0iI0LlkBEhIYQQouaRESEhDEQpRW5uLnl5ec5uihA1htlsxtXVVbYuEVVOAiEhqlB2djYJCQmkp6c7uylC1DheXl6EhIRgkVnNogpJICREFbFarcTHx2M2mwkNDcVischft0JUgFKK7Oxs/vzzT+Lj42nZsuUlN8WriLQ0aNxYH586JZOlnc4gHSKBkBBVJDs7G6vVSnh4OF5eXs5ujhA1iqenJ25ubhw7dozs7Gw8PDwcct3kZIdcRjiKATpEAiEhqpgj/pIVoi5y9L8dT084eLDwWDiZQTpEAiEnyMuDjRshIQFCQqB3bzCbnd0qIYSo3VxcoGVLZ7dC2BikQyQQqmbLl+sNNE+eLDwXFqY31xw+3HntEkIIIeoiGbOvRsuXw8iR9kEQ6DliI0fq54UQFXPfffcxdOjQKru+yWTiq6++qrLri+qXkwNz5uiSk+Ps1gijdIgEQtUkL0+PBJW2fWXBuSlTdD0hnO2+++7DZDLZSv369Rk4cCC7du2yq1dasBATE8Mtt9xC/fr18fLyol27djz55JOcOnXKoW188803mTdvnu1xv379mDJlisOun5CQwKBBgxx2PeF82dkwebIu2dnObo0wSodIIFRNNm4sORJUlFJw4oSuJ0RxeXkQGwtffKG/VkfAPHDgQBISEkhISGDdunW4urpy6623lvuaDz74gP79+xMcHMyyZcvYt28f77//PsnJybz22msOaVdeXh5WqxV/f38CAgIccs2isvP/Qw4ODsbd3d3h1xfOYzbr0feRI2VepiEYpUOUKFNycrICVHJy8hVfa+FCpXS4U35ZuNABDReGkJGRofbt26cyMjKu6DrLlikVFmb/exIWps9XlXHjxqkhQ4bYndu4caMC1NmzZ23nALVixQqllFInTpxQFotFTZkypdRrXrhwocz3u3DhgpowYYIKCgpS7u7uqn379uqbb75RSik1d+5c5e/vr77++mvVtm1bZTabVXx8vF0bx40bpwC7Eh8fr5RSavfu3WrgwIHK29tbBQUFqbvvvlv9+eeftvfu27evio6OVo8//riqX7++6tevX4nvTSmldu3apW644Qbl4eGhAgMD1UMPPaQuXrxY4mf2yiuvqODgYBUYGKgmTZqksrOzy/tRi3I46t+QqHsq8/ktI0LVJCTEsfVE3WCUeWWpqal8/vnntGjRgvr165daZ+nSpWRnZzNjxoxSny9r9MZqtTJo0CA2b97M559/zr59+3jxxRcxF/kLMT09nZdeeon//ve/7N27l6CgILtrvPnmm/Ts2ZOHHnrINooVHh5OUlISN954I1dffTW//vora9as4cyZM9x55512r58/fz4Wi4XNmzfz/vvvl2hjWloaUVFR1KtXj19++YWlS5fy448/MnnyZLt6MTExHDlyhJiYGObPn8+8efPsbt8JIYxHVo1Vk9699eqwU6dKnydkMunne/eu/rYJY7rUvDKTSc8rGzKkakaVV61ahY+PD6ADgZCQEFatWlXm3i6HDh3Cz8+PkEpG8z/++CPbtm1j//79tGrVCoBmzZrZ1cnJyeHdd9+lc+fOpV7D398fi8WCl5cXwcHBtvPvvPMOV199Nf/5z39s5z755BPCw8M5ePCg7f1atmzJyy+/XGYbFy5cSGZmJp9++ine+bvfvvPOO9x222289NJLNGrUCIB69erxzjvvYDabadOmDYMHD2bdunU89NBDlfqZCCGqj4wIVROzWS+RB/0BVprZs+W+tSjk7HllN9xwAzt27GDHjh1s27aNqKgoBg0axLFjx8poj7qsFCI7duwgLCzMFpSUxmKx0KlTp0pfe+fOncTExODj42Mrbdq0AeDIkSO2el27di33Ovv376dz5862IAigV69eWK1WDhw4YDvXvn17u5GskJAQzp49W+l2i6qRnq4zOjRurI+FkxmkQ2REqBoNHw5ffglTH8uj6amNWDGxkb6YTIpPPzXJPkLCTkKCY+tVlre3Ny1atLA9/u9//4u/vz8fffQRzz//fIn6rVq1Ijk5mYSEhEqNCnlWYEdZT0/PywqyUlNTbaM2xRVto7eDchy5ubnZPTaZTFitVodcW1w5peD06cJj4WQG6RAZEapmw1nOUVMksdxALDfQhv0oZeLMj7su/WJRpxhtXpnJZMLFxYWMjIxSnx85ciQWi6XMW0xJSUmlnu/UqRMnT57kYMFW+5fJYrGQV2w5XZcuXdi7dy+RkZG0aNHCrlQm+Gnbti07d+4kLS3Ndm7z5s24uLjQunXrK2q3qD4eHvD777o4KHWZuBIG6RAJhKpT/sxXU/79DhcUT6KXFM+eX4+cJSuc2TphMAXzysoaCDGZIDy86uaVZWVlkZiYSGJiIvv37+fRRx+1jbCUJjw8nDfeeIM333yT8ePHs2HDBo4dO8bmzZt5+OGH+b//+79SX9e3b1/69OnDiBEjWLt2LfHx8Xz33XesWbOmUu2NjIzk559/5ujRo5w7dw6r1Up0dDTnz59n9OjR/PLLLxw5coTvv/+e+++/v0TQVJ6xY8fi4eHBuHHj2LNnDzExMTz66KPcc889tvlBwvjMZrjqKl1kGoIBGKRDJBCqLmXMfL2bz2lEIicJZ/EjsbKjorApb15ZweOqnFe2Zs0aQkJCCAkJoUePHrbVUv369SvzNZMmTeKHH37g1KlTDBs2jDZt2vDggw/i5+fHtGnTynzdsmXL6NatG6NHj6Zdu3bMmDGjUoEKwLRp0zCbzbRr146GDRty/PhxQkND2bx5M3l5eQwYMICOHTsyZcoUAgICKpXQ08vLi++//57z58/TrVs3Ro4cyU033cQ777xTqTYKIYzHpJTcKS1LSkoK/v7+JCcn4+fnd2UXi42FG2
6wP2cCmsMLh//OP3mBzuzg9/VJmG7od2XvJQwhMzOT+Ph4mjZtiscVDPuWlp8uPFwHQTKvTNRmjvo3VCAnBxYs0Mdjx0KxKV2iulVhh1Tm81tGhKpLwYzWa4C3gNbobd86wsRZ7+PlksZOruLHtRKXCnvDh8PRoxATAwsX6q/x8RIECVFZ2dlw//26SIoNAzBIh8iqseoSEqKDoCn5j6OAA8A6qH/becb3/5i3f3iMV9d25ub/lHkVUUeZzVDOHSkhRAWYzXDLLYXHwskM0iEyIlRdel0H9+d3tAkdFI0BUoBtMDXqDVxMefzwayC7ZAGZEEI4nIcHfPutLrJqzAAM0iESCFWX81sgIE8HQQBm4BagPvA9NG10lJHdvwTg1Ved1EYhhBCijpFAqLpklLLrnQm4GYgHDsG0wToC+uKL8ncUFkIIIYRjSCBUXTzL2PWuP3qm1vfQrfmv9O2ZRG5u4bJpIYQQjpGeDi1b6iIpNgzAIB0igVB1adgbvMIovDeWzxPoAWwD0kOY9jdfAD74AJKTq7mNQghRiykFhw/rIhvHGIBBOkQCoeriYoauBcM8xYKh2wErsOs6brnVTNu2cPEifPRRNbdRCCFqMQ8P2LRJF5ksbQAG6RAJhKpT+HDo/SV4NbY/HwZEAl9swCU7kyef1Kdnz5a9LoQoS79+/ZgyZUqVXPvo0aOYTCZ27NhRJdcXzmE2Q69eusjyeQMwSIdIIFTdwofD7Ufhphi4biE0ulGfH+oN587BokXcfTc0agSnTsHixU5trajj4uLiMJvNDB48uMRzZQULy5Yto1+/fvj7++Pj40OnTp147rnnOH/+vEPbtnz5crv8ZZGRkcyePdsh1w4PDychIYEOHTo45HpCCOOSQMgZXMzQqB9EjoZOz+lzXbPAG3jrLdwtisce06dffVXuZQt0DrrYWL2kMDa22nLSffzxxzz66KP89NNPnD59+pL1//GPfzBq1Ci6devGd999x549e3jttdfYuXMnn332mUPalJ0/TBoYGIivr69Drln8+mazmeDgYFxdZc/Z2iQ3F5Yu1SU319mtEYbpECXKlJycrACVnJxcdW9itSr1bWelFqDU7a5KgVIbN6q//lLK21s//P77qnt7UXUyMjLUvn37VEZGxpVdaNkypcLC9C9DQQkL0+er0MWLF5WPj4/63//+p0aNGqVeeOEFu+fj4+MVoH7//XellFI///yzAtTs2bNLvd6FCxfKfK8TJ06ou+66S9WrV095eXmprl27qq1btyqllHrmmWdU586d1UcffaQiIyOVyWRSSinVt29f9fjjj9uO0UlrbKXAxo0b1fXXX688PDxUWFiYevTRR1Vqaqrt+SZNmqjnnntO3XPPPcrX11eNGzeuxPemlFKxsbGqW7duymKxqODgYPXUU0+pnJwc2/N9+/ZVjz76qJo+fbqqV6+eatSokXrmmWcu9WMW5XDYv6F8qamF/4SK/AoIZ6nCDqnM57eMCDmbyQStovXx7Z56HvVbbxEYCOPH69OywWIdtnw5jBxZcmOpU6f0+eXLq+ytlyxZQps2bWjdujV33303n3zyCaqc4ckFCxbg4+PDpEmTSn0+ICCg1POpqan07duXU6dOsXLlSnbu3MmMGTOwWq22OocPH2bZsmUsX7681Hk7y5cvJywsjOeee46EhAQS8nP7HTlyhIEDBzJixAh27drF4sWL2bRpE5MnT7Z7/auvvkrnzp35/fff+de//lXi+qdOneKWW26hW7du7Ny5k/fee4+PP/6Y559/3q7e/Pnz8fb25ueff+bll1/mueeeY+3atWX+zET1cnGBvn11cZFPP+czSoc4NASrZaplREgppXJSlVrir0eFOqOU2azU8eMqPl4pFxcdLBf5w1TUEFf812xubsmRoKLFZFIqPFzXqwLXXXedbXQnJydHNWjQQMXExNieLz5qMmjQINWpU6dKv88HH3ygfH191V9//VXq888884xyc3NTZ8+etTtfdERIKT2y88Ybb9jVGT9+vJowYYLduY0bNyoXFxdbvzRp0kQNHTrUrk7x7+3vf/+7at26tbJarbY6c+bMUT4+PiovL8/Wnuuvv97uOt26dVNPPfVU+T8AUSZHjwiJukNGhGoaV29o9oA+vitQz/947z0iI+GOO/Tp115zWuuEs2zcWP4W40rBiRO6noMdOHCAbdu2MXr0aABcXV0ZNWoUH3/8cTnNubzJbDt27ODqq68mMDCwzDpNmjShYcOGlb72zp07mTdvHj4+PrYSFRWF1WolPj7eVu+aa64p9zr79++nZ8+emEyFW1/06tWL1NRUThbpo06dOtm9LiQkhLNnz1a63UKI6lOpQGjWrFl069YNX19fgoKCGDp0KAcOHChRLy4ujhtvvBFvb2/8/Pzo06cPGRkZtufPnz/P2LFj8fPzIyAggPHjx5Oammp3jV27dtG7d288PDwIDw/n5ZdfLvE+S5cupU2bNnh4eNCxY0dWr15t97xSiqeffpqQkBA8PT3p378/hw4dqsy3XH1aPqK/RlyAhsCHH0JGBtOm6dOLFunPPFGHJJSSluVK6lXCxx9/TG5uLqGhobi6uuLq6sp7773HsmXLSC5jp89WrVrxxx9/kJOTU6n38vT0vGQdb2/vSl2zQGpqKg8//DA7duywlZ07d3Lo0CGaN29+xdcvzs3Nze6xyWSyu8UnhDCeSgVCGzZsIDo6mq1bt7J27VpycnIYMGAAaWlptjpxcXEMHDiQAQMGsG3bNn755RcmT56MS5H7f2PHjmXv3r2sXbuWVatW8dNPPzFhwgTb8ykpKQwYMIAmTZqwfft2XnnlFZ599lk+/PBDW50tW7YwevRoxo8fz++//87QoUMZOnQoe/bssdV5+eWXeeutt3j//ff5+eef8fb2JioqiszMzMv6YVUpv5YQEgUoGO4Hf/0FixZxzTXQrx+SdqMuCikjLcvl1qug3NxcPv30U1577bUSAURoaChffPFFqa8bM2YMqampvPvuu6U+n5SUVOr5Tp06sWPHjiteXm+xWMgrtpquS5cu7Nu3jxYtWpQoFoulwtdu27YtcXFxdqNemzdvxtfXl7CwsCtqt6g+GRlw1VW6FPnbXDiLUTrkSu7BnT17VgFqw4YNtnM9evRQ//znP8t8zb59+xSgfvnlF9u57777TplMJnXq1CmllFLvvvuuqlevnsrKyrLVeeqpp1Tr1q1tj++88041ePBgu2v36NFDPfzww0oppaxWqwoODlavvPKK7fmkpCTl7u6uvvjiiwp9f9U2R6jAiZV6ntBnXkq5odRVVylltapvv9VTQnx9lUpKqp6miCvnsDlCJlO1zhFasWKFslgsKqmUX7YZM2aoa665RilVch5NwfNms1lNnz5dbdmyRR09elT9+OOPauTIkWWuJsvKylKtWrVSvXv3Vps2bVJHjhxRX375pdqyZYtSqnDVWHHF5wjdfPPN6vbbb1cnT55Uf/75p1JKqZ07dypPT08VHR2tfv/9d3Xw4EH11VdfqejoaNvrSptbVPx7O3nypPLy8
lLR0dFq//796quvvlINGjSwWxVWvD1KKTVkyBA1bty4Ur9vcWmyaqyWqw2rxgqGyAvu7Z89e5aff/6ZoKAgrrvuOho1akTfvn3ZtGmT7TVxcXEEBATY3ZPv378/Li4u/Pzzz7Y6ffr0sfuLLSoqigMHDnDhwgVbnf79+9u1Jyoqiri4OADi4+NJTEy0q+Pv70+PHj1sdQwn9BbwbgIu6dDXDXbsgE2bGDgQ2rXTaTeKDIqJ2s5sLhwGNBVLy1LwePZsh+/I+vHHH9O/f3/8/f1LPDdixAh+/fVXdu3aVeprX3rpJRYuXMjPP/9MVFQU7du354knnqBTp06MGzeu1NdYLBZ++OEHgoKCuOWWW+jYsSMvvvgi5kp+X8899xxHjx6lefPmtvlEnTp1YsOGDRw8eJDevXtz9dVX8/TTTxMaGlqpazdu3JjVq1ezbds2OnfuzMSJExk/fjz//Oc/K3Ud4VweHvDDD7pIig0DMEqHXG60lZeXpwYPHqx69eplOxcXF6cAFRgYqD755BP122+/qSlTpiiLxaIOHjyolFLqhRdeUK1atSpxvYYNG6p3331XKaX/siu+0mPv3r0KUPv27VNKKeXm5qYWLlxoV2fOnDkqKChIKaXU5s2bFaBOnz5tV+eOO+5Qd955Z6nfU2ZmpkpOTraVEydOVO+IkFJK7X1Rjwq930BHySNHKqWU+uQT/bBxY6WKDJQJA6vSfYTCw6t8HyEhnE1WjYnLVS0jQtHR0ezZs4dFixbZzhVMCnz44Ye5//77ufrqq3njjTdo3bo1n3zyyRUFbNVh1qxZ+Pv720p4eHj1N6LZeHBxB99z0BxYsQKOH2fMGD0V5NQpPXFa1CHDh8PRoxATAwsX6q/x8fq8EEKIK3JZgdDkyZNZtWoVMTExdhMFQ/InbbZr186uftu2bTl+/DgAwcHBJZaT5ubmcv78eYKDg211zpw5Y1en4PGl6hR9vujrSqtT3MyZM0lOTraVE85YpuXRAJqM0sf3NLItpXd3R9Ju1GVms541P3q0/ioZI4WotNxc+PZbXSTFhgEYpEMqFQgppZg8eTIrVqxg/fr1NG3a1O75yMhIQkNDSyypP3jwIE2aNAGgZ8+eJCUlsX37dtvz69evx2q10qNHD1udn376yW4Z7tq1a2ndujX16tWz1Vm3bp3d+6xdu5aePXsC0LRpU4KDg+3qpKSk8PPPP9vqFOfu7o6fn59dcYqW+TtNt/wLfLEtpX/4YfD2ht279S1VIYQQFZeVBbfeqktWlrNbIwzTIZW55/bII48of39/FRsbqxISEmwlPT3dVueNN95Qfn5+aunSperQoUPqn//8p/Lw8FCHDx+21Rk4cKC6+uqr1c8//6w2bdqkWrZsqUaPHm17PikpSTVq1Ejdc889as+ePWrRokXKy8tLffDBB7Y6mzdvVq6ururVV19V+/fvt+0+u3v3bludF198UQUEBKivv/5a7dq1Sw0ZMkQ1bdq0wvebq33VWFHfXaPnCt0ToOeE/Pe/SimlpkzRD2+6qfqbJCpH5jcIcWUc/W8oPV2pa67RpcjHlnCWKuyQynx+VyoQolhSw4Iyd+5cu3qzZs1SYWFhysvLS/Xs2VNt3LjR7vm//vpLjR49Wvn4+Cg/Pz91//33q4sXL9rV2blzp7r++uuVu7u7aty4sXrxxRdLtGfJkiWqVatWymKxqPbt26tvv/3W7nmr1ar+9a9/qUaNGil3d3d10003qQMHDlT4+3VqIHRkrg6E5tdTyoRSnTopZbWqo0d1Bg5Q6rffqr9ZouIkEBLiysi/IXG5KvP5bVJKZpuUJSUlBX9/f5KTk6v/NlluBnwVBtnn4R0LxGVDbCz07cuYMfDFFzBmDCxYUL3NEhWXmZlJfHw8TZs2xUPW6gpRafJvSFyuynx+S64xo3L1hOb56efHBOmvb70FYEu7sXgx5M9BF0IIIcRlkEDIyFo+Apgg8CQEA199BceO0aUL3HijXlA2e7ZzmyiEEDVFRgb06qWLpNgwAIN0iARCRubTVO82DXB/OFitkJ/HqWBU6KOPoIwUTkIIIYqwWmHLFl0kF64BGKRDJBAyulb5S+k7nAd3dOSTns7AgdChA6SmwgcfOLWFQjhNZGQks6t4WLQ63iM2NhaTyWRLTDtv3jwCAgJszz/77LNcddVVl339S13vvvvuY+jQoZd9/StRHT/fAu7ueo/aFSv0sXAyg3SIBEJGFxIFPs2BNLi9Ply4AAsWYDLBk0/qKm++CdnZTm2lqGXuu+8+TCYTL774ot35r776ClPxvGfVoPgHeYFffvmFCRMmVHt7qtqoUaM4ePBglV1/2rRpJfZhqwtcXWHoUF1cXZ3dGmGUDpFAyOhMLvlzhYDB+RHzW2+BUowZA6GhkJCgMy+IWsyaB2di4egX+qs1r8rf0sPDg5deesmW6NiIGjZsiJeXl7Ob4XCenp4EBQVV2fV9fHyoX79+lV1fiJpEAqGaoNn9YPYAt9PQyR327IHYWCwWSbtRJ5xYDisjYd0NsGWM/royUp+vQv379yc4OJhZs2aVW2/Tpk307t0bT09PwsPDeeyxx0hLS7M9n5CQwODBg/H09KRp06YsXLiwxO2Q119/nY4dO+Lt7U14eDiTJk0iNTUV0LeN7r//fpKTkzGZTJhMJp599lnA/rbKmDFjGDVqlF3bcnJyaNCgAZ9++img8yHOmjWLpk2b4unpSefOnfnyyy8r9XM5fvw4Q4YMwcfHBz8/P+68884SqXyef/55goKC8PX15cEHH+Rvf/tbpW5tlTUCVuDIkSM0a9aMyZMno5QiKyuLadOm0bhxY7y9venRowexsbFlvr6sW22vvvoqISEh1K9fn+joaLvd/S9cuMC9995LvXr18PLyYtCgQRw6dMju9cuWLaN9+/a4u7sTGRnJa6+9Zvf82bNnue2222y/Cwuqef+PvDy9C0lsrD4WTmaQDpFAqCZwD4QmY/TxA/mJYPOX0j/8MPj4wN69sGaNk9onqs6J5bBxJKSftD+ffkqfr8JgyGw285///Ie3336bkydPllrnyJEjDBw4kBEjRrBr1y4WL17Mpk2bmDx5sq3Ovffey+nTp4mNjWXZsmV8+OGHJfINuri48NZbb7F3717mz5/P+vXrmTFjBgDXXXcds2fPxs/Pj4SEBBISEphWsFqgiLFjx/LNN9/YAiiA77//nvT0dIYNGwboxMqffvop77//Pnv37mXq1KncfffdbNiwoUI/E6vVypAhQzh//jwbNmxg7dq1/PHHH3YB2IIFC3jhhRd46aWX2L59OxEREbz33nsVun5F7Nq1i+uvv54xY8bwzjvvYDKZmDx5MnFxcSxatIhdu3Zxxx13MHDgwBKBSnliYmI4cuQIMTExzJ8/n3nz5jFv3jzb8/fddx+//vorK1euJC4uDqUUt9xyiy1Y2r59O3feeSd33XUXu3fv5tlnn+Vf//pXiWucOHGCmJgYvvzyS959990SvwtVKTMTbrhBl8zMantbURajdEgVb+5Yozl1Z+ni/tqud5pe6KqUP0q5
uCj1xx9KKaWmTtU7Td94o5PbKOxc8a64eblKrQjT/V5qMSm1IlzXc7Bx48apIUOGKKWUuvbaa9UDDzyglFJqxYoVquh/G+PHj1cTJkywe+3GjRuVi4uLysjIUPv371eA+uWXX2zPHzp0SAHqjTfeKPP9ly5dqurXr297PHfuXOXv71+iXpMmTWzXycnJUQ0aNFCffvqp7fnRo0erUaNGKaWUyszMVF5eXmrLli121xg/frxdip/y3uOHH35QZrNZHT9+3Pb83r17FaC2bdumlFKqR48eKjo62u4avXr1Up07dy7zPWJiYhSgLly4UOr3+8wzz6jOnTurzZs3q3r16qlXX33V9tyxY8eU2WxWp06dsrvmTTfdpGbOnFnu9QqMGzdONWnSROXmFv4u3XHHHbaf3cGDBxWgNm/ebHv+3LlzytPTUy1ZskQppdSYMWPUzTffbNeG6dOnq3bt2imllDpw4IDdz0kpZfv9KOt3wdE7S6elKdWunS5paQ65pLgSVdghlfn8lhGhmiKwCzToCSoXHmxut5R+yhSdjHz9evjtN+c2UzjQnxtLjgTZUZB+QterQi+99BLz589n//79JZ7buXMn8+bNw8fHx1aioqKwWq3Ex8dz4MABXF1d6dKli+01LVq0sCVPLvDjjz9y00030bhxY3x9fbnnnnv466+/SE9Pr3A7XV1dufPOO223W9LS0vj6668ZO3YsAIcPHyY9PZ2bb77Zrr2ffvopR44cqdB77N+/n/DwcMLDw23n2rVrR0BAgO3nc+DAAbp37273uuKPL8fx48e5+eabefrpp3myYKUEsHv3bvLy8mjVqpXd97Vhw4YKf18A7du3x2w22x6HhITYRmv279+Pq6urLTE2QP369WndurXt+96/fz+9evWyu2avXr04dOgQeXl5tmt07drV9nybNm3KvQXoaF5eevR87159LJzMIB0i8+ZrkpbRcC4Ouibpm5r//S88+ywREd6MGqUnTL/6qkycrjUyEhxb7zL16dOHqKgoZs6cyX333Wf3XGpqKg8//DCPFUxWKyIiIqJCK5+OHj3KrbfeyiOPPMILL7xAYGAgmzZtYvz48WRnZ1dqMvTYsWPp27cvZ8+eZe3atXh6ejJw4EBbWwG+/fZbGjdubPc69xqwlrphw4aEhobyxRdf8MADD9jSBqSmpmI2m9m+fbtdIAN6UnRFubm52T02mUxYZbMdUQfIiFBNEjES3BuC+gsGN9I7Keb/9VswZWLJEjh2zHlNFA7kGeLYelfgxRdf5JtvviEuLs7ufJcuXdi3bx8tWrQoUSwWC61btyY3N5fff//d9prDhw/brUTbvn07VquV1157jWuvvZZWrVpx+vRpu/exWCzkVWAy5XXXXUd4eDiLFy9mwYIF3HHHHbYP+Hbt2uHu7s7x48dLtLXoCE952rZty4kTJzhx4oTt3L59+0hKSqJdu3YAtG7dml9++cXudcUfXw5PT09WrVqFh4cHUVFRXLx4EYCrr76avLw8zp49W+L7Cg4OvuL3Bf195+bm8vPPP9vO/fXXXxw4cMD2fbdt25bNmzfbvW7z5s20atUKs9lMmzZtyM3NZfv27bbnDxw4YNs7SQhnkUCoJjG7Q4uH9PFwX/01fyn91VfDTTdJ2o1apWFv8AoDytq3xwRe4bpeFevYsSNjx47lrfxJ+gWeeuoptmzZwuTJk9mxYweHDh3i66+/tk2WbtOmDf3792fChAls27aN33//nQkTJuDp6Wnbj6hFixbk5OTw9ttv88cff/DZZ5/x/vvv271PZGQkqamprFu3jnPnzpV7y2zMmDG8//77rF271nZbDMDX15dp06YxdepU5s+fz5EjR/jtt994++23mT9/foV+Dv3797f9LH777Te2bdvGvffeS9++fbnmmmsAePTRR/n444+ZP38+hw4d4vnnn2fXrl0O2X/J29ubb7/9FldXVwYNGkRqaiqtWrVi7Nix3HvvvSxfvpz4+Hi2bdvGrFmz+Pbbb6/4PQFatmzJkCFDeOihh9i0aRM7d+7k7rvvpnHjxgwZMgSAJ598knXr1vF///d/HDx4kPnz5/POO+/YJra3bt2agQMH8vDDD/Pzzz+zfft2HnzwQTw9PR3SxorIyICbb9ZFUmwYgEE6RAKhmqbFw3pvIcthaOGp763GxAAwfbqu8tFHet9FUcO5mKHrm/kPin+I5j/uOlvXqwbPPfdciVslnTp1YsOGDRw8eJDevXtz9dVX8/TTTxMaGmqr8+mnn9KoUSP69OnDsGHDeOihh/D19bVlE+/cuTOvv/46L730Eh06dGDBggUlluxfd911TJw4kVGjRtGwYUNefvnlMts5duxY9u3bR+PGjUvMWfm///s//vWvfzFr1izatm3LwIED+fbbb2natGmFfgYmk4mvv/6aevXq0adPH/r370+zZs1YvHix3fvPnDmTadOm0aVLF+Lj47nvvvsclj3dx8eH7777DqUUgwcPJi0tjblz53Lvvffy5JNP0rp1a4YOHcovv/xCRESEQ94TYO7cuXTt2pVbb72Vnj17opRi9erVthG3Ll26sGTJEhYtWkSHDh14+umnee655+xup86dO5fQ0FD69u3L8OHDmTBhQpXul1Sc1Qo//qiL3PUzAIN0iEkp2X2mLCkpKfj7+5OcnGy7H28IPw2Dk19BQkeYthuGDIGvvkIp6NwZdu+GWbPgb39zdkPrtszMTOLj42natOmVfQieWA7bH7efOO0VroOg8OFX3M7qdvLkScLDw20TpOuCm2++meDgYD777DNnN6VGcdi/oXy5uVAQs44aJbtLO10VdkhlPr/l16AmahWtA6HGf4AHsHIlxMdjatqUadNg3DiddmPqVMmnUyuED4fGQ/TqsIwEPSeoYe9qGwm6UuvXryc1NZWOHTuSkJDAjBkziIyMpE+fPs5uWpVIT0/n/fffJyoqCrPZzBdffMGPP/7I2rVrnd20Os/VFYrcLRXOZpAOkVtjNVGjm8CvNVjT4KE2ekvpOXMAuOsuaNwYEhNl9Vit4mKGRv0gcrT+WkOCINC7O//973+nffv2DBs2jIYNGxIbG1tilVJtYTKZWL16NX369KFr16588803LFu2jP79+zu7aUKIUsitsXIY9tYYwIG39O0SlwgYfRz8/eHkSfDx4ZVXYMYMaNdO3yZzkXDXKRw9rC9EXePof0N5eYV7rXXpovdfE05UhR1Smc9v+YisqZqOA1dvsB6Hm0IhORk+/xyACRPA1xf27ZO0G0IIUSAzE7p310VSbBiAQTpEAqGayuIPkXfr47EN9df8pfT+/joYAr3BohBCCDCZoEkTXRywm4G4UgbpEAmEarJW0fqrxx4I84L9+2HdOgAef1zPQ4uJgSL7lwknkLvPQlweR//b8fKCo0d1kRQbBmCQDpFAqCYL6KhXD6k8mNxen8vf8C48XE+cBhkVcpaCycCVyZclhChU8G+ntk6sF8Ygk6XLYejJ0gWOLYbNd4FrQxjzJ1hNcOgQNG/Ozp1w1VV6/tnhwxAZ6ezG1j0JCQkkJSU
RFBSEl5eXQ3YXFqK2U0qRnp7O2bNnCQgIICSk6tPIiNpF9hGqS8KGgUcwZCbCg53hg516Kf3rr9O5s965fO1anXZDUm9Uv4JcTwVZvIUQFRcQEOCwfGmg5+MWjJQvWgSymNPJDNIhMiJUjhoxIgSw6xnY8xy4todRe8HPD06dAh8ffvgBoqLA2xtOnIB69Zzd2LopLy+PnJwcZzdDiBrDzc0Ns4PXt6elgY+PPk5N1f8vCieqwg6REaG6psUE2PsC5O6F6yNg03H49FOYNImbb4ZOnWDXLnj/fZg509mNrZvMZrPD/1MXQlSOxQIfflh4LJzMIB0iI0LlqDEjQgAb74ATX0LmdTB+C7RpoxOyurjw2Wdw770QHKwn50vaDSGEELWZbKhYFxUspffeAUE+8L//6Yy+2KfdWLDAeU0UQgghjEYCodoiqC/4t4e8dHi8iz6Xv5TezQ2mTNGnXn0VrFbnNFEIIZzJatUD5Xv3yv+DhmCQDpFAqLYwmQpHhdqcABPw7bd6KT16p2k/P73n4nffOa+ZQgjhLBkZ0KGDLhkZzm6NMEqHSCBUm0TeDa6+kBkP93fT5/Kz0vv5FabdeOUVJ7VPCCGcrEEDXYRBGKBDJBCqTdx8odk4fTwwf0HgJ5/AxYtAYdqNDRvgl1+c1EYhhHASb2/4809dZOm8ARikQyQQqm1aTtJfc3+Gbs10EPTppwCEhcHo0fppSbshhBBCSCBU+/i3hUY3grLCwy30ubfftk1EmzZNn/ryS4iPd1IbhRBCCIOQQKg2Kpg07f8bBPrCgQM6zwZ6c8UBA3Rc9MYbTmyjEEJUs8xMGDtWl8xMZ7dGGKVDZEPFctSoDRWLsubCyqaQfhL+iIJ/fQ+33KJXkaG3F7r5ZvDy0mk3AgOd3F4hhKgGkmLDYAySYkNGhGojF1do8bA+7nhGL61fvdq2lP6mm3RW+vR0eO895zVTCCGqk8WiR8LfeENSbBiCQTpERoTKUWNHhAAyzsDX4WDNge+uh883wWOPwZtvAnqH6bvvhkaNdNoNycIshBCitpARIQGejSB8pD4emj/cOHcupKQAcOedEB4OZ87A5587qY1CCCGEk0kgVJsVTJrO3QBXtdRL6efPB+zTbrz2mmw3L4So/axWPQJ+9Kj8n2cIBukQCYRqswbXQUBnyMuEyR30uSJL6R98UO84/b//2eZRCyFErZWRAU2b6iIpNgzAIB0igVBtVjT/WP2d4O+rJ0x//z2gg6CJE/XTssGiEKIu8PLSRRiEATpEAqHaLnIMuPlD+h8w5SZ9Lj8rPej5025u8NNPsG2bk9oohBDVwNtbr9hOS5Ol84ZgkA6RQKi2c/WGZvfr4+7JepRozRq9ySLQuDGMGaOfllEhIYQQdY0EQnVBQf6xpFgYdYM+fucd29NPPqm/LlsGf/xRvU0TQgghnEkCobrAryUEDwAUjKqvz82bB8nJAHTsCAMHStoNIUTtlpUFDz2kS1aWs1sjjNIhEgjVFbal9OugY2u9nfm8ebanC5KxfvIJ/PVX9TdPCCGqWm4u/Pe/uuTmOrs1wigdIoFQXRE6GLybQPZ5eLy7PldkKf2NN8LVV0vaDSFE7eXmBs8/r4ubm7NbI4zSIZJioxw1OsVGafa9BDv+Bv5Xw9gjkJwCq1bB4MEALFyokwAHBcGxY5J2QwghRM0kKTZE6ZqNBxd3SP4dHrtVnyuylP6OOyAiAs6ehc8+c1IbhRBCiGokgVBd4tEAmozSx9dn6KX0P/wA+/cDknZDCFG7KQV//qmL3AsxAIN0iARCdU3L/EnT57+FO6L0cZGl9A8+CP7+epuhVauc0D4hhKgi6en61n9QkD4WTmaQDpFAqK5p0B0CrwFrNowN0+fmz7ctpff1LUy78corTmqjEEIIUU0kEKqLCpbSq7XQoZ3e3nzuXNvTBWk3Nm2CrVud1EYhhHAwb299B0YpSbFhCAbpEAmE6qKIUWAJhLRjMKWfPvf225CXB0BoqF49BpJ2QwghRO0mgVBd5OoJzcfr44gDUK+ezq3x3Xe2KgUbLC5fDkeOOKGNQgghRDWoVCA0a9YsunXrhq+vL0FBQQwdOpQD+ck7i1NKMWjQIEwmE1999ZXdc8ePH2fw4MF4eXkRFBTE9OnTyS22q2RsbCxdunTB3d2dFi1aMK/ILsgF5syZQ2RkJB4eHvTo0YNtxdKnZ2ZmEh0dTf369fHx8WHEiBGcOXOmMt9y7dXyEcAEf66D6OH6XJGl9O3bw6BBesTy9ded00QhhHCkrCy9MnbKFEmxYQgG6ZBKBUIbNmwgOjqarVu3snbtWnJychgwYABpaWkl6s6ePRuTyVTifF5eHoMHDyY7O5stW7Ywf/585s2bx9NPP22rEx8fz+DBg7nhhhvYsWMHU6ZM4cEHH+T777+31Vm8eDFPPPEEzzzzDL/99hudO3cmKiqKs2fP2upMnTqVb775hqVLl7JhwwZOnz7N8OHDK/Mt114+TSH0Fn18owIXF1i7Fvbts1WZPl1/nTsXzp1zQhuFEMKBcnPhzTd1kRQbBmCUDlFX4OzZswpQGzZssDv/+++/q8aNG6uEhAQFqBUrVtieW716tXJxcVGJiYm2c++9957y8/NTWVlZSimlZsyYodq3b293zVGjRqmoqCjb4+7du6vo6Gjb47y8PBUaGqpmzZqllFIqKSlJubm5qaVLl9rq7N+/XwEqLi6uQt9fcnKyAlRycnKF6tc4p1YrtQCllvgrNeJWPWXtkUdsT1utSnXpok//+9/Oa6YQQjhCVpZSf/+7LvkfN8KZqrBDKvP5fUVzhJLzl1wHBgbazqWnpzNmzBjmzJlDcHBwidfExcXRsWNHGjVqZDsXFRVFSkoKe/futdXp37+/3euioqKIi4sDIDs7m+3bt9vVcXFxoX///rY627dvJycnx65OmzZtiIiIsNUpLisri5SUFLtSq4VEgU9zyEmG+1vpc/PnQ1ISoPdbLBgVeucdyMhwTjOFEMIRLBZ44QVdLBZnt0YYpUMuOxCyWq1MmTKFXr160aFDB9v5qVOnct111zFkyJBSX5eYmGgXBAG2x4mJieXWSUlJISMjg3PnzpGXl1dqnaLXsFgsBAQElFmnuFmzZuHv728r4eHhl/gp1HAml/y5QoBpHXRorze1+uQTW5WRI6FJE73x56efOqmdQgghRBW57EAoOjqaPXv2sGjRItu5lStXsn79embPnu2ItlW7mTNnkpycbCsnTpxwdpOqXrP7wewBSTthik6+yjvv2JbSu7rC1Kn6tKTdEELUZErpbdPS0iTFhiEYpEMuKxCaPHkyq1atIiYmhrCwMNv59evXc+TIEQICAnB1dcXV1RWAESNG0K9fPwCCg4NLrNwqeFxwK62sOn5+fnh6etKgQQPMZnOpdYpeIzs7m6T82zyl1SnO3d0dPz8/u1LruQdCkzH6uGW8XkofHw/ffmur8sADEBAAhw7BypXOaaYQQlyp9HTw8dFFUmwYgEE6pFKBkFKKyZMns2LFCtavX0/Tpk3tnv/b3/7Grl272LFjh60AvP
[... truncated base64 "image/png" payloads omitted: old and regenerated matplotlib figure outputs embedded in the notebook diff ...]
KYGUvY519WudOAG7d1/8+5WU5LifRTESCAkhXM/LqFPkN96CwQCt6+0FwGBQQP68gS6zCydK+wRD7St1KyonGVL26QApOT9AStkDWaf1KFL6X3ByRZEnGCCose2ttdC2ENICjFJ30CP06qVvtZw4UXJOChTeiunVy7H9MBgKbw/5+Dj2tdzZ+vVwTQXmA0ZGXvwYO5E5QuWQOUJCONmxZbD9IT2iUyAgRgdBl5M6n32ucNQopUiAlH2u9OMNRghuWnIEKbgZeFXjN7GqqmByLtgGQ5I15nxmMzRsePHA1IlzhCQQKocEQkK4gLNWllYKss7YBkYpe/VIUm5y6c/x8oHg5oUBUlj+CFJQE0nrd3elrSMUEwOzZ0sQ5GxOCEwlELITCYSEqIaUgsyTtnOPCrbzykjZ9vKFkJbFbrG1gaBGYHBdVW1RjKws7T4cHJhKIGQnEggJ4XyZmXDzzXr7m2/suur+5VEKMhJt5x6l7NVzksyZpT/HGAChrWwz2MLaQED96rNmjBBlSUvTE9Szs2HWLF1uQ1aWdi8SCAnhfOnpEJSf6Z6WBoGBru3PRSkLpCWUHEFK/QMsZaxj4x2sU/qLr4HkHyUBkqg+HHixyzpCQogqy9cXPvmkcNvtGbwguIlu0TcX7rfkQdrhwnlHBSNIqQcg7wKc26JbUT5h+fOO2thO1PYLlwBJeB43udhlRKgcMiIkhLA7Sy5cOFQyg+3Cn6DKqNzuW7v0VbSlzIgQpZIRISGEcFdePvm3xVrb7jdn69Gi4otEph2B7LNwZoNuRdmUGSkyiiRlRoSoMAmEhBBuxWyG337T2507V6OkHqMv1GivW1F5GXq+UfERpPLKjPjXK7lIpJQZEe7GTS52uTVWDrk1JoTzVbnJ0q5iLTOy13YUqehilMVJmRHhTmSytBBClGQwQIMGhduiDBctM1JsDaSCYrVSZkS4Cze52GVEqBwyIiSE8BgFZUZsAiQpMyI8k6wjZCcSCAkhPJqUGREeSm6NCSGEuDiDAfzr6hZxbeH+i5UZKdguSsqMiCpKAiEhhFvJyoLbb9fbixeDn59r+1MtGQwQUE+3yP6F+5WCjGMlM9gKyowk79StKCkzIsriJhe73Borh9waE8L5JGusClIWSD9qGyAl75EyI6J8kjUmhBAlmUzw7ruF26IKMHjpzLOgxqWUGTmiR42KFqu9cNA1ZUYsZvh7I2QmgX8k1Okl85pcyU0udhkRKoeMCAkhhAMUlBkpPv/owiHHlRk5tgy2P2S7zlJANHR5HWKGXf7XJNyKZI3ZiQRCQgjhRNYyI8Uy2NIOA2W8VVWkzMixZbDxllLOkT/C1OtLCYY8jARCdiKBkBDOZ7HA/v16u1Ur8JKEI1FQZqT4CFL60bKfU1BmJKQVJCyEnH/KONCgR4ZuTpDbZM7mwItdAiE7kUBICOeTydKiwnLTClfRrmiZkbI0HQ81u+g5Sqawwo+mMPAJlVW2HUEmSwshROlq13Z1D0SV4BN08TIjx5ZC0g8XP9ef75b/uNHfNkCyCZRK2fYJtf1cAqnSucHFLoGQEMKtBAbC33+7uheiSjOFQZ0eugU3q1ggFBkLRj8dROUk65W1c5IhN0U/bs6EzEydcXYpjH6VCKJK2eeJgZS/H+xZor+nadvA3zVZfBIICSGE8Fx1euk5QBknKH3Cdf4coT4rSn8Ttph1qn/R4KisbZt9KUUCKQXmLDCf0sVvL4WnBVJulMUngZAQQgjP5WXUb64bb0FniRUNhvKzxrrMLnskwstYGFRcCmWB3AuVDKKSbQMqlwdSofr59lJWFl/GCb3fyVl8Mlm6HDJZWgjny8qCsWP19gcfSIkNYSeljkDE6CDInVPn7RVIXS4v34qNPJUVZBUEUhYzfNNQ/xxygPfyzz8OMIG9svgka8xOJBASwvkka0w4THVcWdrdAikvk65XB2AG1gPpQH+g6D89162Dun0v+eUka0wIUWWZTPDaa4XbQtiNl/Gy3lyrJIOXvrVlCoXABpV/fvFAqmDuU2UCKpSuOZd12vbcRuA6dEBUPNa61Enpl0ACISGEW/HxgSlTXN0LIQRgn0AqL60wODq9Dn6bYntMaYNy/pGVf61LJGu2CiGEEMIxDF7gEwKB9aFGe2g+Sc8BoqwCugY9d6tOL6d1UQIhIYRbsVjg6FHdLBZX90YIYVcFWXwAFuDv/GaBCmXxOaJLTnslIYSogMxMaNRIt8xMV/dGCGF3McN0irx3FExBtxz0SJELCuDKHCEhhNsJCHB1D4QQDhUzDAb1A/+6gIK+y6Fhf1lZWgghAgN1Cr0QwsMFh0CG64d95daYEEIIIaotCYSEEEIIUW1JICSEcCvZ2TBunG7Z2a7ujRDCYdzkYpcSG+WQEhtCOJ+U2BCimnDgxS4lNoQQVZaPDzz3XOG2EMJDucnFLiNC5ZARISGEEKLqqcz7t8wREkIIIUS1JbfGhBBuRSk4e1Zv164NhrJKEgkhqjY3udglEBJCuJWMDAgP19syWVoID+YmF7sEQuUomD6Vmprq4p4IUX0UXVU6NRXMZtf1RQjhQA682AvetysyDVoCoXJcuHABgJiYGBf3RIjqKSrK1T0QQjiFgy72CxcuEBoaWu4xkjVWDovFwsmTJwkODsZg53uXqampxMTEcOzYMclIcwPy83Av8vMQjiK/W+7FUT8PpRQXLlwgKioKL6/y88JkRKgcXl5eREdHO/Q1QkJC5GJ0I/LzcC/y8xCOIr9b7sURP4+LjQQVkPR5IYQQQlRbEggJIYQQotqSQMhFfH19efrpp/H19XV1VwTy83A38vMQjiK/W+7FHX4eMllaCCGEENWWjAgJIYQQotqSQEgIIYQQ1ZYEQkIIIYSotiQQEkIIIUS1JYGQkz3zzDMYDAab1rJlS1d3q9r46aefuOmmm4iKisJgMPDVV1/ZPD5mzJgSP58BAwa4prPVwMyZM+natSvBwcGEh4czZMgQDhw4YHNM3759S/xMHnjgARf1WFRFL7zwAgaDgSlTplj3ZWVlERcXR61atQgKCmL48OGcPn3adZ30YBd733P1z0ICIRdo06YNSUlJ1vbzzz+7ukvVRnp6Oh06dGDu3LllHjNgwACbn89nn33mxB5WLxs2bCAuLo5ffvmF1atXk5ubS//+/UkvWowRGDdunM3P5KWXXnJRj0VVs23bNt555x3at29vs//hhx/m22+/ZcmSJWzYsIGTJ08ybNgwF/XS85X3vufqn4WU2HABb29vIiIiXN2NamngwIEMHDiw3GN8fX3l5+Mkq1atsvl8wYIFhIeHs337dnr37m3dHxAQID8TUWlpaWmMGjWK9957j+eee866PyUlhQ8++IBFixZx7bXXAjB//nxatWrFL7/8wlVXXeWqLnusst733OFnISNCLnDo0CGioqJo3Lgxo0aNIjEx0dVdEkWsX7+e8PBwWrRowYQJEzh37pyru1RtpKSkAFCzZk2b/Z9++im1a9embdu2zJgxg4yMDFd0T1QxcXFxDBo0iH79+tns3759O7m5uTb7W7ZsSf369Ym
Pj3d2N6uFst733OFnISNCTtatWzcWLFhAixYtSEpK4j//+Q+9evViz549BAcHu7p71d6AAQMYNmwYjRo14vDhwzz++OMMHDiQ+Ph4jEajq7vn0SwWC1OmTKFnz560bdvWuv+OO+6gQYMGREVFsWvXLh577DEOHDjAsmXLXNhb4e4WL17Mb7/9xrZt20o8durUKUwmE2FhYTb769aty6lTp5zUw+qjvPc9d/hZSCDkZEVvy7Rv355u3brRoEEDvvjiC8aOHevCngmA22+/3brdrl072rdvT5MmTVi/fj3XXXedC3vm+eLi4tizZ0+JOXPjx4+3brdr147IyEiuu+46Dh8+TJMmTZzdTVEFHDt2jIceeojVq1fj5+fn6u5Ue+W97/n7+7uwZ5rcGnOxsLAwmjdvzp9//unqrohSNG7cmNq1a8vPx8EmTZrEd999x7p164iOji732G7dugHIz0SUafv27Zw5c4bOnTvj7e2Nt7c3GzZs4I033sDb25u6deuSk5NDcnKyzfNOnz4tc9GcoOj7XkREhMt/FhIIuVhaWhqHDx8mMjLS1V0RpTh+/Djnzp2Tn4+DKKWYNGkSy5cvZ+3atTRq1Oiiz9mxYweA/ExEma677jp2797Njh07rO2KK65g1KhR1m0fHx/WrFljfc6BAwdITEyke/fuLux59VD0fa9Lly4u/1nIrTEnmzZtGjfddBMNGjTg5MmTPP300xiNRkaOHOnqrlULaWlpNiMJCQkJ7Nixg5o1a1KzZk3+85//MHz4cCIiIjh8+DCPPvooTZs2JTY21oW99lxxcXEsWrSIr7/+muDgYOucgNDQUPz9/Tl8+DCLFi3ihhtuoFatWuzatYuHH36Y3r17l0iHFqJAcHCwzTwzgMDAQGrVqmXdP3bsWB555BFq1qxJSEgIkydPpnv37pIx5gDlve+Fhoa6/mehhFONGDFCRUZGKpPJpOrVq6dGjBih/vzzT1d3q9pYt26dAkq00aNHq4yMDNW/f39Vp04d5ePjoxo0aKDGjRunTp065epue6zSfhaAmj9/vlJKqcTERNW7d29Vs2ZN5evrq5o2baqmT5+uUlJSXNtxUeX06dNHPfTQQ9bPMzMz1cSJE1WNGjVUQECAGjp0qEpKSnJdBz3Yxd73XP2zMCillHNCLiGEEEII9yJzhIQQQghRbUkgJIQQQohqSwIhIYQQQlRbEggJIYQQotqSQEgIIYQQ1ZYEQkIIIYSotiQQEkIIIUS1JYGQEEIIIaotCYSEEEIIUW1JICSEEEKIaksCISGEEEJUWxIICSGEEKLa+n9QErKQPOX+ewAAAABJRU5ErkJggg==", "text/plain": [ "<Figure size 640x480 with 1 Axes>" ] @@ -446,7 +446,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 16, "id": "7d0d778f", "metadata": {}, "outputs": [ @@ -461,20 +461,20 @@ " - Ranks considered:[5, 15, 25, 40, 50]\n", " - BIC metric:\n", " rank 5 15 25 40 50\n", - " criterion 268485 202026 191816 191814 194678\n", + " criterion 268473 201896 191725 191802 194696\n", "\n", - " Best model(lower BIC): 40\n", + " Best model(lower BIC): 25\n", " \n", " - AIC metric:\n", " rank 5 15 25 40 50\n", - " criterion 266015 195603 181647 176415 176053\n", + " criterion 266004 195472 181556 176403 176070\n", "\n", " Best model(lower AIC): 50\n", "----------------------------------------------------------------------\n", "* Useful properties\n", " .BIC, .AIC, .loglikes\n", "* Useful methods \n", - " .show(), .best_model()\n", + " .show(), .best_model(), .keys(), .items(), .values()\n", "----------------------------------------------------------------------\n", "\n" ] @@ -502,7 +502,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 17, "id": "a4b86b44", "metadata": {}, "outputs": [ @@ -513,12 +513,12 @@ "A multivariate Poisson Lognormal with 50 principal component. 
\n", "======================================================================\n", " Loglike Dimension Nb param BIC AIC\n", - " -167078.03 200 8975 194678 176053\n", + " -167095.75 200 8975 194696 176070\n", "======================================================================\n", "* Useful properties\n", " .latent_variables, .model_parameters, .latent_parameters, .optim_parameters\n", "* Useful methods\n", - " .show(), .coef() .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()\n", + " .show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()\n", "* Additional properties for PlnPCA\n", " .projected_latent_variables\n", "* Additional methods for PlnPCA\n", @@ -540,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 18, "id": "abace013", "metadata": {}, "outputs": [ @@ -548,15 +548,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "A multivariate Poisson Lognormal with 40 principal component. \n", + "A multivariate Poisson Lognormal with 25 principal component. \n", "======================================================================\n", " Loglike Dimension Nb param BIC AIC\n", - " -168995.39 200 7420 191814 176415\n", + " -176656.73 200 4900 191725 181556\n", "======================================================================\n", "* Useful properties\n", " .latent_variables, .model_parameters, .latent_parameters, .optim_parameters\n", "* Useful methods\n", - " .show(), .coef() .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix()\n", + " .show(), .transform(), .sigma(), .predict(), .pca_projected_latent_variables(), .plot_pca_correlation_graph(), .viz(), .scatter_pca_matrix(), .plot_expected_vs_true()\n", "* Additional properties for PlnPCA\n", " .projected_latent_variables\n", "* Additional methods for PlnPCA\n", @@ -578,7 +578,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 19, "id": "8be11bac", "metadata": {}, "outputs": [ @@ -600,13 +600,13 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 20, "id": "212f3777", "metadata": {}, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAioAAAGdCAYAAAA8F1jjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd3jT1RrA8W92kzTdu6W0lL2XIEOWIKBwVVCGKENQUZQhLtyCiIoDRYaCggNFUXAAgsoQRfYS2bOD7pnOzN/9oxAampQWuoDzeZ4+9/Z3fuO0luTNGe8rkyRJQhAEQRAEoRaS13QHBEEQBEEQ3BGBiiAIgiAItZYIVARBEARBqLVEoCIIgiAIQq0lAhVBEARBEGotEagIgiAIglBriUBFEARBEIRaSwQqgiAIgiDUWsqa7sDVstvtJCYmYjAYkMlkNd0dQRAEQRDKQZIkcnNzCQsLQy53P25yzQcqiYmJ1KlTp6a7IQiCIAjCFYiPjyciIsJt+zUfqBgMBqD4B/Xy8qrh3giCIAiCUB5Go5E6deo43sfdueYDlQvTPV5eXiJQEQRBEIRrzOWWbYjFtIIgCIIg1FoiUBEEQRAEodYSgYogCIIgCLVWta1RefPNN5k2bRqTJk1izpw5ABQVFTF16lSWL1+OyWSib9++zJ8/n+Dg4OrqliAIQo2w2WxYLJaa7oYgVBmFQoFSqbzq1CHVEqjs2rWLjz/+mJYtWzodnzJlCmvWrGHFihV4e3vz+OOPM2jQILZu3Vod3RIEQagReXl5JCQkIElSTXdFEKqUTqcjNDQUtVp9xfeo8kAlLy+PESNGsGjRIl5//XXH8ZycHD799FO+/vprevXqBcCSJUto0qQJ27dv5+abb67qrgmCIFQ7m81GQkICOp2OwMBAkahSuC5JkoTZbCYtLY0zZ87QoEGDMpO6laXKA5UJEyZwxx130Lt3b6dAZc+ePVgsFnr37u041rhxYyIjI9m2bZvbQMVkMmEymRzfG43Gquu8IAhCJbNYLEiSRGBgIFqttqa7IwhVRqvVolKpiI2NxWw24+HhcUX3qdJAZfny5ezdu5ddu3aVaktOTkatVuPj4+N0PDg4mOTkZLf3nDVrFq+99lpld1UQBKFaiZEU4UZwpaMoTveohH64FB8fz6RJk1i2bNkVR1GuTJs2jZycHMdXfHx8pd1bEARBEITapcpGVPbs2UNqaipt27Z1HLPZbGzZsoWPPvqI9evXYzabyc7OdhpVSUlJISQkxO19NRoNGo2mqrotCMINym42Y01Lx5aeBjI5ygB/lEFByJTXfAJvQbimVdmIyq233srBgwfZv3+/46t9+/aMGDHC8f9VKhUbNmxwXHPs2DHi4uLo1KlTVXVLEAShFFteHrm//srpgQM5O3QYZ4cM4fSdd5H35xZshYU13T2hDD169GDy5MmVdj+ZTMaPP/5Yafe71pw9exaZTMb+/fsB2Lx5MzKZjOzs7BrrU5UFKgaDgebNmzt96fV6/P39ad68Od7e3owdO5Ynn3ySTZs2sWfPHsaMGUOnTp3Ejh9BEKqV+fRpEp99DqmgwHHMnptLwhNPYBHTywCMHj0amUzm+PL396dfv378+++/Nd014TpXo5lp33//fQYMGMDgwYPp1q0bISEhrFy5sia7JAjCDcZWUED6woWuG+12Mr/8ErvZXL2dKgebXWLbqQx+2n+ObacysNmrPidLv379SEpKIikpiQ0bNqBUKhkwYECVP1e4sVVroLJ582ZHVloADw8P5s2bR2ZmJvn5+axcubLM9SmCIAiVTSoowHzmjNt284mTSEVF1dijy1v3XxJd39rI8EXbmbR8P8MXbafrWxtZ919SlT5Xo9EQEhJCSEgIrVu35rnnniM+Pp60tLTLXvvss8/SsGFDdDod9erV46WXXnLKzPvqq6/SunVrvvzyS6KiovD29mbYsGHk5uY6zsnPz2fkyJF4enoSGhrKu+++W6H+R0VFMWPGDIYPH45eryc8PJx58+ZdVb8PHDhAz549MRgMeHl50a5dO3bv3g3A0qVL8fHxYfXq1TRq1AidTsc999xDQUEBn3/+OVFRUfj6+jJx4kRsNpvjnl9++SXt27fHYDAQEhLCfffdR2pqarl/zkOHDjFgwAC8vLwwGAzccsstnDp1ytG+ePFimjRpgoeHB40bN2b+/PnlvndsbCwDBw7E19cXvV5Ps2bNWLt2bbmvvxJilZggCDc0mVaLpkEDzGfOumzXNGmCrBblO1n3XxKPfrWXS8dPknOKePSrvSy4vy39modWeT/y8vL46quvqF+/Pv7+/pc932AwsHTpUsLCwjh48CAPPfQQBoOBZ555xnHOqVOn+PHHH1m9ejVZWVkMGTKEN998k5kzZwLw9NNP8+eff/LTTz8RFBTE888/z969e2ndunW5+z179myef/55XnvtNdavX8+kSZNo2LAhffr0uaJ+jxgxgjZt2rBgwQIUCgX79+9HpVI5ri8oKODDDz9k+fLl5ObmMmjQIO6++258fHxYu3Ytp0+fZvDgwXTp0oWhQ4cCxbl2ZsyYQaNGjUhNTeXJJ59k9OjR5QoIzp07R7du3ejRowcbN27Ey8uLrVu3YrVaAVi2bBkvv/wyH330EW3atGHfvn089NBD6PV6Ro0addn7T5gwAbPZzJYtW9Dr9Rw+fBhPT8/LXndVpGtcTk6OBEg5OTk13RVBEK5RBf/9Jx1u3EQ63Kix81fTZlLRyZOV+qzCwkLp8OHDUmFhYYWvtdrs0s1v/CHVfXa1y6+oZ1dLN7/xh2S12Su1z5IkSaNGjZIUCoWk1+slvV4vAVJoaKi0Z8+eK7rf7NmzpXbt2jm+f+WVVySdTicZjUbHsaefflrq2LGjJEmSlJubK6nVaum7775ztGdkZEharVaaNGlSuZ5Zt25dqV+/fk7Hhg4dKvXv39/xPSCtWrWq3P02GAzS0qVLXZ67ZMkSCZBOlvgbeuSRRySdTifl5uY6jvXt21d65JFH3D5z165dEuB0jTvTpk2ToqOjJbPZ7LI9JiZG+vrrr52OzZgxQ+rUqZMkSZJ05swZCZD27dsnSZIkbdq0SQKkrKwsSZIkqUWLFtKrr7562X5cUNbfe3nfv0X1ZEEQbniaqCjCP5iDokSqBEVAAHU+XogqIqLmOnaJnWcyScpxPw0lAUk5Rew8k1klz+/Zs6djF+fOnTvp27cv/fv3JzY29rLXfvvtt3Tp0oWQkBA8PT158cUXiYuLczonKioKg8Hg+D40NNQx5XHq1CnMZjMdO3Z0tPv5+dGoUaMK/QyX7irt1KkTR44cueJ+P/nkk4wbN47evXvz5ptvOk2xQHGtm5iYGMf3wcHBREVFOY1CBAcHO03t7Nmzh4EDBxIZGYnBYKB79+4ApX5fruzfv59bbrnFaVTngvz8fE6dOsXYsWPx9PR0fL3++uul+u3OxIkTef311+nSpQuvvPJKtSymFoGKIAg3PLlej6FXL6JXrSTqh++JXvkD0d+vQN+5M/JalLcpNbd8a2XKe15F6fV66tevT/369b
nppptYvHgx+fn5LFq0qMzrtm3bxogRI7j99ttZvXo1+/bt44UXXsB8ySLlS99cZTIZdru90n+O8ipPv1999VUOHTrEHXfcwcaNG2natCmrVq1ytLv6mcr6OfPz8+nbty9eXl4sW7aMXbt2Oe536e/LlbLKMuTl5QGwaNEip9Qh//33H9u3b7/svQHGjRvH6dOneeCBBzh48CDt27dn7ty55br2Sok1KoIgCIBMqUQVGooqtOrXd1ypIEP5snyX97yrJZPJkMvlFF4m18w///xD3bp1eeGFFxzHyjMKU1JMTAwqlYodO3YQGRkJQFZWFsePH3eMOJTHpW/I27dvp0mTJlfV74YNG9KwYUOmTJnC8OHDWbJkCXfffXe5+1TS0aNHycjI4M0336ROnToAjsW55dGyZUs+//xzLBZLqYAoODiYsLAwTp8+zYgRI66ofwB16tRh/PjxjB8/nmnTprFo0SKeeOKJK77f5YhARRAE4RrRIdqPUG8PknOKSi2mBZABId4edIj2q5Lnm0wmRy22rKwsPvroI/Ly8hg4cGCZ1zVo0IC4uDiWL1/OTTfdxJo1a5xGHcrD09OTsWPH8vTTT+Pv709QUBAvvPBChWvJbN26lbfffpu77rqL33//nRUrVrBmzZor6ndhYSFPP/0099xzD9HR0SQkJLBr1y4GDx5coT6VFBkZiVqtZu7cuYwfP57//vuPGTNmlPv6xx9/nLlz5zJs2DCmTZuGt7c327dvp0OHDjRq1IjXXnuNiRMn4u3tTb9+/TCZTOzevZusrCyefPLJy95/8uTJ9O/fn4YNG5KVlcWmTZvcBnqVRUz9CIIgXCMUchmvDGwKFAclJV34/pWBTVHIq6bg4bp16wgNDSU0NJSOHTuya9cuVqxYQY8ePcq87n//+x9Tpkzh8ccfp3Xr1vzzzz+89NJLFX7+7NmzueWWWxg4cCC9e/ema9eutGvXrkL3mDp1Krt376ZNmza8/vrrvPfee/Tt2/eK+q1QKMjIyGDkyJE0bNiQIUOG0L9//6sqnBsYGMjSpUtZsWIFTZs25c033+Sdd94p9/X+/v5s3LiRvLw8unfvTrt27Vi0aJFjdGXcuHEsXryYJUuW0KJFC7p3787SpUuJjo4u1/1tNhsTJkygSZMm9OvXj4YNG1Zoe/OVkEmSVPVZgqqQ0WjE29ubnJwcvLy8aro7giAIZSoqKuLMmTNER0dfccHWdf8l8dovh50W1oZ6e/DKwKbVsjX5WhUVFcXkyZMrNeW+ULay/t7L+/4tpn4EQRCuMf2ah9KnaQg7z2SSmltEkKF4uqeqRlIEoSaJqR9BEIRrkEIuo1OMP3e2DqdTjH+NBilvvPGG03bXkl/9+/evlj789ddfbvtQ5QnJqtH48ePd/ozjx4+v6e5VCTH1IwiCUI0qY+qntsnMzCQz03XuFq1WS3h4eJX3obCwkHPnzrltr1+/fpX3oTqkpqZiNBpdtnl5eREUFFTNPSqbmPoRBEEQapyfnx9+flWz06i8tFrtdROMlCUoKKjWBSNVTUz9CIIgCIJQa4lARRAEQRCEWksEKoIgCIIg1FoiUBEEQRAEodYSgYogCIIgCLWWCFQEQRCEWq9Hjx6XzSh79uxZZDIZ+/fvv+p7VYdL+7t582ZkMhnZ2dk12q/aRgQqgiAIwmWNHj0amUyGTCZDpVIRHBxMnz59+Oyzz7Db7TXdPaC4qm9SUhLNmzcH3L/xr1y5skKF/oSaJQIVQRCEa5HdBmf+goPfF/+v3Vblj+zXrx9JSUmcPXuWX3/9lZ49ezJp0iQGDBiA1Wqt8ueXxWw2o1AoCAkJQaksO0WYn58fBoOhmnomXC0RqAiCIFxrDv8Mc5rD5wPgh7HF/zunefHxKqTRaAgJCSE8PJy2bdvy/PPP89NPP/Hrr7+ydOlSALKzsxk3bhyBgYF4eXnRq1cvDhw44LjHq6++SuvWrfnyyy+JiorC29ubYcOGkZub6zgnPz+fkSNH4unpSWhoKO+++26pvkRFRTFjxgxGjhyJl5cXDz/8sNNUytmzZ+nZsycAvr6+yGQyRo8eDZSe+jGZTDz77LPUqVMHjUZD/fr1+fTTT8v1Ozl06BADBgzAy8sLg8HALbfcwqlTpxztixcvpkmTJnh4eNC4ceMKVRqOjY1l4MCB+Pr6otfradasGWvXri339dcLEagIgiBcSw7/DN+NBGOi83FjUvHxKg5WLtWrVy9atWrFypUrAbj33ntJTU3l119/Zc+ePbRt25Zbb73VKcX+qVOn+PHHH1m9ejWrV6/mzz//5M0333S0P/300/z555/89NNP/Pbbb2zevJm9e/eWevY777xDq1at2LdvHy+99JJTW506dfjhhx8AOHbsGElJSXzwwQcuf4aRI0fyzTff8OGHH3LkyBE+/vjjctUHOnfuHN26dUOj0bBx40b27NnDgw8+6BhdWrZsGS+//DIzZ87kyJEjvPHGG7z00kt8/vnnl703wIQJEzCZTGzZsoWDBw/y1ltvXVd1i8pLpNAXBEG4VthtsO5ZwFWJNgmQwbrnoPEdIFdUW7caN27Mv//+y99//83OnTtJTU1Fo9EAxcHEjz/+yPfff8/DDz9c/GPY7SxdutQx/fLAAw+wYcMGZs6cSV5eHp9++ilfffUVt956KwCff/45ERERpZ7bq1cvpk6d6vj+7Nmzjv+vUCgcaf2DgoLw8fFx2ffjx4/z3Xff8fvvv9O7d28A6tWrV66fe968eXh7e7N8+XJUKhUADRs2dLS/8sorvPvuuwwaNAiA6OhoDh8+zMcff8yoUaMue/+4uDgGDx5MixYtKtSv640IVARBEK4Vsf+UHklxIoHxXPF50bdUW7ckSUImk3HgwAHy8vLw9/d3ai8sLHSaDomKinJaIxIaGkpqaipQPNpiNpvp2LGjo93Pz49GjRqVem779u2vuu/79+9HoVDQvXv3K7r2lltucQQpJeXn53Pq1CnGjh3LQw895DhutVrx9vYu1/0nTpzIo48+ym+//Ubv3r0ZPHgwLVu2rHA/r3UiUBEEQbhW5KVU7nmV5MiRI0RHR5OXl0doaCibN28udU7JEY1L39hlMtkV7RzS6/UVvuZSWq22Sq7Ny8sDYNGiRU5BFxSP9pTHuHHj6Nu3L2vWrOG3335j1qxZvPvuuzzxxBNX3OdrkVijIgiCcK3wDK7c8yrBxo0bOXjwIIMHD6Zt27YkJyejVCqpX7++01dAQEC57hcTE4NKpWLHjh2OY1lZWRw/frzCfVOr1QDYbO53RLVo0QK73c6ff/5Z4fu3bNmSv/76C4vFUqotODiYsLAwTp8+Xep3ER0dXe5n1KlTh/Hjx7Ny5UqmTp3KokWLKtzPa50YUREEQbhW1O0MXmHFC2ddrlORFbfX7VwljzeZTCQnJ2Oz2UhJSWHdunXMmjWLAQMGMHLkSORyOZ06deKuu+7i7bffpmHDhiQmJrJmzRruvvvuck3VeHp6MnbsWJ5++mn8/f0JCgrihRdeQC6v+OfqunXrIpPJWL16NbfffjtarbbUYtSoq
ChGjRrFgw8+yIcffkirVq2IjY0lNTWVIUOGlHn/xx9/nLlz5zJs2DCmTZuGt7c327dvp0OHDjRq1IjXXnuNiRMn4u3tTb9+/TCZTOzevZusrCyefPLJy/Z/8uTJ9O/fn4YNG5KVlcWmTZto0qRJhX8P1zoxoiIIgnCtkCug31vnv5Fd0nj++35vVtlC2nXr1hEaGkpUVBT9+vVj06ZNfPjhh/z0008oFApkMhlr166lW7dujBkzhoYNGzJs2DBiY2MJDi7/KM/s2bO55ZZbGDhwIL1796Zr1660a9euwv0NDw/ntdde47nnniM4OJjHH3/c5XkLFizgnnvu4bHHHqNx48Y89NBD5OfnX/b+/v7+bNy4kby8PLp37067du1YtGiRY2pr3LhxLF68mCVLltCiRQu6d+/O0qVLyz2iYrPZmDBhAk2aNKFfv340bNiwQtubrxcySZJcheXXDKPRiLe3Nzk5OXh5edV0dwRBEMpUVFTEmTNniI6OxsPD48pucvjn4t0/JRfWeoUXBylN/1c5HRWESlDW33t537/F1I8gCMK1pun/ircgx/5TvHDWM7h4uqcatyQLQnURgYpQa9mMRqxpaeRv3w52CX2nm1EGBqIo59Y+QbiuyRXVugX5RjR+/Hi++uorl233338/CxcurOYe3ZhEoCLUStasLDI+W0LmJSvc/UaPxv/hh1CeT+QkCIJQVaZPn85TTz3lsk0sNag+IlARaiXTsWOlghSAzKVL0d/SFc8uXWqgV4Ig3EiCgoIICgqq6W7c8MSuH6HWsRUUkPHZErftGYs/xXY+mZIgCIJwfROBilDrSGYztox0t+22jAwkc+kES4IgCML1RwQqQq2j8PREX8bUjr5zZ+SGG6+CqCAIwo1IBCpCrSNTKvG55x7kLup4yHQ6fO8bjtxFETBBEATh+iMCFaFWUkVEUHf5N+hKFPPS3nQTUd98gyo8vAZ7JgiCIFQnEagIlc5mNGI6fRrj+vXkbf0H87lEJBdFu8oik8vxaNCAiA8/IOb334j5/TciPpqLR6OGyMpZeVQQhNpt8+bNyGQysrOzr+o+S5cudarOfCXOnj2LTCZj//79V9y3Hj16MHny5Kvqh1CaCFSESmVNTyflzTc5ffsdnJs0mfixYzk9cCD5O3ZiN5srfD+FtzfqOnVQ16mDUiR6E4QaM3r0aGQyWamvfv36VWs/oqKimDNnTpU/p3PnziQlJeEtXndqnMijIlQaSZIwrl9PzspVzscLCoh/9FFiVv+Cum7dGuqdIFxfbHYbe1P3klaQRqAukLZBbVFUcQr9fv36sWSJc+oAjUZTpc+sKWq1mpCQkJruhkAVj6gsWLCAli1b4uXlhZeXF506deLXX391tBcVFTFhwgT8/f3x9PRk8ODBpKSkVGWXhCpkTUsj4+NPXDdaLORu/rN6OyQI16k/Yv+g7w99eXD9gzz717M8uP5B+v7Qlz9i/6jS52o0GkJCQpy+fH19AZDJZCxevJi7774bnU5HgwYN+Pnnn52uX7t2LQ0bNkSr1dKzZ0/Onj1b6hk//PADzZo1Q6PREBUVxbvvvuto69GjB7GxsUyZMsUxolPS+vXradKkCZ6envTr14+kpCSn9sWLF9OkSRM8PDxo3LhxmZWIL536ycjIYPjw4YSHh6PT6WjRogXffPNNRX59whWq0kAlIiKCN998kz179rB792569erFnXfeyaFDhwCYMmUKv/zyCytWrODPP/8kMTGRQYMGVWWXhKpks2FNTXXbbD59uho7IwjXpz9i/+DJzU+SUuD8oS61IJUnNz9Z5cFKWV577TWGDBnCv//+y+23386IESPIzMwEID4+nkGDBjFw4ED279/PuHHjeO6555yu37NnD0OGDGHYsGEcPHiQV199lZdeeomlS5cCsHLlSiIiIpg+fTpJSUlOgUhBQQHvvPMOX375JVu2bCEuLs4p/f2yZct4+eWXmTlzJkeOHOGNN97gpZde4vPPPy/Xz1ZUVES7du1Ys2YN//33Hw8//DAPPPAAO3fuvMrfmnBZUjXz9fWVFi9eLGVnZ0sqlUpasWKFo+3IkSMSIG3btq3c98vJyZEAKScnpyq6K1SAJSNDOnXnXdLhRo1dfmWvWVPTXRSEGldYWCgdPnxYKiwsrPC1VptVuvW7W6XmS5u7/GqxtIXU+7vektVmrfR+jxo1SlIoFJJer3f6mjlzpiRJkgRIL774ouP8vLw8CZB+/fVXSZIkadq0aVLTpk2d7vnss89KgJSVlSVJkiTdd999Up8+fZzOefrpp52uq1u3rvT+++87nbNkyRIJkE6ePOk4Nm/ePCk4ONjxfUxMjPT11187XTdjxgypU6dOkiRJ0pkzZyRA2rdvnyRJkrRp0yanvrlyxx13SFOnTnV83717d2nSpEluz78RlfX3Xt7372pbo2Kz2VixYgX5+fl06tSJPXv2YLFY6N27t+Ocxo0bExkZybZt27j55ptd3sdkMmEymRzfG43GKu+7UD5KPz+Cnn6a+LFjS7UpAgLQtW5d/Z0ShOvI3tS9pUZSSpKQSC5IZm/qXm4KuanSn9+zZ08WLFjgdMyvRIHQli1bOv6/Xq/Hy8uL1POjrEeOHKFjiXQDAJ06dXL6/siRI9x5551Ox7p06cKcOXOw2Wwoytjxp9PpiImJcXwfGhrqeHZ+fj6nTp1i7NixPPTQQ45zrFZruRfL2mw23njjDb777jvOnTuH2WzGZDKh0+nKdb1w5ao8UDl48CCdOnWiqKgIT09PVq1aRdOmTdm/fz9qtbrUlrLg4GCSk5Pd3m/WrFm89tprVdxr4UppW7Yg7P33SH1jFta0tOJj7doR+voMVGFhNdw7Qbi2pRWkVep5FaXX66lfv77bdtUliRhlMhl2u71K+lKeZ0uSBEDe+dpgixYtKhUslRX8lDR79mw++OAD5syZQ4sWLdDr9UyePBnzFexmFCqmygOVRo0asX//fnJycvj+++8ZNWoUf/555Ysqp02bxpNPPun43mg0UqdOncroqlAJFAYDXn37omvTBltuLjKVCoWvr9haLAiVIFAXWKnnVacmTZqUWly7ffv2Uuds3brV6djWrVtp2LChI6BQq9XYbLYKPTs4OJiwsDBOnz7NiBEjrqD3xf248847uf/++wGw2+0cP36cpk2bXtH9hPKr8kBFrVY7IvB27dqxa9cuPvjgA4YOHYrZbCY7O9tpVCUlJaXMLWEajea63Q53vZDJ5ahCQlCJrX2CUKnaBrUlWBdMakEqElKpdhkygnXBtA1qWyXPN5lMpUa8lUolAQEBl712/PjxvPvuuzz99NOMGzeOPXv2OBbJXjB16lRuuukmZsyYwdChQ9m2bRsfffSR0+6cqKgotmzZwrBhw9BoNOV6NhQv9J04cSLe3t7069cPk8nE7t27ycrKcvrw606DBg34/vvv+eeff/D19eW9994jJSVFBCrVoNoTvtntdkwmE+3atUOlUrFhwwZH27Fjx4iLiys1bykIgiCAQq7guQ7FO2VkOG/NvfD9sx2erbJ8KuvWrSM0
NNTpq2vXruW6NjIykh9++IEff/yRVq1asXDhQt544w2nc9q2bct3333H8uXLad68OS+//DLTp09n9OjRjnOmT5/O2bNniYmJITCw/CNH48aNY/HixSxZsoQWLVrQvXt3li5dSnR0dLmuf/HFF2nbti19+/alR48ehISEcNdddwFgs0uYLDYsNjsFZisFZitWW/VMed0IZNKFSbwqMG3aNPr3709kZCS5ubl8/fXXvPXWW6xfv54+ffrw6KOPsnbtWpYuXYqXlxdPPPEEAP/880+5n2E0GvH29iYnJwcvL6+q+lFueLb8fOznFy7LvbxQuCgYeL2zpKZiSUzEkpiIOiICZWgoqgq8UAoCFG9zPXPmDNHR0Xh4eFzRPf6I/YM3d77ptLA2RBfCsx2epXfd3mVcKVQ2m91OVoGFxOxCp+NeHirCfbWoFDd2Aviy/t7L+/5dpVM/qampjBw50pGGuGXLlo4gBeD9999HLpczePBgTCYTffv2LTMBj1D9JEnCEhtH6gdzyP3tdwAMt/UhaNIkVHXrlkq4dL0yxcYSP+4hLPHxjmPq+vWps3Ah6ghRJFGoXr3r9qZnnZ7VnplWKM1slUoFKQDGIguGQiV+evUN8zpZVap0RKU6iBGVqmVOSODsPfdiu6Qwl8LHh6jvV6COiKiZjlUja3o6saPHYD55slSbtm1b6syfh+IqC6IJN47KGFERao/E7ELS80wu29RKOTGBnjf0qEpljKjcuL894bIkm42cVatKBSkAtuxsclatQqrg6vuqYDebMScmYo6NxVJGZtwrZc3IcBmkABTu3Yv1fOZNQRBuLJIkYSljLYrNJnFtDwXUDiJQEdyyGY3kbtzktj13w0ZsNZxwz5KSQuq773L6jgGc6tuP2OHDyVm3DltOTqU9w34+B4Pb9oKCSnuWIAjXDplMhpdW5bZdr1FyAw+mVBrxKxQc7GYz5nPnKDxwgMKDB7Hn56Nt09rt+QpvL2Qq9/9Iq5o1M5Nzzz5H1udfIBUWzxFbziWSOHkKeX/+SWXNair8/d03KpUoxJSjINyw9Gqly6kdGTKCvTxQyMXb7NWqthT6Qu1my80ld/16kme+4XjTV/j4EDrzdaxJyeRtKj2y4vfggyg8Pau8b9b0DGzZWUiShMLbG1VQEACW5GQKL0kYdUHqO++g69ARVUjwVT9f6eeHoW9fctevL9Xmc889KMuZx0EQhOuPWimnXqCelJwicgqtSEjo1ArCfLRolCJIqQwiUBEAMB0/TtKLLzkds2VnkzBxEnU/X0re33+DxeJo8777brTNm1dpnySrlaIjR0h8bhrmU6cAUIWHEzrzdbRt2lB0+LDba62padjz84CrD1QUXl4Ev/A8Ch8fclauRLJYkHl44HvfffiNGYO8Ftf6sKalYTpzhrwtf6EM8MezRw9UQUG1us+CcK3RKBWE++oI8bYjAQqZDKWY86k0IlARsOXmkvbRPDeNNoy//Ua9n38mZ9UqAAx9+6IKC0Xp61ul/bKcO0fsAyORioqcjsWNHUf0Tz+iDCgjh4lCgUytrrS+qIKCCJ72HP7jxmEvLESu06EMDEBei7MkW5KTSZjwOEWHDjmOpb71NqGz3sDrtttEsCIIlUghl4nt4VVEBCoCUmEh5rNn3babjh1DGRxE0JNTqq9PNhvZP6x0ClIcbDbS588naMqTyHQ6JBeLWQ19+pS9tuQKyD08UNe5NrZj2y0WMpd+7hSkACBJJE17Hm2rVmjKmZFTEAShJomxKQGZToemjIqoHk2bVfvIgb2wkII9e9y2Fx38D5lWS52PP0Z2yd58dYMGBD3zNIrrfMTAmpGBJSUFW35+qTZbRgZZ333n+kJJIm/T5qrtnCDcYEaPHu1IqQ/Qo0cPJk+eXGP9uZ6IERUBhacnARMmkP/XX6UbVSp8htyLrJyl0CuLTK1GHVmHQjfBiio0FLlaha5Na+qt/oXCf//Fci4RbetWqOtGoQq6flPbWzMyyP/nHzIWLcaamYnupvYEPPYY6rp1kV+Y7rLZXI40Oe6RnlZNvRWuN8nJycycOZM1a9Zw7tw5goKCaN26NZMnT+bWW2+t6e4J1yERqAgAaOrHEPbOOyRPn+6o6aMMCiTs7bdRhVd/ini5Wo3vAyPJWfWjy/aAR8c7tgWrIyJuiAy5ANbsbFJnzybnx58cx3J/XUfuHxuIWrYMbcsWQPEomUerVhQdOODyPp7dulVLf4WqI9lsFOzegzUtDWVgILr27ar8A8XZs2fp0qULPj4+zJ49mxYtWmCxWFi/fj0TJkzg6NGjVfr8K2U2m1FX4po1oXqJqR8BKB5V8erXl3o//UjUD98TvWolUd99h65jx4uf0quZOrIOoW/Ocs7VolAQ+OQUNDdoaXVraqpTkOJgsZA8/TWsWVkAKH19CZ72HLjI4aBp2hR1vZiq7qpQhYy//cbJW3sTN2oUiU89RdyoUZy8tTfG336r0uc+9thjyGQydu7cyeDBg2nYsCHNmjXjySefZPv5VAHvvfceLVq0QK/XU6dOHR577DHySiRNXLp0KT4+Pqxfv54mTZrg6elJv379SEpKcnrWZ599RrNmzdBoNISGhvL444872rKzsxk3bhyBgYF4eXnRq1cvDpQIyl999VVat27N4sWLy12qwG638/bbb1O/fn00Gg2RkZHMnDnT0R4fH8+QIUPw8fHBz8+PO++8k7NlrO271Pz582nQoAEeHh4EBwdzzz33lPvaG50IVAQHmVKJKjQUbbNmeDRpgiokpEqKadlyczHHxWE6cwZLmvspiOLgqR/1fl1LnUWfELFgPjG/rsVvxAiU3t6V3q9rQcGOHW7biv475BgNA9A0akTdr5fh0eL8KItWi++okdSZP++6nhq73hl/+41zkyZjTU52Om5NSeHcpMlVFqxkZmaybt06JkyYgN5F9XSf8/Wu5HI5H374IYcOHeLzzz9n48aNPPPMM07nFhQU8M477/Dll1+yZcsW4uLieOqppxztCxYsYMKECTz88MMcPHiQn3/+mfol1tHde++9pKam8uuvv7Jnzx7atm3LrbfeSmaJchYnT57khx9+YOXKlezfv/+yP9+0adN48803eemllzh8+DBff/01wcHF6Q0sFgt9+/bFYDDw119/sXXrVkeAZTabL3vv3bt3M3HiRKZPn86xY8dYt24d3cSoZrmJqR+hWpnj4kh+fWbxehhJQhUZSchLL6Fr2wa5ixc/uYeHY2rHkp4OFkvx9mAX594IZJoyPhnKZE4jKAqtFl3r1tT55GPsBQXI5HIU/v41NkImXD3JZiPljVm4LCAjSSCTkfLGLAy33lrp00AnT55EkiQaN25c5nklF5BGRUXx+uuvM378eObPn+84brFYWLhwITExxSN7jz/+ONOnT3e0v/7660ydOpVJkyY5jt10000A/P333+zcuZPU1FQ05xf5v/POO/z44498//33PPzww0DxdM8XX3xBYODlg/Lc3Fw++OADPvroI0aNGgVATEwMXbt2BeDbb7/FbrezePFix4e3JUuW4OPjw+bNm7n
tttvKvH9cXBx6vZ4BAwZgMBioW7cubdq0uWy/hGIiUBGqjSUpidgHRmJNSbl4LC6O+Icfpu6yr9C1bevyOmtmJvl//U3a3LlYEhJQR0cROHkKuo4dUN5gVYt1HW4qDkhcvFHpu3VzWcVZ6esLVZzzRqgeBbv3lBpJcSJJWJOTKdi9B33HDpX67PKWpPjjjz+YNWsWR48exWg0YrVaKSoqoqCgAN35nXg6nc4RpACEhoaSer6gaGpqKomJiW4X5h44cIC8vDz8L0k/UFhYyKnziSEB6tatW64gBeDIkSOYTKYyn3ny5EkMBoPT8aKiIqdnutOnTx/q1q1LvXr16NevH/369ePuu+92/D6EsolARag2Bfv3OwUpDpJE6uzZRCxYUCrwsBcWkvnlV2QsWOA4Zj5zlnOTJhH03LP43nffVY8Q2ItM2LKzQAK5lwFFJY7W2IuKsKanY46NBUlCHRWFwt8fhVZ7RfdTBgYSPO254k/VJSj8/Ah+7jkUl7yQCtcXaxlTpVdyXkU0aNAAmUxW5oLZs2fPMmDAAB599FFmzpyJn58ff//9N2PHjsVsNjvemFWX1AiTyWSOQEh7mX8beXl5hIaGsnnz5lJtPiVeP1xNT7lTnme2a9eOZcuWlWorTzBkMBjYu3cvmzdv5rfffuPll1/m1VdfZdeuXU59FlwTgYpQLpLNVjx9oFZfcU6V/C0utj+fV/jvweIaQ5f8o7Wmp5OxaJHLa9I++BBDnz6or2JXkjkhgYyFC8n5+Rckux3DrbcSOHky6rqRyK6ymJgtL4/cdetIfm060oXyAyoVwdOm4T3gjisqZqjQ6/G+62607duT9c1yrMnJePbojmePHlf0e7AVFmJLS6Ngzx7sublo27dHFRKC0s+vwvcSqp6ynCME5T2vIvz8/Ojbty/z5s1j4sSJpQKB7Oxs9uzZg91u591330V+/t/Pd+7y+bhhMBiIiopiw4YN9OzZs1R727ZtSU5ORqlUEhUVdcU/T0kNGjRAq9WyYcMGxo0b5/KZ3377LUFBQXhdYRFSpVJJ79696d27N6+88go+Pj5s3LiRQYMGXW33r3siUBHKJNlsWBISyF61ioKdu1CFheE3amTxyEAFP72r6tRx26YMCAAXc+rWtDSwWl33rbAQW1YWXGGgYklMJHbE/U6jPLnr15P/zz9E//A96sjIK7rvBeYzZ0vVT8JiIWX6dDyaNEZ3hXPUCi8D2qZN8XjlZSSrDbnHlQWO9oIC8jZuJPHZ58Bmcxz37NmDkOnTUVXBm51wdXTt26EMCSn+m3U1FSOToQwORte+XZU8f968eXTp0oUOHTowffp0WrZsidVq5ffff2fBggUsX74ci8XC3LlzGThwIFu3bmXhwoUVfs6rr77K+PHjCQoKon///uTm5rJ161aeeOIJevfuTadOnbjrrrt4++23adiwIYmJiaxZs4a7776b9u3bV/h5Hh4ePPvsszzzzDOo1Wq6dOlCWloahw4dYuzYsYwYMYLZs2dz5513Mn36dCIiIoiNjWXlypU888wzRFwmPcLq1as5ffo03bp1w9fXl7Vr12K322nUqFGF+3ojErt+hDKZjh3n9N2DyFj4MYV792JcvZqz9w4hZ/Vq7GUkFHPFq39/l9tlAfzHjnX5KVB2mdEb2SVDyOUlSRK5Gze6nIqy5+aS+dUy7OVYze+OvaiIzM8+c9uesWgRtgr+/i4lUyqvOEiB4lpAiU8/4xSkAORt2ozxl9VIdvtV9U+ofDKFguDnp53/5pIdeee/D35+WpXlU6lXrx579+6lZ8+eTJ06lebNm9OnTx82bNjAggULaNWqFe+99x5vvfUWzZs3Z9myZcyaNevyN77EqFGjmDNnDvPnz6dZs2YMGDCAEydOAMXTRGvXrqVbt26MGTOGhg0bMmzYMGJjYx27dK7ESy+9xNSpU3n55Zdp0qQJQ4cOdayb0el0bNmyhcjISAYNGkSTJk0YO3YsRUVF5Rph8fHxYeXKlfTq1YsmTZqwcOFCvvnmG5o1a3bF/b2RyKTyrpCqpYxGI97e3uTk5FzxkJzgmjUzk/hxD7muUqxQELPuV9RljJJcylZYSP6Wvzj31FNOlZgN/foS/MKLqAIDSl1jSU7m7JAhWFNLz7mro6KI/PKLK/rkb8vLI+GxxyjYuctlu6pOHep+8zWqgNJ9Kg9rdjbx48ZR9N8hl+2aBg2I/HxpjU6xpM2bT/rcuS7blEGBRK34HlVwUDX36vpXVFTEmTNnyp3fwxXjb7+R8sYsp4W1ypAQgp+fhtdldqAIQnUq6++9vO/fYupHcMuWk+M6SAGw2Sg6fLhCgYpCq8Wzezdifl1L0X+HsOUa0bZqhTIw0G0lZmVwMBFz5xI7ekzxGpbz5AYD4e+/f8XTEzKlCrnB/T8MhcFwVZ9K5TodHi1bug1UPFq0QK6r2S3WloQEt23WjEyQxIhKbeV1220Ybr212jPTCkJNEIGK4N7lBtsumTIoj5J5UcpDJpPh0awZ9X75mfytWyk6fARtq1boOnZAFRZW4edf7IcGv5EPkLdhg8t2v9Gj3QZP5bq/Wo3f/feTveJ7p9EjAJRK/B8cc1XTNpXBs3t3clatctmmbd0a2RXuTBKqh0yhqPQtyNeruLg4mpaRzfrw4cNEXuWaNKHqiEBFcEvh5YW6Xj3Mp0+XbpTJ0FTT/KpMqSwOboYOrdB11qwsLElJ5P+9FZmHBs+uXVEGBjoWAWsaNMBn2DCyly93us6zZ0/0nW6+6n6rIiKou3QJic9NwxIfX3wsPJzQN2aWubC4umhbtUIVHoblXKJzg0xG8DNP37DZf4XrT1hYWJnZacOu4kOPUPXEGhWhTAV79hA7anSpnTcBjz2G34NjUHh61kzHLsOank7yG7PIXbvW6XjglMn4DhuG4vyb8IVgxrh6DZLVglf//qgjI1FekkzqaljS0rBlZ4MkofDxQRVUe9Z9mBMSSH17NrkbNoDNhjomhpCXX8KjZcsrzvUilK0y1qgIwrVCrFERqpxHixbU+3EV6YsWU7h/P6rgIPwffgRt82a1NkgByNu6tVSQApD2/hz0nTujPV//Runri9LXF20VFjlUBQbW2q2+6ogIQt+cRVBmJlityA2GSg3SBEEQrpYIVG5g1qwsbJmZ2IuKUHh7owwIQH5JxCtXq9HUr0/oq69gz89HplZfUaKy6mTNzCTz00/dtmct+xqP12cgU4o/fwCFTodCpPIWBKGWEq/UNyjT2VgSn5rq2JUi02gImDwZn8GDULoIRORaLfJrZCpAslqLp1rcsKanIVmtIlARBEG4BoiEbzcgS3IycWPGOIIUdXQ0YbPfRuHhQeann5G7YQOWxKQa7uWVUxgM6G52vxjWs9etpUaOSpJsNizJyZhOnMAcF4ctL68quikIgiCUg/hIeQMynT6NNak4EFGFhxE87TkSn32uOB39eQo/PyI/X4pHgwY11c0KkWw2rJlZyGTFfQ945BFy161HuiS7rCIgAM/u3dzex5qdTe7vv5P23vvFvw+5HM9evQieNg11uNgZIA
iCUN3EiMoNyHTsmOP/+z04luSZbzgFKQC2zEzOTZxYJVVYK5slMZH0BQuJHTGC2PsfIPPTz5B7ehL17XK07c7XPFEoMNx2G1HLvnJbvE+SJPL//JPkl16++Puw28n74w/iH34Yy/l02oIgXNSjRw8mT57s+D4qKoo5c+ZUybNkMhk//vij2/azZ88ik8nK3IosXHvEiMp1zm4yYU1Lw5aRCSolSn9/p/wnqpBgLLGxLq81nzmLNTOzSiqxVhZzYiKx9z+ANfFiLpDUd98le9UqIpcuIWL+POzGXJDLUPj4oCij9Ls1NZXU9953/ZxTp7DExdWqrcXlIdntWFNTsWVnI1MqUfj4FBeAFIQKGj16NJ9//nmp4zt27KBJkyY10KOq9+qrr/Ljjz+KwKeGiUDlOmbNziZn1Y+kzZmDZDIBxTVcwt57D333buT/uQXp0qypl7hwXW0k2WwYV692ClIuMJ8+Tf5ff+EzeDCUM3GZVFjoskjhBYX//YfuCiqz1hRbfj75W7eS/Np0bBkZAKhjYgib/TYejRqJdOvXOLtdIulENvlGE3ovDaENfJDLZZe/8Cr069ePJUuWOB0LDAxEcZV/SxaLBdUVFhgVrn9i6uc6Vrh3H6lvveUUbFhT04gfO47gZ55BFR5WXJ3Y3e4XlQrFVaSRr2q27GyMv6x22569chW23Nzy31CtRlbGIltVmOspo9rKdPw45yZOcgQpUDwyFPfAA1hcBHfCtePUvlS+eP4ffnx/H79/epgf39/HF8//w6l9VTs9qdFoCAkJcfq69dZbnaZ+AHJzcxk+fDh6vZ7w8HDmzZvn1C6TyViwYAH/+9//0Ov1zJw5E4AFCxYQExODWq2mUaNGfPnll6X6kJSURP/+/dFqtdSrV4/vv//ebX9tNhtjx44lOjoarVZLo0aN+OCDD5zO2bx5Mx06dECv1+Pj40OXLl2IjY1l6dKlvPbaaxw4cACZTIZMJmPp0qVX9osTrooIVK5T1sxM0i75B3mBZDKRt3kzUStWoKpbF78RI1ye5z96VI1ME0hWK/YSBQjdksvdB1mATKUqPqeclAEB+AwZ4vpReh0ezaouKVxlsxmNpM1x/d/fnl+A8bffqrlHQmU5tS+VdR//R36282hnfraJdR//V+XBSnnMnj2bVq1asW/fPp577jkmTZrE77//7nTOq6++yt13383Bgwd58MEHWbVqFZMmTWLq1Kn8999/PPLII4wZM4ZNmzY5XffSSy8xePBgDhw4wIgRIxg2bBhHjhxx2Q+73U5ERAQrVqzg8OHDvPzyyzz//PN89913AFitVu666y66d+/Ov//+y7Zt23j44YeRyWQMHTqUqVOn0qxZM5KSkkhKSmJoBct4CJVDTP1cpySzGfPZs27bC/89iN+DD6L080P58EOowsNIX/gxtsxMFP7+BIwfj9ft/as1d4otJwdzbCyZy5ZhS0vHs1dPPHv2dLv4Venri+/wYSS//IrTcY9mTfG55148WrbAZjQiV6uRqVTYcnOx5RgBCYWXV6nEdXK1Gv+xD2I6dYqCrVsvHjcYqLPoE1TBwZX+M1cVe2Gh06LpSxXu3o39gQeQq9XV2CvhatntEn99e6LMc/7+7gTRrQKrZBpo9erVeJbISN2/f3+X53Xp0oXnnnsOgIYNG7J161bef/99+vTp4zjnvvvuY8yYMY7vhw8fzujRo3nssccAePLJJ9m+fTvvvPMOPXv2dJx37733Mm7cOABmzJjB77//zty5c5k/f36pfqhUKl577TXH99HR0Wzbto3vvvuOIUOGYDQaycnJYcCAAcTExAA4rbfx9PREqVQSEhJS/l+SUOlEoHKdkqnVqOvVw+Tmk4a2dStksuIXMqW/P77334/httuQzGZkag3KoEBkFRiNuFq23Fyyvv2WtBKLWfP/+Yf0BQup+/UyNHXrurzOs3t3PFq2pOjffwHwf+QRlAH+pC9ahDUxEbnBgP+kiXjefDOps98hf8sWAHSdOxMy7TnU9eo5/Zyq4GDCZ7+NNS0N0/HjKPz90URFoQwOvqbWdMjUalRhYW4T32li6hePOAnXlKQT2aVGUi6Vl2Ui6UQ24Y0qf9q2Z8+eLFiwwPG9Xq9n+PDhpc7r1KlTqe8v3QnU/pL1XkeOHOHhhx92OtalS5dSUzWu7l3WYtd58+bx2WefERcXR2FhIWazmdatWwPg5+fH6NGj6du3L3369KF3794MGTKE0NBQt/cTqp+Y+rlOKf38CJwy2WWbTKvF0Lu38zG5HFVICOrISFQhwdUapABY09KcgpQLbBkZpM1+B1t+vsvrVMHBRHw0l/APPsBv3FgUPt6kzHzDscDWnpeHR1QUsfeNIP/PP0GSQJIo2LqVs0OHYUlIKHVPpZ8fHo0a4T1wIJ6dO6MKC7tskCLZbFiSkig8dIjCQ4ewJCUh2WxX8JuoHEpfXwLOfzItRaHAe9DdjkBVuHbkG8u3uL2851WUXq+nfv36jq+reUPXl7EDr7IsX76cp556irFjx/Lbb7+xf/9+xowZg7lEfqUlS5awbds2OnfuzLfffkvDhg3Zvn17lfdNKD8RqFzHtK1aEfzii8hKTN8oQ0KIXLoEVS37xJD/91a3bbkbN2LLynbbrgoKwqvvbfgOG076wo+d2vRdupD/19/YjcZS19nz88lavhz7ZXY+XY6tsJC8LVs4c/cgzg6+h7OD7+HMoMHkbdmCrcRaG8lmw5KYSN5ff5O9ciWFhw5hLbHQtbJp27Uj4PEJUCLIkut1RHw0F5Uoa39N0ntpKvW8qnLpG/327dsvu4W5SZMmbN3q/DqwdetWml5SMLQi9966dSudO3fmscceo02bNtSvX59Tp06VOq9NmzZMmzaNf/75h+bNm/P1118DoFarsdXgBw6hmJj6uY4pvb3xGXIvnj17YMvMRKZUofD3qxW5QCxpadhzckAuR+HjA+oypiHsdpDsl72nPT+v+J4leDRuTP72bW6vyf/rb/zHjUPu51ferpdiiYsjYcLjxf08z5aVRcKEx4letRJFo0ZINhuF//1H/LiHsJfYiaRt25bw995DFVL561+Uvj74jxmD9513Yo6NRabRoA4PRxkYKKZ9rlGhDXzQ+2jKnP7x9C3eqlyTtm7dyttvv81dd93F77//zooVK1izZk2Z1zz99NMMGTKENm3a0Lt3b3755RdWrlzJH3/84XTeihUraN++PV27dmXZsmXs3LmTT90UIW3QoAFffPEF69evJzo6mi+//JJdu3YRHR0NwJkzZ/jkk0/43//+R1hYGMeOHePEiROMHDkSKE5ed+bMGfbv309ERAQGgwGNpmaDwBuRGFG5zsnVatTh4WhbtMCjSeNKCVIsKSkU/nuQvL/+Kk7Hf0lwUBa7yUT+jh3E3jeC0wMGcvr2O4gbPQaPxk3waNHC5TXatm2RGwyXvbfMxQuIPT8fhY/7uXqFr89VvWnbi0xkfPqZU5BysdFOxmdLsBeZsKSkED92nFOQAlC4dy9p8z7CXlR0xX0oi1yvR12nDp5du6K/6abiaSwRpFyz5
HIZtwwtu6xF1yENqjyfyuVMnTqV3bt306ZNG15//XXee+89+vbtW+Y1d911Fx988AHvvPMOzZo14+OPP2bJkiX06NHD6bzXXnuN5cuX07JlS7744gu++eabUqMuFzzyyCMMGjSIoUOH0rFjRzIyMhyLdQF0Oh1Hjx5l8ODBNGzYkIcffpgJEybwyCOPADB48GD69etHz549CQwM5Jtvvrm6X4xwRWSSJEk13YmrYTQa8fb2JicnBy8XVX+FyiNJEqbjx4l/ZDzW5GTHcUPfvgS/+AKqcmSwLTp2jDODBsMlw6kyrZbITxcTO+L+4nUkF46r1dRd/g1aNy9EJdmMRhImTqKgxNCwIiCA4KefJvHZZ11eU+fjj8us/XM51sws4saMcbvDRtO4MZFLl1C4fz8J4x91eY5MpaLer2tRR0RccT+Ea0dRURFnzpwhOjoajzLy9pTl1L5U/vr2hNPIiqevhq5DGhDTpuZHTAXhgrL+3sv7/i2mfoTLshcVYS8oQLJYSHj8CacgBSB3/XpUYWEETplc5nZXe0FB8RoSF3O+UmEheX/9RZ1PF5P61tvYsrPRdexIwCMPo46MLFc/FV5ehM6YTtzoMVjOnQPAlp6OJS0V35EPkPWFc/Ion2HD8GjRvFz3dkeu06KJqec+UKlfH7lWW+p3VpJksZQqnigIZYlpE0R0q8Bqz0wrCDWhSgOVWbNmsXLlSo4ePYpWq6Vz58689dZbNGrUyHFOUVERU6dOZfny5ZhMJvr27cv8+fMJvoZyVlyvbIWFWOLiyPj0U0xHjqLt2IHw997FlpNDwc6d5Kz60VG0MGv5cnwfuB91GYs0bfn5jm3ErhTu3oP/2LFELvkMyWpFYTBUOI+L3MubOp98gunYMYqOHEYdFYW2XTsU3t743Hsv+Vu2INklPLt3QxkUhNLHp0L3L/U8Dw/8x43D+Os6p5EgAGQy/Mc+iFyjwaOMhYSKgADkOt1V9UO48cjlsirZgizUPLtdAhnIxc48oIoDlT///JMJEyZw0003YbVaef7557nttts4fPiwY2valClTWLNmDStWrMDb25vHH3+cQYMGlVr9LVQvyW6ncNcu4sc/CnY7Qc8+g0ylInHa81iTktA0bkzwiy+Qt2kTOT/+hFRYiHSZdRZyjQZlSIhjtONSqjoRyDQaFCUSSlWEzWgk64svSJ83D0VAAOqICIzrf0OyWKj75Rd4NGiAR4Oy5/evhLpuXcLef4/kF1/CnpcHgNzTk9DXZ6A+n/9FFR6OR7NmFB06VOr6oMmTUNaCBc6CINQss9VOgdlKVoEFuQz89Ro8VHKUiht7OWm1rlFJS0sjKCiIP//8k27dupGTk0NgYCBff/0199xzDwBHjx6lSZMmbNu2jZtvvvmy9xRrVCrOVlDg2B2j8PFxOWphSU7mzD33YktPx2fYMACyly8vdV7ojBlkffstloQEoletvOy257y//yZ+3EMu26JWrUR7FVVYi06c4MzA/7ls87r7bkJffqnUz2rNyECyWJHrtKUy1VaEZLFgSUvDlp4BMlD4+6O6ZHeNJSmJ1HfexbhuHdhsKPz8CJw0EcNtt6GsxTWVhMpVGWtUhOuP2WrnTHo+Jqvz1LivTk2ot8c1G6xcc2tUcs6/Ofqd3wq6Z88eLBYLvUskH2vcuDGRkZFuAxWTyYSpRJE9o4v8GIJrkiRhPnuWtPfnkLthA8jlePXvT+ATj6OuU8fpXFtWFrb0dAAMPXsSP368y3umzZ1L4ORJ2IzGco0KaJs3J+Dxx0mfP//iThmVitDXXi33WhR3cv/Y4L5t9WqCnnjcEahYMzLI37aN9AULsaak4NG8OUFPTileU3IF0zAylap42quMqS9VaCihM6YTOHkSktmMXKdHGRxU7cn1hNrhGt/HIFQiSZLIzDeXClIAsgrM+OnV12ygUhl/59UWqNjtdiZPnkyXLl1o3rx4AWNycjJqtRqfS9YJBAcHk+xm8eGsWbOcajcI5WeJj+fs0GEXk5/ZbBh//pn8f/4h6tvlLmvqKHx9iyvtuvljs6amogqPwLNH/XKlmFf4+OA3ZjTe/xtI0dGjyJQqNA0aoAgIQKG9uk+XUpH7QoaS1er4/zajkfQFC8j6apnjWMH27ZwdOqx4F1C3W66qH2WR63SoxXqUG5ri/L8Ts9mMthpraQm1l9UmkVXgfkF9Zr4ZnVpxTWaTLigoAIrrLl2pagtUJkyYwH///cfff/99VfeZNm0aTz75pON7o9FInUtGA4TS7BZLcRZWFyNQtvR0ctetx2/MaMene4WfH8qgQOyFRcg8yk5wpPT3R1mBhGkKvR6FXl/uERRLSgqm48cxrl+P0tcXr4EDUYWEovByzq1i6NWLjI8/cXkPXefOyM+vfbGmZzgFKQ6SRPJrr1L3m29qRVI84fqkVCrR6XSkpaWhUqmQixG1G57FasdqMSHZXX8gNJvsFBXJrqlARZIkCgoKSE1NxcfHxxGgX4lqCVQef/xxVq9ezZYtW4gokSsiJCQEs9lMdna206hKSkqK22qVGo1GZAa8AnajkbxNm9225/7xOz733nNxnYZMRsirr5LwxEQUBgMyrRapsPSIhUfTpij8qm59hSU5mfhHxjtt/81YtJjAqU/iO3SYU7CiiohA3717cU2fEmQaDcHPPuP42UxHj7p/3rnE4mBOBCpCFZHJZISGhnLmzBliY2NrujtCLSBJErmFFvJNrtP12z3VmLOvnaKoJfn4+Fx19ekqDVQkSeKJJ55g1apVbN682ZG2+IJ27dqhUqnYsGEDgwcPBuDYsWPExcWVqpApXCWlEnkZi5Xk3t6OhZ+ms2eJvf8BtC2aU2fBfAoPHSbk5ZdIeuFFpwysci8vQmfNqtBoSkXYLRYyv1rmFKTI1Goks5m0d9/Ds2cvp0BF6e9P2OszyN20icwlS7EZjei7dCFg/CNOozcy3WWG25UivZBQtdRqNQ0aNHAqjifc2M5lFTDh673kFlmdjjcO8eK1O2MI8Lz2PqCrVKqrGkm5oEp3/Tz22GN8/fXX/PTTT065U7y9vR1zs48++ihr165l6dKleHl58cQTTwDwzz//lOsZYtdP+RnXrePc5Cku2+os+QzPTp2Ks7tOnkzBP8X1cWQaDYbet6Lv1AlVVBR5W7ZgOX0GXccOePbsiSo8vMqGIy0pKZy+YwB2kwn/0aPQtb8JW042yrAw5Fot5rh4ZHI5Ho0aofD3Q1Eizb41PR3JZkPh5VVqp485IYHTt9/hMsmaR+tWRC5cWFx/SBAEoZpIkkR8ZgGfbT3Duv9S0KoVPNCpLrc3DyXE+/rcHVbe9+8qDVTcvYEtWbKE0aNHAxcTvn3zzTdOCd/KO1QkApXys6Snk/L6THLXrXM67jN0KIGTJqL088McG8cpdzU5FAoiv/gCXauWyKph1MGSlMTJ2/oS/vZbGNesJfePPzD064f+5o6kzHoT6cLuL5kMv9Gj8H/ooXKN7thNJvI2by4O2kr8+St8fKj75RdoqiDXiiAIQnmYrXayCszIZTICPNXX1LqUiqoVgUp1
EIFKxVgzMrGcS8C4bj0ypRJDv76oQkMdeTxMp09z+vY73F4fOusNfO6+u1r6asvNJeubb7DEJ5C9YgUoldSZP4/4R8a73IUUPud9vPr1K9e97UVFWJKTMZ88RcG/B1CFhODZvXtx0b7r+IVBEAShtqiVeVSEmqf090Pp74e2ZUuX7XJPT1QREVgSEly2a91UOK4KCoMBQ69exUUMAX2nTsULgiUJZVAgvsOGo2nQAEmyY01NJeenn9HddBNKf/8y72tJSSH/r7/IWr4cyWrD+847MfTsedlkdYIgCEL1E4GK4EQVFETIKy8T//AjpUYtvO78H4pyVEiuTJIkOdaSKHx9sSQno2ncmKApU0j76CPSPvwQAHV0NIETJyJZLGXez5KSQsLjT1B08KDjWOrRo2R98w11P18qghVBEIRaRmzgF0rRtWtH3a+/RtuuHTKVClV4OCGvvkrw08+g9Pau1r7IdTpUdSNRhoVhjovDo2kTgp6cwrmpU52CDfOZM5x76ilsWdll3q9gz16n6y6wxMWRs3o1UoldTYIgCELNEyMqQilynQ5dm9ZEzJ+HVFiITKFAWc0jKVC8jgS7nYAJE7BlZKCpF4MyNBTjLz87iv85sdlIX7CA0FlvoDhf9NKpuaCA7O++c/u8nJWrMNx2W3HxxMDAcmXaFQRBEKqWCFQEt5Te3lDNIygX2AsKyN20maTnnnOazvG+5x48u3Zxe13hgQPY8/NdBioyKLuujlxO7tpfyVi6FP8xY/C5ZzDKgICr+TEEQRCEqyQCFaFWsiQmkvjUU6XWyeR8/z0ezZujadgAc8I5vAcORN+lM0gS1rQ0CvbsQaZWu7ynXKfDZ9gw8t3k6PG67Tby/voLe04OaXPmYDp9mpAXX7iqqsqCIAjC1RFrVIRaKfuHlW4LIWZ9+SXeQ4YS8cEcbJmZnJvyJOcmTSbrq2V4DxzoyLDrirZ1K7Q3tS91XB0Tg0ezphTu3es4Zvz5Z6zpGVf/wwiCIAhXTIyoCJXOkpaGLTML7DYUvr4og4LKnnK5hGS1Yo6Lc9tuTU1F3+lmEsY/iiU+3nHcfPYsCY9NIPKLL9B3uMnltaqgIMLffZeC3XvI+vprsFnx7NkLdd1Ikl58qdT55tizaOpFu7iTIAiCUB1EoCJUGslqpejQIc499bQjgFD4+RHy6ivou3RxuW7EFZlSib5LZ/I2bHDZru/WDUtsnFOQUlLqW29SZ9Eit1lqVUFBeN/eH89bumLNyiJx8hSKDh92ea6Y9hEEQahZYupHqDSWc+eIHTnKKYCwZWZybuIkzCdPVuhent27u6y3owwKJHDiE8g99YR/MIeIBfPxufdeKJFNtujQYaQi02WfoTAYUHh5IfNwXUdD4eODKiysQv0WBKF2yC2ycCIll/d+O860lQfZdCyVZGNRTXdLuAJiREWoFJLdTvbPP1+sv3OJtLkfEf7+e06FA8uiCguj7rKvSHrpZce6EW3btoS8/DLnJk66WFFZqcT7zjsJnT6dpJdfBklC7u0NivLF4EofH8JmvUHsqNFYk5Mdx2U6HRELF6IMDi7XfWoDu10iNbeI9DwzkgT+nmqCDBqU5fxd1BS7xYItLQ1rejrI5Sj9/Yu3h4sq1sIVyiuysGrfOV7+6ZDj2Dc742gY7MnSMR0I87lMBXWhVhGvBEK5WDMzsWVmYi8sROHjg9LfH7lO52iXzGaKDhxwe73p2LHia8sZqMhkMjQxMUTMn4c9OxvJakWm0RQHFImJJTpmJeeHH1B4GfDs0YO8TZvwG/lAhbYVq+vWJWr5NxQdO07RgQOo69VD274dMrkcm9GI8hqopGyy2Ngdm8Xkb/eTllscLHprVbxxdwu6NwrEU1M7/6nb8vLI27SJ5Fdfw56fDxSPZIW++Sa6jh1RaK/PqrE3kgKTlfR8MyaLDb1GSbCXBwp51dbTSjGanIKUC46n5PHJllM8f3tT1MraHcALF9XOVy+hVjHFxnJu8hRMR44UH1Aq8R0+jIBHHnEEBDKVCnVMffL/3uryHqqICCSrFWtGJkr/y1c4vkDp4wPnA4X8HTudg5QSsr9bQcj015AsFnyHDKlwsjZVSEhxYcKuXbDEJ5DxySfk/70Vhbc3/mMfRNe+fa3OqRKfVcioz3ZitV/cKZVTaGHC13v5aUIXWtXxqbnOlcF86jSJTz/jdMyWnU3CY49R76efUDSoX0M9EypDUnYhb60/yuoDSVjtEr46FU/2acgdLcPw07tOI1AZfjuc4rZt+a54Hu4WI0ZVriEipBSQrFYsSUkU7N9P/s5dmGJjMcfFUXTiBOa4eFJmzrwYpABYrWR9+VVxUb/zydhkCgW+Q+4FN7t7fO8fQfxDDxM3ejQ5a3/FmplZ4X6az5x222bPz0ddrx5hb791VVl0TadPc3rQILK//gZLXBxFBw9ybvIUUt6ejTUr64rvW5UsNjvLtsc6BSklfbjhBHmmsmsg1QRbfj5pCxe6brTbyVz2FfbL1G4Saq/0XBOPLtvDj/sSHX+bWQUWXvrpEGsPJmFz8/daGbIKzG7biix2JDepD4TaSQQqNzi72Uz+jh2cvvMuYocNJ27kSE4PGEj299+Tu24dcWNG43X77Rj69y91beaSpVjT0hzfK8PCifjoI+QldvfIVCoCHn0U07FjmE+dwnTiBIlPPknG4k+xuUqDXwZ1VJTbNrleh8Lb2+1On/KwGY2kvPkWUkFBqTbjzz87rWGpTYosNv5LzHHbfjw1l0Jz7athZC8owHzqlNt207HjSEVi8eO1KjGnkP3xrv8u3/3tGClVuLC1V+Mgt203Rfmir6VToYJr4r/WDc5yLpH4R8aD1VrioIWMTxYROmMGyBUkTXueOh8vxKN5M6SiIvL/3krhvn3Y8/OxF158sVFoPfC8pSv1fvkZS3IydpMJLFayV60i99dfnZ6buWQJvkOHoPD0LHdf1VFRKIODsaaUHtb1fWDkVdcjsuXmUrDV9dQVQO7GTXg0aXJVz6gKHio5jYIN7DrresSneZg3njYT5vg0sFmRGwwo/f2ruZelyfV6NPXrY3GTM0fTpLHbHVlC7Xc0OddtW1aBhXyT1W371aoXqKdNpA/74rKdjivlMl4e0BQfXdVNOwmVT4yo3OByfvrJOUgpIWv5N3jfdRdIEpmffw42O7nrf0PfuRMRc+eiCA0t9UYiU6lQhYWha9sWZUAA8Q8/XCpIAUCSMFVwy7IqJITIpUtQx8RcPCiX4zNkCH73j0BeIiOtJTUNc1w8lqSkck8fyM7fz227qnbG9SqFglGdo3C1PjHcR8vbnfxIf34ap/r25VS//sSNHkP+rl3YCmt2tEKh0xHw6Hg3jQr8Rjj/NxWuLSFe7oNMpVyGRlV1RT+DDB4sGNGOibc2wFurQi6DLvX9+XFCFxoGl29Bv1B71M5XXqFa2K3Wi9t8XTDHJzhGKYoOHcZw222Yjh/HdPw46pgYIt57F2WQ+1GMyy1olWkrvphNEx1N3c+XYs3IRCosQOHrh8LfzzEyYzMayd+xg9S3Z2OJj0eu1+M7YgS+949
AFeR+OBhA7uODofet5P72u8t2z549K9zf6hLhp2PxyPZM+e4AOYXFgZlOrWDlPTEkPzjKaRTKdOIEcaNGE/Xdt2ibN6+pLgMUryt6/32SX3kFu9EIFCcJDHvrTVQRETXaN6GY2WojI794y7unRomXtnzBY/0gT7y1KsffY0n/ax1GQBUupgUI8fZgYq/63NchErskodco8S5n34XaRQQqNzC5Uom2TWvyNm1y2a6JicFyLgEAZXAwthKLSc2nTlF07Bi6Nm3c3l/h44OmSRPnhbjnybRaNHXrXlG/lQEBLnfgSJJE3p9bSHz6accxe34+GZ98QtHRo4S99SZKX1/3/dXrCZo6lYLde7BdstjXb+zYywY6NUmrUtCtYSC/TrqFtFwTdkkizFuL5s/fXU6VYbeT+t57RMyZU6PZdxV6PV59eqNr3QprRgbIZMV5VCpYdkGoGonZhSz+6zTLd8VTaLHRtX4A0/o3IdJfS1a+hawCM1q1An+9Gj+9xunaEC8PvhzbgQc+3ekUrLSJ9OHp2xqhq4Z1IkqFnBBvMX14rROByg3Mlp2NvlMn0rULkQoLS7X73n8/qW+9CYDPkCFkffWlU3vOylV49e+P0tvb5f2V5z8Zx464H3tuiflquZzwd95BcZVrSi5lTUkh9e23Xbblb9mCNTW1zEAFzudUWfEduevWk7txIwo/P/xHjUQdE4PCzc9ZWygVcsJ8tE7bLhPcBKEAhXv3YS8oqPEyATKlElVoKKrQ0Brth+AsOaeIBz7dyam0i4ve/zqRTr/mWazal8Dn/8RithUv0m4Z4c2Hw9oQFXBxIb1cLqN5mDdrJ93CqdQ8MvJMtK7jg8FDib+nptTzBMEdEajcwAr27yd9wULCZ79NytuzHYsa5d7eBE54jIKdO7GmpeMzdAiS2YT5zFmn62UKOTJZ2YmbNA0aEL1qJXmbNlGwYyfqevXwvutOlKFhyNWVO/Rrz8tz2oV0qaIjR/Bo1Oiy91GHh+M3ZjQ+Q4cgU6qQe1y7L6rqiHC3bUp/vwrnmxFuHP+dy3EKUgB6NArkXFYhi/4643T834Qc7v90B9+P7+w0giGXywj30eKpURCbUcC8zadIMRZxa+MgejcNJsJXhyBcjghUblDWrCzSP/qIov8OkfrOu/iPHo0yKBCZWo0yKAhrRgbKoGB87r2HnJWrSH2r9EiFz/Dhl/00LpPJUEdE4PfAA/gOH16ladFlak3xYli76624Fdm6LJPLK7QjqbbyvvNOMhZ/Ci7yRviNHYuiFiexE2rWmoNJpY7d1Tqcl3/6z+X5CVmFnM3ILzXVkltkYfnOeGb9etRx7K8T6czdeJIV4ztRL7B8/85SjEUk5RSSlmuijq+OQIOm1ozM5BSayS4ont7y1qrErqJKJgKVG5RkNmNJOAeA+exZkqdPd2r3uv12Qme9gS07m8J//y11vUerVug7dqzQM6u6dovCz7c4jf7GjaWfrdOhjrnxspyqQkMJe3MWic+/ADab47ihb18Mt9122REx4cbl71n6zVatlGMscr+t+ERqLjfXc976npZrcgpSLsjINzNz7RHmDG2NwaPsRa4nU/MYvWQnCVkXp6g7RPnxwbDWhNZghlm7XeJkWh4v//Qf208Xr2trV9eXGXc2p2GwZ62vs3WtEIHKDUqu1aJp1IiCHTtctmsaNUKu0SAPDiZi3kcU7NpF9nffgUyGz9Bh6Nq1RRUcjHR+9KI2LHxUeHoS/PzzmE6dwhIb6zgu02ios3ABquDauxi2qsj1ejxvu42YNm0o2LMXe35ecTmA4ODLrtcRbmyD20aw+JIpHptdQqdWUGC2ubwm2l9f6tjWUxlun7HxaCrZBZYyA5UUY1GpIAVg59lMZqw+zNv3tsRTUzO7eRKyChg8/x9yS+SE2RObxeAF/7B20i1EB5T+fQgVJwKVG5TCy4vAyZOIHX5fqTaZVotXiUy0quBgvAcMwLNHD5DJUOj1WFJTyfvrL7J/WIlMrcZ3yL2oo6NrPJGYOiKcul98junUKQr37UMVEYGubTtUIcE3bDVehVaLIjISdWRkTXdFuIaE+2iZ1r+x02jI6n+TGHpTHZZsPVvq/ECDhhgX0zgmi+ugBopnJO2XSWefmF1YKki5YN2hZJ7t37hGAhWbzc53uxOcgpQLCi02lm49wwt3NEGtFOvArtaN+cotAKBp2JDwOXNInj7dsR1XHRVF2DuzUYWHOZ0rSRK2nBzyt25FHRlJ2odzKdy719Fu/PlnvAbcQfC0aTUerKiCg1EFB+PZuXON9kMQrmVeWhX3dYikZ+Mg1v6bRHahhb7Ngon005FTaGHVvnOOpU+Rfjo+HdXe5TRMl/ru10G1ifTB6zLTPheqgbtil3A7ulPVck1W/jrhfvH+1lMZ5BZZ8fcUgcrVEoHKDUyh12O4rQ/a1q2wZWWDUoHS19dljhLTyZPE3v8A6jp1MPS+1SlIucC4eg3egwa5DRAsaWlYEhIoOnwEVUgImsaNi0c6xM4T4TpjsthIzTWRVWBGrZTjp1cTZLj28nkYtCoMWhUN+zhnc33tf814olcDUnOLMGiKtxsHu8lEG+LlwaA24azcd87puFohZ/r/miEhcSIlF6tdwkenIsTLw2ntVB0/9zuDNEo5njVUt0etlBNo8ABc1zMK8FSjVtb8lPj1QAQqNziZXI4qJARVSIjbc6yZmSQ+8yz2nBwMo0djXLvW7blZX36Frm27Ult6LYlJxD/2GKajF4eR5Xo9dT79FG2L5iJYEa4bWfkmvtkZzwcbTmCyFq/hignUM39EWxoGG66LBcwGDxUGD5XLNRg5hWaSc0ysP5RModlGn2bBPNW3Ed0bBfLxn6fJyDdxcz1/Hu9ZHxkw9OPtnEgt3gYdaNAw867mdKkf4CgcGGTQcFOUr8taVmO6RBHkVTM7f3RqJQ91i+aPIy4SKgKPdo+57CJhoXxEoCJcli0ry5FdVqZSORUivJS9sBDJ7jwUa8vPJ2X2bKcgBYqzxsY/9BD1fvoRVZjzVNOVsubkYMvMxJaZ6Si+V9NTUcKNZdOxNN5e71ya4lRaPkM/2c7qJ7pe17lDsgvMfPb3GT7ceLGO14I/T9GjUSDv3tuKrvUDsNolDBolGflm+s3ZQn6JqZu0XBMPf7mHVY91pk1k8WJvf08NHw5vw/RfDrP+UDJ2qXgk5cEuUYztWg9NDa4BaRRsYGKv+k4/L8DYrtG0iKjdCSKvJSJQES5LKlG0sGDvXjy7diXrm29cnut95/9Q6JxfiG2ZmeT+9pvL8+25uZhOn3YEKvaCguI6PhYzcr0eVXBwuftpSUkh+bXpTtuTNY0aETH3Q7GQVKgWqcYi3v3tuMu27AIL++Kyr5lAJd9kJTPfjF2S8NSUL5vs2Yz8Um/aAJuPpfHboRSGd7z47/CLbbFOQUpJ7/52nPkj2jrqCoV6a5l9Tyue7deYQosNT42SIC9NjQYpAD46NQ/dUo8724Sz7VQGdkmic4w/gQYPUVeoEolARbgshbc3Cj8/bJmZ5P35J3U+Xohx/fpS9XBUdeui79
Sp1PWSyeSUw+NS1rR0ACyJiaS+9z7GdevAakUZGkrws8+i79zpsonl7AUFpL73fqkcKqZjx4gf/yiRny9FVckp+wXhUmarnXPZrneoAPybkM3AVpUzeliV4jILeOvXo6w7lIzNLtEszIvpdzanWZgXHm6qHlvtdr7cHuf2nov/Pk2fpsEEGDSYrXZ2nMl0e+6hxBwKzFanAoieHko8PWrfW9aFdTyudjwJlUOs9LnO2PPzMcfFYVy7lpzVqzGfjcVWss7OFVAGBRH8wvPF39hsZHz6GXUWL8Jn6FAUPj4oAwPxH/8IdZcucbnWRa73ROEmK6xMq0XbqiWms2eJe2Q8xtWr4fwIjjUpiXOTJ5O/Ywf2IvfTTTajEUtKSvG1LphPn3ZdmE8QKplKKSe4jDUTTUJrtq5SeSRmFzL8k+2sOZiEzV68redQopEhH2/jZGqe2+tsdonMPLPb9pxCC9bz91PKZdQPcv/GHu6rFdt6BQcRqFxHbEYj2T/8wKl+/Tn35FQSn3qaU/37k7lkKbbs7Cu+r0wux7NbNyKXLsV31Ch8h9xL4vMvYM/LJXDKZIJffBHfoUPdFpVTBgUSOGVy6ft6eFBnwXxS359D0b//Yj5xwuX1qW/PJu+fbZjjnD+t2Yy55O/cSfyECZhOnip71CYltfw/sCBcoSCDhkm3NnDZ5qlRclN0+cs41JTdsVkuR4Vsdom31h3FWKISckkapYL+Ldwvyu/WIBBvXfGIiFwuY+hNEcjdrCuedGsD/PQiDb1QTAQq1xHzmTOkvDHLudaNJJE+fz5Fh49c1b0VBgP6mzviPeAOzk19CvPRoxjXrCX5lVc5N2kSZ4cOxZJYujYIgEyhwNCnD6FvzER5fvpFplYT+voMMr/8CmtKCkVHj7m8FsASHw8WM2eHDcd8rniLo91iIff334kbOYrCXbuRUbzQ1x1VmKjMK1Q9mUxG32YhjO9eD2WJd+FQbw++eehmwr1rLt17eUiSxG+Hkt227zqbSYHZfQr9rvUDCHeRS8VDJWdCz/poVRenbsJ9dXz8QHt06osjJwq5jEm3NqBd3dof0AnVp/ZN+AlXxF5kImPJUrft6R9/jEeL5igMBrfnXI7NaCR1zgcui/5ZU9PI37Edn7vvdnmt0scH77vvRt+5C/bCAmRqNZLZTOLTz6CuWxdloPukUDKtFslqw5aZSc7PvxDw8EPY0tJIeWOm45zcjRvwGjiQnJUrS12vadoUZWD50+dbMzIwJyRQuHcvysBAtK1bowwKqvRqz8K1zWKzkZ5bvNhUp1Hie74Qnb+nhom9GnBfh0jS8kxolAoCDe7zjNQmMpmMsDJq5/jp1MjL2F4d5qNl+cM3M3fDCX7cn4jFbqdbg0Cev70xdf2dFxFrVQq6NwrgtyndiM8soMhqp16AngBPjWNrsiCACFSuG5LZhDXZ/Scha2oKktn9/HF52AsKKNy3z2173ubNeN95p9u6PzKZDFXIxV08hYcPgyRhPnsWdXS0I3i5lPfAAeT+tr74GX/8ju/wYVgzMrDnFzjOyfn5F8LeehPJYinO83J+GkjXoQOhs2ahDCjfFmVLSgoJkyZTtH//xX6rVETM+wjdzTeLYEUAICmnkCVbz7Jse/HOlTaRPrx0R1OahBrQqpXoNEoiNUoiXdS+qe3uaRvBJ1tOu2wb160egYayd//U8dPx2p3NmNynIZIEBg+l06LYktQKBRG+umtmJ5RQM8TUz3VCrtOhu+kmt+26du2Q66/uRVOmVJaZk0QVEVGh4oQKT084n+gt87PPCH1jJjIP50+d2jZt8OzWjdwNxbt55J6eyJTK0gnibDYSn30OuV5PxNwPqfv1MuqtXUP4hx+gDi/fLgu72UzG4k+dghQAyWIhfsLjWJNrz4JcW34+5oQEzLGxWDPcF30TKl+qsYhHvtjNJ1tOO7bX7ovL5p6F/3A46eoWrtcGYT4ezBrUgksHTm5tHMSAFqHlSlinVSsJ89ES7qt1G6QIQnmJEZXrhEypxOeee8hctgypoMC5TaXCb8wY5B5XN/SsDAjA/6FxJL/8ist2bzfTPu4o/P3xGjAA408/UbBzF5JdIuKDOdiMRsxx8WgaNMCSmMi5p5527ATyGz0Ghacndj8/lEGBWFNL1Nqw2chevpzc338n+ofvy8y264otPZ3s77933WixULB7F+rIOhW6Z1UwxyeQOvvt4uDNZkPToAHBL7+ER/PmKLS1ew3E9eBMej7/njOWOm6XYPovh1gypsM1vRDU00PF/1qF0ameP/+cyiC3yELnGH/CfLTlyqUiCJVNBCrXEVV4GFFffUXSSy9SdOgwAJqGDQidMQNVncp5gzX06kX+jp3krllz8aBCQejM193u+nFHodcTNPVJkCSMq1dTuHs38bt3E7FwAba8PBKffRap8OLuA6+BA9G2bAGAMjiYsHffI/7BB5EsJXYhqFSEv/sOyqDyr0m5QLJanZ7n3FkFyqBgCv/9l5yffwabHa+BA4rX11Rj5ltLUhKxo0ZhTUx0HDOdOEHcqNFEfbscbYsW1daXG9Wfx90XojuQkEO+yVprAhWT1Uaq0UROoQWtWoG/Xo2P7vJ902uU6DVKolykyBeE6iaTpMvU2K7ljEYj3t7e5OTk4HWZpGA3CmtmJracHJAkFN7elf5Gas3KwpqWRsHu3Sj0nmjbtEYRGHjFn+Zt+flYU1KwZRXX8rAajWiiorDl5GD8dR0yuQyv229HFR6OskQ+FrvFguXcOXJ++omi/w7h0bQJ3nffjSosrEJrSewFBVhSU7GmpZH88suYz5wtdU7wtGkU/ncQ4y/OuVo8+/Qm9JVXXBZyrAo569aROHmKyzZdhw5EzP0Qhfe1k7rbZrNTkGPGlG9BoZLj4alC61k73uTdWfzXaV5f43oXnYdKzsapPcpckFpdMvJMfLHtLAv/PO2oOdQx2o/Z97YisoxCf4JQXcr7/l2lIypbtmxh9uzZ7Nmzh6SkJFatWsVdd93laJckiVdeeYVFixaRnZ1Nly5dWLBgAQ0auM5DIJSP0s/P6Q290u/v64vS1xePhg0r5X72ggJSZ88mb9Nmp+P+48cT+MTjxWtZXJCrVGiiogh84gkkswWZWuV2jYw1JwdbejpFhw4j1+vRNGpYvFXaZiP3jz9IfPY51PXq4f/wwyRNe9755w0ORu7tVSpIAcj7/Q/y77gD7379ruyHr6C8jZvcthXs24e9sPCaCVSK8i2c3JvK9lWnMBUUT+0FRRnoPbopviG195P8rU2CmLn2CK4+4g27qQ4B1Tg9YrHaScop5O+T6ZxIzaN9XV/aRPoSZNCwYk8CH2xwTme/40wmoz/byTcP3+x2F1JGnokCsw2FXEagQY1KFAwValiVBir5+fm0atWKBx98kEGDBpVqf/vtt/nwww/5/PPPiY6O5qWXXqJv374cPnwYj6tcTyHUbnaLBXteHjKNhoKdO0sFKQAZCxfi2b0bujZtyryXTC5H5uH+zcGank7qe++Rs3LVxWvUasLefRdN40YkPvtc8e6jU6co3LuP0Jmvk
/7xJ1ji4pCpVARMmoTxl1/c3j9r6efItVpkSiWamJgKr42pCFUZC4OVfn5QgcXMNe3csSz+XOacPyf1bC4/vrePe55rj8Gv5l8DCkxW0vNM5JqseGqUBHhqCDZ48M49rXjq+wNOwUqTUAOPdItBraye/wY2u8T++Gzu/3SHY8RkydazBHiq+fqhm/l2l+t09qfT84nLLCgVqOSbrBw8l8P0Xw5zOMmIXq3g/pvrMqZLNCHeNf/fQrhxVWmg0r9/f/r37++yTZIk5syZw4svvsidd94JwBdffEFwcDA//vgjw4YNq8quCTVEstkwJySQvXw5+dt3oAwMxOeuOwmY8Bjp8+aXOj/zy6/waN4ceRnJ3C4nd+NGpyAFQDKbOTdpElHLvwGZjAvvONkrVlB4YD9+o0ahDAhA07AhCm8vsr/91u39bXl5FP33H+lzP0IVHkbk0qWoK2lN0KW8BwwgY+HHuPo47/fgmGqbgrpa+Tkmtv14ymVbgdFM6lljjQcqKcYi3vvtGD/sPYfVLqGQy7izdRjP9G1Mv+YhtIn04Y8jKaTmmujZKIj6QZ7VmislxVjEuC92O4KUC9LzzDy94gCD2ka4LZB4LDmXm6KcR133x2czYvEOx/f5ZhsfbznNntgsFtzf7rLbkgWhqtTYx68zZ86QnJxM7969Hce8vb3p2LEj27Ztq6luCVXMdOwYZ+4eROaSpZiOHCF/yxbOPTkVm9GI7/Dhpc63Z2c7dvxcCWtaGhkff+K60W4n948N6Nq3c+7j8ROkzJjBuUmTkKlUKHx88LrtNrfP0He6mcID/wJgOZdI0gsvYs3JueI+l0UVEkLYm7NKjZx49uyJ9+23V2h7eE2yWe3kpLov3pd0ump+f+WVW2ThjbVH+HZ3gqM+jc0usXLvOV795RA2u0S9QE8e7hbDi3c0pUv9gAoFKem5JpJzCsktKp2OPjmnkF1nMlm17xz747NINbquc5WQVUCOm3T2BxJyaBziPrmjr05Fdv7FnEXpeSZe+fmQy3N3x2YRn1ngsk0QqkON7fpJPp+cLDg42Ol4cHCwo80Vk8mEyWRyfG80lt4mKNROltRUkmfMKLV9GiDry6+o8/HHZH37rVPmW0O/vsivYsutZLNhTXVf58eSmIjCx8dlm0fzZsj1OmRyOV79+pL5+VLn7dCAwscHz27diH9kvONYwc6d2DKzUFbBWhG5Xo/nbbcR06YNBbv3YMs1ou/QAWVwcJWuS6pscoUcrUFFYa7rN1q/0Jpdo5KRZ+bnA4ku29b9l8wzfRtdUX6Q9DwTW46nsWDzKdLyTLSt48PUvo2ICfTEQ6XgVGoeD3y6g8Sci8FJw2BPPht9U6mkaHmmsgN4rZsqx4GeGmx2ieTcIiQgI9+ExSaVWXBw2+kM2tb1Lf8PKgiV6Nr4+FXCrFmz8Pb2dnzVqaIhdqHy2XJyKNy332170dEjqKOjHd8rQ0Lw7HrLVT1TptXi0by523Z9l86gcBGvy2QET5uG0rf4xVkVHk7dr7/GZ8gQZB4eyNRqvG6/nbB33yHljTdKlRWQSgTTlU2h1aKOjMRn0N34jxqFR5Mm11SQAqD3UtOuX12XbUqVnIhGNfummF1ocblYtmR7he9ZYGb2+mM8+d0BTqTmkVdkRatWcjotn9NpeSRkFfDa6kNOQQrA8ZQ8nvn+X7ILnLM2Rwe4rz7srVUR7qulT1PnD4LhPlpm39uST7acxmKVeHTZHnq/t4VTaXmoFe7fDgJq+U4s4fpWY4FKyPkFhykpztk+U1JSHG2uTJs2jZycHMdXfHx8lfZTqESWsl/cZR4eoFAgU6nwHjyYul99edXFBJXe3gQ9/ZTLNoW/P/qOHQl+9ln8HhyDXF/8idWjRQvqfr0MTdOmTuerIyIIfn4aMet+Jfqnn5B5aEiY8Hip7cxygwGF15XXVLoRyOQyGtwUQtOuYVAi0alGr+R/k1uj963Z9RCel6k1c7l2V1JzTXy7q/j1SqdWsOD+dnh6KHn6+wPc/uHfjPx0J/9rFcbwDqU/fP1zKoPMElM1eSYLHko5w29y/UFt2u2N0aoUNAzyZMnom5h9T0sW3t+Oibc24OWfDnFbsxBeX3OY7aczAdh4NJXbW7j+t6aQy+hUr/pyBQnCpWps6ic6OpqQkBA2bNhA69atgeJpnB07dvDoo4+6vU6j0aDRiEVd1yS1Gm3r1hRekqL+Am3LlhhuvRWZQoHC1/eqM+leoGnUiIiFC0mZMR3LueLhfF2HDoS89iqqsOJdNIFTpuA3ciSSzYZcp3OMpFxK7uGBPCQEa04O9sIipKLS6wcCJ0++ooRzNxqdl5rOg+rTpk8k2akFqLVKDH4e6H00yOWXT9Nelfz1am6K8mXX2axSba0ivPG/goRue2Mv3uvZfo2Zt+kk++OzHcdOp+fz1Ip/efueluyLy+ZosnM6/gKzjXyTlVNpeczdeIKjSbm8OKAJMUGefLzlNGm5JuoF6Hmuf2M6RPuhlMs5nprLvM2n0KsVFFnt2M6vt2ke7s17v19caLvm3yQW3t+OQ4k5nCgxBSSXwbz72hB0DRRUFK5fVRqo5OXlcfLkxX38Z86cYf/+/fj5+REZGcnkyZN5/fXXadCggWN7clhYmFOuFeH6ofDyIuCxR0mYNLlUBljf+0dgXL0a85mzhL0zu9KCFCjOgGvo0R2Ppt9gNxqRKVXIfX2c1pDIVSrkFdhWrPT2JuT5aaijo8n64gvseXkoQ0IImjIZfbduyJQi6XN5aHRKNDolPsG1KwGZr17N+0NbM/6rPfxXIl1+k1ADH93X9opSyevUxWtGLmxzLhmklDRv00nmDm/D5mNpfL0jjmRjEWqFHH+9mr9PpjP+qz2OaalHvtxL+7q+fPxAO/z1aiQJrHY7xkIrAQY10/o3YffZLLIKLo5mymSguCQONFntTP52Py/e0QS5XMbx5FzCfLT0aBhIkLcHHm7WuwhCdajSzLSbN2+mZ8+epY6PGjWKpUuXOhK+ffLJJ2RnZ9O1a1fmz59PwwokEhOZaa8dkiRhOnECyWIh+7sVFO7fjzLAH+//3Yk5IYH0uXMBiPrhB7TNml7mbrWDZLFgTU9HsliQaTxQBYuRlOtJep6JVKOJZGMRwV7FOVQCrnCbbmxGPr3e/ZPGIQa6Nwxk/mbX27MB5t3Xlk+2nOLxXvVZ/NcZWkZ4M6pzFAPn/u0UdFzgoZKzZPRN3Ld4B5IEKoWMYTdF8kSv+pisdn45kMgfR1Lx91Qz7pZo/HRq+ry/xeWzAzzV/Px4F8J8alfwKFx/akVm2h49elBWHCSTyZg+fTrTp0+vym4ILlgzM7EXFhZPs/j7X1WekvKSyWRo6tfHdPIktowMDL16Ycs1kjJ7Nrb0dMd5hfv2XVGgcmGHjz0/H5nGA6W/H3Jd1b7YylSqCtc4Eq4dAZ4aAjw1NA27+g9BgQYNswa14KONJ/EvY3HqhUWtBxJymLBsH988fDORfjpSjEUugxSAIoudjDwzCpkMqyRhsUl8uT0WSZJ4/vYm3Ncxkv+1CkOjkhNo8CAr30zX
+gH8fTK91L3aRPrgqREVj4XaQ4xPVwJLcjKm48cpPPgf6qgotK1boQoJQVYLU0/b8vMp+u8/Uma9ienoUeR6PT7Dh+H3wAOoLtkqXhVkcjlynY7cTZvI/eMPl+co/Cq+48OalYVx7a+kz52LLTsblEq8br+doCenVGmmWEEoL51aye0tQmkZ4U2+yYZaIcdss5c6r3+LEDYeLd5kYLbZWbY9lpmDmpOeV/a6HblcxqUfC212iYPncpjzx3GOpeRRx1fL5N4NaRPpw+x7WjJt5UE2lyiyeFuzIF4e0IyE7EL+3hWPRimna/0AAr00eHmI4EWoGSJQuUrm2FhiR43GWiL3i1yvI3LpUjyaNat1CbgKDxwg/sGxju/t+flkLv6Uwn37ifhgTrVkNlX4+mLoexu5a38t1SZTqdC2aFmh+0k2G8a1v5IyY8bFg1Yrxp9/xhwXR52PPkIZIHYtCDXPU6OkcYgXJouNT0a246EvdmOxXQwvGocYuKt1OOO/2uM4tv10BsZCK746NSFeHiS7SABn0CiRJByLZQFuivKlaZg3Qz/Z7jiWmW9mzNJdPNevESM7RzFnWGsy880Yi6x4eSjRqhTMXn+MlfvOOd3/mb6NuK9jZLkqLwtCZROBylWwZmeTOO15pyAFwJ5fQPzDjxC98odaNS1gTc8g5fWZLtsK9+zBnHCuegIVvZ6gqVMxHT6C+ezZiw1KJeFzP0RZwXUe1tRUx/qWSxXt348lOVkEKkKtolEp6Bzjz4apPdhxOoMz6fk0DDZQZLUx6dt9Tmnx/T01qBVyfHQqPhjWmvs/3eEU3Mhl8MKAJny+7azTM+6/uS6vusk2++7vx7m9ZRiRfjqn4OPn/YmlghSAt9cfo2O0H3vjssgtsnJrk2AifLVXtKhYOM+YBAXpYLOAPgA8Q0ApAkFXRKByFWxZWRTu3eu2zZKUVKsCFXtBPubTp922F+zeha51q2rpizo8nMjPl2I6cYL87TtQhYXh2bULyqAg5OqK/WO15+cXT/e4YTp+HG3zZlfZY6GkIouNtFwT6XkmVAo5/p7Fn/ZlsprdVnwtUSsVRPrpiPTTcSIll3s/3ka2izUo47vH4Ht+O3SbSB/WT+7G1zvjOJiQQ4MgT0bcXJfvdsWz80ym03UqhdztmhaLTSIxq5BIv4truDLzzXy8xf0C38+3xQLw84FEPtx4kj5Ng5h5VwuxdbmibFZIPgArRkP2+cKRaj30ng4tBoNWZAC+lAhUrsLlso/aall6f9n5ZGqSm8Rr1Z3dVBUcjCo4GM+uXa/qPjKNBpRKtzWBlCFVv/bmRpKVb2bF7nje/f2445N/kEHDgvvb0irCB2UZGU4F14K9PHi0ewyzfj3qdHxI+whurnfx36VaqaBeoCfP9WtMocWGh0qOSqFgVOcotpxI51TaxRwoF7ZDu6NSOgeVFlvxglx3MvPNToHN74dT6dMkjSFuks4JbuQkwNIBYClRSsScD2ungm9daNCn5vpWS4lA5SrIvbyQe3piz3NdI0Nd13WK8Jqi8PfHa+BAclauLN2oVKLr0KH6O1UJlP7+eN3eH+PPv5RqU/j4oCmRll+4ervOZvLGJW+oqbkm7lu0g9+mdKOuf83W6bkWeWlVjOgYyW1Ng9l2OgOT1U7nmACCvTQu14UoFXIMJQLCqAA93zzckVRj8ShXqLcHeo2Suv46YjNK19by8lAScslIiJdWSecYf5dTPwDt6vry1wnnWlef/HWaXk2CCBBTQOV3dLVzkFLSxhkQ1hb0Yqq6JPHR5yqoAgMJnDTRZZvXwIEo/WvXH5vcw4PAxx9HXb++03Fd505Er/gOc2wsuRs3YY6NxeYm+CrJXliIOSEB05kzWFJSytyKXpXkOh1BTz6JRyvnaSu5tzd1Fi9CWQ27mW4UGXkmp4ymJZmsdtYfcl9QVCibp4eK6EBP7utYlzFdomkUYqjQ4tUggwfNw73p0SiIRiFeRPjq+HBYm1LFCRVyGR8Ma1Oq2rNWpeSxnvXRKEu/Lfjr1bSM8GZvXLbT8ZwCi9MC3upgsdk5l1XIsWQjsRn55BVdeXX1aidJcG63+/b042B1XS37RiZGVK6CTKXCe+BAFF7epM6ZgzUpCbmXF36jR+E7ZAiKWpiAThUWSuRnn2I6eZKCbdvxaN0KrFbODr/vYjp4uRz/cWPxGz3a7XSQJTmZ1A8+wLh6DVgsKIODCXr6KTxvuQVFFVQNvhxVSAh15s/DkpSE6fhxlMHBaKKjUYaE1LqdV9cys9XOmfR8t+3/JuRUY2+Ey2kW5sXqiV35ef85/k0wEhWg4952EQR7eTim6Kw2O6m5JlJzTagUsPKxzhyIz8ZPXzxKkm+yEu6r5bkf/i11/64NAjB4VN/bSGaeme/3xjN3w0lyTVbkMujdJJhX/teMcJ8rr7JebWQyCG0Dh1a5bveLAaUYnbpUlWamrQ61JTOtJSUVyWxCplShDAqslTlUXDGdOs3pgQNLVf8FiJj7IYY+pedLrenpxD8ynqJDpXcUhL37Dt533FElfb1atvx8bFnZINlRGAwofHxqukvXnMx8E/cv3snhJNfrr14a0JSxXcVUW20Rn1nA0I+3Eemvo46vjrRcE3+dTGdc12ge7RGDVqVg+5lMJn6zj5xCC51i/HiiZwNmrz/GvvMp/tvU8WFS7wbM+eOEU9p/jVLOmom3UD/IfRXnymSz2fl8eyzTfzlcqq1pmBefj+lA4BVmDa5WmWdg/s2uR06GfQ2Na+frZ1WoFZlpbyTXYup0SZLIXvmDyyAFIG3efLTt2pUaVTEnJLgMUgBSZ7+Drn37akkeVxHmuDhSZ79D7oYNYLfj0boVIS++iKZhwwrvMrqR+ek1PHVbQx78vPTwtValoE+Ta+/fQbnZzJCTCCf/gPQTULcTRLQH74ia7pmD1WYnxVjE8dQ8VHIZP+4/R2JOEYk5RWzn4q6gj7ec5q424WgUch5cugsZ8MLtjelcP4B7Fmyj0GJznLsvPptHv9rLV+M6MGLRDoqsdrrWD+DFO5oQ5V99afZTck18uOGEy7bDiUYSsgqujUDFKwIe+BFWjIS81OJjSg30eAEiO9Vo12orEajcwCSrtcztypakJKwZGVjT01H6+TtykRQdPOj2GmtyMvaCQrftJdmMRmxZWUgWC3KDAWVQUJVsb7UkJhJ7//1YUy8uBCzaf4DY4fcRtXIlHg3ql3G1cKm2dX159X/NeOvXo443tDBvD+aPaEvYtTD8fiVsVojfCV8NAuv53X47F4JnMIxeAwENarZ/FAcpB+KzGbVkF3kmK3OHt+Hn/Uluz//1YBJBXh7Y7BIz7mxOVoGJZdvjnIKUCwotNtYeTObPp3tgsUsYPFR4a6s3U22B2epy+/YFx1JyaRN5DWztVaqgTkd46E/ITy0OgD2DwTMIVNfpv5+rJAKVG5hcpUJ3UwfyNm122e7RuDE5q34k87PP0DRoQPic99HExKAMdp+SXqZSIVNf/gXMHBtL0muvUfDPNgCUwcEEv/A8+k6dUBgMV/TzuJP391anIOUCyWIhfd4
8Qme+jkIvdqqUl49OzX0d6tC7SRAZeWZUShl+ek2pXSTXldwkWH7fxSDlgrwU+GkCDF8OuktGHq02knOK2HkmkxSjifbRvkT764vzjuSmFr9JmXORdIEUqn1B441Oc+UvycnGIh74bCcF5uJAQyYrrqTsToHFRkJmAZF+OmQyMNsktxWdoThD7uM96xOsr5kRSI1S4bbsAEC49zX0Ji+Xg3dY8ZdwWSJQucEZ+vQhff780lusZTL87r+fpFdeAcB04gRxo8cQ9d23eDRtikyrRSosPXLifdedKP3Lzm5rSUoiduQorCkpjmPWlBTOTZxEnc8+xbNz56v/wc6zm0zkbdjgtr1g507seXkiUKkgtVJBhK+OCN8bpMJu1lkocrNQOH4HFGQ4BSpmq41tpzIYd0mK/KahBj69vxWhKwZBcvHIpEwmQ9l4EKfbTsOo9KNhcMV2+1xwKNHoCFIA9sZm0a1BoFMtn5Jubx7K1pNp3NYsmJ8PJNIo2EBAGcUSAzzVqO2FkHYaLEXFP68+GFTVM90S4KlmUNtwlu+KL9Xmq1MRU01rZYTqJ7ZD3OBU4WHU/eorNI0aOY4pg4IIe2c2isAAQl5+iYiFC/AbPQprdjZFx0+gCgmmzqJPkGmdP8F4tGpFwIQJyD3KfuEq/PegU5BSUupbb2PNzHTZdiVkSiWKIPfrJhS+vsiUtStet9rsxGcW8OO+c7z/+3E2H0slKad802lCFTFdJnnjJSMtyUYTD32xxylIATiclMucP45TFNT64kFJQn3kB+oe+ZgV20/xzY448kzut9za7BKJ2YX8m5DNnthM4jMLKLLYOJft/Dfy/d4ExnSJcpn47dbGQUT66xjQKgy9WkG+ycqv/yVxd1v3623G3xKFfuX9MK8jfNId5nWAbXMhv3QFZley8s0cTjQyf9NJFm05zYmUXIyF7qdyLqVVK5ncuyFdYpzTPvjr1Xw1riOh3h5kFZg5mZrHvwnZxGbkk1/G71G4dtSuV2ih2snkcjwaNyJyyWfYsrKwFxUhkyvI+OorjD/8cP4kGYY+vQl/ZzamkycwdO+GrnVr6q3+haIjR7CmpqJt3hxVeHi5cscU7Nzhts107BhSUdkZfytCplDgO2wYOStWuGz3H/tgqT5b0tKxJMRTdOgQypAQPJo0QRUcXC0Bjc0u8W9CDvd/usPp03G4j5avH+ookqnVlICG7tt0/qXSnu+Ny3I7RbHq33SeGD6UiH+/dDqu/fdLHrhrJHctO8btLUPxdDENZLba2BObxYSv95GZX5xFVqOU80y/RnSJcR7JNBZaeee34yy8vx0/7T/HP6cy8PJQMe6WaLo3CiTAU4O3VkW/ZqEUWux8suU0Z9PzGdMliiVbzzrda2KvGBqn/AJntlw8aCksTlCmD4S2I4vnmtxIzzXxxtojTsnkZq49whO96vNg12h8yzmCFOLtwdz72pBiNHE6LY8Ag4Y6vjpCvT1IyCpkynf72X02CyjOFzO0fR0m92lAkOE6npa8AYhARQCK0+cr/fwwJ5zj3ORJFP1XYlePJJH72+/I1Gp8HxgJFI9UqMPDUYeHV/hZqjIy9ir8/UFZuVu71XUiCHrmGVLfftvpuKFfPzy7dXM6ZklKIv6xCZiOHHEck+t11Fm0GG3LFlUerKQYi3jw811OQQrAuexCnvn+Xz55oB3eooJt9bvwZrz3i9Jtt80Eg/O6rVQXFY4vMNvsWFy99FqL0GLCLhXno3EVlJ7LLmTkZzudRmpMVjszVh9hyej2dG8YwJ/HL45wHDyXw7jPd7N0zE0807cxSoXMqZCgSiGnSZgXWrWClXsT+GDDCe7vGMmS0TdxOMmIXCbj1iZBhJrjMHw61fUPtHlWcdp3L/frLXacyXSZ8XbuxpP0aBRIu7rlL9/hp9fgp9fQJPTidtbU3CIeXLqLE6kXp7Btdomvd8ahVct5um9jPFTXRsoIoTQx9SM4seflOQcpJRh/XYfC8+rngQ09ehTX5nHBf9zYSq/grPDywmfIvdT7dS3BL75I4FNTiV61kpBXXnZ6lr2ggNR333MKUuB8NeyHHsKSklqp/XIlPqvA7c6GHWcyHZ+ihYpLNRZxONHIjjPF1Ypzi8o/7YDWB3q9BP3eKt6hARDYCO77Fhr1B7nzm2D7KPdvvHX9dejzS6+zQKUjXyoOItwNTqzam1hqOumC9/84wUt3NOWbhzryeM/6eGtVBHpqmHl3c5qGeRHs7eG22nFUgJ4fHu3MkPYRfL83gce/3kuRxcadrcNoGGzAcGq125+H3KTSi4xLyLpMscPP/j6LyVp6p1FFJOcUOQUpJX21PY603MobpRWqnxhREZxYU8t4M7bZkMzl/wdvy83FXmRCrtM6LVZVBgdTZ8F8Ep6YeDEbLmC44w68Bg6skkyyCk9PFJ6eZdb9sWZkYly3zmWbPT8f08kTqMOrdpV+ThnbLwGKrO53cQjunUjJZdwXux11b2QyuLtNOM/1b1z+aQHPIOjwMDS7s3i7slJTfMyFOr5a2kT6sO+SlPMAL98aQtD250sdz2/zEJ8dKEQug+ZhpbM7m612Die5z/wbm1HAzrNZPL/qIJ3q+bHy0c54eigI9PRALr/8tv+6/nqm39mcKb0bggx8deqLoxCBjdxfqPMDhftRPovdXmaAnZ5nwmKzo7mKkdT4TDe1cygecSowi7Uq1zIRqAhOlGUsPEUuR667/BoJm9GI6fhx0hYswBIXj0fjxvg/Oh5NVBRynQ65RoPu5pupt2Y1plOnsOcY8WjSGEVgIMoaSL9/gWQ2ua3ADLjc4lzZ6gW6H7Hy1lZ/7orrQVJ2Ifct3uH0qVqSYOXecwR7eTCldwPU5X2TlMvBEHrZ0wINxXllFmw+xXe74ymy2IkO0PPSgKa0D5JgT4mF6HIlhS1HsjNoCL/8FcuzfRu7TFymVsppXceHP464/jARE+hJ4vlF19tOZzJm6S6+e6RTuYKUCzxUCkJd5cIJbwsePlCUXbqty5RSU19A8RqW3BS8ctPoFuPL17udF/sq5DJubRzEQ7fUI7fQihzZFW/PDvPR4qVV0qNhEB4qBQfPZXMkKRcAlUKGVi3e6q5l4r+e4EQZEICmcWNMR4+WavO6/XYUAWUvlrUXFZGzZg0pr013HLPEx5P7xx9EzJuHZ4/uyORy5CrVFa9xqQhrejrWzEzsBf9n76zjpKr3N/6e7pntLrq7JSVEUQELRLET82fHNe/16jWu3R2oGKQgGIAo3d2w7LLdMzudvz++bMzOzLLAUtd9Xq99KeecOXNmdme+z/l8ns/z2JHHxCCLiYnYvpLqdMji4vCVhZ9iUHfqeDIvFRAjmON7pDBvS0HIvofHdiDxbHDePMNwoNQasfT/xcpDXD0g46SMWSebNDxxYWduG94Gr8+PVimvIyBX/UDAXobXaaXSr2X2Xi/rdjr4/tZBtE/Uo2uwYLu9PgqqnAxtF89bS/bjClNZu+6cTF74ue5zm1th52CZlSRTMwhJjWlw3U/w7WSwHPnblEihzw3QY0pI6wuXFfb8DHOmoZbKuG
XyH8zeIqs1k4vVKXnlih4s3lXMzV+ux+31c16XRP5vdDsyY3THRK4AEgwqXrqsO/O3FlJicTKhZyr3jtLx9LztjOqYQLyhRdd1NqMl66cFIXDn5ZF//wM4t9aFkBlGjybxySfCWuN7zWZ8FRUEnC4kahXZEyYScIeWemVxcbT68QcUSZEN45oTrkOHyL/7Hlz7jthuS6WYJk4k4b77kMeH6mACfj/muXMpfCy0LK/p04e0N984JYnYpdVOvl17mI+XH8Ti8JIWreHhsR0Z1j7uuPw1/u74avUhnpwTXncFsPj+4SfFg8Pm8mJxepAAMTplxKqN2+vD6vKhVkjRhrnz9/kDrM2u4LpP19ItzcTNQ1vx3PxdtePIBpWcu0e1I7/KwRcrDwU99h/jOnHLsNbN84ICAaFHsZaA2wqGFNDHgyqMQWPJbnhvoHgM4E3tz/5hb/DssipWHazknat68cqve0MCLg0qOT/dPYSsuKZPt1XZ3by79AAf/hXssp0WreH1yT3JiNEKk70WnHFoyfo5i+Cz2fCVl+N3OJHqdcgTEpAqTl+JX5mWRvr77+GrqMBntSIzmZDHxoZNg3YfPkzB44/jWLceiVJJ8gvPhyUpAL6yMnyVlaeEqHiKi8m98Sa8BfUqE34/5lmzkMfEEHfP3SEZPxKpFP3IkaS8+B9K/vsq3pISJEolpksmEnfHHaeEpIBoG9wxog1X9EnD4xe9+8SWL9rjRruEyE7HRo0cTRifkROBzx/gUJmN//62h8W7SlDJpUzqm86NQ1qFjRhQymXENNJ6KrE4mfb1Btw+PxtyKjE7PNx5blvi9EqitAoqrG6mr8ll+f7QSmBWXDNWiiQSMdnTyHRPLTZ9WUtSAOT5a+k472Le7zUN+5gLWFUZPoW72uXloz8P8tT4zk3WrBRUOUNICkBepYPZm/J56uLOTTpPC85ctBCV0wRPaSl4vQR8Piq/+YaKL74Enw+JVkvsTTcSfeWVp2xhDIeaceXGUEMGPIePTDAEAsBRSrYnQSgbDu7c3GCSUg8VX39N1JQrw7ad5CYTxvHj0Q4ciN9uR6JUIo+NRao+tURBLpOG1wq04JiRFacjLVpDXmWoad7tw9qQ0MzttNwKG+PfXo7tyIi5y+vn4+XZLN5dwje3DCDZpKHK7ia3ws736w5jdXmZ0DNVTOaEIaTF1a6gSbD9JVYeny1cbcd2SaRVnC4sSTGq5XROPvlV5iKzg235FhZuKyTOoOLSXqmkRnUihB7ayjAt/xdqZynzSi+LeL5fdxVzz+h2JBqbRlTmbw3/OQf4cUMed53btuWzdJajhaicYngrKrD++Rdlb72FJz8fZVYWMddeS9ztt1H2zrsE7HbK3nobAgFib7vttFZWjgZ3dnYdSUFk50gUciRaLQF7qApfkZqCLPrUhIa5c3Ii7gs4HEHTRg0hkUjOuPTnFhw/koxqpt80gHtnbGJLnpiaUcqk3DA4i0n90pHLmo88O9xe3l16oJak1Ed2mY31hyoZ1k7G20v28/Hy7Np9czYX0C3VyEfX9iPJpMbu8mL3+NAqZXgiGMcB/LazmMUPjCCnws7CbUW12+P0Sj67vj/JJzn/pqDKwTWfrOFAaV115MM/D/KPsf2Y3Hsaxo3vhTxGltqLEdFxuDx+1mSX428gPtCr5EiPFk7q84DXCQpt2BDFGnh8fs5qbUMLgBaickrhdzio+Go65e/VfXjdhw5R9M9/EnfHHehHjcJvsaAbfA4oFHgrK1E2NoVzmuEMI7it/PobEh96iKJ//jOo9CtRKEh+8UUUp+j1qLKyIu6TaLVI1C13WGcyKm1uLE4PUomEKK0Cg/rECHtWnI7PbuhHuc2N0+0jSqskTq9s9mkQs8PLkt2hUzldUoxcPSADk0ZBqdWN2elBJZcGiWK35VvYcKiczDg97/1xgD3F1bSJ13Hb8DZM6pvG9+vzQs6rV8lRyqW8cEk3HhjTgZxyG1FaBSkmDUkmNRJHFVjyYNdP4HVDp4sgOgt0J+5V5Pb6+WDZgSCSUoN//3KAETdeg3HzR+AXk3S+5F7kj3idX4t1rMuppEuqkduGt+bT5dn8ua+uInTdOVmRM4dcVpG7tPZDqDgArc5lXJdrQ5x0azC6UyLGlkm5sx4tROUUwltWRvlHH4XdVzV7NukfvI9lwQLM83468gAvpvHjUSQffRzydECZkRGyzb5mDfLYWNLef4/q337HnZ2Npls3oiZPQpEWOUekuaFIT0eRloYnL/TLPebaa5AnxJ+ya2lB0+Hx+tlbXM2T87azMacKiQRGtI/nyYs6Nzq63RTUOJo2hNfnp8zqIgCY1IqwI7IOt5eSahdb88w4vT56pUcTb1CFjItLpZAeo8Xj82NxigX62kGZdEwy8O4fB8irdKCQSbigazLvT+3DvTM21R7XJzOaapePi99eXsvx95dY+WVHMf+5tBv5VQ5W7C8Per4nL+pMokGFXCYlSqukbX1RsL0c/vovrHqnbtuK16DTeBj3ChhOrGpYbnXx3fowxnVH8MthOe363gRrP4DoVuwc8SFXfn0wqNr0xcpDvHx5D6qdXjYdrqJfVjQXdE1CEq6i4nHCnoUw6+a6bYeWk3VBBsPaZvBng/dGq5Tx8PkdwkYRtODsQsvUzymEfdMmcqZcFXZf6ptvUvraa7izs4O2KzIzyfz8szOSrHgKCjh4yaX4zaEmVLHTphF7263g8SDVaE5L8J87J4e8/7uvzmlWJiNq0iTi77yj2d1vQSQ1e0tLcR04QMDpRN2hA7LYWGSGyGLOFgTjQImVcW/+FTJ+G61V8NNdQ0iLad4x4kKzg2/W5PLt2lwcbh+jOydy76h2ZMbqkB0ZkbU6PSzcXsSjs7bhq9enmDowk/tGt6t1ezU7PJRVu9iSV0UA0Chk/LazmP6tYnhs1raQ5+6UbOCagVm1epM3r+zJU/N2hHUm1qvkfH/bQP7vu83kVthpn2jg4fM70D01KnLFIGclfHZB+H2XfgTdJx3DOxWKgioH5/xnScT9Nw9txROjM8BWQqnfyKTPt4cV0Bo1ct6/ug9+oH2CPvKETmUOvNMv1AVXrqLk0ln8bknjkxU5VDs9nNshnttHtCEjpu732IIzDy1TP2cgJMrwoj111664DxwIISkAnpwcqn//neipU8PfZZxGyJOTyfz8Mw7fehve0jozNMMFFxB91RRkajWcYhFqfSgzM8n45GN8R3xUZFFRgjjomj/Yz2+3U/3nnxQ+/Ejd1JNEQsz11xF7yy1HFSa3QFQt3l92IKxHSKVdkIWbh7Zqts9BkdnB9Z+tZU9RnfX63M0FLN5Vwk93DabVkQpOXpWDh37cGvL46atzGJBp4uKEUsrUmby8NI/v6rVn9Co502/uzz3fbg77/LsKq9GrZJg0CswOD0q5LGJ8gtXlxe0L8M0tA2tdXGN04dsjNpcHp8uDLn87arlaaDkaYtXb0GYU6I4I9l3VomKhMoCiaZ9ZnUrGwNYxrD4YPu18TKdEUOlBpaeiqDosSQERnqhWyOideRT9WsWB8Fb9XhcJ3
1/IVfds4bwuA/EFApg0ihPP9vH5wFUFUgWoz+yb4P91tBCVUwh5bCzyhPgQh1PtgAFUL10a8XHm2XMwXnQR8lMkRG0qJBIJqo4dyfrhe7zFxfjMZhRpachiY0+rw2x9NGV6qTngLiig4L77g3Q5BAJUfPY5mp49MY4de9Kv4WxHtdPLygPlEfcv2VPCVQMyQszQjhdb8sxBJKUGVpeXt5fu57lLuqKUyfh6dWRh9rt/ZjPsqjbM33yY79YHh+5ZXV4Kq5zkNmLvvrPQQmaslq155sbChwExTxcXIasHREVnT1E17/6xn7xKB71SenPLpKVkrXkG5YFfgg92VAntiKMSiraLFlF1AaT1h3PuhuhWIG/cs8ekUfLkRZ2Z+M6KkPyhPhlRtI6vuyHw+huPfmhS1o//KDb4HjtxMc00wVWZA1tmwO75grwNuhPS+kWMTGjByUULUTmFkCcmkPbW2+TccEPQVIxEIUcia4T9y2SnbKz3WCGRSFAkJZ0yE7czEYFAAPOPM4NJSj2Uvfse2n79WqoqR4FCJiVGp6w1MmuIRIMKRTNN6Pj8fmaHSfOtwe+7SnhorJdonYS8ysgTYiUWF3aflJ2VEvpmRrMhtzLoz8DnD4SIZusjwaDG4/WL8L8EPbE6JeVhcnGMajlxjYxR291eftxwmH/NrwvU3F9iZfZWCV9f+QwDzNlQtrfuAe3OA4UG1n0Ei+tcpCndA1tnwHULIGNAxOerPU2Cnnl3DeHlRbtZvr8ck0bBtYMyuaJvGvGBSjD7QKkjWquurRw1hFwqIbUpzsCx7YQDrj8MqTGlg7aZbuQqDsLHo4XGpwY5K6DrZXDBS80iRG7BsaGFqJxCSCQS1F270HreXGwrV+LctQtN9+7ozjkHVZu2ODZtCvu46KuvOiUVioDHg6ekFHfOIfxWK6q2bZHHxYU1emtBHQJeL+6cQ0HblK2yUHfujN/hxLlzJwHPMST1ngVweLxU2cRritIp0Cia9lXi9fuRRyDd0Toltw9vw53fbAy7/7pzslDKm4eoSCUSjOrI16xVyvAHAqjkMoa1j2PpntBJnjbxej67JIGEvF94nhXY2ralYuQF/HtFNb/vrQLg153FXNo7lW/XhopOVXIp53aI54JuSZjUolXx30k9uOmL9UFaGIkEXr6iR9j4BJ8/QLHFid3tDbLPr4HXH+DhX4r5YfjDJPx8RISqMsDAaWAvgyXPhb54nwfm3QXXLzhqBUEpl9Ep2cgbU3phdXqRSCXES63I1r0uRLTOKsgYTOKFr/LMxZ257/stIee4Z1Rb4iK0sYKgT4Bzn4TFzwRvl0jh4jeblMEEgN8Plnwo3y+cdhM7gzEVdPHgscMfLwaTlBpsnwkD72ghKqcBLUTlFEMik6FMS0M5KVjIph3QH02fPjg2bAjaru7ZA92gQSf9uvxuN/Y1a8m7996gak/UpCuIv/fe02o+d6ZDqlCgHTgQ69I/kMXGkvSPf+AtL8O+dh2yqCiS//nsGVsROx7klNt4c/E+5m8tBODC7sm1AtRw8Pr85Fc5+HlbIZsPV9E1xcTFPVJIiVajbFBJHNA6hiv6pvFDg1HcB89rT6tjsFU/GiQSCVcNyAg78gswoWcKf+0rZUznJEZ3SuSNxfuC9CMJBhVfXGwk7cdxYBcaDRNgWvUS/xn/Nd9ntaNtohFfIEDHRAN7i61syKmsfbxKLuXT6/uRqgdlwAEKJUglDGwdy8J7h/Lp8mx2FVpol2jg9mGtSY/Rhvi9mB1uft9ZwjtL93Pz0NZ4GxqSHEFOuZ0qfVsSJFJofz6MfkaMKO+eD4EILZmyvaIt1MRWh0F9ZITcWgLfXwt560AbC3IN5CxH9v4gRt+6gW9vHcjLv+xmT2E16TFa7h3Zmr5pWrQ+C3CUiqNSB32uh5SesOwlMXad3AOGPwIxbZp0nfj9ULgFpl8iXl8N0gfCFZ+JqujO2ZEfv+0HSOvbtOdqQbOhZernNMFbVYW3uBjbylVIlAp055yDRKXCuW07Vd99RyAQIHryJDS9eqNIPPl9UVdODgcvvChsenDSM88QfeXkk34NZzPceXkcmjSZlBf/Q9Fz/8bTwHAu7p67iZk69ayvTuVV2JnwzoqQ9kSsTsmcOweTHmYqZ1NuJVM+Wo3TU7coquRSvrypP/0yY0IC6KrsbootTpbvK0MhlzK4bRwJBtUJe6nUh6eggPIqK+/vsvH5umBn0y4pRu4d1Y5bv9rAB9f04bzOiRwosfCPOdtZk10FwNsTMrlo211Q0KAKKpVRculMXtsbw48b8vH4AuhVMl6d1JM4vZKteWYSDCq6pehJtO9G+ce/wWmGjhdD9ysgSoz8u71+HG5h+ra7sJqfthRg1Ci4ok8aqdEaorRKFu8q5qYv1hOlVfDoBR15dGboZFENfrv3HNppbaA21WXz7PoJvpsa+U26cx3Etz+2N/bwWiqyN1ESfw6HqrzE6uSk+QtJ+utxJImdKRvzFjZPAD8S9pfa+G59PjGqADcPSCQjzoDa0MT2qKNKiIRVBkFgmoqqw/D+kPAp0D2nwohH4e0+4UW7AAPugAteaPrztaBRtEz9nMHwlpdT8uqrmGfOCtoe/3/3EnXV1eiHDiEAyLTNn+gaCdbffg9LUgDKPvwQ/aiRKOJPvfeIp6wMfyBAicLOHwV/sr1sO93iujE8fTjJumTk0jPjT1iRmkrmjG8p/+CDEJICUPbmWxjGjDmriYrPH2D2pvywGopym5vZG/O589w2yOrd+Rebndz59cYgkgLCVv7Orzfy011DQuzNo7RKorRKOiSdnPfKdfAgOddci6+8nGvuvZ+LLx3MzGwnVh+M6JCALxDg/iMtijd+30e/zGjaJpr4YKyeKpsCXwAyTU74JbRVa+t1K//doee7TXWVGqvLx61fbWBs50RevqIHxoAVfn0UNn9d98CCTbDmPbjpN4hphVIupdzm46bP17GzsLr2sM9XHuLukW25qn8GLywUrZ4qu4d4vQqFTBIiagVoE68jSq8FQwMNR2LXyJqPxC7HpfkoIo6H9nTlr59za7fFG1TMuG4++worePXzzeSU22mboOeGwVlkxen4+K9sZm0t44truzG4YxOJiibqmK8NEBqccCQFYNt3cO5j0OUy2PJN+GO6Rbb+b8HJw/9OPfosgn3duhCSAlD6+ht4sg8i1WpPKUkB8eUdCd6iImiKKr8Z4a2owPzTfAr/8Q92lG7l0vmX89K6l/g5+2deXPcil827jF0Vu45+olMEiUSCVKnEsuDniMdUL1p0Cq+o+WFxePhlZ1HE/Yt2FNWal9Wg3OaiwBxejFpmdVNmDR9gebLgrayk4OGH8ZULDYL7jVfRT7uGacs+4ZFtP5IpsfPQj1uxusTryKu04z5iYR8Vm0jW+udo8+MY5NXhhbhlba/gxy2lYff9srOY0moXVGYHk5Qa2Erhj/+A247X5+fbNblBJKUGby3ZT6nVRV6laNHG6ZWYNAoeu6BTyLEquZSXL+9BfDghrj4BxoapDig0MP5todk4BjjcXt5YY+avA1VB2we1
juXH7VXcPjObvcVWXF4/OwosPPjDVhIMKq4dlIkvEOCh2Xsprgj1ZGpWWCLnAuHzgNsOwx8Or0PpdoVombXglKOFqJxieCsrKf/444j7K776Cn+E9OGTCe3AyAp/dceOSNTNG9zWGHzV1ZR/+BEFDz2E97KxPLzjBZy+4MXO4XVw/x/3U2IPFTmeLgT8fgKuCCVjwGcN7yNxtkAuk2BQRW6/6NVy5LLgNk4k3UQNGsuxORnwVVbi3L4jaJvfZse6ZAmW2bOJqipFVU+w2zHZUOfHYUgSRmnXzBHthjALucUrCxLCNkRJtRN2zot8gTtmgqOSsiOJyJEwe1M+YzuLSbvnL+nGgz9s4WCZjfen9uH8rkl0TzMxpX868+8eQtfUCJUppQ56XAk3L4bOEyG1Dwy6C25fAUndQw53enwcrrCzMaeS7flmCs0O6isHyqxuZm4KJbLje6bwyV+hHlEAby7ez009dSy9Pp1BGVoqHCf5hiipS+R92ljxnsS0gluWwsgnhQYmayhM+Q7GPt8ipD1NODPq5n8jBLxevBWVEfd7S8sIeL2gbIIKvhmh7dsXWWxs7Z1mfSQ89OAp9XDxlldQ8cUXAFiTTRQXFoc9rshWRIWzggTtmeFtIDMY0A0Zgu2vv8LuN5435hRfUfPCoFZw09BWrDoY3uvkliGtQnQkMTolOqUsbEifSi4l3qCixOIku9zGptwqUqI09EqPIsmoRtFMEz71ETjKTUDAZkWlqBsnfui8jkRp630WDYnix+cVo6o/3hD0eN1RZDTRWiX4jzYBFiAQCGBzRfYNMds9jO+ZQm6FnV2FFg6V2zlUnsPcTfmc3zWJc9rEkl1mZ8WBMtomZEV+KrVJiEMveV9oPpR6kIW+iEq7mxlrc3ntt321FaZ4g4r3ru5Nz/Qo5DIpLq+vdl8NZFIJTnfo9hpYXV4s1dV0mzmMf4x9B6ei7VHemxOEKR1SeoVqiwBGPiHIKAit0OD7oO+NRwzfWtylTydaKiqnGDKDAd3AgRH360cMR6o59YF5ypQUMr/6Ek2vnrXb5PHxpLz+GuquXU/ptbgPHqz1JPEGGr/D8vjOnLFfmcFAwkMPIlGFVp80AwaEzUY629AzPYrxPULHQC/unkzPjFAym2hU8cRFncOe66GxHZDLJEz9ZA2TP1jNfxbu5p5vNzHmtWVsyK3EexKqLTKTCWkknZBEgi85DYvDS6xOyTtX9aJjsligLA4POeU2DpZaRftGJoe2o+HaeWLhk8ohuhU6mY9h7cPfdXdMMgjDti6XRr7AThNBE41BI2dEh8gEfHzPFHqmR3HtOVlBIYjVLi8/bMjj/WUH+WVHET9uyIvodhsEhQY00WFJCo4qysvK+H59XhDhKK12cfXHa2p9b5QyaUj2kT8QOGo6tUIKBALE/HIXCZKT3PrRJ8Dk6dD1cqHPAdDGwIWvQucJddtA+FdpY0CuEC0jS0FkkW0LTipaKiqnGFK1mtibb8KyYEFIm0AWFYVhzJjTZpWvat2a9Pfew1tZScDjRWYyIk9IOOXXI9XUWXib3HJUMhUuX+gXhFqmJlZzZoxN+x0OvOXlSKRSWs2ejXnRQio/+xyZ0UD01GswXjgO+WkQIzc34vQqnhnfhRsHt2LelgICAbFoZsRoazNv6kMhkzGuWzLp0Rpe/mUPB0ptZMZqefC8DnRNNfLPn3aytzjYHdbp8XPj5+v49b5hpDXFCOwYIE9IIOGhhyh68smQfabJk1GmJbHw3nSiNAoSjWqkUgkHS608NXcHy/eLhN828Tqem9iVnhnRaFoPh6kzcTnsbC+y8dKSSm4f3gaLw8vmw1W1526boOeDa/oI0zZppjAP2z4z+AK0MULMqdShR4xkL9tbEiJE7pxsoFOykVi9iiFtY5neiHOuQiqldqjK5xW+IfZyQUi0cWGDCX1+Pz4/KB2lYjJo67e0lSn5fsh17FB04bbZebUVJ5fXz687i7llaGtKql1cd04mby7eX3uuQECY0UUyskuP0RBj3XfkYD/y7CUQf7KrKmkw/k0Y9eSR2AC98GCRhjHdrMyBFW/C9h/Fv7tPEu2x6MyTe40tCELLePJpQMDjwbVvH0XP/RvHxo0gkaAbMYLEhx9C1arV6b68JsFucWEzu7GbXeii1GhNSrSG5mlXufMLOHjxxQTsdlQXjGHppLa8vjc0dfqhvg8xueNkVLJTp58JB09xMaVvvoV53jwRwmgyEXfHNAyjxyBRyJHHxSH5H/JROV5U2tw4vb7anJrcCjvnvvJHRE3Hx9f2ZXTnE0v4DQef2Yx9/XpK/vsq7oMHkSfEE3vzjRj7t0MusUBSN7GYAfmVdsa/HTqOLZXA3DuH0C2tzojR5vJSbnXh9ftRymVUO70UWZzE61UkGlXEG+pl6FhLhNfIyrfAZYEOF0KvqaLlcOTGwOvzk11m47+/7WXp7hL0KjlXD8hgyoAMkk11Vdd5m/O5Z8ZmQDx0aNs4xvdMQauUk2hU0SczRozz7l4AvzwmxqEBYlrD5Z+J1yuVUWlzc6jcxvTVuVQ53FzcTs0AYyXJC28UQl/AkzGUP7s+z02z6gzsxvdI5t+XdOPKD1czsVcqlTY3X67KweryIpHA1AEZXNA1mRs+Xxfk0KtTyvj2yky6L74ayg+IjcMeEi2YMwFVucKh1tqg9WxMgRt/haj003Nd/0No6vrdQlROI7xVVfirq0EiQWYynTUpu5YyBz+/t5Xy/DpxaHymgQtu64Yh5sRDCP0eD/aVqzh8xx3g86F+4A729U3kvdxvybHkkGXM4u5ed9M7oTcm9enNFPJWVJD3f/fhWLs2ZF/Cww8Rc+21pyU5+mzA/hIro19dFnH/C5d2Y0r/k9cu85aVEbBWQv5a5Ls+R5J35HdoSBaurLFt+HZtbtjkY4Bh7eN5e0qvyOnFTYHDLDJs1CbRTgoDq8tDtdOLFAmxemVIK6XY4uSebzex+XAVr07qyebDVXy//jBmh4ceaSaeuKgzXeSFaD8+J/TkSj1MW0GVKoV3lx7gw7+Cp/8yY7V8M8FE6owxtVk7xRd9xSW/6WqnuZ66uDMTeiRz7ivLsDi9jO6UwMSeqUilEhQyCasPVmBQyRjcLp7VB8rYU1RN70QJo9MlpP5xP7LCek7ENyyEzAbXaS0RRGbbj6IS1H2SmL7RnsRICr8Plr8GS/4Vfv+Y50T+T8sNyAnhrPJReeedd3j55ZcpKiqiR48evPXWW/Tv3/90X9ZJhzwqCqKiTvdlHBPs1W4Wfbg9iKQAlOZUs/jzHYy5qhXaeFPj2UVHgVShQDugP60XzMcyfwGuHfsZkNKaXkPexKdWoJAqzpiWj7ekJCxJASh7732M55+PIiXlFF/VscPt9VFS7WJ/iRWL00vnZCPxeiUm7ckTdetVMhKNKoot4fv+XVNO7o2HXOGE78+rdZatRXUhzL4d79Wz+COMdX4NNudWYnd7T4yoaI5OtPUqBfpGpq0SjWrenNKL0moXzy3YGZRmvCXPzKQPVvHt1e3pcc5DVOsyUTjLid7xhci0cVth7y/kp10
ZQlJAuNp+tE3P4x3Go9wlLBUS907n3HYP8/V6J1qljNEdE5FJpbRLNJBTbiM1WsuBUisHSm0s2l6E2+fnjhFteHHhbrLidPx3pBbV/DthebALNym96xxmbWWCGElkMO8e2Luw7rg17+HvfT3SUU+evCkcRxXsnBN5/45Z0Ovqk0uWWlCL005UvvvuO+6//37ef/99BgwYwOuvv87YsWPZs2cPCQlnxjRHC+rgqHZTmhvq7QCQv9eM9XAJ0rICNMcowPU7HKBQID1SfZCq1aiysoi/604CgcBp0+0cDe6D4ccuAfzV1fisNprPT/XkwOnxsepAOdO+3hCkh7iiTxqPXNCx0cTeE0GiUc3jF3Ti3u82h+wb2DqGlKiTLCo354WSlBrkrUXutjSqkYk3qJCdIXfUiUY1xRZnEEmpQf+sGPzqaP5RMY41G6uI1bdiWr9z6S/ZTuwvd4LTzJzNkQMaf9hSzu2XTiHpCFEhEECChLRoDe9c1ZvUaA0yqYTnJnRhT7GVmRvzWGVx0jXVxMfX9eXT5Qe5rHcaE3ulolfJkWv8YtR34SNQuFmMBPe+Hs65S5x/8zew6h1wVYvwxJFPQP9bxFTSlhmwax7SjZ/j6nQJ/swhaJQnYRmTykDRiD5KqRXTQC04JTjtROXVV1/llltu4YYbxJjf+++/z4IFC/j000959NFHT/PVtaAh3PbGo9Y97gD5TzxI5vSvmuRk687Px/rHMqyLFyOLiSFm6lQUrbKCQhibk6RUVzgpOWQhb08lUQlasrrFoo9WIVMcXwVIFtfIHZ1UivQU+s8cL4rMTm75cn2I58kPG/LolmbimoGZJ4UoSiQSRnRM4J2revHCwt3kVTpQK6RM6ZfBbcPbhBXnNiucR5kwcVQyuW8mnywPT0anDW8T3kjtNGFNGJKSZFRz89BWXP/ZutqJnfwqB9PyzEzp2YZHznmCKGMSjorIn2uX10+gng7M0+s6ro9rz93nCcExiDHjJXtKePmXuoTmvcVW5m8pZPrN/cmMbZBVlDEQps4Cj01UTXTxwjF2zp2w/9e649Z9BNu+F1qaeXdDv5uErf+fryBf9yHF0T3QxJ0E6wRNlAggPLwm/P6Bd7SMLJ9CnFai4na72bBhA4899ljtNqlUyujRo1m1alXYx7hcLlz1pmUsFstJv84WCPjMZpREjryXSEAp81Odk4OvshJpTCx2sxu304tcIUVjUKKsl1jrzs0l5+qpeEvrnDwt8+cTd9ddxFx7TbPbzVeV2Jnz6kZsVXXCyJWz9nPhnd1JbR+N7Dh8O5QZ6chiYvBVhC4ShlGjkJ0FYY6LthdFNGZ7748DnN8liQTjiWuPwsGkUXBh9xT6ZsXgcPtQyCTEG9RHT0m2FAqRo9MsxI26+GO3VY9pHXmf2gQaE6lqDa9O6sFDP24NEv1e3juNER3PrIpvdBgTl6sGZPDWkv1hfUy+3VzODddfQJSyjIs6RTN9TWjCM8DoDtEYcueLf2QMQpHel7YN9HTl1S7+++vekMe6fX6emLOdb24ZGFqZ08UC9T4f5QeCSUoNnGbY8JkYH172Eox/C2JaI3NbWL2/hLEGAzrVSVjKMgZB+wuC204gcpnS+jX/87UgIk4rUSkrK8Pn85GYGKzsT0xMZPfu0MhygBdeeIFnn332VFxeCxrAuWMnzg1byerSl0M7Qu9G2/aMwbNUfKj9bg/bl+Wzbn42LruIf2/dM57Bl7fFEKPGZ7NR8uprQSSlBmVvv43x/POblag4bR7+mL47iKQA+H0BFr6/jSlPD8AYe+ytBnliIhmffEzuDTfiq6qq3a7q1InExx5Fpmu+xN+ThYNl1oj7ii3ORp1WmwuJx0KESnbBN5Ohqt5YbpfL4Pzn6wy7mgJdPPS8Oryd/cinQJ+MTibngq5J9M2MYUteFTaXl96Z0SQYVMFGcPVQbnXhcPsIAEVmB3/sKcWgUTCmUyKJRhX64wxXLK12UWZ1YXZ4iDeoiNUpg66hf1YMMqkk6PfVLlHPq7+FEogarCyV075yJq373s/ATCOrc4Jv/LRKGQ8OiUf/12aY+D60HgGGJMwON4VVTn7eXoRcCukxOiL9mewttmK2e47eQtweGitSd5JF4vnXfwprP4SeV+NWmkiJj8Xh9p4comJIFGPMZftg03Sxrfc1ENu28VRpt01MSVlLQa4Sf2fGUO+hFjQdp731c6x47LHHuP/++2v/bbFYSE9vGRM72fCazZS+9RbOnTsZ8PbHyBXRHNhSSSAAUqmE9v3i6NVNSvENXyI1GvHIdSz/fl/t4wP+AAc2lmAps3PhXT1QWs1U//ZbxOezLluGqm0To9ubAKfNQ/7eqvCvze2nstB+XERFIpGg6tiRVrNn4crOxlNYiLpdOxQpKcgbawudQRjcNo7v1+eF3dclxYTqONtixwyPQ/wodeILPhzMefDleDEJUh87ZoIpRdieR3psQ2iiYPQzENceVr4h9CpRmTD6aWh9bu0UjkYpJyNWTkZs454uNpeXbflmnv1pB3ePbMeny7NZnyNcqI0aOdXmSiZ1j0YZY0RpPDZPnYJKO0t2l2D3+NmQU8Hvu0oY2TGB5yZ2rSV58UY1b17Zi7u/3VhLGiQ03rJTSgJQlUNC8QreGJLMom4d+GzVYawuLyM7JDBtRBsytB5hkqYRLZYqu5uP/8rm7aXCLyXRqGLaiGbwPpE3ItyWyiFwpCpUfgBiWrFX2pFbv9rAPy7szPldEzFpToLwW58gfrIGN+14Wxms+QBWvCayg0CMuk/+WsQSnCGaprMNp5WoxMXFIZPJKC4OnlMvLi4mKSn8nZFKpUIVxvmzBScXAacTd24uAZeLsjtvosvkq+l1zfl4AzLkeOHANlzz9oDHQ8Kzz7JycfiYgNJcK9YKJ9GKAPgiu876nZFbTMcDvze09F0frqa4d0aARCJBkZyMIvnsvGvqmxVDvF5FqTV0+ubxcR2J8ZZCXpH4ktXFRzbHOl64rGICZdXbULZXfKEPnCZGUBV15NHn8+OyWVEl9UR2cHFo6u+6T6D/rcKLpEnPawOPXTjFdp4gepfyI5WdQ8vFnXRyd0joDKbUo55ua14VUz5aw0Xdk9HhYHQrJVpFDFGqAI/1lxO37iUUP2wUraqhD4ox3CZMrRyusDNnUz5zNhfg8voY0T6ez67vx1Nzt/PfX/fwzPguaJVyNAoZIzvGs/iB4fy6o5gCs4MorYIhbWNZvj809kAigQEZeki8ExY+QmJVDtfeu41xPQbh84NJIxdCVbdNEMPqQlDqqXQbakkKQLHFRWaMFqmEsFWVTskGorRNqCJ1u0L8DYRDl0tEVQUgrh1VMd25+fNcLE4vj8zcSkpUf4a2OwMMFfcugj9fCt5mzoMvLhIZSi1GcceF00pUlEolffr0YfHixUycOBEAv9/P4sWLueuuu07npbWgAaQaDaq2bbGXlxNwu6n+6jP46rPa/THXXwdyORmffYY/sx0H/7kl4rnK823EddWjHTAA+5rwYjX9iBHNev1KjRytSYndHD7rRR+txuXwoDqRUdOzFK
lRGr67bSCPzdrGmmyhtUkyqnl2fCe6cgDeuVSMsYIYx7z0Y8gcDIpm0K343LD/N5GZU2PpVLAJNk+Hq36ENufi9PrJr3Tw7dpcdhVZ6B7/CFdc+TRpq55Cmb247lweu3AaBaFr8LpAaQBlmEpZ1WH4/VnYOVuMwUZlwvkviLL+J+cJYWcNDMlw/XyxLwLKrS7++dNOXr8olfOiC9Guf4bhTjNTu1yNOrEN8m8ur6sI2Erh+2uED8ewRxodUS62OLn7241sPlzXap2+JpeF24t4bXJPbp++nrtGtiUjpq760ypOz23D9Xh8fj5Ytp+bh7Zme4ElxEr//0a1I17ugJ+eEESx59VI1CbiVfV+r5YC+O0Z2PGjIIZKPcn97+LdiRdyx5w6TcuczfncPbIdbyzeF/QcaoWU/1zavWnC6KgM6HczrGsQ2mpKE5b3314pLumcR7j0mzyKLHU3My8t2kOXFBMxulObkRaE6iKRfh0OrmpBfs8yolJscVJkdlJS7SItWkOCQXXyRe5hcNpbP/fffz/XXXcdffv2pX///rz++uvYbLbaKaAWnBmQGY3E33M3OVeHEguJUknUpEnIoqORR0djKXMglUnw+8I3rfVRKmQmE4mPP8ahSZNDogQM55+PIvkYtAZNgC5KxZAr2vHrxztC9rXuFU/ujnK0JuXfkqgAtI4XFu+Vdg8enx+jWk6ifT+SD84LPtBeAd9cAdNWQXyHE3/i6mKYe1cdSamB3wdzbsd723LWFkq48fN1tYLfFfvhk7VSvpz8HANsxUhKtovHqKNApoT9S+Cv/0J1AaQPhMH3ikTcmpaQpRCmXyqqNzWoyoEZV8ElH4hSf32iUl0IM2+Gq3+MWAGxuXxM6abnvML30f7+be12fa+psPDBOpJSH6vegT43NkpUtuebg0gKgFEtx+31M39rISM7JmIPE/gIoJBJGdclkTtnbOa1ST1Zn1PJxtxK4nRKLuyeTOdoP8aPugAS6H4ljHpK2MnXvqgymHWLWGBr4LaiXv4fhg3ycnHX8/hpu4gVmLu5gJuGtOK9q3szd3M+BWYn/bJimDowg/SmxiBoY2DEY6LCtfo9cJlFCy6+A8y7C2QKzMOeZfrhRA6WBVfh95VU4/Kc5OTlo8HnAXN4QTIAReGNA89UZJdZuf6zdeSU22u39c6I4u2rep9864AGOO1EZfLkyZSWlvLUU09RVFREz549WbRoUYjAtgWnH6r27Ul56UWKnvs3/iPTVvLkZFJfeRllejoShVjkNUYl7QcksXtlYeg5dHKik4XAVNWmDa1mz6Lso4+xr1yJLCqKmJtuRH/OOchjmtdISSKREJuqZ/T1ndn0Wy7l+Va0RiWdh6RgjNew9KvdpLSLIiqhebNlziZEaeuJM50WmBuahwMIErH+MzjvuYhuqk1GdVFdtaYhrMWUOqTcM2NjyFSS2+fn3vkFzD3/cZLmXSU2Dvk/yFkJc6fVHVhxELb/ANctgIwBYlvZ3mCSUh9/vSIScxc9Fry9YJPIyIlAVOQyCaOSXWj/+jZ4hz5RXEMkFG6GuPCVGq/fH6QdGtctict6p1FucyOVQKxOhS/gRxfJR6Qql4w/nuKV8x7mySV7kUmldEwyEm9Q0ipGTbLKDDcvEVodXTyoGozbWouDSUr9l7X+XW4eP4Gfttdt+2R5Ni6Pj+cmdqXa5UUulaJTyY4aShgEXZz4SesrKl1uB1gLYfLXlPgNPLWkjEW7QtPUM2K0KI7leU4GZApRmasv8q6P5B6n9npOACUWJzd+vj6IpABszK3iyTnbeW1yzxMzOjxGnHaiAnDXXXe1tHrOAsgMBozjxqHt2xdvZSUSmQxZTAyKBsZ8CqWMARe3wlLioGB/Ve12tU7B+Ht7oo8Sd7YSuRxV69YkP/0UPosFiVze7ASlIdYtyKbT4BT6jsvC5fCyZ3UR638+JK5bfYpEo2cDPHYo3xd5f/E2YcAl00c+pklofKKo1BGImP5bbHFhNbaFVsOg9UjoPBHe6hV6oM8DP90D1/0kqiU5KyI/Ydk+MEbQo3gj66biDEoCuQtDdxxNPCmPfGcqAVQyIYa9cXAWJq2S26dvwHOkUqmSS3n64s70zQzzd+uxw9IXkO+aTZfyvXww4gWqdelIvQ6iD83HpLoEYlsBWZGvrSKymSEeOwaJI2iTWiHl6oGZXPvpOnYWihuZLilGXrq8Ox0SDU0jLD6vyD6Sq4SoWqkDvSCH9jIbS/buD/uw+8a0F4GPR2B2eMirtPPD+jwq7W4u6p5Ct1QTSaZ6ba1AoC6kMeAHbWzT9FdelyBxrmphCqeLPxJsmCTM6WbdEvoYtanpgtwzACXVLrLLbGH3LdlTQrnN/fcjKi04eyCRy1GkpBzVFl4freb827pirXJRUWBDZ1JiStCij1aFmIdJ1Wqk6pPj01EfGr0ChVrG6jkHQvapdQr00Sf/Gs4aKLQQ11EIAcMhqXud8PREYEgSi5E7zJeiLgFvIPzUyuBME4/3jyVq0zZKq0ahoQ+qShsKXXzoRBBA6W5hi65PAFMjU4JKvdDNNIRC26hPi1Imwy8Lc63FOyC1D+RvCN0nU0BCRyjeKUihLl7oNAwpIJUik0q5ul8yf+0vo3dGNHd9uyno4S6vn8dnb6drqokobQPdgLUMtv9A1dBn2R07ivdX2yiuzmFAmobruo9FV7AZeexRAlAbG8GVSImPNhGtNVPl8DCifTwPju3Ak3O21ZIUgB0FFq54fxUL7x1KZmwjo/p+v6hEbPoaDvwu3otz7oHEzoJAACnRar68qT/Tpm+g8gh5Vcgk3D2yLQNa1fmxmB1uPltxiNd/ryPaczcX0D5Rzxc39Cc5SiPIxuE1MOtWQVZAtJ4uegPajAxugQW9r6XChG7lW4IMSmXQaSKc9y8huG47SrTQlr1UR2xjWsOkrxr/uzvDEC7pugY1idinEi2hhC34W6Gi0MacVzfiqK67S5crpIz/v54ktjIhlZ6ZVv2nBYfXwidjQrdL5XDHKjHWe6LwuoWgddatwdslUpgyg7y4oYx57U8c9fQH/dKNvNwxgPO+uwm4675QFenpZLzyOMqfrxF35Q1x13qIaycqBe/0qxsfrY/+t4PlsEgarkH6ABj9LKiNIFOJBS1cxkvBJvhwRPA2fSJMeFtoXBo64U54TyzOy+oJMLWxMHUmJPUAqZSy4gJ+z3awcGc5y/aGeg4BXNwjhZcv7466/hh52T6qV37Mp7IreO2v4FaJWiHlxxu60bV1Wtjz1cJSAB+PEv9tiPbnE2h/Pt5WIymTJ6CSSXn1971MX50b9lR3ntuG+0e3RxapqlKyGz49L/Q9OudeGHp/LUn0+QMUW4S40+XxkWxSE2dQoa3X/tpVaOGCN/4K+zS3DWvNg2Pbo6g8AO8Oqg1arIVEArcshZQwlTmvC/58JXSqByDjHJj8lWhbeZyi4mIvF5UhbZzwZGlu2MqgMluQO68LelwJ8R2b5bn2Fldz3mt/ht2nlElZ/MBw0mNOvE3e1PW7Zai7BX8rxCTruOKxfpx3cxe6j0xjxNUdu
PKpASRkGVtISkPEd4JLPxJl6xroE8RCamqmVGO5EjqME4tDx/GQ0EmMqd72F2QNJcGo4qmLOwc95LF+sTgfui+IpAB4Dh+m6N2v8XWdGvo8yT3ryIUxBabMCPVbyRoK59wJKX1rPUMYcj/0vlZM6bx3DrzdR4huy8K0xUwZQpRaH9Zi+PO/cPNioelpOxr63QLTVgL+YJICYnH7ciJYRPZOnNrP0AQX+VXBbZb6yC234WwoJPV5KOtyI68vD9VzOD1+HltwiIpG7poB8T5NnSXaIfWR0hv63ojkl8dRfHMJyZIqfIEAy/eVRTzV6oMV2CKIfnGYYdGj4SMNVr4h3sMjkEklpERp6JkexYDWsWTE6oJICtBobtG3a3Ox2p2w7tNQkgKiXLDsZTEy3xDVRZHHp3NXiv0gpuGiMyG1NyR2OUkkpRR+exo+Hi1ce7d8I/yFZt1SVyE6AcTpVQxuE95V++qBGcTpT+10VUvrpwV/Oxhi1Bhi1LTr2yLYbhRqg/CvyBgE9jJR5dDFgT65eY2rVAbxpX7J++B1iMC3I4FwSuDCbsm0jtPx+u97qbS7ia8oxGq3hz2V7c8V+G5+FRnvig3aWMqHPE1F5oXYSnxEaauIM2jQZw2DO9eJKoitVIg3jWmgjxdTQj0mQ8An7vS/uSL4SXJXwedHyJWpXlVCFytaAF0mitaAswo6XAi9poqWzqC7BEmRKQQR+fnB8O+Hs0q0jKLSQZdIrKWUHik69peEFx33yohCq2ygq/B72VimChmmqsG2fDNmh/vo47wJneCGRUL0a8kXr8N8WFSIPHZhvla6G3XKEJJMag6Vh/+9pEZpUCki/M04K+Hg0sjXsH/xMU2YWRweUkxquqdH4fL4WXWwrDZs0+72Ife5xOuJhJKdohWp0gvNjNsmSK3niO9OJFQegqRjC2M9bpTsEiP8DZG9DPb+Bn2uPaHTx+iUvDKpB/9esIuftxXiDwhN1DWDMrltWOuTEwTZCFqISgta0ILIkCnEghl1CvrrKp34aQCjRsGA1rG8f01fPF4/ssWLiGj6HwgQMBzx3XDbyB32CnfOPsS2eWKsXiqBK3on88CY9iREZ4b3tZDJhN7AWgq/PxX+eawlkLcumKiAqDh1uABb6lCq3X7kcgVxxnqC2RrvGa9buPBGQs3kiCSAXBfLLUO0zNlaEhJnoJRJuWZQFkp5A6KijQVfVeTzc3TX2noHwtw7RWXNXibaDPWRuwpDm3O5c0RbVh9cG/LwjBgtT41KRGUrErom3bHmXx2bOuHhc0z8X/pB4nJm4DGaKBtwOXOyZbzyVwkjOsQjV2lEtTA3fJ4cMW0EMak4CBu/gkN/CX3JgNvFz5r3wz+uMU1Pc8LjhNURrgFg7fvQcVyTzAQbQ7JJw38u68ZDYztgc/kwqOXEG1TBLcZThBai0oIWtOCsgOnIlIGzU8eIx8gT4pFGxcHEdyk227n+sy0crDe94A/AdxsKMagVPHRBZ1QNF/j68LnEnWskHFohKk714Pb6yC6z8frv+1h9sJwYnYppw1szvEM88YZ64mOFRghGbeF1JyR1A2c17F2EfMF9ZKUPY/rkJ3hoURF5lYLgZMZqeemy7qTHhJkc0sbQO0OCRHIobFWle5qp6VMbUrnQblgitFSOiES7ppm4d1Q73lyyr/Y5/3FuElPTitHMmgzl+4WuaeSTkNanrr2mjoKsIRFHoWkzsmnXCWApIGr2VUiKxdy0DEjb+BG3nPMgvadeSXJiChqNBgbcAhs/D+9vM/wh8Vo/GVMn8s5bBztmCaFs18tg+8wG70FaKGk9WQj4wF0deb/LGurafJzQqxToVaffW6pFo9KCFjQBxWYny/aW8My8Hby9ZB/7S6zYnKdW+d4CAXlCAvpzR4Tdl/DQw8gTE0GuIr/SFkRS6mP62jwKKxsp4wNIZEIMGwlx7UI27Sqs5qK3lrNwexGVdg8HSq08+ONW/jl/Z7AmxJAszM0kEqFbOfdxGPqAaHHEtRd39WV7YNbN4KpGvX8Bg1bcxMyRZhZdk8KCO/pzy9DWHK50oJSFIVsKDXEmA/eNDE2I1ihkPH9Jt6a7uOoToe/N4ffJlELbA0Rrldw8tBVLHxjBW1N6MfvWPtxkXIPmhylQtFW0TQo3w9eXwbYf6iozmii44GUxcdUQ/W8FfRPNH30eWPdJLUmpD9XKV2irrGJ7QRUlFidEZYn8nfr6K4VGJDNHZcKcaeEn0Zb+G3pfF7zNmCq0PMbGJyGbDUqdqBhGQseL6kjg/whapn5a0IKjoKDKwfWfrWVvcXDD4aXLu3Nht+STk9zagkbhKSmhcsYMKr+ajr+6GkVmJokPP4S2X7/a1O3Zq3Zy39zIXiBf3NCPzinG4EpHffj9sPYDIfRsCJlCaFxi6kZ8K2xurv9sLVvzwohCgYX3DqVTshFs5UI063WKFkPOKiwON9WJfZFoY4jRa1FrtDDzRtj7S9hzOXpcz8O2q0Em560pvSO+xiq7mz2FFj748yDF1S4Gto7hmoFZpEVrjs2IrbpILN4HltRtU2jgyhkis+hIoKDZ7sbu9iGVSoinEumbPcP7zyi0cOeaulwmv0+0u9Z+DAeXgDYeBt4uqk4KjSB2R2tlWArg/cHCPTkMHH1v5/aSy3B7fbx+ZS8SdXIhPLUWicqKIVmQsqpceLtv5Oe5/FOR/1S6V4zXR6WfOpJSA3MefH6h0MXUhyYabv1D5GSdBWjq+t3yDduCvwV8FgsBtxup0YhU2XTFusvr471lB0JICsAjM7fSNzOa1vEnanrWgmOFIiGB+GnTiL7iCgJeL1K1Gnl8cChdZpwhwqOFMNDu8bHyQDkTekYweJNKRZm/aHuwcFGphyu/qTWG8/r8eP1+qp2eiCQFYMW+UjqpK+GH64SIF/C2PY/9Q9/g37/m8NeCQpSyYiZ2j+fuUe1Id1RFPJemYhetE6WYohq/c47SKhnQJo5uaVG4vH70ahmKhhUYj0NoblyWIwZmCUJIXR+GJJwXv4/PnI8rdwNedSzW6I64VAmk+6TIAj72FVXzwsJdbMipIkanZNqwTC69+mcMtkNimubAYtg+SxAXj12M19YQFalM+I2MehJ6Xi0qLj8/VNduajMSJrwLxkaCPwOB8FWQI1C4qlArpCzbW8r2fDOJnRLD66+Odu8eCAiRcUKnxo87mTClwXXzhV5m89eimtR5Igx7QFSE/sfQQlT+x+Bx+7CbXeTvqcRmdpPaIRpTvAad6diDpBxeB5XOSgKBADqFjih1VPNf8EmGt6ICx5YtlH/4Eb7KSrTnDCL22mtRpKUhkR/9z7/c6uaH9eHzOwIBWLqnpIWonCZIFIrIidXWYlJNKjJjtSE24ACX9k7ll+3F7CupZmi7+MhtEH0CjP03DLlXTACpTWJBNSRjdvvJLTLz1epDFFtc3D+mfcQEYQBNwAYWa519v1LHoQH/ZMJHW3AdSfd2+/x8v6mYlYcsfH/Zf0mZPiTsuRyxXcizwqWDmibg1KrkNPSEAwRBWfGGMDHzukQrqsNFcMGLQYnRZoeHF5cU
M3NjMalR7bC5vRRbsoFsPrymDykmNRPfXVH72nUqKecmOVGvfxd2zxUTY53GwzVzRG5P+X5RlQq5nlL4dEwo4TiwBFa+CaOfCR0rr4HaCG3HwO75YXeXZV3Mll8Ekfx6TS5D2sahCicM1UQJP5LS3aH7JJLwHisnC9YSMZ5tKRBGgIbEOtFuVDqMelqEW4KopijC6JX+B9BCVP6H4HH7OLyzgl8+3I6/5hvjp2ziM/SMm9b9mJxXC6wFvLv5XRZkL8Dr99I9rjuP9n+U9jHtUcmaPz3T7DJT6azE4XWgV+qJ08ShacRevCnwms2UvvU2Vd/W5a+4Dx3CPHsOWd9+i7rj0Uce/f5A7WhjOJRXuyLua8FpQkU2zJhCoqOSz6/8nTtnHap1S5VI4KJuyQxqHct9328h2aTG64v8+wXEwqWJCjK4q3Z6+H5dHv/+uU5s2ypOx/D2CSzdE+qMK5HAOckS4anS/1ZY/hr2rlfx5hpzLUmpj7xKB2uqDFyS0ClU0CuVUdX1Oq5VZpJ6IuFwHocgKfW9QRK6iPHwNR/C4Ltr2y1l1S6+WSsIe0Pdz1Nzd/DEhZ1qSYpUAh9dnEDGzAvBUVl34LYfIHsZrmsW4N70HQZtmFZOwcbIVZENn8PAOyJPoKkMQqh7YHHIRJUvsQd7yKTIIgzpGi2a6BOEVuXzcaGmgMMeFhWnU4GqHJhxDRTVS6JP7CqqeTXTanLlqW87nQa0iGn/h2A3u1lUn6QcQWmulQ2LcvA2suDWR7GtmJt+vYm5B+biPWKKtLVsK9csvIaDVY0ErB0n8q35PPDHA1w852ImzZ/EhDkTeHPjm5Q7yk/ovN7ikiCSUoOAw0HRv5/Da45cpq+BTiWnZ3rkdNshiQo8ZZGNrlpwilFdAjOmiMW9uoisZffx9Li2fHxdX965qjefXNePeIOK+7/fgs8fYHi7+OPKLCmrdgWRFICZG/K4YXAW8YZQIv/U6BTit38i7OEzBgJgSRrMXwfDOOgewYIdpbgufCtY8KmLwzP5OzQJremREX1sOpOGsJaISgqIFsyVXwuzPVc1OCug6rAIpwR2F0W+ziKLM8gscWT7WBL2fx9MUuo9p3PbPFam3IBVFR+6P1KgH4h2Ubh4g/qIbQu3/EGg04TaySrLwAfZMfwD7ppX57B79cCM8NWU2vO0g1v+gO6TxTmzhgmjwwG3h7bFTgZs5fDjzcEkBaB4O8y8SbTN/kZoqaj8DyF/TwWBCHXn3SsL6T02E0PM0asqW0q3kFcdmvHiC/h4Y+MbvDTsJYyq5hEul9nLuHvx3eyrqnP69Pg9TN81HY1cw+09bkcpOz4XRNuqCD4JgGPdepEAbYpMQgCidUqevrgLl723MqSk3zPVQFLOHqp3VBA9dWpIhlEkmF1mzC5BkkxKEyZ149fQgmOArTioAiHZ/xupffO4ZKaZUmtw9UujkHHT0FbkVzn4aUsB2WU2hrWLZ2DrGFKjG7cHX3EglERXu7w8NXc7P9zYg5W78/g9x0uSTsLUrmrSDn6Hfutn4sAj0y4yjwWjJqo2t6YhorRKvjqk5ZyJi8hU2VAr5cj0CSgMSUQdLTivKXBVi2tRGeCi12HuHXXuqgAbv4Rxr0CPKUcVjMvrEZVhGQr0e8KLgAFM2T+zzz2CtslR6Bu2TVMbEbEaU2uNACNCJoeEjkgmvovTVsmGnCo+2mhl2bKDBALQIdHAzf1jGZjSyOtxVgun18X/hM4ToMcUcFTAkufgis/Dxyc0N+ylkBfqSQOIUWlb2Qn7pJxNaCEq/0OwmSPfbXg9fvy+pg14LT0c2SVyXdE67F57sxGVIntREEmpj+m7pnN5+8tJ0R9fadPfGHGQSKCJhledko38eE13XliWy7qcKoxqOVd1i+PKpADOu++jIjoK4/nnh4g5G8Lr93Kg6gD/Wv0vtpSKO6Ve8b14YtATtDG1QdYci8/fHfZQApH2y838MGk6/11rZ+HOcnyBAEPaxvHw2A4Ump1c88maWhI6d3MBcXol3982qFHtUYhl/REcKrcjseRx1d57uSKhOzJnFdJZv9e1IiSSWo1F3I4vuKn3Kzz1W/gx6dGdErn/+808d8R6flTHBF68PI64ZiEpNqERkUjFQrz2g2CSUoOFD0Hrc2mbkIJaIQ3bBu2TEY3VVTeqb3UjyE8E+FUmqt2wZFcJbRq+x7FtRIutRsdTH6OfrRPT+n3iej12YSKnT6ydPAJApUet0tNKGsdQWxF2b4Cnh8fQxrYB9ZZ/wx6gzw3QanioQNdaBL88Lv5/63fB+xb/Cy5+M6wxYZNRXSQiGA7+IaaG2owS11BfX+KKLAoGwB3R8vB/Ei1E5X8Iqe2jgfDjmDHJOhSqpn3BxWvj6ZvQm1GmAfjws7BiOTvKdwAQpY5CKmm+jmFudfgQMxBiXpvnKB/YCLC5vLh7RL470w4ejCyqaZUMlUxC0rcf8mJyBoELuoHDgXTupziW/QF+Pz6JhIDv6AZLBdYCrll4DQ5vXf98U+kmpv48lR8v/pEMYzPl5/ydEc5zo/IQWT+O5cVu1/DIvQ/hd1ShCrgxam2c9/WekEpZmdXN47O38f7UPkRpw1fzBreNfDe7vUpJhjoKxZavQvYFOl6MJFe45EoKNnD+gDJ+bxPNnweC2yQ3DWnFptxK7PXycRbvLiG3wk6cvkFrye8TY7YehyBB+sTIglOvGyoOwJ8vixHW9hdAq2HwfQTL9UAADv5BYu8bePuq3tz21YYgd9xYnZIXL++ORiHho2v6YHP72JBTSUnXW0g4HL4iUNLlJub+Yuay3mGIoCFJeJIsehT2/CzGhnVxgqS0Gy2OsZWJ6aFl/xHEVKEVpGPwPeLx9ZASpeHGwVlc20WJ/PurkNS3zs9dLTKgpnwbrPPY91v49wJg5xwh6D1eomLOh28miRZODaQyuPxz8fpqKkaaKEFqw4lpJJJGk7z/F9FCVP6HYErQEJ+up/RwKNseMqkdWmPTWii3Jl5G6XoZgQW/IJHLGTphDOW9b+Cebc8wtdNU4jTNV3JM1gXfzUglUmLVsbh8Lhxex3ELaqudHn485OTim27F9cmHwc9hNKK45358Gh1NoW4SqRRd/wGYH3oo7H7NgP5I9Y1P/nh8HmbsmRFEUmrg8Dr4Ye8P3NvrXuSylo/kCUEXLxbe7AbJr24bWm8V2rX/EqJMgKgMvrx0BhO+zsPSwLxv9cEKKu3uYKLisoocHiBTb2By33S+azARppRJyUxLRdLxPZj/f7DvV7FDIsHT/mIO9XkCm9ND4iWDkbitBIypvHx5OnlmD7/tLEGjlNE11cRf+8r4ZHnoTcfPWwvpnVFvJNlWJhxT/6hZtDVHFu17QxZtQBivfXa+EIkqNKKVIZWHd2itgb0Uhc/BkLZx/HbfMOZvLeBAqY0hbeMY2DoWh8fH/d9vY012BTqljMv6pKFqdQ7+TuOR7poXfKrOV7LakUaRJZ8RHSOIUqPSRe6TrayuNWU4ki/ldQtb+8XP1B3vscPqd4S+ZfxbIa0ZiUSCIntp+Hyfws2istHzquDzRYL
fK5xhjwcepyCIDQ3p/D748Xq4a0OdL48uHjpfIn63DdFpgvCZ+Ruh5Vvxfwg6k4pxd3Rnw6Icdq0sxOfxE5OsY8ikdiS2alqrxlNYSNGNt+LJrVfp2LmTmM6d+Pj5l0jI6NisFZVUXSopuhSK7cVc3+V6eif25nD1YfQKPSn6FKLVx+ewKJNKmbffjLrTCEa/2x/5zG8JVJTj7zcI97nn8V2RhHu6Nj0tWdu3D4rUFDz5DSLvFQoS7r0X2VGIitVjZU3hmoj71xSuwdrVSpQsijJHGQXWAnIsOaToU0jTp5GoawlQbBJ0sTDxfVj4MOxZIO5IpXLodrkYXZ19a92xVblkrHqSm/v9g1f/Cm17eGtapYGACN9b/E/YI0ZftR0u4vmRTzKmcx9e+XUv5TY3g1rHcNfIdmTFakEeJZKnbWW4bFUcdij4ab+X9z8/iMvrR6uUoZJrqXIcYvH9mfTJiqVPVixl1S5GvLIUqyv8YqiS1/vs+Tyw6evgPCKPA1a/K4zAJrwTvGjbymDB/XWTLB4H/HgTXDtXVBYiBfUl9QCnGbVJT+t4PfeMqpt+2p5vZuI7K/AeqbLY3D6+XJXDxtxKfrjmZWQD78a35QeQSKloPZ4lxRqeXpBPv8xo8T5FgsoQvn1kLYK/Xgn/mN3zhcV9Qw2JvRI2fBr5udZ/Ah0uqHNzbTMKlvwr/LHpA+B42962UtgSKu4HBFnJ/rOOqKiNcP4LgkxunSH2S2XQbRKMfho0fy9dWwtR+R+DPlrN4Mvb0XtsJn5fAIVK1uRKSsDvxzJ/QTBJOQL3zl2k7KsiumPzMvkEXQLvj3mfvRV7+SXnFz7Z/kntPp1Cx5vnvkmvhF4ownkuNIJ4g4qbh7bmsVnbeE+r4KJhNxCjlLCpzMXy7w/w011DkEmbTlQUyclkfP45pW+9hWXhIvB40PTqSeI//oGyVaujPl4pVRKrjhzGFquJRSlTkl+dz11L7mJ/1f7afYnaRD467yNamY7+PC1A+H9MfE8sDG6b0GGs/VCQlAYZKLLsJYzt+zSv/hV8irRoTd00UFUOfDI6eIpl11xkh/5k9K1/0OvmAXj9AQwqOdr6otMjY83LS4u56Yv1Qee3u321bZ3648k6lYwp/TPoGuWmZ4wXmd9FtdTIZ5vtfL+ljAu712tRVBfBny+Ffw/2/Cz8N1wWAtUluCUqlPpoJIldobDeJInbCms+EH4c31whKgb10eECKN8HKT1DnsLs8PDOkn1kxmopqHLiqKfb2Z5vYV2JjAGt+lCs68J36w/z9Q85KGR2/m90eyb3S4/sCNwYnJbG9RkV2RGSlhvR5wX8wbuj0kXq9Z4FwcfJFMJf5njFtH5veJfeGlgbjLUbkmDcyzDsISF8VhnE6LTyBPQxZylaiMr/IOQKaZOmexrCV1lJ1dy5EfdbfvwR48iRyHTN+0HJNGbyx+E/+C0nuDds89i4/ffbmTtxLumGUO8En9eH0+oFCWgNSiQNiMfoTgks7ZzIrzuLmb6p7o75sQs6hg9yiwC7105+dT4/FvxI2pXtGXHLlUQrTaiMMSiim1bx0Sl13NjtRlYVhp9EuqHLDXj9Xp5c+WQQSQEothdz5+I7+fz8z0nQniIPh7MdaqP4Adj6A2z8IvxxgQBSf6gI/Z8TupJoVIPPK6oW4UZtHZWw+Rtihz0spk3CwVpCl3gV0wYlcqDKy5I9FbXVBwCDSo5RXfdYjVLOI30lyH+8FUr3iI1yFU/1vYurb7iKFFO9z7XT3Oii7baUkk88s3bJ2Fbmont8KZf0fpTUjMEo591Rd+DOOaJdNOVbWPcxHF4r0pd7TRX6kKrDod4hPi8aez7PdMxDll6EM747G6u0PPpLUS0B+21nEcPax5MVp2Pa8DZcOzATqURCnF6J7HjHquVH+V4LRyK00dDrWsjfKEirKR38HmGiBiK7R1vvc6yLg4teg3ZjYNVbwpI/a6jIZopte3zXDcLROJwvTg1aDQ3zGF1QTMPfFS1EpQV1kEhCFvvg/SfHdqfUXsrnOz4Pu8/j97CyYCWTO0wO2l5ZYmfbH3lkbypFKpPSaUgyHQYkYahnahdvUPPCpd24e2Rb/thTilYpY0SHBOKNKozqplVo3D43y/OW8+CyBwkcue16CVDJVHxy3if0oOmtqY7RHbm52818vO3joO23db+N9tHtqXBWsK5oXdjHHq4+TLW7uoWoNITPK3JP9vwM+eshtQ90GCcWoxrykNQl8uP1CRhMMaTHVFNicdEtzcSj53cUmTwALjPsXRT58XsWQvcrwZJ35I43UegpJBKozIXKgyTumMsjlXuxxXaj/PopPPOXlSX7qgC4/7z2JBjrLb7mPORfXhTsk+F1oV/9X7rFpiHV3lC3vREXUn9iDzb623DtF1txHzG0+2MPvL+qhK+uG0H/VuciyT4y3edzw85ZENVK+Kl0uFCQoO0/AhJBYOoTMZ8X8tah/PpyEusRpaTUAbSa8gaXTs/G4wsQq6sT9Bo1iuPyqwmBLk5M6mQvC92nTwxy0w1C+7Fw7hOQ0kMQQJlSiIn3LxYtwYYwJELfG6DjOFGFUxlBdYIO1Pp4UZH5YnzovtS+LYSkEbSEEragFoFAgIqvvqLk+RfC7k9943WMY8c2+/MWWAsYOzPyead2msoj/R+p/XdFiZ05L2/AUR3sPxGbquPCO3scVzWpsWubOHdiWBFsuiGdL87/gvhjELZVu6spc5Sxvmg9EomEvol9idXEYlAa2F2xmyt+uiLkMQMTB3FH23uRFepxVvlIbhdFTJIOXVTzOwSfdchbD19cHCyAVGjg2nmQ1k8QBnsFzLsHdv8U+vhLP4Kul1Nic+PzBdAoZUJA67QI0aREAd9NCRXn1qDVMBFQt+Z98W9DMkyZIYjS/l9h7p3B7SaZgtKJM3hgjZ4JvdIY2TGB6Pr2/bsWwHdXERb6RBE4VzOh4qiCH66Hg6F2AoWXzmb8PH+IdwxAgkHF3ClJJH85uG7j+S9C10vBXgbbZgnfkE7jIaFjqCi3MhfeHRBWdOrocT1Pu67m+82l/HbfMLLitLi9AdQKWV2r1WMXwlKlPnikuKmozIHplwjdUA000eJ3ntTtiPVAA9jK4M9XYM17ddukcrjwVehy6akxcQMhyC7YJKaaireLiknv6+Gcu/4WDrMN0RJK2IJjhkQiwXjeeZh//BHX3mBvE03fPmh6nZyMC5VMRWtTaw6aw7ve9knsU/v/LpeXLUsOh5AUgPJ8GwUHzHRoRqKSb80PS1JAVDnMLvMxERWD0oBBaajVmxTZithetp1cSy5ZpiyeH/I8r214jVJHKQD94vtzZ+JDrHo9D7+3uPY8UYlaLr6nB8bY/81sjybBUijGahsumB6H2H7LEvHlr42BC18Ri9jqd8XkTmwbGPOcSP6VSkmo0UtUF8GOVULT4nOLasn5L8KX44XmpSG6TYI/6hH76kJBnG5eDAsfDdHE4PMQ/+tdfHTdL6hiksF8GHYuhvwNkNJbaEFSegsr+YawFteaxQFCA3PxGzD9UpGdU7s9mnJTF0qt60
NOAVBS7aJcFk9ym5EiP2bArRCVJQSa+gQY1TnCG34EhZsjTsZodnzLVRNuonNGZ6QSeHLODnLK7fTLiuaGPrFEOXKQrHgDzLmQ1h/63yLs4I/F1DE6E65fIPQoRdsgupVoqZjSwpMUgNxVwSQFhGbkp3sgtbf42zgVUOlFi+fauUI/JZWJttrxELa/EVqISjPBZnbh9wWQyiTHFQB4pkCRlET6hx9iW7mKqpkzkcikRF11FdrevVEknJy2Q6wmlgf6PsCdi+8M2ZegTaBrXNfaf1dbXORuiWwfvW9NEW16xiFvzB77GOBruNA03H+8o4rAgaoD3PTLTZQ760zK0gxp/Gvwv3jsr8eodFUyrd3drH4zH783uPBZVWxn5cwDjLy2I0r13/RjbC+tS9dtiOpCcRddc5dqSIKhDwjdhd8rtA6GBpNU1cUw+zYxrlqDvPUihHDy1/DZ2GBfi66Xi8Thhtfgsoi7Zk1U7Thzw2tTeauhcCt8cWFdts3mr8Ud9qUfwu/PCFOw+tDFiQXdnC8qHxKpGGO9fv6RRXu7aGfEtcVX0bjVvE+iEJb5MqWoLBwLzKGu1bXwuugYr8auNTDq1boqVKLWj2THH0iWPFx3bMEm2PCZIB3p/Y/tGgzJ4ifznKMfa6+Av/4bef+6T4QD76m0BtDF/a2cZU8Uf9NvuOaDo9pN9tYy1s3PxlrpwhSvYeDE1qR2iEajP3NYssvhJeAPoNLKj2r1rkhKIurSSzCMGQ0SyVFHb5sDPeN78sLQF3hl3Su1C3ffxL48c84zJOnqSs/+AMgUkbUyUrmkyVb2TUGaIQ25VF6beVQf8Zp4olRRx3XeEnsJdy2+K4ikAORV5/Hu5ne5rst1fLP7GwJlKnwRMpoObi5l0KVt/r5EpWFgXMj+Bou1TB5ZwwBiGqY+SalBxUHRXrlzLWz5XmzrcIFo7fz6RPhzle0VrZrKQ+H3S+Xw7ZWhAXxuGyx8RITf/XRP8L4hD4gKzHdXCxKm0Ai9xrCHRRUm8xzRlvniQmKv/B2NQhY0iVMDnVJGrEF9dDv6SEjtE3mfMZWAQssNnwVrrab1NRD1wz9Cj/e5Yc40uP7nUOLYXPC5BHGNhKoccR0tHkZnLFp+MycAj8vL5t9z2fhL3TivudTBLx/t4JzL29J9eCqyZrqzP17YzC6KDpjZsiQPn9dHu76JtOmd0CQdh8xwivq2gFFlZFyrcfRN7Eu1uxqFVEG0OhqTKtgvQKaRk9EvgW0LwoeXdRySgkzefKLfWE0s9/W+j5fXvxy0XYKEpwc9fdzi1nJHOXnW8HemW8u28vQ5TzM6czSWzZFJV8AfwB8mefdvA228WKw9YVpzcvWxpdx6XY17bWz+RkzGjDpCTCyFIgsnklFaSg/YMSf8Pk00eB2CdNRH1hCIaSOM2+rrFaQy6H8btDsP3ukrKikjn4D4jmJCZ8/PIvFYFy+0D1W5JGx6mycuuI5/zNsT8vRPXNiJhDDBiU1GdBYkdRfmcQ0x5p/ssmmDRq6jtApM1kORiWX5fjFBdbKIitIgxKq754ff32p4o8LkFpx+tKQnnwDsFg+bfjscdt/aeQexWY6S9HmSYTe7WfrVbhZ9uJ3C/VWUHKpmxY/7mf3KRizl4XUXpxNSiZQkXRLtotuRZcoKISkAsQYVrfskEJsWOiKd0SOOqOTmHZ3WyDVMbDuRj8d8RN/EviTrkjk3ZTjfjPyM3qp2x129sR4lq8Pr95JpzCQpK/JUkT5ahUL9N84H0icKg69wOPcfQm/RVAQCjbuzEiDIbMOQJPwtIl2XOkoIJMNh3Cvgr/dcyT3h6h9FpcJ8WBAZbRzcsQZu/FU4lg57CJb8W1znha+KUdvvporKTVpfocHYsxCG/B/0vwXlmje52PUz31zXlb6ZUcTolPTNjOabWwYwrnsySvkJ/N0YEsUkULfJdW0jQxJc8iG0GUWFLZiQSOAo7y006nNyolDpxWhxuIwktQm6XBJZ29KCMwItFZUTgM3siphW7HX7cVo9J1Xs6LR68Hp8yORSNIbQNlNFoZWc7XWtBYVahkwupbrSyfZl+QwY37pZqw+nCoZoNQOv60TJIQslW8qRyiVk9E8kOcNATBPfb7fTi6PaTWWRHZlciileg86kDFsB0weUdNpt56l9XfAnDkO2Pg/X49MolMtRTp+Oqk3rY34NjQlwlVIlRqVQwOuiVaR1iiZvV6iPx5Ar2qGPaj7h8AnDVi7Si6sOCx8OY8rJnWRQqESgXnQr4SRavl9UJEY+ARmDQHEM741CDX2uh70RUn+7TxHkoQYSCbQZLXQvq9+rE5cmdRfOoQf/hIHThPnYshfFhEpCJxj+sKiE2CuOVH3i4NzHxPROffK6eTpc8BJkDoGoTFFlKdkmJozcVlFF6T5JkJvvr61rc0kkYorkvOcw/voE5xjfo3P3m3H2aYU6Pg5tegw2txeHx4tG0eDr3+8X/iIy5dEXblMaXPwajHxc2Nqr9LVj2e0SbMikktpMoEq7h2pjG5Kk8lBDORAaIE1kQt4siG0D1/4k2mk1wuO0fiJgMOo4M7Y8drBXCSamjWsRxJ5EtIwnnwBKci388Hx4ZT3AlU/2Jza1+fUdLruXkhwLq+ceoLLQjileQ/+LW5HcJgq1XngV+P0BfvtkB/s3lJDcxkSPUel43X48bh+GGBUF+8x0OzcN/Vk64hoIBCizuvB6/MhlUnTqBq6gjcBhdbN1aR4bfj5Uq42UK6SMvqEzGV1jUSiDyYr7cB4Hxo0DT2jpWtOvH2lvv4XcdGyW1haXhRfWvsD8g6Hl6Bu73MjkjpOJUkWhVWixVbnYsvQw25fl43H6iErUMviytiS3NaHSNoM3RXPAUgBz7oSDS+q2RWWISkFYp9Bmhq1ULJhypWiBHA+qiwRhyG1gyheVITQUUQ1MBwMBseiV7RVtDZlCCGALt8LYf9el8jotYlFT6ups4T1O4QgrAbZ+H5r/AiJYcPLXgthoomHWzUKLsnOueM7JXwmdS7iv8Av/KyacjozwutpcwOHR7/Lpqny25FXRKlbHbcNbkxWnwyDziUrOpulQskNM43S7HEwZx6XbsLm8fLYim1d+rUtAntQ9hn8kr8O07Mngg2UKMVbcFFFsc6C6BJyVorqiiTk+l9lAQOiWlr0kRt5lSuh5NQy4PfRvpAWNoqnrdwtROQFYK5388J/12M2hLZ6oRC2XPNALrbF5iYDP52fvmiKWfLk7ZN/Aia3pMTIduVJWS1TcDi/t+yex7Ns9eGryQyTQaVAyfS9shTH21N2RVzgqKLQVsrZoLUalkX5J/YjXxKNpYn/YfSQ47njEo4FAALfDh1QmIW9PBT+/uy30IAlMebI/MSnB5NLy22/k331P6PFH0OaXRSgzM4/5msrsZUzfNZ1vdn+Dw+vAqDQyucNkErQJ/Gftf7io9UXc0/seErQJ+Lx+7Ba3iEVQStGeSZNlLhv8/ED4HBNjKtz8+9njEWEpFGZiNePJ3SYJfxFTWvjjAwFB0qpywFoKcW1FgrMuclxCLewVYlrn45GRj5n8t
bhbzxwIeevEFNLKNyG5h9DgRHLcTe4BHS+Epc9DQmdWDZ/ONV/vCXLFBXj58u6MTyxD9dmo4FFqhUZUINL7Hf11hEGlzc22fDNvLt5HodlJt1QT/xybRpzjINLlr4nx5NR+oj0WnRU57bkp8DiEuNjvEd4sx9LyOx5UHIQPzw2d6IpuJSawIv2ttCAELT4qpwC6KBXjbu/G3Nc315EAQKWVc/6tXZudpADYzS6Wf78v7L6187Np1zcRY5wGqVRC5yEp+H0Bfn53K/76X1AB2LWykKS2Jjqfc2oWkFJ7KU+teIrlBctrt0klUv41+F+MzhiNtpEJBGuVi/w9lexcUYBEIqHX+elIUuwsy1vGltItdInrwqiMUaToU5CHGbWsrnByYFMpBzaW0OmcZHb8GWGkNQA7lhcw+PJ2SOs59Prtjet5Ar7jG1GO08ZxZ887uaz9ZeRV51HhrGD+wfl8tO0jAOYemIvdY+fZwc8K/5Vm9IdpVthKYdsP4fdZ8qEq9+whKsZk6HGlcDL1+0UlQ9pIe1QiEZNEjU0TRYI2RnijNAZrsVjIAeI6gNt+pK2lheqCyI+zFIj2myGJ4nGf8cC3h0JICsATc7YzcGo86Q3H8D0OUcG58ZfwKcxHQbROybD28fRIM+Hy+tGp5OhUciBReJZ4jxi+HUt7LhzMefDHiyK4z+cW1bvzXxRtnRN1kg0Hr0u0+sKNnVdmC2PA+knMLWgWtBCVE4BEIiE+08iVT/Ynb08lZYerSWxlIqWtCf1JWlQcVg9uZ/iF0e8NYK10YYwTFYr4dD2bF+cFk5R62PBzDlldYo/p7tzn8eGweggEQKGSodYdvfXgD/j56eBPQSSlZvsTy5+g64SutI4Kr/OwVjpZ8O5Wyg6L/n1UopZCRQ73zJ9Wa8T2S84vvLP5HT467yN6xvcMErhayhzMemUjtiphlNW+fyLVFeGDwSQSkMml+H1+pPWEd5qukS3YFenpSE9gOkohUxAIBLjtt9tqLfrr47fc37i3970YlKduAuuY4bWH1x7UwByBGJ4s+NzCE8VWWuc1YkgKL6b0+cBaKLxP5BpxrEp/8jUTNVBHCRfbhhNAIK49Kp3Ahs+RnPdvkV2UMVBc48KHIWMA7Pst9HEAKb2g/fnQ8UIqrToKzAfCHuby+sl3KElXaENN3CoPCd3RcRCVGpi0YXQbKn3zkIjqIvh6kog5GPNP0aLzecBjEy25MEGKJwxHpdAHRcK2H6DzRDGF1YJmQwtROUFIpRKMcRo6x52a8TbpUcK86nuMqPVKqhuZ7rFWOPH7A7jsXiTSo7dUrJVONv9+mJ3LC/C4fKS0MzH48nbEpOgaNVgrc5Tx1c6vwu4LEGDRoUXc0fOOsPsPbS2rJSkA7caZeHLz/4W4xbp8Lu7/435mXDiDRJ0Yc/R6/Gz8NbeWpACYSxzEpRk4vKsi6PEZnWPoPjKdwv1V/P7ZTpLbRtGqexyGGDXyuDiiJl1B1fcNqgYSCUlPP40i/sQSpc0uc1iSUoMqdxWZHHtr6ZRBqRf6i4aeIDWIbXPqrsVZLRaSnx8QibMgKguXfCjGf+vfwdvKYftM+ON5sQBJpKJdMvaF5tEauKxi4VSbaqsyVpeXsmoXOwrMSCQSeqZHkXzRa0i+mRQ6GdP/Ftg1H8mhvyguuweFMYkYnQri2sP5L4g8nrUfif/Wh0Qqpp5qWhDmYK+ehmi0999UQ0O/T7xWuerUTdCU7RUeKRe9Dr/+o16Io1oImXVxzd+GkcgaTy9WGsIT4hacEM6+kY+/OTR6RW3FpCHUOgU6U/AdTFrHyHeGsWl6LGUOFry7hZ/f28aBjSXYzKHZIAC2Khc/vb2FLYsP17a5CvaZmfniBioKIixQR+AP+Kl0hkmfPYJCa3gzJke1mx1/BZe3JdEuCmzhS95ljrIgAzWn1c3eNUVBx+xZXUTX4alHZiYFklobaT8giZ/f3cqGRTkc2FjK8u/38d1zaynPtyIzmYi/916SX3geZatWSHU6tAMHkvXdDLS9TzxWQKcI/8XXPro9d/e6G4PCENHG/4yAPgkGRRjFTe55ats+pbth9q11JAXExMy3k4SOpAZ+P+yaBwsfqktGDvhh109CoFp9lJZMY7CWiOmhGVOEvf3KN6Aylyq7m89XZDPyv39w5zebuOPrjQx9aSn7lZ0JTJ0l0nr1icLS/eI3xf9v/AI0UczdWsp7fxzA7PAIgWtSN0jsBtfNF8fXIDoLps4SZMbrgdzVRFVsItkUvsKrkktJU7vCW+Jroo8uNnVboWQnLHpMZBSteF1UYvxHG0duBhxcBqOeFm7CNSQFRFtp+WsicLC5JZj6eOFpEwkDbj0xvU0LwqKlonKWQWdSMfbmLsx5bVOQLkYqlzD21q4hbZzUDtGodQqcDbwNAHqdl8Hvn++iuly0QvL3VJLWMZrRN3QOiQEoz7dSkR9KSPz+ACtn7uf827pFbANp5Bp6JfRifXH4Canh6cMjvt6GWm9voJEWAyJtubHHO20e9q8vYeQ1nVgz7yC2Khc9RqWzdPqekBaZ2+njt093MuG+nmhjY4m65BJ0Q4eC14tUq0XWDOJtn9+HUqbknVHv4Pa5cfvcfLf7Oya0m4DZZWbegXnM2D2DgckDuaX7LaQb0sPqcE4r5Erod4u4q171tlgoJBJhUHbhqydf3FgDV7UYBQ4Hv09YpZ/7mGgZuKyw9LnwxxZvF6TmeAzIbGXwy+PBmp2CjbD1e3aNmRM0CQPg8weY/OVOVk/Vo4xuJZJ87WWw/NVaV9vS7rfz7V/VZJcVcUXfNEw1KcQaE2i6w1U/ihBBv0/Y9te0asoPwpcTSIxpyyvjPuLa7w7VjgzX4NmLO5Hg2xH+tYx7RZDQSPA4Ye+vMPPGOkKwfzH89Src8PPJz8+JbSc0PPYIFaNl/xF/gzXTV82FjhfC9lmQE9zKpvd1YkKrBc2OM+wbrwVNQVyGgclP9id7cymFB8zEpelp1y8RQ4w6SAQKYIzVcMkDvfn9852U5oq7TI1BwaBL2pC/p7KWpNQgb3clpTnV6LoHE5XsrZHzdfL3VeFx+SISFZPKxP1972fqz1PxNyhvp+pTg7J86kOtU9BhYBIrZ9b11zVePRq5JmyFQSlVEqeu87pQ6RS06ZPAnlXBVZV964upKrFz8d09gCO6H0d4AlRRaMNp9dQKoxVxR8/n8PsDlFpd+P0BdCp5xHh7h8fBmqI1PLniSapcVQBEqaJ4YuAT5FhyeGvTW7XH/nTwJ37N+ZWvx31Nh5hTMO57rNDHw/CHoPe1Qu+hOOKUqm46mXO6vZTb3azNrsDi8NIvK4Ykk0q0O5oCt020AyKheBvsWQRzbodJXwpSEQlF2449fwaEoDKMsLi61VjeXZYd9iEVNjerzNEMkymRLHw4aJ+j46VsoDPZZcL9eunuEtonNtAr6WJDp4z8fjGF5XUiKdlOn+3/ZuH1T/DRhmq2FjnJilEx7Zwk2pgkqAznwNSZIlyx8hDEdxJeNAmdGx9PthaL97Jh1cJlEanRU2ed3Dyb1sNEBScSLAWi
NdTcMCTB5Z+K6t2WGaLV1GuqqGY1ZdqrBceMFqJyFkIqlWCK09BzdAbdRwZCyElDxKTouPieHjitHvzeAHKVlFWzD3BgY5g0WGD7n/mkdYxGXs9PRGOILJpVqo+eH9TW1JZPx37K82ueZ2/lXuQSOedlncc9ve4hSZeE1+PDVukiZ0c5ljInaR2iiUvX065PIjv+LMBcKohJ7h927hh0N//d9lLIc9zV6y7iNHVfjAqljH7jssjZWh5SUdKalGgMCrRGVYhepSH8vqaXj0ssTn7aWshHfx6kwuamf6sYHjm/I+0S9agb6HgOWw9z79J7g8hblauKh/98mLdGvhVCyFw+Fy+te4nXRryGUXVqR/GbBLlaJNseI5xeJyX2MjYVb6WwuoJ2pi4UVkuZ8M5ORnVM4J8Tu9alG4PwS7EWidFej120OXTxQjsQ3yG4xVMfCZ3qbNQDAVGi90ZYyI5X21CTBdQALk0ShZbIi+ZrK8oZcM19yLtfhW3rXKR+D+bM8/ijSMXTc+uEtv6mtlR8Tsivq2Cq982n/aHFPNfxMqw9OqL1VKGxZELccNHiaTtaeLR4nYJkaqKO/hxleyO/f4VbxPj1ySQqmhjRWowEfcKxpTIfCwyJ4qd15GpwC5oPLUTlLMfRSEoNNHplbUiizewKEqg2hN8fKu1s2yeRdfMPhT2+6/BUNMbGp380Cg19Evvw0ZiPsHqsyCQyotXRaBVafB4febsrWfjeNpLbRdF1eCpej5/iQxZiU/RMuL8X+9YVs2dVEfYKD4P159Lq3Eze3vIW2eZsMo2Z3NnzTnon9EbVoD9sitdyxWN92bmigIObSlGoZfQYlU5a++jaKokxToNEKgnrMqzWKZo02QRQbnXx6KxtLNldUrtt+f4yVr27gu9uHUjfrLp+v8vr4rNtn4VUmEBoeuYdmMeYzDHMOzAvaN/aorVUe6rPTKJyHHB6nawoWMGDyx4MCn4cnDyc/065i3umH2Bgm1iuG5QlyLDHCYf+gh+uqxPvSiQw4A7hEjv8Ydj3a+gTSWXQaTx8fbn4954F0PUykeHTEGqTqCYcFwJC19FzKiR3F9e4fRb6si30SenN/pLwn7v+WdHIdDHYFSbecI1nW4GF7SvNIaGC57aNatplyFSiMnKgngGfx4Fq23RUAOkDIKtfcHvrWM3P6pMUQ7IgBua8ulbMCSSLNwkKtTCKU5tCBcUAQx9svHXVgrMGLUSlAfz+ANYKJznbyynOthCfaSCrW6xoqxxl4uZsgUavoP2AyMSjy5CUEHdWfZSKYVPa8+e3waX1+EwD3YanIovw3ridXtwOL1KZFK1RSYwmhhiN+EK0mV2UWqpxWNxIJDDyuo54XT6WfLkLz5ERbLlSyrApHegyJIWOA5ORSECtV9BOkkb3hG64fW4UUkXtOcPBGKeh30Wt6H5uOlKZJIR4aA0Keo/NZMPC0Pdj6OR2aJvo3ltQ5QgiKTXw+QM8NXcHX93Un1i9OJfda2dvVeQ2RbY5mxHpI0K2yyVyJJyiqYpTgCJbEff/cX8IYVtRuIyO0d0Z1LorHy47yLiuySQY1WIh/PbK4HHoQABWvyPGUdtfAJd+DAvuFy0IEHf1E98XpKQmGG/HbLjic6jMgZwVdefSRMPU2cKo7njQ51poN1p4bax5T5yvx1WoU/twqy6J2dvKcfuCX6taIWXKgEwUMikKjZQre8bw3fq8EJIypWcsiYW/Q9z5Rx+flspEJMDa94ON3Gow+F7R2pKfgI1CQiehyTj3cVE9MR8WE15ShTDMU0cd/7mbClOGEBTPmCL+NuBIiOPtwqivMQ+cFpw1aCEqDVB2uJo5r9YJVfesKWL17ANMuK8Xia2Mxx1CdyZBKpPS6ZwUdq8qCtKoaI1K+lyQSVy6HmuVE41BWUtAlBo5HfonkdYhmoObSnFYPWR1jyU6SRcivAXweX1UlThYN/8QeXsq0OiV9ByTQVa3WHQmFZVFNha8uxVzSV1rI7NrLJ0GJ+Pz1H2Re91+lnyxi5hH+pDYKtimPlrddK8L2RGiFA4KtZweI9OIS9OxbsEhqsudxKTqGDShDXEZ+iZXrVYeiDwGurPQgtXlrSUqGrmGNsa27K0MT1YyjZmU2ENJz5jMMUSpopp0PWcSfH4fpY5SbB4bKpmKGHUMWoWWJblLwlaVAGYf/Jo7+r7DkzPN+Gp0ENtnRvZsWfYitB4BXSYKv5H6PirqqODqic8DM28WYXWD7hSjyjGZIivIkNK0Bc7jEBM+HhsodKI64XXDjKvrrtFaInQUGYPImvgBM24dyCMzt7LvSGWlc7KRFy/rRnr0kUk+n5c2O95mwTVX89U2O8sO2YnWKri9j46enq1EL3kCOg5vms9LVDpc+S3MuqWu4iBTikmZjEHHZx9fH/pEuOg1mHFV3eQUCF+Yq2Y0v4g1HKRSUbm66Xfx+/bYxXXpEkDVvAGlLTh9aCEq9WCrcrHow+1B0zQg/DgWfbiNyx/piz76DHUHPUYYYtRc8kBv9q4rYu+aYlLamWjXN5E187L56/t9qDRyug5Po+uwlNrXrNTIUWrk9Lng6F8A5fk2Zr68Ab9XLDAum5c/pu+mdc84Bl/ejp/e2hIi5M3ZXo5KK6fDgCR2rQweWd6wKIcxN3Q5aWnBGoOStn0SSWkfjd/rR6aQ1rbKmgpDIz40MqkEaT2Sq5aruabDtSzKWRjioSJBwpQOV3HXkjuDtsdr4rm7992NuvieiahyVrE4dzFvbnqTCmcFMomM0RmjeaDfA5Q7IpO7CmcFRo2M3plRaJUyUTkp3RX5icx5dZk7xlQhBPX7RdVApRNi390/1VVVPA747Skhjrx5ybG5y1YXiemWjZ+LFohcDX1vglZDCetMkrsKma2I3pmZfHvrQKrsHiRAlFZRS14BkMmRqbRkfTuMR9pfxB29BqBwV2Fc8a1w+E3qLto6TYFCA21GwbQV4np9HvG+6BPEvhOFyyLIXn2SAqKyMu9euPp74WFzKmBMPjXEqAWnBS11sXpwWN0hi2cNbFVuHNWhI75nMwwxanqNyWTi/b3oMCCZOa9tpmBfFQRE8OGGhYdY9OH2iN4qkeC0evjzu721JKU+cndWUF3hjPg+799QQlb3UAFeVYkdj/sk97wBrUGJPlp9zCQF4Jw2cRG9ri7omkSMLvicKeo0nuv7HwyKuikOg8LAv/q8QLwvmU/O+4TxrcczOGUwTw58kunjppNuOLtCz/wBP3/k/cEzq56hwilEy76Aj19yfuHuxXdzUZuLIj62S2wXDpS4eeyCTpg0RxJ9WzUiXkzqLhZga7FoeXw4Al7vAl9fJnQt+mS4YZHIwQFxvtYj4bqfjo2kOC3w65Ow9oM6nYbXKdpPO2ZBnxvCP26v0M7E6VW0TdDTJkEfTFJq0P1KAJS75xC77DGMq14UJAVEm+VYJktkclHhSOsn9BzRmc1DUkCQH0sE1+H89aJK1YIWNANOWkXl3//+NwsWLGDz5s0olUqqqqpCjsnNzWXatGksXboUvV7PddddxwsvvIBcfnoKPeEW1vrweU+
BiVEEOK0e3E4vEqkEjV4RNJFzIpBKJUgkEpb/uC+smLQ424K5xB62vRMJLqeX4oOWsPvUOkXtBE84+H2BsJb/sal6FKrIvLraXU25o5zVhatx+9wMTB5IgjYBtU+H2yHeN7VeEaK98fn82M1unDYPcrkUtUFxXCQFIMGg4j+XduORmcGBh2nRGh45v+ORrJM6RJuMDLIP5r0en+FQiNFxtcuAv0hJQkY8WcZUno19Fq/fi/pEtASnEaX2Ut7Y+EbYfXsq9+DwOmhjasOBMBbvN3a6hzh5Bu0S69mttx0l2h4N7+IBRj8j/rvocdj+Y932gk3w+UWiDdJxnBibdZpFW0gTI/xIjgW2UtgeIdto249w5Tew7uPQfU2ZpAExcXTVD/DjDXWZMjIFDH9MiGDPFLjCf8ZrcSabFLbgrMJJYwRut5srrriCQYMG8cknn4Ts9/l8XHjhhSQlJbFy5UoKCwu59tprUSgUPP/88yfrshqF2qBArpTidYcSEpk8ssahOeD3+5GG6Yt7PT7K86389d0+irMtSOUS2vdPot+FWRhjm+fOyN0IsQDRkklp13Q9iERCxCkaR7Wn0XA9uTL0PZBIoM/YTBSq8H+uFpeFH/b+wOsbXw/aPr7VeK6MupE/381BKpPQrl8i/S9qVevs67R52LuumDVzDtTmJ8VnGBh9faeQBOWmQKuSc2H3FHpnRDN7Uz4FVQ5Gd06kd0Y0KVGhvyupVEJcshGVRoG1yonV5kAdJYNkLw5FNVpikUvlTTZ4K7GXUGAtoMBWQLohnSRtEvHaE7P3P1E4vA7KHJH9SvZU7uH9Me/z/JrnWZa3DH/AT7ohnUf6PUbHqC4kGhqQCFO6MBObfbsYgQXRyrjgZVFRseQHk5T6WPgwpPYSEyonMjZrr4jseBrwi6yhcGh/ftPOr1BDq2EwbbnwAvG6RI5NzQj2mQJjIyPcCk0dMfP7hZ+JXH3q7PVb8D+Fk0ZUnn32WQA+//zzsPt//fVXdu7cye+//05iYiI9e/bkX//6F4888gjPPPMMSuXJIwWRoDMqGTihDct/CE0n7ndhFjKFhLK8anxePxq9Eq1ReUKVDa/HR3W5iz1rCqkotJHaPro2X0ZyRMBZWWRn1ksba6sMfm+A3SsLKdxXxcT7ezWLZkYilSBXSPF6wleM1PqmjefWQKNT0rpXPAc2hJmA8fpR6xXEpuooD+N02214Kod31pWMNQYF507tiCkxsi4jtzo3hKQAzMueR59eA4hOSqCyyM6e1UUU7K3ikgd7Y4hRc3hXBX/NCBazluZWM/f1zVz6UG9M8ceuBdGr5LRLNPDw+U1zqHT6nKy2LOfxFY8jlUprfVP6JfXjP0P/Q4K2aa6uOZYcbv/tdvKsebXbWpta8+6od0k1HOcESzNAIVOglCpx+8Mv3knaJJJ0STw/5HmqXFV4/B70Cn1kgiWRiNHhqbOEG6vPA+roI6GDUti9MfLFmA+Lto3hBLUMRxNphiMT4/57bOF+NS0bU71WX3WxCNuzldZpTU6VBiQcdPHQ4yrYEmbEe8h94vdSuhc2fSls9lP7QfdJEJXZuJEciEmlQODox7Xgb4HT9lewatUqunXrRmJi3Rz/2LFjmTZtGjt27KBXr/AZKi6XC5erTjNhsRyl/HgMkClkdBiYiCFOzZq5B6kqsmNK0NB/fGtiU3XMfmVTbdtCJpfS54JMug5LRWM4dlLl8/o5vKuChe9vr608ZG8uY+28g1zyYG/i0gy47B5WzjoQthViLnVQkmNpFqKiMSjpNCSFbUvzwu7P6nZsd58KtYxBE9tQdKAKW1XwAtV3XBa7VhYy6NK2bPo1l/w9ooQvlUvoNjyNnmMy8Lj8tO+fhMvuxeP0cXBLKWqdgtg0fUhwotfv5ZtdYb4oj2BGznTuGvgklXNElkl1hZOCfVWkdYxm9ZyDYR9jt7gpOmg5LqJyrChxlPDQnw+JyZd6PHFd0Tq+3/M9t/e4/agVlTJHGfcuvTeIpAAcNB/ksb8e482RbxJ1KkZFwyBWHcuEthP4YW9oq0Sn0NW67OqVevTKY6hi6eJCqyL2cjja6HZzxA9o4yGxq7Dab4iU3hDbXrR/dv4kBJ7dJwlioTqBBOzyA/DtZCirdxOVNRQu+eDY9DXNCY0JxjwjpotWvydaQbo4GPaIGA3OXQ0zrqwbj96/WExAXfdTZNdfW6nI7Vn3qTCt63m1eE9bhLJ/a5w2olJUVBRES7kN/wAARh1JREFUUoDafxcVFYV7CAAvvPBCbbXmZECtU9K6RzxJrY34vQGkMil+v58Z/1yLy143Funz+ln7UzaGGDUdBx3bh8jl8GKtcPLbJztD2iNup4/fP9vJ+P/rhd/rp2Bv5DC/g5vLaN3zxHNU5HIpPUalk7+3MiTPZ/jVHdBF8BGp1c0c8Tap35oxxWu47OG+HN5VQfaWMtQ6Ba16xJG3u5Jtf+SxZ1Uh3Uak0WNkGhqDEo1Ric6oxOP2sWLmPrI3B7cLdq8q4uK7e5LROXik0uPzUOoI77ALYnJEbgpevA5sLCGpjRFLWeQeelG2mcxusai1x1ZNOhrMTjMVrgrcPjcGpYG9FXsjjud+s+sbrmh/RW0adCRUOCo4UBWq8QDYVLqJSlflcROVKmcVpY5SDlQdIEYdQ7oxnURtIlJJ03T4arma27rfRrY5OyjryaAw8N6Y90jUHkeeTiSUHxSLp1wtxK0N0fpc0DaDU6o+HiZ/BdMvg4p6ZDe2rbBWj84QPx0vPPHnAiFa/eYKQVbq49Bf8MtjMOEdQQY8jiOusseouTkBVEiicPa4C1OXq1DgRq7SITUkixbcrJtDPVy8TjEufeMvoRUma6l4PfUjCHYvEETlyq9PbbhlC84oHBNRefTRR3nxxQihX0ewa9cuOnY8ecFMjz32GPfff3/tvy0WC+npzT8JoTXULc571hQFkZT6WDPvIOmdY45JbFqwvxK33RcyBl2D8nyRL6PSyFHrFNgt4cvmkQhEOLidXhwWNw6rB7lShsagQGdS4XJ4yN9dydoFh+g1JoOAL0DhQTP6aDVt+yagj1KFVDF8Hh/l+Tb+/H4vxQctSGUS2vZNYMD41kG6GUOMms6DU4hN07Nh4SF++2RHbXvJ7fSxYZGwOr/yyf6YjuhGKovsISQFgAD8OWMPlzzQO+i9VsvVDE8bzurC1WFfd5+4vlTnBP/utCYlUqkUjUERcZLLGKM+Juv8puBw9WGeWP4EG0tEe0Ir13JD1xu4t/e9YQWn1Z5qfE1w97R6IrsMA9g8jadbR0KpvZRnVj7Dn/l/1m4zKo28N/o9usR2QdbEOPtEXSL/Hf5fiu3F7K3cS5wmjtam1iRoE5p8jiZh63fC1v3CV+Cne4MXSUMyXPBS8y3iMa2FVqbqsJjIic4SIthjae80FdVFoSSlBrt+gmEPw6JHRLUlti2c+w9R8TnJhOVgqZV7Z2xmW77waNGr5DxwXnsm9vQSXV0Y3i0WRJ6QvTz0vSrZGTYniYKNsHMuDLi9RePyN8UxEZUHHniA66+/vtFjWrdu3aRzJS
UlsXbt2qBtxcXFtfsiQaVSoVKd2hjtmjC/cLBWuo5pGshW5WLN7IN0H9k4ufL7AmiMSrqPTIvYomjfv2l3o3aLm3ULstnxZ36tBtCUoGHc7d2xW1ws/ECUsBd/vgtdlJLYVD0+rx+VWh5CUgAqi+3CI+XIQu73Bdi7ppjC/WYueaB3iFhWrVWQs7U8bAvLEKuu1cD4vD7ydofm7uijVXQYmIzOpMTbgNxJJBLOzTiXD7d+SKUruPqklCq5KvNaVs8K1sp0GZKKPkpFz9EZrJodugDIlVKS20Sh0jRfwbHYVswtv95CvrVunNPutfPO5nf4v97/R6+EXmwq2RT0mM6xndHIjy6YjlFHNu6SS+SYlMe+YLl9br7Y+UUQSQGwuC3c+tutzBo/ixR90+9waxyJO8V2OuZrOSZkLxNi1CkzROuhulCMIxtToLljBwzJ4ud4wguPBdbiyPsCfqg4AIeW1x37+TgY/zZ0nyySrU8CCqocTP5wNaXVdW14q8vLsz/tJE6v5GLTUawcakikxw5SpbDbX/dR5OPXfSwiD05VEveJwusWomqlroVcNQOOyUclPj6ejh07NvrTVBHsoEGD2LZtGyUldYvIb7/9htFopHPn483ZODmIz4jcW9ZHqyLax4eD2+mlvMCG1qSM+PerNSpR6+RIpRI6DkomtUNUyDHDp7RHH63G5/PjsnuDyJLL7sFa6aSyyIa10snO5flsX5YfNKhgLnEw9/VNIVUdW5Wb3B0V7F1TTGWxPeR5XQ4vq+ceDFttqC53UnQw9C5KY1Aw8JJQAiuRSjj3mo61FRKH1YtUHvxe9jk/kwHjW5O3u4J1C7JZ+vVuCvZX4bDWVZlS9al8ecGXDE8bXmsv3yOuBx8N+5T9c2xBIuGBE1tjjBNi5bZ9E2g/IClI1qDWKRhzYxd00Spk8mP6eDSKbHN2EEmpj+m7pnNJ20uCtkmQ8HC/h5vkvhurieW8zPPC7ru8/eXEao5dcFnmKOP7PeED9mweG7mWXMod5RTZiho1bTul6H6F+O/eX+CbSaI1Yq+ANR/Anl9EJszJhs97xF+kUCxWzYHGAhJlSjFm3RCLHm2c4Jwgtuebg0hKfby4aA9eQ0rkQMDkHmIBX/cJfDcV5t0t0qpbj4j8hF6nIGVnOhyVkLcO5kwTUQ8r367zwGnBceOkaVRyc3OpqKggNzcXn8/H5s2bAWjbti16vZ7zzjuPzp07c8011/DSSy9RVFTEE088wZ133nnKKyZHQ3JbE0qNHLcjtP3T76JWx9SCkcqkIIGDm0vpOjyNbX+ECliHXdm+dvHWmVScd1MXLGVOcncI59aMrrFoDErsFjfbl+VRcshCVJKWHiPTUSjlHNxSStnhagyxatr2SWDz74dDngNEpcVR7UFrVIZtLxXsqyKtQ/BC6XF6KdhbFfH1HdhUQts+CUFRA0q1nE7npJDROZbqCicBfwC5UoY+Ro0+uu6983n8mOI1SGUS/L6AOI9UwuIv6txI8/dUMXvPRs6d2oH4TCO6aBVuhZ14TTz/GfofqlxVBAhgUBpQebSkXuLg0NZyFGoZWd3j0EUpUWlEBccYq2HA+NZ0G5ZKZbENhUqOSifHFK8Jui67x061uxqJREKsOva42hW7KoIdVY1KI+NajSNZl0yxvZgMQwZyqRyv30ubqDY81v8xOsU0rfpgUBp4tP+jRKujmbVvFh6/B7VMzZSOU7i2y7XH5WTr8XuCkpvr4/Yet1PqKOWFtS+QY8khw5jB3T3vpl9yv9Nr7x/TGjpPEG2CQAAOH6nY6uJg2IOgbCajs0gw58GGL2HzV2Iaqcslwp4/OuvEzqtPhPSBcDhMe7PHlLpE6PpwW8FWIoSuJwFbDldF3JdX6cAsiyF29DPwy+PBO5V6oan57IJgIrXlGxj+KPS9EdZ/GnrSThOE582ZDKdFkK8l/6rbdugvWPEq3PgrxLU7fdd2luOkEZWnnnqKL774ovbfNVM8S5cuZcSIEchkMubPn8+0adMYNGgQOp2O6667jn/+858n65KOG4ZoNRPv78XC97fVOqpKZRJ6n59JqzAuqo1BrVeQ1S2OXSsKGXRJG4ZNac/2ZflUVziJS9UzYEJr4tMNtePJAFqjCq1RRVLrujvCgn2VzH1jc61JXdFBC7tXFTH8yvbk7iwnb5dog8SnGyLqawAs5Q60pvBERR8VekckkYpQv0j6Gn2UOiQPyePyUXTQzOIvduG0ipKwWqfg3Gs6Bj2HXCnl4KZShk5qx58z9tJ+QBKLPgw2T6vBqtkHGTq5Hfu2FLIj+S/WlK3iui7X0SW2S11AoUqkRjfMCKoPY6wanUmJPlYFAXFdNSPnXr+Xw9WHeWfzO/yZ9ydauZbJHSZzabtLjypwbYhMY2bt/49vM56RGSOZuXcmywuWk2HIQCKRsOCSBXj9XnQK3TFXQeK18TzY90Fu6HIDdq8drUJLvCYe5XHG3KtlahK0CSF5Q6MzRuMP+Hl8ed0ClG3O5v5l93Nfn/u4uuPVIQnWpwy6eBj3CnSbBKveBle1SEzuOUX4kJxMmPPhywlifLgGaz8UTrU3LxGOsMeDQECQnonvwK9PwN5FR8Z2FdD7ekjrA3PuCP9YycmJmwBokxB5QitWp8QjUUHPq0Rw4dLnoSpHpDlf8CIsfSF8tWfZf+Ca2bDxi2B9kTYWOl0kkqhPUiurWWAtCiYpNbBXwC//gMs+BvX/Rur5qYYkEIjkXHR2wGKxYDKZMJvNGI0n94/AVuXCYXXj8/hRG4SPSkOn06bAUibaLpYyJ9FJWjoMTEJrUBKbpic2VYdM3vg5bVUuZr60geqK0MkGhUrGqOs7seiI7mT09Z3564e9uGzhycp5N3fhr+/2hohKpXIJkx7vR2wD47NAIMCWJYdZ8YP4QpYrpLTtm0Bsmh6Pw0e7fglEJQb7SJTnW/nuubWhHlkSmPx4P+LS61pr2VtL2b2qiM5DkvH74Od3t0Z8H86/tSuLPtrO0AdSmLpiEgECTGgzgQf6PnBMgYWRkG3OZvL8ySGVhU4xnXh71NtN9jgBKLAWcPm8y+kQ04HzW53Pc6ufCznmucHPMa7VOBSy5p00Oh4EAgHmHpjLkyueDNr+xrlv8PCfD+PyhZb9VTIVcyfMPa2+LbVwmEUwoDoKZCdvwa7F5m9EuT8cBv8fjHxCkItjgdsutCc/3S1IV58bxOSS2ihImVwNH40QraaG0MXDrctO2ujy4Qo7Y1//E3uYWIt/jOvETUNa1YV52iuEaZ1CCy4zvNkzfKIzwOhnBRlZ9Y7QeLQbK0adf34Qrv5RVM3OVKz9//buOzyu8kr8+Hd60RT1LsuSLPeGuww2uGAbTDclIQESCNVJyMKyQEggm90ElmQhCSGUFCAJwcDyo9iYYgyY5t6bZGRbtixZvc5o+tzfH2OPPWhGtmyVsXw+zyMedO/V6L3Xku6Z+573nOdh+X3R96lU8OOtpx6wDlAne/+WXj/dkJBoIDXXSkaBHXuq6ZSCFABbqokr753AwrvGMmh0CjqDluyhiSRnnzhIAXA5fFGDF
Ag9vQj4FdTa0B+JsrU1jJ4Z/Y9VQqKepAwz9vTIqQGNVs3s746IOD9Phw9Hiwe300fxpAwGjUwma4idi+4Yg98XZOtHlezbWk/dgXY62o7dxPy+IFtWHoxeyFOBTR8ejOjhkz0kkaGTM/jslT2xq38eoVKrQIG2gz7Gpo5lZu5M6jrqOOw43OXXnYwOXwdPb3466vTH7qbdlDaVduv1MhMy+cv8v3D9iOv5w6Y/RD3m12t/3eVS676kUqm4IPcCfjbtZ9j0oT8gKlRo1dqoQQqAJ+AJ9/PpdyZ7qCdOXwQpXidsXRJ7/663Qjfr7mr8Gl65NhSIeJ2hp0QvL4KXLgkln1oy4Ko/dw6ANDpY9NfTL2zXhexEE/+6dRopx/WvUqng21PyuPKc7MiO4+bkUB0Ukz2UZxIrSIHQaqCmCij5IZx/f2gK6+UjS7ODcZ6j0lVOkqKcGTk2cUrK/vUTS5IRS5IxagO+EzrBDVwJKkemXxQqdzdROD6VsbNz2bGqKpwEm5ydwILbRlOxo4GiCWmMn5NHY7UTo0WHNclAW6Mbk1WH1+WnsdrB2rf30VjtxJZiZMqlBcy6YTiOFg9v/e/mcCKvo9nDir/tYujUDGZcMxSjRYff46fxUOzlsY1VDnyeQDgoMph1FE1IJ6PQhs8diJkbZEs10nGkWaJFY+WB/F9SvcmJxgiGVjtus++0aqC0e9s7rXg53vJ9y5mZOzPmfqfPSaunFUVRsOgt2A12RiSPIBAM0OaNXqSww99Bo6uxW6tpelOiMZGri69mZu5MnF4neo3+hEuhder+fxrU51Sarhv96UzRE1674nHAqsej/677PbDpHzD74VDuyp2rYfM/oWZbqI3AOd8NTXVFacnRUzRqFWNz7Cz90XnUtLlxuP3kJZtJseixGbv4GTDYYdB0OPhV9P15U0J1VnzfeIOQMapP68Ockq6SgfOm9k0y9wAlgcoZyGjRxUyA1WjV6I0aAsetdPns1T1c99AUxs7Kxe30o9WpMR2ZujIm6NizvpYv/+9rdEYtao2KYVMzGTo1E41Ow77NdXzw553h13I7fCz74zYu/dE41i/fH3Vp9p61tZxz4SCMR5onJmeZYy7xziqyE/QHaaxy4PcGMVl1mGx6LImhFU0X3jyS5c9sjyiMp9Wpmb5oSHjZdmqGjXd+vyX8N73043omXexh3Jw8jAmnduNUocKsNcdMKLXqY68EO9h2kCc2PsGnlZ8SUAJMzpzM/ZPvpyix6IRVZr+Z39PfNGoNWQlZcGQ2r66jjsyETGqcnacbMswZx/KDziY6I0y9PZQ/Es3kW0NF4rrD6withInl0Hrwd4Sq3aYWw5yHQwGM1gA9WZumC2q1iuxEU9Q+VjGZk+Di/4G/zD3WefqokVeEgr5vBikaHVzyu9B0VjyzZcP478KWf0Zu1xpDuVPms/B3o4dIoHIGSkg0MOuG4Sz/07ZOb7gmLwyVqD/e6Jk5GMxaLElG7N/4XTdZ9Yy9IJeic9II+IOh5ot2A2q1ivYmN6v+FdkL5yifN0DN3tjtCyp3NZGaa0Wr1zD+wkGUrauFb4zVnm5i9Mwc3vnDFpoPh5ZCqzUqxs8dxPi5eZisenKGJfHth6ew8/Nqmg87Sc5OIHdEMhuXV9BS28HYObns3Vzf6TpsWF5BwbjUUw5Ukk3JXDvsWp7Z+kzU/VcWXxl1e7Wjmhvfu5FG97Elu+tr1vPd5d/l9UtfJ9mYTIoxJWL/UTa9jRRjP/ZuOQnp5nSevOBJbv7g5oggzqQ18eSsJ7uVtzOgZIyGUYtg5xuR23OnwrCLu/96OlOoJ07Lgej7U4pDN8Cj1BrQ937Lhx6ROgJu/xw++1+oWBVazTP9bhgyO/Tk6duvwOf/C4660BOjGfdCckF/j/rEzMkw9xEovhC+/D10NMDgmXDe3ZB4Bow/jkky7RnK5wnQWtfBhvcO0HCoHXuaiYkLBqPRq/n0n6U0VoWmaSZelE/eiORT6glUX9nOa79aH3XfgttG88Gfd8SchZpx3VDGzgrVf/C6/VTubuLjv5eGp3H0Rg1X3DOBpU9tiVoddsZ1xYw5Pze8+ingD+Bs9bFvcy27vjyM2W5g1OwMnHV+vvy/8k5fDzD6/BzO//aw7p52WK2zlsUrF1PWXBax/ebRN3Pz6JuxGyIf5SqKwsu7X+Z/1kev3nzlkCt5cMqDbKnfwl0f3YVfOTalpVap+cOsP3Buzrnhpy6BQJCOVi/tjS58ngD2dDNmqx59DxaiOxWBYIDDzsN8UfUF2xu2MyZ1DOflnEdWQlbPVpo90zjqQ5VxN/wt1C34nBsha+ypV6vd/3koH+WbVCq448vQdMiZzNsRql6r1nZ+4tRxpOGkwXrmBGDHOzp+o63racGz3MnevyVQOcP5PH58ngBavSZcRdbV7iXgD6LSqEiwnfpS0cYqB0v+a13UfWNn59Ja7+LA9ujFvq7/xVSSMo+t/jl60+1o84ISSuStr3TEXNVjsuq49qeTOwVYfn8Qr8uPRquio8PF67/sXLTuqCET05l3y6iIpd7dVddRR2lTKe/uexer3spVxVeRY8npFKRAKAH3xx//mLU1a6O+VmZCJv+6+F/YDDaq2qt4texVdjftpjipmG8P/za5llyMR94lB3wBqstbef+57XjdR85PBeNm5zFhQT7mU2iEKfpIMAgET78Boqs5tJroo0dCNz0IrZy54k+hd+3daeIoRBw62fu3TP2c4XQGbUQzQOCkujk7mj20N7pwtHhITDeTkGhAZ1DjPrKM2WjWYbTosKeZwh2jj7dvcx2X/vgc6iraIp6IWFOMXPCdYaiOTB2ZjuSpaDRqrMnGiPL6patjr85xtfui5r9otWq0R85PpVIxaFQyezdFXylTPDnjtIIUCE11pJvTmZEz44T5I1q1tsscjURDIlq1FoPGQGFiIfdNug93wI1BY+i0JLm92cOyP26NrACswNaVlaTmWRg+TbrJxi21mh5ZUGlKCi1JHn4JNO8PBT6Jg8CSGd/1RIToYRKonIWaqh2884etOFuOJbNlDLZy7jXFLHt6Gz53gMJz0ph6WSELbh/FG49vwp5mJmdoIoGAQuWuJmZcV4w9zcTVD0xi/9YGKrY1kDcymYzBNr78v3LqD7aj0aoZVpLJpIsGd+r/A5CSE/sdodGiO2EJe71Ry5RLCzmwvTGiTD5AUpaZ9MGxE16762SSXPUaPd8Z8R3e2/9e1P03jbyJA20H2N+6n6yELNLMaVhivCveu6kuZlPEDcsryBvRvUaY4gylN4M+X+pviLOa1FE5yzha3Cx9KjJIAaitaGfT+wcYPi0TJaiwd2Mdb/zPBvRGLdf9bAojZ2TTUOWgrd7F9EVFpOZa0GjV2FJMjJ2Vy8V3jiV7SCJvP7k5vMIn4A+y6/Nqlj61BUdL59obqXnWcEPCb5qwIB/zSdyI7ekmrnlwMoPHpqJWq9AbQ8m7l/14PJbE7uflnK7BtsHcNa5zpdD5+fNx+93c8N4N3PT+TSx6ZxHra9bjDUSvvdBUHXtJd3ujO2IVlBBCDGTy
ROUs42jy4GiOXrDrwI5GLrpjDNs+DvUf8nT46Wj38fFLu2muOdagsHJ3E/mjU5h9w3DMdgMqlQq/L8AX//d11OTa5sMdNB5yYPlGTyRr8pHWBM9sD08vqdQqxszKYdiUzMiiUTFoNGqSsxO48OaReFx+VISmvnqyoWB32A12vjvyu8wfPJ/Pqz7HG/AyKXMS6w6v47/XHqtG2+5r566Vd/HW5W9FlNc/KmdYEnvWRW8ql5JjQaOT9xjiDOKsh5ZK+HpFqCFh8bxQkrGUlBcnQQKVM4zX7cfT4UelAqNFj7abN6xotVeOUhQI+I9FGjnDkti/pSEiSDnqwI5Gag+0k5ITxGTR4fcGqd0Xe7nygR0N5I/uvPQ2JdvClf8+AVe790gdFT1mqw6d8diPprPVQzCgoFarYjaA1Bu14WTi/mbVW7HqrRQmFtLqbuUHK34QtZKtP+jng4oPuG3sbZ325Q5PwmDWRu3TVHJVESaL5CiIM0R7LSz9cWSdmQ8fggv/GybeKIXQxAnFx1920SWfJ/Rkw+fys27Zfg5sbwwVZpuWycQF+VhTTn75mz0t9rFafaiz81H5o1PY+VlVzON3fVGNyaJDb9IyZlZuzCqyQKcAw+8L4mr3ogQVdAYNqbmd80ncDh+VpU2sfXsfrfUuLEkGJl9SQMHY1JNKGI4H7oCbitaKmPvLmspQFKVTDow12ciV/z6Bj/62i4ZDoWqwRouOGdcWk57fc7k3QvQqRYHSpdGL4a34GRSeH1rCLUQXJFCJc22NLvZuqiN9kI13nwklugIEgwo7P6/m4K4mrrx3QtRk1WjMNj05QxOp2tPSad+oGTmUbzg23aBShb5PLEpQAVVoJUraIAtjLshh43tRClSpoHD8sToJjmY3G9+rYPfqGgK+IKl5FkquKMKaYsRo0WGy6An4Auxec5ivjquR4mj28Mk/SmmdP4hJFw/utNopHhm0BoYkDWFHw46o+ydnTo6aqKtSqUjJtnDZ3eNxOXwEA0EMZh0JiYaTmhITIi4460INBmPZ+CIs/N/QHxshYpCJ7jjmaHbz7tPbqD/oYPfqw+Eg5XjtjW4qd598wzOTVc/c749k6JRjS3d1Bg0T5ueTmG6KWOrr9wUpnpQR87Xyx6RQVdYCwOdL9jDy3Gyyir7xGFcFc24aEX6i4mzxsPSPW9nxWXW4zH9DpYOlf9xKY5WT9e9W0NHmwdnmZd07+6J+3y0rKulo61wk7mQ4Wz20N7lxtnjobgmhYFDB2erB0eIh4OuisdpxEg2J/GTCT6Lus+gszMid0eXXm6x6krMSSM21Yk02nrFBSpunjWZ3M4GuGtKJgScYCNWDicVR03WTQiGQJypxrbK0maZqJyPPzWbLRwdjHle+oY7C8WkYTNqTqhtiSTJywfXDmXJpIX5vqFicWgNeV4BLfjSOhsp2coclYUs14fMGKFtb02mVUHq+FZ1eQ1tDKAnW4wrgDwSZf/to2updVO5qwmjVM2hkMmabPpw/0lzjpKkqyooWBTZ9cIDCc9LYu6menGGJ+L3Ru40GgwrOFk+X01jf5HJ4OVTafNw0kp5ZN4zAlmLE6w6gM2owWfQxS+47mt2Ura1h5+fVBANBiiakM3Z2HvbUE49hZMpIHj3vUR5b/xitnlYACu2F/M/M/wn10RnA6jvqWV+znpd3v4w74Oaigou4uODiuGm8KHqZwQaDz4PdS6PvH34ZaOQ2JLomPyFxyuPys/vLUEE0vzdw5EYffbWO3qRh2yeVaLRqiidlYE0xnrDuh86owW400d7kZv2y/ZStqyHoV0jOTmDGtcUkZSWgN2oxAYvum0Dpmhr2rKsNfY/J6dhSTXzyj2MJolq9Gq1WTYLNQILNQFqeBZfTj6stlIcStCgYLToO7or99Kf+YDsT5ufz1Rvl5A5P6nL8Wv3JPwwM+APsWVvLumX7GT4tk6whidjTTWz/9BBlq2vC01s5QxOZc9OITjk/jiNPgY4PsLZ9fIiv19dy9f2TsJ0gWLHqrSwoWMCkzEm0eFrQqrUkGZJIMcV3X5/T1dDRwENfPMTqw6vD2/Y072FJ6RJeWvASOdacfhyd6BMGC8z6Kez5AL65FN+eCwXn9c+4xBlFpn7ilEoVatAHsHdzPUOnxp6CKTonnW0fH2LNW/t47dH1tNR2XqUTjbPFw9KntrD7q8MEj6z2aap28vbvt9BQ6QgfZ00xMXZWHhffOYZzrxmCPd2MVq8huzgxfMyYC3JJsIUSXDvaPKx+ax///NlqXn90A//6xVrefWYbjhZ3l0XKdAYNwUAQZ5sHtUZFUmb0Hh9me6jz88lytnop31jLvB+MorXexf6t9ez8rIrdXx6OyMGp2tPC8me3d1oZdbi8JepTIFe7j+2fHiLgP/Gja61aS2ZCJsOThzMkcciAD1IgFJQcH6QcVdtRyyulr+ALnNr0nTjDJA+BW1aEGgxCqMLumGvhe++GghUhTkAClTilN2oZPTP0jrP+YDuWRCM5QxM7HTfi3Cxa6jrCy1g9Tj9fvl6OJ8bqm+M1HXaGuxZHUODz1/bgag/dsDvavaxduo9//WIt7/xuCx88v4P3n99O/ugUhkxKZ9i0TMbNyUOj0xDwBdj2ySG2fXwoorJqzd5W3vn9FvJGJUesLDre0KmZlG+qI2OwDYNZx/xbR2MwRz700xk0XHzHmJjLlKPxOH1MvqSQD57fwYEdjRSOT6N0dU3UYxsqHRHTXH5foMtS/+Ub68JtB8QxgWCAN75+I+b+ZfuW0ezpIndBDBxaPWSPD3VFvnsb/HgLXPo7SBrcv+MSZwyZ+oljWUV2socmUr2nhU/+Wcr0q4oYOSOHqrJmdAY12cVJ1FW0sW7p/oivO7CrEY/Th+EEXXYP7ow9DdNQ6cDnDWACqkqb2f7JoYj9Qb/CZ6/u4dqfTsaWagp/L2ebl60rK6O+ZvPhDoKBIBfePIqP/rYzojhcer6V/FHJvPf8Dq66dwLGBB0Gs5brHppMdXkrtftbSc2zkjssCUvyiae2jqc3adn68aFw88JQvZjo+S8AbQ0u0gYdWQKsUnVZq0ajU3drLP2p2dXMQcdBlu1dRkAJcEnhJeTb8nvt6Y5aFfu6qVQqkOK6ZxdzcuhDiG6SQKWfHU0M9XkCaHVqTDY9Or0GCNUemXfLKGr3t7F91SH2bW1g1Iwspl1RSNWeFlb8dWenHjcAKERd0eJq9+Jy+lD8CiabDktSF9MwRg1qtQpXu5cN71VEP0iBPWtrOPfq4vAmvycQMwkWoLHSQeE56Vz/n9M4uLMRR5Ob1EE2vC4/a9/ZxyV3jSU5O9R1WaVSYU0xMSzFxLCpmTFf80RUKhXVxy3HVmtUqNWqmEuvj78uWq2a0Rfksm9LAwAZBTaMFh2tdS5aajsYPTMHU4w2APGk0dXIbzf8lmX7loW3vb7ndWbnzebnJT8n1ZTao99Po9awqHgR71eE6meYtCaGJg3FF/RR1lTGZUWXkWw8i29annbwuUJVWvUJJz5eiLOYBCr9yOXwUr6hlnVLK3A7fai1KoZNzWTKJQVYkkJ
[base64 "image/png" data omitted: Matplotlib-generated figure outputs embedded in the patched notebook cells]
JBJJcjaRHyZxWzwxNWpAD8dai02UJFLpGQHq3h1iGZwgKPE/74D5QdnyJxVJP29WC+uOh1irSdOWpTk2zUkmJUER9x8iiU22ZBtf7fsP8n2PW9EEVQG+HwUlj5CsoJH2E1JAYJlaIaOy8u3Mdve4rx+YUozvRe/bgrVUpMSn/IXR9yf470i/jy9xx6pRnIr7IHiBQQjNue/nUPs2b0YcleYdu1brkKnZComj4EDv2Bz1ZFRcJQNpsiefCdPTjcPga0iuKNK7uREO6926vAWgE+txCJOSFIFFrh53zGUgZ/vQqbP6lbJlXC5Z9Cq5EgP0/7Hp0BWiz0WzhtfG43lV+GcWMEyj/8EK8pOEP/dHE5Gs/IPyEQALyVFeH7FbndeKsE34NqZzXPrn02QKQA+PHzxJonqJS6kMaEr4CSZwgODpWFVpZ/sT/gGBwWN4ve20V12dl3nPW5XDizsij6v39ybNp08u68C8vq1XiqqvA5nXgtFvy+8E6zLTQfEYKvSjjkJ7GybxJSBQy5HxK61y2zVxH7+210Ex1lStdY+mQYmyRSAOReE/JjK4RfvC44tBh2fAMFWwEwHl2AqoGmrrS6ePiHnSzcXVxbleTy+vhsUwnvbbFhH/VK6GiEIZ0j0rbkV9mZ3D2JuZvzQh6Tzw/rsironS7kxgXk9ih0EN2G6u638Vj1JYz80cFdv+bXTrWtz6rgqV/2YLKHaAVQcQS+uxbe6wXv94ePh8PeX4QoxYVA1vJAkQJCafX31zdevfRfQItQaeG08TuduPLCf1E8hUX4nI24NTbA5/NjrXZiqXLgDmGhfQKVTi7cHUIgFotQauqqREQn8SQ5MV7tqGZneehW9W6fm8OmLKLvCdHtFUAmQzd8OE67h00Lgr0sQHhvu1flYXOFdzsNhd/rxVNRgacqjJFUAxz79pE9eQqmX37BnZuLfetW8m69jYpZs6n++Wfybr+Dytmf4cr/777A/Z1EqGWM6hC+/HRYu6ZNo54UfRJM+x5uXAzDn4BJb8PMrdBqBMiUeH1+qqyu0DfrBkglMuHmHwafyoiyQfl1ucXJmgaJuif4ZlspZR4lTP0CYo/ntoil0PFS3NN+4r7fhAhKhEpKqTn8NaHM7CRSJWNEu5g6nxpziWBBX36Y4mob32/JD5lovOJgKRWWBtuuyYfPL4acNXXLLCXw442QvzXscZw3WEqFcudQ+H2wa+7fezx/My1TPy2cNmKVCnWf3ti3bAk5ruzWtckJtZZqJwc3FLF7ZQFup5f0zlH0vjidiBhVUHM1lU5Ou37xHNxQHLSdjkMSUevrLrCSqChkaWkhHUzlmZm1lU1ef+N26HaHBd2okdi3baNm3rza5SK1mpT33kUaH4/d7m3UybamyInFbkMtb9pTr7uwkJoFC6lZsACRTIZh2jVohw5FFsaC31NRQfEzz+APUcVVOWsWKR99iH3bNuxbt1IxaxZp335z3lYrVVldODxeFFLJqeUe/I3olDIen9CerTlVVNRrpigRi3jt8i4khjKrO+WdxQk/aYHNRPMrbczbUcDve4pRyyXcPDiTXmmG8NVKmhh8fW9HvOTJkMP+XjcGRUeKq8N/tt1ePyarHbZ8DONfA3slIIIjy6iw+0k1qjlabuVImYVuyRFsy60OuZ2uyREUVdu5eUgmkTIf5KyD+fcIURGpkqpLVoY9Br8fLM4G3+P8LULZdCiWPgUJ80+tt8/fhc8jdGUOR/kh8PkgTGfrC53z4l395z//IT09HaVSSb9+/dh03CW1hQsDkURC5JQpiFQhbrwSCdG3345YffL5U2u1k8Uf7WbDvKNYq5247B4ObS7hh5e3UFMWHIFQqKQMmNKK7qNSkMqEj7JMIaHXuDT6XJyOTFGnw2UxMSS/9y4SQ2CppSQqiuR33kYaHY3PbkfrlZGkDd8Xpq00EbFWS+zjj5G5aCFJb71JyqxPyVwwH3WfPojlcqRyCZHx4d+vJl6CydO0yIi7sJBj115H2Ztv4jpyBOf+/RQ//Qz5992HuyR03x2vyYTz0OHQG/T7cR4+XNukz1tVRem/XsdrsYRe/xxRY3ez+nAZN3y2idFvrGLG7E2sPFhKte3Uu1X/HWREa/l15iCeurgDAzKjeHJCexbMHER2uY3Hft7FT1vzKQjRnO9MkFtpY8r763h9ySH2FprYfKyKO77aytPzdlMeLnohFiPufDn+tMFBQ74hDyMxpgcudNRglIQ/fpEINFI/xHUULOi/vx6+vw4OLKDA5OK6AWlIxCLmbs7jliGZIWeI4vQKRrSP5b7RbYiPUEJVNnwxSRApAB4H0YrwDxQSsQi9qsEz+LHVYdenZG94d9rzBZkKErqFH2910X+tSIHzIKIyd+5cHnzwQT788EP69evHW2+9xdixYzl48CCxTaw4aeHcI0tMJO2rORQ9/njtTVKWkkLCC88jT2uabXRFgYWS7OBcFrfTy8b52Yy8vgMyZeCEuSZCQb/JregyIhmPy4dULkYTIUcSokpB2aYNGT/9hOPAfpyHD6No1w5lu3ZIo6NxHD5MxSefoBsxgifazWTm1ifwE1i5cHXqZBRbD0BiJ6QREUgjIlC0Cm5rr1BJ6Tkhlfz9wWJELBYR31uJm5OH5f0eD9U//oSnsDBozLFjJ/ZdO5GNHh38wpNVK0ikAfkplj//xFtdg0R7fiQTujxeFu4s5Ml5e2qX7S6o4YbPNvPspI5M65vaaBffM4nH66PU7KTc4sTvh2itnBi9AnkjZbDJBjU3D87gil7JrD5czsXvrqktgpm/s4gYnYIfbh9AevSZK9t3uL18uDKLsoZTHsDivSXcNjSTaLlLcIx1mECpB02MMO2jT0A0dTaUH4G984RlnS9DHJEk9K05jtvrQ2IpI7ZyB61ioskqC05kH9HGSFTxamg7VuihAyDXUDppDk8vKSch0sY7V/fg7eWHWLynmNenduOd5YfJqbAhEsGwNjE8d0mnunPjssHqN4I8XqKLVzMgoyvrs6uDjmFy96Rgz5qo1sK/UqVgxNZmjPC7uRD2/dp8U7u/G5UBRj0Hs0J831UGaD3ybz+kv5NzLlTeeOMNbr31Vm688UYAPvzwQxYtWsTs2bN57LEQ/RJaOC8RSaWoOnUi9bPP8FZX4/f7kUREIGsk8bQhhzaXhB07trscp90dJFQApFIx+qimTaPYozSU9khlXVw+RdaNDPer6HywnLxp1+J3ufA7XbQZ1Icv+7zFf3K/Zl/VAWLVsdyYdDldjvnQyTS1ZcyNoY2T0m9aMtt+LsJ9vLuuSiej57R4fiiaw8ykmSfdhqeqipr588OOV3//A9ohQ4J6AkkiIlF26YJj9+7gF4nFKDIyAsWPzwecPy4FpWYnL/62P+TYq4sPMLpjHMmGs1/hYHd5WJdVwYPf76TmeL6HViHlxcmdGdkhFq0yvGOzSCTC5PDw4Pc7gip1y8xOnv51D/+Z1rPpXX5PQpXNxa87CkKOKaRiYqiC+S/AvnlCToNIDJ0ugzEvCB2CtXHCT3pwdZ7V6SG30sYPW/KYmZxF7Oqn+PSSn
7jpV8iu1/+oZ4qeF8YkoFddIuSEDH0EolrhSerLv1dUsa+okH1FFo6UWriufxqJkSr0ShkfXtsTuVSCVCzCqJGjq39eXWbI3xx0TIa1z/PG5fN4Wh7F8kMV+P1CJGVy9yQeGdcOjaLBra3tWFjzBlz6Puz8Fn6+Bbxuwext1HMgOz9EeqPEdoRr5sJvD9Ulzyb3gUveg8hTq9a8UDinQsXlcrF161Yef/zx2mVisZhRo0axfn3o8jan04mzXmKm6SxUk7Rw6kijosKap50MecNwbf3tysWn7Wtgcpr4ev/XvL/z/dpl8V4tMe+sqe1gbF6+nJSpV2C9/3meuHgkvrbjEFVU439uLl6nE82XwT4soTDoI0joZqV7gg65S4NIBOWU8E7OKzzS72Fi1U2JFooaDeeKJJKQ50RqiCTh+efJmT4NnzUwnyD67ruoWbAgYJm6Xz8kulPrV3Q2qLS6gjo0n8Dh9lFmdv4tQiW30s6tX24J6LdjcXq4b+4Ofr17EN1SIht9/e78atze0AJwzZFySk0ODpaYUUolxOjkxOmVZ8xFtz73D4olce3TcGhR3UK/D/b8KNysL3kXVKHt3Z1uL8v2lXDf3B2o5RJuTVWDuYiM+Zczd9hLFKvbUmoXkRQdQayvgiiNGAypEJUBmUMA4SYzpaeWuVsFcZxTYeOFRYIQ1Sul/PHAUGRiMW6fD19DVSdRCD4hVQ2S011WEn68lH+Nf5/Dw4ciP96qIFqrCBYpICQgXzcPfrgh0IW2/BB8Nw1uWCiUPZ/PKLTQbpwwBeSoFpKU1VFCOfl/Oed0Uqu8vByv10tcg6TAuLg4iotDJz69/PLLRERE1P6kpLTYFP+30H5AeEfXToMTUelO7+mz0FoYIFIAuqva4Ny+o26Bx0PJy68Q/8zTyHNKcD/5Cp73ZqMdMoS0Lz5vlutsUkQiKYkJVOkKWetajk/t5IUhz9PO0LRuvlKjgcgrLg87HnnNNYgVoZMkFW1ak/HLL0TddSeqHj3QXzyBlE8+wVtdg2nhwtr1RCoVcU88juQU+hSdLaQnMcSTnYky35Pg8vj4fG122KaA7/15GGsjFWkgiJpw+P1wuNTC1A/XM+m9NVzy3lp25tcEtA5oCjU2FyUmByqZhKv6hL4WjsuUIqkvUupzYD7YysJuv8zs5JGfdgFgc3kpFseBQg/mImIX3UjXn4czavnFdPiyK1HL7ge5Br/fT16llSOlFvYV1rB0bzFGjZxvb+0XkBDdLk7HvLsHseVYFdd8soHRb6ziti+3sjWnqu7cqSJhxBOC6+zFb8CwR+o6ELtt2JTx3PDZZgwaOWlRmtAiBYSybnNJaKt8gMWPh3fTPd/QJ0BsB4hu8z8hUuA8mPppLo8//jgPPvhg7e8mk6lFrPyXoDcqGTytFX6jA7vcjAgRcruGsq1uOg9LRnKaN6j5R4KnUfwihKhFvZwNV1YW+XfPRH/xBFJnfYosIRFJlBHxKTRnTNAkkKBJYETqiGa/ViSREDFpEjW/zMOVHfhEqRk0CGXHDo2+Vp6aSszdd+O78UZEcjk+qxWf3Y595068VVVoBg4k6sYbkJ1nDRGjNAri9UqKTcEJjjFaRV256lnE7vZyoJ7Ta0OOlFqxubzhb4xA95TwPXLSo9QB5bmlZifTPtnAH/cPJaUJ/WqqbS5259fwzorDFFTZ6ZwUwd0jWmN1eZi7ObDkXE8jidJ+P9irww4fLbcGND18cVUVH1w8m5hfpwm+K163YKKmiYYpH1Lq0/D7+hxmrcmm0uqid7qB24dmYnZ6KKy2M3/mIEx2D1KJCL1Syqw1x/hk9dHa7W/MruTyD9bxyfW9hU7LLguIZfgLtiHa/hUY0mDYw1CdT5UkijkHJSRGqtA0pUnm0ZXhx4p3g/vs+xu1cGqcU6ESHR2NRCKhpCQwN6GkpIT4+NDNoBQKBYowT5EtXNh4FE7yk/fw/IbnsbqFue8IRQQvjHoR2RkwVa1xBRs7rTRt5dJhQ3D8+VfAcr/TSc3Pv2C88UZk8eeuNbssIYHUz2ZjXbOW6l9+EcqTr52Oulu3Ro3nTiCSSGqndcQKBfoxo1H37YPf7Uai14eNyJxLYvUK/jO9B9M+2Rhwk1RIxfxneg/idGewzDcMKpmYjol6ttfr8FuftnFa1CfpbhynV3Bpt0R+3RmYDC0Swb0j2/D+yqyA5TaXl3VZ5VxlbDzfwGx38/XGXP71R12H5MIawWZ/1oze9EyJZM7GXDRyKZd0S0SuDi+4gEZ9VDzeQFPALXlm/rExgmeuXIGxYAURliOI0wdD6kAqpLE8+dMulu6vq0ZbebCMNYfL+eDaniQb1Nhc3lp33uxyK5+uOUoonpq3m57Jg4jKWwY/3FBnl2SrgIJteEc8wxLRAD7cmMucm/sKvYBORkQjPcsUOhCd5wm1/8Oc06kfuVxOr169WL58ee0yn8/H8uXLGTBgQCOvbOG/keyabB5b/VitSAGocdZw38p7yTOHdrFsDuPSxwUtm5s3H9/d1yIJkVcTPfPuJvc6CkW1s5oDFQd4c+ubvLLxFbaXbqfCHtooqzFk8fFEXnE5KR9+QPJ/3kM/alSTREo4pJGRyGJizo1IcdkEC3N3+HJQkUhEt+RIljwwlIfHtmNMxzgeGt2WP+4fSvfUyL+lV5JcKuGGgelIwuxr5ojWjUZTACLVcp6a2JEXJncm2aBCIRXTL8PIB9N7seJAKUdKgyMd+wobz7lzeb0U1Nh5Y2nwFIbfD0/8vIeeaUaGtomhc1IEH67KYv4RN77kfiG2BqQNFqp/wtAqVht0Dv7KqmHkZ7k8nDeQmlH/hq5XQWTKcbEUXDLv8fl5b8URqmxu9uRX1y4/XGIO1xKIEpMTlbMUFj0Yclyy6mX6JUj46ua+9EhtYnfnNmOEJOJQ9LkVtC1Vpucr53zq58EHH2TGjBn07t2bvn378tZbb2G1WmurgFr438DmtvHxzo9Djvn8Pr7e/zVP9n8SmfjU81TaGdvR3tieA5UHapdZ3BYeOvI6X331OY4Vq7CtW4s4KgbNZVciikvCKVJyKmmbVY4qPtn1CXP2z6ld9vWBrxmUOIjnBz1PjLr5QuN8SnhtNg4zVB6BNW9CRZZgAT/4fsrlSVTYvDjcXowaOTE6BUqZBKlETFqUhrtHtMbj8yE9Bx4RKUY1s2f04cHvd9QauOlVUl6e0pXMmEaqRHw+cFtBIryfa/unMaZTHF6fH78f7v5mG9vDGJ11T42k2urC4fGhVUjRKgMv0SU1Tnbmhc9lKTY5KDM7A6I1r65y0/eat2j1592Ii7bVrZzUG6Z8GDLPwefzIxaLiNYqeHB024DoDQitAq4fkI5BXTcNt/pQ+FyXnfk1SCUi1PXEneokESmZqxpsYXoned2kiMtJz2xCZ+cT6OLhyi+FhNr65c5pA6HvbSA5MxVY/w3U2N1UWJxkl1vRKaUkG9TE6ZVhhfvZ5pwLlauuuoqysjKeeeYZiouL6d69O4sXLw5KsG3hzOL1+rBVO3FYPUikIpQ6OWrduXP+tHlsZJtC284DHKk+
gt1tR6Y49YtJrDqWdy96lx8O/sD3h77H5rYxJHkIM3vMBH8cR2OH4RjaG4fLz7Fvq3E795LWJYoR17ZHE9G86MMx07EAkXKCtYVrWV2wmsvaXHbK76Op2Nw2nF4nGpkGueQcurq6HXBgIcy7o94yG0cqZ3LXos0cKhEiCwqpmLuGt+K6AWkYNXXn+1yIFAClTMKQNtEsvGcwFVYXfr+fKK2CWJ0Caah8Kb8fqnOF/jGHl4A2HvrfAdFtiNUJT/0FVTam9EgKKVQiVDLaxem4+pMNlJmd9EiN5IHRbWkVo0V53Ddm49EKpJLGbxZqhZTRHePYkFWB2enBZPdwxbf5PD3iTQYM9xIvMSHRx4EmNsCN1e31UVhl57fdRewqqKFnqoGxneKY3i+FrkkRvLPiMIXVDjonRXDvyNZkNvCBaSzCJBGLkIhExEXWlfYnG1SoZBJUcgl39I1iaIoEfF4OVIv57agbcSNeNQCS5nYMlqmh9Sih3UDuWiF5Nm2QUNrbEk2ppczs5JXf9/PTtrpy90i1jFkz+tAtOSL0Z/8sI/L7wwXfLgya2ia6hTocVjeHt5Sw4ZcsXMc9PoyJGsbc0gljguaslEieDJvbxuNrHmdF7oqQ41e2vZLH+j6G7Aw89Xh8HirsFfjxo5Pp0Mg15O2rZP47O0KuP+rGjrTrFzpnKhRun5snVj3B4pzFIcdbR7Zm1phZGFVnJ2Pf5DSRXZPNZ3s+o8BaQLeYbkzvMJ1kbfIZOX/NpjoX/tMX3HWOpgVXLOTSeQ7KLcFOsy9O6cy0vqnn5HPYVFweL6VmJw63F7VMQoxegawqC2aNEZJL63PR09D3NhwSDf83fy+Xd4tmW56JN1ccw+4Wvn+tYjS8NKULT/+6p1a4gXCD/+aWfvTLFKYmX1t8gO4pkdz73fbaRnz1aRWj5bsJUiIPfEdpmytZUarh2aVFtdVLdw5rxYNj2gZVTvl8frbmVHHtrMC8IJVMwre39ad7SiQ1Nhdurx+VXBJSlGSXW7jo33+FnM4Z2ymeq/sk0zUpkiidgiqri//8eZieqQa6KktJWPMEkty1wsox7XFPeANZdCv4bDxUhshjUejhrvUQcX4lgl/oeH1+PlmVxSuLDwaNqWQSljzQtGTvptLU+/d/r+fuaWAzuSjPN5Ozp5yyPDM2U9Mb6p3vOKwuCg5WserbQ7UiBYRuv7/8exvmynNjJa2Wqbm1y60hxyQiCdd0uOaM3WSlYilxmjjiNfFo5BrcTg87V4TPgdm5PA97iBtqODw+DzXu8B1ZzS7zSXsKnSp2t51F2Yu49vdrWZ63nAOVB5h7cC6Xz7+cPRV7Tr6Bs0FNfoBIISKZfVZdSJEC8Nayw5Scx9+5UpODN5Ye4qbPNrMrv4atuVUcOFaA//fHgkUKwIrnwVJCjd1NmsZD+31vc0PRiyy9OoL505NZfH0Kn17TgYd+2BkgUkC4cTzxy26W7y9hwc5CeqYZ+GpDDk9M6BBkoaOUiXl9chtils5EtvNLkn6cyOWVn/LSWKGkvl+GkRsGpYcs7y4xObjz660BIgWE6qdHftyJvaqYiOp9RGf/iqZoA9QU0FCRxOiUPDuxU9C2EyKU3Dw4nTaxOqKOJ71WWJ18uuYY3fRmkn+ZXCdSAMoOIJszSYh4XDZLiITURyyByz4WIlYtnFFKzQ4+WhU6wdnu9rL5WJipuLPMOZ/6Od8wVdhZ/PEeynLqMuWjkrRMuLML+uimuZ+ejzisbkqyTdjNLrYuDm7MB+C0eig6UtNkl9czTYY+g9eHvc4/1/8Tk0tIKjQqjbw46EVStGevBN3nA5cjvOeF2+nF3wx/C5VUxdi0sawvDG1aOCx5GHr52Yn+VTgqeG3Ta0HL3T43z6x9hs/GfUa06hw3X9MnsbcsfAuBMrMTp+fsCLnTxeLw8PqSg6zLquCFyZ157KfdFJscLLw2BdHR0NFAAI7+hazTdUxqLUX71YcAJB+aTzKAIYOfus8iP0wPoKwyoUT4nm+38960Hhwtt7I1p4pPr+/N4j3F5Ffb6RCvY2rPRNqsezggAqHe8zUTr7mSTvcMIkGvClsdU25xhhSOSpmYtyfEovxxGhTU6zKsiREM1OI61ZoOahVSLuuVRN8MA99tzqPE5GRQ6yj6ZxgxiG3EiG1wPOOrwuKiZ2okmqOLQ4s7nxdWvgy9ZsBVcyB7FZTuEzxU2o6D/fOFPCd9032NWjg5Hq+fKlv472ZWiATwv4OWiEo9HBY3yz7fHyBSQOhB88cne7Cbz++GaOFwu7wcWF/Ewvd2IlVIGu3sW3w0fCTgbKORaxiZOpIfL/mR7y7+ju8nfs/ciXMZmDQQRXPnowG304PN5MQZohV8feRKCa17hZ+jzugWjULdvGjOoKRBJGiCL6JqqZrrO11/Su+nKWRVZ+Hxh36/x0zHqHGeg79vRHLgU7G5mLbG8M9IURo5cun5eWkqtzr5cWs+91zUhqfm7an1ehHhD4ow1NLhEkjqgaFmPwkKJ4x8JrDSxudGcZL3Kz4uBv65YB//uqIrNXY3d32zDbPTQ7ekCEa20pC08x2ke38Ieq12x2y6xKlDixS3E6pzaOvLYuWMON6cmERCvS7PN/SKofX2VxDVFykg9Az6agqYAkuv9UoZHSUF/FM+h/cz1nJ99fu0ndMD497PKLSJOFJURV6ljTi9gq7xSiILV4V/0/mboDofvr5CEEmaWKg6Bt9eDdu+hOrQD1wtnDoKqZgUY/gH1SZXWJ1hWiIq9bBZXBQdrg45Vppjxm52ozqHCaenit3kYv08oQrAYXahNSiwVIUOrUclNq1Rmtfrw1bjqhVvKr0cTYTitEtHpWJprUnaqeJ2eakptbH1jxzKcsxojQq6j00hMlFJZERw5YxIJCKjWwzbl+QGnReFRkqnoUlImnnjjNfE89m4z5i9eza/Zv2Kx+dheMpw7ut5H8naczev3rDR4t+CNg4mvgm/3C78Xp1D1wg7ESpZbQ+d+tw5vBVxOiUlJgdVNhc+Hxg0MuJ0yr+lNLkxKi2u2u689SMgm0u8dEjqi7igQef30c+DvRI+vxiR2y74gST2hCkfwYL7oCYPavLpGi048npCRO76pBvYWygIzFKzk9vnbGX+zMHIpCJcHj9ykY+ELa8i3vRu6IN2mY9XuTS4dlkrhBv+qldRuO2kA+mxHehz+QdcP7+Go+VWJreVI/shTL8pSylU5+DVJlBidmJ3e0kTVyD9+gowFXAiFbbyotdY6O3Pm5/uo8rmRiYRMaVHEtP6ZuLckUFYya6NA1u5IACPrQHWBI5XZUNq/3CvbuEUiNUreXRce2Z+sz1oLE6voFPiuckDbREq9XA7Gg83n+zJ/HzFWu3E5xEugPvXF9FleDLrf8kKWk8qE5PS8eR9elwOD3n7K/lzzgGcNuGcKDRSRs7oQHI7IzLFuTVOKj5aw4J3dtZO19SU2ik4UE33SxLpMCQeoy4y6DU6o5Ip/+jJ9iW5HNxQjN/np1WvWPpMSEcfdWoGY0naJB7p8wi3db1NSNyV69DIzlzH3FC0imyFVCQ
NGVVJ06cRqYg8q/sPiVQB7S+G21bC2reh4ghJRSv49uY7ue3rnbU3fIlYxPUD0rikeyLbcqu4f+6O2rEYrYKXL+vCwNZRqJviQnqWUMslqOVSqhuExz/YWMnYy14g4cdLwHM8zyu1P/i9Qkl2fQq3wfyZgoj56WYAYra/y8sXP8TDC44FrBqplnHX8NY8/OPO2mUur9ATZ2tODV+uO4bH5+eyztMYdeUEEhddF2wF3+VKkIdIgDy8BJY/F7isdD/J8y7nzQnzufQrK1KfQ+gLFAavqYi5W/J47Y+D+Hx+Nk9XIDXVVYt4M0fyq7sf/7e0zi3X7fXz/ZZ8jlXYeGPSgyRv+yT0xgfdB2vfCbtvjJnBy3xesJQI/0qVoD11v6H/VQa3juaFyZ157Y8DmI7f83qlGXh9alcSIs9NWkCLUKmHQi0FEWEbySq1F2advbheOWPpMTPt+sXTaUgi+9YU1karlVoZE+7sgtZ48imJmlIhj6f+eXJaPfz+wW6ueqovUUnnrhOptcbJn3MOhMwp2bWwiJSuEURq9YhDGD/pIuUMHB1N3xFR+NU6ZEopspN4PZwMhVRBnDQOn82Gt9KMR+o45aaNTSFKFcU/ev+DVza/ErBcKpbyz4H/PHf5KQodJPaAye+D245IrqGjVMmPdw6kwuLE7vISo1MQrVVQanYw/dPA6pMyi5Nb52xh/szBdEkK7kvk9HipsLjw+vxoFJKA8uYzSbRWQbxeKFGuT4nJyQN/Knj9qiVE7/wIZcFa6HcX/P5w6A2ZCoUEY20cWEpQHVvG+EEP0OXGTL7d5yKn2km35Ei6pUTy4m/7A/JHXpjcmcd+3s2m7LrExh151cyOUvPNpG9J/G503X6MmZAxVPi/1w0+tzANZyqCP18IfWy2ChKt+xjZIZ0Yo0GosHGGNqKzajN47rt9uLw+0qPUwtRM/fPSfSZv/hK6K/qm7Eqqfe2Jn/gu0t/uF8TFCXrdCOlDBWv74l3BLzZm1vb8OVGBpXZVoD3wI/KN7woOtrEdhO7QyX1Aef70sjrfiVTLubpPCiPax1Bj86CQiTFq5AGeOX83LUKlHiqdnLa94zi0OfiLldE9GvVpNsU7V6gjFCjU0trox+q5h+k0JJGL7+6Kw+ZBH6VEa1CijVQgOklo3e3ysu2PnJBizu+HHctyGTatPVLZuckxcFjdmCtCVy75fH5KC2pQGaVBhmvuwkJqFi7EtHAhIrkcw7TpyAYPgtNwpgXwud24cnKoeP8DrBs2IDEYiLr5JrRDhyKNPvOiQSVVManVJDpFd2L27tm15cnXdriWZN15UMopUwfkq8TrlcTr6yJWXq+PuZvzgqpPQPh8vbv8MG9c2Q2tsu67WFht54OVR/h+Sz5Oj48uSRE8d0lHOiboUZ3h6Eu0TsEH1wrOsoNaR7H2SJ3T8IYcEyO/sPDOFQ8zaswzSFwm4ek+HFXHoONkoStu6gC0C26jfWUWz06dgzX1IubtKOCmzzcHvCTZoEImEQeIFBDyWVONagrlGfivXYOqZAtGbyV0uxJkKsjbBBs/FqahOl4qdAo+8ZSi0ENse8HvpmQ3qKOJcuXzZncl+s1fCmZoq18POnx/6kBWFEpwHbfZL7e4sOgyA6ZyLCI9Jnv4c5BVbqNz1yuETssFW4RjSOkr+JqoDDDgHojtCDu+gezjbS4Se8AVn4EuHpfHy8bsSo7m5nFV6dvID86r23jpfvjqcpj6uXCez+Ny9/MNqURMUqSapMhzfSQCLUKlHgqVlIGXt0YsE3FoQwk+nx+RWESb3rEMuKx1sxMqzxc0EXLG3tqZhe/uxHc80rB3dSEHNxRz6YM9iM9o+tOGx+mlssgadryiwIrH6T1nQuVk3ht+sR9fg1C2q7CQnOnX4ikqql1W9MQTqHr1IunNN07LRt95+DA5V12N3y1MFXgrKyl64kl048YS/8wzSI1n3ktFr9DTPbY7rw59FYfXce4N35qB3eNlR5j+OgD7ikzYXN5aoVJicnDT55sDGgjuLqhh6ofr+fGOgfRMO/PJf5kxWlRyCcPbxfDhyqP8urMAt9ePRi7htqGZ9GqdiESrgBoPKCPBEeb9GNKh4gjkrIPV/65dLDakoVPJmNAlAZVcwlvLDpNfZad1rJbXr+jKW8sOB2xGIRXzxpXd2ZFXzc1ztlNjd9MlqQ1PT2hDJ7cbTfFGYarpxJRQ1nLQJ8HkD4RuwppoyN8MUW0guS+U7EHsNKFXKYQOvRIFDH8cNrwPjhoQS6Hz5Tgv+ifZm2u4Y1gmfx4o42CJmV3WSEZEtRLchwGFxIdYRNgu1DFahTAtZcwQfk5gr4K8zcI+zUXQ6iKY8C9h38qIWqO6ohoHt325hT+mx6FcPS/0ThY/Din9QN9Ir58WzmtahEoDNJEKhl7Vlt7jM3A5PMiVElQ6OXLlhXuqxBIxia0juPrZfhxcX0R5gYW4dD1t+8ShbWb+hVQuxhCvprIwtFgxJmqQnsMcFaVGiiFeTVVxcGWTWCpCHS1FJVVhMznx+UAuF1Ezb16ASDmBfetWHHv3nrJQ8VRXU/L8C7UipT7mxX8QddttZ0WonEAlU6GSXVgl9QqpmFbRWjYcDe3XkGJU17q0AhwqMYfscuzzw/OL9jH7hj5nJWSdILWCq5zXhoh5YWRnyvwGxBIJsXoFshOOqtp4GHiP4KPSEGUkqA0w/63A5f3uAJ3gyh2lVXBFrxSGtonB4/Mjl4jRKoO/Ww+NacdXG3NYn1UX3dldUMOVn2xhzlXpDMn6QUjeXfRQ3dSMuVC46e/9BXLWCvk0yX1g9hhw1ZWg+jOGIep7q1AefOtKocOwVEEhMSzeXcHyA6XIxGKu6J1MjFbBs0sOkXHZHNJWPYAobyNR2b8yvuMkFu0N7nFlUMtIjw6Rs+UwwZbZsPyfdcty18P6/8DNSwLcdFceLMOoUaCs2Bu8nROYi4RttgiVC5YL9+57FpEppETE/HedGolMgiFOTb9LM/F6/UhPofzT6/FhqXbSeWgSWdvLgqd/RNB9VMo5i6YAqPUKhl/fjvlv7sTbwLmzxxXxxGqjKdxmZduSfdjNbhLbRNBz4HiUW3fgWLs6aHvV33+PZuDAU2rg5zObsW8Pzp4/gXX1GlQdm9Gr5H8AmUTC9QPT+WZzbshq3/suaoNeVRfZXHkwfH+Z7bnV2JxeDGfOSBP8frwle5H8chuU7EUMKNVRJI97FVHbsSCptzOPHbpMFapTdnxdN9WiT4RrvhPESp9bhSkNTTQMfkCoCFIFRoFi9UqhQsdSAhUVXN07nZXH++qoZBJSjeoAkVKfZ5eXMXfQKGLm3SVEJL6/XhjIGA6HFgsiBWDQ/fDjjYHGfIAo+y98cV0Qpw8SKnB+vZvCUe9zzYJ8cirqHga251XTMzWSe0e1Zsq3+3lyxOtMHutFayvjychW5FS52FNYJygj1TK+vLkf8RoJVB4TojxlByB1gJBbUl+knMBRDb8/ClO/AJUQBc4ut2J3e/EpI8P+yQC4QCKKLYTmv+tu3MJJEYlESKWnNldblmfml39vo3XPWI
Zd0471v2ThOp4VrlBLuej6DkTEnsm7wqkRlxbBFU/2ZPeqPCpzHKiNEtIG6zEY9GyfX8DhzXUdXo9uL+fYrgom3fYwnqNHgiMrYsmpz22LxSCRgDdMNZnyHHQvvgBIMap4f1pPHvphJzaXcO7kEjGPT2hPhwblkTFhDMxAMCA7022CPJU5SD+/OHA6x1aB6Odb8M1YgPhE4mrFEVj6DBz6A/reCjMWgscpNADUxoEuQfhcjX0RHCb8MiWlThletx+FxUmUtt77qimAn2+DHKE8t8cl39AnVc/mXBPp0era0uVQHC23YtFlEmMpEYSOIUMQTp0vFyp+5BroPl2IOjQQKScQb/8S381LEX88DG9yP34+Kg4QKSfYllvNZT2TSTGq6RwrR/rzNVB5lERlBJ+NeJ1CTQf2V3hJiIundZSCBF8B4gof7JoLmz4Szs+x1dD5ivB/gKN/gqOqVqj0z4zi83XHKFelEy9TCxGfhmSOCNl4sYULhxah0kJI7BYX1monpTlmlBoZxkQNe1YV4PP4ObSpBEuVk+HT29VWFEUlatBHqxCfg4ZVDZFIxETH6+k3OQOrzY5P7EUiE0O5IkCknMDn9bNueTVDrruJ6tdeDBgzXH0VYnnznsaqHdVUOiqpllShmT8b2ZptuN+dhc8S6Oro798dr8+LRHxuy7nPN9RyKSM7xrL0gaEU1jjwev0kGQRXVZUs8FyN6RjHq4sPhIy+XNc/jWjtmRODXp8f98ElSMPknIiXPQfTfwSnGWaNruv8u+ED4UcTI0xd1J+CkCoo8+v5bWsR7688QonJSccEPY9PaE/35Eh0Ygf88UStSAGI+/0W3pv4Beu6pbI024Whkdw5iViEVHQ8sliyV8gDqcoWBErnqZA+SKisqckPuw1S+wsRIY+DylaX8tPG4Km2EyzZW8yH03uSULC0ziHXUUPM77cSo42n202LYeWzsHuuEGESiaDdBLhiNvx4M4jE4D2JsWa9P3bX5Aji9UpeWl3D2xM/I2b+tUJ10wn0STDxDVBFNr7NFs5rWoRKC0FYa5z8+dUBcnbXhZMlMjHDp7fDVu0ib38lhYerKaxnjjdsWltcO8vJ6BqN1qg87bLeM4FarkZdzz9ix6HcsOuW5ZoRDw6chtEMHoyifYdm7bPYWsxTa55iY/HG2mW9onvw3Cf/xnnrQ7ViRXXf7XxbupirUxKIVbd0bm2IXCIhyaAm6STzNvF6Jf+e2o2HftgZIFZ6pEYyY2BayL42p4rZ4UZfsC78CiV7hSf63T/UiZT6WMtg0ycw6v9AKojfGrub1xYf4IetdUJhX5GJ62Zt4r1rejAh3Y94fwPDNbeNuF+mMiW2AyOu+IEqsYGXfjsQ0ixufIcooo7+JPwSkVxng+/3CzfvudeC2oj/4rcIGzdM6oX46J/h33c9RCIR327O49a+g9HfugIOLAalTpjaShsEvz0Eh5fWvcDvhwOLhNLkvrfBxg8gqWf4HTQoNU6MVPHdbf15+tc9PLJZy1NX/klU0Sq0tlzEGUMQJ/aAiKQmHXsL5y/n/vG3hfMKn9fHvjWFASIFwOv28eecA3QbGbrE1efxs/vPfL795ybyD1ThDVFeeq6R1hNPar0cVb1yc5EIZAnxqHp0R92vL0nvvUviyy8hi2l6CXGNs4Zn1z0bIFIAtpZv57ni2WhefArNZZei+PJdFnZy8Fn2d0EVSI1R5agi15RLvjkfszP8U+3/EmqFlHGd4lnx0HCem9SRmSNa88PtA/j4ul7ER5zZRGKP148lom34FSJTAJGQ+xGOI0uFypnjlJudASKlPv+3YB8lTkV4w7XS/URW7SEhQsW703rQ0FkgLUrNo/0UqHfNEUz3knoLlT89ZkBcR6GXDgiiSqYWKnxCYE4ajDsiHQBj1q9M7RTeJ2lMxzjm7Shgfb4L1NEgV8G+ebDre6FXT+ZwIWrSkMN/CNEdrxty1kO3a4LXkSphwutB0zjp0Rr+M60nz07ugTcyHU+vm5GMfwVxh4ktIuW/hJaISgsB2Ewudi4P3UnY5/VTmmMmOllLeX7dNIZIBBGxqlr7+aWz9nL1M32Dmhu6HB6s1U6ObCvDWu0kvUsU0clatIZTc35tLsltDXTsa6BTFxW+nKMgFkFyBju3WPEgRZsSS8RHH4FEgkTTfAfZSkcl6wpDP3FvLd1K6dh7sHaYwNaybczK/ppBiYOa5FTr8ro4VHWI59c/z77KfYgQMTBxII/1fYz04zeQ/2XUCikZCikZ0RknX/k0EItFWFpfgn7Dv49b0gfiHPQwCrUR1I0Y+qmMUK8L+MGS8IKzzOLE5BGTIFUI+RuhiEhBKZMwvG0Myx8aztK9RRRWmhmaLKajopz4X68Q9nf1jxDTFm77ExQRsP2rgM2I/ngM+9TvkK54Dtnh3wRxpI2jctDTfJ+l4qrut2LY/R2SY38xub+PH/ZqyC4PrPzrlWZAJhVTZnbSTVsNn04SokgnOPondLpMKHX+M3CKFb9f8FABWPc2jP+X4Ga89m1BXKUPgUH3QmR6yNOgV8kCkqxb+O+iRai0EIDP5681hguFzeRCoQm8IPQan86hTXWmTm6nl5oSe4BQcTk8HN1exvIv9tcu27uqgMg4NZfc1x2d8eyLFbXUSUfHJipmvCO0TAaQyej20GNox44TStCVp97LwuJuvLNouaOCf/z1Dx7v9zhdo7vyQK8H0MmDew81JNecy3W/X4fn+M3Rj5+1hWu5/vfr+XbityRpW54a/w6MGjkHFfGUXfIVMb/fKuSiAIjEWHvdiSh9uGCuNmBm+KjKoPsC8iV0J7E9kMtkQmXQ+veCB1P6gS4eAJVcSka0lNuGtQanFaylUFogmJ1FpgnJu5Lj+yrZF2xEV3EE67EtrE5/gnbdH0Xqc1HilPHmRguF1dWM6tQBxryNYcUjJC64hm8mzmFZUQQ/7rMilYiZ2DUBvUrGU7/sYWrXKKK2vBkoUk6w92fo/JWQI+NqYHEgO34N8Pvxy1S4Wo1DkT4YPC7he3mBldq3cOZoESr/ozht7np9emQoVMJHQSqTYEzUhPVJSW5vQK6U4nZ60EQoaNs3nuKjNQFCBcBU6eDIthKS2hhQ6eTYTC6Wf7k/aHvVJTa2LMpmyFVtA6ZmzgbuI4eofOetBgvdVL3yPBG9ukDC6VUG6GWNixylVMm17W4hWdGDBzqPQuxRUmVzNerzYXPb+Hjnx7UipT5Vzir+zP2T6R2mn9ToroUzQ3J0JIfcvSmb/AcGdzEitw2MrVBExmEwHI+kxHaEAXcLvh/16T5dyLGoR0a0BrVcUlvdVJ/e6QYMGoUgbvw+2PypkGgqEuFtMx7vuFfxyw3BTf0UGlA0MFADQZxbSuCHG2DUs0H7i156D4PHfcQOWwdmbbNiczkY0S6GrsmRXP/VHrontuXey5cTactBZCnjsi49iIuOZnOeha835nKkVBDql7RTIfv1l/An8ehKwViuft5LxlDI3wIx7XCN+D9+N6Xy45xtXNk7hT7pBuLPgEixuz1UWYVEW71KilbREoG5UGgRKv9j+
Hx+qoqtrP7+MAUHqkAEaZ2iGHRFayLj1Kj1cgZd3poF7+4Meq0+WonX46Oq2MrAy1uTvaOMlV8fCIrAiESgjZSz8L1d9BybSu8J6eTurQzbQ+ngxhJ6X5yBznj2hIrXbKb8w4/Cjld+/gUJL714Sn4pJzCqjFyUchEr8lYEjfVP6E+kLJajR/rzwa9Z+P2Cc2e/DCNvXNktbNKoxW1hS8mWsPtcVbCKK9pegVL690yf/a+jUUjpkR5DqUmHxZmGTCrCoFKgrR8Z0UTB4Aeh5wyhp45EJlT8aGIEk7d6xOmUfHRdL276fDNub90XJEar4LXLuxKplgOxVPR/jIr06cg8ZjwSNQuy3Hz29n7enaZkYKso5NJGvjvmYiHRd9uX0OsGKD8o9M/peCns+7VuPa+bmMV3MPrWlfTv3Jsam5sHf9jJ28uPAFBY4+C3/VXoVTIkIjnjO9egVkj4dHV2wO4E4R2+S7ff7w8Q1v60QYgmvYMbCZujLufp30vIKjsGwOrD5XRK1DNrRu/TyjnKrbTxzvLDzN9RiMfnY3THeB4Z246MaM0578jdwslpESoXCG6nB5vJRXGWCbfbS0KrCDQRCpSa5j0VmMrt/PTqVtzO409wfsjZU0Hx0RqufKIP+mgVcZl6xt/emdXfH8ZS5RT6iHSKouuIZJZ/sR+byYWtxkWHQQkhp4k6Dkkie6dg1719aR5dhqfgsAa7s57A6/GFbCJ4JvE7nSHdZ0/gLijA73RCI0LF7fSCH2Qh3EEBdHIdT/R7ApFIxIrcFfiPX6wHJw3m9q538sVKC4v3BIbDN2ZXctc325g1o0/IUlqZWIZRaaTMHtrYLE4dh1Tc8jX+u4nVKwlbq2Uth40fwdq36kpto1rDlXOEaZ96N2mZVEy/OFh2cyuWZ1k4Uu1nQKKUXgY7idbdYOiFyS3iH78c5M9ac7u6iqLb52xl2YPDSDGGqY4yF8FPtwr+JACdLxP+XfUvmPBvYfpox9fCMaf0g6H/gOg26KQybC4v2WXBkdUTHXVTjWqu7JNM/8wovtuUi1wiZnr/NOJjZdDhUtjzY8hDsnWYSpVPhaTbTNwKI4aYBHTGeFYfKOWmb3cHrb+30MTiPcXMGJh+SpHDgiobUz9cR990I/+Z3gOXx49MImLzsUoUUjHJ4c5dC+cNLVe4CwCn3cORraX89XWgX0SHgQn0n9wKlU6GrcaFz+9HKhOj0oaeSvC6vez+M79OpNTfh83DwU0l9BqXhkIlI7NHLLEZEdhqnFiqnBQcrGLxJ3twO4TXlhwzYUhQM/nBHmycf5TyfAs6o5JOQxLxefys/Ul4CvP7/FSX2UjpaGTzwuyg/QLEpOrC3vy9Hh8OqxuRSGgaeapTHGKtFmW3briOHQs5rurVC7E69AXLWuOk9JiJ3X8V4Pf56TgokYQ2EWgjg6MYcZo4nh/0PDO7zyS7Jhu5RM720u1UmUXM2xHs4QKwM6+GcoszpFAxKA3c3OVmHln1SMjXXtP+mhahcjK8bmHKw3u8c/Bxi/qzgt8PBxbCqtcCl1ccgc8nwO2rj1cHHcfjRL72ddI2fshNST2FSpltWUKvHIkcZm6myhdT60TbEKfHx8ESc3ihcnhZnUgBIQlYZRB66Sy8X3CB7Xy50Jiw9AAoDbWl0zFaBbcOzeSl34KnbCViEWM7x2PUKBjVIY6hbWKEyrkT5eAjnhDcZu1VAa9ztL2UhflKHv29AJlEhNtbyPKH2iB1e/hy/bGwp/WrjblM7JbYbF8cv9/P0v2l3DAwnWqbm5nfbK9teJkWpSY9SkOsToFcdu7tFFoIT8sV7gLAXOlg5VcHgpbvX1dEYttIxGIR63/JwlrjIjZdx6DLWhOVrA3qT+S0e8ndF7qHCkDO7nK6DE9CedxAShupwOf1M++N7SHFTdGRGgZMacWI69pTfKQGm9nNnlUFVBUFukNKpWL00SoSW0dSeKQ6YEwkgiFXtQ0prkzldvasKuDI1lIkUjFdhiWR2SMWraH50zNipZKoW27GtGgReBpMVSmVRF45FZE0+OtgrXGy7LN95B+ou+DmH6giJkXLhLu6hqxY0sl1qKQqapw13Ln8TuweO//qPwpvI1GjUpOT9vGhx/rF92Ny68nMOzKv7pgR8XCfh0nWnp2OyJX2SixuCxKRBIPSgFp2gT51mouF6MbmT4TkV0MGjHlBqCJRNb0ZZ9P3V1RX9tsQexUUbgsUKpZS2PaF8P+CbYHre11Cj5vkSSEN7U5QbQtjkGatgE0fQ3xXGHAXKHSCIBnzAvx6t7BO6X5Y8YLw/1H/F9BHRywWMaVHIttyKlm8ty4HTS4R8+60HsRH1H325Q1bchgz4baVsG0OHFwECj2lXW5jq68tT84XyrHdXj+tYrTolTJ8PqH8Oxwer6/RcxAOq8vDwWITrWK0fLTqaMBYToWN277awvy7B4fuOdTCeUOLUDnP8fv87F1VEHZ8+5Jc+l2aSfsBCZTnW8jZU8HPr29j4j3dSOsUWCYpkYpQaWVUh+m6rtLLkUgCIxaaCBkjb+jA4o/3BEw7S2RiRt3YEbVeAYjYsSwvZFdlhVqKJlKBWi9nzC2d2LumgF0r8nHaPSRkRjDoitYYk4J9GWrK7fz06hbs5ropo9XfH+bgxmIm3NkVTWTzxYo8NZXUzz+j+KmnayMrijZtSHjpReRJoStnSrJNASLlBGV5FrJ3ldN5aFLIKI9ULKVbbDfmXTqPA5UHiJYaEYtywneRbcQK3qgy8lDvh7i+4/VsLt6MQqKgd3xvopXRaORn9gLr8DjYV7GPlza+xMGqg0hEEkanjea+nveRrDs7ouisYauAX++BI0vqllVlw9zpcNmn0OWKU2+PEA6vSxBH4Sg6nhtyAp87rHU9ADWFaFvJSIhQUlTjCLlKl6TIoGUV1SbcVjPxsR2g3Xj47eG6Kpw+t8G0H2DNm1C2X+jiPPwJSO4tdDKuR4xOyUuXdWXmRXbWZ5Ujl0pIMqjYkFVBqlFN6xgtslB9w3xeEEmgx7U4e9zA2lw7zy0pILeyzvpALIIXp3Su/exP7Z3MmiPlId/jpd2TGnXgDYdMLGZEu1j+uXBfyHGT3cO23KoWoXKe0yJUznN8Pj/mytAXKECY8vH6yd5ZTkLrCCbe3ZX187JY9e0hLnu4J5qIuhugQi2jx9g0it7fFXJb3UemIFMEfiQkUgmpnaK4+qm+QrSk2EZ8pp72AxLQHe+8rNbLGX1zJ37597ba3j8gdCsef3uXWlGhiVTQe3w6HQcn4veBTCEJmWPjdXvZtSIvQKScoDTHTGmOiYzImEbOWmjECgWa3r1JnfMlvpoaEImQREYijQrte+F2eNn9V3hr8b2rCmjdMxaVLvRUm1QsJVGbSKI2EavTzcSuiczfWRi0XqdEfaNCBSBSEUmkIpI2htCmXGeKrOosbvzjxlojOq/fy+Jji9lVtosvxn9BvCZM2Od8xFQUKFLqs/QpwWDsTHTUNZeAqUDoTBzbAbSxQqQkFPFdAHC6vSAChUwjRB8qjwo9gNqMEaZ88jYICbCp/YnSKnju
kk7cPmdr0ObGd4onVh/42amqKOVfy45RYfPx0ahbEX8+IdCWfvPHcGA+XPWVYDEvVTbaC8fu8nLHV1uQSyTY3d5awTRnQw6L7hlE67gG1W7WctjxLaz+FzhqUIhldL1qPg+MasX7K7MpszjpmRLJQ2PakRlb95DSLyOK9vG6oG7Y8XolU3snIz0Fl2GFTEJalJr8qvBicE9BDZf1vMBE+P8YLULlPEciFZPWOSrIKfYEcRl6Cg9VU1FgoaLAwsGNxYy9pTPLPtuHy+4JECon1u80JJG9qwNvmD3GpBKVGNpxUiaXEJWkZfCVbfC6fUjlkqBM+agkDVc91Ye8fYK9fnSKlsxuMWiNyoB1xRJxyNyO+titQk5OOPavKyKxQwQ1nmoAjEpjs/I0ZDExEHNyoeP3+/E1Eo72ef1NDkdrFDKemNABh9vLkn11Ia0eqZG8e3WPM9qT5lQxOU28ufXNkG65hdZCdpbtPPdCxVwsmJ9JZKCNp9Gug8WhBXntds6Eu29VDnx7teC6CkJfnH53hO7+q4zEndCD5XuK+W5zLjKJiNuGZtJrzEuIq44Kibb75oPHAV2vhpj2tW6xA1tFMefmvrz02372F5kxauTcOiSDK3olB5a3V+eTX2Liu+1lDMg04j+yPHTvHHOxEGW59qeTNuxbdaiM/KrghyWnx8d7Kw7x0sRM1Lrj27CUCb2DDv1W58DrcxP97XimdL2aITe+hkesQKOQoFMGPqTERyj57MY+LNpVxLebcnF7/UzunsiVfVJIPo0W2BqFlMQIJYVhIlKdk87CFGALZ5QWoXIBkNY5CqVWhsMSGGEQiUV0HpbE0tl1YU23w8uOpbl0GJRQ2zCwPmqdnP6TW9FlWDI5eysQS0SkdoxCHSE/aQWRRCJGEuapRiQSoY9S0WlIEp2GnJ4BmUhE2P2AEKlZfGwxb2z/NxKRhMmtJzO17VQStAmntd+GyFVSOg5MoPBQdcjxdv3iUWqbHo6Oj1Dyr6ndeMzipNrmRqeUEqWVY9Sce5ECYPPY2Foa/NR+gpV5KxmbPvbvO6D62Coh609Y8U8hcqGNhcEPCYmg2jCiU91I+wORWIhcnNYxVcG8u+pECkDuBug0Rehbs2V2nYOtIR3vlV/x0OJy5u+qmxpauq+Ulbe1JS3/e0R7f67bztGVENUKrhf6/OiUMoa0ieGrm/U43D4kEhExWgWShqW1lUdYcEh44EjUy5GUNCLWKrLCO94ex+n28sfe8FNZa7OqMRccQp2YCoeX1L3ntuNh4D2w4H58MR1wGtshN+cT7ciFhC5ht5cQoeLmwRlM7p6EHzCoZacUSalPskHN/aPb8siPwedCp5DSN6Ols/L5TotQuQDQR6m47B89WTX3MPn7hWRYY6KGPhens+evgoDpFoD8g1X0n5KJx+Vj86JsnDYPGd2iiYxT15Y0KzUyopLD9+w4F/j9fixVTmw1TtoNiKfgQBX6GBU2k4v8A1W1JczxfRTct/3fVDqEc/HJ7k/449gfzBo764w/8Se1M2BM0lBZEJh/o4tS0rZffLM9GCJUMiLOU6tvsUiMUWmk1BY6mpWgObNCsMl43UKvmMWP1i2zlAq/VxyGkc+GdhSObR/aARWg7bjGre6bgq08oKtxLb8/Ct2nwZ1rwWEWHFU10Sw44mX90f08NTKe/gky/IhYmedBackLFCknqMiCrZ/DsMdqXWWjThZ5K9mH198PgLwaF6bMrug1G4XpHXORkDtygug2wvJGkEhERDUyLRmpliEp2wsyGyy4r95x7MXTYQr509fyy9ZcthZ7aG2QMN0bS7LLg0oe/tYjEomIPslUaHMZ2SGWey9qwwd/Han1q0k2qPjw2l4knuGeUC2ceVqEygWCIV7DuFs74bB68Pv8QofjOQeoKQuee+00JJHCwzWsO14iDLBzeR5xGXrG3d4F7Skkop5t/H4/5fkW5r+1A320khHXdQA/lOeZiYxV0+2iFPasKgCJn8PsqRUpJ8g157K+aD1TWk85o8elNSiZNLMbWdvL2Lu6AJ/XT7v+CbTvHx9k++/3+3F5fcgl4vPaKbbCXoHdY0cilhCtjEZ2vPdMtCqaGR1n8K8t/wr5uoszL/47D7MOczH8+ULosS2zBRfYUEJFlwjXzIWvLw+MHBgzYdwrda9xO8CUD3t+FqpgModDqxEQkSKE9xwmYZpIJAJNbJ0VfSgBdIId30C3aZAxBIAKi5M1B/cwf2oEsWueQrJ2HYjEdBjxDJLDoRM9Adg+B3rfDPomikSxhImt5czaCFKxiPLOt7Elcjxmp5dOMXKic38jct3LgtPtiCdPOu0jFYuZMSCNn8I0Trytl57ovZ+B8R+BA8ZM9nZ5lKve34LDLUwlrgG+3FLCB9f2YkT7GOSSv68kOEqj4M7hmUztnUy5xYlcKiZaqyBO32KUeCHQIlQuIBRqGYrjme9ery+kSBFLRbTqGcv8t3cEjZVkm9i/ppBe49MQhwinnjCVM1c4kMgkaA0KNBHykOueaSxVTha8swO5SsKgqW2oyLcQlaTFEK/m8OYSFn2wi7G3dKJUn8NTa58PuY0FWQsYkzamSY3+moPWoKTriGTa9I4D/Ci18oBIisvjpaDazs/bCthTYKJLUgRTeiaSZFD9rRfjk2F2mdlZupPXtrxGdk02KqmKK9tdyfUdrydWHYtYJGZ8xng2Fm9kVf6q2teJRWL+b+D/Ea8+R/kp9srw+SR+H1TlCuKjIRIppPSHuzcJHXmrcwRTs5j2dTd+jwuyV8F3V9dFG/b+LHiN3LJcyBdZ9pxg967QQe9boPeNQhKuMgLE0pANCoHaPjwg2Kv8o6+c+G9GCdsEUEYgkqtxJ/VFXrBFSKhtiNdFYy6vQbQeTerB1Tw0qg/xkTomvL+xVigATO06lEcndSTaegj0TUsgTTWqeWhUK/69LCtg+Zh2kQzXFUDJHqE7cz1K+z3GfYuKA/YN4PPDA3N3sOSBoaeVd3IqqORSUozS8J4zLZy3tAiVCxRNpIJuI1OCOh2ndYwid2/oxFuA3X/l02FwQlBCq93sYtef+WxdnFM7xaJQSxl3W2cSWkcgacyi+wxgrnBgN7sZd1sXCg9X11b9yJUSOgxKZGSvOFbM2U+vuwx4/KFvDCqJCrwibHYXMoU4qILpBA6rG7vZhcPqRq6SotLJUYep3DmBSCRCrQ9ex+fzsy23mutnbcLlFS7Kfx4s5aNVWcy5uS990o3nTXRla8lW7llxT+3vdo+dL/Z+wZ6yPfx7+L+JUkURo47h+UHPU2wtZlPRJnRyHX3i+xCtjkYtPUcXeOlJIoCKRqYwpTKh/NaQHnrcUgw/3hA4JXKC6lz45sq6ZFRbpWDkdngJTJsrVOn0ukmoomlI+4kBniRGuQffjg8EkZI+mIqLXuOoM5KvtpRgdXm5rM/n9FSXEr/41kCTtI6XnTTq4fR4ESESvEwikoiKS2O8Ipqx728J8u75YVcF3dLaMd2yBtH2OTA2TKSqHpFqOTP6xDE+XcTyI2YcHj8j0pUkVW0kauGj0Ho05AR2Da/SteN
YRWhbBZvLS0GV/W8XKi1cuLQIlQsUpVpGr/FppHWOYvvSXOxmF6kdo2jdO5Ydy/LCvs5p84R8QCs4VMWW344FrbvgvZ1c80w/ImPP7kXFZnLRqmcMeQcq2bKo7jhcDi87l+fRulcsHQcmondrUEqUOLzBGfxXt72G9d9lU5Jtxpikofe4dCJiVYhlUGov5VjNMUxOM8nSNMp2uNn/exl+v+CMO+62zuijmz9XXWJ2MPObbbUi5QROj4+Z32xn/szBAcZY54oyWxmvbn415NjW0q0UWYuIUgk5G0alEaPSSMeojn/nIYZHHQ0J3aAouP8UmhihM3A4fF4QNyKyK46GnsLpPh3Wvh26YqZohzBF1GoEDHtYyEHZ/LHghyKRQbfpMOJxISpzHLHLjDhnFQx/nMrk0by+3sK324/Vji/bD+3iNHx+6VwS5o4VQjDqKBh0T9g8khKTg90FNXy3KReA6f3S6JSkJzZtMEtWHQ1rMPjB6jzGjOhFbGX4HlIN0UcY0PtqaL3zNajJh127hfOWNghG/xM+HRmwvu8kLTHc3uDKshZaCEeLULmAUWnlpHQwEpehx+vx4bS6mf/OTnqNS+PQxtCZ+sntDcgaONbaTC42hbG393n8ZG0tpdf49DN9+LX4/X4So93E9RNhN9UQd00CW9aZKcmx1K5zZFspE+/uhsflpWdcT9YVBj7BjUkdgyzHyK4NwvuuLrFxdHsZ42/vjDWxhFuW3oLNU+eYe3HqJKbeOIMNswsoyzXz+4e7mXRvt+MGdk2nwuKi3CLczPRKKZ2SIvB4fezMq6HU7KTC6jwvhIrVbSXfHN4TZkfpDjpHd/4bj6gZaKIFg7bPJ9SZloGQKHvNd8FCxWmBmjzY/pVQIdRmNLS6CCJTg7ftDpNnktQLNrwf/pj2zReEijYWLnoS+tws3LjlGuF4/X7weuHE1J9YBq1HgUJHtk3Ot9uDv58HS6x8fzSCmV2uQaKNEbZpSAu5+xKTg3u+2c6mY3W5Wsv2lzK4dTTvTevB0fLwviHFJgdedTQkXh7+/YXCkA5jXxYSma3lQisCbSx4PZDQHXLW1q3qKSVGq6DMElxVJBWLSI1qMVhroem0CJX/Ak5Y5duP55dIZRIMCeogK3uxRMSAKa1QqAL/7D6vD1N5eFO58nxL2LHTxedwYN+2jcLHHsNTKtyEJFFRDH3sWXZEGjm887gXgx/sFhdqvZxnej1Hdscsfjn8C1KxlImZEzFIjXjKJUSlaKjIs9a+5s+vDtL6FnGASAFYlLuAtp3bEt+qM8VZJsrzLdhMrmYLFY9PSJ59bGIy8VFWtpWvRi5WcOfoYaw54G7UFvzvRCaWIRVJw06bnYimnLfEtBUs2Qu2Qf4WoaInbRBEJAd6qbhsQq+dX26vW3ZgoRB5uXExRLdusN12QpJsQ0McsQxP9+sR2ysRZ/8ZnCNTL1qCVCEICpdVyINZ8haUH4Kk3tDzOohMEzoq97oB36ZP+MbRP+zb/GZHBZff8hLJUVohOhOGtUfKA0TKCdYcKWdnfg2D20Tx47bQwrRzoh6ltxoMpyBMtbHCT0OumC3Y/W/+FHweYhU+XprShdu+2h50av8xpi3RYfqRtdBCKFqEyn8RSq0cQ4Ka1T8cYvSNHcnaXsbhzSV4XD4S20Yy6LLWGOKDp3AkMjHGBA1luaETFhPbRJ61Y3bn55N7623C0+dxvBUVVDx6Pz1nfcOxA5LaPkOaCAWHNhXTZVgybWPbkhmRSbYpmyfXPEmVswqdTMcbV70D32uoyBXEisPiRu8NPTXwVdaXvDLkPYqzTIDg8kszDSqjtQpevzqD+QVvsfFQ3RPlnIOfMK3tTUTp05u3wbOEUWVkfOZ4FmQtCBqTiWV0iQ7vbXHeEJEs/HS8JPw6ltK6Pjb1sZbBb/+AqV8E9vjRxAqeJxs/En5XGykaN4ttlgTmmxKIUMC0SY+QVrwUw5r/q3tdlysCt+91wZHl8MP1daLn2BrY+AHMWCAk8Yql4HZgDZN7C+Bw+/CI5WFFisXpwWx3c7DEjFQswnN8ikUtl9A33YhUImLO+mM8M6lj2IjGE2MyMCTJBfF2ptDFC/4xrUYCfsTKCIbazOx5oBMlJjt/HnPyV46LW4Zk0DU5AnUj5ckttNCQs1/O0cLfhlovZ8xNnfB7/fz2/m5cdg8jrm3P2Fs7M3hqG6JTtCGTYlVaOQOmtAq5TblSQlrns/O07XO5qJwzJ0Ck1A36sM39kjbdIwHQGhRIpCKytpUhM4h4d/u7fLjrQ/449gdVTiH50Ow288Cme2h/aWTApsLlspbZy5Br6s7HqfQPitMpMIl3srFkbdDYN4dmU+kKtsw/F6ikKu7pfg+tIwMjClKxlHdGvEOsOsRT8oVIwdbwVThH/xQqiOqj1MPQh2HyRxDdlsLJP3LtUgl3/3yUP/aW8P22EiZ/lcuH5sFUD3pKeM3I5wTr+fqYi2HeHcGRGY8Tfr5dGJdrEBdu4Yq24W/SYzvFBVniA1RaXaw5XM4dc7Zww2ebMdndzL6hD91TInl6ZDxLrtLzYcwPfKD/gld61hAvtTL39v70z6xLxE2IUPLJdT3plBrbfJFSUwBH/4Itn8GxtUJ7glAo9UI1VGU2ikX3ovmwJ5lfD+Cm4uf5fKKOoa2NRKrPcjTF44TqPKGKyhymsVkLFxQtsva/jKgkLVc91ZecPRUUHq7GVO6gde9YdAZFo2XGsWk6Rs7owJofDgsJt4AhXs3omzsF+YWcKXx2O4694T0k3IcPYhgmQqGRctH1Hcg/WMXkB3vgsfvoounOFt2WoLwLi9tCkT8XtV4wilNHyCkj9MWqraEt1lLB7TcuQ48qRFXPyahxV/H9oa/Djs89OJeOUR2bZfF/tkjQJvDR6I/Irslma8lW4jXx9InvQ5w6DvnpurSeL4TLOTlBKBGjiYHuV+NqM4bP/iwiqyw4f+SjjWVccuuVRHa9WBApDTsv1+SH91WpyhaESmQaJPSgi6yQrkk6dhUERjD1Kik3DEwPijaY7G4+WXWUD/6qKw8+WGJm3vZC5tzUm44H3kX9w1u1Y9E75+BPH0LmFbP48NpeVNlcuL1+IlSyU/MNKTsIX14S2GzRkAHX/QLGjOD1q/Ng9piAfkeiI8sQ5a6H21cLjrtnC1MhrH4TdswRkpuNmTD2JUgbKAioFi5Izv3Vs4UzikgsQh+tosvwZDoPC93ZNxQKtYy2feNIamfAYXEjPt5pubk5G03B4/JiM7nw2LzIMjNw7NkTcj1ZejqJneKZ0DkVqUJMeZ6FLYuO4feDMTGZVya/w2clH7C8cGnA60odpSjUrXBY3Fw0oz0v5j8Rcvt3tr2H7K/NJLWLZOT1HUKWKHt9fkpMDiqtLiRiEUaNPOBi7/P5MLlMYd9rlaMKr897SkLF7DRT5azC6XWik+uIUcUgaayCpQnEqmOJVcfSL6HfaW3nvCW5T/ix6LagCH+zqvSombslfMLxT/stdJrYKfRgqPLm+lTnCsZxY18g9seb+Xjk0/yaG8+cnTU43F7GdIjhkh7JvPTbft
64sjux9T5jZWZngEg5gd3t5aXfDzKrQzQNJ3RFx1bDvgVE9r3l9CIY5mKhl1HDjtBV2fDTzUIXZk29iKvTLCQah2rK6LLC+v/AuJdO6oh7SlhK4fsbIH9j3bLKo8LxX/U1dJh45vfZwt9Ci1A5D3A7PTjtXsQS0Un9PBri8/mxVTtxu31IpWLUEXIkx9uuN9e/QywRozMqw0ZQvF4fthoXHpcXiUyMRi9HImvejdNhdXNgfRHr52UhkYiZctU0zAsWBofMAe20G1g5P4+h17Rj0Xs7sVTVzbdXFlpZ+4GVW+67i20VW2qnfwDaG9rh6qhj3O1d0EcreTLpSZ5d/yxbS4Q+NtGqaB7p9QidtJ3pdbcKlS50nyOr08Oqw2U8+cseKq1CZU9SpIq3r+5O15QI5BIJOoWOgUkDQ+Z+AIxOH43iZD4g9XA5PLidXux+K6/tfIXFxxbjx49BYeD+XvczMnUkEY3cbP/n0cZB92thx1eBy0ViuPjfoAs/xeX3+4MMyupjdTYiRiJThN5BocqZtbFCpGfxY3D7ao4Mfx+1vYAr4/IYf3UXHBId2RU2cnNz6RorDSrd3ZQdnDR7gm251dQM64ch1OCmD4V8nlDJr03FWhbaiA6EaTZbuSBUXBYoPQj5m4SGhOE4sgzsj4LuLAiV6txAkVKfP56A5N4BJnwtXDi0CJVziNcjuMtu+e0YBQerUGpl9BidSmpHI+qIk9/cbGYXBzcUsXVxDk6rB5lCQteLkukyPDmoa/LpYjO72L+2iG1/5OCye5DKxXQelkT3UanN2ldZrpm1PwrW/j6Pl917fXT65ytUv/J/+KxCZY5IpSLyocfZc1gKeCk9ZgoQKSfw++Hw7zVMHXAVHx/8EICOUR1Jj00l+rKo2nyctIg03h7xNlWOKtw+N3q5nhh1DGJR4ylah0st3PnVtoBlBdV2pn+6kcX3DyEjWotKquLWLrey9NjSIG+XBE0C/RPCV3jUx2X3UFlsZcuiY1QWWdHHKpl+0S1ESWL4KutLqpxVPLvuWVRSFeMzxjdpm//V+DwgkgQnIKmNMOpZIdS/5g3hKTupN4x6BqLbNbpJnUrKRR1iWbxHiB5IxSJidQpsbi/VNjeTuiWGf7EmVvATWfxY4HKRCC56BjZ9AqYCsFWwx6TmqV9sJEQaGJRaye2dK7lo/3vIyvbgjUwH08Og7CBML7lsQOOeI2GfR5wW8HuFLsbWMiHaodALwkWha3SbtTjCRwsBcNuEL2L2GsHdt9s1gRVRDVEZGq1mOi3yN4cfq84RxFQLFyQtQuUcUllo5afXtuL1CBcim8nF8i/207p3LEOvbouqkRI+j8vLrhV5bP09J2D5oU0l2EwuBl3eBoX6zPx5PS7BdG3b4px6y3zsWJqHtdrJ0GvaoVSf/OLjsLrZvCjQr2X/1mrMbRPo9e7XaDAjAmxiPVs3mjm2v5IR17Ujb1/4J8rSo2Y6ju2CVCRlTPoYHuj1AHGa4CfICEVEsyIRZoebt5YdCjnm9Pj4YUs+D41ph0QsIkWXwjcXf8MbW95gbeHa2pLp27ve3qRGfl6vj5w9FSyZtbdu/xUOCvZXM/SqieQkHGN1kWBp/9bWt+gV1+u/J/m1udQUCE/tu38Ubnq9bxL8Peq7t2pjocd0wT/F5xG8TernJzgtwo3bXlXne6KOQquQ8Y8x7Vh3pJwbBqXTLTmSYxVWdEoZ8Xol7WIbccCVq4WbdHQbWPMmVOVAbAfodQPs+UmIPgB43fROM+IHVDIx97QqIXrudUIrAEBScQSyluG/+A18yf0Qr3qZvr2eCbvbPmmRRBStCz3Y7rig/eVOIcrh9wuRpU5TYMyLTesfpIsLXb4NQgRJZRSaHS56QFjn4O9C5Gp/6AgjA+85qdPuKaONCz8mkQleNi1ckLQIlXOE3eJi1dyDtSKlPke2lNJzbFqjQsVmcrFjqeBAq9bL6XdJJjKlBEulA61Rid3iOmNCxWZysTOM2+3hzaX0nZjZJKHidfswhTCiyj9kJv+QmZSOBiKiVULzwePoolSo9eETJFV6OZ1j2rFgygKMSiNq2Zlx0LW5vBwsFpIde6YauL1PKgaZFJFEzIb8KnaXmnG4vWgUUqRiKW0MbXht2GtYXBYQgVFhbPKUj63GxV/fHgw5tnteKdfffVOtUCm0FuL0BEeX/ieoyYcvL4WKumabbJ8Dgx+AgfeBusGTfKgpD3MJ/PmSkGx5Iq8kqZfgA2JIJz1KzYJ7BvPSb/t5Z3ndfjRyCZ/O6E2ERha+f5MqEqLbC3kybccJZnML78edPBBXn3tQOsuRqI0kalV8c2t/nBV5RP95JzUDH6MiZRw1bjE6mR9j8RqMK19GMult2L+QWE0G9w6azDtrA5PCNXIJz09sTeTiEEJGoYP+d8GC++HQ4rrlfp8gnHw+uOSd0M0cA3YSA52vhN1zg8f63i5EkqqyhSRWEMRf+SGhkeKWWYHrd7oMMoY2vr/TIamX4GkT6vvR+UrQnsFy7Bb+VlqEyjnC5fDW+neEIndvJTEp4cOzDpsbr8eHSidj5IwOrJp7iJrSOhGgj1Yx6d5uZ8T63mF1hxRUJ7BUO4mMO/l+pAoJUUlarNWhIySxaXoqCgLDs2KxiMweMez6M4x51dAkogyRSKVntoRaKZOQFqVmYoc4RkVGsO+HYxwxCfkHiW0imHhZaxTSwKkjnVyHTt7EkHo97GZXbaVVQ9xOL0pX3ZO8RqZBKvkf/Np6XLD+/UCRcoI1bwo3wYZCJWgbDlj3Lmz7PHB5wVb4+gqYsRCROpaFO4v4Y2+gKLC6vMyYvZllDw4jNSrEZ93vB69buBlGt4N5d2DreDV5k+YxZ5eNrAIvPZJ1THXpSfb56ZoUgU+SR/HYD3lmk4Qly+s+333SuvHWpT+S5D4GUiW6Le9y49AYBl59ER9vt1FqcTE408A1bSH5zztg+KNwYJHQ/dnrhHYToM+tgChQpNRn/zwY+fTJhYoyAsb8U4isbP5UmOqJaQ/Dn4TMoSBXBbcoWPkKDJgpuAbnrBOiGR0uETxw6vU/OuPoE4Ru2fX7MwHEdYaLnoAz9BDTwt/P/+AV7/xAJAofUQWQyhvPn5CrpAyaKhi47VieFyBSRCJoPyCe6mIbB9YVITp+s9caFI1GacIhlTeeMNvQ6bax9fpOzCR3b7BQkcjEtO+fwO5VgYLEYXGjNSgYdHlr1v18JOB8pXWOIrN7NNKz0DAxQiXj0XHt8RbY2PhFYLSj+HANf320h8sf6X1GSrdPlvRcf3ha+2lEK8/ixf5M4PMJzf58XuEJ93SSOU9gKxeiJ+HY9T0kdG18G+YS2PJp6LHyw1CTT6kvgk/WhE4edXl9rM0qJzWqnhW/yyKU426bA5VZkDkC2o7FPXMnfx1zcdfnh2s/s+uPVjBrXS7f3NqfXmkGbHIjL2yzseRAecB+NudUc/cSH7MmdyTq+JSQYdUz9Ne8Q9cOU3Fd8gCarR8g++ZtIUJybJUgTsa/WjfFsepfQlQjHH6/kLvSFHTxcNFTgjGe1yWUK+/6Dg4uhF43C
onE8V2geHfda9a/B5s+htT+QvsDXSPTMmcKiVxwK565GXI3CFGe1P5gbPX37L+Fs0aLUDlHKDUyMrpHc3R7ecjx1I6hIwRup4fqEjubFmZTnmdGZ1TScXAisWl6tv5+DIDBU9tQlFXDpgV1+SBbfjtGpyGJ9L0ks9mVRWqdnPhW+pARIF2UMmRX4XAYEtSMu70zf31zELtZ8DDRR6sYfVNHdNFKuo9MJW9fZa39v9vlZcvvx+g+OpVJ93anNMeMx+UlPlOPSifHaW/E5vM0ydSp+G1J6DwVa7WL0mOmMyJUVDoZar0cmym4YkShkVIjEbphD08ezjXtr0F2tpIRzwSWEtj1A6x9S8gDiWkPo58XnFkbeo80B78fPOH71wRZ3IfCZRW8NcJRmY1X3ZFqmzvsKsfK601Duh1w8A/4+ea6J45Di+GvVxDN+I2XVxQHPYhIxSKO5eXRTVlCuT+W3/aG/v7vyDdR7m9LVP3IgLUc9ZYPUPe5HrbNrs1rweeB/fOFHxBEizZW6BzdGE1NqAVBcIql8MONUFAvaXXXXCE354rP4JMRgX8Hnwf63CJMif1dSOWNd8tu4YKkRaicI+RKKQOmtKY4yxR0g+o/ORN1RPDN3+/3U3i4moX/2VXbAdlS5aQoq4aeY9No2zeOoqwaJDIxR7YG+xjsXV1IZo+YsCIoHEqtjFE3dGLBOzuoKau70Kv1cibe3bVZjq5ypZTMbjHEpeuxW9yIxSKUWllt5ZDOqOSSe7tTlmvm8NZS3A4viW0MrPhiP12GJROTqkUsFlGaayF7Zw5jbw32tah2VFNmL2NfxT4iFBG0iWxDjDqm2aZmYj9UFoXPjyk8Uk2rnqcfLdBEKBh1U0cWvrMzoOusSCxi+PVtKdVm88slvxCtiiZSGXna+ztr2KtgydPCzesEZQfgm6nCU3XnywP78jQHpR7ajBX69oSi82Un34ZcEz6HAcCQhkImplWMhqyy0H/3Pun1EkEtJaHdaO1VSBbex009X+W5ZfW+L3IJP0xLo936R5AsW4X5yvU01mS4vKqadjJVoLgyZICtUjCPK94V9n1Quh9K9gn5MqGqYTIvat40jN8P++YFipQT7PxW6DZ95zrY8wvkrIGoNtDzeqERZDPK81toIRQtQuUcEhmr5vJHe5G7p4LsXeWo9Qq6DE9CH60KOZ1irXbx51cHakVKfXYszWX8nV1Q6+Uc3BjeNnr70lziW0UgVzTvTx8Ro+LS+3tQU26jstBGRIwSQ7wGfbSqWdsB4QasNSjRGgKjET6vD6fdg0wpJaNbDBndhOQ3u1nwbtk4/yie4z4XCa0jGHVDR7SRgdsos5fx8oaXWZpbZwKnlCh5a8Rb9I7vjUIS+qLpsLmxVjk5sq0Ur8tHZo8Y1Ho5Kp2sNvLTkDOR/wPC+UhoFcHVz/Rl7+pCynLNRCVp6TwsCV2UktbyC8T7wVIKh/4QboC2isAb+JInhJLhiKTwr28MhQ5GPgNZK4Q8ifqk9hciNydDGws9ZwhTEg0xZEBkCjE6JU9MaM/NX2wNWiUxQkmnxHo5HSV7Q/umAKL8TQwcGCjK7h0YQ7tNTyLJERKjdSI7YhFhxUq03B0oUiQyofR67ZtCZ+UF9wW/SCwV+u1s/AgKd8DNS2DenXVVRyBMj1z6buNlxA2xlgk5KuHY9DFcPgsG3wf97xCmn05VlLbQQgNahMo5Rh+lovOwZNoPTEAsESFu5MvtsLmxVoe+MPp8fmwmF0qNDGcjoWun1YPP44dmPuRYq52s/fkIefsr0ejl2M1uopI1jJzRMWj6w+f14XJ4kUjFyBRNyx8xlds5uLGYozvKkKukdB+VSly6/rhYkNNjTCrt+sXjtAkeLkqtDJVWXuty67C6kWnEzC+cHyBSABxeBzOXz+TXyb+Sqk8N2rfD6mL70sDy6+1Lc+k+OpXuo1NZ/3OwK6hYKiK105krs5TKJBjiNQy4rBVelw+JXIykkZYH5xsur4syvOROfgO710ErdQJR+39Du+kTYQVLqZATcapCBYRcg9tXwV//giNLBPHS9zahQWBTchBkKhjyENirYc8PdUIqtiNc9RXoEsDnpU+EmbcvSeWFFSW1Tf0GtzLw4iXtSYisJ8wbCqYGiP0e+qZF8o+BeuKlFhJ0PiTxV4C9HIp2Ep39KxM6XczCPcHTP91TIlFrI3C3m4SsJgdfYg/EHS+Bde8JPYtS+sHAe2HD+3VtAZQRMO4V2PSRMC2k0Aq+KdO+F4SGrQLU0ULCr7qZyed+X+PTZm6r4NkCLRGUFs44Z02ovPjiiyxatIgdO3Ygl8uprq4OWic3N5c777yTP//8E61Wy4wZM3j55ZeRSv/39JO0CQ6v4pMYzUokIkrzzCS1jaSyMHToOqNbNHJl85JPXXYPa386Qtbx6STX8QqVgoPVLJ21h/F3dEWlk+P3+zFVODi4oYicPRWotHJ6jE7FmKRpNIm3ptTGT//aGhC5KDxUTZs+cQy5sg0qnRypTBIUvbGbXUIEIs+MWCIisa+SL/Z9EXIfHr+H1QWrma6fHjRWXWoPECkn2LE0lyse603bvnEc2lQXpZIpJIy/swtaw5m/IEskYiSqv1+g+P1+vP5Ts/p3eBysL1zPI6seqTW9EyHi+taXcdP4VzD+ftwE7XT7CUmkgk/JpDcF0SOSCOWzzXly18ULPh/DHxMSdOU6IQJ0IuHXXIT+qzFMMrah74QHMMvjkIvBmL8M/c8Pw5QPBaFjSGs8edeQgcFg5IM+24laPF2YFgPB62PMC7D7e7TbPuSpy8bg8UaxeH9F7Uv7pht4fEIH5uwq5OphbxKvcKH+658w93pIHyjkhBxZJuRhXP2t8P49LqHiZ8P7kLdJGJv2fZ1XyulW26gM0H6S4HYbim7TBCHYQgtngbOmCFwuF1OnTmXAgAHMmjUraNzr9XLxxRcTHx/PunXrKCoq4vrrr0cmk/HSSy+drcO6oJHIxUTGqakuCX6Sk8rExGVEEJOqQyQRc2hTSVDJq1Ijo12/+EabE4bCbnZxZEvo6aSiLBM2swuVTk51iY2fXtsasN+cPRV0H51K56FJuBwe5Eopar28NtLidnnZ/NuxkNMrhzeX0O2iZFQhkn/9Pj9l+WaMiRpM5XZcDg8yh5rnu7/C0zseC7DUP0G+KbjE2efzsXtl6NJnpUZGwcFK+kzMoPeEdCxVDvCLiIxToY5Q1LYquJAxu8wUWgr54dAPlNhKGJEyggEJA0jQNsEM7DhF1iLuX3k/Pn9dCbsfP18c+YnOvR5hXEw7QVBozlAJuVwj/JwqSr3wE6o5nqUUbJWIbRtJyL+aoLNQnQPfTRf2f92v0O8O2Njg5i0SwcQ3MEiciBfdHDgFZikRpmKmzYWjK4lf8QD/Gvosz47sRJlPh1qpJEojx6CR0yP1+NSMqVAQeVd+DgcXgykfMoYIUZXl/4TJHwiNEq1lMOwxIVqijWuaodvJODGFpzLAgDsFPxV7g+9WdFtIG3D6+2qhhTCcNaHyf//3fwB8/vnnIceXLFnCvn37WLZsGXFxcXTv3p3n
n3+eRx99lOeeew65/L+km+sZ9Gy8CwAAQFJJREFUwlrjZN1PWQyY0oqls/bW5moAIIKRN3RAZ1QgkUnw+/1c/mhvNszLIntHGYiE8uT+l2aeUk6J0+EJW0YNYDe5cBo8AZ2X67NjaS5JbSP5/QOhfLFtvzj6X9oKTaQCp9XNkS0hGpgd59DmUuIygqtFzFUO8vdVsn1pnRFd1rYyIuPUvD3jfW5acx0ef+Cx9EkIbljn8/pxWIJFUka3aDoMSmTX8jw2LzqGWi+nx5g0MrtHn5VGjecCq9vK/Kz5vLLpldplK/NWEquO5fNxn5OiS2nSdhZkLQgQKfX5KOtn+vS9hajMkc3LiWgK5hKhJPjoKmE6o9VIYRrEXAhyrRA90SU04jEfgjA5J7WcMIlzWWH2WJi5RRAMq18XBEViT8GfJDId8fx7QvsP+DxwcDHFV//BrgoJP+7woJRWMr2fliSjDIOmwbVPZRQ8S765qq7S59AfwrTOlXNAnyi4vaqNENN4m4AmYyqCrOWC9b/XCZ2vEKImt/4J694REmslciHnp+cMQSi10MJZ4pzNsaxfv54uXboQF1c3tzx27FjuvPNO9u7dS48ePUK+zul04nTWZe2bTCfpRXGB4DieVxLO4bWqyMrR7WVYq52Mv6MLOXsrqCiwojMqadsvjqgkbW2DQJFIhCFOzcgZHXBObYNILEKplp7UDyUccqUURIRM4gWEMmGrm9xGrO6Lj9YQlaylLNfMgfXFuF0+RlzbhATIMArJafUEiJQTVJfYKFuvYlTyaBbn/V67PF4TT/vIDkHrS2USWveKDfB2iYhV0bZvHL9/sKt296ZyB399c5CCQ1UnbW9wqri9XmwuH0qpGEUzmz2eCuX2cl7d9GrQ8lJbKW9tfYvnBz1/Uqdfr8/L0ZowTesQoi2etmMhIu20jzcAUyF8dy0UHk8SnfQ2rHpVqEA58UfTxgmRi/iuwaZkofD7hchPuMoghb5OKICQk7HlUxj9guC46nUJAkmpFyIzpXuDt3Gc4nbXcutvJnYX1pXzzt9VzOU9k3hiQgeitArBjwbAWg7z7wncN4DTBEufhut+Ofl7aw7mYvjxRshdX7dsxfOwZTbctBjGvgxDHxYEoDpGmJJroYWzyDmLXRcXFweIFKD29+Li4lAvAeDll18mIiKi9iclpWlPfecrlioH+9cVsvDdnSx6byf71xUJUwwNOLKtDICSbBML39tJVZGNyDg1LoeH397fjdvhwVRux1pdd4EVS4TE1sJDVWxalM2RrSWYKxz4GwuPhEClk5PZPbT9dEyaDtUJH5VGNtvwGpu1rRS7yYVCI6N1r/DW1m37hk6SzNoWPgqTvbGKyclXAEKuxOD4wbzZ4z8Ur3fiCeGwm9zeGJBv0nloEpsXHQupkY5sKcVWc5Kn7mbi9vg4Wmbh5d8OMGP2Jh7+cRe786sxO8InRZ8J1heuxx/mj7Y8dzlVjuDps4ZIxBL6JfQLO97B2AHlmS6p9rhg/X/qREr6EKHh345vgqdZvpgkWO+fDFMhbPhAmMYZdH/odYY9Alsb5ECVHxIEiiZaiGyccHqVqSCqdcjN+FIHMz9HEiBSTvDTtgKOlJggezX8fItQ/lxxCEY8KVT0NKR4l1CufCYp3BEoUk5gKoAtnwkXFn2iEK06FZFSUwCHlsCq12Hfr4Jhni90RK6FFqCZEZXHHnuMV18NfgKrz/79+2nfvglPyqfI448/zoMPPlj7u8lkumDFiqXKwaL3d1GeV2cbX3zUREyKlgl3dQ0o363ft8fvh7z9lbBf+F2mlFB6zMySWXvRGZX0nZRBYttIzOUOfvtwN656pmgKjZQpD/YkKil0gzWPx4fP60Mml9Q6pipUUoZc2RavW2ied4K4DD1jbumEWifHKXGT1D6SggPVIbcb3yqCHctz6xb4wWlzExmnps/EDHL3VQblqbTpExt2qsrlCG/05nX7yFS2YXafr5GKpFQd8LD+vWJ8Hh+te8UFbVNnVDLloZ5s+f0YBzcWozUqwyYjAxQerg57/k6F3QU1XPPJBpzHRdSOvGrm7yzk1cu7cGn3JJSNRFdKbaVUOarw+r0YFAZi1DFNToi1NVK14vV7w07nNGRo8lDe2/4eJldwdPO+nvc1qxlkk7CWwdbP6n7vdhUsey70uk6z0MTQ0EhEx1QE314DRTuE34f+Ay59DzbPgspsIYG33+1CkurRPwNfmzoodJWLQgdDH4HDS4KGyttfw1drQhu9AXy5Poeeyq+Q7flJWLBrrmDiNvZF+P3R4Bf4mmB6aCqCsv1weJmQu9J2vPBvw1wfjwu2hU5Irz2WfrcL02qnQsUR+Hyi0MjwBMpImLFAcLdtzjRdC/8zNEuoPPTQQ9xwww2NrpOZmdmkbcXHx7Np06aAZSUlJbVj4VAoFCgU/x05Arn7KgNEygnK8izk7a+kw8C6tvJt+8SFrEwB6DAwAalMhCFeTVWxjeVf7OfS+3uw4sv9ASIFhCmTxR/vYcqDPVBH1J1Hh9VFVbGNXcvzsVtcpHeLIbN7NPoo4aauNSgYdWNH7GahFFihkqHSyWoTXaUKCf0mZbIgeydupzfw+AYlIBILviP1BYBMKXz8ImLUXPFobw5vKcHt9KI1KolJ1qKLUoVMpAVo1TOW3SsLQo4ldzBydFM5O5YF3wwano8T6KNVDL2qLX0uzsDj8jY61SVrZtVUY5SaHTz0w85akVKfp+ftZWCraFKMwdMvHp+HPeV7eHTVoxRahYZwOpmOx/o+xkWpF6GVn1xIDUgcwJvb3gw51jm6c5P7FiVqEvl83Oc8ueZJ9lcK6jlWHcuT/Z6kTWSbJm2jWfi8Qo7ICRR6YXokHKUHGt9ewdY6kQLCk74hXaiuSekHEanw5cS6xnsnkGug0+TwN9eYdkKi62//qDteZQT+1EE43MEl7yewuf34NQ2ucQd/g/YThEhG/ePQJ50896cmH766QhAqJ1j6NFw2S+iwLG/w+RI18vkWnUYQ3loOP90SKFIAHNVCf55bVwjvr4UWGtAsoRITE0NMzJnpQDlgwABefPFFSktLiY0VSgOXLl2KXq+nY8eOZ2Qf5zMOq5u9q0PfaEFwkc3oFoNSI+SsaA0K+k7KCLDFB8GSPqWDkXU/H6HX+HRydlcIN3yXB3Nl8BQSCHkcdou7Vqg4rG52LMtj6+91QqjgUDXb/8hhyj961hqbKTUylBoZ7uPeJcXZNeAHY6IGqVzCzhV5jLu9C1nbSinOqkGhkdFpSAJOm5f1P2XRYVACEpmY1d8dIrVLFFK5mIJDVVQUWIhJ05PaMYody3LJ3llOZLyanmPSkMrEyEOY30XGqUloHUHRkcB+JRKZmG4XpbB0duj8gMbydKRyCTqjBKfdQ3rnKI7trgha54Q525mixuYmuzx09Mbl9XG03BJSqBRaCrllyS04vXVTfWa3mSfXPsln2s/oHd/7pPuO18QzOm00S3MCfWekYilP9nuyyS64IpGINoY2fDT6I6qd1Xh8HvRyPbHq2JP2MTol5GpI6AZFO4XfreVCw7twUzy
JPcNvy+MK3UOo6hisfFmY3rh5KUx4HRY+IEwngSBCpnwkOK+GQ6kXklDTBwuJvyIx6OIwqOIY39nKF+tDP3hMbS9DvmVN8MDeeUJX5i2zhd9FIpj4ZuPRDbdd8J2pL1JACMv+fAvM3ApR9R4upXLofSMcWBB6ez2uFfJSTgVbORRuDz1mLhLOUYtQaSEEZy0LKjc3l8rK/2/vvsOjrrLHj79nJtMyk0x6TyAFApGOIEGQKqgoFtayNiyrq6Krq6vi6srufn+udd3i2ncf0F0VdV1l7bCoYKETepMaQkgjfVKmfX5/fMgkQyYhCSmT5LyeZ56HfD6Tyb0BMif3nntOKbm5ubjdbrZs2QJARkYGVquVWbNmkZWVxQ033MAzzzxDQUEBjz32GAsWLOgzKyZn5JTf5o3BeoZPTWLgiCj2/HCcmioHSYPDCTLq+N+SXdTbXfxvyS5m3zaM4wfLcTtbz0NxNzk1ZK+o9wlSGtRUOlj70UFm3DjUu4pQX+Nk3/pCvnvvR2+5d41WQ/YV6cRnhPHp37aSNSmeadcP4cCWItYtO+QNmL7/934GnR3LedcMJjEznI+ez6HqRB0xA0PQm4L4+s3d3hSD0nw7BzcXM/PmLDLGRqM7pfGgxWZk1s+GsW9dAdu/yaO+1kVKViTj5gxk3ccH/Z4+SjkrAlPI6fvkGM1BTLpyEMVHq5oV2Jt+w5BOPfUTZLDz2i1JKCgUV+hY8m0p+4saV9k8LZQt/fzQ5z5BSlMv5LzAX6f/9bRbLuGmcH59zq85N+FcluxcQmldKePixrFg1AIGhLY/+TXcFE64qZNP9vhjiVILmy25SH3DzfknTLgLvvx18+daYyBhZMuvpdG0XqBMZ1AfmRdBwmioLVVXHIIj29ZsMcgA1jg1SfdknRFDxTFuPSeO/27Np+yU4oxDYi2MsZTACT8rLs4aiBwMiWerrzflQbVTc2vBoL0Etr3j/57iQTn4DZrIU1bB44ZDxiy1qF5TEWlqoKLr4Iqi0/8vTl6ONvRrEv1SlwUqjz/+OG+80bjX2XCK5+uvv2bq1KnodDo++eQT7rzzTrKzs7FYLMyfP5/f//73XTWkgGKy6Bk6MYGiw3v93s+alOBdTWn6OSaLHtfZHrauzGXTl0eoOtHkP78CW5bnkjUxgSCDFq1W4/eNTqdXK7s2OOJn5aDBwS3FnDsvwxuolBfWsHqpb6O+iPhgyo7bGT4lCWuECVtMMF+/tcdvnsePGwsZc0EKX/1rj3fsw6cm8cMH+/0mr656ey8JGTZCIpvnqljDjIw+X61YqygKBnMQBlMQE+amUXS40ifIiEyyMuXaIS2eqjqVLSaYeQ+dTf6+Mg5tLyE00syQCXFYI0xtrrbbGpfbxe7S3Tz6/aMcqlBXyZJDknngkkd597twvtlbRpBWQ3p08y0cp9vJtpIW+rwABysOUueqa1NuSJQ5inmD5zEleQpujxurwYpFfwY1SrpL/EiY/yl8/pD6W/rQS9QOv9/9qXGbJW6EWtbdltTy6+j0apfhXcv83x97kxoYaTTqb/vt+Y3fXqJ2Gl7/OjgqYfjVau+d928i2VnDsmuX8PqWWj7fU44xSMdPxyczb5COuLen+3+9sy6HmLPUwCp+BNiS1eqzrfG4W+5tBDgqi6iorCMmtEl1aWuMmqNzbKNait9Vp26DDZp1ZpWFzeGgD/Zf0VejkSPOokVdFqgsWbKkxRoqDQYMGMBnn33WVUMIeAOHR7I90ULpMd839IhES6vl2feuO86Bk6eATlV4uJJR5ydzfH85w6YlsW1l8yO8Yy8Y4NPx2O0nP6KB4lG8J0OcDjc5KxoTYoNtBs67ZjAVxbUc21vG5uVHmHLtYAzmIL7/9/4WX/P4/grqm9Qu0Rt1LfbTcda7sVc4/AYqoK7mnNoUMSLByk8ePpuK4lqqS+sIi7MQEm70yclpi5AIE5kT4hl8Tlynb2Ecsx/j5i9v9lkVOVp1lF+vuZu/THuTtQcr+NWsTKJCmo85SBtEZngmq/NW+33t5JDkFnsanaqyvpLj9uN8fOBjKh2VzBowi8ERg4kJPvNmi13KYIGB58JNn4KjWl1ZCQ6HEVerp2D0JrVcfFsqssYMhbOugJ3/OeV6lvp6bTnafCr7CbUYW9PE1P0r1SJzs59E8841pLwzlceGXMGCWbPQKBA1eDA6d43/0z3hAyE0SV1FapA4Dq5+o/U3eKMVYodB4Q6/t0tiJnDtK2t445bxDIxqEqCGxMKQOZA6RT2GbeqE7c6QOPVY88rfNb83+gZ1lUgIP+QAfA+yhpu45O6RHN5+gl3f5aPRqCspA4ZFNmvY19SpvXWaMofocTk9JAwKx15RzzmXprH96zxqKh1Yw42MuziV1JFR6PRaKoprKDxcRVxayz+EkoZGeBskuh0eKkvUVRBtkIbpNw5l1dt7fVZ19m8sYsT0JMbMHsDmL/3vwQcZtLhd7Tsi3V7+mh52VGcHKS63i/f2vud368aluPj0yNt8uOBXJNhCCTY0/y+q0WiYmz6XxTsWNytqB7Bg1II25ZdU1lfy9u63eXHri95rH+7/kKzILP467a/EWtrQP6enVBWo2yN569Vk1+RxoDOpOSOt5Y34Y42BC59WC5dteB08Thh/h7oFYu3gm2fZIf+nZ04cUE8CDZoF+77AuPNd4nae7DZtfEk9/nzDR7DuZTWBVqODEddA+lT46C7f1zq2Ab76A8x5Vl2p8McSBRc+A2/MaVaTyJUwnm3V4RwpzeP5FXt56ooRBJ/arPR0KzbtEWRUv8fBEWr+T1WBusoy8R41UGk42i3EKSRQ6WHWcBPDzks8WUtE02y7x5/00TGsW3bQ71bJ8KlJJGWGU1/jZNf3x9BoNUy5LhNbtBmDUUdIpJmaynpvPZb6GhfnXJpG6qgoDm3xPTkRZNAy6ScZGE9ul+hNWuLTbRTnVpE+Kob9Gwt9t55O2vZVHpfcM5JtXx31raCLusIbl2ZDp9eSfXk6odFmrGFGgkMN1FQ2r0+iN+qw2PpWleIaVw1bira0eH936U5ibGAzt/xvIcGawMszX+bB1Q9SXl8OgEFr4L6x9zEsalibxpFvz/cJUhrsOrGL9/e9zx0j7+hQ758uV34U3pqnbqs00Jvh+v9A0viO1fawxqiP2LPAXqRWZS3aCRkz1ZWA9lbV3fxmy/d2fggzFsG+L3yv15ar21Z568FRAzN+q66uWKPhrSubFyMCtaT9lIeaH792uxq/D/Gj4OYv4ItHIH8zGEOpHjGfA6nXcf876i8Tn20vYOGFQ5sHKp3NEqkGK4NmqVtKOqOaw9PRvBfRLwTgT6H+yXRq2exWWMKMzLptGCv+vtMnB2XA8EiyJiVgsRmxhpuYOf8snA4XQQadWl0WtQty/v5y1i1rTDhd//EhplwzmMTB4exdW0Cd3Uny0HBGnz+A0KjGVQldkI5hU5PY+W0+A0dE8vU/Wz72eXRPKSnDIjmY47tFNeW6IRjMQYy/JJWNnx7ixDE7SUPDOe+ng/ny9Z0op+TUTLthSLu3bM5EaV0puZW5fL
T/I9weN3PT55IWlkakuXP61FTUV1DrrCXRmthinkm8Nf60WzcGnYFxceN4/5L3KaktwelxEhMcQ5QpCmMbu9d+fKCFkx3Ae3vf48rBVwbeqkp9NSx/zDdIAfV0y1tXwl1r2r+i0sBeAqufgfWvNV5b/hhkL1C7Lre147CitN5Z2e3wH0xFZqgF5EIT1G2onf+B6KEw4Q41kbdwp/rm7vNaTvVrFe9VTz2ZwtTnbH1b3SoacZWay5IygZwpr6Nx1lLrhCXb7Kz47iAN/908Skul/7pAQ76PEG0kgUovU1/roq7aQVSSlZ8uOoeiI5XU2Z3EZ4RhDTP61B3Rm3TNan4c+7EMvUFHRXFjy3bFo/DN23sJjwsm4+xYMsZGExpp9nuUNzTKxGX3j6a2yoHb3fKPNpfDw+SrB5GUGc6RnScIiTBx1uREQiJNHNlxghX/aDw+nLdbHdOl941i79oCSvKqCYsNZvT5Kdhizd3W/K+0tpSnNzzNZ4ca86Y+OvARkxIm8ftzf090R49lAg63gwPlB3h6w9PsKd3DE5Oe4PPDn/t97m3Db2tTHRSdVkecJY44S8eKb1XUV7R4z+60t1i1tkfZi2H3f/3fc1RD0e6OBSr2Yji22TdIabDmRRg0G9KmtO21NBq1L8729/3fHzQLDp9y/DjzIjV51e1Qj12nT1fzUjIvUgOUgZNh8v1QuAtWPdXYc8gaqx6lfueaxteKSIOLnoWP74Pv/gjXfwADJqGzRDH3b9/7HdL5WXHY/JQBECIQyL/MXqSiuJbv3tvH4R0nQIGQSBPTbxhC6sgo9G1Ysq2vdbFjVT4jpvs/BVFWUMOGTw6RkhXRYr0RnU5LXJoNe3k9A4dHcmir/0Jb8Rlh/PfPWxgzewDTbxyC2WJAo9VQXVbH9+//2Oz5h7aWkLurlGsXjcdgUvsSdbQ3UXvUVjmwVzg4cawaXbDCZXE/4WhlHttPNK52fJf/HeuOr+Pi9Is7/HWOVB7hus+uw+lRk4a/P/Y9vxzzS17Y8gKuk5VFdRod9425j8Hhg89sUm00e+Bslh3wf9plUuIkrPpOzE/oLG6H/y2QBtX+k8xbVFsOR9dCwXY45D85GVAb8SWObXvORmwWpEyA3LW+101hMOmX8J+fqx+bw2HMjWrC60d3qte+/SPM/0TdGnrrJ76fP+RiuOiP8Ml96seTfwXf/7nxvlan9hta/zpMe0TNa3n/Zvj5tySFR3HpyASWbfUtXBdqCuKh2ZlYjW07Edch1UVqJ2aPW51zSDxoe3/3cdE9JFDpJarL6vjo+c1UlzUp8HWijmV/3sJlvxxNYmYb9tAVBcXjQafToA3S4PGT0KrVaRp797TCEmYk+/J08vaUNatEmzg4jLpqh7dKbspZkcy8aajavLDG5TcXBdTaLkW51WSMafuJE6fDjcvhRt+BwMZeXs9X/9pN7o7GXikGk46Ft/6OZzS/Y2vJFu/1f+3+F5MTJ2PrwOkHu9POi1te9AYpAO/ve58ZKTP489Q/43A7CNIFkRGWQaQp8rSNADtLZkQmmeGZ7C3z3UYx6ozcM+aeNq3qdDtjiLqK0FB47VTxI9r+Wh4P/LhCLXw25aHWq9vaS07fWbmpkDi4cgns/gTWv6rmnAyZAxPuhPBUtZGgq04tf//FQjUQakg60xnUAmjf/rH56+75BAZMVB8jrlb75jQEQ+N+pq7WHNuoBgSRg2DqI+oKjL2IiIREHrs4i4tGxPPqqoNU1DqZMTSG689JISm8i/7NeTxqrs8HP4Pik1vFlmi1gF76DDC1rfqx6N8kUOklig5X+gQpTX3/wX4uuWdki+XmGxiD9Qw9N4EfNxQxYloSW/x0Hx492/focmtsMcFc9etx5Kw4wpEdJzCYghh8ThyhkSa+erMxfyV35wmqyuoxhxhOu41jMOmoLKmlrsaJ3qDDHGLwm2DsqHNRUVTL5uVHKC+oITLJyujzUwiNNqNvQ8DidrnZuvKoT5Civq6bNa/nce/dD3BLyQ3e6zXOGr8nbNqi2lHN+uPrm11fmbuSlbkruST9Ev5v4v+h68gx2DMQExzDizNe5N297/LevvewO+1MSpzEvaPvZWDIwG4dS5tZ4+D8/4MPb29+L3WK2r+mraoKYPnJInEFOyAlG4p2+X9uxvlqqf72CIlXg4esS9XAITiisbhcyMncH5cTzr0Xjvygbl2BGmzsbjl/iK3vwIXPqqeTPr5XvTb5ATVP5+2rGp/37R9h2Dy1ON7JICs6xMjss+KYkBaJ0+3BZg5C35WJrBVHYfGFas+lBvZieH++muA7ILvrvrboMyRQ6SWO7mm5k21xblWz0zUtScmKYOvKo0Qlx3HuvAy2fnWU6rJ6rOFGzp4zkLSR0W16owfQajWExQYz+arBjDq/noM5xRzYXERxbvMKk8f2lhGTEoLJqid+kI3jPzbPj9Ab1VWRtxatxXMy/yVxcBjT5w/19hwCNcg4vO2ET5n8krxq9q0rYM7dI0kZGoFG2/qR4ppKJztW+29h4HJ4cBXqSQpJIq9KLcs+c8DMDjfX02l02Iw2qpz+K29agizdFqS4PC6Ka4spqWlMwL1t+G1cPeRqFEXBqrcG5kpKA60WBs9SVytWPA7luerR3LE3wcRftK8WR32FuiUB8OOXcN2/Yft7vm+qoNYQGX1dx04TaTStV7AN0kPyBLhrrZpIW1MKSWer5fpbUnMCjueo9VP0ZvXkTOww+PfNzZ+74wN19SXUd7u3tRNlnWrPJ82/nw1W/h5++nb7T1SJfkcClV6i6embU5lD9Kd9Y25gDTdxyT0jOZBTTMGBCiZclo4txozZqscW3bHl3yCDDp1Ow7r/Hmx2aqdBQ/dnk0XP9OuH8uEfN/tsAWl1GmbenMXaDw94gxRQew59+uI2Lr13lPf0T02Fg2/ean7iSFHgqzd2c+UjZ5+2horb5Wm2ZdVUXZmbMGMYeVV5RJgimDdoXoeP6kaaI7kx60b+sP4Pfu/PGzyvQ6/bXg63g5yiHB5Y9YA3kVav1XPP6Hu4IuOKDm1r9QhzuFqlNSVbPc6rM6jBQBtPO3npmqwcetywYhH8ZDGs+Rsc/EYNMtJnwOw/QFj7Wwq0fRxBEJasPkD9h5x5UfNOzQ1SstUVoKPr1fojjurmxeqa2vK2uqrT3TwuOOI/eRdQi9A5ayRQEaclgUovkToymjUf+g8ERs1MwdLG7RpQg5UR05IYNC4WFDBb2x7otMRsNZA+Jpr9G4ua3dNoIGFQmPfjsNhgfrLwbAoPVXBsXzlhscEkZ0Xww79/5PiB5istpfl2qsrqcbsVFI+Cvby+xSCjptJBXbXztIGK3qAjJMLUYuPGsGQjpQdLmTdoHrcOv5XEkI6X99ZoNJw/4HxW563mu3zf0x4PjH2ABEv3HNU8bj/OHf+7w5u8C+D0OHl+0/Ok2dKYktzGUy2BorVmfG0RHAmpU+HQN+rHBdvULaXRN6hbNtFD1ACouwM4jQYyL1CPSttPSQ4OMsKo6+Dd69Qcl0teUFeUNv695derKVVzRbqbNkg9Xr3nU//3w
1LU1SAhTkMClV7CEmbkwjuG8cVrO3ySYFNHRpE5Ia7dgYZGoyH4NDkt7aE36ci+LJ3CQ5W+ReA0MH3+UCyn1EIJiTAREmEiY6y6V3/iWDVHTskXaarkaBVb/neUiuJaLri99YJmbTlUawkzMuHydJ9j0g3CYoMZkBLPm4PfxGa0YQpqf4XbkpoSDlYc5NODn2LUGZmbMZdF2Ysorivm27xvsQRZmJw8mWhzNCGG7kko/OTAJz5BSlMvbXmJEdEjuqepYKAwh8Elf4Y3Lm7svFxTCmtfhutnqkmvHdnu6QxhKXDLl/C/ReobveJRV1LOvVcNYJwnywvkvAXXvgPlR9Q8F3/SpnZ/sNVg5DXw/Z8aj1M3NeWhtrU4EP2eBCq9hN6gI2VoBNf9dgLFR6uor3ERMyAUS5gBszUwKreGRpm54ldjKDxUyeHtJVgjTAw+OxZLuPG0jfyCjDq0Oo3Ptk9TBnMQ1WV1au8hRUFv1PldVQkONWC2tm3/PSUrguk3DmHNhwfUXkMatf/S5KsGn8yJ6VhJ76KaIhZ+u5ANBRu8197Z+w7XZF7DXaPu4q5Rd7Xy2V3D5XE1O93T1NGqozjac6qlr4hIhVuWq6spR36AiHT1jd2W0HNBSoPIdLjsFbVjs8OuniD6+BeNeTW2ZLjsJXXrZNg8WPOCGmg1pTerBev0ndNOot1syXDNUvjglsZcFa0OJt0PAyb1zJhEryOBSi+i0+sIjTITGuW/QV8gaOixk96OI8agBhiZE+LY/f3xZvdCIk246t24HOry9Y7Vx5hwaRrfvudbj0XTwupNS0wWPUMmxJM8NAJHretkV2mDt7dRR32d+7VPkNJg6d6lXJh6YY+sWgRpgxgRPYKvj/rPe0gLS+vQylGfYEtUH5kX9vRImjNa1YfHAyMtEDUYTuyHhFFqM8WGCq/hA+CWFbD8UTUxWFFgwLlqD6Pw1J4bv94EadPgzjVq4rOrTi1IZ4nu3D5Cok+TQEUEBL1BxzmXpFFX7fQpIqeeKhrE1/9qTJ7N212GNczIhXcMZ9+GAsoLaolKbjye3J5tMI1Wo+azdFLscKL2BG/teavF++/sfofh0cPRa7vp1EUTswfO5rVtr1Hrqm127xejf9HhU02iG2i1p2+4GJUBV7wOtWWAom73BEKiapDeN1lYiHaSQEUEDEuYkek3Dj1ZLbYeU7AejU5D7q5SYgaGUlNR4u1ttGdNAYe2lnD1Y+MJMmg7VPCtK3gUDzWt9HmpdFbi9rh7JFCJt8Tzj1n/4KHVD5FXreZkhOhDeHj8wwyNHNrt4xFdwBSqPhRFXWIUog+QQEUEFJNFrz6seopzq9i8PJfaSgdxaTbmLBjBuo8PUnRY3esecFYkJktQm9oHdJdQQyhTkqbw3r73/N6fkzqnx7ZYgrRBDI8ezpsXvklZfRkuj4twYzjRwdGB2SW5r7OfUJNkzRGd1z24qkA99rvlnZP1ZW6E8DS1a7EQvZT8dBJt5nJ5qK10oHgU9CZdlyXx1tmdbPr8CFtXNlbOLc23s299AbNvG8bX/9yDwRzEhMvSAypIATAGGZl/1nw+O/QZ1c5qn3tJIUmMjx/fQyNrFB0cfUYNFsVpOOvUEv+uOjBYwBrvG4hUHldL9294Hdz1cNY8GPXTjnd9bvq6790IeU2qIOe8CWNvhumPyQkb0WtpFEVpy2nOgFVZWYnNZqOiooLQ0I6d0ggUbreHersLjZaAOcnToLqsjpzluez6Lh+X00N0SgiTrxpEVHLIaU/0tFfpcTvv/G6d33vx6TYmXzOY4BADlrDArMHgUTzkVubyytZX+F/u/zBoDVw26DKuH3o9CVZpb9+nVR6H1c/Bln+Cq17NEZnyEAy/Sg0Uqgrg3Rsh75R/3yHxcOvyjgcriqIeq/7yEf/3b/lCPd4sRABp6/t3YP062o9VlNSy67t8DuYUozfqGDE9ieShEW0+wdKV7OX1fPrSNkqONq4QFOdW8Z8/buaKB8YQnxHWqV/v2N6W2wUcP1CBwaQL2CAFQKvRMtA2kMezH+eXY9VS6BGmCPS67s9L6c9cbg/HK+rIyS3j8IkaRibbyIwNIc7WRafmakrV48M/Lm+8VlsGXzwCznqYeDfkb2kepIDahHDDP9SVj478O7EXqSs0LVn/OiSO6/kj10J0gPyrDQAVxbX8++mN1FU3dtdduWQ3yVnhzJif1ePBSlmh3SdI8VLg2/d+5JJfjOzcFaDT5gD2jiTBYH1wt3VCFr7cHoVteRVc/4911Dga6+0khZt5+2fnkBJp6fwvWl3oG6Q09e1zMPJq2PxGy5+//T2YcIe6utJeHo9ajr4l9VWguJEf+aI3ar2VrehyLoebzV8e9glSGhzdVUZ5QSs/fLrJ0V0tV4wtzq1qtWdOe9VWO4gd0PISYOLgMIwW+WErWldYWcfNSzb4BCkAeWW1PPzBNipqu6C43YmDLd9zVKuF2jSt/MjV6OhwEB4cDplzWr4/8pr290ISIkBIoNLD6uxOv/1xGuxe07wAWnczt9JHSG/UoT3DPkENXA43O1cfY8/aAoZPTWp2X2/SMfmawZiCu3cLxe32UFVaR0leFeVFNdTXNA8qRWDJLa2hotb/39Oag6WUVndBoNJasqpGAzUlrReVG30DdDTJOcikbi2ZwprfixoMKRM69rpCBAD51bTHadDoWn6j17Zyr7sMHBbF9//e77eJTtakBMwhnRM41FQ62PjZEdwuD+MuTuX8W7LYt76QmpPHk4dPTSQspvO2UpwONzWVDpx1LvSmIIJDDehPqcVSW+1g37pC1n98EEed+tt50pBwpl4/BFuUGY/bg71CfQ2dXkdwiAG9qefrufR35acJJutcXdCkz5YE1lh1C+hUadPg4Cq16Fn6DDiw0vd+ZDqMvu7MjimHDYTbvoJvn4fd/1VXUMbMh7NvhtCON9UUoqdJoNLDzNYgMifEsW1lnt/7Qyf2/CkRS5iBmTdl8b8lu3yClegUK6NmJqML6pw35toqB+6TbyAbPjmEOURP2qhoIhOtnDhWjaPW5VN1tr7GSZ3dheJRMAYHYW5Hk0V7eT3rPznEnjXH8bgVtDoNQ7LjGX9xqjdRV/EoHNxSwnfv+5bqz9tTxsd/2cLce0dxaFsJGz45RH2NOrb0MdGcOy/jtN2bz1R5fTm1zlq0Wi1Rpih0WgmOmsqIaTkHxWbWYzN3wapcaAJc/wG8Ode35070ELXfzvs3qSeBLnwasubCrmXqEeYRV0PG+WoZ/zOh1aoBz5znYPqjgEZd5ZEkbtHLSaDSw3R6HaOmJ3NoS4lv12Fg0LhYwmJ6vq+P3hhE2qgorvvdBI7sOEFNpYOUrAjCYoM7NdFXG+S7E1lb5WTnt/nejydcmgaAoiiUF9awauk+ju1RTwhFJVuZcm0m0cnW0wZO9bUufvhgP/s2NP7m63Er6tHrejfnXZuJ0RyEvaKe9f/1n3dQUVzLiXw7Gz89TH2N2pFY8Sjs31hEZXEtcxaMIDi0bd8bRVEoqimi0lGJQWsgzBTWYjn7GmcN+8r28dzG59havJUw
[... base64-encoded PNG figure data omitted ...]",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
@@ -631,7 +631,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 21,
    "id": "20407247",
    "metadata": {},
    "outputs": [],
@@ -643,7 +643,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 20,
+   "execution_count": 22,
    "id": "29830551",
    "metadata": {},
    "outputs": [
@@ -653,7 +653,7 @@
        "<AxesSubplot: >"
       ]
      },
-     "execution_count": 20,
+     "execution_count": 22,
      "metadata": {},
      "output_type": "execute_result"
     },
@@ -684,7 +684,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 23,
    "id": "18bde994",
    "metadata": {},
    "outputs": [
@@ -706,13 +706,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 24,
    "id": "fda1732e",
    "metadata": {},
    "outputs": [
     {
      "data": {
       "image/png": "[... base64-encoded PNG figure data omitted ...]",
jp+/DgAoHnz5mKDaACP+1OWNW/eHMePH+eRUyKiPBIfH4+qVauiatWqbI6hI9IanAQHB7PBCZEWO378OCpWrIiiRYuKjpLnWMBRlnl4eODZs2e4d++e6ChERHpBkiTcvHkTN2/e5JdjOqZevXqIjo6Gp6cnAKganJw+fVpwMiICgGPHjunl0TeABRxlQ5MmTSCXy3kaJREREQATExMcOnQIf/75JwAgJSUFTZo0wYgRI3iZiBZLTk7G/v37MXbsWDRs2BB2dnYwMjKCra0tateujYkTJ+LBgweiY1IuPH/+HHfu3IGHh4foKBqR5S6UGUlNTUVISAhOnTqFsLAwvH37FrGxsbC2tkbhwoVRsWJFNG7cGG5ubuzU9B+61IUyTf369VG6dGn8/fffoqMQEek8hUIBS0tLAEBcXBwsLCwEJ6KcevHiBerXr4/Hjx8DAIoWLYrg4GCUKFFCcDL62KtXr1C5cmW8efMm03HGxsZYuHAhRo8enU/JKC9t2rQJPj4+iIiIgKOjo+g4WaaRLpQfS0lJwa+//opSpUqhUaNGmDx5Mvz8/LBz504cPnwYO3bswB9//IGJEyeiQYMGKFOmDJYtW5bv0wZERUXBx8cH1tbWsLW1xcCBAxEXF5fp+JEjR6JixYowMzNDyZIlMWrUKERHR6uNk8lkn9w2b96s6ZcjHK+DIyIi+lTRokXVGpykdaxkgxPtkpSUpCreXF1dMWPGDOzduxehoaE4evQoJk6cCFNTU7x//x5jxozB6tWrBSemnDh+/DiqVKmiU8VbduToCNybN2/QuXNnnDlzBgCy9GE+ba44Dw8PbN26FYULF87u0+ZIu3bt8OLFC6xatQrJyckYMGAA6tati02bNqU7/vr165gxYwb69++PKlWq4NGjRxg6dChq1KiBbdu2qb2edevWoW3btqr7bG1tYWpqmuVsungELm0+uFu3bqFSpUqi4xAR6TQegdNPFy5cQNOmTZGUlATgwyUI+/fvh7m5ueBk9OzZMwwYMACzZs1C/fr10x0THByM5s2bIyEhATY2Nnjy5AmsrKzyOSnlRvny5dGmTRssXbpUdJRsyWptkO0CLjU1FY0aNUJISAgkSYJMJkOrVq3g6emJ2rVro0iRIrC0tERsbCxev36Ny5cv49ChQzhy5IhqfIMGDXDy5EmNn1J569YtVKlSBSEhIXBzcwMA7N+/H15eXnj69CmcnZ2ztJ6tW7eiT58+UCgUMDQ0BPChgNu5cye++OKLHOfTxQIuLi4OhQoVwm+//YZhw4aJjkNEpNNYwOmvpKQkeHt748iRIwAAQ0NDHDt2TC/npNJHEyZMwE8//QQA2L59O7p06SI4EWXV06dPUaJECWzbtg1du3YVHSdbNHYK5fz583HhwgUAQK1atXDt2jXs378fEyZMQIsWLVCzZk2ULVsWrq6u8PT0xMSJE3Hw4EH8+++/cHV1hSRJOHfuHBYtWpTzV5dF586dg62trap4AwBPT08YGBggODg4y+tJ24hpxVuaESNGwM7ODvXq1YOfn99nj0QmJSUhJiZG7aZrLC0tUa9ePTYyISLKAzKZDC4uLnBxcVGdqUL6wcTEBIcPH9aJBifx8fGwsrKCTCaDj4/PZ8efO3dOdfnI8uXLVfe/ffsW69atQ58+fVClShVYWlrC2NgYTk5OaNOmDVavXo33799nuN7w8HDVetevXw8A2LFjB7y8vODs7AxDQ8N8a0rxcffC+/fv58tzUt5I+4zarFkzwUk0J1sFXHJyMn777TfIZDLUqlULZ86cQZUqVbK0bLVq1XD27FnUqlULkiRhyZIlSElJyVHorIqIiICDg4PafYaGhihcuDAiIiKytI7Xr19j9uzZGDx4sNr9s2bNwpYtW3Do0CF07doVw4cPx++//57puubNmwcbGxvVTVcvbOZ1cEREecPc3Bzh4eEIDw/n6XV6qm/fvnj+/Lnqd/7y5ctRvHhxPHnyRHCy/zE3N1edUbR7924oFIpMx/v7+wP48JmqR48eqvtr1aqFr776Cv7+/rh16xYUCgWSk5Px8uVLHDx4EEOGDEH9+vWz9BlMkiT069cPXbt2xb59+/DixYt87aOQdvorAMjl8nx7Xsq948ePo3r16rCzsxMdRWOyVcAFBgbi1atXkMlk2LhxY7au9wIAU1NT/PXXX5DJZHj16hX27NmTreXTTJ48Od0mIh/fbt++naN1fywmJgbe3t6oUqUKZs6cqfbYtGnT0KhRI9SqVQuTJk3Ct99++9mjilOmTEF0dLTqpk1v3tnh4eGBV69e4ebNm6KjEBERab2iRYvi0aNHGDduHADtbHCSduRNoVBg9+7dGY5LSUnB1q1bAQBt2rRR+5CcmpoKd3d3zJ49G3v27EFISAjOnDmDjRs3qnoGXL58Gb169fpsnl9++QV//fUXmjRpgk2bNuHixYs4fPgw+vbtm5uXmWUnTpxQ/Vy5cuV8eU7KG/o8/5uKlA1jx46VZDKZ1KZNm+ws9ok2bdpIBgYG0rhx43K0fGRkpHTr1q1Mb0lJSdLatWslW1tbtWWTk5MluVwu7dixI9PniImJkRo0aCC1bNlSSkhI+GymPXv2SACkxMTELL+O6OhoCYAUHR2d5WW0gUKhkIyMjKTff/9ddBQiIiKdEhwcLJmYmEgAJABSkyZNJIVCkeXllUqlNHLkSKlt27ZSfHx8nuVKTk6WHBwcJACSt7d3huP27dunyr5p0ya1x8LCwjJ9Dj8/P9Wyhw8f/uTxhw8fqh4HIPXr109SKpU5e0G58Pz5c8nKykoCINnb22fpcyBph/DwcAmAtHPnTtFRciSrtUG2jsCFhoZCJpOhZcuWuSoaW7ZsCUmSEBoamqPl7e3tUalSpUxvxsbGaNCgAd69e6f2PEePHoVSqYS7u3uG64+JiUHr1q1hbGyMgICALB1pvHLlCgoVKgQTE5McvSZdYm5ujvr16/M6OCKiXEpISEDdunVRt25dJCQkiI5D+aBevXqIjo5WfZY6deoUbGxscPr06Swtv2HDBvz+++84cOCAasqCvGBoaIiePXsCAA4ePJjhPGlpp09aWlqiU6dOao+VL18+0+cYMGAAXF1dAQC7du3KdKytrS2WLl2a79eGSpKEIUOGIDY2FsCHM66ye8YZiXPs2DHIZDI0bdpUdBSNylYBl3bKX40aNXL1pGnLP3r0KFfr+ZzKlSujbdu2GDRoEC5cuIAzZ87gm2++Qa9evVQdKJ89e4ZKlSqpGrOkFW8KhQJr165FTEwMIiIiEBERoTr3OjAwEH/88QeuX7+Oe/fuYcWKFZg7dy5Gjhyp0dejTZo3b44TJ05o3YXYRES6RKlU4uLFi7h48SLfTwuQtAYnaY06strgJCwsTNUBWpIkrFix4rOFUHaknUaZnJyMLVu2fPJ4QkKC6vm++OKLTK/blCQJERERCAsLw/Xr11W3YsWKAQD+/fffTLN06NBBSOv+uXPnIjAwEMCHzzojRozI9wyUc8ePH0fNmjXzbboyYbJzWK9w4cKSgYGBFBwcnONDg5L04fQBmUwm
FSpUKFfryYo3b95IvXv3liwtLSVra2tpwIABUmxsrOrxtMP1x44dkyRJko4dO6Z2+P7j28OHDyVJ+nD6gKurq2RpaSlZWFhINWvWlFauXCmlpqZmK5uunkIpSZJ09OhRCYD077//io5CRKSz4uLiVL9j4uLiRMchAZ4/fy6VKFFCtR8ULVpUevLkySfjEhMTpRo1akiGhoaqsTKZTLK2tpYeP36cZ3nKli0rAZAaNWr0yWObN29WPfe+ffvSXX7Pnj2St7e36hTEjG6VK1f+ZNmPT6FctGhRnr2mrNq4caMkk8kkAFLp0qWlFy9e5HsGyjmlUimVLFlSGjt2rOgoOaaRUyjT2t6nzVmTU2nz3KQdntakwoULY9OmTYiNjUV0dDT8/PzU8pcqVQqSJKna0np4eECSpHRvpUqVAgC0bdsWly9fRmxsLOLi4nDlyhUMGTJE4/PaaZMGDRrAzMwMBw4cEB2FiIhIZ6XX4KREiRKfNDiZOnUqrl27ptbBW5IkKBQK9OrVK886NKYdhTt79izCw8PVHks7fdLBwQGenp5qj0mShK+//hrt27dHUFDQZz/jfe6U4UKFCmUzee4EBQVhwIABkCQJTk5OOHToEJycnPI1A+VOWFgYHj9+nOtLvXRBtiqOvG7fytNFdJepqSlatWqlOs2AiIiIckYmk+Gnn37C+fPnYWxsDAAYPHgwmjZtivj4eBw4cAA///xzutP3pKam4ty5c5gzZ06eZEkr4CRJwt9//626PyoqSvWlbc+ePT+ZG9fPzw9r164FALi6umL9+vW4desWYmJikJKSovoyPK2LZHqv5WP52br/+PHj6NatG5KTk1GoUCEcOHAAZcuWzbfnp7wREBAAMzMztGjRQnQUjSs4h4woz3Xs2BFnzpzB69evRUchIiLSee7u7oiJifmkwUmPHj0yPctHkiTMnDkzy41QMlOhQgW4ubkBADZt2qS6f9u2bapJuNOb7DvtiGG5cuVw9uxZ+Pr6olKlSrCyslIrxqKionKdMS9duHABHTp0QGJiIiwtLbFv375c93ogMXbv3o3WrVvDzMxMdBSNM/z8kE8tX778kwmysyMyMjLHy5L2aN++PSRJQlBQEHx9fUXHISIi0nlpDU42bNiA/v37IyUlRXUJS2ZkMhl69OiBGzdu5Pr0Qx8fH1y8eBHXr1/H1atXUaNGDdXpk2XLlk23k/eNGzcAfPhyN6MP0JIk4dKlS7nKlpeuXr2Ktm3bIi4uDqampggMDMy0Szlpr1evXuHs2bP4448/REfJFzkq4FasWJHXOUgHOTo6on79+ggICGABR0SUQx9PhEyUxtfXF48ePcKMGTOyNF6pVCIyMhJfffUVduzYkav2+7169cKECROQmpoKf39/FC5cGKdOnQKQ/tE3AKpr8xQKRYbr3b17N168eJHjXHkpLCwMrVu3xtu3b2FkZITt27er+iGQ7gkKCgLw4eBCQZDtUygzavCR3Rvph44dO+LAgQNITEwUHYWISOdYWFjg1atXePXqlarBFxEAXLp0CbNnz87WMqmpqdi1axdWr16dq+d2cnJSXUf0999/Y9OmTarPbhkVcGlzwAUGBqZ7muT9+/e1piX/48eP4enpiZcvX0Iul2PTpk3w8vISHYtyYffu3WjQoEGuzhDUJdk6AseJm+m/OnbsiClTpuDo0aN88yMiIsoDcXFx6N69e46/8B41ahQaN26MqlWr5jiDj48PDh06hCdPnmDevHkAADc3N1SoUCHd8f369cPEiRPx/PlzNGjQAJMmTUK1atWQmJiIo0eP4pdffkFSUhJq164t9DTKN2/ewNPTUzW38fjx41GpUiVcv349w2UKFSqkmr+OtE9CQgIOHjyI6dOni46Sb7JVwDVr1kxTOUhHVa5cGWXLlkVAQAALOCIiojwwatQohIeH57hbd2pqKrp164ZLly7luKFDly5dMGzYMCQkJODdu3cAMj76BgCjR4/GoUOHcPDgQYSFhWHgwIFqj5uZmeHPP/9EUFCQ0ALu2rVruHv3rurvCxcuxMKFCzNdxtfXVzXpOmmfo0ePIj4+Hh07dhQdJd+wCyXlikwmQ6dOnRAQEMBpIYiIsikhIQEeHh7w8PD47LxYVDD8888/WLduXa5+p6ampiIsLAzjx4/P8TqsrKzQoUMH1d/lcjl69eqV4XgjIyMEBQXht99+g5ubG8zNzWFmZoZy5cph6NChuHTpErp3757jPEQZ2b17N8qXL49KlSqJjpJvZBIvSBMmJiYGNjY2iI6OhrW1teg4OXbixAl4eHjgwoULqFu3rug4REQ6Q6FQwNLSEsCH0+Z4HVzBFh4ejmrVqiE+Pj7P+gXs3LkTX3zxRZ6si0jbKJVKFCtWDD4+Pli8eLHoOLmW1dqAR+Ao1xo1aoTChQsjICBAdBQiIiKdNX36dCgUijwr3mQyGXx9fVXXexHpm4sXLyIiIqJAnT4JsICjPGBoaAhvb2/s3r1bdBQiIiKd1a9fP9SqVQvAh+Irs8m7s0KSJCgUCvTq1Qupqal5EZFIq+zevRtFihRBw4YNRUfJVyzgKE907NgR165dw8OHD0VHISIi0kmenp64dOkSnj59iuXLl6NFixYwNPzQby7tz+xKTU3FuXPnMGfOnLyMSqQVAgIC4O3tneP/H7qK18AJpC/XwAFAbGws7OzssHDhQowePVp0HCIincBr4OhzYmJisH//fuzatQuBgYGIi4uDoaGhauLsrJLJZDh58iQaN26soaT54927d3j69GmOlq1WrVoepyGRHjx4gLJly2Lbtm3o2rWr6Dh5Iqu1AQs4gfSpgAOAtm3bIjk5GUeOHBEdhYhIJ7CAo+xITk7GyZMnsXv3bmzbtg0vXryAXC7P0umRMpkMTk5OuHHjBgoVKpQPaTVj/fr1GDBgQI6W5Ude/fLLL79g0qRJeP36NaysrETHyRNsYkL5rlOnTjhx4gTevn0rOgoRkc4wNzeHubm56BikA4yMjNCyZUv89ttvePbsGS5duoTvv/9edWRJJpNBJpOlu6wkSXj58iW++uorFjKkFwICAtCyZUu9Kd6ygwUc5ZkOHTogNTUVQUFBoqMQEekECwsLKBQKKBQKHn2jbJHJZKhVqxZmzpyJa9euITw8HL/++iuaNm2qan4il8vVllEqldi1axeWLl0qInKe6N+/PyRJytGN9MebN29w8uTJAtd9Mg0LOMozxYsXR6NGjeDv7y86ChERUYHi4uKCkSNH4vjx43j9+jU2btyIzp07w8zMDADUjsyNHj0aO3fuFBWVKNf++ecfAECXLl0EJxGDBRzlqT59+uDgwYN4+fKl6ChEREQFUqFCheDj44OtW7fi7du32Lt3LwYNGgQbGxsAH06n7Nq1K1auXCk4KVHObNy4EW3atIGDg4PoKEKwgKM81aNHD8jlcmzevFl0FCIirZeYmAhvb294e3sjMTFRdBzSQyYmJmjXrh1WrVqFqKgo7Nu3D+XKlYMkSQWu9Trph/v37+PcuXPo27ev6CjCsAulQPrWhTJN586d8eTJE1y8eFF
0FCIircYulCSCJEnYu3cvvLy8Mmx6QqStfvjhByxevBgvX77UuwZQ7EJJwvTp0wehoaG4deuW6ChERET0HzKZDN7e3izeSOdIkoSNGzeia9euele8ZQcLOMpz3t7esLGxYTMTIiIiIsozFy5cwL1799CnTx/RUYRiAUd5ztTUFD169IC/vz+USqXoOERERESkBzZu3AhnZ2c0b95cdBShWMCRRvTp0wfh4eE4c+aM6ChEREREpOOSk5OxefNmfPnll5/McVjQsIAjjWjcuDFKliyJjRs3io5CRERERDruwIEDeP36dYE/fRJgAUcaYmBggD59+mDLli1ISkoSHYeIiIiIdNjGjRtRvXp11KxZU3QU4VjAkcb4+Pjg3bt3CAoKEh2FiEgrWVhYQJIkSJLEKQSIiDIQHR2N3bt38+jb/2MBRxpTpUoV1K5dm6dREhEREVGO7dixA0lJSejdu7foKFqBBRxpVJ8+fRAUFISoqCjRUYiIiIhIB23cuBEeHh4oUaKE6ChagQUcaVTv3r2RkpKCbdu2iY5CRKR1EhMT0b17d3Tv3h2JiYmi4xARaZ2nT5/i2LFj6Nu3r+goWoMFHGmUk5MTWrVqhb/++kt0FCIirZOamopt27Zh27ZtSE1NFR2HiEjrbNq0CSYmJujSpYvoKFqDBRxpXJ8+fXD69Gncv39fdBQiIiIi0hGSJOHPP/9Ex44dYWNjIzqO1mABRxrXpUsX2NraYtWqVaKjEBEREZGOOH36NG7cuIGBAweKjqJVWMCRxpmbm2PAgAFYu3YtEhISRMchIiIiIh2wbNkylC9fHp6enqKjaBUWcJQvhg0bhqioKGzZskV0FCIiIiLSchEREdi+fTuGDx8OAwOWLB/j1qB8Ub58ebRp0wbLli0THYWIiIiItNyaNWtgZGSE/v37i46idVjAUb4ZPnw4QkJCEBISIjoKEREREWmplJQUrFq1Cj4+PrC1tRUdR+uwgKN84+3tDRcXFx6FIyL6f+bm5oiLi0NcXBzMzc1FxyEi0gq7d+/Gs2fPMGLECNFRtBILOMo3crkcQ4cOxebNm/HmzRvRcYiIhJPJZLCwsICFhQVkMpnoOEREWmH58uVo2LAhXF1dRUfRSizgKF8NHDgQkiTBz89PdBQiIiIi0jK3bt3C0aNHefQtEyzgKF/Z29ujR48eWLFiBVJTU0XHISISKikpCf3790f//v2RlJQkOg4RkXDLly+Hvb09unbtKjqK1mIBR/luxIgRePjwIQ4cOCA6ChGRUCkpKdiwYQM2bNiAlJQU0XGIiISKi4vDhg0bMGjQIJiYmIiOo7VYwFG+c3d3R+3atdnMhIiIiIhUNm7cCIVCgSFDhoiOotVYwFG+k8lkGDFiBPbt24cHDx6IjkNEREREgkmShGXLlqFjx44oWbKk6DhajQUcCdGrVy/Y2tpixYoVoqMQERERkWCnTp3C9evXMXz4cNFRtB4LOBLC3NwcAwYMgJ+fHxISEkTHISIiIiKBli9fjgoVKqBly5aio2g9FnAkzLBhwxAVFYV//vlHdBQiIiIiEuTFixfYvn07hg8fDgMDliefwy1EwpQrVw5t27bFr7/+CkmSRMchIiIiIgFWrFgBY2Nj+Pr6io6iE1jAkVATJ07ElStXOKUAERVI5ubmiIyMRGRkJMzNzUXHISLKd7Gxsfj9998xePBg2Nraio6jE1jAkVDNmzeHu7s75s6dKzoKEVG+k8lksLe3h729PWQymeg4RET5buXKlVAoFBg/frzoKDqDBRwJJZPJMHXqVJw6dQqnT58WHYeIiIiI8kliYiJ+/vln+Pr6onjx4qLj6AwWcCRc+/btUbVqVcybN090FCKifJWUlIQRI0ZgxIgRSEpKEh2HiChfrV+/HpGRkfj2229FR9EpMondI4SJiYmBjY0NoqOjYW1tLTqOUP7+/ujTpw8uX74MV1dX0XGIiPKFQqGApaUlACAuLg4WFhaCExER5Y+UlBSUL18e7u7u2Lx5s+g4WiGrtQGPwJFW6NmzJ0qXLs2jcEREREQFwObNmxEeHo4pU6aIjqJzWMCRVjA0NMSkSZOwdetWhIWFiY5DRERERBqiVCoxb948eHt7o2bNmqLj6BwWcKQ1fH194ejoiIULF4qOQkREREQaEhgYiJs3b/LoWw6xgCOtYWpqivHjx+PPP//EkydPRMchIiIiojwmSRLmzp2Lpk2bolGjRqLj6CQWcKRVhgwZAktLS/z000+ioxARERFRHjt69CguXLiAqVOnio6is1jAkVaxsrLCqFGjsHr1arx69Up0HCIiIiLKQ3PnzkXt2rXRunVr0VF0Fgs40jojR46EgYEBfvvtN9FRiIg0yszMDA8fPsTDhw9hZmYmOg4RkUYFBwfj6NGjmDJlCmQymeg4OovzwAnEeeAyNn78eKxduxaPHz/mtiEiIiLSA1988QVu376NGzduQC6Xi46jdTgPHOm0cePGISEhAStWrBAdhYiIiIhy6fr169i9ezcmT57M4i2XWMCRVipWrBj69++Pn3/+GfHx8aLjEBFpxPv37zFx4kRMnDgR79+/Fx2HiEhj5s2bh5IlS8LHx0d0FJ3HAo601qRJk/D27VteC0dEeis5ORmLFy/G4sWLkZycLDoOEZFGXL16FX///TcmT54MIyMj0XF0Hgs40lplypTBkCFDMH/+fERFRYmOQ0REREQ5MHXqVJQtWxZff/216Ch6gQUcabVp06YhNTUV8+bNEx2FiIiIiLLp5MmTCAoKwpw5c3j0LY+wgCOt5uDggPHjx+P333/HkydPRMchIiIioiySJAmTJk1CnTp10K1bN9Fx9AYLONJ648ePh7W1NWbMmCE6ChERERFl0a5du3D+/HksWLAABgYsO/IKtyRpPSsrK0yfPh0bNmzAjRs3RMchIiIios9ISUnBlClT0Lp1a7Rs2VJ0HL3CAo50wuDBg1GqVClMnTpVdBQiIiIi+oz169fjzp07mD9/vugoeocFHOkEY2Nj/PjjjwgICMDp06dFxyEiyhNmZma4fv06rl+/DjMzM9FxiIjyRHx8PGbMmIHevXujVq1aouPoHZkkSZLoEAVVTEwMbGxsEB0dDWtra9FxtJ5SqYSbmxvMzc1x6tQpyGQy0ZGIiIiI6D8WLFiA77//Hrdv30bZsmVFx9EZWa0NeASOdIaBgQHmz5+PM2fOIDAwUHQcIiIiIvqPqKgozJs3D0OHDmXxpiEs4EintGrVCi1atMCUKVOQmpoqOg4RUa68f/8eM2fOxMyZM/H+/XvRcYiIcm3+/PlISUnB999/LzqK3uIplALxFMqcCQkJQb169eDn54cBAwaIjkNElGMKhQKWlpYAgLi4OFhYWAhORESUc0+ePEH58uUxefJkzJw5U3QcncNTKElv1a1bF927d8f06dORkJAgOg4RERERAZg5cyasra0xfvx40VH0Ggs40klz5szBixcvsGzZMtFRiIiIiAq8mzdvYv369Zg2bRqsrKxEx9Frel/ARUVFwcfHB9bW1rC1tcXAgQMRFxeX6TIeHh6QyW
Rqt6FDh6qNefz4Mby9vWFubg4HBwdMnDgRKSkpmnwp9JHy5ctj0KBBmDt3Lt6+fSs6DhEREVGBNmXKFLi4uGDIkCGio+g9vS/gfHx8cOPGDRw6dAh79uzByZMnMXjw4M8uN2jQILx48UJ1W7hwoeqx1NRUeHt74/379zh79iw2bNiA9evXY/r06Zp8KfQfM2bMQEpKCqZNmyY6ChEREVGBtX//fgQEBGDu3LkwNjYWHUfv6XUTk1u3bqFKlSoICQmBm5sbgA87mJeXF54+fQpnZ+d0l/Pw8ICrqyt++eWXdB/ft28f2rdvj+fPn8PR0REAsHLlSkyaNAmvXr3K8o7LJia5t2TJEkyYMAEhISGoXbu26DhERNnCJiZEpOuSkpJQrVo1lChRAkeOHOE8vbnAJiYAzp07B1tbW1XxBgCenp4wMDBAcHBwpsv6+/vDzs4O1apVw5QpUxAfH6+23urVq6uKNwBo06YNYmJicOPGjQzXmZSUhJiYGLUb5c4333yDKlWqYMSIEVAqlaLjEBERERUoixcvRnh4OJYuXcriLZ8Yig6gSREREXBwcFC7z9DQEIULF0ZERESGy3355ZdwcXGBs7Mzrl69ikmTJuHOnTvYsWOHar0fF28AVH/PbL3z5s3DDz/8kNOXQ+kwMjLCsmXL0KxZM6xbtw4DBw4UHYmIKMtMTU1x4cIF1c9ERLokPDwcc+bMwdixY1GlShXRcQoMnTwCN3ny5E+ajPz3dvv27Ryvf/DgwWjTpg2qV68OHx8f/Pnnn9i5cyfu37+fq9xTpkxBdHS06vbkyZNcrY8+aNq0Kfr27YtJkyYhKipKdBwioiyTy+WoW7cu6tatC7lcLjoOEVG2jBkzBoULF2YfiHymk0fgxo8fj/79+2c6pkyZMnByckJkZKTa/SkpKYiKioKTk1OWn8/d3R0AcO/ePZQtWxZOTk6qb0zTvHz5EgAyXa+JiQlMTEyy/LyUdQsXLsTu3bvx3XffYcWKFaLjEBEREem1oKAg7N69G1u2bFFdy0v5QycLOHt7e9jb2392XIMGDfDu3TuEhoaiTp06AICjR49CqVSqirKsuHLlCgCgaNGiqvXOmTMHkZGRqlM0Dx06BGtrax4+FsTJyQmzZ8/GmDFjMHDgQLXrHomItNX79+/x66+/AgBGjx7N7m1EpBMSExMxatQoeHp6olu3bqLjFDh63YUSANq1a4eXL19i5cqVSE5OxoABA+Dm5oZNmzYBAJ49e4aWLVvizz//RL169XD//n1s2rQJXl5eKFKkCK5evYqxY8eiePHiOHHiBIAP0wi4urrC2dkZCxcuREREBPr27Yuvv/4ac+fOzXI2dqHMWykpKahTpw5MTExw/vx5GBjo5BnCRFSAsAslEemiWbNm4ccff8TVq1dRqVIl0XH0BrtQ/j9/f39UqlQJLVu2hJeXFxo3bozVq1erHk9OTsadO3dUXSaNjY1x+PBhtG7dGpUqVcL48ePRtWtXBAYGqpaRy+XYs2cP5HI5GjRogD59+qBfv36YNWtWvr8++h9DQ0MsW7YMISEhWLt2reg4RERERHrnwYMHmDdvHsaPH8/iTRC9PwKnzXgETjP69++PwMBAhIWFoUiRIqLjEBFliEfgiEjXdOzYEVeuXMGtW7f4npXHeASOCqwFCxYgNTUVU6ZMER2FiIiISG8EBgYiMDAQS5YsYfEmEAs40juOjo6YM2cO/vjjj89O2E5EREREn5eQkIBRo0ahTZs26NKli+g4BRoLONJLQ4cOhaurK0aMGIHU1FTRcYiIiIh02vz58/H8+XP8/vvvkMlkouMUaCzgSC/J5XIsW7YMoaGhWLNmjeg4RERERDrr3r17WLBgASZOnIjy5cuLjlPgsYmJQGxionkDBw7Ejh07cPPmTdU8fkRE2iI1NRWnTp0CADRp0gRyuVxwIiIidZIkoW3btrh9+zZu3boFc3Nz0ZH0FpuYEOFDQxNjY2MMHToU/K6CiLSNXC6Hh4cHPDw8WLwRkVZau3YtDh48iBUrVrB40xIs4Eiv2dnZYeXKlQgICIC/v7/oOEREREQ64/Hjxxg3bhy++uoreHl5iY5D/4+nUArEUyjzT58+fRAUFIQbN27A2dlZdBwiIgBAcnIyVq9eDQAYPHgwjIyMBCciIvpAkiS0bt0at2/fxvXr12FjYyM6kt7Lam3AAk4gFnD5JyoqClWrVkWdOnUQGBjI7klEpBU4kTcRaatVq1Zh6NChOHDgAFq3bi06ToHAa+CIPlK4cGGsXr0aQUFB2LBhg+g4RERERFrr4cOHGD9+PAYNGsTiTQuxgKMCo0OHDujXrx9Gjx6Np0+fio5DREREpHWUSiUGDhyIIkWKYPHixaLjUDpYwFGB8ssvv8DS0hKDBg1iV0oiIiKi/1i5ciWOHTuGtWvX8hIfLcUCjgqUQoUKYc2aNdi/fz/8/PxExyEiIiLSGg8ePMDEiRMxbNgweHp6io5DGWABRwWOl5cXBgwYgLFjx+Lx48ei4xAREREJp1QqMWDAADg4OGDhwoWi41AmWMBRgfTzzz/DxsYGAwcO5KmUREREVOAtXboUJ0+ehJ+fn6o7LmknFnBUINna2uKPP/7A4cOHVXMwERHlNxMTE+zZswd79uyBiYmJ6DhEVEDdvXsXkydPxjfffIPmzZuLjkOfwXngBOI8cOINHjwYf//9N65du4ZSpUqJjkNERESUr1JTU9GsWTO8ePECV69e5XyUAnEeOKIsWLx4MQoXLoyBAwdCqVSKjkNERESUr3777TecPXsW69evZ/GmI1jAUYFmbW2NtWvX4ujRo1i+fLnoOERUwCQnJ2P9+vVYv349kpOTRcchogLm9u3bmDp1KkaNGoUmTZqIjkNZxFMoBeIplNrjm2++wR9//IHz58/D1dVVdBwiKiAUCoWqWUBcXBy//SaifJOQkAB3d3ckJycjNDQU5ubmoiMVeDyFkigbFi9ejMqVK6NHjx6IjY0VHYeIiIhIo8aMGYO7d+9iy5YtLN50DAs4IgCmpqbYsmULXrx4gaFDh3JqASIiItJbmzdvxurVq/Hbb7+hevXqouNQNrGAI/p/5cuXx+rVq7Fp0yb4+fmJjkNERESU5+7evYtBgwahd+/e+Prrr0XHoRxgAUf0kd69e2PQoEEYOXIkrl+/LjoOERERUZ5JTExEz5494eTkhFWrVkEmk4mORDnAAo7oP3755ReULVsWPXr0gEKhEB2HiIiIKE9MmDABN27cwJYtW2BlZSU6DuUQCzii/zA3N8eWLVvw6NEjjBgxQnQcIiIiolzbvn07li1bhiVLlqBWrVqi41AuGIoOQKSNKleujBUrVsDX1xfNmzeHr6+v6EhEpIdMTEywZcsW1c9ERJrw4MEDDBw4EN26dcOwYcNEx6Fc4jxwAnEeOO03YMAAbNmyBRcvXkTlypVFxyEiIiLKlvfv36Nx48Z4/fo1Ll++DBsbG9GRKAOcB44oDyxduhQuLi7o0aMH4uPjRcchIiIiypbJkyfjypUr2LJlC4s3PcECjigTFhYW2LJlC+7fv
4/Ro0eLjkNEeiYlJQVbt27F1q1bkZKSIjoOEemZgIAALFmyBIsWLYKbm5voOJRHeAqlQDyFUnesXbsWX3/9Nfz9/fHll1+KjkNEekKhUMDS0hIAEBcXBwsLC8GJiEhfPHr0CLVq1UKzZs2wY8cOThmgA3gKJVEe+uqrr+Dj44MhQ4YgLCxMdBwiIiKiDCUnJ6N3796wtraGn58fizc9wwKOKAtkMhlWrFgBZ2dndO/enfPDERERkdaaMmUKQkJCsHnzZhQqVEh0HMpjLOCIssjKygrbtm3D/fv30b9/fyiVStGRiIiIiNRs2LABP/30ExYvXoz69euLjkMawAKOKBuqV6+Ov/76C9u2bcPs2bNFxyEiIiJSOXfuHAYPHoyBAwdi1KhRouOQhrCAI8qmzp07Y/bs2Zg5cya2bt0qOg4RERERnjx5gs6dO6NevXpYvnw5r3vTY4aiAxDpou+++w7Xr1+Hr68vypUrh1q1aomORERERAWUQqFAp06dYGJigu3bt8PY2Fh0JNIgFnBEOSCTyeDn54emTZuiU6dOCAkJgaOjo+hYRKRjjI2NsW7dOtXPRETZJUkSBgwYgLCwMJw5cwYODg6iI5GGcR44gTgPnO579uwZ3NzcULp0aRw7dgwmJiaiIxEREVEBMmvWLMyYMQM7duxA586dRcehXOA8cET5oFixYti1axcuXbqEoUOHgt+HEBERUX7Zvn07ZsyYgdmzZ7N4K0BYwBHlkru7O/744w+sX78eS5YsER2HiHRISkoKgoKCEBQUhJSUFNFxiEiHXLlyBf369UPPnj3x3XffiY5D+YjXwBHlgT59+uDatWuYOHEiKleujHbt2omOREQ6ICkpCe3btwcAxMXFwdCQv5aJ6PMiIyPRqVMnVK5cGX5+fuw4WcDwCBxRHpk7dy7atWuHXr164fbt26LjEBERkR5KSkpCly5d8P79e+zatQvm5uaiI1E+YwFHlEfkcjk2bdqE4sWLo2PHjnj79q3oSERERKRHJEnC8OHDcfHiRezcuRPFixcXHYkEYAFHlIesra0REBCAN2/eoEePHrymhYiIiPLMr7/+Cj8/P6xZswb169cXHYcEYQFHlMfKli2Lbdu24fjx4xg7diw7UxIREVGu7d+/H+PHj8e3336Lvn37io5DArGAI9KA5s2b4/fff8fSpUuxaNEi0XGIiIhIh4WEhKBbt27w9vbG3LlzRcchwdjuikhDhg4dimfPnmHSpEmwt7fHgAEDREciIiIiHXPnzh14eXmhRo0a2Lx5M+RyuehIJBgLOCINmjVrFiIjIzFo0CDY2dmhQ4cOoiMRkRYxNjbG0qVLVT8TEX3s2bNnaN26NRwcHLBnzx52nCQAgEziBTrCxMTEwMbGBtHR0bC2thYdhzQkNTUVPXr0wN69e3Ho0CE0btxYdCQiIiLSclFRUWjatCliYmJw9uxZdpwsALJaG/AaOCINk8vl8Pf3R/369dGhQwdcu3ZNdCQiIiLSYvHx8ejQoQMiIiJw8OBBFm+khgUcUT4wNTXFrl274OLigrZt2yI8PFx0JCLSAqmpqTh+/DiOHz+O1NRU0XGISAskJyejZ8+euHLlCoKCglCpUiXRkUjLsIAjyic2NjbYv38/TE1N0aZNG7x69Up0JCISLDExEc2bN0fz5s2RmJgoOg4RCSZJEgYNGoT9+/djx44dcHd3Fx2JtBALOKJ85OTkhIMHDyI6OhpeXl6IjY0VHYmIiIi0xKRJk7BhwwZs2LABbdq0ER2HtBQLOKJ8VrZsWezbtw937txBly5dkJSUJDoSERERCfbTTz9h0aJF+OWXX/Dll1+KjkNajAUckQC1atVCQEAATp48CV9fXyiVStGRiIiISJA///wTEyZMwNSpUzF69GjRcUjLsYAjEsTDwwN///03tm7ditGjR4MzehARERU8QUFB+Oqrr/D111/jxx9/FB2HdAALOCKBunTpghUrVmDp0qWYM2eO6DhERESUj86ePYvu3bujQ4cOWLFiBWQymehIpAMMRQcgKugGDx6MyMhITJs2DUWKFMGwYcNERyIiIiINu3btGtq3b4+6deti06ZNMDTkx3LKGu4pRFrgu+++w+vXrzF8+HAYGRnh66+/Fh2JiPKBkZERFi5cqPqZiAqGGzduoGXLlnBxccHu3bthZmYmOhLpEBZwRFpAJpNhyZIlSE5OxuDBg2FgYICvvvpKdCwi0jBjY2NMnDhRdAwiykc3b95EixYt4OzsjMOHD8PW1lZ0JNIxLOCItIRMJsPSpUuhVCrx9ddfQy6Xw9fXV3QsIiIiyiO3b99GixYt4OjoiMOHD6NIkSKiI5EOYgFHpEVkMhmWLVuG1NRUDBgwAAYGBujbt6/oWESkIampqbh06RIAoHbt2pDL5YITEZGm3LlzB82bN4e9vT2OHDkCOzs70ZFIR7GAI9IyBgYGWLlyJZRKJfr37w+5XM4JPYn0VGJiIurVqwcAiIuLg4WFheBERKQJd+/eRfPmzVG4cGEcOXIE9vb2oiORDmMBR6SFDAwMsHr1aiiVSvTt2xcymQy9e/cWHYuIiIiy6d69e2jevDlsbGxw9OhRODg4iI5EOo4FHJGWMjAwwJo1a6BUKtGnTx+kpqaiT58+omMRERFRFt25cwctWrSAlZUVjh49CkdHR9GRSA+wgCPSYnK5HGvXroWhoSH69euH5ORkDBgwQHQsIiIi+oy0bpNFihTBkSNH4OTkJDoS6QkWcERaTi6XY/Xq1TAyMsJXX32lmmqAiIiItNPVq1fh6emJokWL4vDhw7zmjfIUCzgiHWBgYIDly5fD2NgYQ4YMwfv37/HNN9+IjkVERET/cenSJbRq1QouLi44dOgQpwqgPMcCjkhHyGQy/PLLLzA2NsbIkSPx/v17jBs3TnQsIiIi+n8XLlxAmzZtUKFCBezfvx+FChUSHYn0EAs4Ih0ik8mwcOFCGBsbY/z48UhKSsKUKVNExyKiHDIyMsKMGTNUPxOR7jp79izatWuHatWqYe/evbCxsREdifQUCzgiHSOTyfDjjz/C2NgYU6dOxZs3b7Bw4UIYGBiIjkZE2WRsbIyZM2eKjkFEuRQUFITu3bujbt262LNnD6ysrERHIj3GAo5IB8lkMsyYMQOFCxfG6NGjERERAT8/PxgbG4uORkREVKCsW7cOgwYNQvv27fH333/DzMxMdCTSc/zKnkiHjRw5Ev/88w+2bt2K9u3bIzY2VnQkIsoGpVKJGzdu4MaNG1AqlaLjEFE2SJKEuXPn4quvvsLAgQOxbds2Fm+UL1jAEem47t27Y//+/QgODkbz5s3x8uVL0ZGIKIsSEhJQrVo1VKtWDQkJCaLjEFEWpaamYuTIkfjuu+/www8/YOXKlTA05IltlD9YwBHpgebNm+PkyZN4/vw5GjVqhPv374uOREREpJcSExPRq1cvrFixAqtWrcL06dMhk8lEx6ICRO8LuKioKPj4+MDa2hq2trYYOHAg4uLiMhwfHh4OmUyW7m3r1q2qcek9vnnz5vx4SUTpqlmzJs6ePQu5XI6G
DRsiNDRUdCQiIiK9Eh0djbZt22LPnj3YsWMHBg8eLDoSFUB6X8D5+Pjgxo0bOHToEPbs2YOTJ09m+p+tRIkSePHihdrthx9+gKWlJdq1a6c2dt26dWrjvvjiCw2/GqLMlSpVCmfOnEHp0qXh4eGBQ4cOiY5ERESkF54/f46mTZvi6tWrOHz4MDp16iQ6EhVQen2y7q1bt7B//36EhITAzc0NAPD777/Dy8sLixcvhrOz8yfLyOVyODk5qd23c+dO9OjRA5aWlmr329rafjKWSDQ7OzscOXIEPXr0gJeXF9avXw8fHx/RsYiIiHTW7du30aZNGyiVSpw6dQpVq1YVHYkKML0+Anfu3DnY2tqqijcA8PT0hIGBAYKDg7O0jtDQUFy5cgUDBw785LERI0bAzs4O9erVg5+fHyRJynRdSUlJiImJUbsRaYKFhQV27dqFPn36oE+fPvjpp59ERyIiItJJ58+fR+PGjWFlZYVz586xeCPh9PoIXEREBBwcHNTuMzQ0ROHChREREZGldaxduxaVK1dGw4YN1e6fNWsWWrRoAXNzcxw8eBDDhw9HXFwcRo0aleG65s2bhx9++CH7L4QoB4yMjODn5wdnZ2dMmDABz58/x6JFizjhNxERURalTdBdp04dBAQEoFChQqIjEelmATd58mQsWLAg0zG3bt3K9fMkJCRg06ZNmDZt2iePfXxfrVq1oFAosGjRokwLuClTpmDcuHGqv8fExKBEiRK5zkmUEZlMhjlz5qBo0aIYNWoUIiIisG7dOk74TaQljIyMMGHCBNXPRKQ9/Pz8MHjwYHTs2BH+/v6c4420hk4WcOPHj0f//v0zHVOmTBk4OTkhMjJS7f6UlBRERUVl6dq1bdu2IT4+Hv369fvsWHd3d8yePRtJSUkwMTFJd4yJiUmGjxFp0jfffANHR0f06dMHL1++xNatW/ktIpEWMDY2xqJFi0THIKKPKJVKzJ49GzNnzsSQIUOwbNkyyOVy0bGIVHSygLO3t4e9vf1nxzVo0ADv3r1DaGgo6tSpAwA4evQolEol3N3dP7v82rVr0bFjxyw915UrV1CoUCEWaKS1unfvDnt7e3Tt2hX16tXD7t27UaVKFdGxiIiItEZcXBz69++P7du348cff8TUqVM5xxtpHb2+GKZy5cpo27YtBg0ahAsXLuDMmTP45ptv0KtXL1UHymfPnqFSpUq4cOGC2rL37t3DyZMn8fXXX3+y3sDAQPzxxx+4fv067t27hxUrVmDu3LkYOXJkvrwuopzy8PBASEgITE1NUb9+fQQGBoqORFSgKZVKhIeHIzw8HEqlUnQcogLt4cOHaNSoEQ4cOIBdu3bhu+++Y/FGWkmvCzgA8Pf3R6VKldCyZUt4eXmhcePGWL16terx5ORk3LlzB/Hx8WrL+fn5oXjx4mjduvUn6zQyMsKyZcvQoEEDuLq6YtWqVfj5558xY8YMjb8eotwqU6YMzp07B09PT3Tq1Alz5879bAdVItKMhIQElC5dGqVLl0ZCQoLoOEQF1rFjx1C3bl3ExcXh3LlznOONtJpM4ic3YWJiYmBjY4Po6GhYW1uLjkMFzMfn+Hfv3h3r1q2DhYWF6FhEBYpCoVDNMRoXF8f/g0T5TJIkLFu2DGPGjEHz5s3xzz//oHDhwqJjUQGV1dpA74/AEVH6DAwMMGPGDGzfvh179+5F48aN8ejRI9GxiIiI8sX79+8xePBgjBw5EiNHjsS+fftYvJFOYAFHVMB16dIF586dQ3R0NNzc3HDy5EnRkYiIiDTq5cuXaNGiBf7880+sW7cOS5YsgaGhTvb2owKIBRwRoXr16ggJCUGNGjXQsmVLrFy5UnQkIiIijQgNDYWbmxvu37+PEydOfHZqKiJtwwKOiAAARYoUwf79+zF8+HAMGzYMQ4cOxfv370XHIiIiyjObNm1C48aN4ezsjIsXL6J+/fqiIxFlGws4IlIxMjLCr7/+irVr18LPzw+enp6IjIwUHYuIiChXUlNTMWnSJPj4+KB79+44ceIEihUrJjoWUY7wZF8i+sRXX32FSpUqoUuXLnBzc8Pu3btRq1Yt0bGI9I6hoSGGDx+u+pmI8t67d+/w5Zdf4sCBA/j5558xZswYzu9GOo3TCAjEaQRI2z19+hSdO3fGjRs3sGLFCvj6+oqORERElGX//vsvevTogcjISPzzzz/pzu9LpC04jQAR5Vrx4sVx8uRJ9OrVC/3790e/fv0QGxsrOhYREVGmJEnC8uXL4e7uDjMzM1y4cIHFG+kNFnBElCkzMzP4+flh48aN2LlzJ+rUqYPLly+LjkWkFyRJwqtXr/Dq1SvwhBiivPH27Vt069YNI0aMwNdff43z58+jfPnyomMR5RkWcESUJT4+Prh06RKsrKxQv359/Pbbb/zASZRL8fHxcHBwgIODA+Lj40XHIdJ5Z8+ehaurK44dO4YdO3Zg6dKlMDU1FR2LKE+xgCOiLCtfvjzOnj2L4cOHY/To0ejUqRPevHkjOhYRERVwSqUS8+bNQ9OmTVGiRAlcuXIFnTt3Fh2LSCNYwBFRtpiYmGDJkiUICAjAmTNn4OrqilOnTomORUREBVRERATatGmD7777DpMnT8bx48dRsmRJ0bGINIYFHBHlSIcOHfDvv/+iTJky8PDwwOzZs5Gamio6FhERFSAHDx5EzZo1cf36dRw6dAg//vgjp+QgvccCjohyrHjx4jh69CimTZuGmTNnolWrVnj+/LnoWEREpOeSk5MxZcoUtGnTBrVq1cKVK1fQsmVL0bGI8gULOCLKFblcjpkzZ+LIkSO4c+cOatasiX379omORUREeio8PBxNmzbF4sWLsXDhQuzduxeOjo6iYxHlGxZwRJQnPDw88O+//8Ld3R1eXl6YMGEC3r9/LzoWERHpke3bt8PV1RURERE4ffo0Jk6cCAMDfpylgoV7PBHlGTs7OwQGBuLnn3/Gb7/9hsaNG+POnTuiYxFpLUNDQ/j6+sLX15fX7RBlIi4uDkOHDkW3bt3QunVrXL58Ge7u7qJjEQkhkziRkzAxMTGwsbFBdHQ0rK2tRcchylMXL17El19+iSdPnmD27NkYO3Ys5HK56FhERKRjjh49ioEDByIyMhI///wzBg8eDJlMJjoWUZ7Lam3AI3BEpBFubm64cuUKhg8fjm+//RaNGzfGrVu3RMciIiIdERsbi2HDhqFly5YoVaoUrl27hiFDhrB4owKPBRwRaYy5uTl++uknnD59Gm/fvkWtWrWwYMECpKSkiI5GpBUkSYJCoYBCoQBPiCH6n8OHD6NatWr466+/sGzZMhw5cgRlypQRHYtIK7CAIyKNa9iwIS5fvoxRo0Zh6tSpaNiwIW7cuCE6FpFw8fHxsLS0hKWlJeLj40XHIRIuOjoagwcPRqtWrVCuXDlcv34dw4cPZ6MSoo/wfwMR5QszMzMsXLgQZ8+eRVxcHGrXro25c+fyaBwREQEA9u/fj2rVquHvv//GqlWrcPjwYZQqVUp0LCKtwwKOiPKVu7s7Ll26hHHjxmHatGlwd3fH1atXRcc
iIiJB3r17h6+++grt2rVD5cqVcf36dTYqIcoECzgiynempqaYN28ezp8/j6SkJLi5uWH27NlITk4WHY2IiPJRUFAQqlatiu3bt+OPP/7AgQMH4OLiIjoWkVZjAUdEwtStWxehoaH49ttv8cMPP6BevXq4cuWK6FhERKRhb9++Rf/+/dG+fXvUqFED169fx8CBA3nUjSgLWMARkVAmJib48ccfERwcjNTUVNStWxczZszA+/fvRUcjIiINCAgIQNWqVbFr1y6sW7cOe/fuRYkSJUTHItIZLOCISCvUqVMHFy9exHfffYe5c+fC1dUVR44cER2LiIjyyKNHj9C1a1d06tQJtWvXxo0bN9C/f38edSPKJhZwRKQ1jI2NMXPmTISGhqJIkSLw9PREjx498OTJE9HRiDRCLpejW7du6NatG+Ryueg4RBqRmJiI2bNno3Llyjh//jw2bdqEwMBAFCtWTHQ0Ip0kkzhzqDAxMTGwsbFBdHQ0rK2tRcch0iqSJGHTpk2YMGECYmJi8N1332H8+PEwMTERHY2IiLJoz549GD16NB4/foxx48bh+++/h5WVlehYRFopq7UBj8ARkVaSyWTw8fHBnTt3MGzYMMyYMQPVqlXD3r17RUcjIqLPuHfvHtq3b48OHTqgbNmyuHbtGhYsWMDijSgPsIAjIq1mbW2NxYsX499//4WLiwu8vb3RsWNHPHjwQHQ0IiL6D4VCge+//x5Vq1bFtWvXsGPHDhw4cACVKlUSHY1Ib7CAIyKdUKVKFRw6dAhbt27FlStXUKVKFcyYMQPx8fGioxHlmEKhgEwmg0wmg0KhEB2HKMckScK2bdtQuXJlLF68GJMmTcKtW7fQuXNnNikhymMs4IhIZ8hkMnTr1g23bt3ChAkTMH/+fFSpUgU7d+4EL+clIhLj1q1baN26Nbp37w5XV1fcuHEDs2bNgrm5uehoRHqJBRwR6RwLCwv8+OOPuHHjBqpWrYouXbqgbdu2uHPnjuhoREQFRmxsLCZOnIgaNWrg4cOH2LNnDwICAlC2bFnR0Yj0Ggs4ItJZ5cqVQ1BQEAICAnD37l1Ur14dkyZNQkxMjOhoRER6S6lUwt/fHxUrVsSyZcswc+ZMXL9+Hd7e3qKjERUILOCISOd16NABN2/exLRp0/Dbb7+hbNmy+OWXX5CYmCg6GhGR3pAkCQcOHICbmxv69OmDRo0a4fbt2/juu+9gamoqOh5RgcECjoj0gqmpKaZNm4a7d++ic+fOmDBhAipWrIh169YhJSVFdDwiIp12/vx5tGjRAm3btoW5uTlOnjyJrVu3omTJkqKjERU4LOCISK8UL14cq1evxo0bN+Du7o6vvvoKNWrUYKMTIqIcuHHjBr744gs0aNAAb968wZ49e3Dq1Ck0adJEdDSiAosFHBHppYoVK2LLli0ICQlB8eLF0aVLF9SvXx9Hjx4VHY1IRS6Xw8vLC15eXpDL5aLjEKmEh4fD19cX1atXx9WrV7Fx40ZcvnwZ3t7enBaASDAWcESk19zc3HDw4EEcOXIEANCyZUu0bt0aFy9eFJyM6MOpv0FBQQgKCuI1RKQVIiMjMXr0aFSoUAEHDhzA77//jtu3b8PHx4dfMhBpCRZwRFQgtGjRAufPn8eOHTvw9OlT1K1bF927d+fUA0REAGJiYjBjxgyUKVMG69evx8yZM3H//n2MGDECxsbGouMR0UdYwBFRgSGTydC5c2dcu3YN69atw4ULF1C1alUMGjQIT58+FR2PiCjfJSYm4ueff0aZMmWwcOFCjBgxAg8fPsTUqVNhYWEhOh4RpYMFHBEVOHK5HP3790dYWBh++ukn7Nq1C+XKlcOECRPw+vVr0fGoAFEoFLCwsICFhQUUCoXoOFSAJCcnw8/PD+XLl8e3336Lrl274t69e1iwYAEKFy4sOh4RZYIFHBEVWCYmJhg9ejQePHiAKVOmYNWqVXBxccGYMWPw+PFj0fGogIiPj0d8fLzoGFRAxMfHY+nSpShfvjwGDhyIRo0a4ebNm1i1ahWKFSsmOh4RZQELOCIq8KysrDBjxgw8fPgQEydOxJ9//omyZcuif//+uHnzpuh4RES59vbtW8yZMwelSpXC6NGj0ahRI/z777/YvHkzKlSoIDoeEWUDCzgiov9nZ2eHmTNn4vHjx1i4cCEOHz6MqlWronPnzggODhYdj4go254/f46JEyeiZMmS+PHHH9G9e3fcvXsX/v7+qFGjhuh4RJQDLOCIiP7D0tISY8eOxYMHD+Dn54dbt26hfv36aN68OQ4cOMAJwYlI6929exeDBw9G6dKlsWbNGowaNQrh4eFYtmwZypQpIzoeEeUCCzgiogwYGxtjwIABuHHjBrZv3w6FQoG2bduiTp062LJlC1JTU0VHJCJSExoaih49eqBixYoIDAzE7Nmz8ejRI8yZMweOjo6i4xFRHmABR0T0GXK5HF26dEFwcDCOHDkCOzs79OzZE5UqVcLq1auRlJQkOiIRFWCSJOHYsWNo3bo13NzccOnSJaxcuRIPHz7Et99+CxsbG9ERiSgPsYAjIsoimUyGFi1a4ODBg7h48SJcXV0xdOhQlC5dGosWLUJMTIzoiKRjDAwM0KxZMzRr1gwGBvyVTNmjVCqxc+dO1K9fHy1atMCrV6+wefNm3L59G4MHD4apqanoiESkATKJF3MIExMTAxsbG0RHR8Pa2lp0HCLKgbCwMCxatAgbNmyAubk5fH19MWzYMFSqVEl0NCLSU2/fvsX69euxcuVKhIWFwcPDA5MnT0br1q0hk8lExyOiHMpqbcCv+4iIcqFChQpYs2YNHj58iGHDhuHvv/9G5cqV0aJFC2zduhXJycmiIxKRnggJCcFXX32FYsWKYdKkSahduzbOnTuHY8eOoU2bNizeiAoIFnBERHmgWLFimDdvHp48eYJNmzYhOTkZPXr0gIuLC6ZPn46nT5+KjkhEOig+Ph5+fn5wc3NDvXr1cOTIEUybNg1PnjzB33//jfr164uOSET5jAUcEVEeMjExQe/evXHq1ClcvXoVnTt3xpIlS+Di4oLOnTvj4MGDUCqVomOSllAoFLC3t4e9vT0UCoXoOKRF7ty5gzFjxqBYsWL4+uuv4ejoiMDAQDx48ABTpkxhR0miAowFHBGRhlSvXh3Lli3D8+fPsWzZMjx48ABt2rRBxYoV8dNPP+HNmzeiI5IWeP36NV6/fi06BmmB5ORkbNu2DS1btkSlSpXg7++PIUOG4P79+wgKCkL79u0hl8tFxyQiwVjAERFpmJWVFYYOHYorV67g9OnTcHd3x9SpU1GsWDH4+vri/PnznBycqAB7+vQpZsyYARcXF3Tv3h1JSUnYuHEjnj59ivnz56N06dKiIxKRFmEXSoHYhZKo4Hr16hX8/PywatUqPHz4ELVq1cLQoUPRo0cP2Nraio5H+UShUMDS0hIAEBcXBwsLC8GJKL+kpKTg0KFDWLNmDQICAmBmZoY+ffpg2LBhqFGjhuh4RCQAu1ASEWkxe3t7TJo0Cffu3cPevXtRrFgxDBs2DI6OjujSpQ
u2b9+OxMRE0TGJKA9JkoTg4GCMHDkSzs7O8PLywt27d/Hbb7/h2bNnWLFiBYs3IvosHoETiEfgiOhjL168wObNm+Hv74/Q0FDY2Niga9eu8PHxQbNmzXjtix7iEbiCISwsDP7+/vD398f9+/dRtGhR9O7dGz4+PqhVqxbb/xMRgKzXBizgBGIBR0QZuX37NjZt2gR/f388ePAAxYoVQ69eveDj4wNXV1d+4NMTLOD0V0REhOoLmYsXL8La2lr1hYyHhwe/kCGiT7CA0wEs4IjocyRJwvnz5+Hv749//vkHr1+/RpUqVeDj44Mvv/wSpUqVEh2RciEhIQFNmzYFAJw8eRJmZmaCE1FuxMbGYseOHfD398eRI0cgl8vh7e0NHx8feHt789+XiDLFAk4HsIAjouxITk7GoUOH4O/vj127diE+Ph6NGjWCj48PunfvDjs7O9ERiQqc9+/f48CBA/D390dAQICqKPfx8UG3bt1QuHBh0RGJSEewgNMBLOCIKKfi4uKwe/du+Pv74+DBg5DJZGjbti2++OILtG/fnpP8EmlQQkICjh49ioCAAGzbtg1RUVGoXr06fHx80Lt3b5QsWVJ0RCLSQSzgdAALOCLKC5GRkdiyZQs2b96Ms2fPAgDq1auHjh07okOHDqhWrRqvmSPKpYiICAQFBSEwMBCHDh1CfHw8ypYti27dusHHxwfVq1cXHZGIdBwLOB3AAo6I8tqrV6+wd+9eBAYG4sCBA4iLi0OpUqXQoUMHdOjQAc2aNYOxsbHomPT/4uPjUaVKFQDAzZs3YW5uLjgRpZEkCdeuXUNgYCACAwMRHBwMAwMDNGjQAB06dEDHjh1RqVIlfjlCRHmGBZwOYAFHRJqUlJSE48ePIyAgAIGBgXjy5AmsrKzQtm1bdOjQAV5eXihSpIjomAUau1Bql/fv3+PEiROq/zOPHj2CpaUl2rRpo/o/Y29vLzomEekpFnA6gAUcEeUXSZJw9epV1QfTkJAQGBgYoFGjRqqjCRUrVhQds8BhASfemzdvsHfvXgQEBODAgQOIjY1FiRIlVKcge3h4wMTERHRMIioAWMDpABZwRCTKixcvsGfPHtX1PImJiShfvjzatm2L5s2bo2nTpjw6lw9YwOW/pKQkBAcH4/jx4zh06BDOnj0LpVKJunXrqoq2GjVq8NRIIsp3LOB0AAs4ItIG8fHxOHLkCAIDA3HkyBE8ePAAAFCjRg14eHjAw8ODBZ2GsIDTvI8LtuPHj+PcuXNITEyEra0tmjVrhvbt28Pb2xtFixYVHZWICjgWcDqABRwRaaPHjx/jxIkTqg+8aQVd9erV1Qo6zjuXeyzg8l5iYqKqYDtx4oRawda0aVPVPlyjRg3I5XLRcYmIVFjA6QAWcESkC1jQaQ4LuNz7uGBLO8KWlJSkOsKWto9Wr16dBRsRaTUWcDqABRwR6aLMCrpmzZrB3d0dderUQYUKFfiB+TPi4+NRt25dAEBISAinEciC58+f49KlSwgJCcHJkydZsBGR3mABpwNYwBGRPnjy5ImqoDtx4gTu3bsHALCwsECtWrVQu3Zt1KlTB3Xq1EGlSpX4oZqyRJIkPHv2DJcuXUJoaKjqFhERAQAoUqQIGjduzIKNiPQGCzgdwAKOiPTR27dvcfnyZbUP3WlFnbm5OVxdXVUFXe3atVG5cmUYGhoKTk0iSZKEp0+fqu0zoaGhiIyMBADY29ur7TN16tRByZIl2SmSiPQKCzgdwAKOiAqK6OjoT4q6sLAwAICZmRlq1qyp9gG9YsWKMDU1FZyaNCE1NRWPHz9W2x8uXbqEV69eAQAcHR0/KdaKFy/OYo2I9B4LOB3AAo6ICrKYmBhcuXJFrai7c+cOJEmCTCaDi4sLKlSo8MmtZMmSenOqnL5eAydJEl69eoWwsLBPbvfu3UNSUhIAoGjRomqFWp06deDs7MxijYgKJBZwOoAFHBGRutjYWFy9ehV37tzJ8EO/sbExypUr90lhV758eTg6OurUh39d70IZGxuLu3fvpluoRUdHA0C6xXj58uVRs2ZNzr1GRPQRFnD/b86cOQgKCsKVK1dgbGyMd+/efXYZSZIwY8YMrFmzBu/evUOjRo2wYsUKlC9fXjUmKioKI0eORGBgIAwMDNC1a1f8+uuvql/EWcECjogoa1JTU/HkyZN0C4Xw8HCk/SqzsrJSFQklSpRA0aJFUbRoUTg7O6t+1qYiSZsLuOTkZERERODFixdqt2fPnuHevXsICwvDixcvVOMdHBzSPWJatmxZng5LRJQFLOD+34wZM2Bra4unT59i7dq1WSrgFixYgHnz5mHDhg0oXbo0pk2bhmvXruHmzZuqX0Lt2rXDixcvsGrVKiQnJ2PAgAGoW7cuNm3alOVsLOCIiHIvMTERDx48+KSwe/bsGV68eIGEhAS18VZWVmoFXXpFXtGiRWFtba3xo3kiCrjExERERETg+fPnnxRnH9/3+vVrfPwRQS6Xw9HREc7Ozp8cAS1fvjxsbW01np2ISJ+xgPuP9evXY8yYMZ8t4CRJgrOzM8aPH48JEyYA+HDxvaOjI9avX49evXrh1q1bqFKlCkJCQuDm5gYA2L9/P7y8vPD06VM4OztnKRMLOCIizZIkCTExMZ8UK+n9PS4uTm1Zc3NzFClSBFZWVrC0tISVlVWOfrawsIBcLoeBgYHaTSaTpVvASZIESZKQmpoKpVKpuiUkJCAuLg6xsbGIjY3N1s9pf7579w5v375Ve51GRkaZFrJpf7ezs9Obaw+JiLRRVmsD9m3+j4cPHyIiIgKenp6q+2xsbODu7o5z586hV69eOHfuHGxtbVXFGwB4enrCwMAAwcHB6Ny5c7rrTkpKUl3DAXz4RyIiIs2RyWSwsbGBjY0NKleunOnYuLi4Twq8qKioTwqhR48efXKfQqHIcb401tbWquItu0xMTNItIC0tLeHk5KT62cbG5pMjj0WKFNGp6waJiAo6FnD/kTZBqKOjo9r9jo6OqsciIiLg4OCg9rihoSEKFy6sGpOeefPm4YcffsjjxERElBcsLS1Rvnx5teudsyo1NRUKheKTI16xsbFQKBRqR9I+PrKWkJCA0aNHAwB+/vlnmJubf3KkLu1mamqa4dE+IyOjvN4cRESkpXSygJs8eTIWLFiQ6Zhbt26hUqVK+ZQoa6ZMmYJx48ap/h4TE4MSJUoITERERHlBLpfD2to626fDx8fH4+effwYADBo0SG+mESAiIs3RyQJu/Pjx6N+/f6ZjypQpk6N1Ozk5AQBevnyp1t745cuXcHV1VY2JjIxUWy4lJQVRUVGq5dNjYmICExOTHOUiIiL9Y25ujvDwcNExiIhIh+hkAWdvbw97e3uNrLt06dJwcnLCkSNHVAVbTEwMgoODMWzYMABAgwYN8O7dO4SGhqJOnToAgKNHj0KpVMLd3V0juYiIiIiIiAxEB9C0x48f48qVK3j8+DFSU1Nx5coVXLlyRa3bWKVKlbBz504AHy4oHzNmDH788UcEBATg2rVr6NevH5ydnfHFF18AACpXroy2bdti0KBBu
HDhAs6cOYNvvvkGvXr1ynIHSiIiIiIiouzSySNw2TF9+nRs2LBB9fdatWoBAI4dOwYPDw8AwJ07dxAdHa0a8+2330KhUGDw4MF49+4dGjdujP3796tNROrv749vvvkGLVu2VE3k/dtvv+XPiyIiIr2QkJCApk2bAgBOnjwJMzMzwYmIiEjbFZh54LQR54EjIirYREzkTURE2imrtYHen0JJRERERESkL1jAERERERER6QgWcERERERERDqCBRwREREREZGOYAFHRERERESkI/R+GgEiIiJtZmdnJzoCERHpEBZwREREglhYWODVq1eiYxARkQ7hKZREREREREQ6ggUcERERERGRjmABR0REJEhCQgI8PDzg4eGBhIQE0XGIiEgH8Bo4IiIiQZRKJU6cOKH6mYiI6HN4BI6IiIiIiEhHsIAjIiIiIiLSESzgiIiIiIiIdAQLOCIiIiIiIh3BAo6IiIiIiEhHsAslERGRQObm5qIjEBGRDmEBR0REJIiFhQUUCoXoGEREpEN4CiUREREREZGOYAFHRERERESkI1jAERERCZKYmAhvb294e3sjMTFRdBwiItIBvAaOiIhIkNTUVOzdu1f1MxER0efwCBwREREREZGOYAFHRERERESkI1jAERERERER6QgWcERERERERDqCBRwREREREZGOYBdKgSRJAgDExMQITkJERCIoFArVzzExMexESURUgKXVBGk1QkZYwAkUGxsLAChRooTgJEREJJqzs7PoCEREpAViY2NhY2OT4eMy6XMlHmmMUqnE8+fPYWVlBZlMJjRLTEwMSpQogSdPnsDa2lpoFn3E7atZ3L6axe2rWdy+msXtq1ncvprF7atZ2rZ9JUlCbGwsnJ2dYWCQ8ZVuPAInkIGBAYoXLy46hhpra2ut2IH1FbevZnH7aha3r2Zx+2oWt69mcftqFrevZmnT9s3syFsaNjEhIiIiIiLSESzgiIiIiIiIdAQLOAIAmJiYYMaMGTAxMREdRS9x+2oWt69mcftqFrevZnH7aha3r2Zx+2qWrm5fNjEhIiIiIiLSETwCR0REREREpCNYwBEREREREekIFnBEREREREQ6ggUcERERERGRjmABV0DMmTMHDRs2hLm5OWxtbbO0jCRJmD59OooWLQozMzN4enri7t27amOioqLg4+MDa2tr2NraYuDAgYiLi9PAK9Bu2d0O4eHhkMlk6d62bt2qGpfe45s3b86Pl6RVcrKfeXh4fLLthg4dqjbm8ePH8Pb2hrm5ORwcHDBx4kSkpKRo8qVopexu36ioKIwcORIVK1aEmZkZSpYsiVGjRiE6OlptXEHef5ctW4ZSpUrB1NQU7u7uuHDhQqbjt27dikqVKsHU1BTVq1fH3r171R7PyvtxQZKd7btmzRo0adIEhQoVQqFCheDp6fnJ+P79+3+yr7Zt21bTL0NrZWf7rl+//pNtZ2pqqjaG+6+67Gzf9H6XyWQyeHt7q8Zw//2fkydPokOHDnB2doZMJsOuXbs+u8zx48dRu3ZtmJiYoFy5cli/fv0nY7L7nq5xEhUI06dPl37++Wdp3Lhxko2NTZaWmT9/vmRjYyPt2rVL+vfff6WOHTtKpUuXlhISElRj2rZtK9WsWVM6f/68dOrUKalcuXJS7969NfQqtFd2t0NKSor04sULtdsPP/wgWVpaSrGxsapxAKR169apjft4+xcUOdnPmjVrJg0aNEht20VHR6seT0lJkapVqyZ5enpKly9flvbu3SvZ2dlJU6ZM0fTL0TrZ3b7Xrl2TunTpIgUEBEj37t2Tjhw5IpUvX17q2rWr2riCuv9u3rxZMjY2lvz8/KQbN25IgwYNkmxtbaWXL1+mO/7MmTOSXC6XFi5cKN28eVP6/vvvJSMjI+natWuqMVl5Py4osrt9v/zyS2nZsmXS5cuXpVu3bkn9+/eXbGxspKdPn6rG+Pr6Sm3btlXbV6OiovLrJWmV7G7fdevWSdbW1mrbLiIiQm0M99//ye72ffPmjdq2vX79uiSXy6V169apxnD//Z+9e/dK3333nbRjxw4JgLRz585Mxz948EAyNzeXxo0bJ928eVP6/fffJblcLu3fv181Jrv/ZvmBBVwBs27duiwVcEqlUnJycpIWLVqkuu/du3eSiYmJ9Pfff0uSJEk3b96UAEghISGqMfv27ZNkMpn07NmzPM+urfJqO7i6ukpfffWV2n1ZefPRdzndvs2aNZNGjx6d4eN79+6VDAwM1D5orFixQrK2tpaSkpLyJLsuyKv9d8uWLZKxsbGUnJysuq+g7r/16tWTRowYofp7amqq5OzsLM2bNy/d8T169JC8vb3V7nN3d5eGDBkiSVLW3o8Lkuxu3/9KSUmRrKyspA0bNqju8/X1lTp16pTXUXVSdrfv5z5XcP9Vl9v9d8mSJZKVlZUUFxenuo/7b/qy8jvo22+/lapWrap2X8+ePaU2bdqo/p7bfzNN4CmUlK6HDx8iIiICnp6eqvtsbGzg7u6Oc+fOAQDOnTsHW1tbuLm5qcZ4enrCwMAAwcHB+Z5ZlLzYDqGhobhy5QoGDhz4yWMjRoyAnZ0d6tWrBz8/P0gFbOrG3Gxff39/2NnZoVq1apgyZQri4+PV1lu9enU4Ojqq7mvTpg1iYmJw48aNvH8hWiqv/h9HR0fD2toahoaGavcXtP33/fv3CA0NVXvvNDAwgKenp+q987/OnTunNh74sC+mjc/K+3FBkZPt+1/x8fFITk5G4cKF1e4/fvw4HBwcULFiRQwbNgxv3rzJ0+y6IKfbNy4uDi4uLihRogQ6deqk9h7K/fd/8mL/Xbt2LXr16gULCwu1+7n/5szn3n/z4t9MEww/P4QKooiICABQ+3Cb9ve0xyIiIuDg4KD2uKGhIQoXLqwaUxDkxXZYu3YtKleujIYNG6rdP2vWLLRo0QLm5uY4ePAghg8fjri4OIwaNSrP8mu7nG7fL7/8Ei4uLnB2dsbVq1cxadIk3LlzBzt27FCtN739O+2xgiIv9t/Xr19j9uzZGDx4sNr9BXH/ff36NVJTU9Pdt27fvp3uMhntix+/16bdl9GYgiIn2/e/Jk2aBGdnZ7UPZG3btkWXLl1QunRp3L9/H1OnTkW7du1w7tw5yOXyPH0N2iwn27dixYrw8/NDjRo1EB0djcWLF6Nhw4a4ceMGihcvzv33I7ndfy9cuIDr169j7dq1avdz/825jN5/Y2JikJCQgLdv3+b6PUcTWMDpsMmTJ2PBggWZjrl16xYqVaqUT4n0S1a3b24lJCRg06ZNmDZt2iePfXxfrVq1oFAosGjRIr34AKzp7ftxMVG9enUULVoULVu2xP3791G2bNkcr1dX5Nf+GxMTA29vb1SpUgUzZ85Ue0yf91/STfPnz8fmzZtx/PhxtUYbvXr1Uv1cvXp11KhRA2XLlsXx48fRsmVLEVF1RoMGDdCgQQPV3xs2bIjKlStj1apVmD17tsBk+mft2rWoXr066tWrp3Y/99+ChwWcDhs/fjz69++f6ZgyZcrkaN1OTk4AgJcvX6Jo0aKq+1++
fAlXV1fVmMjISLXlUlJSEBUVpVpel2V1++Z2O2zbtg3x8fHo16/fZ8e6u7tj9uzZSEpKgomJyWfHa7P82r5p3N3dAQD37t1D2bJl4eTk9EkXqZcvXwIA998sbt/Y2Fi0bdsWVlZW2LlzJ4yMjDIdr0/7b0bs7Owgl8tV+1Kaly9fCYlPvAAAFrBJREFUZrg9nZycMh2flffjgiIn2zfN4sWLMX/+fBw+fBg1atTIdGyZMmVgZ2eHe/fuFagPwLnZvmmMjIxQq1Yt3Lt3DwD334/lZvsqFAps3rwZs2bN+uzzFNT9Nycyev+1traGmZkZ5HJ5rv9PaAKvgdNh9vb2qFSpUqY3Y2PjHK27dOnScHJywpEjR1T3xcTEIDg4WPVNW4MGDfDu3TuEhoaqxhw9ehRKpVL1YVmXZXX75nY7rF27Fh07doS9vf1nx165cgWFChXSiw+/+bV901y5cgUAVB8gGjRogGvXrqkVL4cOHYK1tTWqVKmSNy9SIE1v35iYGLRu3RrGxsYICAj4pG14evRp/82IsbEx6tSpo/beqVQqceTIEbWjFB9r0KCB2njgw76YNj4r78cFRU62LwAsXLgQs2fPxv79+9Wu98zI06dP8ebNG7WCoyDI6fb9WGpqKq5du6badtx//yc323fr1q1ISkpCnz59Pvs8BXX/zYnPvf/mxf8JjRDWPoXy1aNHj6TLly+rWtVfvnxZunz5slrL+ooVK0o7duxQ/X3+/PmSra2ttHv3bunq1atSp06d0p1GoFatWlJwcLB0+vRpqXz58gV2GoHMtsPTp0+lihUrSsHBwWrL3b17V5LJZNK+ffs+WWdAQIC0Zs0a6dq1a9Ldu3el5cuXS+bm5tL06dM1/nq0TXa3771796RZs2ZJFy9elB4+fCjt3r1bKlOmjNS0aVPVMmnTCLRu3Vq6cuWKtH//fsne3r7ATiOQne0bHR0tubu7S9WrV5fu3bun1ro6JSVFkqSCvf9u3rxZMjExkdavXy/dvHlTGjx4sGRra6vqeNq3b19p8uTJqvFnzpyRDA0NpcWLF0u3bt2SZsyYke40Ap97Py4osrt958+fLxkbG0vbtm1T21fTfv/FxsZKEyZMkM6dOyc9fPhQOnz4sFS7dm2pfPnyUmJiopDXKFJ2t+8PP/wgHThwQLp//74UGhoq9erVSzI1NZVu3LihGsP993+yu33TNG7cWOrZs+cn93P/VRcbG6v6jAtA+vnnn6XLly9Ljx49kiRJkiZPniz17dtXNT5tGoGJEydKt27dkpYtW5buNAKZ/ZuJwAKugPD19ZUAfHI7duyYagz+f86mNEqlUpo2bZrk6OgomZiYSC1btpTu3Lmjtt43b95IvXv3liwtLSVra2tpwIABakVhQfG57fDw4cNPtrckSdKUKVOkEiVKSKmpqZ+sc9++fZKrq6tkaWkpWVhYSDVr1pRWrlyZ7lh9l93t+/jxY6lp06ZS4cKFJRMTE6lcuXLSxIkT1eaBkyRJCg8Pl9q1ayeZmZlJdnZ20vjx49Xa4BcU2d2+x44dS/f9BID08OFDSZK4//7+++9SyZIlJWNjY6levXrS+fPnVY81a9ZM8vX1VRu/ZcsWqUKFCpKxsbFUtWpVKSgoSO3xrLwfFyTZ2b4uLi7p7qszZsyQJEmS4uPjpdatW0v29vaSkZGR5OLiIg0aNEjohzPRsrN9x4wZoxrr6OgoeXl5SZcuXVJbH/dfddl9f7h9+7YEQDp48OAn6+L+qy6j309p29TX11dq1qzZJ8u4urpKxsbGUpkyZdQ+C6fJ7N9MBJkk6XlPZyIiIiIiIj3Ba+CIiIiIiIh0BAs4IiIiIiIiHcECjoiIiIiISEewgCMiIiIiItIRLOCIiIiIiIh0BAs4IiIiIiIiHcECjoiIiIiISEewgCMiIiIiItIRLOCIiCjPyWQyyGQyzJw5U3QUrXX06FHIZDI4OjoiPj5edJwC5enTpzAxMYGxsTHCwsJExyEiyhYWcEREBdjx48dVxdbHN0NDQxQuXBilS5dG06ZNMXbsWGzfvh3v378XHVkj3r17h0OHDmHOnDno1KkTnJ2dVdvCw8Mjz59PqVRizJgxAIAJEybA3Nw8y8tevXoVRkZGqnz9+/fPk0wxMTFYsGABGjVqhMKFC8PExAQlSpRAt27dsG/fvmyv7/Dhw+jfvz/KlSsHCwsL2NjYoEKFCujWrRtWrFiBuLi4dJeLjIzE0KFDUaxYMZiYmKBs2bKYOnUqFArFZ5+zZ8+ekMlkmDZtWqbjihcvjgEDBiA5ORkTJkzI9msjIhJKIiKiAuvYsWMSgCzf7O3tpdmzZ0vJycmZrjdt/IwZM/LnheRSqVKlMnzNzZo1y/Pn8/f3lwBIdnZ2UlxcXJaXS01NlerVq6eWz9fXN9d5zp49KxUtWjTTf/t+/fpJKSkpn11XVFSU1KlTp8/uS5cvX/5k2cjISKl06dLpjq9fv76UmJiY4fMeOnRIAiCVKlVKio+P/2zO8PBwycjISAIgBQcHf3Y8EZG24BE4IiICAAwbNgzXrl1T3c6dO4e9e/di/vz5aNWqFWQyGV69eoVp06ahUaNGePXqVYbrkiQJkiTpzCmUkiSpfnZ0dET79u01+nxz5swBAAwZMgQWFhZZXm7p0qW4cOECHBwc8izLnTt30K5dO7x48QIGBgYYPHgwDh48iNDQUGzbtg0tW7YEAPz5558YNWpUpuuKjo5Gq1atsHv3bgBA586d4e/vj/PnzyMkJAQ7duzA6NGjUbx48XSXnzx5Mh4+fAgrKyssX74cZ8+exdy5c2FkZITz589j8eLF6S6XnJyMkSNHAgB++eUXmJmZffZ1u7i4oGvXrgCAH3/88bPjiYi0huACkoiIBPr4CNznjpbduHFDqlWrlmp8o0aNpKSkpPwJqmGLFi2Stm3bJj1+/Fh1X9rrzOsjcAcPHlSt+9atW1le7smTJ5KVlZUkk8mkDRs25NkROG9vb9W61q1b98njSqVS6t+/vwRAkslk0oULFzJcV9++fSUAkomJibR79+4MxymVyk+O4iYlJUlmZmYSAGnTpk1qj82dO1cCIJUtWzbd9c2bN08CIHl5eWXySj8VEBAgAZAMDAyk+/fvZ2tZIiJReASOiIiypEqVKjhz5gxq1aoFADhz5gyWLVsmOFXemDBhArp27YoSJUpo/LnWrl0LAKhduzYqVaqU5eVGjBiB2NhY9O/fH02bNs2TLK9evcLevXsBAI0aNUr3ejqZTIYlS5bAwsICkiRhwYIF6a7r9OnT+OuvvwB8OKLVsWPHDJ837TrLj925cwcJCQkwNDRE9+7d1R7r3bs3AOD+/fuIjY1Ve+zJkyf48ccfYWJigl9//TXzF/wfbdu2RZEiRaBUKrFu3bpsLUtEJAoLOCIiyjIzMzP89ddfkMlkAIDFixcjOTn5k3GZdaFcv3696vHw8HC8f/8eP//8M9zc3GBjY4PChQvDw8MDQUFBasvFxsZi4cKFqFWrFqytrWFra4tWrVrhyJEjGnmtmpCYmIiAgAAAUJ2+lxXbtm1
DQEAAihQpgkWLFuVZntDQUNXpo+3atctwnK2tLerXrw8A2LdvX7pdM5cuXQoAsLGxwTfffJPtLNHR0QAAOzu7T4o7Jycn1c8xMTFqj40dOxYKhQITJ05EuXLlsvWcRkZG6NChAwBg8+bN2c5MRCQCCzgiIsqWqlWrolWrVgCA58+fIyQkJMfriomJQdOmTTF+/HiEhoYiJiYGb9++xYkTJ9C+fXssWbIEAPD48WM0aNAAkyZNwpUrVxAbG4vo6GgcPnwYrVq1gr+/f568Nk0LDg5GQkICAKgKos+Jjo5WXXu2cOFCFClSJM/yvHnzRvWzo6NjpmPTHo+Pj8fFixfVHnv//r3qurdWrVrB1NQUAJCamoonT54gPDwciYmJma7fxsYGAPD69WukpqaqPRYREaH62draWvXzwYMHsX37dri4uGDq1KmZrj8jaf8O9+7dw927d3O0DiKi/MQCjoiIss3T01P186lTp3K8nsGDByM0NBTDhw/HoUOHcPHiRfzxxx9wdnYG8OHUxuvXr6NLly548OABJk+ejOPHjyMkJAS//PILbGxsIEkShg0bhsjIyFy/Lk1L21YymQx16tTJ0jKTJk3Cixcv0KRJEwwYMCBP81haWqp+TjsClpGPH79586baY//++6+qQKtevTpiYmIwZswY2NnZoWTJkihdujRsbGzQqlUrHD9+PN31V6hQAaampkhJScHOnTvVHks7Ola2bFlYWVkB+FA0ZrdxSXrq1aun+vnEiRM5WgcRUX4y/PwQIiIidbVr11b9nJuJkC9cuIAdO3bgiy++UN1Xp04d1K1bF7Vq1YJSqUSLFi0QExODEydOwN3dXTXOzc0N5cuXh7e3N2JjY+Hv74+xY8fmOEt+OHv2LACgTJkyqiNOmTlz5gxWr14NIyMjrFixQnXqal6pXLmy6ucTJ05g/Pjx6Y57//49goODVX9//Pix2uMfF3RKpRJubm6fHM16//49Dh8+jCNHjmDevHmYNGmS2uMmJibo0aMH/vzzTwwaNAhRUVGoUaMGTp48iRkzZgAAfH19VeMXL16MsLAwtGvXTm3/ya7q1avDyMgIycnJCA0Nxddff53jdRER5QcegSMiomz7+DS+t2/f5ng9PXr0SPfDd40aNdC4cWMAHxptjBkzRq14S+Pl5QUXFxcAuTsSmF+ePn0KAFmaBuD9+/cYPHgwJEnCuHHjULVq1TzPU6FCBVSsWBEAEBQUhNOnT6c77qeffsLr169Vf/9vI5GoqCjVzwsWLMDdu3fRtm1bXLhwAYmJiYiMjMSKFStUR0wnT56sOuXyYwsWLECJEiXw7t07DBkyRHXa7Pv371G3bl1MnDgRwIcCcs6cOTAxMcFvv/2Wq22QNmk9ADx48CBX6yIiyg8s4IiIKNs+PvXuvx/ms6NXr14ZPlazZs0sjatRowYA3fjwnTZ3XqFChT47dv78/2vv/kKa+t84gL9n6FQCDVr+i22EKYJJFxmaWiGJkkGYuygxJUjsj5jGUsvQNGzZjYWEEEJCf7ywjCyiECuMsi4iUVG05owCZ15U5nRT2PleyDm/Teecfr819+v9gsHxnI+fPdu58fE5n+dzGf39/VCr1aioqPhtMYl7oFmtVqSnp+P69eswGo2YnZ3Fx48fUVxcjPLycvj4+Ei/I67jE5lMJunYbDYjJSUFjx8/RmxsLORyORQKBY4dO4bHjx/Dy2vuT4+zZ8/a7b8HzDUreffuHfLy8hAcHAxvb2+o1WqUlpbi+fPn0tq6oqIiTE1N2TUuMRqNyMvLQ2hoKORyOSIiIlBTU4OZmZklvwMxgbNda0dEtFoxgSMiomWzTdpsm0osV0RExKLXAgMDlzXu3ySSf4pYqVoqgRscHMSlS5cAAPX19fD39/9tMWk0GlRVVQGYaypTUFCAkJAQ+Pj4ICIiAlevXsW6devsNrsW16GJxMRKVFtbizVr1ix4r8TERBw4cAAAMDAwgN7e3gVjQkJCcOPGDYyOjmJmZgYGgwGXL1+W/mnw9OlTPHjwwK5xydjYGOLi4tDY2IgfP35g06ZNGB4exvnz56HRaBYkivOJ98M2ESUiWq2YwBER0bLZPk4nVi9WwlliIlZqXB03v3PhaiQmOvMrWLYEQUB+fj4sFgsyMjKwb9++3x5XRUUFOjo6kJKSArlcLp338/NDdnY2enp6oFQqpfPzE1DbhE6hUEh7BTqSmpoqHS+3g6nFYnHYuKSsrAyfP39GYmIijEajlBwGBQXh0aNHS3YpFe+Ht7f3suIhInIHNjEhIqJl+/Dhg3QsrqGipSkUCkxMTNitGZvv7du3UjfEHTt2ONyfTHwUEwAMBoM0Jjo6GtHR0SuKLTk5GcnJybBYLBgdHYUgCAgLC5MenbRtSjJ/PZ7tBugbN250+j62Y20/hyuuXLmCT58+IS0tTVo7OTMzI33+a9euSRXhqKgonDlzBlqtFk1NTcjOzl50XvF+2FZ9iYhWKyZwRES0bO3t7dKx2GyElqZQKKDX6502frFYLNKx2LTDmc7OTnR2dgIAKisrV5zAieRyOdRq9YLz79+/l45tW+8D9gndUpVQ2+vzN+x2ZmRkBDqdDnK5HPX19dL5wcFBmM1m+Pn52XVHBYCEhAQAQHd3t9O5xfthW2UkIlqt+AglEREtS19fHzo6OgDMVVO2bdvm5og8x5YtWwAAer0eVqvVzdG47tevX3j27BkAID4+3q6KBgAqlUpKfkZGRpyuOdPr9dJxWFiYyzGcOnUK09PT0Gq1UuMS4H/70zlaiylW1JztcTc2NoaJiQkACyuLRESrERM4IiJy2fT0NHJycqQ/0LVa7bKqKH+7pKQkAMDk5CQGBgYcjtm9ezcEQXD6MhgM0vjc3Fzp/IULF35L3DqdTlondvLkSYdjMjMzAcw1QhETfEdaW1ulY1ert0+ePEFbWxtUKhXKy8vtron76Y2Pj9tVLwHgy5cvAJw32rFdh+doqwoiotWGCRwREbmkv78fiYmJ0vq3Xbt24fjx426OyrOICRwwt4n5nyCTySCTyRw+FgkAs7Ozdk1p5rtz5w5qa2sBzN3zrKwsh+OKioqkJi2nT5+Wqlq2bt++jZcvXwIA0tPTF1TyHDGbzSgsLAQA1NXVSY1LRJGRkfD19YXVal2wXvDu3bsAgK1bty46v3gffH19sXPnziXjISJyN/7blIiIAADfvn1DX1+f9LPJZML379/R09ODjo4OtLe3S5W3uLg43Lt37/+ma193d/ei66SMRiOamprszmk0Gru98FylVqsRExMjfadHjhxZQbT/rZ8/f0KpVCIjIwOpqamIjIyEl5cX9Ho9mpub0dbWBmAu9lu3bkEmkzmcR6lUorq6GiUlJejt7cX27dtRWlqKmJgYTExMoLW1FQ0NDQDmKmJ1dXUuxVdbWwu9Xo+0tDRkZGQsuO7j44ODBw+iqakJBQUFmJ6eRkxMDB4+fCjdt5ycnEXnF6uFqampC5JDIqJVSSAior/WixcvBAAuvxQKhVBTUyPMzs46nVccX1lZueDazZs3pesGg2HROSorK6
VxzuTm5goABJVK5cInXvq9XHk5i3sp9fX1AgBh7dq1gslkWtEcBoNBiiU3N9fpWHHcYt/P+Pj4kp83ISFBGBkZcSm2srIyQSaTLTrXhg0bhDdv3rg01/DwsODr6yvI5XJhaGho0XFGo1FQqVQO32/v3r2C1Wp1+HsGg0GKtaWlxaWYiIjcjY9QEhHRAl5eXggICIBSqURSUhKKiopw//59fP36FefOneO6t38hOzsbfn5+mJyclKpb7hQYGIjGxkYcOnQIkZGRCAgIgFwuh1KpRGZmJlpaWvDq1SuoVCqX5tPpdHj9+jUOHz4MtVoNuVyOgIAAxMbG4uLFixgaGkJ8fLxLcxUWFsJsNkOr1WLz5s2LjgsKCkJXVxeOHj2KoKAgeHt7Izw8HFVVVWhtbV20atjc3AxBEBAaGor9+/e7FBMRkbvJBMFJqygiIiL6z504cQINDQ3Ys2eP3ZYM9OdYrVZERUVhaGgIOp0OZWVl7g6JiMglTOCIiIj+MKPRiPDwcJhMJnR1dSEuLs7dIf11mpubkZWVhfXr18NgMKxoTSMRkTvwEUoiIqI/LDg4GMXFxQCA6upqN0fz9xEEATU1NQCAqqoqJm9E5FG4iIGIiMgNSkpKpLWEU1NT8Pf3d3NEf4/R0VFoNBpkZWUhPz/f3eEQES0LH6EkIiIiIiLyEHyEkoiIiIiIyEMwgSMiIiIiIvIQTOCIiIiIiIg8BBM4IiIiIiIiD8EEjoiIiIiIyEMwgSMiIiIiIvIQTOCIiIiIiIg8BBM4IiIiIiIiD8EEjoiIiIiIyEMwgSMiIiIiIvIQ/wCexohSEiOTcgAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3AAAAN5CAYAAABJ5dkfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd1hT98MF8BP2BhURceFe4EKkOHHiQqxbUdx1W7VSq62jttY668A966yjKritGycCLqwTwYWIooBsSO77R1/yMzIkELhJOJ/nySO5ueMEE8jh3vu9EkEQBBAREREREZHa0xE7ABEREREREeUNCxwREREREZGGYIEjIiIiIiLSECxwREREREREGoIFjoiIiIiISEOwwBEREREREWkIFjgiIiIiIiINwQJHRERERESkIVjgiIiIiIiINAQLHBERERERkYZggSMiItFt3boVEolEftu6davYkVTi0+fk5uYmdhzKhja99tzc3BSeCxFpJz2xAxARFZXw8HCEhobixYsXiI+Ph0wmQ4kSJVCiRAnUrl0bDg4O0NXVFTsmEeUiPT0dt2/fxuPHjxEVFYXExEQYGRmhRIkSsLGxQaNGjVCuXDmxYxIRFRoWOCLSanfv3sX69etx8OBBvHr1Ktd5TUxM0KxZMwwcOBA9e/aEqalpEaUkotxIpVLs378fO3bswOnTp5GSkpLr/HZ2dujatSsGDx6Mpk2bFlFKIqKiwUMoiUgrPX/+HD179kS9evXg6+v7xfIGAElJSfjnn38wePBg2NnZYf78+V/8oEjaS5sOrdNkhw8fRo0aNdCvXz8cOXIkT+/JyMhIrF+/Hs2aNUPDhg1x5syZIkhKRFQ0WOCISOscOXIE9erVw4EDB7J9vESJEqhRowacnZ1RuXJlmJiYZJknPj4eM2bMQIsWLQo7LhFlIz09HWPGjEG3bt3w9OnTLI/r6enB1tYW9erVQ8OGDWFra5vtIdC3bt1Cu3btsGrVqqKITURU6HgIJRFplZ07d2Lw4MGQSqUK052cnDB8+HB07twZlSpVyrLcw4cP4efnh7179yI4OFg+/e3bt4WembSXIAhiR9BI6enp6NmzJw4fPqwwXVdXFwMHDkSPHj3Qtm3bLIc5p6en4/z58/D398fu3bsRExMjfyyn9/KQIUMwZMgQlT8HIqLCwj1wRKQ1goKCMGzYMIXyZmlpie3bt+PGjRsYM2ZMtuUNAGrWrInvv/8eQUFBOHDgAGrWrFlUsYnoM9OmTctS3po3b447d+5g69at6NatW7bnqOrr66N9+/ZYuXIlnj59ihkzZmS7h52ISJOxwBGRVoiPj0ffvn2RlpYmn2ZjY4Pz589j4MCBSg2p/fXXX+POnTsYPnx4YUQlolwcOXIEf/zxh8K0bt264fTp06hTp06e12NhYYF58+YhKCgINWrUUHVMIiLR8BBKItIKc+bMUThPRkdHB4cOHUKDBg3ytT4DAwNs3Lgxx/PoiEj1UlNTMXLkSIVp9erVw/79+6Gvr5+vddauXRuBgYEKh0YTEWkyFjgi0nixsbHYsGGDwrRJkybB1dW1wOvu0aOHUvMnJSXh0qVLePHiBd6+fQsjIyPY2Nigbt26qF+/foHzfC4hIQGXL19GZGQkoqKiYGRkhFatWqFRo0Z5XkdQUBCePHmC169fIyUlBZUqVcKAAQO+uNzz588RFBSEN2/e4MOHD7C0tIStrS2aNWsGW1vbgjwtpUVHRyM0NBRhYWGIjY1FRkYGSpYsCVtbW7i4uBR5HlV6+PAhbt68iejoaCQmJsLa2hp2dnZo3rw5LC0tVb6927dvIygoCNHR0TA0NIStrS2aNm0Ke3t7lW/rc9u3b0dUVJT8vp6eHrZu3Zrv8pbJ0tISbdq0KWi8PHn16hWCg4MRGRmJmJgYWFlZ4euvv4adnV2uy6WlpeH69et49uwZ3r59i6SkJJibm6NSpUpwcHBA1apViyS/IAi4c+cO7t+/r/CaK1++PFq0aAEzM7MiyUFEuRCIiDTc77//LgCQ3wwMDIS3b98WaYZbt24Jnp6egpGRkUKWT2/lypUTZs6cKXz8+DHP623VqpXCOjLdu3dP6N+/v2BiYpJlO99++618vi1btig8tmXLFkEQBCEpKUmYO3euULly5SzLW1pa5pgnNTVVWLZsmVCnTp0cn6dEIhEaN24s+Pn55fl55pQzJzKZTLh48aIwfvx4oWbNmjlmybw5OjoKW7duFdLT03Ndb3h4+BfXldMtPDw8y/o+fbxVq1Z5/n6kpKQIixYtEqpWrZrj9vT09IS2bdsKAQEBeV7vuXPnFNYxe/Zs+WO7du3K9Xvp4uKi1Lbyo3bt2grb7NGjR6FuTxCUf+3l9H965MgRoU2bNoKOjk6W793BgwdzXN+VK1cEDw+PbN/Ln97s7e2FqVOnCk+ePMlxXTn9vMiL6OhoYfLkyULZsmVzzGBgYCB069ZNuH37tlLrJiLVYoEjIo3XpEkThQ8Zffv2LbJty2Qy4fvvv8/2Q1tON1tb2zx/EM7uA9mOHTsEAwODHNf/pQIXERGRawHLqcBdu3Yt28KX283Dw0NISEj44vNU9kP0d999l6+S1bx5c+HNmzc5rlcdClxoaKhQpUoVpbY9dOhQIS0t7Yvrzq7ApaamCl5eXnnajq6u7hf/b/Lr33//zbK948ePF8q2Pl
XQAieTyYTx48fn+n3LrsDFx8cLPXv2VPp1ltvrKL8FbuPGjYK5uXmeM+jo6Ag///xzntdPRKrFQyiJSKMlJiYiJCREYZqnp2eRbFsQBAwZMgTbtm3L8pi1tTUqVqyI5ORkhIeHK1x8OCoqCh06dMDBgwfh7u6u1DaPHTsGb29vyGQyAP+d61e5cmWULFkS0dHRePnyZa7Lx8fHo0OHDnj06JF8mo2NDcqVK4fU1FQ8e/Ys2+UOHz6Mvn37Ijk5WWG6gYEBKleuDEtLS8THx+PJkyfIyMhQWK5Nmza4cOECjIyMlHquucnuYs4lSpSAra0tLCwskJqaijdv3uD169cK81y6dAlt2rTBjRs3YGxsrLI8qhIUFIQOHTrgw4cPCtP19fVhb28PS0tLREZGIjIyUuHxLVu24PXr1/Dz84OBgYFS2xw8eDD++usv+f0SJUqgQoUK0NPTw9OnTxEbGyt/TCqVYsSIEahbty6cnZ2Vf4K5CAgIULhvYmKC9u3bq3QbhWHGjBnw9fWV3zc2NkalSpVgamqKV69eKRwSmunly5fo2LEj7t27l+Uxc3NzlCtXDhYWFoiLi8OzZ8/ydPHy/Jo5cyZ+/fXXLNMtLCxQoUIFmJqaIjo6GhEREfLHZDIZZs+ejZiYGCxfvrzQshFRDsRukEREBXH69Oksfx1+9OhRkWx7xYoVWbbdokUL4fLly4JMJpPP9/HjR2HDhg1CyZIlFeYtWbKk8OrVq1y38flf1MuUKSPfS7ZkyZIsh4pGRUUJ169fl9//fO9C5vLAf3sqPz8UKi0tTTh27JjCtNDQUMHY2DjL8zxy5IiQnJysMG98fLywfv16he0AEEaPHp3r81R2L8i4ceMEc3NzYejQocL+/fuFyMjIbOd7+fKl8PvvvwtWVlYK6584cWK28ycnJwv//POP8M8//wg+Pj4Ky/j4+Mgfy+72+fdCEJTbAxcfHy/Y29srLGNiYiIsXLhQiImJUZj39u3bQvfu3bO8/qZNm5brNj7fA/fpnr6OHTsKV69eVXjtZmRkCAcPHhTs7OwUlmvSpEmu28mPgQMHKmyjadOmKt9GdgqyB6506dLyve/Vq1cX9u3bl+V1cPfuXSEsLEx+PzU1NctRAwCE7t27CwEBAUJGRobC8unp6cKNGzeEH3/8UahYsaJK98Bt3rxZYX6JRCJ4e3sLN27cEKRSqcK8r169EqZPny7o6+srLLNnz54vboeIVIsFjog0mq+vr8KHCTMzM4UPoIXlxYsXWUrN4MGDc912eHi4UK5cOYVlPD09c93O5x/IgP8Owbx//36ecn7+4TTztmzZsjwtn56eLjg4OCgs+/PPP3/xe/zy5UuhevXqCsuFhITkOeeXPkQHBQUJcXFxeXoOgiAIERERCod/GhsbZylFBc2UHWUK3OeH4VlaWub6PRMEQfjxxx+zHNoWHByc4/yfF7jM28yZM3PdzsOHD7Oco3Xr1q1cl1FW48aNFdY/btw4la4/JwUpcJ/+QSOv57b+8MMPCssaGBgIu3btytOyaWlpwsWLF3N8XJkCFxYWpvB/amxsnOWPN9m5cOGCws8+GxubbP94QUSFh9eBIyKN9v79e4X7pUuXVuqab/m1evVqhcMJ69evj40bN+a6bXt7e+zbt09hHn9/fzx+/FipbW/duhW1atVSPvT/69evH7799ts8zbt//36EhobK748aNQqzZs364ve4XLly+Pvvv6Gj879fM0uWLMlf4Gw4OTnBwsIiz/NXqlRJYaTS5ORkhcMGxRYbG4vNmzcrTNu0aRMaNmyY63K//vorOnXqJL8vk8myXEPtSzw9PTF37txc56lRowYmTJigMO348eNKbedLPn8v29jYqHT9haVEiRLYu3dvnkZnfP/+PVauXKkwzdfXF/3798/TtvT19dGiRYt85fzcwoULkZSUJL+/efNmhddSTlq2bInFixfL70dHR2PHjh0qyUREecMCR0Qa7fMPfVZWVoW+TUEQsGnTJoVpixcvhp7el08rdnV1Rd++fRXWtXHjxjxvu3nz5kqfN/e5X375Jc/zLlu2TP61iYkJ5s+fn+dlHR0dFc5H9PPzg1QqzfPyqta2bVuULVtWfv/KlSuiZfncrl27FD5MN2vWDD179szTskuXLlW4v3fvXsTFxeV527/99lue5vv0dQsgy7mnBSXGe1kVxo0bl+fLVGzcuBGJiYny+y1atMhy3bui8P79e4Vzd11dXdGvX788Lz9y5EiFgv3333+rNB8R5Y4Fjog02sePHxXum5qaFvo2Hzx4gOjoaPn9ihUrom3btnleftiwYQr3L168mOdl8/qX+pw4OzujWrVqeZo3JiYGgYGB8vtdu3ZFiRIllNpehw4d5F8nJCTg5s2bSi2vap9ey0zsLJ+6cOGCwv3PXyO5qVWrFpo2bSq/n5aWhmvXruVpWUdHR9SpUydP8zo4OCj8keLFixd5zpgXYryXVUGZ9+SpU6cU7k+cOFHVcfLk/PnzCkcQDBo0SKnl9fX10bp1a/n9K1euyAdWIqLCxwJHRBrN3Nxc4f6nf90uLNevX1e437p1a6UO22zZsqXCB+GbN28iLS0tT8s2adIkz9sp6PKXLl2CIAjy+40bN1Z6exUrVlS4f//+faXX8SURERFYuHAhevfujdq1a6N06dIwNDSERCLJcrt69ap8uXfv3qk8S359/ppS9qLTn/8BIa8FTpn/U319fYW9Ysrs5csLMd7LBWVubo7atWvnad6MjAyF/xcdHR107NixsKLl6vMRPwv63o6Pj8erV68KnIuI8oaXESAijVayZEmF+6r+UJmdz4far1evnlLLGxoaolatWvJzyzKHvK9QocIXl61cubJS2yrI8p+Xre+//x7ff/99gbb/+WFyBfHs2TN8++238Pf3VyiaefXp8PhiEgRBYW+WhYWFwp7CvKhfv77C/efPn+dpOWXPMzM1NZUX388vKVFQJUuWVPg/KYr3ckFVqlQpz3+8iYqKUiilNWvWzNN5c4Xh8/d2Qf8wBPz33s7LzzAiKjjugSMijfZ5gXv79m2hb/Pza3RZW1srvY7Pl/l8nTlRZuCOgi4fExNToG1lR1UfygMDA1G/fn34+fnlq7wByPNez8IWFxencPhZqVKllF5Hfl9PBbk2X36/7zn5/L386WHK6kqZ95M6DdKizu9tIvoy7oEjIo32+WiMHz9+xJMnT/J8nld+JCQkKNzPz7k6ny/z+fk/OdHX11d6W/ldvjD2UKniPJmYmBh07tw5ywfGevXqoUWLFqhWrRrs7OxgbGwMIyMjhT0k3333He7cuVPgDKok5utJndSqVQtBQUHy++p0jmJOlHk/ff5/ItbeN0B939tElDcscESk0b766ivo6ekhIyNDPi0oKKhQC9znH7zyc67O58t8fv6POjAxMVG4P2nSJHTp0qVA66xSpUqBlgeAefPmKexBqF69Onbs2JGnw8A+f07qoLi8nr6kRYsWCsPR37x5E1KpFLq6uiKmUp3P/08+L+5F6fP3wZYtW1C+fPkCrfPzw3iJqPCww
BGRRjM1NUWjRo0URkv09/dXakhsZX0+EmN+Dkf6fAANZUd3LAqfH5ZXtmxZtGvXTqQ0/7Nnzx7510ZGRjhx4kSei6Eqz8FTFUtLS+jo6Mj3YGjr6+lLPr++WVJSEs6cOaMwkqkmU6dDRD9/b9epU0cl58ERUdHgOXBEpPG+/vprhfsHDhwolHM8MlWqVEnh/u3bt5VaPjU1FQ8fPpTfNzQ0RJkyZVSSTZU+H/DkyZMnIiX5n+fPnyMyMlJ+v2PHjnkub8nJyQgPDy+saPkmkUgUBn+Ij49HRESEUuv4/DX4+WtUE9SuXTvLIdGfXnxd09na2irsbX348KFoe+HU8b1NRHnHAkdEGm/UqFEKH4xSU1OxYMGCQtveV199pXD//PnzSg3oEBAQgPT0dPn9Ro0awcDAQGX5VOXT6zwBwNmzZ0VK8j9v3rxRuF+zZs08L/v59/1LdHQUf0WqetCOT33+mlL2e/35/J+vT1NMmTJF4f6hQ4eU/gOJutLT04Orq6v8vkwmw4kTJ0TJoo7vbSLKOxY4ItJ4JUqUwPDhwxWmLV26NMu1tfLj2LFjWabVrFlTYY/Zs2fPcO7cuTyvc/PmzQr3W7Vqlf+AhahcuXJwcHCQ3w8LC8Px48dFTJS1RCkzkuTq1auV2tbnA4MkJSUptbwyPn8NbN26Nc/LPnz4EJcvX5bfNzQ0hIuLi6qiFSlvb2+F91ZGRgaGDBmiVPHOTlJSUpaLpYvh8+u+rVixQpQc7dq1U7gW5V9//VWoRy0QkWqxwBGRVpgzZ47CtbOkUim6d++Ou3fv5mt96enpmDp1KsaNG5flMYlEkqUw+vj4QCqVfnG9gYGB+OuvvxTWNWLEiHxlLAo+Pj4K9ydNmiTqcOG2trYK9y9dupSn5Y4dOwY/Pz+ltvX5OUuFefhl//79FQpjQEAADh06lKdlv/vuO4X7ffr0gaWlpSrjFRlDQ0OsW7dOYdqtW7fQr1+/fF/2ISwsDC1atFDqjyyFZdiwYQqDmQQEBIhymGiZMmUwaNAg+f3ExMRsf9YRkXpigSMirWBlZYU9e/YoDOsdFRWFVq1aYffu3Uod/nbhwgU0btwYS5YsyXG5MWPGwNjYWH4/JCQEo0ePznU7z58/R69evRTm8fT0RNWqVfOcrah5eXmhbt268vuPHj1Cp06dFM5D+5L09HT8+eefKjmstWLFiihXrpz8/o0bNxQGNclOYGAgBg4cqPS2Pn3ewH+D4xR0T1BOrKysMGzYMIVpw4YN++IlD2bPno2jR4/K7+vo6GDy5MmFkrGoeHp6YuLEiQrTDhw4gA4dOuDBgwd5Xk9KSgoWL16MBg0aICQkRNUx88XKygrffvutwrTx48cr/FEnN+np6QgICFBJlp9++klhNMo9e/Zg1KhRShXl9+/f49dff8Xhw4dVkomI8oYFjoi0RpMmTbBhwwaFc5c+fPiAAQMGwMXFBevWrcPz58+zXfbx48dYunQpmjZtCjc3ty9+cC5fvjwWLlyoMG3jxo1o06YNrl27pjA9MTERmzZtgpOTE168eCGfXrJkSaxatUrZp1mkdHV18ffffyvs0bl69SocHBwwe/ZsPHr0KNvl3rx5gyNHjmDUqFEoV64chgwZgvv376skk7e3d5b78+fPR3x8vML0ly9f4qeffkLLli3x4cMHGBkZKeyl/RIbGxuFodEfP34MV1dXLF++HEeOHMHp06cVbikpKQV6XvPmzVPI9+HDBzRt2hRLlizJcmHu0NBQ9OzZE3PnzlWY7uPjg4YNGxYohzpYtGgROnfurDDtwoULcHR0xLBhw3DkyJFsD2lNT0/H2bNnMXnyZFSuXBk+Pj6iDtefnVmzZimco5iWlob+/fujZ8+euHz5cpY9+RkZGQgODsZPP/2EqlWrYubMmSrJUaVKFWzatElh2vr16+Ho6IgNGzZkOd8U+O8Q5rCwMGzfvh09evRA+fLlMXPmTB5+SVTUBCIiLXPw4EHBwsJCAJDtrWTJkkLNmjWFJk2aCFWrVhVMTExynPerr77KcTsymUwYNGhQtsuVLl1aaNSokVCnTh3B2Ng4y+PGxsbCiRMnvvhcWrVqpbCcsrZs2aKw/JYtW5RehyAIwtmzZ4USJUpk+1ytra0FBwcHwcXFRahdu7ZQunTpbOcbPHiwSnLGxMQI5cuXz7J+PT09oW7dukKTJk2EypUrCxKJROHx9evXK/393LZtW46vjc9v4eHhWZb/9PFWrVp9cXs3btzI9vusr68v1KhRQ2jcuLFQrly5bLffsWNHITU1Ndf1nzt3TmGZ2bNnfzHTpypVqiRftlKlSkotq6zU1FRhxIgROX6/9fT0BDs7O6F+/fpCo0aNBDs7O0FPTy/H+deuXZvtdpR9jyj7f5qdly9fCnXr1s02p7m5uVCnTh3BxcVFqFWrlmBkZJTnbebn58WSJUsEHR2dbLNUqFBBaNiwoeDs7CxUr15dMDc3z3a+/P5cIaL8YYEjIq0UHh4ueHp65vnD9+e3UqVKCX/88YeQlpaW63ZkMpng4+OT4weg7G62trZCQEBAnp6HuhQ4QRCEJ0+eCM7Ozvn6fkokEuGnn35SWc6QkBChTJkyedq2jo6OsHTpUkEQ8vf9/P7777OUwcIqcIIgCHfv3hWqVKmi1Pd3yJAhX3ytCoJmFbhMBw8eFOzt7fP9XnZ1dRUuX76c4/rFKHCCIAixsbFCt27dlH4+qi5wgiAIJ06cEMqWLZuv76+hoaFw9OjRfH8fiEh5PISSiLSSvb09Dh06hJs3b2Ls2LFZBr/IjqmpKTp16oS//voLr169wqRJkxTOqcuORCLBwoULERISgm7dusHQ0DDHee3s7DBz5kw8fvwYzZs3V/o5ia1q1aoIDAyEv78/2rRp88VLH+jq6sLV1RVz587FkydP8Msvv6gsS8OGDREcHIyBAwdCV1c323kkEgnat2+Pa9euFei8sAULFuDWrVuYOnUqmjdvDhsbGxgZGeV7fV/i4OCAf//9F4sWLcr1Gnd6enpo27YtAgICsGXLli++VjVV9+7d8fjxY+zatQudOnXK9T2WqXz58hg3bhyCgoJw5coVNG3atAiSKsfS0hJ+fn44d+4cOnTo8MX3U82aNfHTTz9h27ZtKs/i7u6Op0+fYsWKFahXrx4kEkmu85uZmaFLly5Ys2YNXr9+neVwVyIqXBJBKMQL2xARqZGwsDCEhobixYsX+PjxIwRBgJWVFUqWLIk6deqgbt26OZaBvEpKSsKlS5fw/PlzvHv3DoaGhrCxsUHdunXRoEED1TwRNZGUlIRr167hxYsXiImJQXJyMszMzGBtbY2aNWuidu3aWYbiLwzv37/HxYsX8ezZM3z8+BGmpqaoXLkymjZtChsbm0LffmF78OABbt68iejoaCQlJaFUqVIoV64cmjdvrrGjTRZEamoqbt++jcePH+PNmzdISkqCoaEhSpYsiTJlysDJyQlly5YVO6bSEhMTcfnyZbx8+RLv3r2DVCqFhYUFKleujHr16qF8+fJFluXt27e4fv06oqKiEBMTA5lMBgsL
jyydDQENOnTxcdg4gKKTg4GD169ECDBg2we/duWFhYiI5EWowFHJEGkslkmDZtGkqWLIlRo0YhJiYG/v7+MDQ0FB2NiIhIp6xduxaDBg1Chw4d8Pfff8PExER0JNJy/MqeSIONHDkS//zzD7Zs2YIOHTogMTFRdCQiygeFQoHr16/j+vXrUCgUouMQUT5IkoS5c+fiq6++wsCBA7F161YWb1QsWMARabgePXpg3759OHfuHFq2bInY2FjRkYgoj1JTU1GrVi3UqlULqampouMQUR5lZWVh5MiR+OGHHzBjxgz8/vvv0NfnwDYqHizgiLRAy5YtceLECURHR6Np06a4ffu26EhERERaKS0tDb169cKKFSuwcuVKTJ06FTKZTHQs0iFaX8DFxcXBx8cHlpaWsLa2xsCBA5GUlPTB9vfu3YNMJnvvbcuWLcp273t806ZNxfGWiN6rTp06OH36NORyOZo0aYKQkBDRkYiIiLRKfHw82rVrh927d2P79u0YPHiw6Eikg7S+gPPx8cH169dx8OBB7N69GydOnMj1f7by5cvjyZMnOW4zZsyAubk5Pvvssxxt165dm6Pd559/ruJ3Q5S7SpUq4dSpU6hcuTI8PT1x8OBB0ZGIiIi0QnR0NJo3b46rV6/i0KFD6Ny5s+hIpKO0erBueHg49u3bhwsXLsDd3R0A8Ntvv6F9+/ZYtGgRHBwc3nmOXC6Hvb19jvt27NiBnj17wtzcPMf91tbW77QlEq106dI4fPgwevbsifbt22PdunXw8fERHYuIiEhj3bx5E23btoVCocDJkydRs2ZN0ZFIh2l1D9yZM2dgbW2tLN4AwMvLC3p6ejh37lyethESEoLQ0FAMHDjwncdGjBiB0qVLo2HDhvD394ckSbluKz09HQkJCTluRKpgZmaGnTt3om/fvujbty9+/vln0ZGIiIg00tmzZ9GsWTNYWFjgzJkzLN5IOK3ugYuJiYGtrW2O+/T19VGyZEnExMTkaRtr1qyBs7MzmjRpkuP+mTNnolWrVjA1NcWBAwcwfPhwJCUl4dtvv/3gtubNm4cZM2bk/40QFYCBgQH8/f3h4OCAcePGITo6GgsXLuSC30RERHmUvUB3/fr1ERgYiBIlSoiORKSZBdzEiRPx008/5domPDy80K+TmpqKjRs3YsqUKe889vZ9devWRXJyMhYuXJhrATdp0iSMGTNG+XtCQgLKly9f6JxEHyKTyTBnzhyUKVMG3377LWJiYrB27Vou+E2kJgwMDDBu3Djlz0SkPvz9/TF48GB06tQJAQEBXOON1IZGFnBjx45F//79c21TpUoV2Nvb4+nTpznuz8zMRFxcXJ6uXdu6dStSUlLQr1+/j7b18PDArFmzkJ6eDiMjo/e2MTIy+uBjRKr0zTffwM7ODn379kVsbCy2bNnCbxGJ1IChoSEWLlwoOgYRvUWhUGDWrFmYPn06hgwZAj8/P8jlctGxiJQ0soCzsbGBjY3NR9s1btwYr169QkhICOrXrw8AOHLkCBQKBTw8PD76/DVr1qBTp055eq3Q0FCUKFGCBRqprR49esDGxgbdunVDw4YNsWvXLri4uIiORUREpDaSkpLQv39/bNu2DbNnz8bkyZO5xhupHa2+GMbZ2Rnt2rXDoEGDcP78eZw6dQrffPMNevXqpZyB8vHjx3BycsL58+dzPDcqKgonTpzA119//c52g4KC8McffyAsLAxRUVFYsWIF5s6di5EjRxbL+yIqKE9PT1y4cAHGxsZo1KgRgoKCREci0mkKhQL37t3DvXv3oFAoRMch0ml3795F06ZNsX//fuzcuRM//PADizdSS1pdwAFAQEAAnJyc0Lp1a7Rv3x7NmjXDqlWrlI9nZGQgIiICKSkpOZ7n7++PcuXKoU2bNu9s08DAAH5+fmjcuDHc3NywcuVKLF68GNOmTVP5+yEqrCpVquDMmTPw8vJC586dMXfu3I/OoEpEqpGamorKlSujcuXKSE1NFR2HSGcdPXoUDRo0QFJSEs6cOcM13kitySSeuQmTkJAAKysrxMfHw9LSUnQc0jFvj/Hv0aMH1q5dCzMzM9GxiHRKcnKyco3RpKQk/j9IVMwkSYKfnx9Gjx6Nli1b4p9//kHJkiVFxyIdldfaQOt74Ijo/fT09DBt2jRs27YNe/bsQbNmzXD//n3RsYiIiIrF69evMXjwYIwcORIjR47E3r17WbyRRmABR6TjunbtijNnziA+Ph7u7u44ceKE6EhEREQqFRsbi1atWuHPP//E2rVrsWTJEujra+TcfqSDWMAREVxdXXHhwgXUrl0brVu3xu+//y46EhERkUqEhITA3d0dt2/fxvHjxz+6NBWRumEBR0QAgFKlSmHfvn0YPnw4hg0bhqFDh+L169eiYxERERWZjRs3olmzZnBwcMDFixfRqFEj0ZGI8o0FHBEpGRgY4Ndff8WaNWvg7+8PLy8vPH36VHQsIiKiQsnKysKECRPg4+ODHj164Pjx4yhbtqzoWEQFwsG+RPSOr776Ck5OTujatSvc3d2xa9cu1K1bV3QsIq2jr6+P4cOHK38moqL36tUr9OnTB/v378fixYsxevRoru9GGo3LCAjEZQRI3T169AhdunTB9evXsWLFCvj6+oqORERElGdXrlxBz5498fTpU/zzzz/vXd+XSF1wGQEiKrRy5crhxIkT6NWrF/r3749+/fohMTFRdCwiIqJcSZKE5cuXw8PDAyYmJjh//jyLN9IaLOCIKFcmJibw9/fHhg0bsGPHDtSvXx+XL18WHYtIK0iShGfPnuHZs2fggBiiovHy5Ut0794dI0aMwNdff42zZ8/C0dFRdCyiIsMCjojyxMfHB5cuXYKFhQUaNWqEpUuX8oSTqJBSUlJga2sLW1tbpKSkiI5DpPFOnz4NNzc3HD16FNu3b8eyZctgbGwsOhZRkWIBR0R55ujoiNOnT2P48OEYNWoUOnfujBcvXoiORUREOk6hUGDevHlo3rw5ypcvj9DQUHTp0kV0LCKVYAFHRPliZGSEJUuWIDAwEKdOnYKbmxtOnjwpOhYREemomJgYtG3bFj/88AMmTpyIY8eOoUKFCqJjEakMCzgiKpCOHTviypUrqFKlCjw9PTFr1ixkZWWJjkVERDrkwIEDqFOnDsLCwnDw4EHMnj2bS3KQ1mMBR0QFVq5cORw5cgRTpkzB9OnT8emnnyI6Olp0LCIi0nIZGRmYNGkS2rZti7p16yI0NBStW7cWHYuoWLCAI6JCkcvlmD59Og4fPoyIiAjUqVMHe/fuFR2LiIi01L1799C8eXMsWrQICxYswJ49e2BnZyc6FlGxYQFHREXC09MTV65cgYeHB9q3b49x48bh9evXomMREZEW2bZtG9zc3BATE4N///0X48ePh54eT2dJt/CIJ6IiU7p0aQQFBWHx4sVYunQpmjVrhoiICNGxiNSWvr4+fH194evry+t2iHKRlJSEoUOHonv37mjTpg0uX74MDw8P0bGIhJBJXMhJmISEBFhZWSE+Ph6Wlpai4xAVqYsXL6JPnz54+PAhZ
s2ahe+++w5yuVx0LCIi0jBHjhzBwIED8fTpUyxevBiDBw+GTCYTHYuoyOW1NmAPHBGphLu7O0JDQzF8+HB8//33aNasGcLDw0XHIiIiDZGYmIhhw4ahdevWqFSpEq5du4YhQ4aweCOdxwKOiFTG1NQUP//8M/7991+8fPkSdevWxU8//YTMzEzR0YjUgiRJSE5ORnJyMjgghuj/HTp0CLVq1cJff/0FPz8/HD58GFWqVBEdi0gtsIAjIpVr0qQJLl++jG+//RaTJ09GkyZNcP36ddGxiIRLSUmBubk5zM3NkZKSIjoOkXDx8fEYPHgwPv30U1SrVg1hYWEYPnw4Jyohegv/byCiYmFiYoIFCxbg9OnTSEpKQr169TB37lz2xhEREQBg3759qFWrFv7++2+sXLkShw4dQqVKlUTHIlI7LOCIqFh5eHjg0qVLGDNmDKZMmQIPDw9cvXpVdCwiIhLk1atX+Oqrr/DZZ5/B2dkZYWFhnKiEKBcs4Iio2BkbG2PevHk4e/Ys0tPT4e7ujlmzZiEjI0N0NCIiKkbBwcGoWbMmtm3bhj/++AP79+9HxYoVRcciUmss4IhImAYNGiAkJATff/89ZsyYgYYNGyI0NFR0LCIiUrGXL1+if//+6NChA2rXro2wsDAMHDiQvW5EecACjoiEMjIywuzZs3Hu3DlkZWWhQYMGmDZtGl6/fi06GhERqUBgYCBq1qyJnTt3Yu3atdizZw/Kly8vOhaRxmABR0RqoX79+rh48SJ++OEHzJ07F25ubjh8+LDoWEREVETu37+Pbt26oXPnzqhXrx6uX7+O/v37s9eNKJ9YwBGR2jA0NMT06dMREhKCUqVKwcvLCz179sTDhw9FRyNSCblcju7du6N79+6Qy+Wi4xCpRFpaGmbNmgVnZ2ecPXsWGzduRFBQEMqWLSs6GpFGkklcOVSYhIQEWFlZIT4+HpaWlqLjEKkVSZKwceNGjBs3DgkJCfjhhx8wduxYGBkZiY5GRER5tHv3bowaNQoPHjzAmDFj8OOPP8LCwkJ0LCK1lNfagD1wRKSWZDIZfHx8EBERgWHDhmHatGmoVasW9uzZIzoaERF9RFRUFDp06ICOHTuiatWquHbtGn766ScWb0RFgAUcEak1S0tLLFq0CFeuXEHFihXh7e2NTp064c6dO6KjERHRfyQnJ+PHH39EzZo1ce3aNWzfvh379++Hk5OT6GhEWoMFHBFpBBcXFxw8eBBbtmxBaGgoXFxcMG3aNKSkpIiORlRgycnJkMlkkMlkSE5OFh2HqMAkScLWrVvh7OyMRYsWYcKECQgPD0eXLl04SQlREWMBR0QaQyaToXv37ggPD8e4ceMwf/58uLi4YMeOHeDlvEREYoSHh6NNmzbo0aMH3NzccP36dcycOROmpqaioxFpJRZwRKRxzMzMMHv2bFy/fh01a9ZE165d0a5dO0RERIiORkSkMxITEzF+/HjUrl0bd+/exe7duxEYGIiqVauKjkak1VjAEZHGqlatGoKDgxEYGIhbt27B1dUVEyZMQEJCguhoRERaS6FQICAgADVq1ICfnx+mT5+OsLAweHt7i45GpBNYwBGRxuvYsSNu3LiBKVOmYOnSpahatSp++eUXpKWliY5GRKQ1JEnC/v374e7ujr59+6Jp06a4efMmfvjhBxgbG4uOR6QzWMARkVYwNjbGlClTcOvWLXTp0gXjxo1DjRo1sHbtWmRmZoqOR0Sk0c6ePYtWrVqhXbt2MDU1xYkTJ7BlyxZUqFBBdDQincMCjoi0Srly5bBq1Spcv34dHh4e+Oqrr1C7dm1OdEJEVADXr1/H559/jsaNG+PFixfYvXs3Tp48iU8++UR0NCKdxQKOiLRSjRo1sHnzZly4cAHlypVD165d0ahRIxw5ckR0NCIluVyO9u3bo3379pDL5aLjECndu3cPvr6+cHV1xdWrV7FhwwZcvnwZ3t7eXBaASDAWcESk1dzd3XHgwAEcPnwYANC6dWu0adMGFy9eFJyM6M3Q3+DgYAQHB/MaIlILT58+xahRo1C9enXs378fv/32G27evAkfHx9+yUCkJljAEZFOaNWqFc6ePYvt27fj0aNHaNCgAXr06MGlB4iIACQkJGDatGmoUqUK1q1bh+nTp+P27dsYMWIEDA0NRccjorewgCMinSGTydClSxdcu3YNa9euxfnz51GzZk0MGjQIjx49Eh2PiKjYpaWlYfHixahSpQoWLFiAESNG4O7du5g8eTLMzMxExyOi92ABR0Q6Ry6Xo3///oiMjMTPP/+MnTt3olq1ahg3bhyeP38uOh7pkOTkZJiZmcHMzAzJycmi45AOycjIgL+/PxwdHfH999+jW7duiIqKwk8//YSSJUuKjkdEuWABR0Q6y8jICKNGjcKdO3cwadIkrFy5EhUrVsTo0aPx4MED0fFIR6SkpCAlJUV0DNIRKSkpWLZsGRwdHTFw4EA0bdoUN27cwMqVK1G2bFnR8YgoD1jAEZHOs7CwwLRp03D37l2MHz8ef/75J6pWrYr+/fvjxo0bouMRERXay5cvMWfOHFSqVAmjRo1C06ZNceXKFWzatAnVq1cXHY+I8oEFHBHR/5QuXRrTp0/HgwcPsGDBAhw6dAg1a9ZEly5dcO7cOdHxiIjyLTo6GuPHj0eFChUwe/Zs9OjRA7du3UJAQABq164tOh4RFQALOCKi/zA3N8d3332HO3fuwN/fH+Hh4WjUqBFatmyJ/fv3c0FwIlJ7t27dwuDBg1G5cmWsXr0a3377Le7duwc/Pz9UqVJFdDwiKgQWcEREH2BoaIgBAwbg+vXr2LZtG5KTk9GuXTvUr18fmzdvRlZWluiIREQ5hISEoGfPnqhRowaCgoIwa9Ys3L9/H3PmzIGdnZ3oeERUBFjAERF9hFwuR9euXXHu3DkcPnwYpUuXxhdffAEnJyesWrUK6enpoiMSkQ6TJAlHjx5FmzZt4O7ujkuXLuH333/H3bt38f3338PKykp0RCIqQizgiIjySCaToVWrVjhw4AAuXrwINzc3DB06FJUrV8bChQuRkJAgOiJpGD09PbRo0QItWrSAnh7/JFP+KBQK7NixA40aNUKrVq3w7NkzbNq0CTdv3sTgwYNhbGwsOiIRqYBM4sUcwiQkJMDKygrx8fGwtLQUHYeICiAyMhILFy7E+vXrYWpqCl9fXwwbNgxOTk6ioxGRlnr58iXWrVuH33//HZGRkfD09MTEiRPRpk0byGQy0fGIqIDyWhvw6z4iokKoXr06Vq9ejbt372LYsGH4+++/4ezsjFatWmHLli3IyMgQHZGItMSFCxfw1VdfoWzZspgwYQLq1auHM2fO4OjRo2jbti2LNyIdwQKOiKgIlC1bFvPmzcPDhw+xceNGZGRkoGfPnqhYsSKmTp2KR48eiY5IRBooJSUF/v7+cHd3R8OGDXH48GFMmTIFDx8+xN9//41GjRqJjkhExYwFHBFRETIyMkLv3r1x8uRJXL16FV26dMGSJUtQsWJFdOnSBQcOHIBCoRAdk9REcnIybGxsYGNjg+Tk
ZNFxSI1ERERg9OjRKFu2LL7++mvY2dkhKCgId+7cwaRJkzijJJEOYwFHRKQirq6u8PPzQ3R0NPz8/HDnzh20bdsWNWrUwM8//4wXL16Ijkhq4Pnz53j+/LnoGKQGMjIysHXrVrRu3RpOTk4ICAjAkCFDcPv2bQQHB6NDhw6Qy+WiYxKRYCzgiIhUzMLCAkOHDkVoaCj+/fdfeHh4YPLkyShbtix8fX1x9uxZLg5OpMMePXqEadOmoWLFiujRowfS09OxYcMGPHr0CPPnz0flypVFRyQiNcJZKAXiLJREuuvZs2fw9/fHypUrcffuXdStWxdDhw5Fz549YW1tLToeFZPk5GSYm5sDAJKSkmBmZiY4ERWXzMxMHDx4EKtXr0ZgYCBMTEzQt29fDBs2DLVr1xYdj4gE4CyURERqzMbGBhMmTEBUVBT27NmDsmXLYtiwYbCzs0PXrl2xbds2pKWliY5JREVIkiScO3cOI0eOhIODA9q3b49bt25h6dKlePz4MVasWMHijYg+ij1wArEHjoje9uTJE2zatAkBAQEICQmBlZUVunXrBh8fH7Ro0YLXvmgh9sDphsjISAQEBCAgIAC3b99GmTJl0Lt3b/j4+KBu3bqc/p+IAOS9NmABJxALOCL6kJs3b2Ljxo0ICAjAnTt3ULZsWfTq1Qs+Pj5wc3PjCZ+WYAGnvWJiYpRfyFy8eBGWlpbKL2Q8PT35hQwRvYMFnAZgAUdEHyNJEs6ePYuAgAD8888/eP78OVxcXODj44M+ffqgUqVKoiNSIaSmpqJ58+YAgBMnTsDExERwIiqMxMREbN++HQEBATh8+DDkcjm8vb3h4+MDb29v/vsSUa5YwGkAFnBElB8ZGRk4ePAgAgICsHPnTqSkpKBp06bw8fFBjx49ULp0adERiXTO69evsX//fgQEBCAwMFBZlPv4+KB79+4oWbKk6IhEpCFYwGkAFnBEVFBJSUnYtWsXAgICcODAAchkMrRr1w6ff/45OnTowEV+iVQoNTUVR44cQWBgILZu3Yq4uDi4urrCx8cHvXv3RoUKFURHJCINxAJOA7CAI6Ki8PTpU2zevBmbNm3C6dOnAQANGzZEp06d0LFjR9SqVYvXzBEVUkxMDIKDgxEUFISDBw8iJSUFVatWRffu3eHj4wNXV1fREYlIw7GA0wAs4IioqD179gx79uxBUFAQ9u/fj6SkJFSqVAkdO3ZEx44d0aJFCxgaGoqOSf+TkpICFxcXAMCNGzdgamoqOBFlkyQJ165dQ1BQEIKCgnDu3Dno6emhcePG6NixIzp16gQnJyd+OUJERYYFnAZgAUdEqpSeno5jx44hMDAQQUFBePjwISwsLNCuXTt07NgR7du3R6lSpUTH1GmchVK9vH79GsePH1f+P3P//n2Ym5ujbdu2yv9nbGxsRMckIi3FAk4DsIAjouIiSRKuXr2qPDG9cOEC9PT00LRpU2VvQo0aNUTH1Dks4MR78eIF9uzZg8DAQOzfvx+JiYkoX768cgiyp6cnjIyMRMckIh3AAk4DsIAjIlGePHmC3bt3K6/nSUtLg6OjI9q1a4eWLVuiefPm7J0rBizgil96ejrOnTuHY8eO4eDBgzh9+jQUCgUaNGigLNpq167NoZFEVOxYwGkAFnBEpA5SUlJw+PBhBAUF4fDhw7hz5w4AoHbt2vD09ISnpycLOhVhAad6bxdsx44dw5kzZ5CWlgZra2u0aNECHTp0gLe3N8qUKSM6KhHpOBZwGoAFHBGpowcPHuD48ePKE97sgs7V1TVHQcd15wqPBVzRS0tLUxZsx48fz1GwNW/eXHkM165dG3K5XHRcIiIlFnAagAUcEWkCFnSqwwKu8N4u2LJ72NLT05U9bNnHqKurKws2IlJrLOA0AAs4ItJEuRV0LVq0gIeHB+rXr4/q1avzhPkjUlJS0KBBAwDAhQsXuIxAHkRHR+PSpUu4cOECTpw4wYKNiLQGCzgNwAKOiLTBw4cPlQXd8ePHERUVBQAwMzND3bp1Ua9ePdSvXx/169eHk5MTT6opTyRJwuPHj3Hp0iWEhIQobzExMQCAUqVKoVmzZizYiEhrsIDTACzgiEgbvXz5EpcvX85x0p1d1JmamsLNzU1Z0NWrVw/Ozs7Q19cXnJpEkiQJjx49ynHMhISE4OnTpwAAGxubHMdM/fr1UaFCBc4USURahQWcBmABR0S6Ij4+/p2iLjIyEgBgYmKCOnXq5DhBr1GjBoyNjQWnJlXIysrCgwcPchwPly5dwrNnzwAAdnZ27xRr5cqVY7FGRFqPBZwGYAFHRLosISEBoaGhOYq6iIgISJIEmUyGihUronr16u/cKlSooDVD5bT1GjhJkvDs2TNERka+c4uKikJ6ejoAoEyZMjkKtfr168PBwYHFGhHpJBZwGoAFHBFRTomJibh69SoiIiI+eNJvaGiIatWqvVPYOTo6ws7OTqNO/jV9FsrExETcunXrvYVafHw8ALy3GHd0dESdOnW49hoR0VtYwP3PnDlzEBwcjNDQUBgaGuLVq1cffY4kSZg2bRpWr16NV69eoWnTplixYgUcHR2VbeLi4jBy5EgEBQVBT08P3bp1w6+//qr8Q5wXLOCIiPImKysLDx8+fG+hcO/ePWT/KbOwsFAWCeXLl0eZMmVQpkwZODg4KH9WpyJJnQu4jIwMxMTE4MmTJzlujx8/RlRUFCIjI/HkyRNle1tb2/f2mFatWpXDYYmI8oAF3P9MmzYN1tbWePToEdasWZOnAu6nn37CvHnzsH79elSuXBlTpkzBtWvXcOPGDeUfoc8++wxPnjzBypUrkZGRgQEDBqBBgwbYuHFjnrOxgCMiKry0tDTcuXPnncLu8ePHePLkCVJTU3O0t7CwyFHQva/IK1OmDCwtLVXemyeigEtLS0NMTAyio6PfKc7evu/58+d4+xRBLpfDzs4ODg4O7/SAOjo6wtraWuXZiYi0GQu4/1i3bh1Gjx790QJOkiQ4ODhg7NixGDduHIA3F9/b2dlh3bp16NWrF8LDw+Hi4oILFy7A3d0dALBv3z60b98ejx49goODQ54ysYAjIlItSZKQkJDwTrHyvt+TkpJyPNfU1BSlSpWChYUFzM3NYWFhUaCfzczMIJfLoaenl+Mmk8neW8BJkgRJkpCVlQWFQqG8paamIikpCYmJiUhMTMzXz9n/ffXqFV6+fJnjfRoYGORayGb/Xrp0aa259pCISB3ltTbgvM3/cffuXcTExMDLy0t5n5WVFTw8PHDmzBn06tULZ86cgbW1tbJ4AwAvLy/o6enh3Llz6NKly3u3nZ6erryGA3jzj0RERKojk8lgZWUFKysrODs759o2KSnpnQIvLi7unULo/v3779yXnJxc4HzZLC0tlcVbfhkZGb23gDQ3N4e9vb3yZysrq3d6HkuVKqVR1w0SEek6FnD/kb1AqJ2dXY777ezslI/FxMTA1tY2x+P6+vooWbKkss37zJs3DzNmzCjixEREVBTMzc3h6OiY43rnvMrKykJycvI7PV6JiYlITk7O0ZP2ds9aamoqRo0
aBQBYvHgxTE1N3+mpy74ZGxt/sLfPwMCgqHcHERGpKY0s4CZOnIiffvop1zbh4eFwcnIqpkR5M2nSJIwZM0b5e0JCAsqXLy8wERERFQW5XA5LS8t8D4dPSUnB4sWLAQCDBg3SmmUEiIhIdTSygBs7diz69++fa5sqVaoUaNv29vYAgNjY2BzTG8fGxsLNzU3Z5unTpzmel5mZibi4OOXz38fIyAhGRkYFykVERNrH1NQU9+7dEx2DiIg0iEYWcDY2NrCxsVHJtitXrgx7e3scPnxYWbAlJCTg3LlzGDZsGACgcePGePXqFUJCQlC/fn0AwJEjR6BQKODh4aGSXERERERERHqiA6jagwcPEBoaigcPHiArKwuhoaEIDQ3NMduYk5MTduzYAeDNBeWjR4/G7NmzERgYiGvXrqFfv35wcHDA559/DgBwdnZGu3btMGjQIJw/fx6nTp3CN998g169euV5BkoiIiIiIqL80sgeuPyYOnUq1q9fr/y9bt26AICjR4/C09MTABAREYH4+Hhlm++//x7JyckYPHgwXr16hWbNmmHfvn05FiINCAjAN998g9atWysX8l66dGnxvCkiItIKqampaN68OQDgxIkTMDExEZyIiIjUnc6sA6eOuA4cEZFuE7GQNxERqae81gZaP4SSiIiIiIhIW7CAIyIiIiIi0hAs4IiIiIiIiDQECzgiIiIiIiINwQKOiIiIiIhIQ2j9MgJERETqrHTp0qIjEBGRBmEBR0REJIiZmRmePXsmOgYREWkQDqEkIiIiIiLSECzgiIiIiIiINAQLOCIiIkFSU1Ph6ekJT09PpKamio5DREQagNfAERERCaJQKHD8+HHlz0RERB/DHjgiIiIiIiINwQKOiIiIiIhIQ7CAIyIiIiIi0hAs4IiIiIiIiDQECzgiIiIiIiINwVkoiYiIBDI1NRUdgYiINAgLOCIiIkHMzMyQnJwsOgYREWkQDqEkIiIiIiLSECzgiIiIiIiINAQLOCIiIkHS0tLg7e0Nb29vpKWliY5DREQagNfAERERCZKVlYU9e/YofyYiIvoY9sARERERERFpCBZwREREREREGoIFHBERERERkYZgAUdERERERKQhWMARERERERFpCM5CKZAkSQCAhIQEwUmIiEiE5ORk5c8JCQmciZKISIdl1wTZNcKHsIATKDExEQBQvnx5wUmIiEg0BwcH0RGIiEgNJCYmwsrK6oOPy6SPlXikMgqFAtHR0bCwsIBMJhOaJSEhAeXLl8fDhw9haWkpNIs24v5VLe5f1eL+VS3uX9Xi/lUt7l/V4v5VLXXbv5IkITExEQ4ODtDT+/CVbuyBE0hPTw/lypUTHSMHS0tLtTiAtRX3r2px/6oW969qcf+qFvevanH/qhb3r2qp0/7NrectGycxISIiIiIi0hAs4IiIiIiIiDQECzgCABgZGWHatGkwMjISHUUrcf+qFvevanH/qhb3r2px/6oW969qcf+qlqbuX05iQkREREREpCHYA0dERERERKQhWMARERERERFpCBZwREREREREGoIFHBERERERkYZgAacj5syZgyZNmsDU1BTW1tZ5eo4kSZg6dSrKlCkDExMTeHl54datWznaxMXFwcfHB5aWlrC2tsbAgQORlJSkgneg3vK7H+7duweZTPbe25YtW5Tt3vf4pk2biuMtqZWCHGeenp7v7LuhQ4fmaPPgwQN4e3vD1NQUtra2GD9+PDIzM1X5VtRSfvdvXFwcRo4ciRo1asDExAQVKlTAt99+i/j4+BztdPn49fPzQ6VKlWBsbAwPDw+cP38+1/ZbtmyBk5MTjI2N4erqij179uR4PC+fx7okP/t39erV+OSTT1CiRAmUKFECXl5e77Tv37//O8dqu3btVP021FZ+9u+6deve2XfGxsY52vD4zSk/+/d9f8tkMhm8vb2VbXj8/r8TJ06gY8eOcHBwgEwmw86dOz/6nGPHjqFevXowMjJCtWrVsG7dunfa5PczXeUk0glTp06VFi9eLI0ZM0aysrLK03Pmz58vWVlZSTt37pSuXLkiderUSapcubKUmpqqbNOuXTupTp060tmzZ6WTJ09K1apVk3r37q2id6G+8rsfMjMzpSdPnuS4zZgxQzI3N5cSExOV7QBIa9euzdHu7f2vKwpynLVo0UIaNGhQjn0XHx+vfDwzM1OqVauW5OXlJV2+fFnas2ePVLp0aWnSpEmqfjtqJ7/799q1a1LXrl2lwMBAKSoqSjp8+LDk6OgodevWLUc7XT1+N23aJBkaGkr+/v7S9evXpUGDBknW1tZSbGzse9ufOnVKksvl0oIFC6QbN25IP/74o2RgYCBdu3ZN2SYvn8e6Ir/7t0+fPpKfn590+fJlKTw8XOrfv79kZWUlPXr0SNnG19dXateuXY5jNS4urrjeklrJ7/5du3atZGlpmWPfxcTE5GjD4/f/5Xf/vnjxIse+DQsLk+RyubR27VplGx6//2/Pnj3SDz/8IG3fvl0CIO3YsSPX9nfu3JFMTU2lMWPGSDdu3JB+++03SS6XS/v27VO2ye+/WXFgAadj1q5dm6cCTqFQSPb29tLChQuV97169UoyMjKS/v77b0mSJOnGjRsSAOnChQvKNnv37pVkMpn0+PHjIs+uropqP7i5uUlfffVVjvvy8uGj7Qq6f1u0aCGNGjXqg4/v2bNH0tPTy3GisWLFCsnS0lJKT08vkuyaoKiO382bN0uGhoZSRkaG8j5dPX4bNmwojRgxQvl7VlaW5ODgIM2bN++97Xv27Cl5e3vnuM/Dw0MaMmSIJEl5+zzWJfndv/+VmZkpWVhYSOvXr1fe5+vrK3Xu3Lmoo2qk/O7fj51X8PjNqbDH75IlSyQLCwspKSlJeR+P3/fLy9+g77//XqpZs2aO+7744gupbdu2yt8L+2+mChxCSe919+5dxMTEwMvLS3mflZUVPDw8cObMGQDAmTNnYG1tDXd3d2UbLy8v6Onp4dy5c8WeWZSi2A8hISEIDQ3FwIED33lsxIgRKF26NBo2bAh/f39IOrZ0Y2H2b0BAAEqXLo1atWph0qRJSElJybFdV1dX2NnZKe9r27YtEhISL/uxkQAAGVdJREFUcP369aJ/I2qqqP4/jo+Ph6WlJfT19XPcr2vH7+vXrxESEpLjs1NPTw9eXl7Kz87/OnPmTI72wJtjMbt9Xj6PdUVB9u9/paSkICMjAyVLlsxx/7Fjx2Bra4saNWpg2LBhePHiRZFm1wQF3b9JSUmoWLEiypcvj86dO+f4DOXx+/+K4vhds2YNevXqBTMzsxz38/gtmI99/hbFv5kq6H+8CemimJgYAMhxcpv9e/ZjMTExsLW1zfG4vr4+SpYsqWyjC4piP6xZswbOzs5o0qRJjvtnzpyJVq1awdTUFAcOHMDw4cORlJSEb7/9tsjyq7uC7t8+ffqgYsWKcHBwwNWrVzFhwgRERERg+/btyu2+7/jOfkxXFMXx+/z5c8yaNQuDBw/Ocb8uHr/Pnz9HVlbWe4+tmzdvvvc5HzoW3/6szb7vQ210RUH273
9NmDABDg4OOU7I2rVrh65du6Jy5cq4ffs2Jk+ejM8++wxnzpyBXC4v0vegzgqyf2vUqAF/f3/Url0b8fHxWLRoEZo0aYLr16+jXLlyPH7fUtjj9/z58wgLC8OaNWty3M/jt+A+9PmbkJCA1NRUvHz5stCfOarAAk6DTZw4ET/99FOubcLDw+Hk5FRMibRLXvdvYaWmpmLjxo2YMmXKO4+9fV/dunWRnJyMhQsXasUJsKr379vFhKurK8qUKYPWrVvj9u3bqFq1aoG3qymK6/hNSEiAt7c3XFxcMH369ByPafPxS5pp/vz52LRpE44dO5Zjoo1evXopf3Z1dUXt2rVRtWpVHDt2DK1btxYRVWM0btwYjRs3Vv7epEkTODs7Y+XKlZg1a5bAZNpnzZo1cHV1RcOGDXPcz+NX97CA02Bjx45F//79c21TpUqVAm3b3t4eABAbG4syZcoo74+NjYWbm5uyzdOnT3M8LzMzE3Fxccrna7K87t/C7oetW7ciJSUF/fr1+2hbDw8PzJo1C+np6TAyMvpoe3VWXPs3m4eHBwAgKioKVatWhb29/TuzSMXGxgIAj9887t/ExES0a9cOFhYW2LFjBwwMDHJtr03H74eULl0acrlceSxli42N/eD+tLe3z7V9Xj6PdUVB9m+2RYsWYf78+Th06BBq166da9sqVaqgdOnSiIqK0qkT4MLs32wGBgaoW7cuoqKiAPD4fVth9m9ycjI2bdqEmTNnfvR1dPX4LYgPff5aWlrCxMQEcrm80P9PqAKvgdNgNjY2cHJyyvVmaGhYoG1XrlwZ9vb2OHz4sPK+hIQEnDt3TvlNW+PGjfHq1SuEhIQo2xw5cgQKhUJ5sqzJ8rp/C7sf1qxZg06dOsHGxuajbUNDQ1GiRAmtOPktrv2bLTQ0FACUJxCNGzfGtWvXchQvBw8ehKWlJVxcXIrmTQqk6v2bkJCANm3awNDQEIGBge9MG/4+2nT8foihoSHq16+f47NToVDg8OHDOXop3ta4ceMc7YE3x2J2+7x8HuuKguxfAFiwYAFmzZqFffv25bje80MePXqEFy9e5Cg4dEFB9+/bsrKycO3aNeW+4/H7/wqzf7ds2YL09HT07dv3o6+jq8dvQXzs87co/p9QCWHTp1Cxun//vnT58mXlVPWXL1+WLl++nGPK+ho1akjbt29X/j5//nzJ2tpa2rVrl3T16lWpc+fO711GoG7dutK5c+ekf//9V3J0dNTZZQRy2w+PHj2SatSoIZ07dy7H827duiXJZDJp796972wzMDBQWr16tXTt2jXp1q1b0vLlyyVTU1Np6tSpKn8/6ia/+zcqKkqaOXOmdPHiRenu3bvSrl27pCpVqkjNmzdXPid7GYE2bdpIoaGh0r59+yQbGxudXUYgP/s3Pj5e8vDwkFxdXaWoqKgcU1dnZmZKkqTbx++mTZskIyMjad26ddKNGzekwYMHS9bW1soZT7/88ktp4sSJyvanTp2S9PX1pUWLFknh4eHStGnT3ruMwMc+j3VFfvfv/PnzJUNDQ2nr1q05jtXsv3+JiYnSuHHjpDNnzkh3796VDh06JNWrV09ydHSU0tLShLxHkfK7f2fMmCHt379fun37thQSEiL16tVLMjY2lq5fv65sw+P3/+V3/2Zr1qyZ9MUXX7xzP4/fnBITE5XnuACkxYsXS5cvX5bu378vSZIkTZw4Ufryyy+V7bOXERg/frwUHh4u+fn5vXcZgdz+zURgAacjfH19JQDv3I4ePapsg/+t2ZRNoVBIU6ZMkezs7CQjIyOpdevWUkRERI7tvnjxQurdu7dkbm4uWVpaSgMGDMhRFOqKj+2Hu3fvvrO/JUmSJk2aJJUvX17Kysp6Z5t79+6V3NzcJHNzc8nMzEyqU6eO9Pvvv7+3rbbL7/598OCB1Lx5c6lkyZKSkZGRVK1aNWn8+PE51oGTJEm6d++e9Nlnn0kmJiZS6dKlpbFjx+aYBl9X5Hf/Hj169L2fJwCku3fvSpLE4/e3336TKlSoIBkaGkoNGzaUzp49q3ysRYsWkq+vb472mzdvlqpXry4ZGhpKNWvWlIKDg3M8npfPY12Sn/1bsWLF9x6r06ZNkyRJklJSUqQ2bdpINjY2koGBgVSxYkVp0KBBQk/ORMvP/h09erSyrZ2dndS+fXvp0qVLObbH4zen/H4+3Lx5UwIgHThw4J1t8fjN6UN/n7L3qa+vr9SiRYt3nuPm5iYZGhpKVapUyXEunC23fzMRZJKk5XM6ExERERERaQleA0dERERERKQhWMARERERERFpCBZwREREREREGoIFHBERERERkYZgAUdERERERKQhWMARERERERFpCBZwREREREREGoIFHBERERERkYZgAUdEREVOJpNBJpNh+vTpoqOorSNHjkAmk8HOzg4pKSmi4+iUCxcuQCaToWTJkoiLixMdh4goX1jAERHpsGPHjimLrbdv+vr6KFmyJCpXrozmzZvju+++w7Zt2/D69WvRkVXi1atXOHjwIObMmYPOnTvDwcFBuS88PT2L/PUUCgVGjx4NABg3bhxMTU1zbZ+RkYF169bB29sbFSpUgJGREUqXLg1XV1d8/fXX2LJlS6GynDhxApMnT4anpyfs7e1haGgIS0tL1KpVC8OHD8fVq1c/up3p06e/91h63+3YsWMf3M6dO3fg4+MDW1tbGBsbw8XFBQsWLEBmZmaury9JEho3bgyZTIY1a9bk2rZBgwZo27YtXr58yS8ZiEjzSEREpLOOHj0qAcjzzcbGRpo1a5aUkZGR63az20+bNq143kghVapU6YPvuUWLFkX+egEBARIAqXTp0lJSUlKuba9cuSLVqlUr138XKyurAmcpX778R//d9fT0pO+//15SKBQf3M60adPyfBwdPXr0vdu4efOmVKpUqfc+p0uXLrm+/urVqyUAUqNGjXJtl+3UqVMSAMnQ0FB68ODBR9sTEakL/aItB4mISFMNGzYMw4cPV/6elJSEly9f4urVqzh8+DAOHTqEZ8+eYcqUKQgKCsLu3bthY2Pz3m1JklRcsYvE23nt7OzQoEED7N69W2WvN2fOHADAkCFDYGZm9sF2V69eRcuWLREXFwdjY2N8/fXXaNOmDcqWLYvXr1/j1q1b2LdvH06ePFngLNHR0QCAatWqoVu3bmjatCkcHByQmpqKo0ePYsmSJXj58iUWLFgAuVyOuXPnfnSb165dy/XxypUrv/f+4cOH48WLF7C3t8fChQtRuXJl7Nq1C4sWLcKOHTuwceNG+Pj4vPO8uLg4TJo0CXp6evDz84NMJvtoxiZNmqBRo0Y4e/YsFi5ciKVLl370OUREakF0BUlEROK83QP3sd6y69evS3Xr1lW2b9q0qZSenl48QVVs4cKF0tatW3P0xGS/z6LugTtw4IBy2+Hh4R9sl5qaKlWvXl0CIFWoUEGKjIz8YNvC/Ds0btxY2rdv3wd7raKioiQbGxsJgKSvry/dvn37ve3e7oEriAcPHiiff/r06RyPDR48WAIgtW7d+r3PHTJkiARAGjZsWL5ec
+nSpRIAycLCQoqPjy9QbiKi4sZr4IiIKE9cXFxw6tQp1K1bFwBw6tQp+Pn5CU5VNMaNG4du3bqhfPnyKn+t7Ouz6tWrBycnpw+2W7RoESIjI6Gnp4fNmzfD0dHxg20NDQ0LnOf06dNo27btB3utqlatiqlTpwIAMjMzsXPnzgK/Vm5CQ0MBABUrVkTjxo1zPNa7d+8cbd528eJFrF69GqVLl1b2bObVF198AblcjsTExEJdR0hEVJxYwBERUZ6ZmJjgr7/+Up7sL1q0CBkZGe+0y20WynXr1ikfv3fvHl6/fo3FixfD3d0dVlZWKFmyJDw9PREcHJzjeYmJiViwYAHq1q0LS0tL/F979x4UVfn/Afy9oEiAgtwGq1nQJEcZEESCBrlYCxkylFFI6WAXU2BrgkqgcnKGqcCa1JlIhcEBpiGlUYexGboAw5KTIpdkjBAj1ATChI3LgoLFnu8f/PbMIrvsLnJxf75fM8w8nvM5zz5nD3/w8XnO53FwcEBERAQqKytn5F5nwvDwME6dOgUAiI2N1Rs3OjqKw4cPAwBkMhkCAwNnZXz6rF+/Xmy3tbXNyGf09/cDAJYsWTLhnJub27gYDbVaDblcDrVajezsbCxevNikz3R1dUVISAgA4OjRo1MZNhHRrGMCR0REJvHy8kJERASAsfen6urqptzXwMAAQkND8c4776ChoQEDAwPo7e1FdXU1oqOjsX//fgDAtWvX8PjjjyM9PR2NjY1QqVTo7+9HRUUFIiIiUFxcPC33NtPOnTuHW7duAQCCgoL0xp05cwadnZ0AgJiYGPH48PAw2tra0NnZidHR0ZkdrJaRkRGxbWlpOSOfYW9vDwC4fv36hHOaY5oYjfz8fNTW1iIoKAivvvrqlD5X8xxOnz6NoaGhKfVBRDSbmMAREZHJZDKZ2L6bAho7duxAQ0MDkpOTUV5ejvr6euTn5+PBBx8EMLa0sampCc899xwuX76MjIwMKBQK1NXV4cCBA7C3t4cgCEhKSsKNGzfu+r5mmua7kkgk8Pf31xtXU1Mjtr29vdHa2orY2FgsWrQIy5cvx8MPPwwnJyckJCTM2IyYturqarG9cuVKg/GRkZFwdXWFlZUVXF1dER4ejuzsbPT29uq9ZvXq1QCAq1evor6+fty5Y8eOAQB8fX3FY0qlEu+//75JhUt0eeyxxwAAt2/fxtmzZ6fUBxHRbGIVSiIiMtmaNWvE9u+//z7lfmpra3Hy5Ek8++yz4jF/f38EBATAz88ParUaTzzxBAYGBlBdXT1uKeHatWvh6emJjRs3QqVSobi4GKmpqVMey2w4c+YMAGDZsmUTZpO0NTc3i+2WlhZERUVNmB3q7+/HV199hdLSUpw8eXJcUj2dbt68iQMHDgAAFixYgGeeecbgNeXl5WK7u7sb1dXVqK6uxt69e1FYWKizD6lUitDQUPz000/YtGkT9u7dCw8PD3z77bfIy8sDAGzbtk2Mf++996BUKpGUlDTu99FU2ol0Q0PDjH2PRETThTNwRERkMicnJ7E92ayKIXFxceOSNw0fHx+sW7cOwFgCkJKSovM9sKioKLi7uwO4u5nA2dLR0QFg7N2ryfzzzz9i+6233sLQ0BBSU1PR2tqKkZERtLW1YdeuXZBIJFCpVHjhhRdw7dq1GRlzenq62LdcLhdnR3Xx9vYWt5loaGhATU0NioqKEBkZCWBsw/TY2Fh89913Oq8/ePAgHBwc0NHRgS1btiA4OBjZ2dkQBAExMTHYunUrAKCurg5HjhyZUuGSO2k/i8uXL99VX0REs4EJHBERmczOzk5sq1SqKfcTHx+v95xmSZ2hOB8fHwDm8cd3d3c3ABgstqE92zY8PIzMzEzs27cPy5cvh5WVFZYtW4ZPP/1UTF76+vqQlZU17eMtLi5GTk4OgLGlkx999JHe2JSUFFy4cAGZmZmIjo7GmjVrEBgYiISEBPzwww9iUZbR0VFs374dw8PDE/rw8vJCbW0tNm/eDCcnJ1hZWWHFihXIysrC8ePHIZFIoFarkZycPKFwSWtrK+Lj4+Hi4gJra2v4+Pjg4MGDBvcktLa2xgMPPABA9/t3RET3GiZwRERkMu2kbdGiRVPu59FHH9V7zsHBwaS4u0kkZ4tmZs1QAmdtbS22nZ2dkZ6erjNu165dYoXGb775Zlo3UFcoFHjttdcAAI6Ojjhx4oSY6Oii/bx02blzp9jfX3/9hRMnTuiM8/T0xLFjx9DT04ORkRG0tLQgIyMD8+fPBwDk5uaivr4egYGBYuGSixcvIjAwECUlJbh9+zbc3d3R1NQEuVyON9980+C9ap4Hi5gQkTlgAkdERCbr6ekR246OjlPux8bGRu85CwsLk+JmsyrjVGkSM00lSn0WLlwotsPDw/Xu8zZv3jw8+eSTAMaSw+mahayvr0dMTAxGRkZgZ2eHsrIyo4qXGLJz506xrV0YxVg9PT344IMPJhQuSU5ORm9vL2JjY9Hd3Y1Lly7h9OnTsLGxwZdffmlwea3meWiSRCKiexkTOCIiMtn58+fF9ooVK+ZwJObFxcUFwPh33HTR3lDc0Obi2uc1SzTvxm+//YYNGzZApVJhwYIFKC0tnbZ96FatWiW2NdskmCI9PR29vb3YsWOHWHykvb0dCoUC8+fPR05OjpjsBgcHizN0RUVFevtUq9Xi/nKGZhGJiO4FTOCIiMhk2lUGNcVGyDBNAmeo8IuXl5fYNjSzqH1+3ry7Ky7d1taGiIgIKJVKzJs3DyUlJeIM33SYaql/YGxrhYKCAjg7O+OTTz4Rjzc2NgIAHnnkEXE5qUZwcPC4GF36+/uhVqsBjFXCJCK61zGBIyIikzQ1NaGyshLA2OzP2rVr53hE5sPb2xvAWKKkSRp0CQ0NFduGlkVq7wP30EMPTXlsHR0dkMlk6OrqgoWFBYqKiozaMsAU2tsjTFbN8k5qtRpyuRyCICArK2vcO4Sa2TNd2zJoZtQ0Mbpob4OhnTgTEd2rmMAREZHRbt26hYSEBLFYxrvvvnvXsz73k5CQEADA4OAgLl68qDdu6dKl8PPzAwBUVVXpTUBUKhUqKioAjM1ALVmyZErjunHjBmQyGa5evQoAOHz4MF566aUp9TWZ3NxcsR0WFmb0dYcOHcIvv/yCwMBAsRCKhiZx02zRoK29vR3A5IV26urqxPZ0LRUlIppJTOCIiMgozc3NWLdunfj+W1hYGJKSkuZ4VOZFk8ABY5uYTyYjIwPAWGXElJQUnTFvv/02BgYGAACJiYk6YyQSCSQSCTw8PHSe7+vrw1NPPYVLly4BAPbv34/XX3990rHd6ddff8Uff/wxaUxeXh7y8/MBAG5ubti0aZNRfXd3d2P37t0TCpdoaLab6OzsnFAY5euvvwYA+Pr66u1f8xykUinf5yQis8D/NiUiIgBjszBNTU3iv4eGhtDb24sLFy6gsrIS5eXl4sxbUFAQjh8//v+mal9jY6Pe96SuX7+OwsLCcceef/75cXvhGcvDwwM+Pj7id/rKK6/ojY2Li0NRURHKyspQWFiIrq4uJCUlQSqVor29
Hbm5uSgrKwMA+Pn54Y033jB5PCMjI9i4caN471u2bIFMJhv3e3AnW1tbLF26dNyxhoYGbN++HevXr8fTTz8Nb29vODk54b///kNLSwuKi4vx448/AgAsLS2Rl5cHW1tbo8aYlpaGvr4+JCYmioVLtEmlUoSHh0OhUGDz5s34/PPPIZVKUVBQAIVCAQBISEjQ2bcgCKiqqgIAoxNKIqI5JxAR0X2rqqpKAGD0j4uLi/Dxxx8L//7776T9auL37Nkz4VxBQYF4/sqVK3r72LNnjxg3mW3btgkABHd3dyPu2PBnGfMz2bgN+eKLLwQAgp2dnTA0NDRprEqlEiIjIycdS0BAgNDV1aW3D02cru/nypUrJt03ACEsLGxCP9rPdLIfJycnobS01Ojv6ueffxYkEong7OwsKJVKvXHNzc3C4sWLdX5mYmKi3usUCoUYV1dXZ/S4iIjmEmfgiIhoAgsLCyxcuBD29vZwd3eHv78/QkJCEB0drXdPMjLO1q1bkZaWhsHBQZw6dQrx8fF6Y+3s7PD999+jpKQERUVFaGxshFKphIODA3x9ffHiiy8iISEBlpaWs3gHE0VFReHIkSM4e/Yszp8/j7///htKpRKCIMDR0RGrV6/Ghg0b8PLLLxu98fvo6Oi4wiWT7Te4cuVKnDt3Drt370ZFRQUGBwfh6emJxMREyOVyvddpllgGBASwGA8RmQ2JIPzfehgiIiKaFcnJyTh06BBkMtm4LRlo9qhUKkilUvT19eHo0aOTJtJERPcSFjEhIiKaZR9++CFsbW1RUVGBmpqauR7OfSknJwd9fX1YtWoV4uLi5no4RERGYwJHREQ0y9zc3JCamgoAyMzMnOPR3H+Ghoawb98+AMBnn30GCwv+OURE5oPvwBEREc2BtLQ0cQ+9mzdvwsbGZo5HdP/4888/IZfL4ejoiKioqLkeDhGRSfgOHBERERERkZngmgEiIiIiIiIzwQSOiIiIiIjITDCBIyIiIiIiMhNM4IiIiIiIiMwEEzgiIiIiIiIzwQSOiIiIiIjITDCBIyIiIiIiMhNM4IiIiIiIiMwEEzgiIiIiIiIzwQSOiIiIiIjITPwPFeOcE0XI2cUAAAAASUVORK5CYII=", "text/plain": [ "<Figure size 1000x1000 with 1 Axes>" ] @@ -736,7 +736,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 25, "id": "e87031f5", "metadata": {}, "outputs": [ @@ -4232,9 +4232,9 @@ } }, "text/html": [ - "<div> <div id=\"33c9959c-7097-4bf0-b731-9e7719c1f40e\" class=\"plotly-graph-div\" style=\"height:525px; width:100%;\"></div> <script type=\"text/javascript\"> require([\"plotly\"], function(Plotly) { window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById(\"33c9959c-7097-4bf0-b731-9e7719c1f40e\")) { Plotly.newPlot( \"33c9959c-7097-4bf0-b731-9e7719c1f40e\", [{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
[Plotly figure data omitted: a scatter-plot matrix of the first five principal components (PC1: 44.4%, PC2: 19.6%, PC3: 6.4%, PC4: 4.3%, PC5: 4.1%), one trace per cell type (B_and_plasma_cells, Dendritic_cells, Endothelial, ...), each carrying per-sample coordinate arrays for the five components.]
4.1%\",\"values\":[-3.53247467517671,5.768223536216525,8.125300580187899,6.628823986412913,-6.257484671220999,3.262463113759699,-1.707841660587095,-5.016325920270033,11.533660198985675,0.3300333775747575,-4.650210694822967,3.8696920161672796,8.57457901798913,14.440166818085535,1.5665858917247357,-4.8202440698491165,-1.2271783532642901,-2.33938987576651,-0.2742238881723097,2.6548571664584464,4.16467215490295,-0.4080871350317838,-5.070617110221438,2.16264825242824,2.5351942439411035,2.0040153429394723,1.3282935323819118,-5.1433580079239265,-4.297627400218971,4.689185524475739,-7.413976404898676,-7.988201298996204,-1.9396630078854489,4.283981271662856,-4.830483858094691,-1.3600765148464327,-2.5372273416752433,-3.863188002546731,-6.3327091664374215,-4.841944596029363,-5.723214349993604,-1.0151775382216854,-1.156417566612599,2.8141626070169568,4.72686504323849,1.761689242302201,2.4299615592048696,-0.08356726795610661,5.05518840116812,6.2525507834590925,10.394449892254855,2.374220273935354,0.6537208558057839,5.930424225009034,-1.1792448077262805,1.6761866558250202,0.24743320461828974,3.2910534135165155,-4.2911544082739645,7.015753346628121,-1.850413334505277,9.852042261597411,8.677619309489252,9.95638558425188,7.919403240869292,4.854218475809332,3.2825311184550996,1.1658405763017368,7.4154413137333215,2.2031711083495855,-1.887162455095654,10.13266694167675,-0.7788804712880255,11.314464483589433,7.722434421416114,-4.086655520636034,-7.849750601126885,-10.691719274036075,-4.257430153415374,-6.175094858414248,-1.9209618692668409,-7.554296530637513,1.1513194307196148,0.695036931624119,-4.953646672521905,-5.0326319565392925,-4.826200065701004,-8.343598811890974,-5.257904715866221,-1.9822085861394698,-2.782561944136109,-0.9965504030262511,-6.6542329451740425,-4.107550527615165,-2.2548753118898412,1.3536254232870983,-4.494874860552068,2.3394608210011985,-6.323157970170557,-3.934558840706071,-5.423640814409803,1.3990474148183258,-2.7564737073626606,5.96597948976063,0.5592597589393596,-1.5619367612026733,1.1141846457402496]}],\"hovertemplate\":\"color=Cancer_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Cancer_cells\",\"marker\":{\"color\":\"#ab63fa\",\"symbol\":\"circle\"},\"name\":\"Cancer_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
44.4%\",\"values\":[23.75110565724524,14.322028315840143,24.19870228150203,16.66877542544105,19.326898410904015,16.028888207296607,23.10289940356126,23.367599731697,19.391412860148883,25.84302273251005,25.982968934845996,18.33542013861662,28.076275982535215,26.324208003380782,25.0654436941784,17.743013142997324,22.231937878191737,16.386639030872637,28.27541862511292,14.988851208839119,17.980055203984985,16.506508380303174,16.272063679102217,25.826521108335246,24.084289268329712,25.61615596573362,16.21721681797106,18.696563599897427,15.954870854755669,19.058377741005675,21.51299437250937,26.427841287625185,25.278407937036143,16.459775325495837,18.43807914928853,20.87829294431268,16.962115290165002,21.2759871665478,28.42965002457402,11.321749016258952,17.834987985817776,26.174005288920696,25.741084060126823,17.032604984044674,13.03581904258421,21.25575911249913,19.446043365592956,13.341856485901886,19.561235197600432,19.54577981248739,15.406482271024414,11.145819650953666,16.970356954674017,21.638218215947532,21.960050105516228,25.440344658559546,21.517619790117664,13.713833352596552,19.540758434487845,14.682548689289408,21.196931347077612,19.88356657325114,14.201828314872856,14.789181562334445,19.031030546790102,21.896680263795908,23.752003145258605,20.124675255264336,16.847165894383668,18.84424052948902,18.39792388186547,15.033723243458939,18.15846135153694,11.140573625038563,13.969244768639303,-1.8018137105633167,18.15694789748288,20.958090290263954,20.89342607737699,26.078245953047787,28.149079883037526,21.625011307886425,20.757725829902817,21.182010489173813,20.80917750541409,24.089827596106293,25.741302074546184,21.08470946759239,23.762997638449818,26.303434490536297,23.531367191067595]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 19.6%\",\"values\":[-8.804772347189626,0.12611296732409769,-8.83651859018781,-8.859666764530232,-4.407311604031676,6.015268410493691,-3.215844558668181,-4.058224095570281,-6.916112397888014,-9.603943699125992,-5.139938286603443,3.9295833826921918,-11.653060341961083,-8.271516891902703,-7.5749507184726825,-1.9287305557212218,-4.286591947137925,-6.9182110100945575,-5.904990022461607,-7.000450082973784,-7.57128653627603,-0.805384228946353,-0.7093593360360106,-8.360145300476427,-12.4139504089989,-12.42567014908429,-15.071127400745695,-12.152836910293534,-12.924875000212058,-16.023820597569543,-11.245422772478207,-13.696286178030746,-13.077634442379857,-16.45018373818308,-9.354164873364066,-9.728081973678892,-6.819918596368263,-7.428964410576606,-15.244957291957506,-14.713557305760009,-5.427452878492797,-11.82584571329056,-11.778747319841072,-4.495621748873798,-8.144088672894167,-12.23409558469896,-12.491559155030563,-13.666585622774772,-10.02336388801459,-6.351906537940556,-7.62440187134324,-4.938223864716761,-20.618208309782666,-3.15682351024416,-10.316157852268248,-6.7474260120542855,-3.749894738237555,-8.671621758231016,-4.545622315079659,-13.008796290451112,-3.9831926345165884,-11.514058022019768,-12.708162378919672,-1.6432034092490444,-4.170175076447997,-8.136871561222655,-9.029114060300827,-8.414786655644392,-12.583864471143496,-3.003476287500069,-4.857517804353691,-1.9307716177732945,-5.1715174073781425,3.51147362522518,5.012552330278014,1.391383146101818,-6.429953990941283,-3.357563907102994,-1.3795784036420544,-3.7095590979437496,-7.392869497583854,-8.5766334613657,-3.7807858630531994,-5.896582131989757,-9.586271368807832,-0.5355333404447371,-6.817398355143901,6.537440085218526,-12.258602753181101,-9.910580127364446,-8.076878315539172]},{\"axis\":{\"mat
ches\":true},\"label\":\"PC3: 6.4%\",\"values\":[-4.502578975533359,-7.566983749339649,1.9344802953870932,2.546639032553388,-5.466201607394865,-6.188046065078826,-9.76176876625977,-3.292314743903346,-7.096997243640784,-0.6685496822797827,-3.4264561460270877,-7.099213877881745,6.007792480983992,-1.95570817791728,-2.4573365008070143,-11.010191822279577,-0.8034928460976594,2.3574543368963323,-2.7647463157782286,-7.188416040499128,0.9378484385826871,-0.5387943699313409,-6.394289496917189,6.453347160869805,4.3644221250057225,8.81460090067186,1.5696007656704631,3.629254098575266,-3.609392292417441,4.7048972504781785,-3.3874249219127073,3.268832474117239,3.2634893117411266,3.2045192364315365,2.5254452762591253,-1.1964290456483078,-2.7447796929074535,-5.715557174833307,4.186327285112744,-5.324638142540309,-4.299794779524179,2.8283276588701973,3.3865029274798313,-8.806741994048704,-1.6575576690955773,4.647317724886948,1.9007074798123071,-6.785664831996828,-6.265735519774147,-7.175811449333064,-8.455624926950659,-3.0619273928571955,-3.924432605197506,2.110875514825002,-5.570326540980256,-3.917476922035218,-8.151271079562687,0.10275835249026849,-6.780366867452157,-2.2646738372170527,-2.3973369595907936,0.5343028294558446,-2.4411922437679565,-6.831622542426625,-4.919933447782691,-0.9359179558450758,0.5750298747435657,-3.0766408963021092,0.45836891724257833,-1.8047781326967232,-5.902922300568403,-9.020948327173958,1.7154304054484903,-6.722768086634483,-4.724879151612201,6.863253831736474,2.6833013457146757,-3.266251176610305,-6.476973873958226,3.9675662220882044,8.694177866627573,2.8089850440748263,-12.46929321532211,-9.617901395655728,3.8759700857590813,-3.787422052459241,5.463099188313642,-2.3077784257994782,-6.5008468550259675,-6.618039168476477,-6.070477017483836]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
4.3%\",\"values\":[2.4189833844941537,-7.933867586564627,4.67838486816265,2.7516043382004756,-2.261753290851034,-8.690637485020646,-3.0741681858777925,-0.5603980661805058,-6.462589591873806,4.979912461935934,3.700311349105574,-6.944380275476744,4.113034491465319,4.228202799347036,8.32136374258012,-5.8742605283990175,3.8574406966133967,1.2433368507278095,4.324334321428512,-8.334186678121654,4.284439395451718,0.10953282854102586,-6.1123565704750185,6.8844635972949915,7.088033418739075,6.7158124053077275,2.8199369954834475,1.0958161128618418,-3.074291730493877,4.399828306824283,-0.8333912506713761,1.2324186659011054,5.803548773041896,2.657108087368794,0.974406548248435,-1.5457184570451503,0.591531414188112,-2.26525274594123,6.178492002039036,-3.0058383687782317,4.842475252621064,5.069254294119186,6.21061508951048,-4.170668983661663,-0.2902126449459262,5.679441136023085,2.60536462266209,-0.7988919278485883,3.240029160006948,-2.9239828429088486,-0.06379373519361353,7.005378954955237,1.5954099922102505,7.916075318265888,-2.8749666709766935,5.554835341655426,-2.8750703540931104,-0.9481942926530862,11.251528924623216,10.79045877970119,6.180054940260417,4.943262381654614,1.9830658662605611,-2.7473391179290374,5.351855181674817,1.9493599410491926,6.149093201739125,3.1283351289472883,4.531110722512329,-6.32602442451808,-1.5806549333038782,-7.106698764225739,5.698742046090197,3.203756942460685,-7.924711925119826,-6.823645435268168,5.631788606247902,-5.0613911560633085,-4.818541810766857,2.8799526340713077,4.815760831273391,4.198050270868248,-2.0925300062924075,-7.852321908330172,8.435865892604312,-2.6309443301918836,6.629144270919462,-2.1794067075608137,-1.8418526875094712,-2.700689771083093,-3.1516631414189487]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 4.1%\",\"values\":[2.137809967080481,11.62483895454584,4.965888208437154,2.742667886001337,6.253834558040233,4.916700850253399,7.366606966011671,6.872825854309317,3.67339420577236,6.526089258354867,9.15922711385399,-0.6871959057888792,4.34375823443145,8.265640964118264,4.507889875995791,0.13378933249869585,11.33064947210335,3.8580951789940428,-0.5662694067072089,-4.952874013488297,0.44448596151165254,6.512656395037128,-2.221343381530472,4.578584910120751,0.9240578604835219,4.39655256878218,-2.2023322053734296,-2.187097999641927,5.7129124910238245,-3.8966378916219653,-0.9891805236851976,1.820802103468625,1.2387801015239364,-1.9654263929679865,-1.5126937453180092,0.3819485793712203,3.8673072877969803,3.053441920384924,4.751040790872938,-1.102847897706489,11.440861438631625,2.928968830595677,3.3343520978704966,-0.9888268191726959,0.7021031397388202,1.838745120663285,-0.9135842055662288,3.516140601178121,-1.3078471301088344,-5.804192352039123,2.3792615303448823,-3.480411411738516,3.691329855807127,1.6725173592468408,2.2880850302493365,6.730039064175665,-3.3310190810372813,5.220016521488272,5.557372773022285,4.301536992594885,-0.26010280217168624,6.618396381778686,-2.2677844397120968,-0.9658365713855617,1.1256642903272112,6.193302153633052,6.6541433741188385,-2.107761963852562,-2.128446527438801,-1.5132472569181568,-2.6393991364499225,12.237758350527244,5.255338760194468,3.293473043675267,6.108782294014787,0.6514228587086544,4.949697622500486,-1.8916800087289636,-4.802029505741472,-0.0751077900754249,6.1337929433956715,4.153868124900406,7.423162050626999,3.350432421364726,1.468796554823334,-0.32628354228787604,5.32448595300229,-2.7457036493226443,4.010223498855424,4.353346217848459,4.372543825493868]}],\"hovertemplate\":\"color=Fibroblast<br>%{xaxis.tit
le.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Fibroblast\",\"marker\":{\"color\":\"#FFA15A\",\"symbol\":\"circle\"},\"name\":\"Fibroblast\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}}], {\"template\":{\"data\":{\"histogram2dcontour\":[{\"type\":\"histogram2dcontour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"choropleth\":[{\"type\":\"choropleth\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"histogram2d\":[{\"type\":\"histogram2d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmap\":[{\"type\":\"heatmap\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmapgl\":[{\"type\":\"heatmapgl\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"contourcarpet\":[{\"type\":\"contourcarpet\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"contour\":[{\"type\":\"contour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"surface\":[{\"type\":\"surface\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"mesh3d\":[{\"type\":\"mesh3d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"scatter\":[{\"fillpattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2},\"type\":\"scatter\"}],\"parcoords\":[{\"type\":\"parcoords\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolargl\":[{\"type\":\"scatterpolargl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"bar\":[{\"error_x\":{\"color\":\"#2a3f5f\"},\"error_y\":{\"color\":\"#2a3f5f\"},\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"bar\"}],\"scat
tergeo\":[{\"type\":\"scattergeo\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolar\":[{\"type\":\"scatterpolar\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"histogram\":[{\"marker\":{\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"histogram\"}],\"scattergl\":[{\"type\":\"scattergl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatter3d\":[{\"type\":\"scatter3d\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattermapbox\":[{\"type\":\"scattermapbox\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterternary\":[{\"type\":\"scatterternary\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattercarpet\":[{\"type\":\"scattercarpet\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"carpet\":[{\"aaxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"baxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"type\":\"carpet\"}],\"table\":[{\"cells\":{\"fill\":{\"color\":\"#EBF0F8\"},\"line\":{\"color\":\"white\"}},\"header\":{\"fill\":{\"color\":\"#C8D4E3\"},\"line\":{\"color\":\"white\"}},\"type\":\"table\"}],\"barpolar\":[{\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"barpolar\"}],\"pie\":[{\"automargin\":true,\"type\":\"pie\"}]},\"layout\":{\"autotypenumbers\":\"strict\",\"colorway\":[\"#636efa\",\"#EF553B\",\"#00cc96\",\"#ab63fa\",\"#FFA15A\",\"#19d3f3\",\"#FF6692\",\"#B6E880\",\"#FF97FF\",\"#FECB52\"],\"font\":{\"color\":\"#2a3f5f\"},\"hovermode\":\"closest\",\"hoverlabel\":{\"align\":\"left\"},\"paper_bgcolor\":\"white\",\"plot_bgcolor\":\"#E5ECF6\",\"polar\":{\"bgcolor\":\"#E5ECF6\",\"angularaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"radialaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"ternary\":{\"bgcolor\":\"#E5ECF6\",\"aaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"baxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"caxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"coloraxis\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"colorscale\":{\"sequential\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"sequentialminus\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"diverging\":[[0,\"#8e0152\"],[0.1,\"#c51b7d\"],[0.2,\"#de77ae\"],[0.3,\"#f1b6da\"],[0.4,\"#fde0ef\"],[0.5,\"#f7f7f7\"],[0.6,\"#e6f5d0\"],[0.7,\"#b8e186\"],[0.8,\"#7fbc41\"],[0.9,\"#4d9221\"],[1,\"#276419\"]]},\"xaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\"
:2},\"yaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\":2},\"scene\":{\"xaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"yaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"zaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2}},\"shapedefaults\":{\"line\":{\"color\":\"#2a3f5f\"}},\"annotationdefaults\":{\"arrowcolor\":\"#2a3f5f\",\"arrowhead\":0,\"arrowwidth\":1},\"geo\":{\"bgcolor\":\"white\",\"landcolor\":\"#E5ECF6\",\"subunitcolor\":\"white\",\"showland\":true,\"showlakes\":true,\"lakecolor\":\"white\"},\"title\":{\"x\":0.05},\"mapbox\":{\"style\":\"light\"}}},\"legend\":{\"title\":{\"text\":\"color\"},\"tracegroupgap\":0},\"margin\":{\"t\":60},\"dragmode\":\"select\"}, {\"responsive\": true} ).then(function(){\n", + "<div> <div id=\"3fd5eec1-e3a1-4dfb-84aa-ef2789f90938\" class=\"plotly-graph-div\" style=\"height:525px; width:100%;\"></div> <script type=\"text/javascript\"> require([\"plotly\"], function(Plotly) { window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById(\"3fd5eec1-e3a1-4dfb-84aa-ef2789f90938\")) { Plotly.newPlot( \"3fd5eec1-e3a1-4dfb-84aa-ef2789f90938\", [{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 44.4%\",\"values\":[-13.485832256179549,-7.189660331361448,-8.220685644381215,-7.701462642766967,-16.37611215549606,-23.99375797890234,-26.467940888996168,-16.28857376527097,-15.841271945940584,-16.407094138131175,-26.280660971436106,-18.53378351829238,-16.400625407528892,-14.959303899657753,-11.03427444875975,-16.889173519434113,-14.377944949905272,-25.852894720513063,-17.20224794162716,-21.282801445451913,-16.78254565038487,-13.899368208850296,-14.680034759215923,-14.496280572153593,-15.529024962946355,-10.941373809405682,-8.407487791598685,-18.95976747480897,-7.074122575028374,-10.595129173402064,-8.463547103212836,-18.78662441569911,-15.81044447267235,-12.113259288430468,-17.36159382192693,-19.37463615462794,-3.8289441534588,-10.312421109445435,-9.626036346156582,-21.934972165157962,-18.59856974463521,-13.22099217502711,-14.320925643547273,-5.923269316442223,-7.061313469262377,-3.944701989289313,-9.660898418114465,-7.30470371173166,-15.045090296895768,-11.151110773281282,-15.85856124287331,-23.449157581782252,-21.934625052416557,4.066203823304633,6.007786962489893,8.740962758176043,-17.61632376519237,-17.356640316302375,-17.607345920326576,-17.464077582763927,-13.729716317213825,-14.321895155355548,-20.200658317368948,-18.301900411654596,-17.094465967375648,-8.889052754642023,-11.732913064889617,-9.580262710676497,-23.522103260833653,-18.56301451932344,-18.1767034128914,-15.852530267493968,-17.195105335603532,-18.717062349017088,-19.801178815721492,-25.689092615441574,-17.037977843590106,-14.861687134223686,-17.99023702861699,-11.688657323453327,-16.37210610842098,-9.077776287571403,-22.4265504030788,-20.85288357145655,-9.60322186256329,-1.4732543991182947,-15.307086382884584,-11.633202819032437,-14.635737944705212,-13.32997701552544,-7.766076540815828,-12.543719419070282,-0.3109307674765721,-13.481402222022238,4.3933382334141,-15.57726217020302,-3.9803706813807342,-0
.5768948953929787,-14.42036129924054,-0.69306316377745,-10.239768666708358,-10.40307891080748,-6.426077760318902,-16.59075314499131,-6.322104203358188,-13.220900565034007,-21.86440745984765,-4.795233681818559,-1.5929444362755567,-9.8829889445808,-8.566361446044263,-6.064387387185558,-1.9365678308328782,-3.919382926029635,-2.56990998963255,2.4472241108761716,9.930199407043888,-11.743594278342732,-6.218438031622158,-14.68541324560725,-12.981932255677755]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 19.6%\",\"values\":[0.8561162604169961,-2.4223467420639695,4.5029932245386135,-2.2370775519136727,0.39960131249443775,9.334022350116772,16.90855951675804,2.4874644565033326,10.005882515533411,7.955126680884188,3.7365730262662424,3.302780339323917,12.062114315565086,8.534077957709297,-0.8019748447172431,3.981480948773354,-1.0315849598370486,14.499538694182583,8.521195095144902,-0.5120749757691874,10.611826692358296,0.6850774843587853,-0.5854356717545519,1.5127650910848076,2.043678550170685,-0.16926230608579454,0.3749895917357164,10.298507921039093,-11.24191020263385,-2.6153135686199063,-7.459968983820739,-0.10017948308922842,-0.27109033188296683,-0.3578967802001422,2.4326520025177265,4.659847360227568,0.11839958288924213,-6.076967000606706,-1.8923275149873486,7.814081016708776,6.910821898673213,2.6874801851128787,-8.536393364110056,-8.18968108423394,-7.74281569396989,-12.394825321325818,-11.100238815712412,-10.08506169425673,1.4500272344526721,-4.999781245454626,0.6424833109123402,10.626953350987772,6.188736878922587,-10.72204595410615,-7.611869284410833,-8.360238572334763,7.190605442172762,3.437290091499559,6.570770660127417,3.039442008869589,-1.0746137463166685,-0.9270097714770797,-0.3292868221432526,-2.539206775826631,0.5682251827505725,6.521295774209009,2.3197826097652348,-4.357208226805581,10.120253244709069,2.5610687241447465,4.578926113533243,-0.13766157618600028,-0.007592595710827654,4.37977208956866,7.118393215525675,10.138672417431149,9.3017900491864,4.887210957851792,1.9582667318175417,-6.379161313120736,3.8510544846059958,-2.233689625509276,10.858186514642032,4.9792412689143495,-8.012301138712,-3.780667763437834,-2.7342780445085078,-2.0133173177636294,10.919179184841427,2.5223038853853383,-0.6759468640141024,10.53476989250878,10.416251429675,6.8035162361014,3.4893888900867913,10.450300045179995,6.250940359176617,14.523617385086789,7.778940530895499,5.362127181491685,4.279827371556711,1.855097159539953,7.524172129362892,12.409724741461117,6.002245859177169,7.847263113619391,18.35501581617254,4.031347234233916,5.045961006394364,5.567663363457296,6.892388931944794,8.530895336485546,4.44726607084926,5.498825656354592,2.6620116713417774,8.667077697383373,-1.3600484547916223,6.162427467556856,7.4108598213551655,3.391285555966408,7.852393878329946]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
6.4%\",\"values\":[-6.166738079722642,-1.3375025995278627,0.1690607736110435,4.004753160887004,-0.6693655658050464,-2.396844359752764,-5.1791077276404005,-6.427864695263608,-10.021991812554319,-8.290233663131122,-9.085710913511734,-8.95198091543351,-1.4682515479896558,-4.3864152591033605,-6.365761348154896,-3.7807708991708453,-5.1155165392374045,-0.858456737225273,2.943839937010515,-1.4971556284024579,-4.323918082813098,-1.5622622288326173,-0.9090754030528995,-3.5260314444209735,-4.997656807306374,-6.31357055952711,-4.572278805913902,-6.913370000822192,3.187413990300125,0.9831831609475967,-0.7191700131187289,-3.768863796179047,-1.598536425563227,-4.1481902933872306,5.1163197992508795,-1.7088721495472292,2.115285906921837,-4.180469151305479,-3.727996110772291,-1.3063921440608888,1.5347699597820144,1.783905649886031,2.5779406558184754,2.521370921101294,6.46113310688682,4.150877213701727,3.0456570611611316,5.734173290635045,-1.7015310590561121,1.0311940984050956,0.28679524901918363,2.3184969336530483,-4.879212807494051,2.087973734688634,2.0846987679133466,3.7657318446758534,-8.774306231690883,-4.364025301687822,-3.170365214567567,-3.501781645103757,-6.216919510058339,-1.1342769549936127,-5.35070716808923,-3.624849793401306,-3.7813761407087525,-9.880787669972223,-5.784279385325103,1.3319784767344784,-1.9298810748929847,1.236614563595812,-3.2861766595338686,-3.2614777345851116,0.5757218044242872,-2.6522943383917585,-4.334370245576741,-5.64641136878369,-6.68563800046728,-2.7845183962595916,-2.9302203290722693,-0.3397004672350262,-3.5351368679551913,-8.044514103370293,-2.0941927190596807,-5.681455938866785,-3.568401220780091,3.969796057998173,-4.844066926101827,-6.447013972900308,-5.810810536265194,-3.7238350507111595,-8.457034277394065,-14.240541419014527,-8.159432261060843,-8.92197803614776,-3.9302101100294515,-9.452199477659068,-10.863658506972406,0.25280044493809045,-11.542628536155796,-5.6591469459855075,-9.55012778425338,-12.251367843960718,-12.058264649206393,-9.3153162835529,-9.956970094437128,-10.623660695669297,-6.367333898016932,-10.612733210677435,-4.388276526477961,-7.777527695149229,-10.372482789450995,-8.590211450792527,-5.092438515366361,-5.09263568869777,-4.864619225204668,-7.140927505256369,-3.2218509664349386,-9.250014624940304,-11.154802252619024,-10.805218541693339,-6.898529715214668]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
4.3%\",\"values\":[5.286138116265656,0.8347280209337054,5.0661706580164605,-2.285021833699268,0.3051104885237872,8.183285704758461,0.3516683625917474,10.07979300071462,5.917995845182315,4.922325170052606,7.453634721428179,6.817408914934887,8.163875185549127,4.83120090476416,9.1313425049263,4.19275350983208,4.7046052207074975,8.58485641489457,11.229710809956622,3.225643928217888,5.240283630945653,1.2747528640637558,-0.2976263909272745,6.678409779623292,4.537556647719628,4.497697074515387,0.8662652518696528,2.492716553429145,-1.8400264214742976,1.9686369173473768,-5.1095993957849695,0.4029004377913489,2.287882527236009,8.851262224973365,1.7814969769363727,3.587735153026252,6.206068279452139,9.993881284973266,2.981084538995132,1.1598789348565082,2.9341098512948895,5.493454949773056,0.5553835760874943,-2.9840364943662214,-0.057047825802598595,-1.2180174451816153,0.17167831857068716,-1.8230408537381186,3.9923453594770137,2.8069867532971946,2.8008172491562555,-0.4798477298875349,-0.22475153808421522,5.8126932961483355,7.693804470163312,6.453297874052346,5.712753476247896,9.795424916247562,5.781140551236771,5.446935396778057,3.2179977610007255,2.867599634872446,3.549507135516393,2.072286944238104,1.3605567694987475,6.06247224539427,7.2382431174621225,-1.7265723350666065,2.652748280846904,1.0224725428282442,4.533964090703504,0.8244877749583455,0.9789011769168174,0.3570909006451243,3.427536136619022,-0.5640430841020714,4.717182466103427,2.543964240515866,2.2549199445974764,5.326893788951626,5.476461889993747,8.004097467880678,5.892657544374265,10.679634789369109,7.317952084556444,1.352471754286115,0.3144243422494697,9.485652085115552,2.4646061072998444,0.21632304485268639,10.56049573417922,-10.886487541826977,5.005439764819464,-0.5327155744601211,4.648224722041285,-2.7518714494134127,-0.7019797247818225,-3.0729257818795626,1.3428486407065279,5.010731421000025,-2.3246448509885096,-1.044956960146854,1.1913140799195188,2.0515715542585573,-10.995607434014325,-3.099963760399482,-8.062780736633462,1.1161903060978735,4.746564766490579,4.059439918469084,-7.205610229519513,-5.503653041105264,3.7167913504435965,-0.8599048489508551,1.1892013530415255,1.2048131279049885,1.2479755253900862,-3.143390742512556,3.124246778772246,-3.984764033869591,-0.1664265608196405]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
4.1%\",\"values\":[-2.4861452783656346,-2.0531701682639363,-1.4889855722760361,-3.579637779792084,-1.2006004485577453,-0.17230052988566147,0.6928928581981122,6.252774295878327,3.5586157254281665,4.733048105900042,11.959023429514971,4.453066948147658,1.2963245190093724,3.3299307419953457,-0.2367767323681113,3.353042215343874,1.9258974452500017,5.108636097488058,6.366955666780836,-0.6724810601511486,3.2640778954558973,4.064655079247586,5.365191261257941,2.093483259425731,-1.5941923539194534,2.3326149203127686,0.8518217563866666,3.6787872973074274,-1.1625330174093405,-5.1276284150942075,-2.502152514952267,-2.6584499903674503,-1.8002474411331995,-2.4996607161129005,-4.011249912691172,2.208938653971141,-5.512050563968571,-5.448160038112565,-1.5876515489052467,1.2302398305795164,1.40204744498021,-2.885691614608348,1.1798256909887757,-5.61307298187088,-5.631844692280314,-4.040147608511132,0.022076980967666637,-2.8175796147630283,1.2951901108712027,3.8806693322859047,3.377709024675047,0.47673529517735225,6.858848335135122,-5.948285726428967,-4.958631565919664,-6.410111452460865,-4.179466740555829,2.2140289606175334,0.3819315932512959,2.0996698836941334,0.0886421272027742,1.8963695751560363,4.2975386860789,2.7334524260288777,2.2898776545536554,-5.960792251021213,0.682705337730508,-2.4371567541064696,0.7063148582618624,-0.9962878865793496,5.142068257080279,-0.8855444008491875,-1.0847824034161273,2.0569438442908385,0.014251187560585299,-1.3421485626541485,0.4004970024977341,0.24269974883992768,-0.9802223194920142,-0.869882222561728,7.9199111870326835,1.020005164730848,2.2247417956625677,4.3692478364362355,-2.478634617248172,-3.0653440111010966,0.41913387349734976,0.6192415945168482,-0.5072867958886458,-1.3158520439601245,3.00900220881661,3.057847347317031,-6.551183446620734,-5.811240732383211,-9.21430589978031,-2.1887683732443866,8.159011579157864,-9.753643186678403,0.39649864097801957,-9.552282113732776,-3.135952814999693,-2.0806631289599355,-0.6185734507064029,-3.846760478648113,-6.335295951176651,-0.2154605032416963,-8.568466080463653,-8.888684156703606,-11.173771903168982,-3.765425279331925,-10.874514257724307,-8.382085601257227,-6.499011206687411,-6.558073964837828,-10.122654196309009,-10.729996375089232,-7.36819326257564,-4.474305444066397,-6.043073797030084,-2.0854285856122443,-4.095454521170898]}],\"hovertemplate\":\"color=B_and_plasma_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"B_and_plasma_cells\",\"marker\":{\"color\":\"#636efa\",\"symbol\":\"circle\"},\"name\":\"B_and_plasma_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
44.4%\",\"values\":[-20.551813674779158,-9.544767717560354,-17.426167228543314,-6.659840602250819,-14.812897509156471,-15.71832283234083,-15.513921875320452,-7.229954668858253,-15.627484571664393,-10.941908764413638,-13.125302128791912,-17.342792819125336,-11.954048318059925,-16.821580652173132,-13.656734568748362,-15.19611424055701,-16.52361567515362,-13.080472796424433,-17.02771995356195,-10.358645578343122,-18.07651127075593,-13.620404671353231,-5.479507157392314,-7.15329280046535,-7.75256627870368,-13.491273727613493,-11.724539241000155,-17.94320817911359,-15.002146052874389,-16.562748070454973,-8.509472491091698,-11.705728109036237,-12.39719481685788,-5.820089369604429,-12.145689995494783,-3.575963530646176,-15.156446744411681,-13.918764588770255,-11.90406293801297,-20.060542184963968,-16.051773091515614,-8.956057061177518,-13.26278632374969,-6.8501846699671205,-15.742375816770583,-13.464057604564836,-15.117005083624862,-6.342143155047294,-1.8448745796989314,-10.308977445361148,-19.40843706705318,-20.40082628312905,-18.460312608719004,-13.91128662473593,-17.854942401248742,-11.485493341544723,-16.221124656938553,-14.313007218307533,-7.817987734328634,-9.952729988234978,-15.428328362136469,-16.485466720198694,-23.253861252993794,-20.316884125012436,-12.800678051019073,-17.35924617913138,-18.044399365204452,-15.963002265313884,-21.152982769689444,-17.33418645725673,-19.536108170179105,-23.87200722905647,-15.538815465715842,-17.85069272926797,-13.44926640880557,-12.05836272055138,-13.351363810994627,-12.046649849295601,-11.72783870566339,-10.210305041110699,-17.71725885620981,-9.645934712384992,-6.015872930176605,-14.09531407775435,-7.949464249084485,-15.081355996563229,-18.035799837804028,-8.93829500786745,-15.948122216121476,-20.247174996975005,-18.29015800331421,-19.56196521190069,-19.842408143765937,-6.416463982432232,-15.190419663427562,-11.368213701834357,-11.236392189916664,-16.275320286812676,-10.533078519211845,-1.9886166842605333,-21.15067693739661,-19.731849609611942,-7.123722510967424,-7.647022947006608,-10.194063626157876,-15.465940408633003,-7.899060600242244,-9.023693725454253,-7.761644879091324,-18.888002626506932,-4.8895475413018845,-5.324025011902133,-17.359837127944783,-14.588223756528267,-15.023211661979055,-17.95185155210282,-9.99885635871156,-14.362142250479685,-18.800789089172465]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
19.6%\",\"values\":[-5.055261238686897,-10.368980635584391,-6.778724050212644,-7.5857337484778276,-11.07799021799466,-11.329449150525154,-9.978012888231738,-2.5151845007808658,-0.3753501538801435,5.003542132117237,-13.799085221407266,-4.8745002116135705,-11.229938525003142,-6.577584652521558,-5.8130863564834865,-10.045593518449683,-10.1006352846101,-9.414571126806345,-7.955089324492347,-5.599932882555965,-2.5013881181004587,-4.112556314986133,-16.661828192849192,-13.27741026082176,-12.232955261278704,-10.323155747135651,-10.42352452249941,-8.830849270632584,-11.791121806686679,-12.096308441728546,-13.965294979032352,-10.594189706542116,-12.995529408774853,-10.160919670900068,-10.150635129360225,-10.241552427652959,-7.208206200859776,-9.325632445155062,-8.220776708141248,-0.7633648518051065,-7.732747433893269,-10.929511058969096,-8.773433161666471,-6.346861754887684,-4.724935015348802,-14.57140560064515,-12.748744041897066,-11.602051378759938,-9.585315040386241,-10.982359405851765,-2.437824160971708,-6.398905922841374,-5.9318436977426,-5.4463448667771885,-5.914506538987021,-11.67145117056804,-4.544352500863849,-5.676631762140762,-9.045292540074264,-7.063942788438395,-6.5892450394068725,-9.299437570495018,-4.02214878897362,-6.4438534524130775,-5.700930114957936,-5.0517669722830485,-5.627224202496761,-3.1088565596636952,-1.8477717065173713,-3.0409239258118577,-3.120155302089816,-2.5503058448268137,-8.888276569715876,-4.798353567852818,-6.0455339258278435,-8.491757052136407,-9.502905361642846,-12.492767922287527,-9.683161385687017,-11.744167399101212,-3.3674123621969203,-8.803401307267624,-10.201887019743282,2.3850816662682934,-12.161000780081677,-7.559700727603461,-8.685353246825569,-6.698221460559605,-6.8504473310176035,-7.276182394073164,-5.694226271859727,-7.728902089651756,-4.3597957765293085,-3.5990325584877647,-7.817845143783346,-9.395165672873127,-7.566526600801241,-4.314013604562922,-10.250247635159036,-6.816477386031551,6.054192056994163,-2.3294477086851364,-9.107328743587143,-13.341464055729372,-11.577043837021087,-4.9973418068879925,-6.525491637166612,-13.530621499615505,-9.915012733652222,-9.217720422101294,-6.440926759056701,0.5255499528685453,4.955571151426233,-4.104666818752437,-4.2331823308392575,-1.96456586350332,0.11015738701636857,-6.729707646877416,-2.8105132718006707]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
6.4%\",\"values\":[-6.029867360298656,5.439564162475868,5.218587140781718,2.9528570949376745,7.2430592636776705,4.483619578706973,6.751087987202602,6.6610565850338705,2.0227956935863554,-6.812154974916103,7.289490989446859,4.668173513330082,7.2938415895870055,6.571510182480987,6.0266956510876275,4.68416318105697,1.8376642646109276,0.8856249129210868,5.534610928997909,7.019037501445563,1.0495491284342573,5.638574580049682,0.6888221286117612,-0.31047884066585574,7.146062688427118,6.4782970607906165,6.116643105296826,2.801237686532411,2.9709311281799793,5.1303238227540415,5.052055861935029,5.118123345085162,7.9035509368676555,6.1082275714308665,4.599778624685311,3.9355464530897364,3.359908022741788,3.3359715184896856,7.2906003196266935,4.5428820248053015,3.622625682392618,7.162907567886217,6.678865789233296,5.850528284182869,7.885472789079309,3.8959243319099355,5.579382464091244,9.08615954022369,1.375956434275212,6.63551437652526,7.60084188866858,4.673939714606172,4.137066871832047,5.092081537246511,3.3883524320772285,0.5188354217078193,4.692317929848135,6.871655443669539,7.852266358913814,6.044673053619832,6.61466102108456,6.098541663523392,1.7228517986203102,-0.2914670547865844,7.84710464471538,5.6922065306256595,3.5091718357909434,5.017741022869586,2.0923175092532538,8.484036786131385,2.9521826765508634,2.5080254568466747,7.222913692515471,5.269946824231893,3.237877525273843,5.408681306293703,1.5341185491949196,5.393120593160274,6.728960670306568,6.5788954493865734,2.7162577528013094,8.004983026361604,8.996306140462806,-1.9549809102378108,3.7377098674436158,5.017255701731625,0.5838411951193149,4.072519032440907,3.98064148572982,0.6333900606728415,1.3877281359432692,3.327630119450344,3.0633438903790493,7.478557055877751,2.3260950288065545,4.446984982375914,5.390604616956967,1.5041363230266271,5.575725681710725,8.879596962719381,-6.930520132561648,-2.9715621829671903,9.480995985019884,8.863545877923649,6.080022245357816,1.6439877472481361,6.499865551885674,7.138032358120009,5.024595877166973,1.6105405674198063,4.070511501293433,-9.96778403874993,-10.575205981555023,0.6044740899271345,-5.7737565683193735,-7.235392632539506,-7.002947224537884,0.9072239972535494,-0.20089249118849117]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
4.3%\",\"values\":[-1.7093380523758512,0.09599032338844204,5.4525605174797915,3.4917400919571255,-3.27967193692899,-0.778883816588992,-2.18132655644735,-5.519504851453632,0.5148447714101473,8.092566183840232,3.6183395426583838,0.15947966519439205,-3.2542382255614006,-1.2319945658128564,0.18271015282272107,0.7020800285556088,-1.0444484636052993,-1.5556075394770044,-3.7087730066804045,-4.990974085963375,6.712285342362796,4.0731442888167,-8.76466915404012,5.697054042488963,-5.37470403124083,-3.749812621546962,-5.370674336793762,-4.0122623530088495,-4.982992230871824,-1.3084458752185528,0.4471082370797457,-8.034550739868703,-2.0702717489960287,3.344517360379916,4.082384101961465,4.98943947927215,-5.494592166407866,-5.012319598338053,-5.560104220881824,-2.630685607205503,-2.253446499779631,-3.8677024742562924,-3.6845888092559824,-9.900016448455855,3.9866926456906775,-2.4936737169649312,-3.383320669111611,0.23638739757582064,-0.6360866785912416,6.921000580709215,-1.1764459451213376,-1.6588783528080642,-2.5314477985661457,-5.976821364328551,-5.407094434158739,3.23988270200831,-6.028407442502323,-6.9641554144972195,-7.846330702684714,-5.83777532103184,-0.5643005162554686,0.6210669775672066,-4.311650991122458,-5.007370501483165,2.5819464682477418,-5.710012694624786,-0.6077608644987076,-5.8555122750265545,-5.142372172107957,-3.0464906060211843,-3.6075058517071397,-1.0265585611734354,-4.139873618257274,-3.821102137342418,-5.779179985723447,-8.526917877083957,-6.355236712536961,-3.143170074814252,-4.2472343948872675,-4.927466969922358,-5.009089096642809,-8.519774825933233,-5.144372997211256,3.519963437609661,0.19996306780004636,2.164257512138912,-2.8217196441605252,2.1466042221301165,-2.6046246413174665,-1.810919869289092,-1.7049183650149162,3.598377839062336,-1.7553447848088215,4.91851359322054,-1.2444149102602498,7.776927283281749,6.561577258454677,-0.2698131877336707,6.245598300398573,-0.8987280202891114,4.524167802632226,-0.04846604972268431,2.56636764286795,-2.8176428796284405,-4.887428679961217,1.48683844977895,2.228513988598216,1.3402396014247147,0.6811161696745242,-1.2706419858312015,1.1264516569030212,-4.531245080628688,-9.816931562379338,-5.3475465694644,-6.546432350849514,-9.177440725002867,-1.1243059003127407,-10.027957065904955,-4.9745635982659175]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
4.1%\",\"values\":[6.710596447909446,0.5970568885992424,-0.17321167034210005,-3.737234274937504,1.3665803820333011,2.138403874146206,-0.32843966205434316,-0.9496335913790325,-5.120821360087039,-1.1777141168171654,0.5689542361739959,-0.9107120767563571,-0.6716684717890861,-1.017481107711801,0.43793598254874655,-1.5478672443043189,-0.8938112930887954,-0.32332242978060277,0.7605924302361116,4.949126674979402,3.1825796414977305,-1.2832932527513843,1.276801079316142,-2.912457984466773,-0.021295876017900853,2.4746684722557157,1.5514373406586155,3.4522416905971864,2.646895882000664,3.03447349448986,-1.767333293732361,3.236206820142487,1.3629677911821019,-6.081614546151698,0.7284404107813196,-3.2249919171183636,2.038638786769358,1.4071824723036264,-1.6483223353182832,0.6625019505720148,6.107478150464548,0.7887784279990898,1.085897519756674,4.757491788835262,-1.9694608098434874,1.6653358609178848,3.011194722966839,-1.786694326540333,-4.765438558704964,-2.311437813372846,1.5827744932206222,3.4987515173531314,2.0563410739485684,-0.04458894720481742,2.044185262061603,-1.8305684312500563,1.0540111743698313,0.9036495967492537,-3.037123117408295,1.3789182697771079,-1.834406661291009,-1.5735058744464543,7.757958650163909,4.338933305272689,0.8845463225607437,1.8728962441322679,1.271977891647476,0.7995882796080171,4.431874391871772,0.9724814211092737,1.838990632005772,2.602293586665961,3.575434709619539,3.691917339465678,0.2877133844930091,3.7582946798943517,4.655843462032161,2.3531416172503725,-0.8033799351114075,1.0696207082887346,3.3240750506296743,4.432382413754311,-0.720070186803979,2.120017429541488,-1.8883708221766122,-2.4754154629394294,1.1993168716076177,3.3805749568332755,-1.9100579220477953,3.591917336749251,0.6374903616527421,0.16092934461448793,1.3419269966214018,-3.907287685656086,2.0991566124866705,-3.061399630240942,-1.8356615732969073,2.2651936227793534,0.4448793515524565,1.3666633064016798,0.10278287159547084,0.8445665613030517,-5.497837120867603,-2.0217519741826315,0.39640577363088614,0.7027991800267578,0.770203992341153,-1.6005384797703837,1.8504159565114264,3.724475999502161,-5.760816103515116,-6.528370892641591,-1.3593589988975803,0.23471549259364966,-2.315592751794523,2.2194814779561773,-1.329192444367268,-2.778669399268536,-0.9163822926621548]}],\"hovertemplate\":\"color=Dendritic_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Dendritic_cells\",\"marker\":{\"color\":\"#EF553B\",\"symbol\":\"circle\"},\"name\":\"Dendritic_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 44.4%\",\"values\":[11.355871395701506,7.4439480672754526,8.708513388324064,8.350215278868264,2.705936560846243,8.505672477304344,8.522859525927178,10.815153943925022,7.9992473272462234,5.629166362743898,8.76327830318692,12.594794986813461,14.016318086671792,9.322211049709292,11.347088609517776,7.547063033669998,9.31845028815087,5.72681885473889,10.806858750232383,6.713626576813113,17.331134761646155,18.346148776222076,14.367874050306014,11.535994128004983,6.953794389531515,3.7824036230183253,5.848059115660348,15.358148799559313,10.65856120355443,9.395063115184634,3.1972512775491184]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
19.6%\",\"values\":[-1.0540899754164377,-1.8370082845424471,-2.623645734051482,-1.640435943026054,-4.063608615186265,-1.0748025323877857,-2.5762470778643545,-6.097264568769251,-4.68922583995875,-10.049807861055937,-12.767133486374707,-12.237532025844523,-7.665765471723176,-8.855522117874159,-14.015396076522434,-10.72745150927833,-1.992662337744221,-9.718483666543182,-11.160117669973328,-6.122126063383672,-4.432891859828648,-2.932510154483833,-10.46767606002215,-10.408672036460569,-11.131713791916248,-8.249652796593152,-2.7666549083527125,-2.695066604087557,-1.1473983350808332,-3.470092323931269,-4.475587599303206]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 6.4%\",\"values\":[-5.164618487882344,-8.219003851480348,-8.543015704360213,-11.963586437523848,-5.839939305354767,-7.967374483514306,-3.6078579055929003,-7.381020796857026,-9.392045795029892,-7.122090061637016,-7.282355583332403,-1.4843598330794152,-1.1456350601405003,-4.826288788287725,-4.802781109126219,-3.450535856563665,-10.14945671562835,-7.297745139557369,-8.525134733389454,-4.9058519098477635,-5.337376296819576,-4.68932478978197,-4.058682335292847,-6.975037099680291,-1.206402660583641,-6.441391662583868,-5.124332755235907,-0.049613546560855704,-0.5228230710786075,-7.887022060337104,-8.004150205804073]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 4.3%\",\"values\":[-4.485032236648996,-6.225378778551022,-3.2409144777186487,-7.892309551785376,-3.317715113797096,-3.814701651670823,-2.4222893188228904,-4.356955675618801,-7.1691361466398575,-1.8829943249950278,-4.51386052679571,-0.33971884772250543,2.2788233549238455,-0.7554829372879082,-1.3816046665007995,1.0569888389698125,-3.1795807687483366,-4.856976300837475,0.3301632766814916,-3.9739602085192294,2.4141116277624683,0.8510092179725719,-5.450737066307868,-0.06407592945660845,4.5942745088601065,-3.833563296912573,-5.111413089730834,-3.3069479404535453,-5.549106907950067,-2.4865265362886104,-6.955905380758418]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 4.1%\",\"values\":[-5.805666805390428,-3.669993435894027,-0.6179830750460518,-0.033415584465015025,-4.758646877594328,-6.05636469889763,-6.345679991162099,-6.070077436827072,-5.374651090529811,-11.774957802485211,-7.634600026349936,-6.681501981110694,-5.714989412289697,-8.453862190083159,-11.072487742149056,-8.57880374702841,-6.7985433163349,-1.8894501131425954,-7.606977079950319,-6.433377478278931,-1.6961695866864193,-4.549586066492648,-0.8054314574407144,-8.194643383400772,-10.516880598115005,-8.349230984417169,-3.2757877815969065,-7.886718968013579,-4.813515149635204,-5.389902226821214,-4.696075008646343]}],\"hovertemplate\":\"color=Endothelial<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Endothelial\",\"marker\":{\"color\":\"#00cc96\",\"symbol\":\"circle\"},\"name\":\"Endothelial\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
44.4%\",\"values\":[4.653020404777198,11.497638045685717,14.734464507121555,-0.4100350956587756,6.020223414453456,12.086474482120337,9.68776915429098,7.988237063952346,7.423842862993961,15.299580489892488,5.776486613600851,5.095099279175322,9.368739330738464,12.302446259416342,9.571900397503288,11.902708521514448,5.558090050355235,12.650193817746825,9.475795228276677,5.39048030730744,16.60154411599718,5.874700956308997,6.625689777969847,-2.4007368569614496,5.602183502528469,1.6986814444153402,1.5500781789868998,12.75527872288334,8.3750609355079,11.786278512089233,12.598444576337748,11.28594654727041,11.403323968381798,10.511142320703337,13.51233295692186,10.152689324628355,10.146465622009016,12.66383950851982,10.71222772113969,7.780198593481105,8.965220314263698,10.249777923242657,20.3862141485515,6.882838127552043,8.298124822460537,11.149470575823278,9.977986076492016,10.590818949591448,9.679307302537797,2.77107205633496,7.726243537169561,7.798745475199722,15.6617433882072,3.93236499376133,1.4796104887589467,19.028181407952385,8.68899200219636,-5.308343319951799,8.159205441855166,0.4155377668702691,8.8696745788795,2.115602043056375,6.461887539272793,7.392815499628561,-4.195662694325296,9.980036166136943,-4.066605373082742,-1.6732077251770672,11.17204387805959,4.0048796927619055,12.733197762631926,5.33759172890709,9.994451125427192,10.653074512622307,9.337258268725925,10.018273839880246,6.646679900231053,17.795833367527937,21.098756161381928,20.72087319738526,3.6346095542286196,6.017833592302608,17.27944460676524,15.175397616221328,12.07011927356902,19.769990065324762,20.386016900088755,10.986147437480948,19.614865763074178,13.37109051756029,10.614925571467698,10.757842543005328,8.939028482721715,6.006995932376667,14.020627559432427,15.564397999755258,14.445484207172456,11.720467975760055,10.795745719154223,9.10379460909672,14.534121713037843,9.472926296971544,16.487317659461908,9.96310935628738,10.873641166417858,10.771584358891129,9.184378684016838]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
[Notebook output elided: this portion of the .ipynb diff is the serialized Plotly figure JSON for two scatter-plot-matrix (splom) visualizations of the PCA projection, colored by cell type (Cancer_cells, Fibroblast, ...), followed by the default Plotly template/layout block. The unchanged context figure reports PC1: 44.4%, PC2: 19.6%, PC3: 6.4%, PC4: 4.3% and PC5: 4.1% of explained variance. The hunks themselves only record a re-execution of the cell (execution_count 24 -> 26, a new figure div id) and refreshed splom coordinates, with the explained variances moving from PC1: 43.3%, PC2: 18.5%, PC3: 8.3%, PC4: 3.7%, PC5: 3.1%, PC6: 2.7% to PC1: 42.5%, PC2: 17.9%, PC3: 8.7%, PC4: 3.8%, PC5: 3.4%, PC6: 3.2%. The raw per-sample coordinate arrays carry no reviewable information and are omitted here.]
-10.272923393996201, - -1.4786909233752614, - 2.4961336645965773, - -7.652037643872687, - -2.4769995226369037, - -10.439496453059885, - -2.9972390988262236, - 1.0483566244910607, - -1.182639404232495, - 2.3681965469467245, - 2.380116774368694, - -11.175175742269104, - 2.8182831763312866, - -2.8431272229484876, - -6.321152963767636, - -6.868110490364305, - -11.19053412437487, - -14.355111195306083, - -10.406777781798823, - -0.7558898284254238, - -7.952030122085555, - -8.301266448378374, - 3.3345100987945315, - -13.725513267491163, - -15.383192417089473, - -7.0738848714501215, - -10.758753174163633, - -3.6216790220887636, - -5.368784711310237, - -8.671402344703573, - -8.462963202815665, - -4.693355278964495, - -7.019594669243904, - -2.5274237128287664, - -3.866432274946266, - -9.610274243814786, - -12.583852650249282, - -14.104076664424827, - -13.222361581828727, - 7.0959085058805265, - 2.94466049965081, - -15.673219298726774, - -14.52939452520329, - -13.70216895267195, - -9.604378606384346, - -6.925315695762226, - -15.058238604492452, - -5.914424195079626, - -1.9642567381192797, - -7.653939928856937, - 6.281096497546182, - 14.811412374544034, - -0.7739927006584507, - 11.707442928482605, - 7.30471103394545, - 4.020370898577047, - 1.4442357795568945, - 5.887513080391125 + -4.352702244237481, + -14.776905742577604, + -13.18358425879017, + -8.838215312915516, + -11.125716885883502, + -11.64895155611935, + -8.093652811097883, + -1.0750214452794502, + -7.161656717050818, + 11.757307978971314, + -9.521577374092088, + -2.2958161171930787, + -7.57957828717389, + -9.543845772752178, + -10.377921197751693, + -8.522651651843038, + -9.555552509903025, + -8.413701969428875, + -7.709383846611283, + 1.4751614498367949, + -7.446319813415608, + -6.987107826017092, + 2.6687448080194094, + -10.29550509358114, + 0.40538471425568234, + -16.67540544639411, + -10.661991964948685, + -14.338574136162064, + -7.069060993761666, + -12.243721419689887, + -9.70610111884335, + -4.042256589044049, + -6.289444609531182, + -13.38354014313601, + -11.546857689980945, + -3.1613287841902573, + -7.248028238516005, + -4.796793379187683, + -8.69209862717074, + -5.492844885628353, + -10.084340210223825, + -7.0412538934203734, + -1.8224938955243641, + 3.7214943684187856, + -6.022647664150817, + -10.541090208054676, + -11.441526001062254, + -0.6399331666476837, + -4.900181118296153, + -8.625993721509015, + -11.715264970547889, + -8.631902869097287, + -11.251463963579262, + -8.230767117726758, + -10.223871919694673, + -4.626940532879335, + -9.261906124163323, + -6.433897600820117, + 2.1248461142502544, + -9.867936168413186, + -7.412205148662601, + -9.96808232502854, + -14.097471388100283, + -9.261778529630675, + -7.199020430079915, + -5.941770424082909, + -13.142127410923926, + -8.685945016453037, + -7.436920641505369, + -4.11881035198242, + -12.29637345887672, + -8.934439152570462, + -13.041512336809408, + -6.03918685016567, + -4.035636282272753, + -2.1464581157228535, + -1.2761203610190686, + -7.2939635050867615, + -5.302208956533007, + -8.515228883354675, + -9.444911066301556, + -0.879937584664682, + 0.7075635230313693, + 2.367767129267998, + -12.208152697348538, + -10.150195878461837, + -13.282203934129239, + -5.5550291791927275, + -4.209982400272137, + -11.265347813493294, + -7.158121527796995, + -6.251034865013431, + -9.390174456845832, + -5.092387436740656, + -11.605282592427562, + -7.534650437327317, + -4.7615150779707305, + -17.57377958560984, + -9.625690367877471, + -13.959718681737755, + 7.733260291564879, + 
-4.075541000364089, + -10.884797373451184, + -11.844352942890437, + -9.395220196296043, + -12.723347865776306, + -8.397789389312928, + -14.803315166127959, + -6.225005884292891, + -1.83172630583377, + -7.501583955496441, + 6.537396849163428, + 7.5848022844971945, + -5.7883361060892025, + 8.214781326091181, + -0.23108180799948508, + 4.986394448007828, + 3.078493914206971, + 1.138146962227536 ] }, { "axis": { "matches": true }, - "label": "PC4: 3.7%", + "label": "PC4: 3.8%", "values": [ - 1.1598286426599151, - 0.9046182303547972, - 5.949473684750705, - 1.9115900244202162, - -2.1109995057769897, - -0.004110948600553055, - -1.8785678918163287, - -6.639528306633103, - 5.0930006729165544, - 6.178311235098391, - 2.1833322311823595, - -2.7640570166062184, - -4.373370672851622, - -5.919885581944919, - -0.7032384165552545, - 1.0067786527948759, - 2.2839634572572245, - 1.2786504883967884, - -6.392093435199657, - -11.717595774644415, - 2.3143001950041695, - 1.8731129238040967, - -9.392952873577006, - 7.639595166696399, - -9.554008944887887, - -3.591834372135751, - -5.632198162192122, - -5.082291789382983, - -4.6997347280001565, - -4.969820143887455, - -0.7106578784489482, - -10.941870292575814, - -3.956630733883239, - 2.8415590644157147, - 1.5942540068161417, - 5.5334746682622615, - -5.378556999467167, - -3.146829277436373, - -6.698242860907832, - -4.629471740630951, - -4.228648691356472, - -7.3563617183225345, - -5.661656978798195, - -10.225164582984402, - -0.6491916490440524, - -3.5065596616451975, - -3.3776741223810065, - -3.821051846046809, - -1.286199714424343, - 5.808529229515996, - -5.386312119676675, - -2.4672951181339164, - -4.823574614613182, - -6.499366307406974, - -8.248385081172454, - 5.157329698046976, - -6.299389669279299, - -9.247504800439883, - -9.997536802679976, - -9.789050708997623, - -3.8220233222275932, - 0.4909014575092827, - -8.345147011175058, - -6.932995427688886, - -3.0529283608382403, - -7.889708732235986, - 2.1158876399796895, - -9.319671163209634, - -5.546294362431469, - -7.7634938964750795, - -4.284837490093425, - -5.1054596981831075, - -4.902133954767963, - -9.240013103224085, - -7.388673236099431, - -12.09246795170674, - -7.627167301520082, - -3.9866137676529183, - -3.9107699572251886, - -7.529305927502724, - -9.128440304213182, - -10.665913913930542, - -8.822561709037618, - 5.46358163012914, - -0.1920524955445857, - 4.562514671976083, - 0.8879890281800592, - 0.37092288585085875, - -3.9822344171134074, - 1.216407140268171, - -0.5663704506556724, - 0.19124855748696745, - -0.31330300758238416, - 0.18929801999046963, - -5.5109281726244745, - 2.8396742285511056, - 3.058408662127831, - -0.06122200884677054, - 6.59068578337278, - -0.4939736486516331, - 11.285871918636634, - -2.4691181973388234, - 0.035489945150305235, - -1.6950463111103276, - -3.3673106316944, - -0.04793636276072544, - -3.5082721138212416, - 2.908934032415306, - 0.02340345128882837, - -2.714791865835987, - 2.462988405846634, - 1.8929860389418178, - -7.878113520100008, - -4.38974819231293, - -6.8479939834615084, - -5.5962793501372525, - 2.3265721513157716, - -8.074097839853522, - -5.542899369196329 + 7.573783300596363, + -2.6989698079333104, + -3.017600341514727, + 2.19433045133043, + -2.2273942982827135, + 0.2654615513054692, + -1.4022110006359663, + 4.544984602685149, + -3.9141310877274034, + -3.3232368232831933, + -3.942184217004854, + -4.734684721559432, + -1.314764177388699, + -0.5103691037608535, + -2.311466498769025, + -5.9521170422544305, + 2.7528861452570976, + -2.07524547965755, + 
0.4377602198450359, + 1.1904576934040003, + 1.1123186670832172, + -7.098042360395583, + 3.942372739768884, + -1.4468403431925934, + -0.2586420310812311, + 5.740469484846536, + 3.15651165384585, + 3.809601618536574, + 4.325259725927686, + 2.382949629126415, + -2.5626389590534533, + 1.5698789677003637, + -1.6958911452540402, + -3.141018075437486, + -3.648417344481398, + -4.24567319209123, + 3.929412106283073, + 3.6571242246463083, + -0.3278658969041752, + 4.657822013989979, + 5.042157197885459, + 0.28835271035086674, + -2.255064637251328, + 0.266800936509596, + -4.322952266445559, + 4.287075873339859, + 3.5736356375838705, + -1.1578702538549956, + -0.2680087408292615, + -4.0497555789678445, + 6.295666751874364, + 6.052999340246597, + 5.704970321409872, + 2.850081981366307, + 8.643546632972306, + 0.6490424692968257, + 4.314859082438084, + 2.328977586701118, + 0.23635688042531378, + 8.693498351175528, + 0.732216292521743, + -0.7151222336499706, + 8.480823968034843, + 12.286160091762934, + -0.7680671354159144, + 4.946943136506493, + -1.5106348714450095, + 5.726534120444066, + 7.498562199181032, + 1.1505903062607472, + 10.474312648384057, + 10.205919058558615, + 0.747075672401309, + 6.879674237083448, + 0.31215789417542894, + 3.51619194714959, + 3.237261706189572, + -0.8371731734332148, + -3.74860889505441, + 0.7099776413003672, + 7.656765388205017, + 2.7044774344581732, + -3.2535765228553073, + -3.9470383959765707, + 1.0073426539719212, + -7.272989443734973, + 4.9455368411126575, + -0.9212944030040429, + -1.7113727050507135, + 0.7991097638701339, + -1.2063630426314866, + -5.275272395718721, + 4.125730592538635, + -3.909013354879333, + 7.210422285908819, + -2.161214935383547, + -5.62749788411999, + 4.399723650969165, + -7.200521903686518, + 1.2681574934749782, + -0.7583486358828266, + 6.909848749781991, + -7.987729583088376, + -1.3418111229415737, + -1.6927420835442528, + 0.7732234696746834, + 1.1073322168112083, + -2.7627263173383803, + -1.6389085031060069, + 0.6980267446337266, + -1.575115508066682, + 1.320276964179636, + 12.9446759078545, + 3.9925277267834822, + 8.480323731587763, + 11.019275967564237, + 1.9358597811556848, + 5.717250126079697, + 6.680794801693571 ] }, { "axis": { "matches": true }, - "label": "PC5: 3.1%", + "label": "PC5: 3.4%", "values": [ - -4.5124439056934085, - -1.8958960489025884, - -2.100549143009243, - 2.144931700023676, - 0.6099191348375337, - -1.7980693946214952, - 1.441952035995533, - 4.511211535822776, - 5.2311692026830725, - -1.0319677293101688, - -1.7408774409115888, - 0.016946160715446013, - 3.4660216661607297, - 1.3742780635909977, - -0.7790417631269738, - 0.44622330848235214, - 2.8809603504222476, - 3.010843745232459, - 1.821452386827645, - -1.9591875649876493, - -4.493568416555679, - -4.357307385722979, - 0.5448749330375156, - 2.0569737592656407, - -3.5074579381315165, - 0.19800456542498268, - 2.9564301855409436, - 0.21333961712678956, - -0.42104335865388276, - -3.637365122086586, - 2.617749997213395, - -2.8791865503686993, - -0.9868744142742729, - 4.074305808816045, - -5.216040239612152, - -1.274951061874377, - 0.07521519013653569, - 2.748156925708947, - 1.80435823216475, - -0.0226770535255819, - -9.422556858591639, - -5.452352356357221, - -3.8266144348030053, - -3.49103369186571, - -1.4267056824528992, - -0.5833802017388925, - -0.05920628670585619, - 0.35175041731067785, - 1.8793080501514934, - -0.4046150457893127, - -1.653260952041456, - -1.0412239817620808, - -0.5581781532928316, - 1.2441179418298964, - -0.8807902736523106, - -2.4486224669984784, - 
3.005389903746119, - -0.8078794017549014, - 4.687360134894096, - -1.5475002159373616, - -1.9576852156021083, - 1.4710948427338584, - -5.019642100094333, - -0.1445654841146044, - -3.5112588346604348, - 0.3232515384012094, - -1.2687305019175663, - 1.0113253572040537, - -2.182833952451136, - -0.058031714490694136, - 0.4714516606517818, - -1.8318711075792462, - -0.9857914726287001, - -0.009242748754808395, - 2.5888901394270034, - -5.844329439021546, - -6.040249919539074, - -1.6474186963281083, - -1.4221619659954505, - 1.236294906374334, - -1.3510567696572837, - -4.487528448687689, - -1.9215778484749257, - -2.3421744868652294, - 1.4843238405059578, - 1.6965997867635088, - 3.1261444779996026, - -5.866568727716238, - 3.4801703624204645, - -0.9010059790771383, - 1.5717314282967427, - -0.25638868013743304, - 2.0242709599218176, - 1.1376336845697823, - -3.2814248047933137, - 0.23360607834882227, - -2.5800964210352104, - -3.4305366702106665, - -2.8914228448910837, - -1.016794433001877, - -0.4102590378960386, - 3.9043369377193233, - 3.0847293504654765, - 3.3798862507835388, - 0.44824109013548186, - -1.6120653887190102, - -3.10442565228924, - -0.7476566294784198, - -0.9713457793978699, - -3.366631138917662, - 3.2854199587992703, - 8.890807142622515, - 7.705027938978775, - 0.19932630682073063, - 5.399502055504298, - 2.4012714257365255, - 4.354528222891451, - 4.700510740294122, - 4.4000189583084355 + 6.795167552080497, + -0.4640224879356641, + 4.2117515624914255, + 4.683164546327707, + -6.182563346104973, + 0.4151589592541638, + -5.326073738095133, + -6.895454260161932, + 0.044980463445334205, + 6.013628477737726, + -1.4137263705211187, + -4.358225690289158, + -6.630976103483795, + -4.0993377646244085, + -1.1911147281338008, + -1.570890028389897, + -0.40532078976683467, + -1.1724182108786696, + -6.090231167076639, + -11.245150203421627, + 7.876675087449544, + 0.12602523963771503, + -9.094327662917474, + 6.9324033859618615, + -10.220890685851495, + -0.25846240516174585, + -5.066128028072141, + -0.9663406399817243, + -2.635196028108933, + -0.6436752051130691, + -1.525531417372078, + -9.273095549678516, + -4.78760736366809, + -0.013317177845053877, + 1.3570331704816667, + 3.3369727951106576, + -3.629345786211788, + -3.0277487404025933, + -7.006864629131808, + -2.105756736170245, + 2.2660813078172657, + -5.586061175177508, + -7.031569578006979, + -12.887115258942393, + -1.6906464933400192, + -0.38811758483334047, + -2.304957958395858, + -4.791474974788912, + -0.7023040524771157, + 2.848904821694757, + -0.4196159141238686, + 0.11689143448600459, + -0.8251442082365674, + -4.908620007474553, + -1.7178745806041982, + 4.840015822390029, + -4.367364039971548, + -7.973466134393554, + -13.767915834706434, + -3.395545616961797, + -1.746753886560387, + 0.4261575900661827, + 0.7173420934997777, + 1.6140701554219476, + -1.3172301768266883, + -5.187685733050459, + 2.088071329709691, + -4.554039971692719, + -0.8250143719866408, + -6.897684761719175, + 3.7674101396907185, + 2.4595692479501996, + -4.382511652032042, + -4.147740459121453, + -7.140962683007371, + -7.857717759902819, + -4.707194693218519, + -5.34409980292725, + -7.094928233281181, + -6.877981329138137, + -2.0364250493070397, + -10.070158909550386, + -11.514131480779328, + 4.629199862319323, + 0.5606473289623616, + -1.4729162323210967, + 1.4605609518885005, + 2.097157785044053, + -5.543643956834832, + 1.2895139186810134, + -2.819087394050447, + -0.8948700455783439, + 0.29692221417101894, + -0.825198569486407, + 2.362882896533612, + 3.2173773538853494, + 
1.034047124773793, + 4.782198775698495, + 1.9189652842117013, + -0.3930304296585169, + 8.6385486073006, + 2.523839382043679, + -4.782400650347165, + -5.202580296988493, + -6.206056981207049, + 2.7504940882178484, + -0.08116829497164693, + 0.42861390754335965, + 0.3333666555026751, + -0.33569172059220503, + 0.36282707266636643, + -1.0692001139908944, + -4.855548986493762, + -3.646049565910946, + -5.422477246745545, + -2.145145562450495, + -0.6147404196471902, + -8.395565666324908, + -6.3083951914528695 ] }, { "axis": { "matches": true }, - "label": "PC6: 2.7%", + "label": "PC6: 3.2%", "values": [ - -2.7208141162551756, - 0.1927442135349275, - 3.080040352961841, - -1.0820896510557259, - 0.7452120430250135, - -1.3296360779219134, - 2.170217109771809, - -1.763323682817988, - 2.9142929748344195, - 4.96290658490656, - 5.155650634804046, - 5.343376840603494, - 3.806418850321353, - 6.629005846373656, - -0.5262077181463731, - 4.995012715908317, - 0.35567460678214075, - 0.8003774405846636, - 2.637909140498642, - 0.9471422763690173, - 1.2210771797960025, - 1.0069302397747895, - -0.8643389730992089, - -0.3803137749959966, - 1.0228012255408887, - -2.2949384510168325, - 0.8906187274201562, - 0.1875013921552457, - -1.2552654772271474, - 2.489720335346723, - -0.08247638955463006, - 0.1899126447267061, - -2.508323605962024, - 3.9634703391795485, - -0.3495362276794175, - 0.28744956692332524, - -2.662851476962756, - -2.2442621556512905, - 2.1866714712787703, - 0.17690092397060658, - -4.150203526821938, - -2.379883961357857, - 0.6726318646371965, - -3.254488610778853, - 8.89765026245549, - 0.238626341764682, - 0.1820851876810973, - 4.266358998387821, - 2.647935998078335, - 1.4583821945624487, - 0.18844948834966618, - 0.8432220258621498, - 3.019686277950187, - -4.3371558930021745, - -2.52985792650074, - -5.21854600940329, - -2.2258769359926345, - -1.312851288642706, - 2.610346786119605, - -2.010595341893088, - 2.7817651202952374, - 0.8986596316481167, - -2.6906241368466195, - -4.875635215119824, - 1.592931389586114, - 1.093255985873783, - 1.0158183709622088, - 0.4892146999030948, - 1.163941017267693, - 4.894330071529058, - -4.318148547725412, - -0.1520664871948772, - 0.43449018514909077, - 4.598610870085148, - -0.9673885301875192, - -3.3768889498059163, - -3.962246836624139, - 1.7021856554831225, - -0.21379424971630961, - 2.1032538138554275, - -0.7926794038803809, - -3.899828845964593, - 2.667963099545971, - 1.6292257709111475, - -0.5024933760528836, - 3.5280862782071236, - -4.16007570431708, - -1.1482281054388082, - 5.324421573548205, - 0.03285051677196553, - 2.665990755214486, - 3.770804620334538, - -2.209026050054191, - 5.548018332461223, - 0.1973849669690075, - 6.827991981203512, - 2.760935569830575, - -8.599823254524912, - 2.1261588599918606, - -6.2327662405465745, - -7.906223646376361, - 0.12810382272098678, - 6.533734569679248, - 0.07858666802226645, - -1.4595034573793042, - -0.6408358941193738, - 4.095034722795174, - 0.4059879285130102, - 1.4909097790134063, - 1.3917040650000287, - -2.660946759782535, - 0.1525325796664597, - -7.51322766110807, - -0.008398746088800816, - -2.6957370264095686, - -2.2628175054192585, - 1.1819221309818027, - 3.349838817997342, - 4.042649240935671 + 1.1740525137452598, + 0.6194522995624296, + 1.4116979452333707, + -5.738350427581665, + 1.9664694101469808, + -0.2919817835271673, + 1.378905606750184, + -1.0001430840514098, + -6.399397258886863, + -0.16963745913637268, + 4.50833079993023, + 2.3772385224362553, + 1.289613032801864, + 2.159061143956561, + 1.1673075789099039, + 
1.0320816326797433, + -1.9955973100895341, + -2.789507645879339, + 3.941092499509458, + 9.151460913116399, + 2.450907274688111, + 3.6473890079026665, + 4.273714121202266, + -3.741168244452658, + 6.220520688396557, + -0.19408944783903823, + 2.236320165240667, + 1.8204615960165993, + 1.7811675076164577, + 3.87675746424305, + -1.9207111507635095, + 8.526336389178926, + 3.943571151328981, + -4.76048127162867, + 4.812747131792236, + -2.0374400916460758, + 1.8249139143816318, + -0.8658133425587913, + 1.8802138355417803, + -0.31688052353898505, + 7.457348357065514, + 7.476167384941426, + 4.41385928326326, + 7.472069921159433, + 3.5606128417935827, + 1.342814155995613, + 2.2281932872906163, + 0.9652375910915142, + -1.2884939351264797, + -0.8394272929609746, + -1.1566472159021417, + 0.5842009771741354, + 0.5627923338667818, + -1.0323883043772988, + 0.3073221901282677, + -3.4163299366541895, + -2.2225494714289513, + 2.575434614046635, + 1.1309092052438847, + 1.6983714940739192, + 0.3180478310977318, + -1.1246101286119632, + 4.9044496672368, + -2.1383560641240864, + 2.764634557039705, + 2.914218864148363, + -0.3362342414379309, + 2.0283676329159417, + 3.7071245527389203, + 3.8429344147438877, + -3.529092488292079, + 0.6333031165831989, + 2.677492649979236, + 2.7064580865032637, + 2.097274709811909, + 7.073715683391287, + 7.542516317610304, + 4.9574451227193705, + 2.940436128003915, + 2.1458489242721908, + 3.0611019927221332, + 10.854617807143583, + 6.137142666324331, + 1.273689937112697, + 1.3118507915901296, + 0.06918185755825025, + -1.3395113291531535, + 6.821629609012828, + 0.6234052050315874, + 2.4619186737334537, + 1.8804402544118883, + 2.2933933568466767, + -0.3655981452031235, + -0.7986225631447255, + 1.9800345873708247, + -1.1364971129262806, + 0.5908739212542092, + -2.1472822099135023, + -0.7808299905396946, + -1.0123532242741202, + -3.813019632265686, + -2.9693479566674417, + -2.41043439481379, + 1.2588831853359785, + 1.7368327041242508, + -0.8431785739727582, + 1.8520951764851736, + 0.428769558539, + 2.065877347357987, + 3.784036576871056, + -5.269033847597604, + -8.233033077042089, + -7.549791360767359, + 0.906123483011288, + -4.499769449939621, + -2.818009007177712, + -3.8622579890691675, + -1.6043144223870915, + -1.9761230940578045 ] } ], @@ -5858,234 +5858,234 @@ "axis": { "matches": true }, - "label": "PC1: 43.3%", + "label": "PC1: 42.5%", "values": [ - -17.30078441742645, - -6.895183135440192, - -1.4514489714579013, - 1.0895457209591934, - 2.49511857736391, - -7.212015578343002, - -11.097478159991248, - -3.415595746285426, - -7.413177018619896, - -4.741439362376054, - -8.553271239005065, - -15.220443802363672, - -15.419101138264349, - -9.293646209401704, - -8.992986220532272, - -8.331459867525565, - -6.634151095597277, - 1.208808477029636, - -5.9276792814334645, - -7.686198387039736, - -18.313940782205577, - -21.514663179741127, - -14.713146305611744, - -11.341482556764184, - -7.985597493375022, - -0.554284298395611, - -8.22163310611368, - -17.08146811007675, - -16.042436965412687, - -7.601115979130334, - 1.3329517564470417 + -16.681919153031615, + -11.296893276411879, + -5.455369691536223, + -1.8329579148372783, + -6.4751794295070555, + -11.031525230244867, + -15.716645491000397, + -7.488901659579707, + -8.917148357040848, + -5.387218561287919, + -7.875151590417475, + -14.444904870628807, + -16.19563059270942, + -10.757213022263052, + -9.796588505080477, + -12.205891308309283, + -8.32787716762199, + -4.734638802587958, + -9.798380279568008, + -11.334200785749445, + 
-17.894668324179523, + -18.174394364804165, + -16.907361441101074, + -11.557613880503954, + -10.0855990324363, + -4.717415942790337, + -11.369680787048875, + -19.62475748590959, + -14.186143620626542, + -11.783881210736023, + -3.9799058107685754 ] }, { "axis": { "matches": true }, - "label": "PC2: 18.5%", + "label": "PC2: 17.9%", "values": [ - -4.056965500290928, - -5.174309089041109, - -10.765096311649277, - -14.735597038236705, - -8.827400597952604, - -7.338308495602373, - -5.503712740001616, - -15.5422035595684, - -10.168503241395882, - -10.185714970964423, - -13.472530126214338, - -11.65823093565362, - -7.639266184615172, - -12.377718930805049, - -16.17633168080061, - -10.805009260397437, - -8.363656727028934, - -15.94273643557891, - -17.165011318553432, - -6.130104014075151, - -7.471366207175148, - -3.901565708948807, - -9.975700204867584, - -11.627458340696865, - -11.64677574072958, - -11.430102611615519, - -4.469213066257257, - -5.178669851646845, - 1.6063989784347965, - -8.480087854246698, - -9.470081885473785 + -2.4866964760553727, + 0.5827084616970761, + -6.584790126959794, + -9.843204474701432, + -1.735037112093135, + -3.5203890971439935, + -0.3648528115117955, + -10.917437959041518, + -8.137481841552493, + -13.17666462701792, + -16.0340516466598, + -12.249309450814785, + -7.044110888655938, + -12.35485172204314, + -16.541870907670287, + -7.365893206475485, + -4.305005599332729, + -11.727679282920416, + -16.17806090479052, + -4.301600076456077, + -7.38751015486325, + -6.083325694074893, + -9.485794812327878, + -13.649810256624965, + -11.455826284271044, + -10.14553057600989, + -1.2441429122295307, + -0.9462311260941736, + 0.8661076293733264, + -3.9148123998510487, + -5.53122749805992 ] }, { "axis": { "matches": true }, - "label": "PC3: 8.3%", + "label": "PC3: 8.7%", "values": [ - 1.6923283762750372, - 6.545986367969174, - 12.446557351031501, - 19.777046519462218, - 6.087999166318857, - 7.262813185812481, - -1.1117169467119539, - 12.32044275074673, - 4.055991856363653, - -1.650729443544562, - -3.622195780252244, - -8.311867623196596, - -1.891553174762896, - -1.184364447435346, - 0.6871120232129617, - -5.369020751810356, - 9.180152047203357, - 6.6120017999584855, - 5.129325704895745, - -3.684547081978468, - 1.0254377825478478, - -0.6973450209585323, - -2.488952021368264, - -0.8411181499733547, - -5.809332053846951, - 1.530007284529864, - -2.2138910537134873, - 0.6245006614609903, - -5.561976278573392, - 3.51356033159883, - 6.606046590184823 + 2.6185924659809556, + -1.018407303529048, + 10.225681931958816, + 19.904646500675078, + -5.616410932343824, + 3.1163211888804714, + -7.100892494915584, + 10.551856369121449, + 4.838372227757695, + 1.460753556288361, + 5.05695512014473, + -1.716539092530606, + -1.4635000834508833, + 2.2482965254834832, + 3.5841306523773815, + -8.108271926005097, + 6.777793172685909, + 2.979251246941288, + 6.357240783287284, + -7.6652734147395645, + 5.264964247602477, + 6.50843019057861, + -2.8950354344450377, + 3.775356067187089, + -5.462830714200916, + -0.46155422978483257, + -6.650470731976192, + -4.352703555432485, + -3.793486004612836, + -0.3050314264329532, + 0.25496446419036234 ] }, { "axis": { "matches": true }, - "label": "PC4: 3.7%", + "label": "PC4: 3.8%", "values": [ - -1.6779098990403383, - -1.948308191774345, - -1.9664716071525092, - -5.772409757582352, - 0.3865189815220693, - -1.0854729084638646, - -2.123139918588411, - -4.3473878517558076, - -2.041991090058496, - 2.3975721436833553, - -1.722165238420525, - 0.5642367505681425, - 
2.936334001220176, - 0.8746365409446628, - -1.2582970846528225, - 3.736683761019063, - -0.35590460615534736, - -4.6677209282220575, - 2.6877473360911273, - 1.6793290304031303, - 4.7145129885619825, - 3.3016217209575114, - 0.7947500589752892, - 0.39632138922720406, - 5.789047191455284, - -3.3108667294505265, - -0.7727457369377098, - -0.3415273564457557, - -3.8068549085496173, - 1.1492791069721877, - -3.6348778938758772 + 3.6762665008616078, + 6.766644662762168, + 6.123733287812233, + 9.780139022029173, + 4.524486048893658, + 4.787873377486053, + 3.648982237328458, + 5.464958411607075, + 5.614366012423148, + 1.7358535797936918, + -0.014460968870127822, + -3.766067808118958, + -1.1991938525729064, + -1.7058724674915315, + 4.003989070100161, + -3.437928065904411, + 5.2237200002062, + 6.5474677003094905, + -0.956728047335869, + 1.937080285692793, + -4.499993516026979, + -4.179867939580983, + 3.7174903088048725, + 0.7447393315798223, + -2.3962108946888008, + 5.263364517421521, + 2.9231726924067627, + 5.719908570946161, + 2.229494715575124, + 3.196495047941754, + 9.547705280158665 ] }, { "axis": { "matches": true }, - "label": "PC5: 3.1%", + "label": "PC5: 3.4%", "values": [ - 2.8826568563223063, - 8.385578934959236, - 2.8064768989456024, - 6.899533271199019, - 10.073240701191683, - 7.123051721772946, - 4.134828695860586, - 5.033808951948692, - 6.883315876184511, - 13.210383524592904, - 6.215880569271835, - 4.3434926157288025, - 5.914872621157408, - 6.325869242090135, - 7.132303555927263, - 8.633741916781442, - 6.360354639551508, - 3.457890287262184, - 6.520922876793362, - 10.035381658027356, - 2.8244645494345813, - 4.007257863678036, - 5.905802182038886, - 5.904274646552366, - 9.069112240415748, - 11.435432546153233, - 6.5095745904626, - 7.1100596713676945, - 5.561492647443086, - 7.783081284224584, - 8.173622934181987 + -0.1283063713900702, + 0.38143405864212854, + 0.8543002575797355, + -3.7600804208584995, + 2.385453430153325, + 0.6070860582841603, + 0.9319586319699327, + -0.1411742222920248, + -1.6096741738729843, + -2.8300671472675822, + -5.474244116933185, + -4.094150542766461, + -0.42615083257252084, + -1.7275787751535507, + -2.8449158937252665, + 0.19302357919142785, + 1.437828406076573, + -0.5641951570270887, + -0.08400948879488705, + 0.18476066027971905, + 0.6179924567773644, + -1.1351875448605788, + 0.30198351714256, + -1.2532164417618834, + 1.3775625239131852, + -4.0107207886394045, + -0.3738303926183886, + 0.08158311282873078, + -3.8505344237072916, + 1.5822323940194407, + 0.9003545899065515 ] }, { "axis": { "matches": true }, - "label": "PC6: 2.7%", + "label": "PC6: 3.2%", "values": [ - -0.9055255921862997, - 1.7161397727731398, - -2.3242713970494004, - -1.124992124062061, - -0.23798864656240323, - -1.383507964556784, - -1.4507796761235316, - 4.056413996560381, - -5.6890075309765535, - -2.8673242684484306, - -4.72561490128473, - -2.3105627851749384, - 1.9439641052466405, - 0.4484404520958876, - 0.11358271097435348, - 1.921231203440371, - -0.7572293657417313, - -1.920755570305236, - 0.946327886896481, - -3.895037831219219, - -0.3773084792136632, - -4.218316074653955, - -8.473564688482261, - -3.050908088074039, - 1.580327017399225, - 2.3409370478678966, - -2.408918868182238, - 1.6120639144464868, - -5.932892716666613, - -3.997582185730285, - -7.410345546897444 + -4.357256773342994, + -7.729013711273134, + -5.2216531136481334, + -5.4398592099452365, + -12.305627577556272, + -8.177296232482782, + -5.675296891091547, + -9.07596603922799, + -8.317932390031267, + -10.30121516941709, + 
-7.416101082904636, + -6.7178377327023036, + -4.729148340497778, + -7.506239007834563, + -7.903397122826476, + -10.371903864095446, + -9.04913319865811, + -5.231457614774617, + -7.746948531750676, + -9.796542376799318, + -3.583295613837255, + -7.2049922201199035, + -4.751859481831092, + -8.42056202413112, + -8.98676964512599, + -7.812353808290694, + -7.629435537275462, + -4.864725739113075, + -5.577830298073223, + -10.443103339881866, + -10.159666880593742 ] } ], @@ -6108,690 +6108,690 @@ "axis": { "matches": true }, - "label": "PC1: 43.3%", + "label": "PC1: 42.5%", "values": [ - -12.310056120844079, - -9.597136259315189, - -10.954699253113967, - 6.892142074092062, - -14.885390456411244, - -18.067334920639446, - -17.00680854067071, - -16.046150514323738, - -9.33672233717876, - -21.582160661635754, - -15.110822139030386, - -12.365459479715174, - -17.21514039376611, - -13.510799182203282, - -16.26593551672371, - -18.657377097133594, - -16.427077783470452, - -19.357545443167268, - -12.548582331793943, - -19.24604557779213, - -17.550920028891724, - -10.936030161744458, - -14.380488489499044, - -3.918770312432982, - -12.810168623656239, - 4.532703608705291, - -9.769866006572082, - -22.733180344396743, - -16.147634698453007, - -21.024170494215802, - -22.237041763938933, - -20.561172572133383, - -13.01349087454783, - -18.34068090213155, - -22.485035752361632, - -17.426841277756807, - -21.07455628180842, - -22.01439221963137, - -21.129743790380527, - -17.98658280062636, - -19.944906834468995, - -9.866357124655236, - -27.364026489863647, - -11.823102394543174, - -10.172315257455338, - -14.314453591987139, - -17.715160985120374, - -13.46999655476322, - -13.898409701199967, - -8.325749981804938, - -10.84952501412731, - -20.827897473381647, - -13.752829355351356, - -14.32036072117102, - -11.82782417363492, - -20.07588642711798, - -16.018400556863906, - -2.618372226082084, - -23.730262856704865, - -3.9021028166538434, - -18.10515243713806, - -5.562355369265948, - -15.481143366972539, - -11.820351983636773, - -3.7063869749256284, - -21.628237883941644, - 2.4582936337324344, - 7.725042754672949, - -15.332502634483493, - -5.5430078363379955, - -20.476842984843824, - -14.982910284999953, - -13.483616351249758, - -13.964361330067524, - -8.837838535673724, - -16.409372138604063, - -17.791307242332703, - -21.700830156218167, - -27.744177367267557, - -29.81082747134628, - -11.834113826844863, - -15.157359893497622, - -22.975998910394622, - -24.32167463891348, - -16.522040384237705, - -21.97627620672622, - -27.970496088054237, - -21.07558989577943, - -26.95825636922722, - -21.831587616340283, - -21.592010060724274, - -21.30018265768053, - -10.83231045913125, - -15.60440763807564, - -20.253245917465417, - -22.295132895250436, - -20.624881103709, - -20.310135467873806, - -23.09345723394033, - -20.8590766599342, - -22.295888070199876, - -18.02459345411228, - -19.147928938955594, - -14.477716677192433, - -16.58440698585764, - -14.623864111104291, - -17.00645441413316 + -7.900747032988724, + -14.236430087722049, + -14.881142565966856, + 8.185992224233239, + -9.35839628334074, + -15.08408507237451, + -14.89201483884963, + -10.511293868206662, + -5.526822278742043, + -19.66681040829703, + -9.740688500051156, + -6.371459307153175, + -13.208046867663398, + -10.453885165768408, + -11.646130459986443, + -15.740567938868473, + -10.724731504722048, + -16.65724413489272, + -8.870988414022133, + -13.702496653823188, + -18.85110760161824, + -7.507836184472188, + -8.587527071236536, + 5.377498168574093, + -6.309368600426573, + 
3.4415204536627195, + -3.4187311109913985, + -19.029588369725076, + -12.501262919785843, + -14.94624656443327, + -18.420885400490686, + -18.323990700353985, + -11.85355172475099, + -15.783218300560305, + -16.700742648686553, + -14.567187667586742, + -16.33149803517134, + -18.428060397654217, + -15.511260259148962, + -14.270999543450772, + -14.817336777120417, + -8.488105670856552, + -22.355276516515307, + -8.846962647557769, + -6.391677800395032, + -11.387958252786824, + -12.47847252958988, + -10.185952269079017, + -9.883202763175763, + -2.0133816641705575, + -7.755917342520793, + -12.996125575649284, + -14.788432672851808, + -7.830113753537813, + -7.066838249339037, + -24.570581924459894, + -12.691552101148488, + 0.1684012490284288, + -17.421243223598765, + 2.2611097361548924, + -12.913139798265307, + -5.357826175116285, + -13.266412117897467, + -12.434733256327208, + -1.0635539195819115, + -14.139994118794416, + 8.863345351243343, + 18.406614424614776, + -5.853527648553441, + 2.6511320198910266, + -14.934291131725868, + -3.5704986701270784, + -7.064094592611723, + -8.212426154794864, + -1.7928814738483354, + -14.206226863285657, + -12.867919827079241, + -20.09985598195333, + -25.061878458328735, + -29.10278402415135, + -6.2692867696998125, + -10.444686396461545, + -21.05388366428948, + -22.89917320104677, + -15.825107308449315, + -22.477324876553176, + -26.96223497656853, + -18.49788336522248, + -24.906346325367586, + -19.53976086749695, + -17.14867000355328, + -17.407631966891532, + -11.101667042486516, + -12.798049562157697, + -17.684908425832276, + -21.079279697387353, + -15.748780552679376, + -18.141612910711025, + -18.32634523528215, + -15.371226094707144, + -19.927539835047508, + -8.793898342168665, + -15.64530932661723, + -7.481090802758926, + -11.673959658393965, + -9.737808387349144, + -9.245947532262072 ] }, { "axis": { "matches": true }, - "label": "PC2: 18.5%", + "label": "PC2: 17.9%", "values": [ - 17.098352203031247, - 2.9140116244552736, - -6.8656606743322435, - 1.7903085055689836, - 22.45800075960447, - 9.588071088712933, - 21.465236479597884, - 25.37139749129129, - 1.9977194997483654, - 5.968842794515615, - 11.674411807288216, - 10.263941815102928, - 10.266881259531209, - 10.04660750968372, - 27.14338400056772, - 15.340899592100346, - 32.02277420842411, - 16.363910231665756, - 24.9862662802731, - 30.46321173277923, - -0.3046999154964183, - 28.344846960339815, - 19.92040780427992, - 27.02426597893539, - 19.386484201852447, - -2.070863097176524, - 20.99604559745245, - 23.109916150524185, - 18.942859135668726, - 15.602050423853228, - 22.779113035424615, - 20.43591159693211, - 17.824560485319743, - 12.751648635126546, - 14.270055160160354, - 11.759388977322862, - 28.265224891329026, - 27.361313807892287, - 24.747546557592884, - 20.59011277219461, - 4.029841500368219, - 0.5471198838305864, - 3.8298648835346842, - 11.214521885631836, - 9.870099243998364, - 14.594918214219025, - 8.809924391233917, - 6.312788382974761, - 4.240679534522542, - 20.83727858736194, - 10.465528353975833, - 12.974005476104804, - -3.7497795814071067, - 21.338977419627035, - 22.706109440881395, - 0.06980589792572878, - 18.31112042616361, - 24.892178792095848, - 22.15733013629792, - 22.139854894382474, - 25.589570059212754, - 10.748267276911506, - 18.326229391606848, - 15.518117841924226, - 14.796059454687624, - 5.39171754087444, - 15.799690939644258, - 38.72848658608494, - 17.6952427447776, - 26.315860205688146, - 11.715009016832422, - 27.70020041601636, - 21.33800189163556, - 23.742724954349406, - 
25.063066817015773, - 17.174758542965176, - 5.944643694394152, - -4.907591258662221, - 1.7771949324496785, - 7.945286278261365, - 5.238791735539701, - 16.722710857375738, - 8.039493760423202, - 11.781870738929776, - 1.4730122320616987, - 2.1416635708361316, - 3.6016493159521366, - 14.745142829182853, - 3.1891182010508805, - 15.195491757985977, - 18.75358957997639, - 19.942515632230165, - -3.9982825562304174, - 15.988182667949399, - 6.706761129322929, - 15.229811445016386, - -0.37949077873941883, - 13.172522268693209, - 19.401439683183376, - 19.77227175198794, - 6.7630640536526565, - 9.728304216002963, - 4.5371854031912875, - 12.336421074828458, - 11.692605331087364, - 8.061342703183328, - 12.101986151304562 + 18.919995269865193, + 9.701143954425625, + -0.2386112802513436, + 8.975598772878007, + 21.77508642972416, + 12.155070128869626, + 21.941448585611653, + 27.081498264702283, + 0.35395336545172906, + 6.5152550517054975, + 8.063820909617041, + 7.835647051494222, + 11.6535430212538, + 16.0880249577356, + 30.243585139576297, + 14.83637549269717, + 31.98861429520331, + 17.928288524848057, + 26.607436784327575, + 32.35496800350661, + 4.31212637661411, + 22.144925606683813, + 19.209264298071382, + 27.29584744322463, + 17.552918781900917, + 2.5408980468946005, + 19.605235335670784, + 25.684133464550385, + 20.329887026304963, + 15.386680057538095, + 24.181932508025312, + 21.744049916182377, + 19.774753700499865, + 14.49449080175656, + 10.59041617899231, + 11.773439567535156, + 31.288880697754287, + 28.696198736217934, + 26.987489300369425, + 21.660974445280573, + -0.1468472948980296, + 0.19433920974056829, + 0.9960163615139334, + 12.085714706764406, + 9.229760429734942, + 17.312033291766404, + 6.127701615773333, + 5.647476789476855, + 2.861973300479522, + 18.736687481017032, + 10.154734911285814, + 8.24209441142698, + -2.953596079257325, + 21.504633673708305, + 21.821442734644393, + 9.477709392780596, + 20.57293927039004, + 23.396632931790602, + 23.60272169830288, + 22.968875404692294, + 27.204224760258167, + 13.113537893207361, + 19.169992596245965, + 19.516430291751767, + 13.255246226888374, + 0.5107563612740406, + 11.270393504314931, + 35.06455788410068, + 20.81404085264694, + 22.75166155160022, + 12.020963751451132, + 34.31332498091387, + 20.57700798851869, + 28.421681920175168, + 28.3930572181606, + 17.8150917653637, + 1.3025017509000263, + -7.247972587663433, + 2.1530640390380595, + 10.190534905687215, + 0.5459561302514571, + 14.21490559146433, + 9.447572876977134, + 12.08363012214338, + 1.1098906053174122, + 5.773178849137752, + 5.899488176837009, + 16.015356601185626, + 3.3036944432975726, + 16.65611707395691, + 19.898347306889175, + 20.392857797121994, + -2.940388354997953, + 17.216969010590443, + 6.934474813878473, + 17.855913427998555, + -4.422213412858078, + 12.997784458263743, + 22.69816012035653, + 22.243441749522972, + 6.34221289240787, + 5.056244622048658, + 3.6310537016254685, + 10.80687760475136, + 14.086759386883816, + 6.164101307138273, + 7.7663721232734675 ] }, { "axis": { "matches": true }, - "label": "PC3: 8.3%", + "label": "PC3: 8.7%", "values": [ - -0.6023688511022534, - 13.96328096833205, - 6.682726235276905, - 11.006328973949952, - 1.1270016960201645, - -3.108510645836018, - -1.3943736503009805, - -2.0349823526479427, - 6.684079165911009, - -3.4568807668929202, - -2.316157823352796, - -5.499860999478624, - -1.3513667107723497, - 13.760900661727295, - 4.031816614162613, - -8.067679088857478, - -6.419789492305879, - -1.3278624644356725, - -0.563594053480386, - 
-5.889350585104976, - 1.701063149075422, - -2.8388270009454493, - -0.9818698892521316, - 9.220031343733476, - 5.2258527264532, - 7.963766057865881, - -2.916981004355433, - 1.599808179442833, - -0.036743172429871684, - 1.638013111798088, - 1.3459791333061606, - 1.1153882341445742, - 9.118752591854452, - -0.3576730299879909, - -4.742658989994048, - -1.1990252429155888, - 4.453139568410764, - 3.129609162673128, - 3.1297323613878083, - -3.6847446536770123, - -13.523778540766434, - 5.7500591286817, - -6.49117574317722, - 4.29492757529618, - 0.24771430859490318, - 5.372161892346468, - -0.4638929280721782, - 2.663625292287364, - 0.05061937584153808, - 3.561323289385537, - 3.5375261914190226, - -7.753932798536015, - 4.373496653157904, - 8.354754188851292, - -3.2182855456013666, - 12.949336457039335, - -5.663441979323378, - -4.8724898556961405, - -5.721019921319009, - 9.36809845481417, - -3.5459620761006643, - 5.652309573485979, - 2.5861423755265385, - 11.20554261157816, - -0.32536307843182233, - -9.382202745458827, - 4.296657908066059, - 12.30060375358479, - 7.197241241923383, - 5.323206513824758, - -7.065679001458829, - 4.90153355812923, - 5.634087643375932, - 7.048703707858355, - 12.044165410530127, - -3.443840079142995, - -16.87800601934662, - -4.388293234036358, - -7.908926436014884, - -8.900582896621342, - -6.982255741167236, - -11.054729558667237, - -4.245183234598273, - -6.078373792544937, - -4.8872192708718325, - 2.3925225573506723, - -11.238948083951204, - -4.506661189443729, - -11.355388160393863, - -0.546981577673443, - -8.226345635508213, - -1.1004690564285218, - -0.8935434194917488, - -4.825103018870045, - -4.0432394381826064, - -1.174745366409531, - -11.864415196893866, - -3.940005574055834, - -10.488339063605787, - -9.017632256118624, - -5.679649202519469, - -4.172792979090403, - -2.975577129461443, - 2.944694973485905, - 0.24262600600734796, - -3.0095480127827283, - -3.7041274229205507 + -1.7604264592263132, + 3.8638947795504714, + 2.473417949564354, + 4.496111672407371, + 1.061596289634073, + -4.1014311619135135, + -4.847051019530076, + -8.856776099911658, + 14.197114741506324, + 0.5534390841555901, + 4.964286171758113, + 1.3159790935852258, + 0.44879249363792195, + 10.04955719647131, + -0.9644627664818755, + -4.519202993787872, + -11.172082526564507, + -1.904148049824725, + -2.70511396128998, + -11.879199414019055, + -3.3254376724999086, + 0.38212826692638613, + 0.5015934564368614, + 12.960519173883402, + 8.944138099778622, + 6.086282338897705, + -1.6155312984775447, + -5.166163325579187, + -3.2563512061391013, + 6.344609835909145, + -5.498893989162884, + -5.435631757680342, + 3.5704080068489423, + -2.6568018494677204, + 1.4265496004840101, + 0.6293678278177577, + -4.5033203505481865, + -5.5210845023883275, + -5.217760688859671, + -8.364813421566687, + -6.565359696890449, + 11.143041547987654, + 2.7374142348477104, + 2.1009459824912993, + 1.7774160438809523, + 5.040372876704135, + 6.441739330969414, + 8.438321424474632, + 6.668740256176774, + 6.0670833043173955, + 5.709903678577047, + 2.0074039357297098, + 4.857408306504009, + 9.742590158054407, + -4.303629222392513, + 1.079052270758614, + -8.70735693731097, + -8.630383811652882, + -12.566329465605024, + 7.694832074790657, + -9.331665956905969, + 1.6354622866757533, + -0.46014952595857306, + 4.7562351474889795, + 0.5339712777190511, + 1.14441808792956, + 12.692535966323586, + 17.64675298976014, + 9.640799999645452, + 11.552913540854401, + -5.248839631758651, + 2.83918015009881, + 10.459572855230281, + 4.382177596760152, + 
9.907738940971493, + -7.678855076488778, + -8.885269541780895, + 2.61317864585126, + -4.110529643624969, + -9.173601230555716, + 2.266056249140971, + -9.456299345620803, + -5.485670255960193, + -7.736592806899377, + -4.2762044668501815, + -1.2474588692133484, + -11.310086631791174, + -8.692037995622547, + -9.054891871786204, + -4.5350447861577745, + -11.439353998806492, + -4.812777494416858, + 0.053787046427223206, + -8.502216145303581, + -3.0492384793445586, + -4.17790673585807, + -1.4684002239359282, + -4.6616380166864335, + -15.28332031670251, + -13.863925128790497, + -3.2582964320959396, + 9.118893051130241, + 2.5460635707724615, + 9.843414741638195, + 1.4918619916769202, + 3.4263168625426745, + 5.487144876341413 ] }, { "axis": { "matches": true }, - "label": "PC4: 3.7%", + "label": "PC4: 3.8%", "values": [ - -1.4313879318597809, - -1.345340328434895, - -2.8988591606564533, - -6.31772432482184, - 2.546120546843126, - -5.693615802224246, - -2.419719950624107, - 2.643096907480727, - -8.123687665685562, - 3.4317114159697653, - 6.883301229244093, - -5.646827190218466, - -7.801198524666155, - -11.739479848194005, - -2.787153823343226, - -0.04010129139388585, - -2.568095932209951, - 5.146900791902378, - -7.185734208777548, - -6.161651500330322, - -3.391505263758002, - -5.363059619200616, - -1.3924973615979621, - -2.6738872496459303, - 1.555634137171539, - -8.270134639951296, - -8.65280599762584, - -1.6467107279207873, - -0.6572924398679483, - 0.3182199171563007, - -0.8991692144048007, - -3.2616959895477007, - -1.0998708662593302, - -4.571733199558724, - 2.847136156652144, - -0.8117181777619531, - -3.5904650572289993, - -1.1532716849068245, - -4.334453126460278, - -2.3559824912619938, - 0.3873973357231968, - -0.6381574255130203, - 1.5007504108594978, - 2.249909277460577, - -4.892043385196032, - -5.749500334568027, - -0.17013161177874325, - -2.653930035949688, - -4.382211603423768, - 1.49357170021779, - -8.912559702670563, - 3.279796378856509, - -0.001648640137373869, - 1.9352050867660724, - 1.8467771490350482, - 1.5124246648904738, - -3.5065845681915864, - -5.2768225336174375, - 5.793316820650766, - -2.56721532693767, - -0.9277259086395369, - 2.8691371290924494, - 4.856066434270849, - 2.0795614685151307, - -1.5706279760527773, - -5.538136787148029, - -2.2554114101306615, - -14.208838792735428, - -8.380342801642112, - -8.565867712296008, - -2.495354127626511, - -9.783503811261191, - -8.974662250899415, - -12.63083915165049, - -13.901588510963109, - 1.970754788475023, - 0.8092023292813821, - 11.15148354245675, - 1.561083178970062, - 4.838028211867541, - 1.5855555668352517, - 1.4965775090178663, - 2.5790988137858673, - 5.209592732556338, - 3.225258886526545, - -2.431964086225078, - 3.1554324160618172, - 6.874275796605955, - 4.943283220059857, - 2.482657634367664, - 1.502956747647954, - -2.968798629405172, - 4.448898998929997, - 1.3159206322451946, - 0.9640462560466545, - -3.243252469766997, - 2.131616645501171, - 0.5965824827666972, - -2.1673883133182015, - -2.542242103415161, - 3.6331488547756625, - -4.868154973949728, - -2.7350093940551514, - -1.4073705345569487, - -6.920408934020776, - -2.976993108227227, - -3.105601058831591 + -0.0537795940546027, + 4.9636759289870165, + 5.570594659167735, + 13.453393180139795, + -4.456541380679365, + 2.7924935070275563, + -1.9176923535634143, + -2.243203659556479, + 1.7557191510374623, + -5.274860520404694, + -14.960721128966398, + 0.4576590420747173, + -0.057277507281960016, + 6.222164150440402, + 0.5477716353555935, + -8.799669112239961, + 
-5.348386155485222, + -4.183118274152389, + -2.9720632264839435, + 1.1441983845435564, + 3.3457637247456393, + -9.956150345281173, + -3.2234450195945534, + -3.4165993776267927, + -2.291355965952606, + 7.921919521606905, + 2.272978023640487, + 2.808761379218692, + 2.2681637071988803, + -7.9995576188883035, + 1.3938515034794974, + 4.1416196864437955, + 2.1075019868513336, + -0.6271747948057409, + -9.907549431073312, + -4.162863773250031, + 7.538531002307636, + 5.478348894420704, + 6.445095434934383, + 2.6496485803245315, + -5.955893830803164, + -4.929658620728713, + -6.012603983257646, + -4.058735411301967, + 1.857886808699082, + 0.23966102176568183, + 0.13640073927227458, + -1.7012190280791306, + -0.8543311728816977, + -6.755543044824636, + 5.175355686552372, + -11.698477755641061, + 1.0827725849731098, + -3.005711021836513, + -2.402935371260109, + 4.953834725020483, + 2.3262958660078965, + 0.45366675348478225, + -2.735217546037185, + 4.0419143394284935, + 1.3181021867320781, + -0.8809296853166673, + -3.0114777539304485, + 0.038147613640335754, + -0.6819637437160466, + -0.9024896593123573, + -5.224080253823271, + -0.9650488396729641, + 4.4821134165898915, + -4.78926635184008, + -5.2476279368529894, + 6.826850572864495, + -4.345172775070611, + 7.805018600290016, + 8.829208377420429, + -3.8547539050342308, + -9.13629559174337, + -14.116254566433199, + -4.403217059976005, + -5.436830588764407, + -5.286746574444326, + -8.380326737064516, + -1.062556323419245, + -6.282376928519888, + -4.75215772699916, + 2.2244866366930927, + -1.5797282082922388, + -3.5476852678999906, + -6.204786568648321, + -3.7185958956844356, + -0.4229901844175654, + 0.624575432821042, + -2.7820681581718776, + 1.214323371343705, + -0.8955702936452841, + 0.4481853080313727, + -9.958697757441012, + -1.0476604844539668, + -1.4862045913337658, + 3.348781903710634, + -7.644276450467186, + -1.2656755795769148, + -3.253175304759872, + 0.4134729807483484, + 4.151044977801335, + -1.2417089338791654, + -5.3914366799680415 ] }, { "axis": { "matches": true }, - "label": "PC5: 3.1%", + "label": "PC5: 3.4%", "values": [ - 5.631050342705594, - -3.8144743710080244, - -6.736300582772111, - 1.8212783907285466, - 7.5307152721407435, - -0.35387317420805525, - 1.8612419709255068, - 5.541050734363105, - -7.561280080767835, - 0.03777561698896861, - -1.1358260769180384, - -0.7467815379211329, - -8.62163942042976, - -6.756573501799959, - -2.4665342261046885, - 0.8241626353452904, - -0.8996893170534661, - 0.7439239283117225, - -1.2149350702940391, - -7.05072066513583, - -1.3458524643004801, - 0.7195081167976093, - 0.1503508123169932, - -2.2803216088008407, - -6.189929096490347, - 1.2608146235980442, - 1.726918629520568, - 4.2153514377659596, - 4.979202524448097, - -8.795615730137266, - 5.318250276396861, - 9.811861222223914, - 4.546558174243827, - -6.065400881684538, - 3.9223979335971344, - -2.451559959866317, - 3.898653378441622, - 5.826024362944995, - 5.4801062594927386, - 8.23752210871069, - 0.653067722261329, - -3.0000876068246267, - -0.21307396888212995, - -4.584871042514836, - -2.9977468754344083, - -3.609161751774498, - -1.982273857500729, - -0.9126397818202481, - -4.875023458563086, - -7.470007146743442, - -3.9302780016213377, - -5.836754789800693, - 1.9376640495589512, - -10.281365974255653, - 4.805086097794056, - 0.10441806253041597, - -0.987728859363005, - -5.1084632444514595, - 3.100499280586549, - -5.1455140807795035, - 2.521608632016753, - -7.865488532884913, - -7.176076567902554, - -8.261065159369243, - -11.434395836006097, - 
[Notebook output diff truncated: Plotly scatter-plot-matrix (splom) of the fitted PCA latent components, colored by cell type (B_and_plasma_cells, Dendritic_cells, Endothelial). The re-run only changes the embedded coordinate arrays and the explained-variance axis labels: PC1 43.3% -> 42.5%, PC2 18.5% -> 17.9%, PC3 8.3% -> 8.7%, PC4 3.7% -> 3.8%, PC5 3.1% -> 3.4%, PC6 2.7% -> 3.2%.]
18.5%\",\"values\":[17.098352203031247,2.9140116244552736,-6.8656606743322435,1.7903085055689836,22.45800075960447,9.588071088712933,21.465236479597884,25.37139749129129,1.9977194997483654,5.968842794515615,11.674411807288216,10.263941815102928,10.266881259531209,10.04660750968372,27.14338400056772,15.340899592100346,32.02277420842411,16.363910231665756,24.9862662802731,30.46321173277923,-0.3046999154964183,28.344846960339815,19.92040780427992,27.02426597893539,19.386484201852447,-2.070863097176524,20.99604559745245,23.109916150524185,18.942859135668726,15.602050423853228,22.779113035424615,20.43591159693211,17.824560485319743,12.751648635126546,14.270055160160354,11.759388977322862,28.265224891329026,27.361313807892287,24.747546557592884,20.59011277219461,4.029841500368219,0.5471198838305864,3.8298648835346842,11.214521885631836,9.870099243998364,14.594918214219025,8.809924391233917,6.312788382974761,4.240679534522542,20.83727858736194,10.465528353975833,12.974005476104804,-3.7497795814071067,21.338977419627035,22.706109440881395,0.06980589792572878,18.31112042616361,24.892178792095848,22.15733013629792,22.139854894382474,25.589570059212754,10.748267276911506,18.326229391606848,15.518117841924226,14.796059454687624,5.39171754087444,15.799690939644258,38.72848658608494,17.6952427447776,26.315860205688146,11.715009016832422,27.70020041601636,21.33800189163556,23.742724954349406,25.063066817015773,17.174758542965176,5.944643694394152,-4.907591258662221,1.7771949324496785,7.945286278261365,5.238791735539701,16.722710857375738,8.039493760423202,11.781870738929776,1.4730122320616987,2.1416635708361316,3.6016493159521366,14.745142829182853,3.1891182010508805,15.195491757985977,18.75358957997639,19.942515632230165,-3.9982825562304174,15.988182667949399,6.706761129322929,15.229811445016386,-0.37949077873941883,13.172522268693209,19.401439683183376,19.77227175198794,6.7630640536526565,9.728304216002963,4.5371854031912875,12.336421074828458,11.692605331087364,8.061342703183328,12.101986151304562]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.3%\",\"values\":[-0.6023688511022534,13.96328096833205,6.682726235276905,11.006328973949952,1.1270016960201645,-3.108510645836018,-1.3943736503009805,-2.0349823526479427,6.684079165911009,-3.4568807668929202,-2.316157823352796,-5.499860999478624,-1.3513667107723497,13.760900661727295,4.031816614162613,-8.067679088857478,-6.419789492305879,-1.3278624644356725,-0.563594053480386,-5.889350585104976,1.701063149075422,-2.8388270009454493,-0.9818698892521316,9.220031343733476,5.2258527264532,7.963766057865881,-2.916981004355433,1.599808179442833,-0.036743172429871684,1.638013111798088,1.3459791333061606,1.1153882341445742,9.118752591854452,-0.3576730299879909,-4.742658989994048,-1.1990252429155888,4.453139568410764,3.129609162673128,3.1297323613878083,-3.6847446536770123,-13.523778540766434,5.7500591286817,-6.49117574317722,4.29492757529618,0.24771430859490318,5.372161892346468,-0.4638929280721782,2.663625292287364,0.05061937584153808,3.561323289385537,3.5375261914190226,-7.753932798536015,4.373496653157904,8.354754188851292,-3.2182855456013666,12.949336457039335,-5.663441979323378,-4.8724898556961405,-5.721019921319009,9.36809845481417,-3.5459620761006643,5.652309573485979,2.5861423755265385,11.20554261157816,-0.32536307843182233,-9.382202745458827,4.296657908066059,12.30060375358479,7.197241241923383,5.323206513824758,-7.065679001458829,4.90153355812923,5.634087643375932,7.048703707858355,12.044165410530127,-3.443840079142995,-16.87800601934662,-4.388293234036358,-7.908926436014884,-8.900582896621342,-6.982255741167236,-11.054729558667237,-4.245183234598273,-6.078373792544937,-4.8872192708718325,2.3925225573506723,-11.238948083951204,-4.506661189443729,-11.355388160393863,-0.546981577673443,-8.226345635508213,-1.1004690564285218,-0.8935434194917488,-4.825103018870045,-4.0432394381826064,-1.174745366409531,-11.864415196893866,-3.940005574055834,-10.488339063605787,-9.017632256118624,-5.679649202519469,-4.172792979090403,-2.975577129461443,2.944694973485905,0.24262600600734796,-3.0095480127827283,-3.7041274229205507]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
3.7%\",\"values\":[-1.4313879318597809,-1.345340328434895,-2.8988591606564533,-6.31772432482184,2.546120546843126,-5.693615802224246,-2.419719950624107,2.643096907480727,-8.123687665685562,3.4317114159697653,6.883301229244093,-5.646827190218466,-7.801198524666155,-11.739479848194005,-2.787153823343226,-0.04010129139388585,-2.568095932209951,5.146900791902378,-7.185734208777548,-6.161651500330322,-3.391505263758002,-5.363059619200616,-1.3924973615979621,-2.6738872496459303,1.555634137171539,-8.270134639951296,-8.65280599762584,-1.6467107279207873,-0.6572924398679483,0.3182199171563007,-0.8991692144048007,-3.2616959895477007,-1.0998708662593302,-4.571733199558724,2.847136156652144,-0.8117181777619531,-3.5904650572289993,-1.1532716849068245,-4.334453126460278,-2.3559824912619938,0.3873973357231968,-0.6381574255130203,1.5007504108594978,2.249909277460577,-4.892043385196032,-5.749500334568027,-0.17013161177874325,-2.653930035949688,-4.382211603423768,1.49357170021779,-8.912559702670563,3.279796378856509,-0.001648640137373869,1.9352050867660724,1.8467771490350482,1.5124246648904738,-3.5065845681915864,-5.2768225336174375,5.793316820650766,-2.56721532693767,-0.9277259086395369,2.8691371290924494,4.856066434270849,2.0795614685151307,-1.5706279760527773,-5.538136787148029,-2.2554114101306615,-14.208838792735428,-8.380342801642112,-8.565867712296008,-2.495354127626511,-9.783503811261191,-8.974662250899415,-12.63083915165049,-13.901588510963109,1.970754788475023,0.8092023292813821,11.15148354245675,1.561083178970062,4.838028211867541,1.5855555668352517,1.4965775090178663,2.5790988137858673,5.209592732556338,3.225258886526545,-2.431964086225078,3.1554324160618172,6.874275796605955,4.943283220059857,2.482657634367664,1.502956747647954,-2.968798629405172,4.448898998929997,1.3159206322451946,0.9640462560466545,-3.243252469766997,2.131616645501171,0.5965824827666972,-2.1673883133182015,-2.542242103415161,3.6331488547756625,-4.868154973949728,-2.7350093940551514,-1.4073705345569487,-6.920408934020776,-2.976993108227227,-3.105601058831591]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
3.1%\",\"values\":[5.631050342705594,-3.8144743710080244,-6.736300582772111,1.8212783907285466,7.5307152721407435,-0.35387317420805525,1.8612419709255068,5.541050734363105,-7.561280080767835,0.03777561698896861,-1.1358260769180384,-0.7467815379211329,-8.62163942042976,-6.756573501799959,-2.4665342261046885,0.8241626353452904,-0.8996893170534661,0.7439239283117225,-1.2149350702940391,-7.05072066513583,-1.3458524643004801,0.7195081167976093,0.1503508123169932,-2.2803216088008407,-6.189929096490347,1.2608146235980442,1.726918629520568,4.2153514377659596,4.979202524448097,-8.795615730137266,5.318250276396861,9.811861222223914,4.546558174243827,-6.065400881684538,3.9223979335971344,-2.451559959866317,3.898653378441622,5.826024362944995,5.4801062594927386,8.23752210871069,0.653067722261329,-3.0000876068246267,-0.21307396888212995,-4.584871042514836,-2.9977468754344083,-3.609161751774498,-1.982273857500729,-0.9126397818202481,-4.875023458563086,-7.470007146743442,-3.9302780016213377,-5.836754789800693,1.9376640495589512,-10.281365974255653,4.805086097794056,0.10441806253041597,-0.987728859363005,-5.1084632444514595,3.100499280586549,-5.1455140807795035,2.521608632016753,-7.865488532884913,-7.176076567902554,-8.261065159369243,-11.434395836006097,-4.601990984837101,-2.4662873577014337,-4.053544738693274,-6.575398510003801,-0.613385018733946,3.8503872173516327,-8.625729673771348,-4.245205735563793,-7.907743592007594,-5.286104709018203,5.7062191628671615,7.566675644106729,8.203662335998603,3.039444399827849,2.9793092818466103,-0.9554827362679592,5.978189396611881,1.5393208012852246,-0.7686026978261313,5.971112148069218,4.378349468968074,1.9209319206166253,7.962436180331535,2.8185696025208387,0.09003354005600889,1.5433293271334363,1.269585074968269,3.762500446934474,3.9250527312339916,1.4918140237662225,-2.343546602135828,0.5132298563408981,-0.012559684583232247,-0.09252522252494207,3.0809052302749937,1.9311464491009116,1.0920726097555653,2.1691569094552223,-1.6153992094718999,2.6877447156717076,1.766933371763959,-2.8449948510735976]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
2.7%\",\"values\":[-3.065663024221989,-3.896181500813422,-6.934923965449229,-4.354668550629121,4.796534006993468,-4.612776447240108,3.8123073074512646,5.572573270523418,-2.65294487829109,1.6270995330072509,6.8523543629711225,-8.718586420785856,-1.7066051022947648,4.2751309465485585,1.8694733446855578,5.33978600644579,5.873274178835059,4.10729977051113,7.861684710493215,-0.03769047667488,-0.9957266363415959,11.024542218838626,1.0466826033194425,3.567219041082404,-1.2477242246337563,-4.254756573584529,-2.572426863470716,3.363750655621602,0.32075278792314044,1.057294951664562,6.393885895137851,2.163302035389289,1.558518594970465,-0.5037853678844478,0.11378308990276852,0.6234423384981902,0.751987915256055,-1.0850670710816808,1.579567229667295,0.9374197080592874,-4.045214299440801,2.0486703529734096,-4.735092099027254,6.391227549994447,-1.2776041333588746,0.21974999384816435,-9.051154141787492,-2.326030660686611,-3.351023535245997,-0.14116563185742545,-5.734127797127325,-0.36943001965365213,-6.261754077831865,-3.212055966141944,-3.571724487243903,1.3082671542900246,-2.5591308288849164,-1.0211259112803295,-1.2177768786128493,-5.455063990865367,1.6721823546819066,-2.202874344458503,-3.706362385465264,-3.4896267377534356,-5.319961822238247,-5.82602938897649,-6.786558437152811,17.457743632071647,1.3121453742162408,9.347644274124198,3.690753387567978,2.710915366622321,10.476147201802194,4.370968540144425,4.7698261833824045,3.5363014461658033,-3.033524028139721,7.083336582520283,-1.0219896611177104,-0.07690631483883487,-2.800969400533943,4.738045420317473,-5.662654988595915,-0.6721698601767923,-0.8569086479426508,0.8693632897983101,-5.468434121765321,-1.7189938435085232,-2.0570057066033858,1.1079392025446386,-8.104536784733908,-1.9030184542364448,0.49336910004739537,-6.073889108408978,-4.640354690744293,2.550385645201196,-0.3750058848213656,-2.0011221218249053,3.552555331010486,-6.203558707218285,2.226016870768223,-6.409332598211216,2.384875492266012,-8.154089918843752,-6.197827915199729,-3.6226726907954125,-0.27421434676385054]}],\"hovertemplate\":\"color=Cancer_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Cancer_cells\",\"marker\":{\"color\":\"#ab63fa\",\"symbol\":\"circle\"},\"name\":\"Cancer_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
43.3%\",\"values\":[-24.27579188758203,-6.49191798799571,-27.78507176886876,-18.38061248203453,-16.778669581157697,-13.087338906572612,-19.116707693250653,-25.32735419588556,-11.432471162640347,-23.37502903828049,-26.056060957617124,-18.958619582990234,-27.480831293026817,-21.727575590096794,-21.433121769719545,-17.152550988251704,-16.34564191741886,-18.946958093793853,-30.79717099355485,-16.12393959020578,-13.401181528185598,-15.35781685895909,-13.666946322102653,-22.04751413759439,-21.55840866246838,-26.00683285677178,-17.642329280113067,-22.389537815268433,-11.17941334873595,-20.559172811617742,-24.39254067637542,-25.35632543816945,-26.17989939399574,-21.142414280828316,-20.322473112628515,-22.353702803682204,-14.901978849568497,-20.03247008008428,-27.352082023017743,-10.995499846177042,-8.92345862759339,-25.058355009797303,-25.733451566941557,-16.689840316565515,-12.547156236471967,-22.15410831824707,-19.402547983930386,-6.928638479362252,-13.107433104647662,-13.342238303622423,-13.969329168975356,-7.5654427448652255,-10.416339941335107,-22.63402561550747,-20.938253558462215,-25.168842802730964,-22.038863517865636,-12.592076478942097,-20.734879591184523,-18.439983129689136,-15.858414375187854,-13.958775185553261,-16.204540092826228,-12.881826845637065,-16.889588194860426,-23.443073496979594,-22.43398600524135,-17.942222328316888,-17.471113486331117,-17.764486963712166,-10.94929905011083,-12.181218636424273,-12.772174606498712,-6.157155226518993,-13.23285377550182,2.275984125090501,-7.731436661556339,-23.688595228423715,-24.286880585430517,-29.83026461908899,-31.680459877652545,-16.966717187832998,-12.352167853072984,-21.087504179660804,-14.700969201653802,-23.526807908184463,-27.51567535065592,-20.381246385207696,-18.50437736534651,-19.275975857807428,-22.871238626653295]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
18.5%\",\"values\":[-11.379323733662336,-12.891139621259757,-10.650992826347427,-10.123109167513299,-10.754790573985774,-1.0192524830038374,-15.035984019524971,-7.19856600033784,-17.167354603710578,-15.817594631764853,-10.067499815731967,-0.6048536699086171,-15.830423223078949,-17.48710139799467,-16.031153720982285,-8.807402214724114,-14.120637727137892,-9.322371224813592,-10.40520046395942,-7.617897266395577,-14.697602984988588,-5.666256571286195,-7.616774266938884,-18.13279633851767,-17.33743131926463,-14.216184484086654,-14.416724233717872,-11.13000792900962,-18.491180966518534,-14.070221958457761,-11.4959616927021,-18.146371403226716,-14.344161646038827,-12.213857112155416,-10.837384309640754,-10.825807932514106,-12.26861331501533,-12.02674275766444,-18.701099450494233,-14.71953532465657,-18.891772279997653,-14.940889151455904,-14.124216934837381,-10.465193852645587,-10.142350446002043,-13.351128111237355,-13.341892177540565,-20.192540027452694,-19.713835704256645,-15.084054526508249,-10.069087122668863,-8.681761971475753,-26.004702386889548,-8.194108702610922,-15.193419535455854,-11.47112086548913,-9.49952955068294,-10.033641864373045,-7.954727564333074,-11.563039724436763,-11.46801362267173,-20.535480848169037,-11.252310406660857,-7.109159405021284,-9.047266473929733,-10.978800568937952,-14.879433624822713,-14.305256766821874,-12.525523257221566,-9.236493915678743,-15.93040908991357,-8.022613274981921,-14.843790994479907,-5.924817457563523,-0.3277639123202507,10.19411208199854,-19.436425114606337,-6.648875763700126,-3.8071674191102858,-6.414594650911657,-9.183978157989262,-16.723738745265145,-16.315048643635855,-13.591412978988062,-17.591887856910304,-7.301461837081898,-10.33257925436309,0.40000948756306953,-21.076584154083697,-21.47773570860796,-13.2696539138549]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.3%\",\"values\":[2.347159638892229,21.598821360683893,-1.746262123808753,-2.8105830398671676,7.395983491063644,15.111549311864252,17.766839180090503,5.266937954811595,13.331996097641229,3.051836978054463,7.523498237144397,7.929811109985633,-0.6167679438661778,9.09551505910838,7.75480751778919,10.879428668526618,10.367973312847248,-4.517772719565418,0.8060450489119767,-0.7547878870668316,3.429552063510878,3.3252671830550886,7.301731205570238,7.364399762360527,0.37732114385348736,-4.1716804565964605,-7.804206758230828,-10.802159779779089,2.588260077764448,-9.58098622031287,-4.510412093677432,1.9100119699714846,-3.24326455029178,-12.825365395658043,-6.455430952743392,-3.204491786260132,2.9553392044713327,5.644909201609863,4.377085936258237,-4.868210439159559,14.420497130402595,-0.9889261616851709,-0.5369557879838981,6.3559195173688305,-2.284754768653846,-4.43623325930937,-7.411126310606513,3.935009954492879,6.84312476350007,9.09071442123044,5.340793188330647,5.043547801261835,0.42408656610668727,1.7576790913641078,4.997384466275412,6.145425715116494,6.964495423199375,-1.608055756466085,9.141365998750334,-3.0060673989994244,8.42464485772332,7.589637174612774,-7.935214856244159,7.534554115766469,7.184190657441681,2.185173161421031,4.499016674486819,6.02700716779508,-6.939764924458172,5.27230652063816,11.835579589175168,12.389258725072658,8.382183950601606,14.3536827658934,11.30564248255811,-6.7547900795449625,12.245666741246902,2.7945651037063644,4.16611012814351,-6.959893428779305,-4.755404966219347,5.927321846446411,15.753425461991966,13.347557608051783,4.087014153358026,8.771308379288488,0.15468796292089348,14.122785071049503,9.264839115395436,13.885490657183318,7.113785345081675]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 3.7%\",\"values\":[4.165799091030754,-10.710852895071902,2.3647569639054717,-0.5893209110466228,-3.0136118512239025,-5.797039107592093,-3.6098385413947796,-1.694339193220797,-6.3960905767714,1.5704466911790875,2.958800884822806,-3.22027474065104,-0.7504336439294675,1.408393278238721,4.199848154663165,-2.8867887947864848,-1.6238694193616456,-2.119309556154265,5.322682087715654,-2.284447366577745,1.2336439749228039,0.7384914815575478,-4.2475747962787676,-5.070908619319219,0.3088131955694622,-0.6640913035917484,2.914672838768566,0.2554032478640649,-3.9529125788212918,3.5805647588409526,-0.5920720414226239,-3.9065984382764807,3.770987375658983,3.2890105275458215,0.39722238601892856,0.0815811211793489,2.181198665527554,-0.978353693357034,2.2787124308720075,1.1732777030481292,-1.6290189809226066,3.5763896326408955,2.794543923822482,-2.7822759060003555,-0.5047179274422342,-0.5590037557665062,2.0896815292210302,0.26208369079304017,2.6983179062530382,0.17489341098317945,1.3674723766444812,4.681880455030558,1.402282794335211,1.828096990496913,-2.4066262149492803,1.8285077002585133,-0.8269935671049617,2.3173609672256426,10.30231321705076,6.689238802462359,4.311572837888867,-2.5620981905163687,4.058710544920959,-2.622764591487205,5.7636955881556595,1.7846702868128843,1.054574538209587,0.2705277220968327,5.013712808530597,-6.575858899250411,-1.3444590763548543,-3.0727581603518,-2.8091188251859807,3.2306316886307,-5.176483813658925,-9.584313261249768,-1.5204448101256807,-3.5336817007386623,0.8996082301517787,0.33521928536535084,-2.540531285068976,-2.3121830788865987,-0.13674747530457276,-6.4372502317525235,2.426225640898341,-3.351822696909025,-1.4135057769119421,-0.933568136690079,-0.08910161707725783,-2.248869736660282,-2.3859931171452065]},{\"axis\":{\"matches\":true},\"label\":\
"PC5: 3.1%\",\"values\":[-1.4832861413530893,-4.967722707216535,-3.482854072564528,-3.9241968511527046,-3.1486658141614146,0.394149015907307,-1.6421476229342478,-3.5766578914950804,1.599609376637607,-4.489863293388943,-5.401868972129145,5.198384517705758,-7.380735899716945,-4.615994948873356,-3.3780349534545775,3.9623146233737745,-8.501859124338058,-7.714562965348388,1.7070438130969727,8.022724175910575,-0.6530730390775661,-4.253038983449243,3.0918201853168066,-10.7414718099639,-5.0301068871362435,-6.409593726261178,0.5682103023311236,-1.0741996903261535,-5.21361315376458,1.2460943362113746,-1.4888212269083427,-5.415726575219595,-1.6175411205771486,-0.4827050161240196,-0.7631592994463448,0.03674873167003611,-3.053218763543152,-1.304454577093715,-2.851530013584887,0.4047865783407459,-11.599637890381318,-1.876877750490995,-4.244089510728099,1.5740178180265465,-0.8577882812188338,-4.057352385105377,-1.3767227550218228,-1.5216977598632102,1.2209748291445748,6.859926672535221,-0.3792162817046144,1.5147123497654578,-1.7865766979373876,-4.7614483666124805,-3.3306151866581284,-6.1174868639499005,3.193881376951876,-0.9838831792724828,-6.047634546428211,-6.563422303702804,-0.786723503330556,-10.46530912677886,2.2429700828910004,0.6327039691686593,0.28023963050687567,-4.889668074714999,-7.041280516123311,-1.0445272719154886,-1.4148077489244801,2.4483322958024147,2.756645194695451,-3.5211707285689275,-9.403417654489015,-2.631883532673956,-1.388332515892372,-0.49681532568449516,-9.415169171857977,2.6186595695833343,6.015739257191577,-1.7312914063870215,-9.052739619366603,-5.307766922158138,1.6403495782469875,1.3385046401157854,-2.705344525203786,2.7385792087394423,-7.191388194164714,3.1149451022062555,2.918316684291957,-0.972948554623695,1.3317379942196612]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
2.7%\",\"values\":[-0.150784769178507,-6.356019415729809,2.287018676945913,5.474286913063144,-1.577616558548222,-6.841180900885247,-2.0605736860681376,-0.4194906340407425,-4.160312748318342,4.9721370367941,1.9223519337617705,-9.013962152385792,6.075397483529931,4.97357046412846,6.220177829269241,-6.953829678227262,7.6855580474026075,-1.6304737232109496,1.8797429609171918,-10.867513731691021,7.590574891624497,-2.5212895989726367,-5.593990008932265,11.373211345786132,9.207459629316912,10.772141196921583,2.096307413623203,-1.5814784996373226,-3.446278279658749,4.601109201484515,-6.762001368892034,3.9535763991408683,7.174823768400058,0.8139373021854979,1.553931049587883,-1.982697375754642,-0.3894151138391742,-3.806247164501766,12.185188261120818,-3.8507374436343387,0.7940537033461068,6.4955535390761145,7.982271177634834,-6.828430248249314,-1.5777165188622722,8.065981302182784,1.1926670048523704,-3.72870499132947,-0.7810748896888545,-4.199880499502033,-3.344189581202186,2.85652291058269,2.3614900042783047,7.558430137344659,-4.586324418731911,1.8581714914136938,-4.552112274922218,-3.8796149509176066,0.6880893451727296,2.733042374510013,1.4277721907750345,3.5673513163925765,-4.286444543420791,-8.438789566720459,-3.701495381881668,0.5615063888099531,5.047128841231566,-0.2981759609217594,0.7475402061087131,-6.360745094910205,-3.854566275284847,-7.916612429124508,5.871279035449094,-2.9069593300762038,-7.735253222093001,-2.9752540090607247,8.444653502418673,-6.4495729173816505,-5.102821488653266,0.12349939844648188,5.045708727266792,9.581011089331584,-3.090269552738634,-4.297249000977339,11.46979037942219,-0.8003862026794014,8.260793567356973,-3.1074002751120844,-0.13760824523068096,0.3009674532614824,-2.702153700680006]}],\"hovertemplate\":\"color=Fibroblast<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Fibroblast\",\"marker\":{\"color\":\"#FFA15A\",\"symbol\":\"circle\"},\"name\":\"Fibroblast\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}}], 
{\"template\":{\"data\":{\"histogram2dcontour\":[{\"type\":\"histogram2dcontour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"choropleth\":[{\"type\":\"choropleth\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"histogram2d\":[{\"type\":\"histogram2d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmap\":[{\"type\":\"heatmap\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmapgl\":[{\"type\":\"heatmapgl\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"contourcarpet\":[{\"type\":\"contourcarpet\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"contour\":[{\"type\":\"contour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"surface\":[{\"type\":\"surface\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"mesh3d\":[{\"type\":\"mesh3d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"scatter\":[{\"fillpattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2},\"type\":\"scatter\"}],\"parcoords\":[{\"type\":\"parcoords\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolargl\":[{\"type\":\"scatterpolargl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"bar\":[{\"error_x\":{\"color\":\"#2a3f5f\"},\"error_y\":{\"color\":\"#2a3f5f\"},\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"bar\"}],\"scattergeo\":[{\"type\":\"scattergeo\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolar\":[{\"type\":\"scatterpolar\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"histogram\":[{\"marker\":{\"pattern\
":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"histogram\"}],\"scattergl\":[{\"type\":\"scattergl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatter3d\":[{\"type\":\"scatter3d\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattermapbox\":[{\"type\":\"scattermapbox\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterternary\":[{\"type\":\"scatterternary\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattercarpet\":[{\"type\":\"scattercarpet\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"carpet\":[{\"aaxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"baxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"type\":\"carpet\"}],\"table\":[{\"cells\":{\"fill\":{\"color\":\"#EBF0F8\"},\"line\":{\"color\":\"white\"}},\"header\":{\"fill\":{\"color\":\"#C8D4E3\"},\"line\":{\"color\":\"white\"}},\"type\":\"table\"}],\"barpolar\":[{\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"barpolar\"}],\"pie\":[{\"automargin\":true,\"type\":\"pie\"}]},\"layout\":{\"autotypenumbers\":\"strict\",\"colorway\":[\"#636efa\",\"#EF553B\",\"#00cc96\",\"#ab63fa\",\"#FFA15A\",\"#19d3f3\",\"#FF6692\",\"#B6E880\",\"#FF97FF\",\"#FECB52\"],\"font\":{\"color\":\"#2a3f5f\"},\"hovermode\":\"closest\",\"hoverlabel\":{\"align\":\"left\"},\"paper_bgcolor\":\"white\",\"plot_bgcolor\":\"#E5ECF6\",\"polar\":{\"bgcolor\":\"#E5ECF6\",\"angularaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"radialaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"ternary\":{\"bgcolor\":\"#E5ECF6\",\"aaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"baxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"caxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"coloraxis\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"colorscale\":{\"sequential\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"sequentialminus\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"diverging\":[[0,\"#8e0152\"],[0.1,\"#c51b7d\"],[0.2,\"#de77ae\"],[0.3,\"#f1b6da\"],[0.4,\"#fde0ef\"],[0.5,\"#f7f7f7\"],[0.6,\"#e6f5d0\"],[0.7,\"#b8e186\"],[0.8,\"#7fbc41\"],[0.9,\"#4d9221\"],[1,\"#276419\"]]},\"xaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\":2},\"yaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\":2},\"scene\":{\"xaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"w
hite\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"yaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"zaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2}},\"shapedefaults\":{\"line\":{\"color\":\"#2a3f5f\"}},\"annotationdefaults\":{\"arrowcolor\":\"#2a3f5f\",\"arrowhead\":0,\"arrowwidth\":1},\"geo\":{\"bgcolor\":\"white\",\"landcolor\":\"#E5ECF6\",\"subunitcolor\":\"white\",\"showland\":true,\"showlakes\":true,\"lakecolor\":\"white\"},\"title\":{\"x\":0.05},\"mapbox\":{\"style\":\"light\"}}},\"legend\":{\"title\":{\"text\":\"color\"},\"tracegroupgap\":0},\"margin\":{\"t\":60},\"dragmode\":\"select\"}, {\"responsive\": true} ).then(function(){\n", + "<div> <div id=\"943b85e5-7439-482d-bc52-cfd49fafdd5d\" class=\"plotly-graph-div\" style=\"height:525px; width:100%;\"></div> <script type=\"text/javascript\"> require([\"plotly\"], function(Plotly) { window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById(\"943b85e5-7439-482d-bc52-cfd49fafdd5d\")) { Plotly.newPlot( \"943b85e5-7439-482d-bc52-cfd49fafdd5d\", [{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 42.5%\",\"values\":[14.332529211990725,7.642687951185881,8.497198098823965,20.99071081079275,20.246560009534846,34.64941088112382,35.75173549293255,21.141172996639984,15.947164723032866,14.268765164407087,37.512110382447204,23.857105045617573,23.869347220574788,11.665067640206102,18.484540115803707,18.77431054115837,10.241755766482214,28.75809118325044,24.547856117715895,22.89734645439984,17.93100823786355,16.97999004150192,10.415899163826191,15.35103328996064,17.427898358671907,12.463256240546144,7.752478290803671,17.10585440811749,10.499556063889194,16.47152929405477,13.162669999564788,25.28647502076559,25.403709399529152,11.945518083095697,19.999653890497537,15.818081646631143,1.1106827884411812,9.358352050884616,13.946018687583035,21.417895549859324,20.53108192288033,15.857740321048169,19.717861045484426,11.404321915044385,14.327097139897878,4.184139297732231,15.299627652235817,10.457844942521056,18.586381927772294,10.088498821871813,12.103113151249374,21.04638838944689,20.790287565729646,-9.67040021145354,-12.271898971374753,-13.816634963040684,13.392257384213693,16.23538285105128,16.70218026156183,22.619427901497545,12.879829987122061,17.54054030320688,20.162113370443578,21.734138407589064,17.33314898065155,13.069140138417293,8.924007094754531,8.064482691430221,19.397460247125533,20.574526119858113,18.991672869699382,16.579319974729316,23.573359734248413,19.433851457157648,21.48843931351292,27.139045377125832,20.09441162783532,19.194132480989012,18.160603772845676,10.965064482951458,15.013050890110158,6.354343022596781,23.628285200353524,23.565010161656026,10.439796814903481,1.4583454559253504,13.014778462146829,12.406470486007581,14.288485495505558,11.281389485948733,1.3856438569622338,19.582272730830162,10.386228958642086,23.421775883379354,0.2865038638237982,20.254337516976364,7.255814989600932,2.1295259465328673,21.05050662603179,3.18644060639185,9.503483067638438,14.484656053141283,13.47321407779847,23.036442910533637,19.310117058010402,23.932845604696062,37.7810427557729,10.832224684856147,6.872371095589076,13.579663499735219,19.183355229031722,18.24093824135496,6.506857993891794,13.696989859961032,10
.56532681827732,-2.854993135356645,-10.644345857617274,14.282512458500491,20.05073948372887,15.015388350074922,20.006549880420266]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 17.9%\",\"values\":[-0.8104918823997715,-3.5383935924495926,8.941653955536577,-1.4806291036630008,0.4646048685251337,10.129511438524641,17.690466918006102,-0.6626801507603681,9.166649982037152,7.450386507632137,-1.455943760886996,-2.8459593769959297,12.469000813640637,8.118557945191455,-3.8146143816320635,4.31952342512561,-1.3568864889419472,13.420705934652084,24.767953660836888,-0.03063846420119365,10.856745411851719,7.592830813384243,4.445134790227043,4.162436950684578,-0.34773238501501924,-4.742636471936218,-1.850541914424992,9.472585529481396,-6.623266723360079,-1.584482920673526,-6.994052121406513,-4.844852727245213,-5.144676933495157,-1.9420461221602776,5.773498891703266,3.99565324809987,2.3164064227878702,-8.912820234466887,0.18896260209453244,9.496224079558008,9.54153039058828,3.1664538311391706,-6.293693030319245,-8.697142525208726,-5.586088874529289,-15.064825114992983,-11.528243863933636,-7.6019867003564565,-2.5533516465101234,-5.944168939199145,2.141095077915681,10.970167846860662,1.6196143843797435,-10.654407209943225,-6.647676977161785,-8.123007355631035,4.517515865141029,1.289673627983747,3.1182220054714493,2.907356701364784,-1.0968871724656424,-5.537444542630619,-4.551613005374584,-4.718060365627244,-4.299147199956735,-0.8480556221964322,1.2484539870155422,-1.9726020167340046,9.992052276386746,1.0076065783740584,5.187130606918812,-3.4803764781508812,0.8092111843952954,-0.7218640703700812,1.3396605519746947,10.12979902103425,3.0004826481771523,3.5021815376557326,-3.30644413219163,-3.7985262137629343,3.1160876034233724,-7.524407419385691,11.355032246013707,5.689957067483499,-6.82239135157845,0.03579115919456144,-2.6096948251224097,-7.675590164904796,9.550690249176881,-1.5416973687511084,-2.532410940610937,6.496098781934817,-0.5922470871868275,2.5294253572936602,-1.4772646633334872,6.278036034118641,2.7134081076093386,15.183188592141159,1.0049983463800727,-2.91592283601925,-0.013558801583148927,-6.517096567548226,2.1636066505802303,13.517202225032339,-0.8323918991615602,10.091817094672255,25.756168914178353,-8.81332684610447,-0.2464379790320086,1.999569413680897,-0.8375108467603729,0.7842105691224992,-8.615549341624137,-3.0967242617173087,-10.318782425564686,2.967902877879182,-4.795617508106332,2.3204377350786682,-2.375254387430666,3.175909898913471,8.164482382471586]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.7%\",\"values\":[3.9160580925914976,-0.44898385135950025,-4.9981441794460375,5.6523702669565985,-0.22584708025125866,8.686815593277673,8.634540287208411,10.758378104878778,5.851157329350546,5.0510391856821615,16.902625942466862,9.347734468236931,6.458273546227448,2.0799362525894245,10.466868326952413,3.4764688236484798,-2.4527285344058463,6.291447749429133,-10.255090384437965,-0.9710363894843703,2.3938176882923674,-3.1552228690833615,-9.558502769651248,-0.4195716075486773,3.8699615000727596,7.607841297912546,1.613136796963954,1.1942675747787035,-4.498244959576938,1.6811046423735783,2.1568549439021147,10.68599654307872,11.881205037198754,2.9339903039214743,-4.373451448052166,-2.02097975631002,-5.65568593827869,0.5490022387274109,1.616679826175159,-2.551834107561884,-0.3589359983438958,0.2853115464086915,-2.0317723526564473,1.155701454855171,-0.7979907483284379,-1.82081539113858,1.7407520977800157,-3.978499879682736,6.708082726588096,-1.9771519770276735,-3.858075863442799,-4.731851611473965,6.021088757624244,-7.610864377709934,-8.162102502610887,-7.501596832324893,1.9387504864775504,1.8422045520466444,2.65469099849782,6.122570408786891,0.8760756718379592,5.392224152715619,7.0686425041689755,3.2097008131383813,6.193760654780283,14.742800895374112,0.7065392436817941,-7.393699078208147,-3.8159585840898465,0.7730111307170207,1.5530582043561165,1.7213622817046628,1.6009807598222974,3.28429363831568,9.169582603956583,3.2391598575112406,10.92630909282882,4.512514629876933,2.0528492276137458,-3.2074757444500754,2.425261213628209,7.3204099245192475,0.16985392055577453,1.7013812013013325,0.6491277743433774,-6.850427916563714,-1.004860258317459,6.023634000881418,3.3294502048984342,2.227995087728103,2.0027939271013393,16.0191289325825,24.69401261472831,14.065156758142322,11.269418260990125,9.545737630782737,12.356160181038257,2.1498064659404705,14.289360707744688,12.276889929725487,7.176950819222199,13.29380411477159,15.39077782624856,8.055165757996193,20.17372252611663,7.351518765348029,7.954174354381655,20.780844832668286,10.813315178808248,8.975887726052655,18.881198929922647,20.36743384822577,20.14903718451248,19.85060221750624,22.73694970945952,8.845985499532876,5.8633880249030845,7.396078472754498,25.046496604890862,1.5150414471000522,5.553512217395722]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
3.8%\",\"values\":[-2.1766437471510636,0.4480781782923884,-1.0898223396688649,7.268778987363953,0.6415313373373788,-6.1332103774971465,4.616120400776775,-3.9753491829543512,1.0268496607703235,-1.1561518170809442,0.5233937604288599,2.0111153256536927,-3.238833138153431,-4.833763770501845,-1.8372559933093733,-3.416623903553419,-4.432397594676891,-7.019653901219181,-0.8134236138908097,-1.6702041392836964,0.3998972223987297,1.4959634536157875,4.160387365646797,-3.619632371261792,0.45486292269648376,-1.9331849490133208,1.0149425301851163,1.883460081187285,2.127446987980897,-0.9066126941892392,2.079673472165213,-6.23844686528247,-1.8476323320185357,-6.773903869488095,-5.919074157453013,-5.025616783033454,-8.174986984410218,-11.527743571178426,1.4258610538990424,-1.8406346308186579,-7.129698229009214,-6.829375312277519,0.028916459531506566,0.02326488668708282,-3.8668234449486856,-0.4514546041059434,-1.857747729939586,-3.5565454654374964,-3.173453580124839,-5.566556619026567,-1.631350479461339,0.13524143547102607,-1.0307267513374871,-9.623880494925022,-8.371966189248607,-7.725430053182934,-4.490651591759906,-5.115564744057648,-2.0893340642121903,-1.9570286237581835,0.416743713787971,-2.1634413768337435,-5.772011447744125,-0.005554662239816599,-3.4828498905087146,-9.94357416195084,-3.7383038162018134,1.2520828561905868,-1.126644469581712,-3.061241970991449,-6.328324531833868,1.4540184587315168,-0.7406012574505492,2.0285965693220547,-3.211479656934838,-1.0739816796917463,-0.8270460200595668,0.6643188739717412,3.458004631819404,-5.568588791563796,-4.866933447880493,-10.813228223225337,-3.7113857806346857,-1.8981895118024918,-8.623635469752482,1.871873612379718,0.5354546719802068,-2.3619759853001163,0.03760377238342727,-0.9394683043194318,-8.899339296357462,4.042336943332462,-8.278043672978974,-3.049470159154266,-8.558282764663721,0.3976806481611004,3.820717738009028,-4.321577831734745,0.11698952583983946,-9.081858296572747,-0.5705449255396868,1.547347505651683,0.18885213592003103,0.4701388621678644,4.702324881392711,10.870836538915569,-0.2297088059416963,-8.748327269059747,-9.77942111231914,-5.765758385739923,-0.6514482709653326,1.7028418386182995,-10.067680830238352,-1.1639402245731119,-9.683474974761662,-6.307176369989038,-9.57843150708897,-1.4501417643347025,-7.270960669535335,4.851756250185918,1.5293219616819762]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
3.4%\",\"values\":[6.812801267677687,3.7196152201241577,5.915387878407335,-2.359515873408713,1.565704388866675,2.9085593645062877,4.463148131014035,12.196398369652213,12.384434842734432,11.77600545357487,8.173959350586085,10.683632025290612,6.204039394417154,9.971853845768337,7.882225360561104,5.1078315266444045,7.772669097916503,6.08908082231034,14.294815351678555,2.348519410390725,9.663513276867127,3.993526978140645,6.191622513195067,8.731075900960775,7.7609956424393465,7.757898535406653,3.863293769422669,8.769827504627127,-1.9057665936298462,1.0033946839522743,-6.138473793177208,-1.594805969155735,0.007734774990132683,7.959345268540801,-2.316311917245987,5.903445977745262,2.817468973600416,5.804472728350058,5.255448687393654,2.6734214082265026,0.30949525107064807,2.7046092057779343,-1.8426134068646125,-4.785006463336584,-5.092919835243929,-1.9429063520327043,-4.615005588354811,-5.683616512927732,3.590091505534988,1.041060770675611,4.81877972521308,1.2965647242295018,5.038147868098971,1.9044984239926483,5.415082245675536,2.604538472086259,9.086793231321627,12.262087181885367,7.8066470649290505,6.0942745416377155,6.805888676696376,3.866404666173541,5.3122203925322395,5.422188723452256,3.1416980649104893,1.3541650440621973,8.282226199835765,1.3923611789757138,6.613610615749896,1.2705215036902824,3.8485041493930687,4.902799605685996,1.705466756342267,5.226701795852031,5.451682373857134,3.463073772160657,7.52839797349439,5.280275531143445,4.58528092553559,4.084050217468922,7.317469253809759,6.5174947615889876,6.336787228562907,14.3886677927669,4.94969908132998,3.8038160210019556,5.2375521115742,11.591714523804194,6.531346181363468,4.399406355738958,12.526282101340325,-5.839365954694227,-3.720983401778018,-4.5141510337274555,-3.613808946456627,-1.6479969378883021,5.628100512970149,-6.692335268529045,2.5553151899419664,-0.4315194145007398,0.36682274253807823,1.5887050925178334,2.7653516786179813,3.7033795027136613,-11.983708335420674,0.9396505661579588,-11.173738015653225,-5.135797371227762,-5.249164388190396,1.294233384414828,-11.30346738338428,-10.241209427068789,-3.9020286179915393,-6.190547445881049,-7.748698962288128,0.6099638695367015,-3.1374477967117707,-0.6699374195512913,-8.022523159187505,3.3756278747534973,0.369342994538738]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
3.2%\",\"values\":[-2.7656300872338018,-3.159257767514143,-2.119352940293039,-1.5448844014876986,-3.959244037872567,1.1648954000447742,-2.8158509798350253,6.380151310936718,0.1382301229821774,3.4010067684515035,10.838815429112133,2.3459236977643627,0.5149094126498426,1.0118350574148027,0.7700381513162728,2.639923955497068,-0.1839458346027693,5.465156688983686,4.260514705229571,-0.8165420922046037,0.1800067165706988,4.136784722082916,-0.08889964558232369,1.2916924129878322,-2.0720400904664067,1.677367053079844,-1.0474094410348591,-0.21075190918536113,0.09570711631927702,-4.179625529848889,-0.41049094825001986,-0.8392533911754845,-0.30422756340895535,-1.3364847499910004,-1.696033234436825,2.2092902598472826,-2.850357758348288,-4.839610046169609,-2.027903334589363,-1.2924014854582273,4.101286842382472,-0.4568202985620041,5.250603232159921,-3.4251638422002415,0.692929638738025,-0.27222423751660507,5.437781251930481,-1.3284771716913637,3.2688806997420556,3.7943419751156005,3.8502410363470143,-1.3840728462805563,3.713952724524436,-4.279230936936403,-3.66941440141049,-4.216412014113052,-7.541845611826977,1.3655623613795578,-0.4515252209242448,2.829520320315827,-1.7142647383641583,3.1070755774765186,3.0418325876722303,2.883830059903028,-0.009614944703546818,-3.9256652728715844,-1.6657182524337544,-2.9699225018291844,0.230511349218466,0.6984922275497444,3.603711551032066,-3.525466250356079,1.2150365909611542,-2.14855099321047,-1.7383649653173507,-2.295797048319236,0.19541059710466882,-3.1294228306070444,-3.1354469582789117,2.52477533296675,5.414050672363167,-0.10699413389413913,2.1745272743939483,3.9478829572669807,-0.6138346632276692,-3.390478632372202,-1.397059164917487,0.42142927325933727,-3.3280138400972166,-4.625025913461674,-0.06109520614139301,-1.6660605259089476,-3.76663262350529,-4.516477204611346,-6.055945247850341,-3.8633382661026063,4.127143180270747,-7.720144928910849,-0.8232779869215501,-8.143364233800785,-4.73039598105204,-5.261780352735953,-2.7197137417311508,-5.485612193420319,-5.220768025905199,-3.1056740277791497,-8.118084300163108,-4.5745845149895255,-5.902720386709978,-3.657789435976979,-9.259279760941808,-7.444286348462946,-0.7550149767308013,-3.755984868983272,-2.4154375076174395,-12.682966874229047,-6.2116054252594735,-5.697275932966961,-3.509853704902246,-6.089175023189463,-4.161564513114559]}],\"hovertemplate\":\"color=B_and_plasma_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"B_and_plasma_cells\",\"marker\":{\"color\":\"#636efa\",\"symbol\":\"circle\"},\"name\":\"B_and_plasma_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
42.5%\",\"values\":[19.403622281840335,2.112770708204475,13.798038053250965,5.799149109796061,14.163548002622191,12.465029849759862,18.914925133578656,16.58355799509259,12.962054291124588,21.667999956594986,12.424718074108064,18.8253605878646,10.943081330088361,19.531189962455265,9.132638090519102,10.198270196861138,17.438114689587145,9.035487201773666,17.75883407211865,14.748818531444162,19.236620047393455,10.534577016549758,9.974993113499048,2.7918725945537166,9.79163052736058,11.022239629192182,14.772410092732306,15.872334780688819,16.8949806281369,13.590017381164131,5.67667234961352,11.77574026630876,13.89644970847785,2.315929425898786,8.246808663746018,2.6130171748592312,14.800417306477572,17.834489141562727,8.712937221250659,25.47069772378685,10.085449857609625,7.172958678838768,13.42652531100114,11.776878617752072,19.437924081631486,14.899743884086693,12.420013775753986,11.090789612163514,-1.470679668497744,10.181016173582442,26.295695146245713,24.20452589855516,22.684649606468895,14.81441770028834,19.357058823726902,8.868787814899395,18.07494704207588,15.514497934229016,13.956680761903321,12.291781475809104,13.145113598387931,14.695253008524,22.10119089667603,21.143485079168453,15.881232948202316,20.510718004961262,9.088088675921803,18.50617488236074,26.450746528284498,22.474333817086904,19.433629286904946,29.746326570752018,11.08560676269701,25.636659272501145,15.793748664272293,12.4092919468462,11.886042628166727,11.542615861713045,9.312752287443782,9.443697458087383,20.289885799633776,13.81945392423814,10.273695368651625,15.021040122504044,5.0309946987423295,10.20069862418709,16.438537395336038,6.836398226394337,19.59307641449557,17.69436421741975,17.464909661138716,19.47286642885307,22.460913154031275,10.313930778322636,14.099586885057825,13.502245699763261,10.833142234305777,13.687170230334685,5.913326164782688,-0.4907687776225207,21.255281640022908,23.044702073682657,3.850716061909873,6.218791143705236,8.216360787839577,8.758548912382505,9.56799128223097,3.4360751656220625,6.457454873980443,20.52213858717672,4.338675848233439,7.69839420591223,30.0417746447081,16.213408784011534,24.356578259193697,22.09425029863867,16.28934252133,19.96831786680629,33.32950554409529]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
17.9%\",\"values\":[-5.692133818368072,-6.064272770183398,-1.7609798231481275,-6.480716398109512,-5.686783688292741,-9.300743921072451,-8.183507872569361,-2.763612910376305,1.6316225614970545,5.34694721649297,-12.699468129104446,-6.099614621457193,-11.80943495082301,-2.7835290733218336,-3.614722665432582,-11.389167836989952,-7.7347414154799266,-7.850713443650499,-5.852724374794694,-6.287575569451922,0.931865498869201,-2.263774954775489,-20.341284220299883,-10.343343355403137,-15.319420333512005,-4.933363140571699,-4.902935498823286,-2.3530027471171056,-10.294378593871862,-8.826402705881307,-12.532664926836977,-11.40470185403106,-12.243563482090131,-8.572999475566139,-5.820273314082227,-11.921525829485006,-5.48978328078871,-9.097787874130441,-7.339291992611292,0.6029183343856461,-6.4079478203619615,-10.022190531190407,-12.29793049578642,-8.550612621613663,-2.0440127140865694,-10.861962798930278,-10.144033926841846,-17.23780532367784,-9.213814989150455,-9.333147462937069,5.6187622231089165,-2.746349594323263,0.25119984397231715,-2.9730018581069286,-2.1642223220607,-14.787086491302873,0.2208590833360733,-4.376301372543206,-14.51943430731989,-3.5557862657049935,-8.650334922762418,-8.471971805624186,4.115648903220077,-3.5181061145691706,-3.176769429154533,-3.7512058099681638,-4.970068608964352,0.8745818680452966,2.2283707536190005,-1.8192886728373483,1.5377064588439426,2.7409670410383793,-5.941121865160639,-0.28711608989630855,-3.847990791995475,-11.580594339473393,-12.442815017460742,-10.67214955398795,-11.538022649549951,-9.867636089036854,3.5170826325397004,-12.793860926744872,-14.66258385313349,-0.2587812246103629,-8.642407695865202,-6.922472240922492,-3.192701443260987,-6.598671556858756,-4.185035649530011,-3.4135186617359667,-1.972109755871064,-7.637036284984007,-0.8422181459802897,-1.7189962767962,-3.237938560223224,-7.7074830852954115,-8.37050564574054,3.5037914863115605,-10.635430218420854,-1.3844374279042477,-1.6776262404037658,1.9777910414090627,-7.175809931711077,-12.093557989528879,-9.09609896793305,-1.569883162179956,-2.356146390532386,-10.376809634559097,-9.621548895234099,-10.695839752731047,-5.360603168479748,-3.4098957948244344,8.975466989276708,1.0961369529233167,-7.804925708543246,-3.1420359642540996,2.130438526283089,-12.67381145284099,0.8227164740024449]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.7%\",\"values\":[-4.352702244237481,-14.776905742577604,-13.18358425879017,-8.838215312915516,-11.125716885883502,-11.64895155611935,-8.093652811097883,-1.0750214452794502,-7.161656717050818,11.757307978971314,-9.521577374092088,-2.2958161171930787,-7.57957828717389,-9.543845772752178,-10.377921197751693,-8.522651651843038,-9.555552509903025,-8.413701969428875,-7.709383846611283,1.4751614498367949,-7.446319813415608,-6.987107826017092,2.6687448080194094,-10.29550509358114,0.40538471425568234,-16.67540544639411,-10.661991964948685,-14.338574136162064,-7.069060993761666,-12.243721419689887,-9.70610111884335,-4.042256589044049,-6.289444609531182,-13.38354014313601,-11.546857689980945,-3.1613287841902573,-7.248028238516005,-4.796793379187683,-8.69209862717074,-5.492844885628353,-10.084340210223825,-7.0412538934203734,-1.8224938955243641,3.7214943684187856,-6.022647664150817,-10.541090208054676,-11.441526001062254,-0.6399331666476837,-4.900181118296153,-8.625993721509015,-11.715264970547889,-8.631902869097287,-11.251463963579262,-8.230767117726758,-10.223871919694673,-4.626940532879335,-9.261906124163323,-6.433897600820117,2.1248461142502544,-9.867936168413186,-7.412205148662601,-9.96808232502854,-14.097471388100283,-9.261778529630675,-7.199020430079915,-5.941770424082909,-13.142127410923926,-8.685945016453037,-7.436920641505369,-4.11881035198242,-12.29637345887672,-8.934439152570462,-13.041512336809408,-6.03918685016567,-4.035636282272753,-2.1464581157228535,-1.2761203610190686,-7.2939635050867615,-5.302208956533007,-8.515228883354675,-9.444911066301556,-0.879937584664682,0.7075635230313693,2.367767129267998,-12.208152697348538,-10.150195878461837,-13.282203934129239,-5.5550291791927275,-4.209982400272137,-11.265347813493294,-7.158121527796995,-6.251034865013431,-9.390174456845832,-5.092387436740656,-11.605282592427562,-7.534650437327317,-4.7615150779707305,-17.57377958560984,-9.625690367877471,-13.959718681737755,7.733260291564879,-4.075541000364089,-10.884797373451184,-11.844352942890437,-9.395220196296043,-12.723347865776306,-8.397789389312928,-14.803315166127959,-6.225005884292891,-1.83172630583377,-7.501583955496441,6.537396849163428,7.5848022844971945,-5.7883361060892025,8.214781326091181,-0.23108180799948508,4.986394448007828,3.078493914206971,1.138146962227536]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
3.8%\",\"values\":[7.573783300596363,-2.6989698079333104,-3.017600341514727,2.19433045133043,-2.2273942982827135,0.2654615513054692,-1.4022110006359663,4.544984602685149,-3.9141310877274034,-3.3232368232831933,-3.942184217004854,-4.734684721559432,-1.314764177388699,-0.5103691037608535,-2.311466498769025,-5.9521170422544305,2.7528861452570976,-2.07524547965755,0.4377602198450359,1.1904576934040003,1.1123186670832172,-7.098042360395583,3.942372739768884,-1.4468403431925934,-0.2586420310812311,5.740469484846536,3.15651165384585,3.809601618536574,4.325259725927686,2.382949629126415,-2.5626389590534533,1.5698789677003637,-1.6958911452540402,-3.141018075437486,-3.648417344481398,-4.24567319209123,3.929412106283073,3.6571242246463083,-0.3278658969041752,4.657822013989979,5.042157197885459,0.28835271035086674,-2.255064637251328,0.266800936509596,-4.322952266445559,4.287075873339859,3.5736356375838705,-1.1578702538549956,-0.2680087408292615,-4.0497555789678445,6.295666751874364,6.052999340246597,5.704970321409872,2.850081981366307,8.643546632972306,0.6490424692968257,4.314859082438084,2.328977586701118,0.23635688042531378,8.693498351175528,0.732216292521743,-0.7151222336499706,8.480823968034843,12.286160091762934,-0.7680671354159144,4.946943136506493,-1.5106348714450095,5.726534120444066,7.498562199181032,1.1505903062607472,10.474312648384057,10.205919058558615,0.747075672401309,6.879674237083448,0.31215789417542894,3.51619194714959,3.237261706189572,-0.8371731734332148,-3.74860889505441,0.7099776413003672,7.656765388205017,2.7044774344581732,-3.2535765228553073,-3.9470383959765707,1.0073426539719212,-7.272989443734973,4.9455368411126575,-0.9212944030040429,-1.7113727050507135,0.7991097638701339,-1.2063630426314866,-5.275272395718721,4.125730592538635,-3.909013354879333,7.210422285908819,-2.161214935383547,-5.62749788411999,4.399723650969165,-7.200521903686518,1.2681574934749782,-0.7583486358828266,6.909848749781991,-7.987729583088376,-1.3418111229415737,-1.6927420835442528,0.7732234696746834,1.1073322168112083,-2.7627263173383803,-1.6389085031060069,0.6980267446337266,-1.575115508066682,1.320276964179636,12.9446759078545,3.9925277267834822,8.480323731587763,11.019275967564237,1.9358597811556848,5.717250126079697,6.680794801693571]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
3.4%\",\"values\":[6.795167552080497,-0.4640224879356641,4.2117515624914255,4.683164546327707,-6.182563346104973,0.4151589592541638,-5.326073738095133,-6.895454260161932,0.044980463445334205,6.013628477737726,-1.4137263705211187,-4.358225690289158,-6.630976103483795,-4.0993377646244085,-1.1911147281338008,-1.570890028389897,-0.40532078976683467,-1.1724182108786696,-6.090231167076639,-11.245150203421627,7.876675087449544,0.12602523963771503,-9.094327662917474,6.9324033859618615,-10.220890685851495,-0.25846240516174585,-5.066128028072141,-0.9663406399817243,-2.635196028108933,-0.6436752051130691,-1.525531417372078,-9.273095549678516,-4.78760736366809,-0.013317177845053877,1.3570331704816667,3.3369727951106576,-3.629345786211788,-3.0277487404025933,-7.006864629131808,-2.105756736170245,2.2660813078172657,-5.586061175177508,-7.031569578006979,-12.887115258942393,-1.6906464933400192,-0.38811758483334047,-2.304957958395858,-4.791474974788912,-0.7023040524771157,2.848904821694757,-0.4196159141238686,0.11689143448600459,-0.8251442082365674,-4.908620007474553,-1.7178745806041982,4.840015822390029,-4.367364039971548,-7.973466134393554,-13.767915834706434,-3.395545616961797,-1.746753886560387,0.4261575900661827,0.7173420934997777,1.6140701554219476,-1.3172301768266883,-5.187685733050459,2.088071329709691,-4.554039971692719,-0.8250143719866408,-6.897684761719175,3.7674101396907185,2.4595692479501996,-4.382511652032042,-4.147740459121453,-7.140962683007371,-7.857717759902819,-4.707194693218519,-5.34409980292725,-7.094928233281181,-6.877981329138137,-2.0364250493070397,-10.070158909550386,-11.514131480779328,4.629199862319323,0.5606473289623616,-1.4729162323210967,1.4605609518885005,2.097157785044053,-5.543643956834832,1.2895139186810134,-2.819087394050447,-0.8948700455783439,0.29692221417101894,-0.825198569486407,2.362882896533612,3.2173773538853494,1.034047124773793,4.782198775698495,1.9189652842117013,-0.3930304296585169,8.6385486073006,2.523839382043679,-4.782400650347165,-5.202580296988493,-6.206056981207049,2.7504940882178484,-0.08116829497164693,0.42861390754335965,0.3333666555026751,-0.33569172059220503,0.36282707266636643,-1.0692001139908944,-4.855548986493762,-3.646049565910946,-5.422477246745545,-2.145145562450495,-0.6147404196471902,-8.395565666324908,-6.3083951914528695]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
3.2%\",\"values\":[1.1740525137452598,0.6194522995624296,1.4116979452333707,-5.738350427581665,1.9664694101469808,-0.2919817835271673,1.378905606750184,-1.0001430840514098,-6.399397258886863,-0.16963745913637268,4.50833079993023,2.3772385224362553,1.289613032801864,2.159061143956561,1.1673075789099039,1.0320816326797433,-1.9955973100895341,-2.789507645879339,3.941092499509458,9.151460913116399,2.450907274688111,3.6473890079026665,4.273714121202266,-3.741168244452658,6.220520688396557,-0.19408944783903823,2.236320165240667,1.8204615960165993,1.7811675076164577,3.87675746424305,-1.9207111507635095,8.526336389178926,3.943571151328981,-4.76048127162867,4.812747131792236,-2.0374400916460758,1.8249139143816318,-0.8658133425587913,1.8802138355417803,-0.31688052353898505,7.457348357065514,7.476167384941426,4.41385928326326,7.472069921159433,3.5606128417935827,1.342814155995613,2.2281932872906163,0.9652375910915142,-1.2884939351264797,-0.8394272929609746,-1.1566472159021417,0.5842009771741354,0.5627923338667818,-1.0323883043772988,0.3073221901282677,-3.4163299366541895,-2.2225494714289513,2.575434614046635,1.1309092052438847,1.6983714940739192,0.3180478310977318,-1.1246101286119632,4.9044496672368,-2.1383560641240864,2.764634557039705,2.914218864148363,-0.3362342414379309,2.0283676329159417,3.7071245527389203,3.8429344147438877,-3.529092488292079,0.6333031165831989,2.677492649979236,2.7064580865032637,2.097274709811909,7.073715683391287,7.542516317610304,4.9574451227193705,2.940436128003915,2.1458489242721908,3.0611019927221332,10.854617807143583,6.137142666324331,1.273689937112697,1.3118507915901296,0.06918185755825025,-1.3395113291531535,6.821629609012828,0.6234052050315874,2.4619186737334537,1.8804402544118883,2.2933933568466767,-0.3655981452031235,-0.7986225631447255,1.9800345873708247,-1.1364971129262806,0.5908739212542092,-2.1472822099135023,-0.7808299905396946,-1.0123532242741202,-3.813019632265686,-2.9693479566674417,-2.41043439481379,1.2588831853359785,1.7368327041242508,-0.8431785739727582,1.8520951764851736,0.428769558539,2.065877347357987,3.784036576871056,-5.269033847597604,-8.233033077042089,-7.549791360767359,0.906123483011288,-4.499769449939621,-2.818009007177712,-3.8622579890691675,-1.6043144223870915,-1.9761230940578045]}],\"hovertemplate\":\"color=Dendritic_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Dendritic_cells\",\"marker\":{\"color\":\"#EF553B\",\"symbol\":\"circle\"},\"name\":\"Dendritic_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 42.5%\",\"values\":[-16.681919153031615,-11.296893276411879,-5.455369691536223,-1.8329579148372783,-6.4751794295070555,-11.031525230244867,-15.716645491000397,-7.488901659579707,-8.917148357040848,-5.387218561287919,-7.875151590417475,-14.444904870628807,-16.19563059270942,-10.757213022263052,-9.796588505080477,-12.205891308309283,-8.32787716762199,-4.734638802587958,-9.798380279568008,-11.334200785749445,-17.894668324179523,-18.174394364804165,-16.907361441101074,-11.557613880503954,-10.0855990324363,-4.717415942790337,-11.369680787048875,-19.62475748590959,-14.186143620626542,-11.783881210736023,-3.9799058107685754]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
17.9%\",\"values\":[-2.4866964760553727,0.5827084616970761,-6.584790126959794,-9.843204474701432,-1.735037112093135,-3.5203890971439935,-0.3648528115117955,-10.917437959041518,-8.137481841552493,-13.17666462701792,-16.0340516466598,-12.249309450814785,-7.044110888655938,-12.35485172204314,-16.541870907670287,-7.365893206475485,-4.305005599332729,-11.727679282920416,-16.17806090479052,-4.301600076456077,-7.38751015486325,-6.083325694074893,-9.485794812327878,-13.649810256624965,-11.455826284271044,-10.14553057600989,-1.2441429122295307,-0.9462311260941736,0.8661076293733264,-3.9148123998510487,-5.53122749805992]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 8.7%\",\"values\":[2.6185924659809556,-1.018407303529048,10.225681931958816,19.904646500675078,-5.616410932343824,3.1163211888804714,-7.100892494915584,10.551856369121449,4.838372227757695,1.460753556288361,5.05695512014473,-1.716539092530606,-1.4635000834508833,2.2482965254834832,3.5841306523773815,-8.108271926005097,6.777793172685909,2.979251246941288,6.357240783287284,-7.6652734147395645,5.264964247602477,6.50843019057861,-2.8950354344450377,3.775356067187089,-5.462830714200916,-0.46155422978483257,-6.650470731976192,-4.352703555432485,-3.793486004612836,-0.3050314264329532,0.25496446419036234]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 3.8%\",\"values\":[3.6762665008616078,6.766644662762168,6.123733287812233,9.780139022029173,4.524486048893658,4.787873377486053,3.648982237328458,5.464958411607075,5.614366012423148,1.7358535797936918,-0.014460968870127822,-3.766067808118958,-1.1991938525729064,-1.7058724674915315,4.003989070100161,-3.437928065904411,5.2237200002062,6.5474677003094905,-0.956728047335869,1.937080285692793,-4.499993516026979,-4.179867939580983,3.7174903088048725,0.7447393315798223,-2.3962108946888008,5.263364517421521,2.9231726924067627,5.719908570946161,2.229494715575124,3.196495047941754,9.547705280158665]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 3.4%\",\"values\":[-0.1283063713900702,0.38143405864212854,0.8543002575797355,-3.7600804208584995,2.385453430153325,0.6070860582841603,0.9319586319699327,-0.1411742222920248,-1.6096741738729843,-2.8300671472675822,-5.474244116933185,-4.094150542766461,-0.42615083257252084,-1.7275787751535507,-2.8449158937252665,0.19302357919142785,1.437828406076573,-0.5641951570270887,-0.08400948879488705,0.18476066027971905,0.6179924567773644,-1.1351875448605788,0.30198351714256,-1.2532164417618834,1.3775625239131852,-4.0107207886394045,-0.3738303926183886,0.08158311282873078,-3.8505344237072916,1.5822323940194407,0.9003545899065515]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
3.2%\",\"values\":[-4.357256773342994,-7.729013711273134,-5.2216531136481334,-5.4398592099452365,-12.305627577556272,-8.177296232482782,-5.675296891091547,-9.07596603922799,-8.317932390031267,-10.30121516941709,-7.416101082904636,-6.7178377327023036,-4.729148340497778,-7.506239007834563,-7.903397122826476,-10.371903864095446,-9.04913319865811,-5.231457614774617,-7.746948531750676,-9.796542376799318,-3.583295613837255,-7.2049922201199035,-4.751859481831092,-8.42056202413112,-8.98676964512599,-7.812353808290694,-7.629435537275462,-4.864725739113075,-5.577830298073223,-10.443103339881866,-10.159666880593742]}],\"hovertemplate\":\"color=Endothelial<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Endothelial\",\"marker\":{\"color\":\"#00cc96\",\"symbol\":\"circle\"},\"name\":\"Endothelial\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 42.5%\",\"values\":[-7.900747032988724,-14.236430087722049,-14.881142565966856,8.185992224233239,-9.35839628334074,-15.08408507237451,-14.89201483884963,-10.511293868206662,-5.526822278742043,-19.66681040829703,-9.740688500051156,-6.371459307153175,-13.208046867663398,-10.453885165768408,-11.646130459986443,-15.740567938868473,-10.724731504722048,-16.65724413489272,-8.870988414022133,-13.702496653823188,-18.85110760161824,-7.507836184472188,-8.587527071236536,5.377498168574093,-6.309368600426573,3.4415204536627195,-3.4187311109913985,-19.029588369725076,-12.501262919785843,-14.94624656443327,-18.420885400490686,-18.323990700353985,-11.85355172475099,-15.783218300560305,-16.700742648686553,-14.567187667586742,-16.33149803517134,-18.428060397654217,-15.511260259148962,-14.270999543450772,-14.817336777120417,-8.488105670856552,-22.355276516515307,-8.846962647557769,-6.391677800395032,-11.387958252786824,-12.47847252958988,-10.185952269079017,-9.883202763175763,-2.0133816641705575,-7.755917342520793,-12.996125575649284,-14.788432672851808,-7.830113753537813,-7.066838249339037,-24.570581924459894,-12.691552101148488,0.1684012490284288,-17.421243223598765,2.2611097361548924,-12.913139798265307,-5.357826175116285,-13.266412117897467,-12.434733256327208,-1.0635539195819115,-14.139994118794416,8.863345351243343,18.406614424614776,-5.853527648553441,2.6511320198910266,-14.934291131725868,-3.5704986701270784,-7.064094592611723,-8.212426154794864,-1.7928814738483354,-14.206226863285657,-12.867919827079241,-20.09985598195333,-25.061878458328735,-29.10278402415135,-6.2692867696998125,-10.444686396461545,-21.05388366428948,-22.89917320104677,-15.825107308449315,-22.477324876553176,-26.96223497656853,-18.49788336522248,-24.906346325367586,-19.53976086749695,-17.14867000355328,-17.407631966891532,-11.101667042486516,-12.798049562157697,-17.684908425832276,-21.079279697387353,-15.748780552679376,-18.141612910711025,-18.32634523528215,-15.371226094707144,-19.927539835047508,-8.793898342168665,-15.64530932661723,-7.481090802758926,-11.673959658393965,-9.737808387349144,-9.245947532262072]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
17.9%\",\"values\":[18.919995269865193,9.701143954425625,-0.2386112802513436,8.975598772878007,21.77508642972416,12.155070128869626,21.941448585611653,27.081498264702283,0.35395336545172906,6.5152550517054975,8.063820909617041,7.835647051494222,11.6535430212538,16.0880249577356,30.243585139576297,14.83637549269717,31.98861429520331,17.928288524848057,26.607436784327575,32.35496800350661,4.31212637661411,22.144925606683813,19.209264298071382,27.29584744322463,17.552918781900917,2.5408980468946005,19.605235335670784,25.684133464550385,20.329887026304963,15.386680057538095,24.181932508025312,21.744049916182377,19.774753700499865,14.49449080175656,10.59041617899231,11.773439567535156,31.288880697754287,28.696198736217934,26.987489300369425,21.660974445280573,-0.1468472948980296,0.19433920974056829,0.9960163615139334,12.085714706764406,9.229760429734942,17.312033291766404,6.127701615773333,5.647476789476855,2.861973300479522,18.736687481017032,10.154734911285814,8.24209441142698,-2.953596079257325,21.504633673708305,21.821442734644393,9.477709392780596,20.57293927039004,23.396632931790602,23.60272169830288,22.968875404692294,27.204224760258167,13.113537893207361,19.169992596245965,19.516430291751767,13.255246226888374,0.5107563612740406,11.270393504314931,35.06455788410068,20.81404085264694,22.75166155160022,12.020963751451132,34.31332498091387,20.57700798851869,28.421681920175168,28.3930572181606,17.8150917653637,1.3025017509000263,-7.247972587663433,2.1530640390380595,10.190534905687215,0.5459561302514571,14.21490559146433,9.447572876977134,12.08363012214338,1.1098906053174122,5.773178849137752,5.899488176837009,16.015356601185626,3.3036944432975726,16.65611707395691,19.898347306889175,20.392857797121994,-2.940388354997953,17.216969010590443,6.934474813878473,17.855913427998555,-4.422213412858078,12.997784458263743,22.69816012035653,22.243441749522972,6.34221289240787,5.056244622048658,3.6310537016254685,10.80687760475136,14.086759386883816,6.164101307138273,7.7663721232734675]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.7%\",\"values\":[-1.7604264592263132,3.8638947795504714,2.473417949564354,4.496111672407371,1.061596289634073,-4.1014311619135135,-4.847051019530076,-8.856776099911658,14.197114741506324,0.5534390841555901,4.964286171758113,1.3159790935852258,0.44879249363792195,10.04955719647131,-0.9644627664818755,-4.519202993787872,-11.172082526564507,-1.904148049824725,-2.70511396128998,-11.879199414019055,-3.3254376724999086,0.38212826692638613,0.5015934564368614,12.960519173883402,8.944138099778622,6.086282338897705,-1.6155312984775447,-5.166163325579187,-3.2563512061391013,6.344609835909145,-5.498893989162884,-5.435631757680342,3.5704080068489423,-2.6568018494677204,1.4265496004840101,0.6293678278177577,-4.5033203505481865,-5.5210845023883275,-5.217760688859671,-8.364813421566687,-6.565359696890449,11.143041547987654,2.7374142348477104,2.1009459824912993,1.7774160438809523,5.040372876704135,6.441739330969414,8.438321424474632,6.668740256176774,6.0670833043173955,5.709903678577047,2.0074039357297098,4.857408306504009,9.742590158054407,-4.303629222392513,1.079052270758614,-8.70735693731097,-8.630383811652882,-12.566329465605024,7.694832074790657,-9.331665956905969,1.6354622866757533,-0.46014952595857306,4.7562351474889795,0.5339712777190511,1.14441808792956,12.692535966323586,17.64675298976014,9.640799999645452,11.552913540854401,-5.248839631758651,2.83918015009881,10.459572855230281,4.382177596760152,9.907738940971493,-7.678855076488778,-8.885269541780895,2.61317864585126,-4.110529643624969,-9.173601230555716,2.266056249140971,-9.456299345620803,-5.485670255960193,-7.736592806899377,-4.2762044668501815,-1.2474588692133484,-11.310086631791174,-8.692037995622547,-9.054891871786204,-4.5350447861577745,-11.439353998806492,-4.812777494416858,0.053787046427223206,-8.502216145303581,-3.0492384793445586,-4.17790673585807,-1.4684002239359282,-4.6616380166864335,-15.28332031670251,-13.863925128790497,-3.2582964320959396,9.118893051130241,2.5460635707724615,9.843414741638195,1.4918619916769202,3.4263168625426745,5.487144876341413]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 
3.8%\",\"values\":[-0.0537795940546027,4.9636759289870165,5.570594659167735,13.453393180139795,-4.456541380679365,2.7924935070275563,-1.9176923535634143,-2.243203659556479,1.7557191510374623,-5.274860520404694,-14.960721128966398,0.4576590420747173,-0.057277507281960016,6.222164150440402,0.5477716353555935,-8.799669112239961,-5.348386155485222,-4.183118274152389,-2.9720632264839435,1.1441983845435564,3.3457637247456393,-9.956150345281173,-3.2234450195945534,-3.4165993776267927,-2.291355965952606,7.921919521606905,2.272978023640487,2.808761379218692,2.2681637071988803,-7.9995576188883035,1.3938515034794974,4.1416196864437955,2.1075019868513336,-0.6271747948057409,-9.907549431073312,-4.162863773250031,7.538531002307636,5.478348894420704,6.445095434934383,2.6496485803245315,-5.955893830803164,-4.929658620728713,-6.012603983257646,-4.058735411301967,1.857886808699082,0.23966102176568183,0.13640073927227458,-1.7012190280791306,-0.8543311728816977,-6.755543044824636,5.175355686552372,-11.698477755641061,1.0827725849731098,-3.005711021836513,-2.402935371260109,4.953834725020483,2.3262958660078965,0.45366675348478225,-2.735217546037185,4.0419143394284935,1.3181021867320781,-0.8809296853166673,-3.0114777539304485,0.038147613640335754,-0.6819637437160466,-0.9024896593123573,-5.224080253823271,-0.9650488396729641,4.4821134165898915,-4.78926635184008,-5.2476279368529894,6.826850572864495,-4.345172775070611,7.805018600290016,8.829208377420429,-3.8547539050342308,-9.13629559174337,-14.116254566433199,-4.403217059976005,-5.436830588764407,-5.286746574444326,-8.380326737064516,-1.062556323419245,-6.282376928519888,-4.75215772699916,2.2244866366930927,-1.5797282082922388,-3.5476852678999906,-6.204786568648321,-3.7185958956844356,-0.4229901844175654,0.624575432821042,-2.7820681581718776,1.214323371343705,-0.8955702936452841,0.4481853080313727,-9.958697757441012,-1.0476604844539668,-1.4862045913337658,3.348781903710634,-7.644276450467186,-1.2656755795769148,-3.253175304759872,0.4134729807483484,4.151044977801335,-1.2417089338791654,-5.3914366799680415]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
3.4%\",\"values\":[-3.4523125279042164,5.492437538441156,2.6359796498522794,-1.7546753758165288,-1.3132494479849606,-4.481813500193165,-2.2642978060756023,0.6522510895155538,-4.921386534871595,0.14432321031173637,-0.6129883871739497,-7.879186437686645,-4.609326670231442,-5.654292056808716,-0.6950779688716286,-3.4342692713013654,-3.0477194103413288,1.7791845884452222,-5.840914690402992,-0.22567899727011964,-0.5079792687519723,-8.533940247086976,-3.4760662279871473,-5.3472634289224015,2.082839635401604,-6.183278423649231,-9.127685183068994,0.9627175185873024,-0.20706277285519312,0.5638055107461029,1.1397917559661477,-1.37568750078426,0.5354462255115671,-0.9546523319640956,-4.599112324372678,-1.7277242070181038,2.1236140397649357,3.0184720733030184,-0.25059311802936474,-2.4504566502084804,-4.250792306816921,-3.0344750679883363,-3.297008791211476,2.279122212218469,-3.290261297923208,-4.074818435044223,-1.1631144730627099,-4.113701649643669,-5.194057352291915,0.16394463090789907,-6.368586969509843,-1.553429050918303,-0.012610789330244359,6.11002329901853,-1.884750204791824,5.047671922912995,-1.883984473335911,-1.5948798031823463,3.6698401010863133,1.9283214697371593,-0.15317475805331254,5.443268768096081,6.099517379653371,6.23024996459581,2.5822989862999246,-6.992216447451486,-4.733650528830365,-10.661995362593174,-5.211899015442075,-10.128533773115795,-7.17691603710887,-4.316115229885083,-7.918789275877677,-4.162926005376901,-6.266365130214744,-3.3712941448357245,-10.35988301336044,-0.9142442596764349,-3.6724730327694872,-0.11109390042268816,-3.394528129971577,-5.76288148875798,-0.9383905000655892,2.0336271501106316,-2.9984954149944283,-2.744142208836306,1.0826240320217173,0.848813810834254,-0.8630558093268614,-0.703343951319268,0.5535162801998693,-3.181478428783471,1.2937425871708055,-1.6719848696236848,-0.6481725661300775,-2.969589725279203,-6.395902308481097,-1.93940278583244,-2.365615872641996,-2.4893618256051595,-2.0471584121887196,-9.480041680318775,-5.8361592076507645,-3.54267927840111,-6.94398359791191,-6.089604101442374,-5.872611712443662]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
3.2%\",\"values\":[-5.414670336804079,0.388516626322085,3.662798300271323,0.8661234392697559,-4.407465801884762,0.9032555572745893,-0.5160340989259296,-5.5801524922897645,9.43300794691925,3.7889936834881515,0.5512546099356231,2.629704352175371,10.114031934266025,12.828975147824378,1.6428349855964697,-1.0840104098957366,1.2543731969428802,2.9029776166367247,3.377010752740482,4.436359179059937,1.45835583232539,3.5242014751423962,-2.128203659810887,4.642601040780792,5.139398261851032,1.1514904008029125,-0.3710533383830253,-3.1067224728691425,-6.002864616985229,4.344836333356531,-3.173302330323848,-8.058103951185009,-3.3361465887336528,5.193858571326917,-4.290346842319525,1.3920172187758735,-2.5987824563969717,-5.5230436826983365,-7.538978579660606,-7.587630603926997,-3.073002153756185,2.0754513128749363,-1.5222533552601667,7.648625549765501,3.2880120429986945,3.7988026366681087,1.566959909291743,1.1357363755899106,5.353424660817543,6.578077361414298,7.982727218327953,3.1383243834863133,-3.285281872484094,3.6976947776129387,-6.126966437350923,-1.7168046171400806,-1.8168591987378573,6.422820339612926,-5.665791578123437,2.9027419587866867,-2.0054395016612125,6.613078615514447,5.569546362302364,5.728220480368759,9.123369497339965,7.121585305788346,3.5744584658140797,9.716587138896047,8.689689183275533,5.516736876234602,1.4744667805389022,12.286200024660412,7.34513064579574,11.811668950465577,8.895158747270685,-3.059756411904246,-7.199923686841736,-6.376530414910964,-3.9982995104692947,-5.151156749236543,2.626482887811331,-5.078959418326882,-3.021636619492215,-1.9721273606190124,-4.903773934968704,-4.421850716522769,-5.3700756311194,-8.429507878535293,-5.5491846091275,-1.6043093855677033,-7.565014924070918,-4.4407552719983086,-3.934650397148025,-5.888825188889845,-2.9069320471180293,1.993103868213147,-1.5483236289433948,1.1031687386159708,-0.5845070155793154,-6.033667015665203,-3.107117086430851,2.110392141178906,0.6335826655020264,1.9102218405254512,-0.3674232244349622,-1.5483354170463373,5.266177794196306]}],\"hovertemplate\":\"color=Cancer_cells<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Cancer_cells\",\"marker\":{\"color\":\"#ab63fa\",\"symbol\":\"circle\"},\"name\":\"Cancer_cells\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}},{\"dimensions\":[{\"axis\":{\"matches\":true},\"label\":\"PC1: 
42.5%\",\"values\":[-27.278425810670058,-14.605393811545056,-28.089167496050344,-22.434534695854783,-19.74170032846431,-14.94493310326094,-25.92231196526005,-26.327339824382292,-17.724404878944007,-27.124681194987254,-27.96300101698023,-17.603413639162596,-30.601129353201863,-27.77316915904595,-27.049952547057714,-15.650983579164102,-23.910054641650824,-19.216380420076675,-30.64320541768008,-14.758746622257632,-19.489267332709186,-19.592606673312332,-16.775311766774312,-25.913968447295865,-25.67365332541916,-27.940086037747506,-18.76276724730871,-22.20397651555315,-12.54409962023275,-19.53837209474834,-22.248335401621794,-28.79210816610921,-28.308492398771385,-19.249138254206066,-17.92949068981475,-21.47627695195097,-18.699451871545868,-20.49834410565282,-30.301556901774987,-8.7248868312093,-16.805978096138425,-28.793926389842955,-28.567230125113298,-21.03160601874093,-13.677508522571589,-23.330809504492912,-19.679013911470435,-12.568604997039792,-17.99062200571608,-15.14117889209881,-14.015427899706719,-9.522478363558353,-15.229336270734521,-25.78776281345347,-21.76914126134889,-28.134981811738488,-20.762441989079747,-17.28253169064552,-22.91281686693661,-19.972072181303112,-18.653898804298393,-20.648584509863383,-13.690245271033945,-13.636243931344417,-19.718506049561235,-23.72827371363983,-25.070285854552324,-19.854844191874975,-15.339005120022637,-19.150984504490715,-19.36411123654689,-15.697145753536066,-19.52866777870281,-11.393828988526511,-13.374973106318732,6.929482600339284,-18.62572862311603,-23.55141694524937,-23.173875134973866,-27.020111989250303,-35.03890661397108,-23.728349666618612,-17.81055275119759,-24.97254986909441,-22.245454133353764,-24.50001521518099,-28.967882595664676,-21.94541690692605,-23.780770047095686,-26.808667891999715,-22.87950717672124]},{\"axis\":{\"matches\":true},\"label\":\"PC2: 
17.9%\",\"values\":[-6.602869063478773,-1.4148079271920584,-8.32353406053715,-4.677282930788876,-7.652946890050872,4.45280450640705,-4.834911361487343,-2.0648062778002707,-9.86916650423742,-10.830296283652618,-5.375135124956586,1.1768184022683115,-10.650984663698896,-8.789198660715572,-9.114318255171842,-6.886553779404501,-4.518725004458369,-6.8259200210399555,-8.360514184549821,-9.63755821820504,-8.1746840925213,0.2113942624786045,-2.426192677674135,-12.054979203616675,-11.911554250571559,-10.783845212751572,-13.578801130415066,-10.609335432740114,-17.94029311303664,-15.334325951871687,-12.541726987137059,-12.514479756036327,-11.120423118315227,-14.492023464855219,-12.921940188679113,-10.226776777013683,-8.452116469432923,-11.22761976249755,-15.011692574877133,-18.03643461025301,-10.411681116680729,-10.098242172813213,-9.23805998574629,-4.390282094930095,-8.835663624868268,-11.240111011760952,-13.017730973935906,-15.830662235327104,-16.014279450680696,-12.63447993473568,-10.274626581970942,-8.366284303220853,-23.6676351116195,-1.657544901604677,-11.154460456993391,-5.753938781865573,-8.103828617321923,-6.972648117653477,-4.029923444215551,-9.775465135537619,-8.087891108630137,-12.18753008866414,-15.648667768709608,-5.240313107272552,-6.337663351662816,-9.691259463493447,-11.010124995203865,-11.171581448254404,-15.823989195964785,-5.392396621572551,-6.474040502822446,-3.463787628049084,-6.752866842002741,0.8007454067555946,2.4799903035024466,8.730240132634934,-7.976057248978441,-3.478741638263443,-2.521836309369347,-6.315761929611103,-1.982918563513512,-8.320415407398603,-10.148763257768138,-6.705304628936194,-9.954035129930478,-3.8901886838549062,-5.110661602307884,6.777701126350403,-14.80292260586333,-10.690100763498204,-11.71356025561116]},{\"axis\":{\"matches\":true},\"label\":\"PC3: 
8.7%\",\"values\":[-1.8219615584180946,11.448007605904785,-3.086671197003407,-6.860274879530236,7.439989601724317,9.726716852875287,9.102156421420807,0.3768544356620387,7.9670409663816315,1.7129125121741233,4.387444106337225,8.781619059131433,-3.2172999338199286,3.8528793881844914,3.370920853669758,12.422156505500915,2.903901962819926,-3.424182326929565,2.3420006892068383,2.6632399303621392,-2.0057406090757857,-4.406264314187682,3.295686466584884,4.423680952676297,-2.1539578540647635,-4.862884557657122,-4.866723352746031,-6.97579815133548,9.642799796586507,-3.650159252798633,2.145455121941593,-0.4291240466227333,-2.8953990735801165,-3.9426066810096563,4.834573852409949,2.598547159891154,1.3571666610409743,8.63762946601023,2.024260702898964,6.632609651362024,10.954920336530403,-3.7413291261676447,-1.2309832301521841,2.2837263129633807,-1.2558823426187415,-2.7572357188329892,-0.20049697808182076,2.9380699865102757,7.722245703867686,10.832778878807217,6.56015763996625,5.524505273069979,4.703881054419221,-3.150301122658373,3.437300596966052,1.5289758664716557,9.793052909918284,-6.361930064033119,5.8936976666976975,-5.166617205366503,6.59410111109896,3.059472672088495,2.7024609501908916,6.664686829041564,3.763325292248319,1.4867957798104705,2.330021325058163,5.390043513922425,3.279797392168464,3.528603081021849,2.0927522254190176,7.635389231628741,2.152003537827898,7.23066428220857,10.539188488563164,-6.472919565014628,-1.418557604536565,2.2251505691305846,3.215987577635652,1.1142354715861449,-11.738606338008445,-0.9415769537849747,12.40378772077181,8.913046027947752,-1.2920893560238178,8.100928410106745,-2.105860914114582,6.9163495130742785,4.725206105696792,6.646738950196704,7.237524844144101]},{\"axis\":{\"matches\":true},\"label\":\"PC4: 3.8%\",\"values\":[2.0078988012985204,16.074720455876943,1.855519765608083,-1.2993802518981232,3.0282415211788267,12.188593966611833,12.467525594655886,6.079525608787977,10.220930984999516,-0.5119664749775874,2.3369040381310637,9.013926449886336,-0.9929849718972328,1.581705970009124,-0.6001452603874255,11.238141861527954,-0.9559612563267448,-0.14398513173387362,-1.2763322415862235,8.073555742929441,-2.233929162235136,4.284479644401542,9.369400849601226,0.27400579327346186,-3.9565041982619316,-5.52095365539836,-3.608947919898025,-3.048171550315682,1.017303617432591,-7.7192715075899585,2.1857083539640434,2.9560035000850133,-3.448419571323332,-10.22213355036681,-6.853164687813068,-2.2315541335708993,-1.2386401346781732,3.1043235674102907,-3.990087641063579,-2.8725466026628044,2.9786078544341983,-3.092255795466466,-4.528907973015695,8.232316433046488,1.4426517116149469,-4.889751277364694,-6.887717939030699,2.8538252713052152,-0.6680067674448227,3.411391847370142,4.6395260446768924,-2.0373656031708474,-2.7197316803880915,-3.3050159448634155,7.007257065999971,2.9229202391010993,5.52127212614843,2.8128637502252714,-0.9045615055238185,-0.6753137106582341,0.8844185174731527,1.033015524568813,-5.778743056443133,9.146994915040779,2.0617249450553428,2.022658813861812,0.8550900590298967,5.819104124501319,-7.784578711395074,11.5651441419658,6.47838445270458,12.386093082358705,2.2181879788506507,2.753240096854668,9.307493927145238,6.777821470816648,3.4299536478516424,7.8754456595462905,5.134966243187164,-5.883145875707667,2.6692590147996995,-0.7897391947924721,9.254949448420476,13.466597712068332,-6.4529472518813815,5.791793473180474,-2.7609918503991566,8.590678401434001,7.478829046942535,8.894334587416395,6.3857551144650815]},{\"axis\":{\"matches\":true},\"label\":\"PC5: 
3.4%\",\"values\":[5.078918884612836,0.34537819018872395,4.049923186417789,1.2765552570773466,-0.45069624950636666,0.35472682908342773,3.9521490325215365,3.2519680001653763,0.18857551749266363,3.366982389105258,5.2360415630054185,-0.7963820621468946,1.8025596389154381,5.744517294452768,5.7809357106785235,1.0758361420167815,2.180736902261679,1.155208638106821,3.8341930922413856,-2.1503654433257853,1.4864643093078136,6.032566316083721,1.1293786858251522,0.7316550393549694,1.2646440489274031,-0.9149536023191387,-0.44649314936574336,-2.1710237609097276,-2.983938837091577,-2.084543807587328,-0.23488533108123083,0.2509242361074661,1.8548343067023683,-3.659581520164556,-5.288971843831737,-3.0395697655743406,2.288809512884367,-0.2098881146283918,0.8212732028120237,-4.4310412109324275,5.402793602448149,2.2986077949542953,1.6858710635153877,2.3851754675600167,-0.26449349373824826,-0.6558300864237675,-2.5114409444687986,1.8456828208205796,1.8980171396765,-1.2991791146595897,3.103212909530044,3.2582146125264697,-0.8112654641089347,4.742918473564936,2.7376603997627207,7.512932106054243,0.4799190301382237,3.3873844378788855,11.816785583057072,10.40260075722323,5.8896674783790655,5.336578787388065,-1.4589975881925772,2.994670407663374,7.596579857494697,3.109511047763389,4.754230206058141,5.241806386077953,-0.435643253785688,-0.5210847244856099,4.12727870487845,4.006699900035439,4.391014274511706,7.252255346074734,-1.1432407077869549,-6.8622420900493735,6.326469419543008,0.5983902889784161,1.252494305479058,-3.88320040920781,3.7411516827341,1.3083391779113835,3.970072378407167,-0.011281966891825711,1.7089226573796914,-0.9267725071274397,0.20369582187502536,2.9989733263423193,1.9681860928793842,3.33581550835706,-0.4007123785928376]},{\"axis\":{\"matches\":true},\"label\":\"PC6: 
3.2%\",\"values\":[2.198742698667211,7.974709450207474,5.198325785052623,3.677668137408569,6.500902599880445,-0.021522595907430775,6.479150491908802,5.913959490324048,-2.0929520241615105,5.7192264928990415,6.661598940970958,-5.375134825891802,5.9356712161221035,5.831144184640111,4.425114930784403,-3.6105742629504665,11.02605083974469,5.646800502133901,-1.4015081885908873,-5.831373937094482,0.15510866826384237,0.9774167480629667,-5.563091758681031,11.595526785089314,5.931215431957846,8.32173602891842,-0.3758695636898754,-0.9641680684214425,3.960531508666615,-0.5627102458436519,-0.45347020573860597,5.555220650704475,3.909775583604018,-1.3911297534872498,1.9505917959993369,0.9170769192055462,2.0164946722822923,1.947328503100982,7.93527436421253,-0.0005051365909556286,10.458162710442288,2.8727865729755484,6.731656765693897,-3.28429658473205,1.5163052447646574,5.350546861646856,0.3996132145700458,0.2452234809286357,-2.0518409199017555,-8.491760773849348,0.3495543953197952,-0.6754475995529734,4.113369828064585,2.717889582507938,3.4547667023019373,2.7203802421628707,-2.5866674954004423,0.823975574639254,6.300362180004513,5.640643228548198,0.5916345180934338,4.9382750241490765,-2.4490871684693514,-3.963996271546293,-1.1256946254088918,5.747683173908308,6.839018490051548,-1.6047073743287692,0.9913632083987445,-2.3359545688016112,-7.596095553922318,5.465065675766574,7.887086932547321,-2.433342267772769,2.895924824739715,2.0347399449300596,6.251019939932169,-4.252873870949998,-7.3031343903624215,3.539054021636623,8.737914345842706,5.027278122214256,1.7550008301989943,2.1334184282438047,0.41296269625672655,-1.1291544580343476,8.563361068231112,-4.288226711949552,0.11978876111212795,2.7255664656699947,1.7830293631078575]}],\"hovertemplate\":\"color=Fibroblast<br>%{xaxis.title.text}=%{x}<br>%{yaxis.title.text}=%{y}<extra></extra>\",\"legendgroup\":\"Fibroblast\",\"marker\":{\"color\":\"#FFA15A\",\"symbol\":\"circle\"},\"name\":\"Fibroblast\",\"showlegend\":true,\"type\":\"splom\",\"diagonal\":{\"visible\":false}}], 
{\"template\":{\"data\":{\"histogram2dcontour\":[{\"type\":\"histogram2dcontour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"choropleth\":[{\"type\":\"choropleth\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"histogram2d\":[{\"type\":\"histogram2d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmap\":[{\"type\":\"heatmap\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"heatmapgl\":[{\"type\":\"heatmapgl\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"contourcarpet\":[{\"type\":\"contourcarpet\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"contour\":[{\"type\":\"contour\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"surface\":[{\"type\":\"surface\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"},\"colorscale\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]]}],\"mesh3d\":[{\"type\":\"mesh3d\",\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}],\"scatter\":[{\"fillpattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2},\"type\":\"scatter\"}],\"parcoords\":[{\"type\":\"parcoords\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolargl\":[{\"type\":\"scatterpolargl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"bar\":[{\"error_x\":{\"color\":\"#2a3f5f\"},\"error_y\":{\"color\":\"#2a3f5f\"},\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"bar\"}],\"scattergeo\":[{\"type\":\"scattergeo\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterpolar\":[{\"type\":\"scatterpolar\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"histogram\":[{\"marker\":{\"pattern\
":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"histogram\"}],\"scattergl\":[{\"type\":\"scattergl\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatter3d\":[{\"type\":\"scatter3d\",\"line\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattermapbox\":[{\"type\":\"scattermapbox\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scatterternary\":[{\"type\":\"scatterternary\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"scattercarpet\":[{\"type\":\"scattercarpet\",\"marker\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}}}],\"carpet\":[{\"aaxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"baxis\":{\"endlinecolor\":\"#2a3f5f\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"minorgridcolor\":\"white\",\"startlinecolor\":\"#2a3f5f\"},\"type\":\"carpet\"}],\"table\":[{\"cells\":{\"fill\":{\"color\":\"#EBF0F8\"},\"line\":{\"color\":\"white\"}},\"header\":{\"fill\":{\"color\":\"#C8D4E3\"},\"line\":{\"color\":\"white\"}},\"type\":\"table\"}],\"barpolar\":[{\"marker\":{\"line\":{\"color\":\"#E5ECF6\",\"width\":0.5},\"pattern\":{\"fillmode\":\"overlay\",\"size\":10,\"solidity\":0.2}},\"type\":\"barpolar\"}],\"pie\":[{\"automargin\":true,\"type\":\"pie\"}]},\"layout\":{\"autotypenumbers\":\"strict\",\"colorway\":[\"#636efa\",\"#EF553B\",\"#00cc96\",\"#ab63fa\",\"#FFA15A\",\"#19d3f3\",\"#FF6692\",\"#B6E880\",\"#FF97FF\",\"#FECB52\"],\"font\":{\"color\":\"#2a3f5f\"},\"hovermode\":\"closest\",\"hoverlabel\":{\"align\":\"left\"},\"paper_bgcolor\":\"white\",\"plot_bgcolor\":\"#E5ECF6\",\"polar\":{\"bgcolor\":\"#E5ECF6\",\"angularaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"radialaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"ternary\":{\"bgcolor\":\"#E5ECF6\",\"aaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"baxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"},\"caxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\"}},\"coloraxis\":{\"colorbar\":{\"outlinewidth\":0,\"ticks\":\"\"}},\"colorscale\":{\"sequential\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"sequentialminus\":[[0.0,\"#0d0887\"],[0.1111111111111111,\"#46039f\"],[0.2222222222222222,\"#7201a8\"],[0.3333333333333333,\"#9c179e\"],[0.4444444444444444,\"#bd3786\"],[0.5555555555555556,\"#d8576b\"],[0.6666666666666666,\"#ed7953\"],[0.7777777777777778,\"#fb9f3a\"],[0.8888888888888888,\"#fdca26\"],[1.0,\"#f0f921\"]],\"diverging\":[[0,\"#8e0152\"],[0.1,\"#c51b7d\"],[0.2,\"#de77ae\"],[0.3,\"#f1b6da\"],[0.4,\"#fde0ef\"],[0.5,\"#f7f7f7\"],[0.6,\"#e6f5d0\"],[0.7,\"#b8e186\"],[0.8,\"#7fbc41\"],[0.9,\"#4d9221\"],[1,\"#276419\"]]},\"xaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\":2},\"yaxis\":{\"gridcolor\":\"white\",\"linecolor\":\"white\",\"ticks\":\"\",\"title\":{\"standoff\":15},\"zerolinecolor\":\"white\",\"automargin\":true,\"zerolinewidth\":2},\"scene\":{\"xaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"w
hite\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"yaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2},\"zaxis\":{\"backgroundcolor\":\"#E5ECF6\",\"gridcolor\":\"white\",\"linecolor\":\"white\",\"showbackground\":true,\"ticks\":\"\",\"zerolinecolor\":\"white\",\"gridwidth\":2}},\"shapedefaults\":{\"line\":{\"color\":\"#2a3f5f\"}},\"annotationdefaults\":{\"arrowcolor\":\"#2a3f5f\",\"arrowhead\":0,\"arrowwidth\":1},\"geo\":{\"bgcolor\":\"white\",\"landcolor\":\"#E5ECF6\",\"subunitcolor\":\"white\",\"showland\":true,\"showlakes\":true,\"lakecolor\":\"white\"},\"title\":{\"x\":0.05},\"mapbox\":{\"style\":\"light\"}}},\"legend\":{\"title\":{\"text\":\"color\"},\"tracegroupgap\":0},\"margin\":{\"t\":60},\"dragmode\":\"select\"}, {\"responsive\": true} ).then(function(){\n", " \n", - "var gd = document.getElementById('d3f4fb2e-7e05-4ff9-89c4-1b255a137870');\n", + "var gd = document.getElementById('943b85e5-7439-482d-bc52-cfd49fafdd5d');\n", "var x = new MutationObserver(function (mutations, observer) {{\n", " var display = window.getComputedStyle(gd).display;\n", " if (!display || display === 'none') {{\n", @@ -8281,6 +8281,14 @@ "best_pca.scatter_pca_matrix(color = labels, n_components = 6)\n", "plt.show()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19fcb0e3", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/pyPLNmodels/load.py b/pyPLNmodels/load.py index 6abeffd1..0eb53456 100644 --- a/pyPLNmodels/load.py +++ b/pyPLNmodels/load.py @@ -39,7 +39,12 @@ def load_model(path_of_directory: str) -> Dict[str, Any]: :func:`~pyPLNmodels.load_plnpcacollection` """ working_dir = os.getcwd() - os.chdir(path_of_directory) + try: + os.chdir(path_of_directory) + except FileNotFoundError as err: + raise err( + "The model has not been saved. Please be sure you have the right name of model." + ) all_files = os.listdir() data = {} for filename in all_files: diff --git a/tests/test_getting_started.py b/tests/test_getting_started.py index 4fcc9552..69299741 100644 --- a/tests/test_getting_started.py +++ b/tests/test_getting_started.py @@ -1,6 +1,8 @@ #!/usr/bin/env python # coding: utf-8 +# get_ipython().system('pip install pyPLNmodels') + # ## pyPLNmodels @@ -66,7 +68,7 @@ print(pca.model_parameters["coef"].shape) # ### One can save the model in order to load it back after: pca.save() -dict_init = load_model("PlnPCA_nbcov_1_rank_5") +dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") loaded_pca = PlnPCA(counts, add_const=True, dict_initialization=dict_init) print(loaded_pca) -- GitLab From dba9c49db17ef4cfef1ef687cdf6076e56491766 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 3 Jul 2023 23:28:34 +0200 Subject: [PATCH 058/167] bug in the covariance. Took the covariance_a_posteriori instead of the covariance. 
---
 pyPLNmodels/models.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
index 492c9106..c4189583 100644
--- a/pyPLNmodels/models.py
+++ b/pyPLNmodels/models.py
@@ -411,7 +411,7 @@ class _model(ABC):
         centered_latent = self.latent_variables - torch.mean(
             self.latent_variables, axis=0
         )
-        chol = torch.linalg.cholesky(torch.inverse(self.covariance))
+        chol = torch.linalg.cholesky(torch.inverse(self.covariance_a_posteriori))
         residus = torch.matmul(centered_latent.unsqueeze(1), chol.unsqueeze(0))
         stats.probplot(residus.ravel(), plot=plt)
         plt.show()
@@ -3050,9 +3050,23 @@ class PlnPCA(_model):
         return string
 
     @property
-    def covariance(self) -> Optional[torch.Tensor]:
+    def covariance(self) -> torch.Tensor:
         """
-        Property representing the covariance.
+        Property representing the covariance of the model.
+
+        Returns
+        -------
+        Optional[torch.Tensor]
+            The covariance tensor or None if components are not present.
+        """
+        if hasattr(self, "_components"):
+            return self._components @ (self._components.T)
+        return None
+
+    @property
+    def covariance_a_posteriori(self) -> Optional[torch.Tensor]:
+        """
+        Property representing the covariance a posteriori of the latent variables.
 
         Returns
         -------
--
GitLab


From 283e3faf31f018ce228b6d3b156d28150b6b49c1 Mon Sep 17 00:00:00 2001
From: bastien-mva <bastien.batardiere@gmail.com>
Date: Thu, 13 Jul 2023 09:51:26 +0200
Subject: [PATCH 059/167] model.covariance now returns a detached tensor.

---
 pyPLNmodels/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
index c4189583..9c2024ee 100644
--- a/pyPLNmodels/models.py
+++ b/pyPLNmodels/models.py
@@ -3060,7 +3060,7 @@ class PlnPCA(_model):
             The covariance tensor or None if components are not present.
         """
         if hasattr(self, "_components"):
-            return self._components @ (self._components.T)
+            return self.components @ (self.components.T)
         return None
 
     @property
--
GitLab


From 8041f66cd6a695bd0ff996eda4a46a1c4a9924e0 Mon Sep 17 00:00:00 2001
From: bastien-mva <bastien.batardiere@gmail.com>
Date: Sat, 15 Jul 2023 12:28:53 +0200
Subject: [PATCH 060/167] changed pring to print and fix the printing message of PlnPCA.

---
 pyPLNmodels/models.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
index 9c2024ee..b751e9e4 100644
--- a/pyPLNmodels/models.py
+++ b/pyPLNmodels/models.py
@@ -365,7 +365,7 @@ class _model(ABC):
         verbose : bool, optional(keyword-only)
             Whether to print training progress. Defaults to False.
         """
-        self._pring_beginning_message()
+        self._print_beginning_message()
         self._beginning_time = time.time()
 
         if self._fitted is False:
@@ -1668,7 +1668,7 @@ class Pln(_model):
         covariances = components_var @ (sk_components.T.unsqueeze(0))
         return covariances
 
-    def _pring_beginning_message(self):
+    def _print_beginning_message(self):
         """
         Method for printing the beginning message.
         """
@@ -2087,7 +2087,7 @@ class PlnPCAcollection:
         """
         return [model.rank for model in self.values()]
 
-    def _pring_beginning_message(self) -> str:
+    def _print_beginning_message(self) -> str:
         """
         Method for printing the beginning message.
 
@@ -2150,7 +2150,7 @@ class PlnPCAcollection:
         verbose : bool, optional(keyword-only)
             Whether to print verbose output, by default False.
""" - self._pring_beginning_message() + self._print_beginning_message() for i in range(len(self.values())): model = self[self.ranks[i]] model.fit( @@ -2912,12 +2912,12 @@ class PlnPCA(_model): """ return self._rank - def _pring_beginning_message(self): + def _print_beginning_message(self): """ Print the beginning message when fitted. """ print("-" * NB_CHARACTERS_FOR_NICE_PLOT) - print(f"Fitting a PlnPCAcollection model with {self._rank} components") + print(f"Fitting a PlnPCA model with {self._rank} components") @property def model_parameters(self) -> Dict[str, torch.Tensor]: -- GitLab From 128a1c035af5a11a4fdb35a5eb09f157b606b274 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 20 Jul 2023 10:14:46 +0200 Subject: [PATCH 061/167] changed initialization to compute only eigenvalues. The init of the components should be 100* faster. --- pyPLNmodels/_initialization.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index e0c3f47e..72de90b3 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -2,6 +2,11 @@ import torch import math from typing import Optional from ._utils import _log_stirling +import time +from sklearn.decomposition import PCA +import seaborn as sns +import matplotlib.pyplot as plt +import numpy as np if torch.cuda.is_available(): DEVICE = torch.device("cuda") @@ -41,7 +46,7 @@ def _init_covariance( def _init_components( - endog: torch.Tensor, exog: torch.Tensor, coef: torch.Tensor, rank: int + endog: torch.Tensor, exog: torch.Tensor, rank: int ) -> torch.Tensor: """ Initialization for components for the Pln model. Get a first guess for covariance @@ -51,12 +56,6 @@ def _init_components( ---------- endog : torch.Tensor Samples with size (n,p) - offsets : torch.Tensor - Offset, size (n,p) - exog : torch.Tensor - Covariates, size (n,d) - coef : torch.Tensor - Coefficient of size (d,p) rank : int The dimension of the latent space, i.e. the reduced dimension. @@ -65,9 +64,11 @@ def _init_components( torch.Tensor Initialization of components of size (p,rank) """ - sigma_hat = _init_covariance(endog, exog, coef).detach() - components = _components_from_covariance(sigma_hat, rank) - return components + log_y = torch.log(endog + (endog == 0) * math.exp(-2)) + pca = PCA(n_components=rank) + pca.fit(log_y) + pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_) + return torch.from_numpy(pca_comp).to(DEVICE) def _init_latent_mean( @@ -102,13 +103,14 @@ def _init_latent_mean( The learning rate of the optimizer. Default is 0.01. eps : float, optional The tolerance. The algorithm will stop as soon as the criterion is lower than the tolerance. - Default is 7e-3. + Default is 7e-1. 
 
     Returns
     -------
     torch.Tensor
         The initialized latent mean with size (n,rank)
     """
+    t = time.time()
     mode = torch.randn(endog.shape[0], components.shape[1], device=DEVICE)
     mode.requires_grad_(True)
     optimizer = torch.optim.Rprop([mode], lr=lr)
@@ -127,6 +129,7 @@ def _init_latent_mean(
             keep_condition = False
         old_mode = torch.clone(mode)
         i += 1
+    print("time mean", time.time() - t)
     return mode
 
 
--
GitLab


From 055506514648f6137370064a8078bd18cc933dc2 Mon Sep 17 00:00:00 2001
From: bastien-mva <bastien.batardiere@gmail.com>
Date: Thu, 20 Jul 2023 10:17:37 +0200
Subject: [PATCH 062/167] GPU support

---
 pyPLNmodels/_utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py
index 805c9dca..681b5c23 100644
--- a/pyPLNmodels/_utils.py
+++ b/pyPLNmodels/_utils.py
@@ -306,12 +306,12 @@ def _format_model_param(
     exog = _format_data(exog)
     if add_const is True:
         if exog is None:
-            exog = torch.ones(endog.shape[0], 1)
+            exog = torch.ones(endog.shape[0], 1).to(DEVICE)
         else:
             if _has_null_variance(exog) is False:
                 exog = torch.concat(
                     (exog, torch.ones(endog.shape[0]).unsqueeze(1)), dim=1
-                )
+                ).to(DEVICE)
     if offsets is None:
         if offsets_formula == "logsum":
             print("Setting the offsets as the log of the sum of endog")
--
GitLab


From 600615427024821ed5807cbae28b9efc36e4eb0a Mon Sep 17 00:00:00 2001
From: bastien-mva <bastien.batardiere@gmail.com>
Date: Thu, 20 Jul 2023 10:18:40 +0200
Subject: [PATCH 063/167] changed arguments of init

---
 pyPLNmodels/models.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
index b751e9e4..6b47bf78 100644
--- a/pyPLNmodels/models.py
+++ b/pyPLNmodels/models.py
@@ -205,7 +205,7 @@ class _model(ABC):
         if self._get_max_components() < 2:
             raise RuntimeError("Can't perform visualization for dim < 2.")
         pca = self.sk_PCA(n_components=2)
-        proj_variables = pca.transform(self.latent_variables)
+        proj_variables = pca.transform(self.latent_variables.detach().cpu())
         x = proj_variables[:, 0]
         y = proj_variables[:, 1]
         sns.scatterplot(x=x, y=y, hue=colors, ax=ax)
@@ -380,7 +380,7 @@ class _model(ABC):
             criterion = self._compute_criterion_and_update_plotargs(loss, tol)
             if abs(criterion) < tol:
                 stop_condition = True
-            if verbose and self.nb_iteration_done % 50 == 0:
+            if verbose and self.nb_iteration_done % 50 == 1:
                 self._print_stats()
         self._print_end_of_fitting_message(stop_condition, tol)
         self._fitted = True
@@ -2938,9 +2938,7 @@ class PlnPCA(_model):
 
         if not hasattr(self, "_coef"):
             super()._smart_init_coef()
         if not hasattr(self, "_components"):
-            self._components = _init_components(
-                self._endog, self._exog, self._coef, self._rank
-            )
+            self._components = _init_components(self._endog, self._exog, self._rank)
 
     def _random_init_model_parameters(self):
--
GitLab


From c2ff60af8fe6087914dfa6932ea2e76faacad6cd Mon Sep 17 00:00:00 2001
From: bastien-mva <bastien.batardiere@gmail.com>
Date: Thu, 20 Jul 2023 16:05:11 +0200
Subject: [PATCH 064/167] now we can take batches.
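A minimal usage sketch of the new option (the `batch_size` keyword follows the diff below; `get_simulated_count_data` is assumed to be called with its default simulation sizes):

    >>> from pyPLNmodels import Pln, get_simulated_count_data
    >>> endog, exog, offsets = get_simulated_count_data()
    >>> pln = Pln(endog, exog=exog, offsets=offsets, batch_size=20)
    >>> pln.fit()
    >>> print(pln)

With `batch_size` set, each pass of `fit()` performs one optimizer step per mini-batch instead of a single full-batch step; leaving it to None keeps the previous full-batch behaviour.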
---
 pyPLNmodels/_utils.py |   2 +-
 pyPLNmodels/models.py | 156 +++++++++++++++++++++++++++++++++++++++---
 2 files changed, 149 insertions(+), 9 deletions(-)

diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py
index 681b5c23..3f9c6441 100644
--- a/pyPLNmodels/_utils.py
+++ b/pyPLNmodels/_utils.py
@@ -761,7 +761,7 @@ def get_simulated_count_data(
         pln_param.covariance,
         pln_param.coef,
     )
-    return pln_param.endog, pln_param.cov, pln_param.offsets
+    return endog, pln_param.exog, pln_param.offsets
 
 
 def get_real_count_data(
diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py
index 6b47bf78..11987d85 100644
--- a/pyPLNmodels/models.py
+++ b/pyPLNmodels/models.py
@@ -76,6 +76,7 @@ class _model(ABC):
         dict_initialization: Optional[dict] = None,
         take_log_offsets: bool = False,
         add_const: bool = True,
+        batch_size: int = None,
     ):
         """
         Initializes the model class.
@@ -97,6 +98,13 @@ class _model(ABC):
             Whether to take the log of offsets. Defaults to False.
         add_const: bool, optional(keyword-only)
             Whether to add a column of one in the exog. Defaults to True.
+        batch_size: int, optional(keyword-only)
+            The batch size when optimizing the elbo. If None,
+            batch gradient descent will be performed (i.e. batch_size = n_samples).
+        Raises
+        ------
+        ValueError
+            If the batch_size is greater than the number of samples, or not int.
         """
         (
             self._endog,
@@ -107,6 +115,7 @@ class _model(ABC):
             endog, exog, offsets, offsets_formula, take_log_offsets, add_const
         )
         self._fitted = False
+        self._batch_size = self._handle_batch_size(batch_size)
         self._plotargs = _PlotArgs(self._WINDOW)
         if dict_initialization is not None:
             self._set_init_parameters(dict_initialization)
@@ -120,6 +129,7 @@ class _model(ABC):
         offsets_formula: str = "logsum",
         dict_initialization: Optional[dict] = None,
         take_log_offsets: bool = False,
+        batch_size: int = None,
     ):
         """
         Create a model instance from a formula and data.
@@ -137,6 +147,9 @@ class _model(ABC):
             The initialization dictionary. Defaults to None.
         take_log_offsets : bool, optional(keyword-only)
             Whether to take the log of offsets. Defaults to False.
+        batch_size: int, optional(keyword-only)
+            The batch size when optimizing the elbo. If None,
+            batch gradient descent will be performed (i.e. batch_size = n_samples).
         """
         endog, exog, offsets = _extract_data_from_formula(formula, data)
         return cls(
@@ -147,6 +160,7 @@ class _model(ABC):
             dict_initialization=dict_initialization,
             take_log_offsets=take_log_offsets,
             add_const=False,
+            batch_size=batch_size,
         )
 
     def _set_init_parameters(self, dict_initialization: dict):
@@ -166,6 +180,24 @@ class _model(ABC):
             setattr(self, key, array)
         self._fitted = True
 
+    @property
+    def batch_size(self) -> int:
+        """
+        The batch size of the model. Should not be greater than the number of samples.
+        """
+        return self._batch_size
+
+    @property
+    def _current_batch_size(self) -> int:
+        return self._exog_b.shape[0]
+
+    @batch_size.setter
+    def batch_size(self, batch_size: int):
+        """
+        Setter for the batch size. Should be an integer not greater than the number of samples.
+ """ + self._batch_size = self._handle_batch_size(batch_size) + @property def fitted(self) -> bool: """ @@ -216,6 +248,17 @@ class _model(ABC): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) return ax + def _handle_batch_size(self, batch_size): + if batch_size is None: + batch_size = self.n_samples + if batch_size > self.n_samples: + raise ValueError( + f"batch_size ({batch_size}) can not be greater than the number of samples ({self.n_samples})" + ) + elif isinstance(batch_size, int) is False: + raise ValueError(f"batch_size should be int, got {type(batch_size)}") + return batch_size + @property def nb_iteration_done(self) -> int: """ @@ -385,21 +428,65 @@ class _model(ABC): self._print_end_of_fitting_message(stop_condition, tol) self._fitted = True + def _get_batch(self, batch_size, shuffle=False): + """Get the batches required to do a minibatch gradient ascent. + + Args: + batch_size: int. The batch size. Should be lower than n. + + Returns: A generator. Will generate n//batch_size + 1 batches of + size batch_size (except the last one since the rest of the + division is not always 0) + """ + indices = np.arange(self.n_samples) + if shuffle: + np.random.shuffle(indices) + nb_full_batch, last_batch_size = ( + self.n_samples // batch_size, + self.n_samples % batch_size, + ) + self.nb_batches = nb_full_batch + (last_batch_size > 0) + for i in range(nb_full_batch): + yield self._return_batch(indices, i * batch_size, (i + 1) * batch_size) + # Last batch + if last_batch_size != 0: + yield self._return_batch(indices, -last_batch_size, self.n_samples) + + def _return_batch(self, indices, beginning, end): + return ( + self._endog[indices[beginning:end]], + self._exog[beginning:end], + self._offsets[indices[beginning:end]], + self._latent_mean[beginning:end], + self._latent_sqrt_var[beginning:end], + ) + def _trainstep(self): """ - Perform a single training step. + Perform a single pass of the data. Returns ------- torch.Tensor The loss value. """ - self.optim.zero_grad() - loss = -self.compute_elbo() - loss.backward() - self.optim.step() - self._update_closed_forms() - return loss + elbo = 0 + for batch in self._get_batch(self._batch_size): + self._extract_batch(batch) + self.optim.zero_grad() + loss = -self._compute_elbo_b() + loss.backward() + elbo += loss.item() + self.optim.step() + self._update_closed_forms() + return elbo / self.nb_batches + + def _extract_batch(self, batch): + self._endog_b = batch[0] + self._exog_b = batch[1] + self._offsets_b = batch[2] + self._latent_mean_b = batch[3] + self._latent_sqrt_var_b = batch[4] def transform(self): """ @@ -633,7 +720,7 @@ class _model(ABC): float The computed criterion. 
""" - self._plotargs._elbos_list.append(-loss.item()) + self._plotargs._elbos_list.append(-loss) self._plotargs.running_times.append(time.time() - self._beginning_time) if self._plotargs.iteration_number > self._WINDOW: criterion = abs( @@ -1334,6 +1421,7 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, + batch_size: int = None, ): super().__init__( endog=endog, @@ -1343,6 +1431,7 @@ class Pln(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=add_const, + batch_size=batch_size, ) @classmethod @@ -1370,6 +1459,7 @@ class Pln(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, + batch_size: int = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -1380,6 +1470,7 @@ class Pln(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=False, + batch_size=batch_size, ) @_add_doc( @@ -1619,6 +1710,23 @@ class Pln(_model): self._latent_sqrt_var, ) + def _compute_elbo_b(self): + """ + Method for computing the evidence lower bound (ELBO) on the current batch. + + Returns + ------- + torch.Tensor + The computed ELBO on the current batch. + """ + return profiled_elbo_pln( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + ) + def _smart_init_model_parameters(self): """ Method for smartly initializing the model parameters. @@ -1779,6 +1887,7 @@ class PlnPCAcollection: dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, add_const: bool = True, + batch_size: int = None, ): """ Constructor for PlnPCAcollection. @@ -1801,6 +1910,9 @@ class PlnPCAcollection: Whether to take the logarithm of offsets, by default False. add_const: bool, optional(keyword-only) Whether to add a column of one in the exog. Defaults to True. + batch_size: int, optional(keyword-only) + The batch size when optimizing the elbo. If None, + batch gradient descent will be performed (i.e. batch_size = n_samples). Returns ------- PlnPCAcollection @@ -1831,6 +1943,7 @@ class PlnPCAcollection: ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, + batch_size: int = None, ) -> "PlnPCAcollection": """ Create an instance of PlnPCAcollection from a formula. @@ -1851,6 +1964,10 @@ class PlnPCAcollection: The dictionary of initialization, by default None. take_log_offsets : bool, optional(keyword-only) Whether to take the logarithm of offsets, by default False. + batch_size: int, optional(keyword-only) + The batch size when optimizing the elbo. If None, + batch gradient descent will be performed (i.e. batch_size = n_samples). 
+ Returns ------- PlnPCAcollection @@ -2583,6 +2700,7 @@ class PlnPCA(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, + batch_size: int = None, ): self._rank = rank super().__init__( @@ -2593,6 +2711,7 @@ class PlnPCA(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=add_const, + batch_size=batch_size, ) @classmethod @@ -2624,6 +2743,7 @@ class PlnPCA(_model): rank: int = 5, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + batch_size: int = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -2634,6 +2754,7 @@ class PlnPCA(_model): rank=rank, dict_initialization=dict_initialization, add_const=False, + batch_size=batch_size, ) @_add_doc( @@ -2991,6 +3112,25 @@ class PlnPCA(_model): return [self._components, self._latent_mean, self._latent_sqrt_var] return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] + def _compute_elbo_b(self) -> torch.Tensor: + """ + Compute the evidence lower bound (ELBO) with the current batch. + + Returns + ------- + torch.Tensor + The ELBO value on the current batch. + """ + return elbo_plnpca( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + self._components, + self._coef, + ) + def compute_elbo(self) -> torch.Tensor: """ Compute the evidence lower bound (ELBO). -- GitLab From fbd22ec34b7294a93fc03572410fee8ec5c95acd Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 20 Jul 2023 17:36:09 +0200 Subject: [PATCH 065/167] put paper in the .gitignore. --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index a0185fcb..a95ada79 100644 --- a/.gitignore +++ b/.gitignore @@ -153,6 +153,9 @@ tests/Pln* slides/ index.html +paper/* + + tests/test_models* tests/test_load* tests/test_readme* -- GitLab From 12acf206c86f7239e354afe52a99b0cd35cf9e9d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 21 Jul 2023 09:16:08 +0200 Subject: [PATCH 066/167] Now takes the optimizer with a string instead of the whole torch.optimizer.Adam for example. Allow not to import torch and still choosing its optimizer. --- pyPLNmodels/models.py | 53 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 45 insertions(+), 8 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 11987d85..f01f3972 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -2,7 +2,7 @@ import time from abc import ABC, abstractmethod import warnings import os -from typing import Optional, Dict, List, Type, Any, Iterable, Union +from typing import Optional, Dict, List, Type, Any, Iterable, Union, Literal import pandas as pd import torch @@ -385,7 +385,7 @@ class _model(ABC): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, + class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, @@ -399,8 +399,10 @@ class _model(ABC): The maximum number of iterations. Defaults to 50000. lr : float, optional(keyword-only) The learning rate. Defaults to 0.01. - class_optimizer : torch.optim.Optimizer, optional - The optimizer class. Defaults to torch.optim.Rprop. + class_optimizer : str, optional + The optimizer class. Defaults to "Rprop". 
If the + batch_size is lower than the number of samples, the Rprop + algorithm should not be used. A warning will be sent. tol : float, optional(keyword-only) The tolerance for convergence. Defaults to 1e-3. do_smart_init : bool, optional(keyword-only) @@ -416,7 +418,7 @@ class _model(ABC): elif len(self._plotargs.running_times) > 0: self._beginning_time -= self._plotargs.running_times[-1] self._put_parameters_to_device() - self.optim = class_optimizer(self._list_of_parameters_needing_gradient, lr=lr) + self._handle_optimizer(class_optimizer, lr) stop_condition = False while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() @@ -428,6 +430,41 @@ class _model(ABC): self._print_end_of_fitting_message(stop_condition, tol) self._fitted = True + def _handle_optimizer(self, class_optimizer, lr): + if class_optimizer == "Rprop": + if self.batch_size < self.n_samples: + optimizer_is_set = False + while optimizer_is_set is False: + msg = ( + f"The Rprop optimizer should not be used when mini batch are used" + f"(i.e. batch_size ({self.batch_size}) < n_samples = {self.n_samples}). " + f"Do you wish to turn to the Adam Optimizer? (y/n) " + ) + print(msg) + turn = str(input()) + if turn == "y": + self.optim = torch.optim.Adam( + self._list_of_parameters_needing_gradient, lr=lr + ) + optimizer_is_set = True + elif turn == "n": + self.optim = torch.optim.Rprop( + self._list_of_parameters_needing_gradient, lr=lr + ) + optimizer_is_set = True + else: + self.optim = torch.optim.Rprop( + self._list_of_parameters_needing_gradient, lr=lr + ) + elif class_optimizer == "Adam": + self.optim = torch.optim.Adam( + self._list_of_parameters_needing_gradient, lr=lr + ) + else: + raise ValueError( + f"Optimizer should be either 'Adam' or 'Rprop', got {class_optimizer}" + ) + def _get_batch(self, batch_size, shuffle=False): """Get the batches required to do a minibatch gradient ascent. @@ -1488,7 +1525,7 @@ class Pln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, + class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, @@ -2244,7 +2281,7 @@ class PlnPCAcollection: nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: Type[torch.optim.Optimizer] = torch.optim.Rprop, + class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, @@ -2772,7 +2809,7 @@ class PlnPCA(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: torch.optim.Optimizer = torch.optim.Rprop, + class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, -- GitLab From 49f1178e837bed0b19dfb2aa6a4012108d696aeb Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 21 Jul 2023 17:40:48 +0200 Subject: [PATCH 067/167] put the batch size in the fit method and removed the possibility to choose the optimizer. Bug when shuffling the dataset. 
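
This patch replaces the interactive Rprop/Adam prompt above with a fixed rule: Rprop is kept for full-batch descent, Adam is used as soon as mini-batches are requested. A minimal sketch of that rule, where select_optimizer is an illustrative free function rather than package API:

import torch

def select_optimizer(parameters, lr, batch_size, n_samples):
    # Rprop only uses the sign of the gradient, which the patches above warn
    # against for noisy mini-batch gradients; fall back to Adam in that case.
    if batch_size < n_samples:
        return torch.optim.Adam(parameters, lr=lr)
    return torch.optim.Rprop(parameters, lr=lr)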
--- pyPLNmodels/_initialization.py | 2 - pyPLNmodels/models.py | 88 ++++++++-------------------------- 2 files changed, 20 insertions(+), 70 deletions(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index 72de90b3..57cc69cf 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -110,7 +110,6 @@ def _init_latent_mean( torch.Tensor The initialized latent mean with size (n,rank) """ - t = time.time() mode = torch.randn(endog.shape[0], components.shape[1], device=DEVICE) mode.requires_grad_(True) optimizer = torch.optim.Rprop([mode], lr=lr) @@ -129,7 +128,6 @@ def _init_latent_mean( keep_condition = False old_mode = torch.clone(mode) i += 1 - print("time mean", time.time() - t) return mode diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index f01f3972..8c45780f 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -76,7 +76,6 @@ class _model(ABC): dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, add_const: bool = True, - batch_size: int = None, ): """ Initializes the model class. @@ -98,9 +97,6 @@ class _model(ABC): Whether to take the log of offsets. Defaults to False. add_const: bool, optional(keyword-only) Whether to add a column of one in the exog. Defaults to True. - batch_size: int, optional(keyword-only) - The batch size when optimizing the elbo. If None, - batch gradient descent will be performed (i.e. batch_size = n_samples). Raises ------ ValueError @@ -115,7 +111,6 @@ class _model(ABC): endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False - self._batch_size = self._handle_batch_size(batch_size) self._plotargs = _PlotArgs(self._WINDOW) if dict_initialization is not None: self._set_init_parameters(dict_initialization) @@ -129,7 +124,6 @@ class _model(ABC): offsets_formula: str = "logsum", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, - batch_size: int = None, ): """ Create a model instance from a formula and data. @@ -147,9 +141,6 @@ class _model(ABC): The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) Whether to take the log of offsets. Defaults to False. - batch_size: int, optional(keyword-only) - The batch size when optimizing the elbo. If None, - batch gradient descent will be performed (i.e. batch_size = n_samples). """ endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -160,7 +151,6 @@ class _model(ABC): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=False, - batch_size=batch_size, ) def _set_init_parameters(self, dict_initialization: dict): @@ -385,10 +375,10 @@ class _model(ABC): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, + batch_size=None, ): """ Fit the model. The lower tol, the more accurate the model. @@ -399,26 +389,25 @@ class _model(ABC): The maximum number of iterations. Defaults to 50000. lr : float, optional(keyword-only) The learning rate. Defaults to 0.01. - class_optimizer : str, optional - The optimizer class. Defaults to "Rprop". If the - batch_size is lower than the number of samples, the Rprop - algorithm should not be used. A warning will be sent. tol : float, optional(keyword-only) The tolerance for convergence. Defaults to 1e-3. do_smart_init : bool, optional(keyword-only) Whether to perform smart initialization. Defaults to True. 
verbose : bool, optional(keyword-only) Whether to print training progress. Defaults to False. + batch_size: int, optional(keyword-only) + The batch size when optimizing the elbo. If None, + batch gradient descent will be performed (i.e. batch_size = n_samples). """ self._print_beginning_message() self._beginning_time = time.time() - + self._batch_size = self._handle_batch_size(batch_size) if self._fitted is False: self._init_parameters(do_smart_init) elif len(self._plotargs.running_times) > 0: self._beginning_time -= self._plotargs.running_times[-1] self._put_parameters_to_device() - self._handle_optimizer(class_optimizer, lr) + self._handle_optimizer(lr) stop_condition = False while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() @@ -430,39 +419,14 @@ class _model(ABC): self._print_end_of_fitting_message(stop_condition, tol) self._fitted = True - def _handle_optimizer(self, class_optimizer, lr): - if class_optimizer == "Rprop": - if self.batch_size < self.n_samples: - optimizer_is_set = False - while optimizer_is_set is False: - msg = ( - f"The Rprop optimizer should not be used when mini batch are used" - f"(i.e. batch_size ({self.batch_size}) < n_samples = {self.n_samples}). " - f"Do you wish to turn to the Adam Optimizer? (y/n) " - ) - print(msg) - turn = str(input()) - if turn == "y": - self.optim = torch.optim.Adam( - self._list_of_parameters_needing_gradient, lr=lr - ) - optimizer_is_set = True - elif turn == "n": - self.optim = torch.optim.Rprop( - self._list_of_parameters_needing_gradient, lr=lr - ) - optimizer_is_set = True - else: - self.optim = torch.optim.Rprop( - self._list_of_parameters_needing_gradient, lr=lr - ) - elif class_optimizer == "Adam": + def _handle_optimizer(self, lr): + if self.batch_size < self.n_samples: self.optim = torch.optim.Adam( self._list_of_parameters_needing_gradient, lr=lr ) else: - raise ValueError( - f"Optimizer should be either 'Adam' or 'Rprop', got {class_optimizer}" + self.optim = torch.optim.Rprop( + self._list_of_parameters_needing_gradient, lr=lr ) def _get_batch(self, batch_size, shuffle=False): @@ -508,8 +472,9 @@ class _model(ABC): The loss value. 
""" elbo = 0 - for batch in self._get_batch(self._batch_size): + for batch in self._get_batch(self._batch_size, shuffle=False): self._extract_batch(batch) + # print('current bach', self._current_batch_size) self.optim.zero_grad() loss = -self._compute_elbo_b() loss.backward() @@ -1458,7 +1423,6 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, - batch_size: int = None, ): super().__init__( endog=endog, @@ -1468,7 +1432,6 @@ class Pln(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=add_const, - batch_size=batch_size, ) @classmethod @@ -1496,7 +1459,6 @@ class Pln(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, - batch_size: int = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -1507,7 +1469,6 @@ class Pln(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=False, - batch_size=batch_size, ) @_add_doc( @@ -1525,18 +1486,18 @@ class Pln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, + batch_size: int = None, ): super().fit( nb_max_iteration, lr=lr, - class_optimizer=class_optimizer, tol=tol, do_smart_init=do_smart_init, verbose=verbose, + batch_size=batch_size, ) @_add_doc( @@ -1924,7 +1885,6 @@ class PlnPCAcollection: dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, add_const: bool = True, - batch_size: int = None, ): """ Constructor for PlnPCAcollection. @@ -1980,7 +1940,6 @@ class PlnPCAcollection: ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, - batch_size: int = None, ) -> "PlnPCAcollection": """ Create an instance of PlnPCAcollection from a formula. @@ -2001,9 +1960,6 @@ class PlnPCAcollection: The dictionary of initialization, by default None. take_log_offsets : bool, optional(keyword-only) Whether to take the logarithm of offsets, by default False. - batch_size: int, optional(keyword-only) - The batch size when optimizing the elbo. If None, - batch gradient descent will be performed (i.e. batch_size = n_samples). Returns ------- @@ -2281,10 +2237,10 @@ class PlnPCAcollection: nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, + batch_size: int = None, ): """ Fit each model in the PlnPCAcollection. @@ -2295,14 +2251,15 @@ class PlnPCAcollection: The maximum number of iterations, by default 50000. lr : float, optional(keyword-only) The learning rate, by default 0.01. - class_optimizer : Type[torch.optim.Optimizer], optional(keyword-only) - The optimizer class, by default torch.optim.Rprop. tol : float, optional(keyword-only) The tolerance, by default 1e-3. do_smart_init : bool, optional(keyword-only) Whether to do smart initialization, by default True. verbose : bool, optional(keyword-only) Whether to print verbose output, by default False. + batch_size: int, optional(keyword-only) + The batch size when optimizing the elbo. If None, + batch gradient descent will be performed (i.e. batch_size = n_samples). 
""" self._print_beginning_message() for i in range(len(self.values())): @@ -2310,7 +2267,6 @@ class PlnPCAcollection: model.fit( nb_max_iteration, lr=lr, - class_optimizer=class_optimizer, tol=tol, do_smart_init=do_smart_init, verbose=verbose, @@ -2737,7 +2693,6 @@ class PlnPCA(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, - batch_size: int = None, ): self._rank = rank super().__init__( @@ -2748,7 +2703,6 @@ class PlnPCA(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=add_const, - batch_size=batch_size, ) @classmethod @@ -2780,7 +2734,6 @@ class PlnPCA(_model): rank: int = 5, offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, - batch_size: int = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -2791,7 +2744,6 @@ class PlnPCA(_model): rank=rank, dict_initialization=dict_initialization, add_const=False, - batch_size=batch_size, ) @_add_doc( @@ -2809,18 +2761,18 @@ class PlnPCA(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - class_optimizer: Literal["Rprop", "Adam"] = "Rprop", tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, + batch_size=None, ): super().fit( nb_max_iteration, lr=lr, - class_optimizer=class_optimizer, tol=tol, do_smart_init=do_smart_init, verbose=verbose, + batch_size=batch_size, ) @_add_doc( -- GitLab From 9e819edfabb9cc8acad805f894929230a91bc91d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 24 Jul 2023 16:05:15 +0200 Subject: [PATCH 068/167] Fixed the shuffle issue. --- pyPLNmodels/models.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 8c45780f..1390e37a 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -454,12 +454,13 @@ class _model(ABC): yield self._return_batch(indices, -last_batch_size, self.n_samples) def _return_batch(self, indices, beginning, end): + to_take = torch.tensor(indices[beginning:end]) return ( - self._endog[indices[beginning:end]], - self._exog[beginning:end], - self._offsets[indices[beginning:end]], - self._latent_mean[beginning:end], - self._latent_sqrt_var[beginning:end], + torch.index_select(self._endog, 0, to_take), + torch.index_select(self._exog, 0, to_take), + torch.index_select(self._offsets, 0, to_take), + torch.index_select(self._latent_mean, 0, to_take), + torch.index_select(self._latent_sqrt_var, 0, to_take), ) def _trainstep(self): @@ -472,9 +473,8 @@ class _model(ABC): The loss value. 
""" elbo = 0 - for batch in self._get_batch(self._batch_size, shuffle=False): + for batch in self._get_batch(self._batch_size, shuffle=True): self._extract_batch(batch) - # print('current bach', self._current_batch_size) self.optim.zero_grad() loss = -self._compute_elbo_b() loss.backward() -- GitLab From 1064c7d60a58b198a39f885e201e3bacfaf5670b Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 24 Jul 2023 16:26:12 +0200 Subject: [PATCH 069/167] did not take the batch size when fitting the collection --- pyPLNmodels/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 1390e37a..cbccf767 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -2270,6 +2270,7 @@ class PlnPCAcollection: tol=tol, do_smart_init=do_smart_init, verbose=verbose, + batch_size=batch_size, ) if i < len(self.values()) - 1: next_model = self[self.ranks[i + 1]] -- GitLab From fdae97751c6c0e11543bf595e0d2ba5b0f37d317 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 25 Jul 2023 16:22:03 +0200 Subject: [PATCH 070/167] GPU support --- pyPLNmodels/_initialization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index 57cc69cf..02574c29 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -66,7 +66,7 @@ def _init_components( """ log_y = torch.log(endog + (endog == 0) * math.exp(-2)) pca = PCA(n_components=rank) - pca.fit(log_y) + pca.fit(log_y.detach().cpu()) pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_) return torch.from_numpy(pca_comp).to(DEVICE) -- GitLab From 4e2934e8eb6f6191a0b0c77b51ba48a2c3464bf7 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 25 Jul 2023 16:26:04 +0200 Subject: [PATCH 071/167] merge right lines of check_tol branch. --- pyPLNmodels/models.py | 42 ++++++++++++++++++++++++++---------------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index cbccf767..5edbcd30 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -250,7 +250,7 @@ class _model(ABC): return batch_size @property - def nb_iteration_done(self) -> int: + def _nb_iteration_done(self) -> int: """ The number of iterations done. @@ -259,7 +259,7 @@ class _model(ABC): int The number of iterations done. """ - return len(self._plotargs._elbos_list) + return len(self._plotargs._elbos_list) * self._nb_batches @property def n_samples(self) -> int: @@ -429,7 +429,7 @@ class _model(ABC): self._list_of_parameters_needing_gradient, lr=lr ) - def _get_batch(self, batch_size, shuffle=False): + def _get_batch(self, shuffle=False): """Get the batches required to do a minibatch gradient ascent. 
Args: @@ -442,19 +442,17 @@ class _model(ABC): indices = np.arange(self.n_samples) if shuffle: np.random.shuffle(indices) - nb_full_batch, last_batch_size = ( - self.n_samples // batch_size, - self.n_samples % batch_size, - ) - self.nb_batches = nb_full_batch + (last_batch_size > 0) - for i in range(nb_full_batch): - yield self._return_batch(indices, i * batch_size, (i + 1) * batch_size) + + for i in range(self._nb_full_batch): + yield self._return_batch( + indices, i * self._batch_size, (i + 1) * self._batch_size + ) # Last batch - if last_batch_size != 0: - yield self._return_batch(indices, -last_batch_size, self.n_samples) + if self._last_batch_size != 0: + yield self._return_batch(indices, -self._last_batch_size, self.n_samples) def _return_batch(self, indices, beginning, end): - to_take = torch.tensor(indices[beginning:end]) + to_take = torch.tensor(indices[beginning:end]).to(DEVICE) return ( torch.index_select(self._endog, 0, to_take), torch.index_select(self._exog, 0, to_take), @@ -463,6 +461,18 @@ class _model(ABC): torch.index_select(self._latent_sqrt_var, 0, to_take), ) + @property + def _nb_full_batch(self): + return self.n_samples // self._batch_size + + @property + def _last_batch_size(self): + return self.n_samples % self._batch_size + + @property + def _nb_batches(self): + return self._nb_full_batch + (self._last_batch_size > 0) + def _trainstep(self): """ Perform a single pass of the data. @@ -473,7 +483,7 @@ class _model(ABC): The loss value. """ elbo = 0 - for batch in self._get_batch(self._batch_size, shuffle=True): + for batch in self._get_batch(shuffle=True): self._extract_batch(batch) self.optim.zero_grad() loss = -self._compute_elbo_b() @@ -481,7 +491,7 @@ class _model(ABC): elbo += loss.item() self.optim.step() self._update_closed_forms() - return elbo / self.nb_batches + return elbo / self._nb_batches def _extract_batch(self, batch): self._endog_b = batch[0] @@ -1232,7 +1242,7 @@ class _model(ABC): dict The dictionary of optimization parameters. """ - return {"Number of iterations done": self.nb_iteration_done} + return {"Number of iterations done": self._nb_iteration_done} @property def _useful_properties_string(self): -- GitLab From a63a690934f3167b533efe8f67d3e1456090d477 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 25 Jul 2023 16:33:00 +0200 Subject: [PATCH 072/167] forgot to add _ --- pyPLNmodels/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 5edbcd30..80c4cbf3 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -409,7 +409,7 @@ class _model(ABC): self._put_parameters_to_device() self._handle_optimizer(lr) stop_condition = False - while self.nb_iteration_done < nb_max_iteration and not stop_condition: + while self._nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() criterion = self._compute_criterion_and_update_plotargs(loss, tol) if abs(criterion) < tol: -- GitLab From bc33072178468df912651096b0bfa76dd5ccbce8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 09:33:28 +0200 Subject: [PATCH 073/167] merge changes with check_tol. Basically change the way we compute tolerance. All this to handle batch tolerance. The cumulative elbo is a good indicator. 
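
A minimal sketch of the stopping rule this patch moves to, where loss is the negative ELBO of one full pass and the criterion is the relative change of the cumulative ELBO. Note that the hunk below appends cumulative_elbo_list - loss; the last element of that list is presumably what is meant, as written here:

def update_stopping_criterion(plotargs, loss, tol):
    # The cumulative ELBO grows by -loss at each pass; the criterion is its
    # relative change between two consecutive passes.
    plotargs.cumulative_elbo_list.append(plotargs.cumulative_elbo_list[-1] - loss)
    criterion = (
        plotargs.cumulative_elbo_list[-2] - plotargs.cumulative_elbo_list[-1]
    ) / plotargs.cumulative_elbo_list[-1]
    plotargs.criterions.append(criterion)
    return abs(criterion) < tol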
--- pyPLNmodels/_utils.py | 15 +++++++-------- pyPLNmodels/models.py | 22 +++++++++++----------- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 3f9c6441..d2b1aea0 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -24,19 +24,14 @@ else: class _PlotArgs: - def __init__(self, window: int): + def __init__(self): """ Initialize the PlotArgs class. - - Parameters - ---------- - window : int - The size of the window for computing the criterion. """ - self.window = window self.running_times = [] - self.criterions = [1] * window # the first window criterion won't be computed. + self.criterions = [] self._elbos_list = [] + self.cumulative_elbo_list = [0] @property def iteration_number(self) -> int: @@ -50,6 +45,10 @@ class _PlotArgs: """ return len(self._elbos_list) + @property + def cumulative_elbo(self): + return self.cumulative_elbo_list[-1] + def _show_loss(self, ax=None): """ Show the loss of the model (i.e. the negative ELBO). diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 80c4cbf3..c964fb8c 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -57,7 +57,6 @@ class _model(ABC): Base class for all the Pln models. Should be inherited. """ - _WINDOW: int = 15 _endog: torch.Tensor _exog: torch.Tensor _offsets: torch.Tensor @@ -734,14 +733,15 @@ class _model(ABC): """ self._plotargs._elbos_list.append(-loss) self._plotargs.running_times.append(time.time() - self._beginning_time) - if self._plotargs.iteration_number > self._WINDOW: - criterion = abs( - self._plotargs._elbos_list[-1] - - self._plotargs._elbos_list[-1 - self._WINDOW] - ) - self._plotargs.criterions.append(criterion) - return criterion - return tol + self._plotargs.cumulative_elbo_list.append( + self._plotargs.cumulative_elbo_list - loss + ) + criterion = ( + self._plotargs.cumulative_elbo_list[-2] + - self._plotargs.cumulative_elbo_list[-1] + ) / self._plotargs.cumulative_elbo_list[-1] + self._plotargs.criterions.append(criterion) + return criterion def _update_closed_forms(self): """ @@ -2924,11 +2924,11 @@ class PlnPCA(_model): def _endog_predictions(self): covariance_a_posteriori = torch.sum( (self._components**2).unsqueeze(0) - * (self.latent_sqrt_var**2).unsqueeze(1), + * (self._latent_sqrt_var**2).unsqueeze(1), axis=2, ) if self.exog is not None: - XB = self.exog @ self.coef + XB = self._exog @ self._coef else: XB = 0 return torch.exp( -- GitLab From 317abe30842170ff963c8e5c6f31f01c0049b6c2 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 09:37:33 +0200 Subject: [PATCH 074/167] dd tests only on the main and dev branch. --- .gitlab-ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d8c20b0f..c95c78f1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,6 +23,9 @@ tests: - cd tests - python create_readme_and_docstrings_tests.py - pytest . + only: + - main + - dev build_package: -- GitLab From 7fe3f2366b4a65130a917341f1cd06ec96f44d86 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 10:05:23 +0200 Subject: [PATCH 075/167] add the elbo of the zi model. 
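
For context on the ELBO added below, a hedged sketch of the zero-inflated PLN generative model it targets, assuming the logit link implied by its log(1 + exp(exog @ coef_inflation)) term and a covariance factored as components @ components.T; sample_zipln and its arguments are illustrative, not package API:

import torch

def sample_zipln(exog, coef, coef_inflation, components, offsets):
    # Gaussian latent layer centred on exog @ coef, Bernoulli zero-inflation
    # layer, and Poisson counts switched off wherever the inflation fires.
    n_samples = exog.shape[0]
    gaussian = exog @ coef + torch.randn(n_samples, components.shape[1]) @ components.T
    prob_inflation = torch.sigmoid(exog @ coef_inflation)
    inflated = torch.bernoulli(prob_inflation)
    counts = torch.poisson(torch.exp(offsets + gaussian))
    return (1 - inflated) * counts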
--- pyPLNmodels/elbos.py | 92 ++++++++++++++++++++++++++++---------------- 1 file changed, 59 insertions(+), 33 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 6dcda361..454cfb75 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -172,6 +172,7 @@ def elbo_plnpca( ) / n_samples +## pb with trunc_log ## should rename some variables so that is is clearer when we see the formula def elbo_zi_pln( endog, @@ -179,13 +180,13 @@ def elbo_zi_pln( offsets, latent_mean, latent_sqrt_var, - pi, - covariance, + latent_prob, + components, coef, - _coef_inflation, + coef_inflation, dirac, ): - """Compute the ELBO (Evidence LOwer Bound) for the Zero Inflated Pln model. + """Compute the ELBO (Evidence LOwer Bound) for the Zero Inflated PLN model. See the doc for more details on the computation. Args: @@ -197,41 +198,66 @@ def elbo_zi_pln( pi: torch.tensor. Variational parameter with size (n,p) covariance: torch.tensor. Model parameter with size (p,p) coef: torch.tensor. Model parameter with size (d,p) - _coef_inflation: torch.tensor. Model parameter with size (d,p) + coef_inflation: torch.tensor. Model parameter with size (d,p) Returns: torch.tensor of size 1 with a gradient. """ - if torch.norm(pi * dirac - pi) > 0.0001: - print("Bug") - return False - n_samples = endog.shape[0] - dim = endog.shape[1] - s_rond_s = torch.square(latent_sqrt_var) - offsets_plus_m = offsets + latent_mean - m_minus_xb = latent_mean - exog @ coef - x_coef_inflation = exog @ _coef_inflation - elbo = torch.sum( - (1 - pi) - * ( - endog @ offsets_plus_m - - torch.exp(offsets_plus_m + s_rond_s / 2) - - _log_stirling(endog), - ) - + pi + covariance = components @ (components.T) + if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: + raise RuntimeError("Latent probability is not zero when it should be.") + n_samples, dim = endog.shape + s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) + o_plus_m = offsets + latent_mean + if exog is None: + XB = torch.zeros_like(endog) + xcoef_inflation = torch.zeros_like(endog) + else: + XB = exog @ coef + xcoef_inflation = exog @ coef_inflation + m_minus_xb = latent_mean - XB + + A = torch.exp(o_plus_m + s_rond_s / 2) + inside_a = torch.multiply( + 1 - latent_prob, torch.multiply(endog, o_plus_m) - A - _log_stirling(endog) ) + a = torch.sum(inside_a) - elbo -= torch.sum(pi * _trunc_log(pi) + (1 - pi) * _trunc_log(1 - pi)) - elbo += torch.sum( - pi * x_coef_inflation - torch.log(1 + torch.exp(x_coef_inflation)) + Omega = torch.inverse(covariance) + + m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) + un_moins_rho = 1 - latent_prob + un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb + un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb + inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer + b = -n_samples / 2 * torch.logdet(covariance) + torch.sum(inside_b) + + inside_c = torch.multiply(latent_prob, xcoef_inflation) - torch.log( + 1 + torch.exp(xcoef_inflation) ) + c = torch.sum(inside_c) + log_diag = torch.log(torch.diag(covariance)) + log_S_term = torch.sum( + torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 + ) + y = torch.sum(latent_prob, axis=0) + covariance_term = 1 / 2 * torch.log(torch.diag(covariance)) * y + inside_d = covariance_term + log_S_term + + d = n_samples * dim / 2 + torch.sum(inside_d) - elbo -= 0.5 * torch.trace( - torch.mm( - torch.inverse(covariance), - torch.diag(torch.sum(s_rond_s, dim=0)) + m_minus_xb.T @ m_minus_xb, - ) + inside_e = 
torch.multiply(latent_prob, _trunc_log(latent_prob)) + torch.multiply( + 1 - latent_prob, _trunc_log(1 - latent_prob) ) - elbo += 0.5 * n_samples * torch.log(torch.det(covariance)) - elbo += 0.5 * n_samples * dim - elbo += 0.5 * torch.sum(torch.log(s_rond_s)) + e = -torch.sum(inside_e) + sum_un_moins_rho_s2 = torch.sum(torch.multiply(1 - latent_prob, s_rond_s), axis=0) + diag_sig_sum_rho = torch.multiply( + torch.diag(covariance), torch.sum(latent_prob, axis=0) + ) + new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) + K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new + inside_f = torch.diag(Omega) * K + f = -1 / 2 * torch.sum(inside_f) + full_diag_omega = torch.diag(Omega).expand(exog.shape[0], -1) + elbo = a + b + c + d + e + f + print(" inside a shape", inside_a.shape) return elbo -- GitLab From effbfac8ad6aa370c476d69e7dd79b619f7676a6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 10:22:35 +0200 Subject: [PATCH 076/167] remove useless sums and optimize a little. --- pyPLNmodels/elbos.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 454cfb75..f7378d0f 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -220,7 +220,6 @@ def elbo_zi_pln( inside_a = torch.multiply( 1 - latent_prob, torch.multiply(endog, o_plus_m) - A - _log_stirling(endog) ) - a = torch.sum(inside_a) Omega = torch.inverse(covariance) @@ -229,12 +228,10 @@ def elbo_zi_pln( un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer - b = -n_samples / 2 * torch.logdet(covariance) + torch.sum(inside_b) inside_c = torch.multiply(latent_prob, xcoef_inflation) - torch.log( 1 + torch.exp(xcoef_inflation) ) - c = torch.sum(inside_c) log_diag = torch.log(torch.diag(covariance)) log_S_term = torch.sum( torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 @@ -243,21 +240,18 @@ def elbo_zi_pln( covariance_term = 1 / 2 * torch.log(torch.diag(covariance)) * y inside_d = covariance_term + log_S_term - d = n_samples * dim / 2 + torch.sum(inside_d) - inside_e = torch.multiply(latent_prob, _trunc_log(latent_prob)) + torch.multiply( 1 - latent_prob, _trunc_log(1 - latent_prob) ) - e = -torch.sum(inside_e) sum_un_moins_rho_s2 = torch.sum(torch.multiply(1 - latent_prob, s_rond_s), axis=0) diag_sig_sum_rho = torch.multiply( torch.diag(covariance), torch.sum(latent_prob, axis=0) ) new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new - inside_f = torch.diag(Omega) * K - f = -1 / 2 * torch.sum(inside_f) + inside_f = -1 / 2 * torch.diag(Omega) * K full_diag_omega = torch.diag(Omega).expand(exog.shape[0], -1) - elbo = a + b + c + d + e + f - print(" inside a shape", inside_a.shape) + elbo = torch.sum(inside_a + inside_c + inside_d) + elbo += torch.sum(inside_b) - n_samples / 2 * torch.logdet(covariance) + elbo += n_samples * dim / 2 + torch.sum(inside_d + inside_f) return elbo -- GitLab From 04b03948ce1bc0566bdb993e21d2e36841b60af4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 10:34:39 +0200 Subject: [PATCH 077/167] rewrite the elbo to avoid doublons and torch multiply are replaced by * --- pyPLNmodels/elbos.py | 56 ++++++++++++++++++++------------------------ 1 file changed, 26 insertions(+), 30 deletions(-) diff 
--git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index f7378d0f..ec743430 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -202,11 +202,15 @@ def elbo_zi_pln( Returns: torch.tensor of size 1 with a gradient. """ - covariance = components @ (components.T) if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: raise RuntimeError("Latent probability is not zero when it should be.") + covariance = components @ (components.T) + diag_cov = torch.diag(covariance) + Omega = torch.inverse(covariance) + diag_omega = torch.diag(Omega) + un_moins_prob = 1 - latent_prob n_samples, dim = endog.shape - s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) + s_rond_s = latent_sqrt_var * latent_sqrt_var o_plus_m = offsets + latent_mean if exog is None: XB = torch.zeros_like(endog) @@ -217,40 +221,32 @@ def elbo_zi_pln( m_minus_xb = latent_mean - XB A = torch.exp(o_plus_m + s_rond_s / 2) - inside_a = torch.multiply( - 1 - latent_prob, torch.multiply(endog, o_plus_m) - A - _log_stirling(endog) - ) - - Omega = torch.inverse(covariance) - + inside_a = un_moins_prob * (endog * o_plus_m - A - _log_stirling(endog)) m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) - un_moins_rho = 1 - latent_prob - un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb - un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb - inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer - - inside_c = torch.multiply(latent_prob, xcoef_inflation) - torch.log( - 1 + torch.exp(xcoef_inflation) + un_moins_prob_m_moins_xb = un_moins_prob * m_minus_xb + un_moins_prob_m_moins_xb_outer = ( + un_moins_prob_m_moins_xb.T @ un_moins_prob_m_moins_xb ) - log_diag = torch.log(torch.diag(covariance)) + inside_b = -1 / 2 * Omega * un_moins_prob_m_moins_xb_outer + + inside_c = latent_prob * xcoef_inflation - torch.log(1 + torch.exp(xcoef_inflation)) + log_diag = torch.log(diag_cov) log_S_term = torch.sum( - torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 + un_moins_prob * torch.log(torch.abs(latent_sqrt_var)), axis=0 ) - y = torch.sum(latent_prob, axis=0) - covariance_term = 1 / 2 * torch.log(torch.diag(covariance)) * y + sum_prob = torch.sum(latent_prob, axis=0) + covariance_term = 1 / 2 * torch.log(diag_cov) * sum_prob inside_d = covariance_term + log_S_term - inside_e = torch.multiply(latent_prob, _trunc_log(latent_prob)) + torch.multiply( - 1 - latent_prob, _trunc_log(1 - latent_prob) - ) - sum_un_moins_rho_s2 = torch.sum(torch.multiply(1 - latent_prob, s_rond_s), axis=0) - diag_sig_sum_rho = torch.multiply( - torch.diag(covariance), torch.sum(latent_prob, axis=0) - ) - new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) - K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new - inside_f = -1 / 2 * torch.diag(Omega) * K - full_diag_omega = torch.diag(Omega).expand(exog.shape[0], -1) + inside_e = torch.multiply( + latent_prob, _trunc_log(latent_prob) + ) + un_moins_prob * _trunc_log(un_moins_prob) + sum_un_moins_prob_s2 = torch.sum(un_moins_prob * s_rond_s, axis=0) + diag_sig_sum_prob = diag_cov * torch.sum(latent_prob, axis=0) + new = torch.sum(latent_prob * un_moins_prob * (m_minus_xb**2), axis=0) + K = sum_un_moins_prob_s2 + diag_sig_sum_prob + new + inside_f = -1 / 2 * diag_omega * K + full_diag_omega = diag_omega.expand(exog.shape[0], -1) elbo = torch.sum(inside_a + inside_c + inside_d) elbo += torch.sum(inside_b) - n_samples / 2 * torch.logdet(covariance) elbo += n_samples * dim / 2 + torch.sum(inside_d + inside_f) -- GitLab From 
ddc7e3e554cce257f9344fc967dcc785ad5194b0 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 11:39:41 +0200 Subject: [PATCH 078/167] began to merge changements of zi inside. till line 3482 --- pyPLNmodels/_initialization.py | 4 +- pyPLNmodels/models.py | 190 ++++++++++++++++++++++++++++++--- 2 files changed, 177 insertions(+), 17 deletions(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index 02574c29..f5663746 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -14,9 +14,7 @@ else: DEVICE = torch.device("cpu") -def _init_covariance( - endog: torch.Tensor, exog: torch.Tensor, coef: torch.Tensor -) -> torch.Tensor: +def _init_covariance(endog: torch.Tensor, exog: torch.Tensor) -> torch.Tensor: """ Initialization for the covariance for the Pln model. Take the log of endog (careful when endog=0), and computes the Maximum Likelihood diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index c964fb8c..66ea3339 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -18,7 +18,7 @@ from scipy import stats from ._closed_forms import ( _closed_formula_coef, _closed_formula_covariance, - _closed_formula_pi, + _closed_formula_latent_prob, ) from .elbos import elbo_plnpca, elbo_zi_pln, profiled_elbo_pln from ._utils import ( @@ -32,6 +32,7 @@ from ._utils import ( _array2tensor, _handle_data, _add_doc, + _closed_form_latent_prob, ) from ._initialization import ( @@ -110,7 +111,7 @@ class _model(ABC): endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False - self._plotargs = _PlotArgs(self._WINDOW) + self._plotargs = _PlotArgs() if dict_initialization is not None: self._set_init_parameters(dict_initialization) @@ -249,7 +250,7 @@ class _model(ABC): return batch_size @property - def _nb_iteration_done(self) -> int: + def nb_iteration_done(self) -> int: """ The number of iterations done. @@ -408,7 +409,7 @@ class _model(ABC): self._put_parameters_to_device() self._handle_optimizer(lr) stop_condition = False - while self._nb_iteration_done < nb_max_iteration and not stop_condition: + while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() criterion = self._compute_criterion_and_update_plotargs(loss, tol) if abs(criterion) < tol: @@ -1242,7 +1243,7 @@ class _model(ABC): dict The dictionary of optimization parameters. """ - return {"Number of iterations done": self._nb_iteration_done} + return {"Number of iterations done": self.nb_iteration_done} @property def _useful_properties_string(self): @@ -1850,7 +1851,7 @@ class Pln(_model): covariance : torch.Tensor The covariance matrix. """ - pass + raise AttributeError("You can not set the covariance for the Pln model.") class PlnPCAcollection: @@ -3190,7 +3191,7 @@ class PlnPCA(_model): @property def covariance(self) -> torch.Tensor: """ - Property representing the covariance a posteriori of the latent variables. + Property representing the covariance of the latent variables. 
Returns ------- @@ -3326,13 +3327,137 @@ class PlnPCA(_model): return self.latent_variables -class ZIPln(Pln): +class ZIPln(_model): _NAME = "ZIPln" - _pi: torch.Tensor + _latent_prob: torch.Tensor _coef_inflation: torch.Tensor _dirac: torch.Tensor + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog= get_real_count_data() + >>> zi = ZIPln(endog, add_const = True) + >>> zi.fit() + >>> print(zi) + """, + returns=""" + ZIPln + """, + see_also=""" + :func:`pyPLNmodels.ZIPln.from_formula` + """, + ) + def __init__( + self, + endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], + *, + exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, + offsets_formula: str = "logsum", + dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + take_log_offsets: bool = False, + add_const: bool = True, + use_closed_form: bool = False, + ): + super().__init__( + endog=endog, + exog=exog, + offsets=offsets, + offsets_formula=offsets_formula, + dict_initialization=dict_initialization, + take_log_offsets=take_log_offsets, + add_const=add_const, + ) + self._use_closed_form = use_closed_form + + @classmethod + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> zi = ZIPln.from_formula("endog ~ 1", data = data) + """, + returns=""" + ZIPln + """, + see_also=""" + :class:`pyPLNmodels.ZIPln` + :func:`pyPLNmodels.ZIPln.__init__` + """, + ) + def from_formula( + cls, + formula: str, + data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], + *, + offsets_formula: str = "logsum", + dict_initialization: Optional[Dict[str, torch.Tensor]] = None, + take_log_offsets: bool = False, + use_closed_form: bool = True, + ): + endog, exog, offsets = _extract_data_from_formula(formula, data) + return cls( + endog, + exog=exog, + offsets=offsets, + offsets_formula=offsets_formula, + dict_initialization=dict_initialization, + take_log_offsets=take_log_offsets, + add_const=False, + use_closed_form=use_closed_form, + ) + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog = get_real_count_data() + >>> zi = Pln(endog,add_const = True) + >>> zi.fit() + >>> print(zi) + """, + ) + def fit( + self, + nb_max_iteration: int = 50000, + *, + lr: float = 0.01, + tol: float = 1e-3, + do_smart_init: bool = True, + verbose: bool = False, + batch_size: int = None, + ): + super().fit( + nb_max_iteration, + lr=lr, + tol=tol, + do_smart_init=do_smart_init, + verbose=verbose, + batch_size=batch_size, + ) + + @_add_doc( + _model, + example=""" + >>> import matplotlib.pyplot as plt + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> zi = ZIPln(endog,add_const = True) + >>> zi.fit() + >>> zi.plot_expected_vs_true() + >>> plt.show() + >>> zi.plot_expected_vs_true(colors = labels) + >>> plt.show() + """, + ) + def plot_expected_vs_true(self, ax=None, colors=None): + super().plot_expected_vs_true(ax=ax, colors=colors) + @property def _description(self): return "with full covariance model and zero-inflation." 
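
Read together, the constructor, from_formula and fit hunks above suggest the following end-to-end usage. This is a sketch assuming the API shown in the docstrings of this patch, with the use_closed_form flag of from_formula and the batch_size argument added to fit earlier in the series:

from pyPLNmodels import ZIPln, get_real_count_data

endog = get_real_count_data()
data = {"endog": endog}
zi = ZIPln.from_formula("endog ~ 1", data=data, use_closed_form=True)
zi.fit(batch_size=100)
print(zi)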
@@ -3346,7 +3471,7 @@ class ZIPln(Pln): def _smart_init_model_parameters(self): super()._smart_init_model_parameters() if not hasattr(self, "_covariance"): - self._covariance = _init_covariance(self._endog, self._exog, self._coef) + self._components = _init_components(self._endog, self._exog, self.dim) if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) @@ -3354,11 +3479,29 @@ class ZIPln(Pln): self._dirac = self._endog == 0 self._latent_mean = torch.randn(self.n_samples, self.dim) self._latent_sqrt_var = torch.randn(self.n_samples, self.dim) - self._pi = ( + self._latent_prob = ( torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE) * self._dirac ) + @property + def _covariance(self): + return self._components @ (self._components.T) + + @property + def covariance(self) -> torch.Tensor: + """ + Property representing the covariance of the latent variables. + + Returns + ------- + Optional[torch.Tensor] + The covariance tensor or None if components are not present. + """ + if hasattr(self, "_components"): + return self.components @ (self.components.T) + return None + def compute_elbo(self): return elbo_zi_pln( self._endog, @@ -3366,7 +3509,7 @@ class ZIPln(Pln): self._offsets, self._latent_mean, self._latent_sqrt_var, - self._pi, + self._latent_prob, self._covariance, self._coef, self._coef_inflation, @@ -3375,9 +3518,19 @@ class ZIPln(Pln): @property def _list_of_parameters_needing_gradient(self): - return [self._latent_mean, self._latent_sqrt_var, self._coef_inflation] + list_parameters = [ + self._latent_mean, + self._latent_sqrt_var, + self._coef_inflation, + self._components, + self._coef, + ] + if self._use_closed_form: + list_parameters.append(self._latent_prob) + return list_parameters def _update_closed_forms(self): + pass self._coef = _closed_formula_coef(self._exog, self._latent_mean) self._covariance = _closed_formula_covariance( self._exog, @@ -3386,7 +3539,7 @@ class ZIPln(Pln): self._coef, self.n_samples, ) - self._pi = _closed_formula_pi( + self._latent_prob = _closed_formula_latent_prob( self._offsets, self._latent_mean, self._latent_sqrt_var, @@ -3395,6 +3548,15 @@ class ZIPln(Pln): self._coef_inflation, ) + @property + def closed_form_latent_prob(self): + """ + The closed form for the latent probability. + """ + return closed_form_latent_prob( + self._exog, self._coef, self._coef_inflation, self._covariance, self._dirac + ) + @property def number_of_parameters(self): return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) -- GitLab From b47ebec4278dabbb881e79fd9cb81b718ac2d994 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 16:35:25 +0200 Subject: [PATCH 079/167] continue to merge changes from the zi branch. --- pyPLNmodels/models.py | 167 ++++++++++++++++++++++++++++-------------- 1 file changed, 111 insertions(+), 56 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 66ea3339..6289640c 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -238,6 +238,12 @@ class _model(ABC): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) return ax + def _update_parameters(self): + """ + Update the parameters with a gradient step and project if necessary. 
+ """ + self.optim.step() + def _handle_batch_size(self, batch_size): if batch_size is None: batch_size = self.n_samples @@ -489,7 +495,7 @@ class _model(ABC): loss = -self._compute_elbo_b() loss.backward() elbo += loss.item() - self.optim.step() + self._udpate_parameters() self._update_closed_forms() return elbo / self._nb_batches @@ -2482,23 +2488,26 @@ class PlnPCAcollection: bic = self.BIC aic = self.AIC loglikes = self.loglikes - bic_color = "blue" - aic_color = "red" - loglikes_color = "orange" - plt.scatter(bic.keys(), bic.values(), label="BIC criterion", c=bic_color) - plt.plot(bic.keys(), bic.values(), c=bic_color) - plt.axvline(self.best_BIC_model_rank, c=bic_color, linestyle="dotted") - plt.scatter(aic.keys(), aic.values(), label="AIC criterion", c=aic_color) - plt.axvline(self.best_AIC_model_rank, c=aic_color, linestyle="dotted") - plt.plot(aic.keys(), aic.values(), c=aic_color) - plt.xticks(list(aic.keys())) - plt.scatter( - loglikes.keys(), - -np.array(list(loglikes.values())), - label="Negative log likelihood", - c=loglikes_color, - ) - plt.plot(loglikes.keys(), -np.array(list(loglikes.values())), c=loglikes_color) + colors = {"BIC": "blue", "AIC": "red", "Negative log likelihood": "orange"} + for criterion, values in zip( + ["BIC", "AIC", "Negative log likelihood"], [bic, aic, loglikes] + ): + plt.scatter( + values.keys(), + values.values(), + label=f"{criterion} criterion", + c=colors[criterion], + ) + plt.plot(values.keys(), values.values(), c=colors[criterion]) + if criterion == "BIC": + plt.axvline( + self.best_BIC_model_rank, c=colors[criterion], linestyle="dotted" + ) + elif criterion == "AIC": + plt.axvline( + self.best_AIC_model_rank, c=colors[criterion], linestyle="dotted" + ) + plt.xticks(list(values.keys())) plt.legend() plt.show() @@ -2696,7 +2705,7 @@ class PlnPCA(_model): ) def __init__( self, - endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], + endog: Union[torch.Tensor, np.ndarray, pd.DataFrame], *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, @@ -3465,16 +3474,21 @@ class ZIPln(_model): def _random_init_model_parameters(self): super()._random_init_model_parameters() self._coef_inflation = torch.randn(self.nb_cov, self.dim) - self._covariance = torch.diag(torch.ones(self.dim)).to(DEVICE) + self._coef = torch.randn(self.nb_cov, self.dim) + self._components = torch.randn(self.nb_cov, self.dim) - # should change the good initialization, especially for _coef_inflation + # should change the good initialization for _coef_inflation def _smart_init_model_parameters(self): + # init of _coef. 
super()._smart_init_model_parameters() if not hasattr(self, "_covariance"): self._components = _init_components(self._endog, self._exog, self.dim) if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) + def _print_beginning_message(self): + print("Fitting a ZIPln model.") + def _random_init_latent_parameters(self): self._dirac = self._endog == 0 self._latent_mean = torch.randn(self.n_samples, self.dim) @@ -3482,12 +3496,36 @@ class ZIPln(_model): self._latent_prob = ( torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE) * self._dirac - ) + ).double() + + def _smart_init_latent_parameters(self): + self._random_init_latent_parameters() @property def _covariance(self): return self._components @ (self._components.T) + def latent_variables(self): + return self.latent_mean, self.latent_prob + + def _update_parameters(self): + super()._update_parameters() + self._project_latent_prob() + + def _project_latent_prob(self): + """ + Project the latent probability since it must be between 0 and 1. + """ + if self.use_closed_form_prob is False: + with torch.no_grad(): + self._latent_prob = torch.maximum( + self._latent_prob, torch.tensor([0]), out=self._latent_prob + ) + self._latent_prob = torch.minimum( + self._latent_prob, torch.tensor([1]), out=self._latent_prob + ) + self._latent_prob *= self._dirac + @property def covariance(self) -> torch.Tensor: """ @@ -3498,24 +3536,67 @@ class ZIPln(_model): Optional[torch.Tensor] The covariance tensor or None if components are not present. """ - if hasattr(self, "_components"): - return self.components @ (self.components.T) - return None + return self._cpu_attribute_or_none("_covariance") + + @property + def latent_prob(self): + return self._cpu_attribute_or_none("_latent_prob") + + @property + def closed_form_latent_prob(self): + """ + The closed form for the latent probability. 
+ """ + return closed_form_latent_prob( + self._exog, self._coef, self._coef_inflation, self._covariance, self._dirac + ) def compute_elbo(self): + if self._use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob return elbo_zi_pln( self._endog, self._exog, self._offsets, self._latent_mean, self._latent_sqrt_var, - self._latent_prob, - self._covariance, + latent_prob, + self._components, self._coef, self._coef_inflation, self._dirac, ) + def _compute_elbo_b(self): + if self._use_closed_form_prob is True: + latent_prob_b = _closed_form_latent_prob( + self._exog_b, + self._coef, + self._coef_inflation, + self._covariance, + self._dirac_b, + ) + else: + latent_prob_b = self._latent_prob_b + return elbo_zi_pln( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + latent_prob_b, + self._components, + self._coef, + self._coef_inflation, + self._dirac_b, + ) + + @property + def number_of_parameters(self): + return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) + @property def _list_of_parameters_needing_gradient(self): list_parameters = [ @@ -3527,36 +3608,10 @@ class ZIPln(_model): ] if self._use_closed_form: list_parameters.append(self._latent_prob) + if self._exog is not None: + list_parameters.append(self._coef) + list_parameters.append(self._coef_inflation) return list_parameters def _update_closed_forms(self): pass - self._coef = _closed_formula_coef(self._exog, self._latent_mean) - self._covariance = _closed_formula_covariance( - self._exog, - self._latent_mean, - self._latent_sqrt_var, - self._coef, - self.n_samples, - ) - self._latent_prob = _closed_formula_latent_prob( - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - self._dirac, - self._exog, - self._coef_inflation, - ) - - @property - def closed_form_latent_prob(self): - """ - The closed form for the latent probability. - """ - return closed_form_latent_prob( - self._exog, self._coef, self._coef_inflation, self._covariance, self._dirac - ) - - @property - def number_of_parameters(self): - return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) -- GitLab From 62c2d3d24da62146f2e21de9d8a32f3209bdc4b2 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 9 Oct 2023 16:38:27 +0200 Subject: [PATCH 080/167] add gradients to zero inflated class. 
--- pyPLNmodels/models.py | 313 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 313 insertions(+) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 6289640c..6c0fd0fe 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3615,3 +3615,316 @@ class ZIPln(_model): def _update_closed_forms(self): pass + + def grad_M(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + un_moins_prob = 1 - latent_prob + first = un_moins_prob * ( + self._endog + - torch.exp( + self._offsets + self._latent_mean + self.latent_sqrt_var**2 / 2 + ) + ) + MmoinsXB = self._latent_mean - self._exog @ self._coef + A = (un_moins_prob * MmoinsXB) @ torch.inverse(self._covariance) + diag_omega = torch.diag(torch.inverse(self._covariance)) + full_diag_omega = diag_omega.expand(self.exog.shape[0], -1) + second = -un_moins_prob * A + added = -full_diag_omega * latent_prob * un_moins_prob * (MmoinsXB) + return first + second + added + + def grad_S(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + Omega = torch.inverse(self.covariance) + un_moins_prob = 1 - latent_prob + first = un_moins_prob * torch.exp( + self._offsets + self._latent_mean + self._latent_sqrt_var**2 / 2 + ) + first = -torch.multiply(first, self._latent_sqrt_var) + sec = un_moins_prob * 1 / self._latent_sqrt_var + K = un_moins_prob * ( + torch.multiply( + torch.full((self.n_samples, 1), 1.0), torch.diag(Omega).unsqueeze(0) + ) + ) + third = -self._latent_sqrt_var * K + return first + sec + third + + def grad_theta(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + + un_moins_prob = 1 - latent_prob + MmoinsXB = self._latent_mean - self._exog @ self._coef + A = (un_moins_prob * MmoinsXB) @ torch.inverse(self._covariance) + diag_omega = torch.diag(torch.inverse(self._covariance)) + full_diag_omega = diag_omega.expand(self.exog.shape[0], -1) + added = latent_prob * (MmoinsXB) * full_diag_omega + A += added + second = -un_moins_prob * A + grad_no_closed_form = -self._exog.T @ second + if self.use_closed_form_prob is False: + return grad_no_closed_form + else: + XB_zero = self._exog @ self._coef_inflation + diag = torch.diag(self._covariance) + full_diag = diag.expand(self._exog.shape[0], -1) + XB = self._exog @ self._coef + derivative = d_h_x2(XB_zero, XB, full_diag, self._dirac) + grad_closed_form = self.gradients_closed_form_thetas(derivative) + return grad_closed_form + grad_no_closed_form + + def gradients_closed_form_thetas(self, derivative): + Omega = torch.inverse(self._covariance) + MmoinsXB = self._latent_mean - self._exog @ self._coef + s_rond_s = self._latent_sqrt_var**2 + latent_prob = self.closed_form_latent_prob + A = torch.exp(self._offsets + self._latent_mean + s_rond_s / 2) + poiss_term = ( + self._endog * (self._offsets + self._latent_mean) + - A + - _log_stirling(self._endog) + ) + a = -self._exog.T @ (derivative * poiss_term) + b = self._exog.T @ ( + derivative * MmoinsXB * (((1 - latent_prob) * MmoinsXB) @ Omega) + ) + c = self._exog.T @ (derivative * (self._exog @ self._coef_inflation)) + first_d = derivative * torch.log(torch.abs(self._latent_sqrt_var)) + second_d = ( + 1 / 2 * derivative @ (torch.diag(torch.log(torch.diag(self._covariance)))) + ) + d = -self._exog.T @ (first_d - second_d) + e = -self._exog.T @ ( + derivative * (_trunc_log(latent_prob) - 
_trunc_log(1 - latent_prob)) + ) + first_f = ( + +1 + / 2 + * self._exog.T + @ (derivative * (s_rond_s @ torch.diag(torch.diag(Omega)))) + ) + second_f = ( + -1 + / 2 + * self._exog.T + @ derivative + @ torch.diag(torch.diag(Omega) * torch.diag(self._covariance)) + ) + full_diag_omega = torch.diag(Omega).expand(self.exog.shape[0], -1) + common = (MmoinsXB) ** 2 * (full_diag_omega) + new_f = -1 / 2 * self._exog.T @ (derivative * common * (1 - 2 * latent_prob)) + f = first_f + second_f + new_f + return a + b + c + d + e + f + + def grad_theta_0(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + grad_no_closed_form = self._exog.T @ latent_prob - self._exog.T @ ( + torch.exp(self._exog @ self._coef_inflation) + / (1 + torch.exp(self._exog @ self._coef_inflation)) + ) + if self.use_closed_form_prob is False: + return grad_no_closed_form + else: + grad_closed_form = self.gradients_closed_form_thetas( + latent_prob * (1 - latent_prob) + ) + return grad_closed_form + grad_no_closed_form + + def grad_C(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + omega = torch.inverse(self._covariance) + if self._coef is not None: + m_minus_xb = self._latent_mean - torch.mm(self._exog, self._coef) + else: + m_minus_xb = self._latent_mean + m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) + + un_moins_rho = 1 - latent_prob + + un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb + un_moins_rho_m_moins_xb_outer = ( + un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb + ) + deter = ( + -self.n_samples + * torch.inverse(self._components @ (self._components.T)) + @ self._components + ) + sec_part_b_grad = ( + omega @ (un_moins_rho_m_moins_xb_outer) @ omega @ self._components + ) + b_grad = deter + sec_part_b_grad + + diag = torch.diag(self.covariance) + rho_t_unn = torch.sum(latent_prob, axis=0) + omega_unp = torch.sum(omega, axis=0) + K = torch.sum(un_moins_rho * self._latent_sqrt_var**2, axis=0) + diag * ( + rho_t_unn + ) + added = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) + K += added + first_part_grad = omega @ torch.diag_embed(K) @ omega @ self._components + x = torch.diag(omega) * rho_t_unn + second_part_grad = -torch.diag_embed(x) @ self._components + y = rho_t_unn + first = torch.multiply(y, 1 / torch.diag(self.covariance)).unsqueeze(1) + second = torch.full((1, self.dim), 1.0) + Diag = (first * second) * torch.eye(self.dim) + last_grad = Diag @ self._components + grad_no_closed_form = b_grad + first_part_grad + second_part_grad + last_grad + if self.use_closed_form_prob is False: + return grad_no_closed_form + else: + s_rond_s = self._latent_sqrt_var**2 + XB_zero = self._exog @ self._coef_inflation + XB = self._exog @ self._coef + A = torch.exp(self._offsets + self._latent_mean + s_rond_s / 2) + poiss_term = ( + self._endog * (self._offsets + self._latent_mean) + - A + - _log_stirling(self._endog) + ) + full_diag_sigma = diag.expand(self._exog.shape[0], -1) + full_diag_omega = torch.diag(omega).expand(self._exog.shape[0], -1) + H3 = d_h_x3(XB_zero, XB, full_diag_sigma, self._dirac) + poiss_term_H = poiss_term * H3 + a = ( + -2 + * ( + ((poiss_term_H.T @ torch.ones(self.n_samples, self.dim))) + * (torch.eye(self.dim)) + ) + @ self._components + ) + B_Omega = ((1 - latent_prob) * m_minus_xb) @ omega + K = H3 * B_Omega * m_minus_xb + b = ( + 2 + * ( + ( + (m_minus_xb * B_Omega * H3).T + @ torch.ones(self.n_samples, 
self.dim) + ) + * torch.eye(self.dim) + ) + @ self._components + ) + c = ( + 2 + * ( + ((XB_zero * H3).T @ torch.ones(self.n_samples, self.dim)) + * torch.eye(self.dim) + ) + @ self._components + ) + d = ( + -2 + * ( + ( + (torch.log(torch.abs(self._latent_sqrt_var)) * H3).T + @ torch.ones(self.n_samples, self.dim) + ) + * torch.eye(self.dim) + ) + @ self._components + ) + log_full_diag_sigma = torch.log(diag).expand(self._exog.shape[0], -1) + d += ( + ((log_full_diag_sigma * H3).T @ torch.ones(self.n_samples, self.dim)) + * torch.eye(self.dim) + ) @ self._components + e = ( + -2 + * ( + ( + ((_trunc_log(latent_prob) - _trunc_log(1 - latent_prob)) * H3).T + @ torch.ones(self.n_samples, self.dim) + ) + * torch.eye(self.dim) + ) + @ self._components + ) + f = ( + -( + ( + (full_diag_omega * (full_diag_sigma - s_rond_s) * H3).T + @ torch.ones(self.n_samples, self.dim) + ) + * torch.eye(self.dim) + ) + @ self._components + ) + f -= ( + ( + ((1 - 2 * latent_prob) * m_minus_xb**2 * full_diag_omega * H3).T + @ torch.ones(self.n_samples, self.dim) + ) + * torch.eye(self.dim) + ) @ self._components + grad_closed_form = a + b + c + d + e + f + return grad_closed_form + grad_no_closed_form + + def grad_rho(self): + if self.use_closed_form_prob is True: + latent_prob = self.closed_form_latent_prob + else: + latent_prob = self._latent_prob + omega = torch.inverse(self._covariance) + s_rond_s = self._latent_sqrt_var * self._latent_sqrt_var + A = torch.exp(self._offsets + self._latent_mean + s_rond_s / 2) + first = ( + -self._endog * (self._offsets + self._latent_mean) + + A + + _log_stirling(self._endog) + ) + un_moins_prob = 1 - latent_prob + MmoinsXB = self._latent_mean - self._exog @ self._coef + A = (un_moins_prob * MmoinsXB) @ torch.inverse(self._covariance) + second = MmoinsXB * A + third = self._exog @ self._coef_inflation + fourth_first = -torch.log(torch.abs(self._latent_sqrt_var)) + fourth_second = ( + 1 + / 2 + * torch.multiply( + torch.full((self.n_samples, 1), 1.0), + torch.log(torch.diag(self.covariance)).unsqueeze(0), + ) + ) + fourth = fourth_first + fourth_second + fifth = _trunc_log(un_moins_prob) - _trunc_log(latent_prob) + sixth_first = ( + 1 + / 2 + * torch.multiply( + torch.full((self.n_samples, 1), 1.0), torch.diag(omega).unsqueeze(0) + ) + * s_rond_s + ) + sixth_second = ( + -1 + / 2 + * torch.multiply( + torch.full((self.n_samples, 1), 1.0), + (torch.diag(omega) * torch.diag(self._covariance)).unsqueeze(0), + ) + ) + sixth = sixth_first + sixth_second + full_diag_omega = torch.diag(omega).expand(self.exog.shape[0], -1) + seventh = -1 / 2 * (1 - 2 * latent_prob) * (MmoinsXB) ** 2 * (full_diag_omega) + return first + second + third + fourth + fifth + sixth + seventh -- GitLab From 1958d93b3c17d2051e0dab8ba38b9e9de9ebb550 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 10 Oct 2023 19:16:24 +0200 Subject: [PATCH 081/167] pass the tests but ZI or stochasticity have not been tested yet. 
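
Among the helpers this patch adds to _utils.py is a fixed-point (Newton) approximation of the Lambert W function, which the closed-form latent probability relies on through phi. The following standalone check reproduces the two helper functions from the diff below and verifies the defining identity w * exp(w) = y; the test values are arbitrary.

    import torch

    def pf_lambert(x, y):
        # One fixed-point (Newton) step for x * exp(x) = y, as in _utils.pf_lambert.
        return x - (1 - (y * torch.exp(-x) + 1) / (x + 1))

    def lambert(y, nb_pf=10):
        # Approximates the Lambert W function: returns w such that w * exp(w) = y.
        x = torch.log(1 + y)
        for _ in range(nb_pf):
            x = pf_lambert(x, y)
        return x

    y = torch.tensor([0.5, 1.0, 5.0, 50.0])
    w = lambert(y)
    print(w * torch.exp(w))  # should be close to y
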
--- pyPLNmodels/_closed_forms.py | 15 +++++++++ pyPLNmodels/_utils.py | 48 +++++++++++++++++++++++++++-- pyPLNmodels/models.py | 59 ++++++++++++++++++++++-------------- tests/test_pln_full.py | 2 +- tests/test_setters.py | 18 ++++++----- 5 files changed, 109 insertions(+), 33 deletions(-) diff --git a/pyPLNmodels/_closed_forms.py b/pyPLNmodels/_closed_forms.py index b57e7850..3524d48d 100644 --- a/pyPLNmodels/_closed_forms.py +++ b/pyPLNmodels/_closed_forms.py @@ -1,4 +1,5 @@ from typing import Optional +from ._utils import phi import torch # pylint:disable=[C0114] @@ -98,3 +99,17 @@ def _closed_formula_pi( """ poiss_param = torch.exp(offsets + latent_mean + 0.5 * torch.square(latent_sqrt_var)) return torch._sigmoid(poiss_param + torch.mm(exog, _coef_inflation)) * dirac + + +def _closed_formula_latent_prob(exog, coef, coef_infla, cov, dirac): + if exog is not None: + XB = exog @ coef + XB_zero = exog @ coef_infla + else: + XB_zero = 0 + XB = 0 + XB_zero = exog @ coef_infla + pi = torch.sigmoid(XB_zero) + diag = torch.diag(cov) + full_diag = diag.expand(exog.shape[0], -1) + return torch.sigmoid(XB_zero - torch.log(phi(XB, full_diag))) * dirac diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index d2b1aea0..7169b74d 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -79,8 +79,8 @@ class _PlotArgs: """ ax = plt.gca() if ax is None else ax ax.plot( - self.running_times[self.window :], - self.criterions[self.window :], + self.running_times, + self.criterions, label="Delta", ) ax.set_yscale("log") @@ -1004,3 +1004,47 @@ def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also= return fun return wrapper + + +def pf_lambert(x, y): + return x - (1 - (y * torch.exp(-x) + 1) / (x + 1)) + + +def lambert(y, nb_pf=10): + x = torch.log(1 + y) + for _ in range(nb_pf): + x = pf_lambert(x, y) + return x + + +def d_varpsi_x1(mu, sigma2): + W = lambert(sigma2 * torch.exp(mu)) + first = phi(mu, sigma2) + third = 1 / sigma2 + 1 / 2 * 1 / ((1 + W) ** 2) + return -first * W * third + + +def phi(mu, sigma2): + y = sigma2 * torch.exp(mu) + lamby = lambert(y) + log_num = -1 / (2 * sigma2) * (lamby**2 + 2 * lamby) + return torch.exp(log_num) / torch.sqrt(1 + lamby) + + +def d_varpsi_x2(mu, sigma2): + first = d_varpsi_x1(mu, sigma2) / sigma2 + W = lambert(sigma2 * torch.exp(mu)) + second = (W**2 + 2 * W) / 2 / (sigma2**2) * phi(mu, sigma2) + return first + second + + +def d_h_x2(a, x, y, dirac): + rho = torch.sigmoid(a - torch.log(phi(x, y))) * dirac + rho_prime = rho * (1 - rho) + return -rho_prime * d_varpsi_x1(x, y) / phi(x, y) + + +def d_h_x3(a, x, y, dirac): + rho = torch.sigmoid(a - torch.log(phi(x, y))) * dirac + rho_prime = rho * (1 - rho) + return -rho_prime * d_varpsi_x2(x, y) / phi(x, y) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 6c0fd0fe..64ddb104 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -32,7 +32,6 @@ from ._utils import ( _array2tensor, _handle_data, _add_doc, - _closed_form_latent_prob, ) from ._initialization import ( @@ -65,6 +64,7 @@ class _model(ABC): _beginning_time: float _latent_sqrt_var: torch.Tensor _latent_mean: torch.Tensor + _batch_size: int = None def __init__( self, @@ -164,7 +164,10 @@ class _model(ABC): """ if "coef" not in dict_initialization.keys(): print("No coef is initialized.") - self.coef = None + dict_initialization["coef"] = None + if self._NAME == "Pln": + del dict_initialization["covariance"] + del dict_initialization["coef"] for key, array in dict_initialization.items(): array = 
_format_data(array) setattr(self, key, array) @@ -175,6 +178,8 @@ class _model(ABC): """ The batch size of the model. Should not be greater than the number of samples. """ + if self._batch_size is None: + return self.n_samples return self._batch_size @property @@ -265,7 +270,7 @@ class _model(ABC): int The number of iterations done. """ - return len(self._plotargs._elbos_list) * self._nb_batches + return len(self._plotargs._elbos_list) * self.nb_batches @property def n_samples(self) -> int: @@ -359,7 +364,7 @@ class _model(ABC): def _put_parameters_to_device(self): """ - Move parameters to the device. + Move parameters to the cGPU device if present. """ for parameter in self._list_of_parameters_needing_gradient: parameter.requires_grad_(True) @@ -374,7 +379,6 @@ class _model(ABC): List[torch.Tensor] List of parameters needing gradient. """ - ... def fit( self, @@ -459,9 +463,13 @@ class _model(ABC): def _return_batch(self, indices, beginning, end): to_take = torch.tensor(indices[beginning:end]).to(DEVICE) + if self._exog is not None: + exog_b = torch.index_select(self._exog, 0, to_take) + else: + exog_b = None return ( torch.index_select(self._endog, 0, to_take), - torch.index_select(self._exog, 0, to_take), + exog_b, torch.index_select(self._offsets, 0, to_take), torch.index_select(self._latent_mean, 0, to_take), torch.index_select(self._latent_sqrt_var, 0, to_take), @@ -469,14 +477,14 @@ class _model(ABC): @property def _nb_full_batch(self): - return self.n_samples // self._batch_size + return self.n_samples // self.batch_size @property def _last_batch_size(self): - return self.n_samples % self._batch_size + return self.n_samples % self.batch_size @property - def _nb_batches(self): + def nb_batches(self): return self._nb_full_batch + (self._last_batch_size > 0) def _trainstep(self): @@ -495,9 +503,9 @@ class _model(ABC): loss = -self._compute_elbo_b() loss.backward() elbo += loss.item() - self._udpate_parameters() + self._update_parameters() self._update_closed_forms() - return elbo / self._nb_batches + return elbo / self.nb_batches def _extract_batch(self, batch): self._endog_b = batch[0] @@ -740,8 +748,9 @@ class _model(ABC): """ self._plotargs._elbos_list.append(-loss) self._plotargs.running_times.append(time.time() - self._beginning_time) + elbo = -loss self._plotargs.cumulative_elbo_list.append( - self._plotargs.cumulative_elbo_list - loss + self._plotargs.cumulative_elbo + elbo ) criterion = ( self._plotargs.cumulative_elbo_list[-2] @@ -1652,7 +1661,11 @@ class Pln(_model): ---------- coef : Union[torch.Tensor, np.ndarray, pd.DataFrame] The regression coefficients of the gaussian latent variables. + Raises + ------ + AttributeError since you can not set the coef in the Pln model. """ + raise AttributeError("You can not set the coef in the Pln model.") def _endog_predictions(self): return torch.exp( @@ -3543,17 +3556,17 @@ class ZIPln(_model): return self._cpu_attribute_or_none("_latent_prob") @property - def closed_form_latent_prob(self): + def closed_formula_latent_prob(self): """ The closed form for the latent probability. 
""" - return closed_form_latent_prob( + return closed_formula_latent_prob( self._exog, self._coef, self._coef_inflation, self._covariance, self._dirac ) def compute_elbo(self): if self._use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob return elbo_zi_pln( @@ -3571,7 +3584,7 @@ class ZIPln(_model): def _compute_elbo_b(self): if self._use_closed_form_prob is True: - latent_prob_b = _closed_form_latent_prob( + latent_prob_b = _closed_formula_latent_prob( self._exog_b, self._coef, self._coef_inflation, @@ -3618,7 +3631,7 @@ class ZIPln(_model): def grad_M(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob un_moins_prob = 1 - latent_prob @@ -3638,7 +3651,7 @@ class ZIPln(_model): def grad_S(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob Omega = torch.inverse(self.covariance) @@ -3658,7 +3671,7 @@ class ZIPln(_model): def grad_theta(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob @@ -3686,7 +3699,7 @@ class ZIPln(_model): Omega = torch.inverse(self._covariance) MmoinsXB = self._latent_mean - self._exog @ self._coef s_rond_s = self._latent_sqrt_var**2 - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob A = torch.exp(self._offsets + self._latent_mean + s_rond_s / 2) poiss_term = ( self._endog * (self._offsets + self._latent_mean) @@ -3727,7 +3740,7 @@ class ZIPln(_model): def grad_theta_0(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob grad_no_closed_form = self._exog.T @ latent_prob - self._exog.T @ ( @@ -3744,7 +3757,7 @@ class ZIPln(_model): def grad_C(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob omega = torch.inverse(self._covariance) @@ -3881,7 +3894,7 @@ class ZIPln(_model): def grad_rho(self): if self.use_closed_form_prob is True: - latent_prob = self.closed_form_latent_prob + latent_prob = self.closed_formula_latent_prob else: latent_prob = self._latent_prob omega = torch.inverse(self._covariance) diff --git a/tests/test_pln_full.py b/tests/test_pln_full.py index 2d61befd..870114a0 100644 --- a/tests/test_pln_full.py +++ b/tests/test_pln_full.py @@ -8,7 +8,7 @@ from tests.utils import filter_models @filter_models(["Pln"]) def test_number_of_iterations_pln_full(fitted_pln): nb_iterations = len(fitted_pln._elbos_list) - assert 50 < nb_iterations < 500 + assert 20 < nb_iterations < 1000 @pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) diff --git a/tests/test_setters.py b/tests/test_setters.py index 828989e8..eb7814d7 100644 --- a/tests/test_setters.py +++ b/tests/test_setters.py @@ -19,7 +19,8 @@ def test_data_setter_with_torch(pln): def test_parameters_setter_with_torch(pln): pln.latent_mean = pln.latent_mean pln.latent_sqrt_var = pln.latent_sqrt_var - pln.coef = pln.coef + if pln._NAME != "Pln": + pln.coef = pln.coef if pln._NAME == "PlnPCA": pln.components = pln.components pln.fit() @@ 
-50,7 +51,8 @@ def test_parameters_setter_with_numpy(pln): np_coef = None pln.latent_mean = np_latent_mean pln.latent_sqrt_var = np_latent_sqrt_var - pln.coef = np_coef + if pln._NAME != "Pln": + pln.coef = np_coef if pln._NAME == "PlnPCA": pln.components = pln.components.numpy() pln.fit() @@ -81,7 +83,8 @@ def test_parameters_setter_with_pandas(pln): pd_coef = None pln.latent_mean = pd_latent_mean pln.latent_sqrt_var = pd_latent_sqrt_var - pln.coef = pd_coef + if pln._NAME != "Pln": + pln.coef = pd_coef if pln._NAME == "PlnPCA": pln.components = pd.DataFrame(pln.components.numpy()) pln.fit() @@ -141,8 +144,9 @@ def test_fail_parameters_setter_with_torch(pln): d = 0 else: d = pln.exog.shape[-1] - with pytest.raises(ValueError): - pln.coef = torch.zeros(d + 1, dim) + if pln._NAME != "Pln": + with pytest.raises(ValueError): + pln.coef = torch.zeros(d + 1, dim) - with pytest.raises(ValueError): - pln.coef = torch.zeros(d, dim + 1) + with pytest.raises(ValueError): + pln.coef = torch.zeros(d, dim + 1) -- GitLab From 1292015cc1658829e6c60885c1c91580148a5bb6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 10 Oct 2023 22:55:12 +0200 Subject: [PATCH 082/167] add ZI in the __init__ --- pyPLNmodels/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index e785b288..6ed723c7 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -1,4 +1,4 @@ -from .models import PlnPCAcollection, Pln, PlnPCA # pylint:disable=[C0114] +from .models import PlnPCAcollection, Pln, PlnPCA, ZIPln # pylint:disable=[C0114] from .oaks import load_oaks from .elbos import profiled_elbo_pln, elbo_plnpca, elbo_pln from ._utils import ( -- GitLab From 54294d51512374728a1e502b6d3fb4820c17f24b Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 11 Oct 2023 09:03:48 +0200 Subject: [PATCH 083/167] tried to import the ZI. 
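
With ZIPln exported from the package __init__ in the previous patch, the zero-inflated model can be imported alongside Pln and PlnPCA, and the constructor reworked in the diff below exposes the use_closed_form_prob switch. A minimal usage sketch mirroring the docstring examples in this series (the model only becomes fully fittable a few patches later, so outputs at this point may differ):

    from pyPLNmodels import ZIPln, get_real_count_data

    endog = get_real_count_data()
    zi = ZIPln(endog, add_const=True, use_closed_form_prob=False)
    zi.fit()
    print(zi)
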
--- pyPLNmodels/models.py | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 64ddb104..7974a1e4 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3382,7 +3382,7 @@ class ZIPln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, - use_closed_form: bool = False, + use_closed_form_prob: bool = False, ): super().__init__( endog=endog, @@ -3393,7 +3393,32 @@ class ZIPln(_model): take_log_offsets=take_log_offsets, add_const=add_const, ) - self._use_closed_form = use_closed_form + self._use_closed_form_prob = use_closed_form_prob + + def _extract_batch(self, batch): + super()._extract_batch(batch) + if self._use_closed_form_prob is False: + self._latent_prob_b = batch[5] + + def _return_batch(self, indices, beginning, end): + pln_batch = super()._return_batch(indices, beginning, end) + if self._use_closed_form_prob is False: + return pln_batch + torch.index_select(self._latent_prob, 0, to_take) + return pln_batch + + def _return_batch(self, indices, beginning, end): + to_take = torch.tensor(indices[beginning:end]).to(DEVICE) + if self._exog is not None: + exog_b = torch.index_select(self._exog, 0, to_take) + else: + exog_b = None + return ( + torch.index_select(self._endog, 0, to_take), + exog_b, + torch.index_select(self._offsets, 0, to_take), + torch.index_select(self._latent_mean, 0, to_take), + torch.index_select(self._latent_sqrt_var, 0, to_take), + ) @classmethod @_add_doc( @@ -3439,7 +3464,7 @@ class ZIPln(_model): example=""" >>> from pyPLNmodels import ZIPln, get_real_count_data >>> endog = get_real_count_data() - >>> zi = Pln(endog,add_const = True) + >>> zi = ZIPln(endog,add_const = True) >>> zi.fit() >>> print(zi) """, @@ -3493,7 +3518,7 @@ class ZIPln(_model): # should change the good initialization for _coef_inflation def _smart_init_model_parameters(self): # init of _coef. - super()._smart_init_model_parameters() + super()._smart_init_coef() if not hasattr(self, "_covariance"): self._components = _init_components(self._endog, self._exog, self.dim) if not hasattr(self, "_coef_inflation"): @@ -3619,7 +3644,7 @@ class ZIPln(_model): self._components, self._coef, ] - if self._use_closed_form: + if self._use_closed_form_prob: list_parameters.append(self._latent_prob) if self._exog is not None: list_parameters.append(self._coef) -- GitLab From f12ef25c56b9a6c527aadbb649ea61d407bd99c5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 12 Oct 2023 10:25:12 +0200 Subject: [PATCH 084/167] finally add the right criterion --- pyPLNmodels/_utils.py | 35 ++++++++++++++++++++-- pyPLNmodels/models.py | 67 ++++++++++++++++++------------------------- 2 files changed, 61 insertions(+), 41 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 7169b74d..582f859f 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -23,15 +23,46 @@ else: DEVICE = torch.device("cpu") -class _PlotArgs: +BETA = 0.03 + + +class _CriterionArgs: def __init__(self): """ Initialize the PlotArgs class. + + Parameters + ---------- + window : int + The size of the window for computing the criterion. 
""" self.running_times = [] - self.criterions = [] self._elbos_list = [] self.cumulative_elbo_list = [0] + self.new_derivative = 0 + self.normalized_elbo_list = [] + self.criterion_list = [1] + + def update_criterion(self, elbo, running_time): + self._elbos_list.append(elbo) + self.running_times.append(running_time) + self.cumulative_elbo_list.append(self.cumulative_elbo + elbo) + self.normalized_elbo_list.append(-elbo / self.cumulative_elbo_list[-1]) + if self.iteration_number > 1: + current_derivative = np.abs( + (self.normalized_elbo_list[-2] - self.normalized_elbo_list[-1]) + / (self.running_times[-2] - self.running_times[-1]) + ) + old_derivative = self.new_derivative + self.new_derivative = ( + self.new_derivative * (1 - BETA) + current_derivative * BETA + ) + current_hessian = np.abs( + (self.new_derivative - self.old_derivative) + / (self.running_times[-2] - self.running_times[-1]) + ) + self.criterion = self.criterion * (1 - BETA) + current_hessian * BETA + self.criterion_list.append(self.criterion) @property def iteration_number(self) -> int: diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 7974a1e4..41cb12a8 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -22,7 +22,7 @@ from ._closed_forms import ( ) from .elbos import elbo_plnpca, elbo_zi_pln, profiled_elbo_pln from ._utils import ( - _PlotArgs, + _CriterionArgs, _format_data, _nice_string_of_dict, _plot_ellipse, @@ -111,7 +111,7 @@ class _model(ABC): endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False - self._plotargs = _PlotArgs() + self._criterion_args = _CriterionArgs() if dict_initialization is not None: self._set_init_parameters(dict_initialization) @@ -270,7 +270,7 @@ class _model(ABC): int The number of iterations done. """ - return len(self._plotargs._elbos_list) * self.nb_batches + return len(self._criterion_args._elbos_list) * self.nb_batches @property def n_samples(self) -> int: @@ -385,7 +385,7 @@ class _model(ABC): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-3, + tol: float = 1e-8, do_smart_init: bool = True, verbose: bool = False, batch_size=None, @@ -400,7 +400,7 @@ class _model(ABC): lr : float, optional(keyword-only) The learning rate. Defaults to 0.01. tol : float, optional(keyword-only) - The tolerance for convergence. Defaults to 1e-3. + The tolerance for convergence. Defaults to 1e-8. do_smart_init : bool, optional(keyword-only) Whether to perform smart initialization. Defaults to True. 
verbose : bool, optional(keyword-only) @@ -414,14 +414,14 @@ class _model(ABC): self._batch_size = self._handle_batch_size(batch_size) if self._fitted is False: self._init_parameters(do_smart_init) - elif len(self._plotargs.running_times) > 0: - self._beginning_time -= self._plotargs.running_times[-1] + elif len(self._criterion_args.running_times) > 0: + self._beginning_time -= self._criterion_args.running_times[-1] self._put_parameters_to_device() self._handle_optimizer(lr) stop_condition = False while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() - criterion = self._compute_criterion_and_update_plotargs(loss, tol) + criterion = self._update_criterion_args(loss) if abs(criterion) < tol: stop_condition = True if verbose and self.nb_iteration_done % 50 == 1: @@ -711,14 +711,14 @@ class _model(ABC): if stop_condition is True: print( f"Tolerance {tol} reached " - f"in {self._plotargs.iteration_number} iterations" + f"in {self._criterion_args.iteration_number} iterations" ) else: print( "Maximum number of iterations reached : ", - self._plotargs.iteration_number, + self._criterion_args.iteration_number, "last criterion = ", - np.round(self._plotargs.criterions[-1], 8), + np.round(self._criterion_args.criterions[-1], 8), ) def _print_stats(self): @@ -726,11 +726,11 @@ class _model(ABC): Print the training statistics. """ print("-------UPDATE-------") - print("Iteration number: ", self._plotargs.iteration_number) - print("Criterion: ", np.round(self._plotargs.criterions[-1], 8)) - print("ELBO:", np.round(self._plotargs._elbos_list[-1], 6)) + print("Iteration number: ", self._criterion_args.iteration_number) + print("Criterion: ", np.round(self._criterion_args.criterions[-1], 8)) + print("ELBO:", np.round(self._criterion_args._elbos_list[-1], 6)) - def _compute_criterion_and_update_plotargs(self, loss, tol): + def _update_criterion_args(self, loss): """ Compute the convergence criterion and update the plot arguments. @@ -738,26 +738,15 @@ class _model(ABC): ---------- loss : torch.Tensor The loss value. - tol : float - The tolerance for convergence. Returns ------- float The computed criterion. """ - self._plotargs._elbos_list.append(-loss) - self._plotargs.running_times.append(time.time() - self._beginning_time) - elbo = -loss - self._plotargs.cumulative_elbo_list.append( - self._plotargs.cumulative_elbo + elbo - ) - criterion = ( - self._plotargs.cumulative_elbo_list[-2] - - self._plotargs.cumulative_elbo_list[-1] - ) / self._plotargs.cumulative_elbo_list[-1] - self._plotargs.criterions.append(criterion) - return criterion + current_running_time = time.time() - self._beginning_time + self._criterion_args.update_criterion(-loss, current_running_time) + return self._criterion_args.criterion def _update_closed_forms(self): """ @@ -858,8 +847,8 @@ class _model(ABC): if axes is None: _, axes = plt.subplots(1, nb_axes, figsize=(23, 5)) if self._fitted is True: - self._plotargs._show_loss(ax=axes[2]) - self._plotargs._show_stopping_criterion(ax=axes[1]) + self._criterion_args._show_loss(ax=axes[2]) + self._criterion_args._show_stopping_criterion(ax=axes[1]) self.display_covariance(ax=axes[0]) else: self.display_covariance(ax=axes) @@ -870,7 +859,7 @@ class _model(ABC): """ Property representing the list of ELBO values. 
""" - return self._plotargs._elbos_list + return self._criterion_args._elbos_list @property def loglike(self): @@ -884,8 +873,8 @@ class _model(ABC): """ if len(self._elbos_list) == 0: t0 = time.time() - self._plotargs._elbos_list.append(self.compute_elbo().item()) - self._plotargs.running_times.append(time.time() - t0) + self._criterion_args._elbos_list.append(self.compute_elbo().item()) + self._criterion_args.running_times.append(time.time() - t0) return self.n_samples * self._elbos_list[-1] @property @@ -1512,7 +1501,7 @@ class Pln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-3, + tol: float = 1e-8, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, @@ -2267,7 +2256,7 @@ class PlnPCAcollection: nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-3, + tol: float = 1e-8, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, @@ -2282,7 +2271,7 @@ class PlnPCAcollection: lr : float, optional(keyword-only) The learning rate, by default 0.01. tol : float, optional(keyword-only) - The tolerance, by default 1e-3. + The tolerance, by default 1e-8. do_smart_init : bool, optional(keyword-only) Whether to do smart initialization, by default True. verbose : bool, optional(keyword-only) @@ -2795,7 +2784,7 @@ class PlnPCA(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-3, + tol: float = 1e-8, do_smart_init: bool = True, verbose: bool = False, batch_size=None, @@ -3474,7 +3463,7 @@ class ZIPln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-3, + tol: float = 1e-8, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, -- GitLab From 583f638d20e6da4453f43ed4c17deaa417189e31 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 12 Oct 2023 10:36:39 +0200 Subject: [PATCH 085/167] minor changes. 
--- pyPLNmodels/_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 582f859f..053a6448 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -42,6 +42,7 @@ class _CriterionArgs: self.new_derivative = 0 self.normalized_elbo_list = [] self.criterion_list = [1] + self.criterion = 1 def update_criterion(self, elbo, running_time): self._elbos_list.append(elbo) @@ -58,7 +59,7 @@ class _CriterionArgs: self.new_derivative * (1 - BETA) + current_derivative * BETA ) current_hessian = np.abs( - (self.new_derivative - self.old_derivative) + (self.new_derivative - old_derivative) / (self.running_times[-2] - self.running_times[-1]) ) self.criterion = self.criterion * (1 - BETA) + current_hessian * BETA -- GitLab From 12842130989f4d49a681334cda57040d167fc05a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 12 Oct 2023 10:42:16 +0200 Subject: [PATCH 086/167] error --- pyPLNmodels/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 41cb12a8..04ed41de 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -718,7 +718,7 @@ class _model(ABC): "Maximum number of iterations reached : ", self._criterion_args.iteration_number, "last criterion = ", - np.round(self._criterion_args.criterions[-1], 8), + np.round(self._criterion_args.criterion_list[-1], 8), ) def _print_stats(self): @@ -727,7 +727,7 @@ class _model(ABC): """ print("-------UPDATE-------") print("Iteration number: ", self._criterion_args.iteration_number) - print("Criterion: ", np.round(self._criterion_args.criterions[-1], 8)) + print("Criterion: ", np.round(self._criterion_args.criterion_list[-1], 8)) print("ELBO:", np.round(self._criterion_args._elbos_list[-1], 6)) def _update_criterion_args(self, loss): -- GitLab From 004b8f678d7dc90b2f63ebfe74b266528cb8d17f Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 09:31:54 +0200 Subject: [PATCH 087/167] add needed abstract methods to implement a new model. --- pyPLNmodels/models.py | 67 ++++++++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 29 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 04ed41de..e8970fec 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -370,6 +370,7 @@ class _model(ABC): parameter.requires_grad_(True) @property + @abstractmethod def _list_of_parameters_needing_gradient(self): """ A list containing all the parameters that need to be upgraded via a gradient step. @@ -380,6 +381,41 @@ class _model(ABC): List of parameters needing gradient. """ + def _print_beginning_message(self) -> str: + """ + Method for printing the beginning message. + """ + print(f"Fitting a {self._NAME} model with {self._description} \n") + + @abstractmethod + def _endog_predictions(self): + pass + + @abstractmethod + def number_of_parameters(self): + pass + + @abstractmethod + def _compute_elbo_b(self): + pass + + @property + @abstractmethod + def covariance(self): + pass + + @covariance.setter + @abstractmethod + def covariance(self, covariance): + pass + + @property + @abstractmethod + def _description(self): + """ + Describes the model and what it does. 
+ """ + def fit( self, nb_max_iteration: int = 50000, @@ -1793,12 +1829,6 @@ class Pln(_model): covariances = components_var @ (sk_components.T.unsqueeze(0)) return covariances - def _print_beginning_message(self): - """ - Method for printing the beginning message. - """ - print(f"Fitting a Pln model with {self._description}") - @property @_add_doc( _model, @@ -2216,17 +2246,6 @@ class PlnPCAcollection: """ return [model.rank for model in self.values()] - def _print_beginning_message(self) -> str: - """ - Method for printing the beginning message. - - Returns - ------- - str - The beginning message. - """ - return f"Adjusting {len(self.ranks)} Pln models for PCA analysis \n" - @property def dim(self) -> int: """ @@ -3045,13 +3064,6 @@ class PlnPCA(_model): """ return self._rank - def _print_beginning_message(self): - """ - Print the beginning message when fitted. - """ - print("-" * NB_CHARACTERS_FOR_NICE_PLOT) - print(f"Fitting a PlnPCA model with {self._rank} components") - @property def model_parameters(self) -> Dict[str, torch.Tensor]: """ @@ -3235,7 +3247,7 @@ class PlnPCA(_model): @property def _description(self) -> str: """ - Property representing the description. + Description output when fitting and printing the model. Returns ------- @@ -3496,7 +3508,7 @@ class ZIPln(_model): @property def _description(self): - return "with full covariance model and zero-inflation." + return " full covariance model and zero-inflation." def _random_init_model_parameters(self): super()._random_init_model_parameters() @@ -3513,9 +3525,6 @@ class ZIPln(_model): if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) - def _print_beginning_message(self): - print("Fitting a ZIPln model.") - def _random_init_latent_parameters(self): self._dirac = self._endog == 0 self._latent_mean = torch.randn(self.n_samples, self.dim) -- GitLab From 7f02282d1fa552d20ff0086f53b3d51814dfa779 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 12:42:27 +0200 Subject: [PATCH 088/167] rrange the abstract methods. --- pyPLNmodels/models.py | 543 +++++++++++++++++++++--------------------- 1 file changed, 271 insertions(+), 272 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 41cb12a8..d89c9025 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -324,19 +324,7 @@ class _model(ABC): self._coef = None self._coef = torch.randn((self.nb_cov, self.dim), device=DEVICE) - @abstractmethod - def _random_init_model_parameters(self): - """ - Abstract method to randomly initialize model parameters. - """ - pass - @abstractmethod - def _random_init_latent_parameters(self): - """ - Abstract method to randomly initialize latent parameters. - """ - pass def _smart_init_latent_parameters(self): """ @@ -369,16 +357,6 @@ class _model(ABC): for parameter in self._list_of_parameters_needing_gradient: parameter.requires_grad_(True) - @property - def _list_of_parameters_needing_gradient(self): - """ - A list containing all the parameters that need to be upgraded via a gradient step. - - Returns - ------- - List[torch.Tensor] - List of parameters needing gradient. - """ def fit( self, @@ -579,7 +557,7 @@ class _model(ABC): return pca @property - def latent_var(self) -> torch.Tensor: + def latent_variance(self) -> torch.Tensor: """ Property representing the latent variance. 
@@ -689,13 +667,18 @@ class _model(ABC): ) plt.show() + @property - @abstractmethod - def latent_variables(self): + def _latent_var(self) -> torch.Tensor: """ - Abstract property representing the latent variables. + Property representing the latent variance. + + Returns + ------- + torch.Tensor + The latent variance tensor. """ - pass + return self._latent_sqrt_var**2 def _print_end_of_fitting_message(self, stop_condition: bool, tol: float): """ @@ -754,13 +737,6 @@ class _model(ABC): """ pass - @abstractmethod - def compute_elbo(self): - """ - Compute the Evidence Lower BOund (ELBO) that will be maximized - by pytorch. - """ - pass def display_covariance(self, ax=None, savefig=False, name_file=""): """ @@ -1385,8 +1361,74 @@ class _model(ABC): ax.legend() return ax + @property + @abstractmethod + def latent_variables(self) -> torch.Tensor: + """ + Property representing the latent variables. + + Returns + ------- + torch.Tensor + The latent variables of size (n_samples, dim). + """ + + @abstractmethod + def compute_elbo(self): + """ + Compute the Evidence Lower BOund (ELBO) that will be maximized + by pytorch. + + Returns + ------- + torch.Tensor + The computed ELBO. + """ + + @abstractmethod + def _compute_elbo_b(self): + """ + Compute the Evidence Lower BOund (ELBO) for the current mini-batch. + Returns + ------- + torch.Tensor + The computed ELBO on the current batch. + """ + + @abstractmethod + def _random_init_model_parameters(self): + """ + Abstract method to randomly initialize model parameters. + """ + + @abstractmethod + def _random_init_latent_parameters(self): + """ + Abstract method to randomly initialize latent parameters. + """ + @abstractmethod + def _smart_init_latent_parameters(self): + """ + Method for smartly initializing the latent parameters. + """ + @abstractmethod + def _smart_init_model_parameters(self): + """ + Method for smartly initializing the model parameters. + """ + + @property + @abstractmethod + def _list_of_parameters_needing_gradient(self): + """ + A list containing all the parameters that need to be upgraded via a gradient step. + + Returns + ------- + List[torch.Tensor] + List of parameters needing gradient. + """ -# need to do a good init for M and S class Pln(_model): """ Pln class. @@ -1661,34 +1703,6 @@ class Pln(_model): self._offsets + self._latent_mean + 1 / 2 * self._latent_sqrt_var**2 ) - def _smart_init_latent_parameters(self): - """ - Method for smartly initializing the latent parameters. - """ - self._random_init_latent_parameters() - - def _random_init_latent_parameters(self): - """ - Method for randomly initializing the latent parameters. - """ - if not hasattr(self, "_latent_sqrt_var"): - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE) - ) - if not hasattr(self, "_latent_mean"): - self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) - - @property - def _list_of_parameters_needing_gradient(self): - """ - Property representing the list of parameters needing gradient. - - Returns - ------- - list - The list of parameters needing gradient. - """ - return [self._latent_mean, self._latent_sqrt_var] def _get_max_components(self): """ @@ -1701,60 +1715,6 @@ class Pln(_model): """ return self.dim - def compute_elbo(self): - """ - Method for computing the evidence lower bound (ELBO). - - Returns - ------- - torch.Tensor - The computed ELBO. 
- Examples - -------- - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> elbo = pln.compute_elbo() - >>> print("elbo", elbo) - >>> print("loglike/n", pln.loglike/pln.n_samples) - """ - return profiled_elbo_pln( - self._endog, - self._exog, - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - ) - - def _compute_elbo_b(self): - """ - Method for computing the evidence lower bound (ELBO) on the current batch. - - Returns - ------- - torch.Tensor - The computed ELBO on the current batch. - """ - return profiled_elbo_pln( - self._endog_b, - self._exog_b, - self._offsets_b, - self._latent_mean_b, - self._latent_sqrt_var_b, - ) - - def _smart_init_model_parameters(self): - """ - Method for smartly initializing the model parameters. - """ - # no model parameters since we are doing a profiled ELBO - - def _random_init_model_parameters(self): - """ - Method for randomly initializing the model parameters. - """ - # no model parameters since we are doing a profiled ELBO @property def _coef(self): @@ -1799,19 +1759,6 @@ class Pln(_model): """ print(f"Fitting a Pln model with {self._description}") - @property - @_add_doc( - _model, - example=""" - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> print(pln.latent_variables.shape) - """, - ) - def latent_variables(self): - return self.latent_mean.detach() @property def number_of_parameters(self): @@ -1861,6 +1808,80 @@ class Pln(_model): """ raise AttributeError("You can not set the covariance for the Pln model.") + def _random_init_latent_sqrt_var(self): + if not hasattr(self, "_latent_sqrt_var"): + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE) + ) + + @property + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) + >>> pln.fit() + >>> print(pln.latent_variables.shape) + """, + ) + def latent_variables(self): + return self.latent_mean.detach() + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) + >>> pln.fit() + >>> elbo = pln.compute_elbo() + >>> print("elbo", elbo) + >>> print("loglike/n", pln.loglike/pln.n_samples) + """ + ) + def compute_elbo(self): + return profiled_elbo_pln( + self._endog, + self._exog, + self._offsets, + self._latent_mean, + self._latent_sqrt_var, + ) + @_add_doc(_model) + def _compute_elbo_b(self): + return profiled_elbo_pln( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + ) + @_add_doc(_model) + def _smart_init_model_parameters(self): + pass + # no model parameters since we are doing a profiled ELBO + + @_add_doc(_model) + def _random_init_model_parameters(self): + pass + # no model parameters since we are doing a profiled ELBO + @_add_doc(_model) + def _smart_init_latent_parameters(self): + self._random_init_latent_sqrt_var() + if not hasattr(self, "_latent_mean"): + self._latent_mean = torch.log(self._endog + (self._endog == 0)) + + @_add_doc(_model) + def _random_init_latent_parameters(self): + self._random_init_latent_sqrt_var() + if not hasattr(self, "_latent_mean"): + 
self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) + + @_add_doc(_model) + @property + def _list_of_parameters_needing_gradient(self): + return [self._latent_mean, self._latent_sqrt_var] class PlnPCAcollection: """ @@ -2655,7 +2676,7 @@ class PlnPCAcollection: return ".BIC, .AIC, .loglikes" -# Here, setting the value for each key in _dict_parameters +# Here, setting the value for each key _dict_parameters class PlnPCA(_model): """ PlnPCA object where the covariance has low rank. @@ -2881,19 +2902,6 @@ class PlnPCA(_model): variables_names=variables_names, indices_of_variables=indices_of_variables ) - def _check_if_rank_is_too_high(self): - """ - Check if the rank is too high and issue a warning if necessary. - """ - if self.dim < self.rank: - warning_string = ( - f"\nThe requested rank of approximation {self.rank} " - f"is greater than the number of variables {self.dim}. " - f"Setting rank to {self.dim}" - ) - warnings.warn(warning_string) - self._rank = self.dim - @property @_add_doc( _model, @@ -2909,29 +2917,7 @@ class PlnPCA(_model): def latent_mean(self) -> torch.Tensor: return self._cpu_attribute_or_none("_latent_mean") - @property - def latent_sqrt_var(self) -> torch.Tensor: - """ - Property representing the unsigned square root of the latent variance. - - Returns - ------- - torch.Tensor - The latent variance tensor. - """ - return self._cpu_attribute_or_none("_latent_sqrt_var") - - @property - def _latent_var(self) -> torch.Tensor: - """ - Property representing the latent variance. - Returns - ------- - torch.Tensor - The latent variance tensor. - """ - return self._latent_sqrt_var**2 def _endog_predictions(self): covariance_a_posteriori = torch.sum( @@ -3064,103 +3050,9 @@ class PlnPCA(_model): """ return {"coef": self.coef, "components": self.components} - def _smart_init_model_parameters(self): - """ - Initialize the model parameters smartly. - """ - if not hasattr(self, "_coef"): - super()._smart_init_coef() - if not hasattr(self, "_components"): - self._components = _init_components(self._endog, self._exog, self._rank) - def _random_init_model_parameters(self): - """ - Randomly initialize the model parameters. - """ - super()._random_init_coef() - self._components = torch.randn((self.dim, self._rank)).to(DEVICE) - def _random_init_latent_parameters(self): - """ - Randomly initialize the latent parameters. - """ - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) - ) - self._latent_mean = torch.ones((self.n_samples, self._rank)).to(DEVICE) - def _smart_init_latent_parameters(self): - """ - Initialize the latent parameters smartly. - """ - if not hasattr(self, "_latent_mean"): - self._latent_mean = ( - _init_latent_mean( - self._endog, - self._exog, - self._offsets, - self._coef, - self._components, - ) - .to(DEVICE) - .detach() - ) - if not hasattr(self, "_latent_sqrt_var"): - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) - ) - - @property - def _list_of_parameters_needing_gradient(self): - """ - Property representing the list of parameters needing gradient. - - Returns - ------- - List[torch.Tensor] - The list of parameters needing gradient. - """ - if self._coef is None: - return [self._components, self._latent_mean, self._latent_sqrt_var] - return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] - - def _compute_elbo_b(self) -> torch.Tensor: - """ - Compute the evidence lower bound (ELBO) with the current batch. 
- - Returns - ------- - torch.Tensor - The ELBO value on the current batch. - """ - return elbo_plnpca( - self._endog_b, - self._exog_b, - self._offsets_b, - self._latent_mean_b, - self._latent_sqrt_var_b, - self._components, - self._coef, - ) - - def compute_elbo(self) -> torch.Tensor: - """ - Compute the evidence lower bound (ELBO). - - Returns - ------- - torch.Tensor - The ELBO value. - """ - return elbo_plnpca( - self._endog, - self._exog, - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - self._components, - self._coef, - ) @property def number_of_parameters(self) -> int: @@ -3244,17 +3136,6 @@ class PlnPCA(_model): """ return f" {self.rank} principal component." - @property - def latent_variables(self) -> torch.Tensor: - """ - Property representing the latent variables. - - Returns - ------- - torch.Tensor - The latent variables of size (n_samples, dim). - """ - return torch.matmul(self._latent_mean, self._components.T).detach() @property def projected_latent_variables(self) -> torch.Tensor: @@ -3337,6 +3218,100 @@ class PlnPCA(_model): return self.projected_latent_variables return self.latent_variables + @property + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> endog = get_real_count_data(return_labels=False) + >>> pca = PlnPCA(endog,add_const = True) + >>> pca.fit() + >>> print(pca.latent_variables.shape) + """, + ) + def latent_variables(self) -> torch.Tensor: + return torch.matmul(self._latent_mean, self._components.T).detach() + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> endog = get_real_count_data(return_labels = False) + >>> pca = PlnPCA(endog,add_const = True) + >>> pca.fit() + >>> elbo = pca.compute_elbo() + >>> print("elbo", elbo) + >>> print("loglike/n", pln.loglike/pln.n_samples) + """ + ) + def compute_elbo(self) -> torch.Tensor: + return elbo_plnpca( + self._endog, + self._exog, + self._offsets, + self._latent_mean, + self._latent_sqrt_var, + self._components, + self._coef, + ) + @_add_doc(_model) + def _compute_elbo_b(self) -> torch.Tensor: + return elbo_plnpca( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + self._components, + self._coef, + ) + @_add_doc(_model) + def _random_init_model_parameters(self): + super()._random_init_coef() + self._components = torch.randn((self.dim, self._rank)).to(DEVICE) + + @_add_doc(_model) + def _smart_init_model_parameters(self): + if not hasattr(self, "_coef"): + super()._smart_init_coef() + if not hasattr(self, "_components"): + self._components = _init_components(self._endog, self._exog, self._rank) + + @_add_doc(_model) + def _random_init_latent_parameters(self): + """ + Randomly initialize the latent parameters. 
+ """ + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) + ) + self._latent_mean = torch.ones((self.n_samples, self._rank)).to(DEVICE) + + @_add_doc(_model) + def _smart_init_latent_parameters(self): + if not hasattr(self, "_latent_mean"): + self._latent_mean = ( + _init_latent_mean( + self._endog, + self._exog, + self._offsets, + self._coef, + self._components, + ) + .to(DEVICE) + .detach() + ) + if not hasattr(self, "_latent_sqrt_var"): + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) + ) + + @_add_doc(_model) + @property + def _list_of_parameters_needing_gradient(self): + if self._coef is None: + return [self._components, self._latent_mean, self._latent_sqrt_var] + return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] class ZIPln(_model): _NAME = "ZIPln" @@ -3347,6 +3322,10 @@ class ZIPln(_model): @_add_doc( _model, + params= """ + use_closed_form_prob: bool, optional + Whether or not use the closed formula for the latent probability + """ example=""" >>> from pyPLNmodels import ZIPln, get_real_count_data >>> endog= get_real_count_data() @@ -3532,7 +3511,26 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) - def latent_variables(self): + def latent_variables(self) -> tuple(torch.Tensor, torch.Tensor): + """ + Property representing the latent variables. Two latent + variables are available if exog is not None + + Returns + ------- + tuple(torch.Tensor, torch.Tensor) + The latent variables of a classic Pln model (size (n_samples, dim)) + and zero inflated latent variables of size (n_samples, dim). + Examples + -------- + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> zi = ZIPln(endog,add_const = True) + >>> zi.fit() + >>> latent_mean, latent_inflated = zi.latent_variables + >>> print(latent_mean.shape) + >>> print(latent_inflated.shape) + """ return self.latent_mean, self.latent_prob def _update_parameters(self): @@ -3624,6 +3622,7 @@ class ZIPln(_model): def number_of_parameters(self): return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) + @_add_doc(_model) @property def _list_of_parameters_needing_gradient(self): list_parameters = [ -- GitLab From cb994071627c65cd85c4ab0af189b9859e14662f Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 12:44:34 +0200 Subject: [PATCH 089/167] beginning --- pyPLNmodels/new_model.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 pyPLNmodels/new_model.py diff --git a/pyPLNmodels/new_model.py b/pyPLNmodels/new_model.py new file mode 100644 index 00000000..2d4acd45 --- /dev/null +++ b/pyPLNmodels/new_model.py @@ -0,0 +1,9 @@ +from pyPLNmodels import ZIPln, get_real_count_data + + +endog = get_real_count_data() +zi = ZIPln(endog, add_const = True) +zi.fit(nb_max_iteration = 10) +zi.show() + + -- GitLab From d6e31bdf76c42a56b2e85b2021858752d03b5f5e Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 13:21:08 +0200 Subject: [PATCH 090/167] fixed compilation erros. 
--- pyPLNmodels/models.py | 61 ++++++++++++++++++++++--------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index d89c9025..905b465b 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -1002,30 +1002,6 @@ class _model(ABC): ) self._latent_mean = latent_mean - @latent_sqrt_var.setter - @_array2tensor - def latent_sqrt_var( - self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame] - ): - """ - Setter for the latent variance property. - - Parameters - ---------- - latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The latent variance. - - Raises - ------ - ValueError - If the shape of the latent variance is incorrect. - """ - if latent_sqrt_var.shape != (self.n_samples, self.dim): - raise ValueError( - f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_sqrt_var.shape}" - ) - self._latent_sqrt_var = latent_sqrt_var - def _cpu_attribute_or_none(self, attribute_name): """ Get the CPU attribute or return None. @@ -1760,6 +1736,31 @@ class Pln(_model): print(f"Fitting a Pln model with {self._description}") + + @_model.latent_sqrt_var.setter + @_array2tensor + def latent_sqrt_var( + self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame] + ): + """ + Setter for the latent variance property. + + Parameters + ---------- + latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The latent variance. + + Raises + ------ + ValueError + If the shape of the latent variance is incorrect. + """ + if latent_sqrt_var.shape != (self.n_samples, self.dim): + raise ValueError( + f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_sqrt_var.shape}" + ) + self._latent_sqrt_var = latent_sqrt_var + @property def number_of_parameters(self): """ @@ -1878,8 +1879,8 @@ class Pln(_model): if not hasattr(self, "_latent_mean"): self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) - @_add_doc(_model) @property + @_add_doc(_model) def _list_of_parameters_needing_gradient(self): return [self._latent_mean, self._latent_sqrt_var] @@ -2950,7 +2951,7 @@ class PlnPCA(_model): ) self._latent_mean = latent_mean - @latent_sqrt_var.setter + @_model.latent_sqrt_var.setter @_array2tensor def latent_sqrt_var(self, latent_sqrt_var: torch.Tensor): """ @@ -3306,8 +3307,8 @@ class PlnPCA(_model): 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) ) - @_add_doc(_model) @property + @_add_doc(_model) def _list_of_parameters_needing_gradient(self): if self._coef is None: return [self._components, self._latent_mean, self._latent_sqrt_var] @@ -3325,7 +3326,7 @@ class ZIPln(_model): params= """ use_closed_form_prob: bool, optional Whether or not use the closed formula for the latent probability - """ + """, example=""" >>> from pyPLNmodels import ZIPln, get_real_count_data >>> endog= get_real_count_data() @@ -3511,7 +3512,7 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) - def latent_variables(self) -> tuple(torch.Tensor, torch.Tensor): + def latent_variables(self) -> tuple([torch.Tensor, torch.Tensor]): """ Property representing the latent variables. 
Two latent variables are available if exog is not None @@ -3622,8 +3623,8 @@ class ZIPln(_model): def number_of_parameters(self): return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) - @_add_doc(_model) @property + @_add_doc(_model) def _list_of_parameters_needing_gradient(self): list_parameters = [ self._latent_mean, -- GitLab From ce5cb6b0e6740d41c92acb213f5f4d7173c078ba Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 13:23:15 +0200 Subject: [PATCH 091/167] multiple same parameters in list_of_parameters of the zi --- pyPLNmodels/models.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 905b465b..47231871 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3629,9 +3629,7 @@ class ZIPln(_model): list_parameters = [ self._latent_mean, self._latent_sqrt_var, - self._coef_inflation, self._components, - self._coef, ] if self._use_closed_form_prob: list_parameters.append(self._latent_prob) -- GitLab From 4636aae36ece272d908bf5116c25ffe1ba76797f Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 13:26:54 +0200 Subject: [PATCH 092/167] error when returning batches of zi --- pyPLNmodels/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 47231871..f5d356ae 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3372,7 +3372,7 @@ class ZIPln(_model): def _return_batch(self, indices, beginning, end): pln_batch = super()._return_batch(indices, beginning, end) if self._use_closed_form_prob is False: - return pln_batch + torch.index_select(self._latent_prob, 0, to_take) + return (pln_batch + torch.index_select(self._latent_prob, 0, to_take)) return pln_batch def _return_batch(self, indices, beginning, end): -- GitLab From e9b8a2de5f6771ef1908c03758abfc2c19165db7 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 13:42:49 +0200 Subject: [PATCH 093/167] zi can be fitted now. 
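The projection step touched in this patch keeps the variational zero-inflation probabilities inside [0, 1] and then multiplies them by the `dirac` mask that is now carried along with each mini-batch (presumably the indicator of zero counts, so that only observed zeros can be inflated). A stand-alone equivalent of that projection, written with `torch.clamp` purely for illustration:

```py
import torch

def project_latent_prob(latent_prob: torch.Tensor, dirac: torch.Tensor) -> torch.Tensor:
    # Clamp the probabilities to [0, 1], then keep them only where dirac == 1.
    return torch.clamp(latent_prob, min=0.0, max=1.0) * dirac

endog = torch.tensor([[0.0, 3.0], [1.0, 0.0]])
dirac = (endog == 0).float()  # assumed convention: 1 where the observed count is zero
prob = torch.tensor([[1.2, 0.4], [-0.1, 0.9]])
print(project_latent_prob(prob, dirac))  # tensor([[1.0000, 0.0000], [0.0000, 0.9000]])
```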
--- pyPLNmodels/_utils.py | 2 +- pyPLNmodels/models.py | 38 ++++++++++++++------------------------ 2 files changed, 15 insertions(+), 25 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 053a6448..f5f02942 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -112,7 +112,7 @@ class _CriterionArgs: ax = plt.gca() if ax is None else ax ax.plot( self.running_times, - self.criterions, + self.criterion_list, label="Delta", ) ax.set_yscale("log") diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index f5d356ae..a5ece2aa 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -701,7 +701,7 @@ class _model(ABC): "Maximum number of iterations reached : ", self._criterion_args.iteration_number, "last criterion = ", - np.round(self._criterion_args.criterions[-1], 8), + np.round(self._criterion_args.criterion_list[-1], 8), ) def _print_stats(self): @@ -710,7 +710,7 @@ class _model(ABC): """ print("-------UPDATE-------") print("Iteration number: ", self._criterion_args.iteration_number) - print("Criterion: ", np.round(self._criterion_args.criterions[-1], 8)) + print("Criterion: ", np.round(self._criterion_args.criterion_list[-1], 8)) print("ELBO:", np.round(self._criterion_args._elbos_list[-1], 6)) def _update_criterion_args(self, loss): @@ -3366,28 +3366,18 @@ class ZIPln(_model): def _extract_batch(self, batch): super()._extract_batch(batch) + self._dirac_b = batch[5] if self._use_closed_form_prob is False: - self._latent_prob_b = batch[5] + self._latent_prob_b = batch[6] def _return_batch(self, indices, beginning, end): pln_batch = super()._return_batch(indices, beginning, end) + to_take = torch.tensor(indices[beginning:end]).to(DEVICE) + batch = pln_batch + (torch.index_select(self._dirac, 0, to_take),) if self._use_closed_form_prob is False: - return (pln_batch + torch.index_select(self._latent_prob, 0, to_take)) - return pln_batch + return batch + (torch.index_select(self._latent_prob, 0, to_take),) + return batch - def _return_batch(self, indices, beginning, end): - to_take = torch.tensor(indices[beginning:end]).to(DEVICE) - if self._exog is not None: - exog_b = torch.index_select(self._exog, 0, to_take) - else: - exog_b = None - return ( - torch.index_select(self._endog, 0, to_take), - exog_b, - torch.index_select(self._offsets, 0, to_take), - torch.index_select(self._latent_mean, 0, to_take), - torch.index_select(self._latent_sqrt_var, 0, to_take), - ) @classmethod @_add_doc( @@ -3542,15 +3532,15 @@ class ZIPln(_model): """ Project the latent probability since it must be between 0 and 1. """ - if self.use_closed_form_prob is False: + if self._use_closed_form_prob is False: with torch.no_grad(): - self._latent_prob = torch.maximum( - self._latent_prob, torch.tensor([0]), out=self._latent_prob + self._latent_prob_b = torch.maximum( + self._latent_prob_b, torch.tensor([0]), out=self._latent_prob_b ) - self._latent_prob = torch.minimum( - self._latent_prob, torch.tensor([1]), out=self._latent_prob + self._latent_prob_b = torch.minimum( + self._latent_prob, torch.tensor([1]), out=self._latent_prob_b ) - self._latent_prob *= self._dirac + self._latent_prob_b *= self._dirac_b @property def covariance(self) -> torch.Tensor: -- GitLab From 4fe2b3d24e42536c09dc2177b882989aff31e336 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 18:24:18 +0200 Subject: [PATCH 094/167] add contributing, readme and model from new_model branch. 
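The CONTRIBUTING guide added in this patch states that a correctly implemented model should increase its ELBO over a short fit. One way to check this by hand is sketched below; it relies on the internal `_criterion_args._elbos_list` attribute, which is an implementation detail used here only for illustration and may change:

```py
# Rough sanity check for the rule stated in CONTRIBUTING.md: a few fitting
# iterations should improve the ELBO.
from pyPLNmodels import ZIPln, get_real_count_data

endog = get_real_count_data()
model = ZIPln(endog, add_const=True)
model.fit(nb_max_iteration=10, tol=0)
elbos = model._criterion_args._elbos_list  # internal attribute, illustration only
assert elbos[-1] > elbos[0], "the ELBO should have increased during fitting"
```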
--- CONTRIBUTING.md | 71 +++- README.md | 37 ++- pyPLNmodels/models.py | 734 ++++++++++++++++++++---------------------- 3 files changed, 438 insertions(+), 404 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index faf13f3b..e1fb39dc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,15 +1,72 @@ -# Clone the repo +# What to work on +A public roadmap will be available soon. + + +## Fork/clone/pull + +The typical workflow for contributing is: + +1. Fork the `main` branch from the [GitLab repository](https://forgemia.inra.fr/bbatardiere/pyplnmodels). +2. Clone your fork locally. +3. Run `pip install pre-commit` if pre-commit is not already installed. +4. Inside the repository, run 'pre-commit install'. +5. Commit changes. +6. Push the changes to your fork. +7. Send a pull request from your fork back to the original `main` branch. + +## How to implement a new model +You can implement a new model `newmodel` by inheriting from the abstract `_model` class in the `models` module. +The `newmodel` class should contains at least the following code: ``` -git clone git@forgemia.inra.fr:bbatardiere/pyplnmodels -``` +class newmodel(_model): + _NAME="" + def _random_init_latent_sqrt_var(self): + "Implement here" + + @property + def latent_variables(self): + "Implement here" -# Install precommit + def compute_elbo(self): + "Implement here" -In the directory: + def _compute_elbo_b(self): + "Implement here" + def _smart_init_model_parameters(self): + "Implement here" + + def _random_init_model_parameters(self): + "Implement here" + + def _smart_init_latent_parameters(self): + "Implement here" + + def _random_init_latent_parameters(self): + "Implement here" + + @property + def _list_of_parameters_needing_gradient(self): + "Implement here" + @property + def _description(self): + "Implement here" + + @property + def number_of_parameters(self): + "Implement here" ``` -pre-commit install +Then, add `newmodel` in the `__init__.py` file of the pyPLNmodels module. +If `newmodel` is well implemented, running ``` +from pyPLNmodels import newmodel, get_real_count_data -If not found use `pip install pre-commit` before this command. +endog = get_real_count_data() +zi = newmodel(endog, add_const = True) +zi.fit(nb_max_iteration = 10, tol = 0) +``` +should increase the elbo of the model. You should document your functions with +[numpy-style +docstrings](https://numpydoc.readthedocs.io/en/latest/format.html). You can use +the `_add_doc` decorator to inherit the docstrings of the `_model` class. diff --git a/README.md b/README.md index 9401cfe6..f8adaa3f 100644 --- a/README.md +++ b/README.md @@ -16,22 +16,10 @@ <!-- > slides](https://pln-team.github.io/slideshow/) for a --> <!-- > comprehensive introduction. --> -## Getting started -The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see next. +## Getting started +The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see the quickstart next. -## Installation - -**pyPLNmodels** is available on -[pypi](https://pypi.org/project/pyPLNmodels/). The development -version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). 
- -### Package installation - -``` -pip install pyPLNmodels -``` - -## Usage and main fitting functions +## âš¡ï¸ Quickstart The package comes with an ecological data set to present the functionality ``` @@ -61,7 +49,24 @@ transformed_data = pln.transform() ``` -## References +## 🛠Installation + +**pyPLNmodels** is available on +[pypi](https://pypi.org/project/pyPLNmodels/). The development +version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). + +### Package installation + +``` +pip install pyPLNmodels +``` + +## 👠Contributing + +Feel free to contribute, but read the [CONTRIBUTING.md](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/blob/main/CONTRIBUTING.md) first. A public roadmap will be available soon. + + +## âš¡ï¸ Citations Please cite our work using the following references: - J. Chiquet, M. Mariadassou and S. Robin: Variational inference for diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index e8970fec..e8b316a5 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -324,20 +324,6 @@ class _model(ABC): self._coef = None self._coef = torch.randn((self.nb_cov, self.dim), device=DEVICE) - @abstractmethod - def _random_init_model_parameters(self): - """ - Abstract method to randomly initialize model parameters. - """ - pass - - @abstractmethod - def _random_init_latent_parameters(self): - """ - Abstract method to randomly initialize latent parameters. - """ - pass - def _smart_init_latent_parameters(self): """ Initialize latent parameters smartly. @@ -369,53 +355,6 @@ class _model(ABC): for parameter in self._list_of_parameters_needing_gradient: parameter.requires_grad_(True) - @property - @abstractmethod - def _list_of_parameters_needing_gradient(self): - """ - A list containing all the parameters that need to be upgraded via a gradient step. - - Returns - ------- - List[torch.Tensor] - List of parameters needing gradient. - """ - - def _print_beginning_message(self) -> str: - """ - Method for printing the beginning message. - """ - print(f"Fitting a {self._NAME} model with {self._description} \n") - - @abstractmethod - def _endog_predictions(self): - pass - - @abstractmethod - def number_of_parameters(self): - pass - - @abstractmethod - def _compute_elbo_b(self): - pass - - @property - @abstractmethod - def covariance(self): - pass - - @covariance.setter - @abstractmethod - def covariance(self, covariance): - pass - - @property - @abstractmethod - def _description(self): - """ - Describes the model and what it does. - """ - def fit( self, nb_max_iteration: int = 50000, @@ -615,7 +554,7 @@ class _model(ABC): return pca @property - def latent_var(self) -> torch.Tensor: + def latent_variance(self) -> torch.Tensor: """ Property representing the latent variance. @@ -726,12 +665,16 @@ class _model(ABC): plt.show() @property - @abstractmethod - def latent_variables(self): + def _latent_var(self) -> torch.Tensor: """ - Abstract property representing the latent variables. + Property representing the latent variance. + + Returns + ------- + torch.Tensor + The latent variance tensor. """ - pass + return self._latent_sqrt_var**2 def _print_end_of_fitting_message(self, stop_condition: bool, tol: float): """ @@ -790,14 +733,6 @@ class _model(ABC): """ pass - @abstractmethod - def compute_elbo(self): - """ - Compute the Evidence Lower BOund (ELBO) that will be maximized - by pytorch. - """ - pass - def display_covariance(self, ax=None, savefig=False, name_file=""): """ Display the covariance matrix. 
@@ -1062,30 +997,6 @@ class _model(ABC): ) self._latent_mean = latent_mean - @latent_sqrt_var.setter - @_array2tensor - def latent_sqrt_var( - self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame] - ): - """ - Setter for the latent variance property. - - Parameters - ---------- - latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame] - The latent variance. - - Raises - ------ - ValueError - If the shape of the latent variance is incorrect. - """ - if latent_sqrt_var.shape != (self.n_samples, self.dim): - raise ValueError( - f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_sqrt_var.shape}" - ) - self._latent_sqrt_var = latent_sqrt_var - def _cpu_attribute_or_none(self, attribute_name): """ Get the CPU attribute or return None. @@ -1421,8 +1332,95 @@ class _model(ABC): ax.legend() return ax + def _print_beginning_message(self): + """ + Method for printing the beginning message. + """ + print(f"Fitting a {self._NAME} model with {self._description}") + + @property + @abstractmethod + def latent_variables(self) -> torch.Tensor: + """ + Property representing the latent variables. + + Returns + ------- + torch.Tensor + The latent variables of size (n_samples, dim). + """ + + @abstractmethod + def compute_elbo(self): + """ + Compute the Evidence Lower BOund (ELBO) that will be maximized + by pytorch. + + Returns + ------- + torch.Tensor + The computed ELBO. + """ + + @abstractmethod + def _compute_elbo_b(self): + """ + Compute the Evidence Lower BOund (ELBO) for the current mini-batch. + Returns + ------- + torch.Tensor + The computed ELBO on the current batch. + """ + + @abstractmethod + def _random_init_model_parameters(self): + """ + Abstract method to randomly initialize model parameters. + """ + + @abstractmethod + def _random_init_latent_parameters(self): + """ + Abstract method to randomly initialize latent parameters. + """ + + @abstractmethod + def _smart_init_latent_parameters(self): + """ + Method for smartly initializing the latent parameters. + """ + + @abstractmethod + def _smart_init_model_parameters(self): + """ + Method for smartly initializing the model parameters. + """ + + @property + @abstractmethod + def _list_of_parameters_needing_gradient(self): + """ + A list containing all the parameters that need to be upgraded via a gradient step. + + Returns + ------- + List[torch.Tensor] + List of parameters needing gradient. + """ + + @property + @abstractmethod + def _description(self): + pass + + @property + @abstractmethod + def number_of_parameters(self): + """ + Number of parameters of the model. + """ + -# need to do a good init for M and S class Pln(_model): """ Pln class. @@ -1511,15 +1509,13 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): - endog, exog, offsets = _extract_data_from_formula(formula, data) - return cls( - endog, - exog=exog, - offsets=offsets, + super().from_formula( + cls=cls, + formula=formula, + data=data, offsets_formula=offsets_formula, dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, - add_const=False, ) @_add_doc( @@ -1697,35 +1693,6 @@ class Pln(_model): self._offsets + self._latent_mean + 1 / 2 * self._latent_sqrt_var**2 ) - def _smart_init_latent_parameters(self): - """ - Method for smartly initializing the latent parameters. - """ - self._random_init_latent_parameters() - - def _random_init_latent_parameters(self): - """ - Method for randomly initializing the latent parameters. 
- """ - if not hasattr(self, "_latent_sqrt_var"): - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE) - ) - if not hasattr(self, "_latent_mean"): - self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) - - @property - def _list_of_parameters_needing_gradient(self): - """ - Property representing the list of parameters needing gradient. - - Returns - ------- - list - The list of parameters needing gradient. - """ - return [self._latent_mean, self._latent_sqrt_var] - def _get_max_components(self): """ Method for getting the maximum number of components. @@ -1737,61 +1704,6 @@ class Pln(_model): """ return self.dim - def compute_elbo(self): - """ - Method for computing the evidence lower bound (ELBO). - - Returns - ------- - torch.Tensor - The computed ELBO. - Examples - -------- - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> elbo = pln.compute_elbo() - >>> print("elbo", elbo) - >>> print("loglike/n", pln.loglike/pln.n_samples) - """ - return profiled_elbo_pln( - self._endog, - self._exog, - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - ) - - def _compute_elbo_b(self): - """ - Method for computing the evidence lower bound (ELBO) on the current batch. - - Returns - ------- - torch.Tensor - The computed ELBO on the current batch. - """ - return profiled_elbo_pln( - self._endog_b, - self._exog_b, - self._offsets_b, - self._latent_mean_b, - self._latent_sqrt_var_b, - ) - - def _smart_init_model_parameters(self): - """ - Method for smartly initializing the model parameters. - """ - # no model parameters since we are doing a profiled ELBO - - def _random_init_model_parameters(self): - """ - Method for randomly initializing the model parameters. - """ - # no model parameters since we are doing a profiled ELBO - @property def _coef(self): """ @@ -1829,19 +1741,29 @@ class Pln(_model): covariances = components_var @ (sk_components.T.unsqueeze(0)) return covariances - @property - @_add_doc( - _model, - example=""" - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> print(pln.latent_variables.shape) - """, - ) - def latent_variables(self): - return self.latent_mean.detach() + @_model.latent_sqrt_var.setter + @_array2tensor + def latent_sqrt_var( + self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame] + ): + """ + Setter for the latent variance property. + + Parameters + ---------- + latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The latent variance. + + Raises + ------ + ValueError + If the shape of the latent variance is incorrect. + """ + if latent_sqrt_var.shape != (self.n_samples, self.dim): + raise ValueError( + f"Wrong shape. 
Expected {self.n_samples, self.dim}, got {latent_sqrt_var.shape}" + ) + self._latent_sqrt_var = latent_sqrt_var @property def number_of_parameters(self): @@ -1891,6 +1813,84 @@ class Pln(_model): """ raise AttributeError("You can not set the covariance for the Pln model.") + def _random_init_latent_sqrt_var(self): + if not hasattr(self, "_latent_sqrt_var"): + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE) + ) + + @property + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) + >>> pln.fit() + >>> print(pln.latent_variables.shape) + """, + ) + def latent_variables(self): + return self.latent_mean.detach() + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import Pln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> pln = Pln(endog,add_const = True) + >>> pln.fit() + >>> elbo = pln.compute_elbo() + >>> print("elbo", elbo) + >>> print("loglike/n", pln.loglike/pln.n_samples) + """, + ) + def compute_elbo(self): + return profiled_elbo_pln( + self._endog, + self._exog, + self._offsets, + self._latent_mean, + self._latent_sqrt_var, + ) + + @_add_doc(_model) + def _compute_elbo_b(self): + return profiled_elbo_pln( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + ) + + @_add_doc(_model) + def _smart_init_model_parameters(self): + pass + # no model parameters since we are doing a profiled ELBO + + @_add_doc(_model) + def _random_init_model_parameters(self): + pass + # no model parameters since we are doing a profiled ELBO + + @_add_doc(_model) + def _smart_init_latent_parameters(self): + self._random_init_latent_sqrt_var() + if not hasattr(self, "_latent_mean"): + self._latent_mean = torch.log(self._endog + (self._endog == 0)) + + @_add_doc(_model) + def _random_init_latent_parameters(self): + self._random_init_latent_sqrt_var() + if not hasattr(self, "_latent_mean"): + self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) + + @property + @_add_doc(_model) + def _list_of_parameters_needing_gradient(self): + return [self._latent_mean, self._latent_sqrt_var] + class PlnPCAcollection: """ @@ -2246,6 +2246,17 @@ class PlnPCAcollection: """ return [model.rank for model in self.values()] + def _print_beginning_message(self) -> str: + """ + Method for printing the beginning message. + + Returns + ------- + str + The beginning message. + """ + return f"Adjusting {len(self.ranks)} Pln models for PCA analysis \n" + @property def dim(self) -> int: """ @@ -2674,7 +2685,7 @@ class PlnPCAcollection: return ".BIC, .AIC, .loglikes" -# Here, setting the value for each key in _dict_parameters +# Here, setting the value for each key _dict_parameters class PlnPCA(_model): """ PlnPCA object where the covariance has low rank. @@ -2900,19 +2911,6 @@ class PlnPCA(_model): variables_names=variables_names, indices_of_variables=indices_of_variables ) - def _check_if_rank_is_too_high(self): - """ - Check if the rank is too high and issue a warning if necessary. - """ - if self.dim < self.rank: - warning_string = ( - f"\nThe requested rank of approximation {self.rank} " - f"is greater than the number of variables {self.dim}. 
" - f"Setting rank to {self.dim}" - ) - warnings.warn(warning_string) - self._rank = self.dim - @property @_add_doc( _model, @@ -2928,30 +2926,6 @@ class PlnPCA(_model): def latent_mean(self) -> torch.Tensor: return self._cpu_attribute_or_none("_latent_mean") - @property - def latent_sqrt_var(self) -> torch.Tensor: - """ - Property representing the unsigned square root of the latent variance. - - Returns - ------- - torch.Tensor - The latent variance tensor. - """ - return self._cpu_attribute_or_none("_latent_sqrt_var") - - @property - def _latent_var(self) -> torch.Tensor: - """ - Property representing the latent variance. - - Returns - ------- - torch.Tensor - The latent variance tensor. - """ - return self._latent_sqrt_var**2 - def _endog_predictions(self): covariance_a_posteriori = torch.sum( (self._components**2).unsqueeze(0) @@ -2983,7 +2957,7 @@ class PlnPCA(_model): ) self._latent_mean = latent_mean - @latent_sqrt_var.setter + @_model.latent_sqrt_var.setter @_array2tensor def latent_sqrt_var(self, latent_sqrt_var: torch.Tensor): """ @@ -3076,104 +3050,6 @@ class PlnPCA(_model): """ return {"coef": self.coef, "components": self.components} - def _smart_init_model_parameters(self): - """ - Initialize the model parameters smartly. - """ - if not hasattr(self, "_coef"): - super()._smart_init_coef() - if not hasattr(self, "_components"): - self._components = _init_components(self._endog, self._exog, self._rank) - - def _random_init_model_parameters(self): - """ - Randomly initialize the model parameters. - """ - super()._random_init_coef() - self._components = torch.randn((self.dim, self._rank)).to(DEVICE) - - def _random_init_latent_parameters(self): - """ - Randomly initialize the latent parameters. - """ - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) - ) - self._latent_mean = torch.ones((self.n_samples, self._rank)).to(DEVICE) - - def _smart_init_latent_parameters(self): - """ - Initialize the latent parameters smartly. - """ - if not hasattr(self, "_latent_mean"): - self._latent_mean = ( - _init_latent_mean( - self._endog, - self._exog, - self._offsets, - self._coef, - self._components, - ) - .to(DEVICE) - .detach() - ) - if not hasattr(self, "_latent_sqrt_var"): - self._latent_sqrt_var = ( - 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) - ) - - @property - def _list_of_parameters_needing_gradient(self): - """ - Property representing the list of parameters needing gradient. - - Returns - ------- - List[torch.Tensor] - The list of parameters needing gradient. - """ - if self._coef is None: - return [self._components, self._latent_mean, self._latent_sqrt_var] - return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] - - def _compute_elbo_b(self) -> torch.Tensor: - """ - Compute the evidence lower bound (ELBO) with the current batch. - - Returns - ------- - torch.Tensor - The ELBO value on the current batch. - """ - return elbo_plnpca( - self._endog_b, - self._exog_b, - self._offsets_b, - self._latent_mean_b, - self._latent_sqrt_var_b, - self._components, - self._coef, - ) - - def compute_elbo(self) -> torch.Tensor: - """ - Compute the evidence lower bound (ELBO). - - Returns - ------- - torch.Tensor - The ELBO value. 
- """ - return elbo_plnpca( - self._endog, - self._exog, - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - self._components, - self._coef, - ) - @property def number_of_parameters(self) -> int: """ @@ -3247,7 +3123,7 @@ class PlnPCA(_model): @property def _description(self) -> str: """ - Description output when fitting and printing the model. + Property representing the description. Returns ------- @@ -3256,18 +3132,6 @@ class PlnPCA(_model): """ return f" {self.rank} principal component." - @property - def latent_variables(self) -> torch.Tensor: - """ - Property representing the latent variables. - - Returns - ------- - torch.Tensor - The latent variables of size (n_samples, dim). - """ - return torch.matmul(self._latent_mean, self._components.T).detach() - @property def projected_latent_variables(self) -> torch.Tensor: """ @@ -3349,6 +3213,103 @@ class PlnPCA(_model): return self.projected_latent_variables return self.latent_variables + @property + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> endog = get_real_count_data(return_labels=False) + >>> pca = PlnPCA(endog,add_const = True) + >>> pca.fit() + >>> print(pca.latent_variables.shape) + """, + ) + def latent_variables(self) -> torch.Tensor: + return torch.matmul(self._latent_mean, self._components.T).detach() + + @_add_doc( + _model, + example=""" + >>> from pyPLNmodels import PlnPCA, get_real_count_data + >>> endog = get_real_count_data(return_labels = False) + >>> pca = PlnPCA(endog,add_const = True) + >>> pca.fit() + >>> elbo = pca.compute_elbo() + >>> print("elbo", elbo) + >>> print("loglike/n", pln.loglike/pln.n_samples) + """, + ) + def compute_elbo(self) -> torch.Tensor: + return elbo_plnpca( + self._endog, + self._exog, + self._offsets, + self._latent_mean, + self._latent_sqrt_var, + self._components, + self._coef, + ) + + @_add_doc(_model) + def _compute_elbo_b(self) -> torch.Tensor: + return elbo_plnpca( + self._endog_b, + self._exog_b, + self._offsets_b, + self._latent_mean_b, + self._latent_sqrt_var_b, + self._components, + self._coef, + ) + + @_add_doc(_model) + def _random_init_model_parameters(self): + super()._random_init_coef() + self._components = torch.randn((self.dim, self._rank)).to(DEVICE) + + @_add_doc(_model) + def _smart_init_model_parameters(self): + if not hasattr(self, "_coef"): + super()._smart_init_coef() + if not hasattr(self, "_components"): + self._components = _init_components(self._endog, self._exog, self._rank) + + @_add_doc(_model) + def _random_init_latent_parameters(self): + """ + Randomly initialize the latent parameters. 
+ """ + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) + ) + self._latent_mean = torch.ones((self.n_samples, self._rank)).to(DEVICE) + + @_add_doc(_model) + def _smart_init_latent_parameters(self): + if not hasattr(self, "_latent_mean"): + self._latent_mean = ( + _init_latent_mean( + self._endog, + self._exog, + self._offsets, + self._coef, + self._components, + ) + .to(DEVICE) + .detach() + ) + if not hasattr(self, "_latent_sqrt_var"): + self._latent_sqrt_var = ( + 1 / 2 * torch.ones((self.n_samples, self._rank)).to(DEVICE) + ) + + @property + @_add_doc(_model) + def _list_of_parameters_needing_gradient(self): + if self._coef is None: + return [self._components, self._latent_mean, self._latent_sqrt_var] + return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] + class ZIPln(_model): _NAME = "ZIPln" @@ -3359,6 +3320,10 @@ class ZIPln(_model): @_add_doc( _model, + params=""" + use_closed_form_prob: bool, optional + Whether or not use the closed formula for the latent probability + """, example=""" >>> from pyPLNmodels import ZIPln, get_real_count_data >>> endog= get_real_count_data() @@ -3398,28 +3363,17 @@ class ZIPln(_model): def _extract_batch(self, batch): super()._extract_batch(batch) + self._dirac_b = batch[5] if self._use_closed_form_prob is False: - self._latent_prob_b = batch[5] + self._latent_prob_b = batch[6] def _return_batch(self, indices, beginning, end): pln_batch = super()._return_batch(indices, beginning, end) - if self._use_closed_form_prob is False: - return pln_batch + torch.index_select(self._latent_prob, 0, to_take) - return pln_batch - - def _return_batch(self, indices, beginning, end): to_take = torch.tensor(indices[beginning:end]).to(DEVICE) - if self._exog is not None: - exog_b = torch.index_select(self._exog, 0, to_take) - else: - exog_b = None - return ( - torch.index_select(self._endog, 0, to_take), - exog_b, - torch.index_select(self._offsets, 0, to_take), - torch.index_select(self._latent_mean, 0, to_take), - torch.index_select(self._latent_sqrt_var, 0, to_take), - ) + batch = pln_batch + (torch.index_select(self._dirac, 0, to_take),) + if self._use_closed_form_prob is False: + return batch + (torch.index_select(self._latent_prob, 0, to_take),) + return batch @classmethod @_add_doc( @@ -3446,7 +3400,7 @@ class ZIPln(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, - use_closed_form: bool = True, + use_closed_form_prob: bool = True, ): endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( @@ -3457,7 +3411,7 @@ class ZIPln(_model): dict_initialization=dict_initialization, take_log_offsets=take_log_offsets, add_const=False, - use_closed_form=use_closed_form, + use_closed_form_prob=use_closed_form_prob, ) @_add_doc( @@ -3508,7 +3462,7 @@ class ZIPln(_model): @property def _description(self): - return " full covariance model and zero-inflation." + return "with full covariance model and zero-inflation." def _random_init_model_parameters(self): super()._random_init_model_parameters() @@ -3541,7 +3495,26 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) - def latent_variables(self): + def latent_variables(self) -> tuple([torch.Tensor, torch.Tensor]): + """ + Property representing the latent variables. 
Two latent + variables are available if exog is not None + + Returns + ------- + tuple(torch.Tensor, torch.Tensor) + The latent variables of a classic Pln model (size (n_samples, dim)) + and zero inflated latent variables of size (n_samples, dim). + Examples + -------- + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog, labels = get_real_count_data(return_labels = True) + >>> zi = ZIPln(endog,add_const = True) + >>> zi.fit() + >>> latent_mean, latent_inflated = zi.latent_variables + >>> print(latent_mean.shape) + >>> print(latent_inflated.shape) + """ return self.latent_mean, self.latent_prob def _update_parameters(self): @@ -3552,15 +3525,15 @@ class ZIPln(_model): """ Project the latent probability since it must be between 0 and 1. """ - if self.use_closed_form_prob is False: + if self._use_closed_form_prob is False: with torch.no_grad(): - self._latent_prob = torch.maximum( - self._latent_prob, torch.tensor([0]), out=self._latent_prob + self._latent_prob_b = torch.maximum( + self._latent_prob_b, torch.tensor([0]), out=self._latent_prob_b ) - self._latent_prob = torch.minimum( - self._latent_prob, torch.tensor([1]), out=self._latent_prob + self._latent_prob_b = torch.minimum( + self._latent_prob, torch.tensor([1]), out=self._latent_prob_b ) - self._latent_prob *= self._dirac + self._latent_prob_b *= self._dirac_b @property def covariance(self) -> torch.Tensor: @@ -3634,13 +3607,12 @@ class ZIPln(_model): return self.dim * (2 * self.nb_cov + (self.dim + 1) / 2) @property + @_add_doc(_model) def _list_of_parameters_needing_gradient(self): list_parameters = [ self._latent_mean, self._latent_sqrt_var, - self._coef_inflation, self._components, - self._coef, ] if self._use_closed_form_prob: list_parameters.append(self._latent_prob) -- GitLab From 757f76c88d034020d5da4fc700869964b4d8a4dc Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 13 Oct 2023 18:28:27 +0200 Subject: [PATCH 095/167] typo in the contributin --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e1fb39dc..530ced7b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -63,8 +63,8 @@ If `newmodel` is well implemented, running from pyPLNmodels import newmodel, get_real_count_data endog = get_real_count_data() -zi = newmodel(endog, add_const = True) -zi.fit(nb_max_iteration = 10, tol = 0) +model = newmodel(endog, add_const = True) +model.fit(nb_max_iteration = 10, tol = 0) ``` should increase the elbo of the model. You should document your functions with [numpy-style -- GitLab From 20417af7f3d00b9021fc45e8ac7ec0a98301eb20 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 16 Oct 2023 08:16:32 +0200 Subject: [PATCH 096/167] continue to add the contributing and create the tests for the ZI. 
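For reference, the initializer pattern described in the updated guide, spelled out consistently for a model with one model parameter `coef` and latent parameters `latent_mean` and `latent_var`; the `smart_init_*` and `random_init_*` helpers are placeholders for whatever initialization the new model needs, not functions of the package:

```py
# Consistent spelling of the initializer pattern from the guide; the
# smart_init_* / random_init_* helpers are placeholders, not package functions.
class newmodel(_model):
    @property
    def model_parameters(self):
        return {"coef": self.coef}

    @property
    def latent_parameters(self):
        return {"latent_mean": self.latent_mean, "latent_var": self.latent_var}

    def _smart_init_model_parameters(self):
        self._coef = smart_init_coef()

    def _random_init_model_parameters(self):
        self._coef = random_init_coef()

    def _smart_init_latent_parameters(self):
        self._latent_mean = smart_init_latent_mean()
        self._latent_var = smart_init_latent_var()

    def _random_init_latent_parameters(self):
        self._latent_mean = random_init_latent_mean()
        self._latent_var = random_init_latent_var()

    @property
    def coef(self):
        return self._coef

    @property
    def latent_mean(self):
        return self._latent_mean

    @property
    def latent_var(self):
        return self._latent_var
```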
--- CONTRIBUTING.md | 75 +++++++++-- pyPLNmodels/models.py | 140 +++++++++++--------- tests/conftest.py | 12 +- tests/create_readme_and_docstrings_tests.py | 2 +- 4 files changed, 153 insertions(+), 76 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 530ced7b..2f718217 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,42 +21,91 @@ The `newmodel` class should contains at least the following code: ``` class newmodel(_model): _NAME="" - def _random_init_latent_sqrt_var(self): + @property + def latent_variables(self) -> torch.Tensor: "Implement here" - @property - def latent_variables(self): + def compute_elbo(self) -> torch.Tensor: "Implement here" - def compute_elbo(self): + def _compute_elbo_b(self) -> torch.Tensor: "Implement here" - def _compute_elbo_b(self): + def _smart_init_model_parameters(self)-> None: "Implement here" - def _smart_init_model_parameters(self): + def _random_init_model_parameters(self)-> None: "Implement here" - def _random_init_model_parameters(self): + def _smart_init_latent_parameters(self)-> None: "Implement here" - def _smart_init_latent_parameters(self): + def _random_init_latent_parameters(self)-> None: "Implement here" - def _random_init_latent_parameters(self): + @property + def _list_of_parameters_needing_gradient(self)-> list: + "Implement here" + @property + def _description(self)-> str: "Implement here" @property - def _list_of_parameters_needing_gradient(self): + def number_of_parameters(self) -> int: "Implement here" + @property - def _description(self): + def model_parameters(self)-> Dict[str, torch.Tensor]: "Implement here" @property - def number_of_parameters(self): + def latent_parameters(self)-> Dict[str, torch.Tensor]: "Implement here" ``` +Each value of the 'latent_parameters' dict should be implemented (and protected) both in the +`_random_init_latent_parameters` and '_smart_init_latent_parameters'. +Each value of the 'model_parameters' dict should be implemented (and protected) both in the +`_random_init_model_parameters` and '_smart_init_model_parameters'. +For example, if you have one model parameters `coef` and latent_parameters `latent_mean` and `latent_var`, you should implement such as +```py +class newmodel(_model): + @property + def model_parameters(self) -> Dict[str, torch.Tensor]: + return {"coef":self.coef} + @property + def latent_parameters(self) -> Dict[str, torch.Tensor]: + return {"latent_mean":self.latent_mean, "latent_var":self.latent_var} + + def _random_init_latent_parameters(self): + self._latent_mean = init_latent_mean() + self._latent_var = init_latent_var() + + @property + def _smart_init_model_parameters(self): + self._latent_mean = random_init_latent_mean() + self._latent_var = random_init_latent_var() + + @property + def latent_var(self): + return self._latent_var + + @property + def latent_mean(self): + return self._latent_mean + + def _random_init_model_parameters(self): + self._coef = init_coef() + + def _smart_init_model_parameters(self): + self._coef = random_init_latent_coef() + + @property + def coef(self): + return self._coef +``` + + + Then, add `newmodel` in the `__init__.py` file of the pyPLNmodels module. If `newmodel` is well implemented, running ``` @@ -69,4 +118,4 @@ model.fit(nb_max_iteration = 10, tol = 0) should increase the elbo of the model. You should document your functions with [numpy-style docstrings](https://numpydoc.readthedocs.io/en/latest/format.html). You can use -the `_add_doc` decorator to inherit the docstrings of the `_model` class. 
+the `_add_doc` decorator (implemented in the `_utils` module) to inherit the docstrings of the `_model` class. diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index e8b316a5..9228de82 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -360,7 +360,7 @@ class _model(ABC): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-8, + tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, batch_size=None, @@ -872,33 +872,6 @@ class _model(ABC): """ return -self.loglike + self.number_of_parameters - @property - def latent_parameters(self): - """ - Property representing the latent parameters. - - Returns - ------- - dict - The dictionary of latent parameters. - """ - return { - "latent_sqrt_var": self.latent_sqrt_var, - "latent_mean": self.latent_mean, - } - - @property - def model_parameters(self): - """ - Property representing the model parameters. - - Returns - ------- - dict - The dictionary of model parameters. - """ - return {"coef": self.coef, "covariance": self.covariance} - @property def dict_data(self): """ @@ -1284,18 +1257,6 @@ class _model(ABC): """ return f"{self._NAME}_nbcov_{self.nb_cov}_dim_{self.dim}" - @property - def _path_to_directory(self): - """ - Property representing the path to the directory. - - Returns - ------- - str - The path to the directory. - """ - return "" - def plot_expected_vs_true(self, ax=None, colors=None): """ Plot the predicted value of the endog against the endog. @@ -1420,6 +1381,30 @@ class _model(ABC): Number of parameters of the model. """ + @property + @abstractmethod + def model_parameters(self) -> Dict[str, torch.Tensor]: + """ + Property representing the model parameters. + + Returns + ------- + dict + The dictionary of model parameters. + """ + + @property + @abstractmethod + def latent_parameters(self) -> Dict[str, torch.Tensor]: + """ + Property representing the latent parameters. + + Returns + ------- + dict + The dictionary of latent parameters. + """ + class Pln(_model): """ @@ -1509,8 +1494,7 @@ class Pln(_model): dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): - super().from_formula( - cls=cls, + return super().from_formula( formula=formula, data=data, offsets_formula=offsets_formula, @@ -1533,7 +1517,7 @@ class Pln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-8, + tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, @@ -1686,7 +1670,8 @@ class Pln(_model): ------ AttributeError since you can not set the coef in the Pln model. """ - raise AttributeError("You can not set the coef in the Pln model.") + msg = "You can not set the coef in the Pln model." + warnings.warn(msg) def _endog_predictions(self): return torch.exp( @@ -1811,7 +1796,7 @@ class Pln(_model): covariance : torch.Tensor The covariance matrix. 
""" - raise AttributeError("You can not set the covariance for the Pln model.") + warnings.warn("You can not set the covariance for the Pln model.") def _random_init_latent_sqrt_var(self): if not hasattr(self, "_latent_sqrt_var"): @@ -1891,6 +1876,19 @@ class Pln(_model): def _list_of_parameters_needing_gradient(self): return [self._latent_mean, self._latent_sqrt_var] + @property + @_add_doc(_model) + def model_parameters(self) -> Dict[str, torch.Tensor]: + return {"coef": self.coef, "covariance": self.covariance} + + @property + @_add_doc(_model) + def latent_parameters(self): + return { + "latent_sqrt_var": self.latent_sqrt_var, + "latent_mean": self.latent_mean, + } + class PlnPCAcollection: """ @@ -2286,7 +2284,7 @@ class PlnPCAcollection: nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-8, + tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, @@ -2814,7 +2812,7 @@ class PlnPCA(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-8, + tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, batch_size=None, @@ -3038,18 +3036,6 @@ class PlnPCA(_model): """ return self._rank - @property - def model_parameters(self) -> Dict[str, torch.Tensor]: - """ - Property representing the model parameters. - - Returns - ------- - Dict[str, torch.Tensor] - The model parameters. - """ - return {"coef": self.coef, "components": self.components} - @property def number_of_parameters(self) -> int: """ @@ -3310,6 +3296,19 @@ class PlnPCA(_model): return [self._components, self._latent_mean, self._latent_sqrt_var] return [self._components, self._coef, self._latent_mean, self._latent_sqrt_var] + @property + @_add_doc(_model) + def model_parameters(self) -> Dict[str, torch.Tensor]: + return {"coef": self.coef, "components": self.components} + + @property + @_add_doc(_model) + def latent_parameters(self): + return { + "latent_sqrt_var": self.latent_sqrt_var, + "latent_mean": self.latent_mean, + } + class ZIPln(_model): _NAME = "ZIPln" @@ -3429,7 +3428,7 @@ class ZIPln(_model): nb_max_iteration: int = 50000, *, lr: float = 0.01, - tol: float = 1e-8, + tol: float = 1e-3, do_smart_init: bool = True, verbose: bool = False, batch_size: int = None, @@ -3495,6 +3494,7 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) + @property def latent_variables(self) -> tuple([torch.Tensor, torch.Tensor]): """ Property representing the latent variables. 
Two latent @@ -3624,6 +3624,26 @@ class ZIPln(_model): def _update_closed_forms(self): pass + @property + @_add_doc(_model) + def model_parameters(self) -> Dict[str, torch.Tensor]: + return { + "coef": self.coef, + "components": self.components, + "coef_inflation": self.coef_inflation, + } + + @property + @_add_doc(_model) + def latent_parameters(self): + latent_param = { + "latent_sqrt_var": self.latent_sqrt_var, + "latent_mean": self.latent_mean, + } + if self._use_closed_form_prob is True: + latent_param["latent_prob"] = self.latent_prob + return latent_param + def grad_M(self): if self.use_closed_form_prob is True: latent_prob = self.closed_formula_latent_prob diff --git a/tests/conftest.py b/tests/conftest.py index 3a072f20..588b3e4e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ from pytest_lazyfixture import lazy_fixture as lf import pandas as pd from pyPLNmodels import load_model, load_plnpcacollection -from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection +from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln sys.path.append("../") @@ -78,14 +78,20 @@ def convenient_PlnPCAcollection(*args, **kwargs): def convenientpln(*args, **kwargs): + # no need to dict init since we do not have ranks if isinstance(args[0], str): return Pln.from_formula(*args, **kwargs) return Pln(*args, **kwargs) +def convenientzi(*args, **kwargs): + if isinstance(args[0], str): + return ZIPln.from_formula(*args, **kwargs) + return ZIPln(*args, **kwargs) + + def generate_new_model(model, *args, **kwargs): name_dir = model._directory_name - print("directory name", name_dir) name = model._NAME if name in ("Pln", "PlnPCA"): path = model._path_to_directory + name_dir @@ -94,6 +100,8 @@ def generate_new_model(model, *args, **kwargs): new = convenientpln(*args, **kwargs, dict_initialization=init) if name == "PlnPCA": new = convenient_PlnPCA(*args, **kwargs, dict_initialization=init) + if name == "ZIPln": + new = convenientzi(*args, **kwargs, dict_initialization=init) if name == "PlnPCAcollection": init = load_plnpcacollection(name_dir) new = convenient_PlnPCAcollection(*args, **kwargs, dict_initialization=init) diff --git a/tests/create_readme_and_docstrings_tests.py b/tests/create_readme_and_docstrings_tests.py index d9f27aeb..63aecf9d 100644 --- a/tests/create_readme_and_docstrings_tests.py +++ b/tests/create_readme_and_docstrings_tests.py @@ -43,7 +43,7 @@ def get_example_readme(lines): in_example = False elif in_example is True: example.append(line) - example.pop(0) # The first is pip install pyPLNmodels which is not python code. + example.pop() # The last line is pip install pyPLNmodels which is not python code. return [example] -- GitLab From f6127c3a18435b135faa4ad35f7e59da0b42a439 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 16 Oct 2023 11:41:59 +0200 Subject: [PATCH 097/167] renamed pln with model inf fixtures and fixed some tests for the pln. Now we cannot set exog to None and add_const to False for the ZIPln. 
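The fixture renaming in this patch leans on pytest's parametrized fixtures: each model family (Pln, PlnPCA, PlnPCAcollection and now ZIPln, through their small `convenient*` wrappers) becomes one value of `params`, so every test requesting such a fixture is collected once per family. A toy, self-contained version of the pattern, with illustrative names rather than the project's actual fixtures:

```py
import pytest

# Toy constructors standing in for the convenient* wrappers of conftest.py.
def build_model_a(data):
    return ("model_a", data)

def build_model_b(data):
    return ("model_b", data)

@pytest.fixture(params=[build_model_a, build_model_b])
def simulated_model(request):
    # request.param is one of the constructors above, so any test using this
    # fixture runs once per model family.
    return request.param(data=[1, 2, 3])

def test_model_keeps_data(simulated_model):
    _, data = simulated_model
    assert data == [1, 2, 3]
```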
--- pyPLNmodels/models.py | 142 ++++++++++++++++------ tests/conftest.py | 259 +++++++++++++++++++++-------------------- tests/test_common.py | 95 +++++++-------- tests/test_pln_full.py | 6 +- 4 files changed, 289 insertions(+), 213 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 9228de82..0588c469 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -397,6 +397,7 @@ class _model(ABC): while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() criterion = self._update_criterion_args(loss) + print("criterion", criterion) if abs(criterion) < tol: stop_condition = True if verbose and self.nb_iteration_done % 50 == 1: @@ -3317,26 +3318,6 @@ class ZIPln(_model): _coef_inflation: torch.Tensor _dirac: torch.Tensor - @_add_doc( - _model, - params=""" - use_closed_form_prob: bool, optional - Whether or not use the closed formula for the latent probability - """, - example=""" - >>> from pyPLNmodels import ZIPln, get_real_count_data - >>> endog= get_real_count_data() - >>> zi = ZIPln(endog, add_const = True) - >>> zi.fit() - >>> print(zi) - """, - returns=""" - ZIPln - """, - see_also=""" - :func:`pyPLNmodels.ZIPln.from_formula` - """, - ) def __init__( self, endog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]], @@ -3349,6 +3330,54 @@ class ZIPln(_model): add_const: bool = True, use_closed_form_prob: bool = False, ): + """ + Initializes the ZIPln class. + + Parameters + ---------- + endog : Union[torch.Tensor, np.ndarray, pd.DataFrame] + The count data. + exog : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) + The covariate data. Defaults to None. + offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) + The offsets data. Defaults to None. + offsets_formula : str, optional(keyword-only) + The formula for offsets. Defaults to "logsum". Overriden if + offsets is not None. + dict_initialization : dict, optional(keyword-only) + The initialization dictionary. Defaults to None. + take_log_offsets : bool, optional(keyword-only) + Whether to take the log of offsets. Defaults to False. + add_const : bool, optional(keyword-only) + Whether to add a column of one in the exog. Defaults to True. + If exog is None, add_const is set to True anyway and a warnings + is launched. + use_closed_form_prob : bool, optional + Whether or not use the closed formula for the latent probability. + Default is False. + Raises + ------ + ValueError + If the batch_size is greater than the number of samples, or not int. + Returns + ------- + A ZIPln object + See also + -------- + :func:`pyPLNmodels.ZIPln.from_formula` + Examples + -------- + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog= get_real_count_data() + >>> zi = ZIPln(endog, add_const = True) + >>> zi.fit() + >>> print(zi) + """ + if exog is None and add_const is False: + msg = "No covariates has been given. An intercept is added since " + msg += "a ZIPln must have at least an intercept." 
+ warnings.warn(msg) + add_const = True super().__init__( endog=endog, exog=exog, @@ -3375,22 +3404,6 @@ class ZIPln(_model): return batch @classmethod - @_add_doc( - _model, - example=""" - >>> from pyPLNmodels import ZIPln, get_real_count_data - >>> endog = get_real_count_data() - >>> data = {"endog": endog} - >>> zi = ZIPln.from_formula("endog ~ 1", data = data) - """, - returns=""" - ZIPln - """, - see_also=""" - :class:`pyPLNmodels.ZIPln` - :func:`pyPLNmodels.ZIPln.__init__` - """, - ) def from_formula( cls, formula: str, @@ -3401,6 +3414,39 @@ class ZIPln(_model): take_log_offsets: bool = False, use_closed_form_prob: bool = True, ): + """ + Create a model instance from a formula and data. + + Parameters + ---------- + formula : str + The formula. + data : dict + The data dictionary. Each value can be either a torch.Tensor, + a np.ndarray or pd.DataFrame + offsets_formula : str, optional(keyword-only) + The formula for offsets. Defaults to "logsum". + dict_initialization : dict, optional(keyword-only) + The initialization dictionary. Defaults to None. + take_log_offsets : bool, optional(keyword-only) + Whether to take the log of offsets. Defaults to False. + use_closed_form_prob : bool, optional + Whether or not use the closed formula for the latent probability. + Default is False. + Returns + ------- + A ZIPln object + See also + -------- + :class:`pyPLNmodels.ZIPln` + :func:`pyPLNmodels.ZIPln.__init__` + Examples + -------- + >>> from pyPLNmodels import ZIPln, get_real_count_data + >>> endog = get_real_count_data() + >>> data = {"endog": endog} + >>> zi = ZIPln.from_formula("endog ~ 1", data = data) + """ endog, exog, offsets = _extract_data_from_formula(formula, data) return cls( endog, @@ -3494,6 +3540,18 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) + @property + def components(self) -> torch.Tensor: + """ + Property representing the components. + + Returns + ------- + torch.Tensor + The components. + """ + return self._cpu_attribute_or_none("_components") + @property def latent_variables(self) -> tuple([torch.Tensor, torch.Tensor]): """ @@ -3517,6 +3575,18 @@ class ZIPln(_model): """ return self.latent_mean, self.latent_prob + @property + def coef_inflation(self): + """ + Property representing the coefficients of the zero inflated model. + + Returns + ------- + torch.Tensor or None + The coefficients or None. 
+ """ + return self._cpu_attribute_or_none("_coef_inflation") + def _update_parameters(self): super()._update_parameters() self._project_latent_prob() diff --git a/tests/conftest.py b/tests/conftest.py index 588b3e4e..e40558dd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,10 +12,10 @@ from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln sys.path.append("../") -pytest_plugins = [ - fixture_file.replace("/", ".").replace(".py", "") - for fixture_file in glob.glob("src/**/tests/fixtures/[!__]*.py", recursive=True) -] +# pytest_plugins = [ +# fixture_file.replace("/", ".").replace(".py", "") +# for fixture_file in glob.glob("src/**/tests/fixtures/[!__]*.py", recursive=True) +# ] from tests.import_data import ( @@ -78,7 +78,6 @@ def convenient_PlnPCAcollection(*args, **kwargs): def convenientpln(*args, **kwargs): - # no need to dict init since we do not have ranks if isinstance(args[0], str): return Pln.from_formula(*args, **kwargs) return Pln(*args, **kwargs) @@ -93,8 +92,8 @@ def convenientzi(*args, **kwargs): def generate_new_model(model, *args, **kwargs): name_dir = model._directory_name name = model._NAME - if name in ("Pln", "PlnPCA"): - path = model._path_to_directory + name_dir + if name in ("Pln", "PlnPCA", "ZIPln"): + path = model._directory_name init = load_model(path) if name == "Pln": new = convenientpln(*args, **kwargs, dict_initialization=init) @@ -103,7 +102,7 @@ def generate_new_model(model, *args, **kwargs): if name == "ZIPln": new = convenientzi(*args, **kwargs, dict_initialization=init) if name == "PlnPCAcollection": - init = load_plnpcacollection(name_dir) + init = load_plnpcacollection(model._directory_name) new = convenient_PlnPCAcollection(*args, **kwargs, dict_initialization=init) return new @@ -119,67 +118,67 @@ def cache(func): return new_func -params = [convenientpln, convenient_PlnPCA, convenient_PlnPCAcollection] +params = [convenientpln, convenient_PlnPCA, convenient_PlnPCAcollection, convenientzi] dict_fixtures = {} @pytest.fixture(params=params) -def simulated_pln_0cov_array(request): +def simulated_model_0cov_array(request): cls = request.param - pln = cls( + model = cls( endog_sim_0cov, exog=exog_sim_0cov, offsets=offsets_sim_0cov, add_const=False, ) - return pln + return model @pytest.fixture(params=params) @cache -def simulated_fitted_pln_0cov_array(request): +def simulated_fitted_model_0cov_array(request): cls = request.param - pln = cls( + model = cls( endog_sim_0cov, exog=exog_sim_0cov, offsets=offsets_sim_0cov, add_const=False, ) - pln.fit() - return pln + model.fit() + return model @pytest.fixture(params=params) -def simulated_pln_0cov_formula(request): +def simulated_model_0cov_formula(request): cls = request.param - pln = cls("endog ~ 0", data_sim_0cov) - return pln + model = cls("endog ~ 0", data_sim_0cov) + return model @pytest.fixture(params=params) @cache -def simulated_fitted_pln_0cov_formula(request): +def simulated_fitted_model_0cov_formula(request): cls = request.param - pln = cls("endog ~ 0", data_sim_0cov) - pln.fit() - return pln + model = cls("endog ~ 0", data_sim_0cov) + model.fit() + return model @pytest.fixture -def simulated_loaded_pln_0cov_formula(simulated_fitted_pln_0cov_formula): - simulated_fitted_pln_0cov_formula.save() +def simulated_loaded_model_0cov_formula(simulated_fitted_model_0cov_formula): + simulated_fitted_model_0cov_formula.save() return generate_new_model( - simulated_fitted_pln_0cov_formula, + simulated_fitted_model_0cov_formula, "endog ~ 0", data_sim_0cov, ) @pytest.fixture -def 
simulated_loaded_pln_0cov_array(simulated_fitted_pln_0cov_array): - simulated_fitted_pln_0cov_array.save() +def simulated_loaded_model_0cov_array(simulated_fitted_model_0cov_array): + simulated_fitted_model_0cov_array.save() return generate_new_model( - simulated_fitted_pln_0cov_array, + simulated_fitted_model_0cov_array, endog_sim_0cov, exog=exog_sim_0cov, offsets=offsets_sim_0cov, @@ -187,87 +186,89 @@ def simulated_loaded_pln_0cov_array(simulated_fitted_pln_0cov_array): ) -sim_pln_0cov_instance = [ - "simulated_pln_0cov_array", - "simulated_pln_0cov_formula", +sim_model_0cov_instance = [ + "simulated_model_0cov_array", + "simulated_model_0cov_formula", ] -instances = sim_pln_0cov_instance + instances +instances = sim_model_0cov_instance + instances dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_0cov_instance", sim_pln_0cov_instance + dict_fixtures, "sim_model_0cov_instance", sim_model_0cov_instance ) -sim_pln_0cov_fitted = [ - "simulated_fitted_pln_0cov_array", - "simulated_fitted_pln_0cov_formula", +sim_model_0cov_fitted = [ + "simulated_fitted_model_0cov_array", + "simulated_fitted_model_0cov_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_0cov_fitted", sim_pln_0cov_fitted + dict_fixtures, "sim_model_0cov_fitted", sim_model_0cov_fitted ) -sim_pln_0cov_loaded = [ - "simulated_loaded_pln_0cov_array", - "simulated_loaded_pln_0cov_formula", +sim_model_0cov_loaded = [ + "simulated_loaded_model_0cov_array", + "simulated_loaded_model_0cov_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_0cov_loaded", sim_pln_0cov_loaded + dict_fixtures, "sim_model_0cov_loaded", sim_model_0cov_loaded ) -sim_pln_0cov = sim_pln_0cov_instance + sim_pln_0cov_fitted + sim_pln_0cov_loaded -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_pln_0cov", sim_pln_0cov) +sim_model_0cov = sim_model_0cov_instance + sim_model_0cov_fitted + sim_model_0cov_loaded +dict_fixtures = add_list_of_fixture_to_dict( + dict_fixtures, "sim_model_0cov", sim_model_0cov +) @pytest.fixture(params=params) @cache -def simulated_pln_2cov_array(request): +def simulated_model_2cov_array(request): cls = request.param - pln_full = cls( + model = cls( endog_sim_2cov, exog=exog_sim_2cov, offsets=offsets_sim_2cov, add_const=False, ) - return pln_full + return model @pytest.fixture -def simulated_fitted_pln_2cov_array(simulated_pln_2cov_array): - simulated_pln_2cov_array.fit() - return simulated_pln_2cov_array +def simulated_fitted_model_2cov_array(simulated_model_2cov_array): + simulated_model_2cov_array.fit() + return simulated_model_2cov_array @pytest.fixture(params=params) @cache -def simulated_pln_2cov_formula(request): +def simulated_model_2cov_formula(request): cls = request.param - pln_full = cls("endog ~ 0 + exog", data_sim_2cov) - return pln_full + model = cls("endog ~ 0 + exog", data_sim_2cov) + return model @pytest.fixture -def simulated_fitted_pln_2cov_formula(simulated_pln_2cov_formula): - simulated_pln_2cov_formula.fit() - return simulated_pln_2cov_formula +def simulated_fitted_model_2cov_formula(simulated_model_2cov_formula): + simulated_model_2cov_formula.fit() + return simulated_model_2cov_formula @pytest.fixture -def simulated_loaded_pln_2cov_formula(simulated_fitted_pln_2cov_formula): - simulated_fitted_pln_2cov_formula.save() +def simulated_loaded_model_2cov_formula(simulated_fitted_model_2cov_formula): + simulated_fitted_model_2cov_formula.save() return generate_new_model( - simulated_fitted_pln_2cov_formula, + 
simulated_fitted_model_2cov_formula, "endog ~0 + exog", data_sim_2cov, ) @pytest.fixture -def simulated_loaded_pln_2cov_array(simulated_fitted_pln_2cov_array): - simulated_fitted_pln_2cov_array.save() +def simulated_loaded_model_2cov_array(simulated_fitted_model_2cov_array): + simulated_fitted_model_2cov_array.save() return generate_new_model( - simulated_fitted_pln_2cov_array, + simulated_fitted_model_2cov_array, endog_sim_2cov, exog=exog_sim_2cov, offsets=offsets_sim_2cov, @@ -275,147 +276,149 @@ def simulated_loaded_pln_2cov_array(simulated_fitted_pln_2cov_array): ) -sim_pln_2cov_instance = [ - "simulated_pln_2cov_array", - "simulated_pln_2cov_formula", +sim_model_2cov_instance = [ + "simulated_model_2cov_array", + "simulated_model_2cov_formula", ] -instances = sim_pln_2cov_instance + instances +instances = sim_model_2cov_instance + instances dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_2cov_instance", sim_pln_2cov_instance + dict_fixtures, "sim_model_2cov_instance", sim_model_2cov_instance ) -sim_pln_2cov_fitted = [ - "simulated_fitted_pln_2cov_array", - "simulated_fitted_pln_2cov_formula", +sim_model_2cov_fitted = [ + "simulated_fitted_model_2cov_array", + "simulated_fitted_model_2cov_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_2cov_fitted", sim_pln_2cov_fitted + dict_fixtures, "sim_model_2cov_fitted", sim_model_2cov_fitted ) -sim_pln_2cov_loaded = [ - "simulated_loaded_pln_2cov_array", - "simulated_loaded_pln_2cov_formula", +sim_model_2cov_loaded = [ + "simulated_loaded_model_2cov_array", + "simulated_loaded_model_2cov_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "sim_pln_2cov_loaded", sim_pln_2cov_loaded + dict_fixtures, "sim_model_2cov_loaded", sim_model_2cov_loaded ) -sim_pln_2cov = sim_pln_2cov_instance + sim_pln_2cov_fitted + sim_pln_2cov_loaded -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_pln_2cov", sim_pln_2cov) +sim_model_2cov = sim_model_2cov_instance + sim_model_2cov_fitted + sim_model_2cov_loaded +dict_fixtures = add_list_of_fixture_to_dict( + dict_fixtures, "sim_model_2cov", sim_model_2cov +) @pytest.fixture(params=params) @cache -def real_pln_intercept_array(request): +def real_model_intercept_array(request): cls = request.param - pln_full = cls(endog_real, add_const=True) - return pln_full + model = cls(endog_real, add_const=True) + return model @pytest.fixture -def real_fitted_pln_intercept_array(real_pln_intercept_array): - real_pln_intercept_array.fit() - return real_pln_intercept_array +def real_fitted_model_intercept_array(real_model_intercept_array): + real_model_intercept_array.fit() + return real_model_intercept_array @pytest.fixture(params=params) @cache -def real_pln_intercept_formula(request): +def real_model_intercept_formula(request): cls = request.param - pln_full = cls("endog ~ 1", data_real) - return pln_full + model = cls("endog ~ 1", data_real) + return model @pytest.fixture -def real_fitted_pln_intercept_formula(real_pln_intercept_formula): - real_pln_intercept_formula.fit() - return real_pln_intercept_formula +def real_fitted_model_intercept_formula(real_model_intercept_formula): + real_model_intercept_formula.fit() + return real_model_intercept_formula @pytest.fixture -def real_loaded_pln_intercept_formula(real_fitted_pln_intercept_formula): - real_fitted_pln_intercept_formula.save() +def real_loaded_model_intercept_formula(real_fitted_model_intercept_formula): + real_fitted_model_intercept_formula.save() return generate_new_model( - 
real_fitted_pln_intercept_formula, "endog ~ 1", data=data_real + real_fitted_model_intercept_formula, "endog ~ 1", data=data_real ) @pytest.fixture -def real_loaded_pln_intercept_array(real_fitted_pln_intercept_array): - real_fitted_pln_intercept_array.save() +def real_loaded_model_intercept_array(real_fitted_model_intercept_array): + real_fitted_model_intercept_array.save() return generate_new_model( - real_fitted_pln_intercept_array, + real_fitted_model_intercept_array, endog_real, add_const=True, ) -real_pln_instance = [ - "real_pln_intercept_array", - "real_pln_intercept_formula", +real_model_instance = [ + "real_model_intercept_array", + "real_model_intercept_formula", ] -instances = real_pln_instance + instances +instances = real_model_instance + instances dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "real_pln_instance", real_pln_instance + dict_fixtures, "real_model_instance", real_model_instance ) -real_pln_fitted = [ - "real_fitted_pln_intercept_array", - "real_fitted_pln_intercept_formula", +real_model_fitted = [ + "real_fitted_model_intercept_array", + "real_fitted_model_intercept_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "real_pln_fitted", real_pln_fitted + dict_fixtures, "real_model_fitted", real_model_fitted ) -real_pln_loaded = [ - "real_loaded_pln_intercept_array", - "real_loaded_pln_intercept_formula", +real_model_loaded = [ + "real_loaded_model_intercept_array", + "real_loaded_model_intercept_formula", ] dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "real_pln_loaded", real_pln_loaded + dict_fixtures, "real_model_loaded", real_model_loaded ) -sim_loaded_pln = sim_pln_0cov_loaded + sim_pln_2cov_loaded +sim_loaded_model = sim_model_0cov_loaded + sim_model_2cov_loaded -loaded_pln = real_pln_loaded + sim_loaded_pln -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "loaded_pln", loaded_pln) +loaded_model = real_model_loaded + sim_loaded_model +dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "loaded_model", loaded_model) -simulated_pln_fitted = sim_pln_0cov_fitted + sim_pln_2cov_fitted +simulated_model_fitted = sim_model_0cov_fitted + sim_model_2cov_fitted dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "simulated_pln_fitted", simulated_pln_fitted + dict_fixtures, "simulated_model_fitted", simulated_model_fitted ) -fitted_pln = real_pln_fitted + simulated_pln_fitted -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "fitted_pln", fitted_pln) +fitted_model = real_model_fitted + simulated_model_fitted +dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "fitted_model", fitted_model) -loaded_and_fitted_sim_pln = simulated_pln_fitted + sim_loaded_pln -loaded_and_fitted_real_pln = real_pln_fitted + real_pln_loaded +loaded_and_fitted_sim_model = simulated_model_fitted + sim_loaded_model +loaded_and_fitted_real_model = real_model_fitted + real_model_loaded dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "loaded_and_fitted_real_pln", loaded_and_fitted_real_pln + dict_fixtures, "loaded_and_fitted_real_model", loaded_and_fitted_real_model ) dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "loaded_and_fitted_sim_pln", loaded_and_fitted_sim_pln + dict_fixtures, "loaded_and_fitted_sim_model", loaded_and_fitted_sim_model ) -loaded_and_fitted_pln = fitted_pln + loaded_pln +loaded_and_fitted_model = fitted_model + loaded_model dict_fixtures = add_list_of_fixture_to_dict( - dict_fixtures, "loaded_and_fitted_pln", loaded_and_fitted_pln + dict_fixtures, 
"loaded_and_fitted_model", loaded_and_fitted_model ) -real_pln = real_pln_instance + real_pln_fitted + real_pln_loaded -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "real_pln", real_pln) +real_model = real_model_instance + real_model_fitted + real_model_loaded +dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "real_model", real_model) -sim_pln = sim_pln_2cov + sim_pln_0cov -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_pln", sim_pln) +sim_model = sim_model_2cov + sim_model_0cov +dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "sim_model", sim_model) -all_pln = real_pln + sim_pln + instances +all_model = real_model + sim_model + instances dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "instances", instances) -dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "all_pln", all_pln) +dict_fixtures = add_list_of_fixture_to_dict(dict_fixtures, "all_model", all_model) -for string_fixture in all_pln: +for string_fixture in all_model: print("string_fixture", string_fixture) dict_fixtures = add_fixture_to_dict(dict_fixtures, string_fixture) diff --git a/tests/test_common.py b/tests/test_common.py index b1a6837c..cec97a72 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -8,82 +8,85 @@ from tests.utils import MSE, filter_models from tests.import_data import true_sim_0cov, true_sim_2cov, endog_real +single_models = ["Pln", "PlnPCA", "ZIPln"] +pln_and_plnpca = ["Pln", "PlnPCA"] -@pytest.mark.parametrize("any_pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_properties(any_pln): - assert hasattr(any_pln, "latent_parameters") - assert hasattr(any_pln, "latent_variables") - assert hasattr(any_pln, "optim_parameters") - assert hasattr(any_pln, "model_parameters") +@pytest.mark.parametrize("any_model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_properties(any_model): + assert hasattr(any_model, "latent_parameters") + assert hasattr(any_model, "latent_variables") + assert hasattr(any_model, "optim_parameters") + assert hasattr(any_model, "model_parameters") -@pytest.mark.parametrize("sim_pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_predict_simulated(sim_pln): - if sim_pln.nb_cov == 0: - assert sim_pln.predict() is None + +@pytest.mark.parametrize("sim_model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(pln_and_plnpca) +def test_predict_simulated(sim_model): + if sim_model.nb_cov == 0: + assert sim_model.predict() is None with pytest.raises(AttributeError): - sim_pln.predict(1) + sim_model.predict(1) else: - X = torch.randn((sim_pln.n_samples, sim_pln.nb_cov)) - prediction = sim_pln.predict(X) - expected = X @ sim_pln.coef + X = torch.randn((sim_model.n_samples, sim_model.nb_cov)) + prediction = sim_model.predict(X) + expected = X @ sim_model.coef assert torch.all(torch.eq(expected, prediction)) -@pytest.mark.parametrize("any_instance_pln", dict_fixtures["instances"]) -def test_verbose(any_instance_pln): - any_instance_pln.fit(verbose=True, tol=0.1) +@pytest.mark.parametrize("any_instance_model", dict_fixtures["instances"]) +def test_verbose(any_instance_model): + any_instance_model.fit(verbose=True, tol=0.1) @pytest.mark.parametrize( - "simulated_fitted_any_pln", dict_fixtures["loaded_and_fitted_sim_pln"] + "simulated_fitted_any_model", dict_fixtures["loaded_and_fitted_sim_model"] ) -@filter_models(["Pln", "PlnPCA"]) -def test_find_right_covariance(simulated_fitted_any_pln): - if 
simulated_fitted_any_pln.nb_cov == 0: +@filter_models(pln_and_plnpca) +def test_find_right_covariance(simulated_fitted_any_model): + if simulated_fitted_any_model.nb_cov == 0: true_covariance = true_sim_0cov["Sigma"] - elif simulated_fitted_any_pln.nb_cov == 2: + elif simulated_fitted_any_model.nb_cov == 2: true_covariance = true_sim_2cov["Sigma"] else: raise ValueError( - f"Not the right numbers of covariance({simulated_fitted_any_pln.nb_cov})" + f"Not the right numbers of covariance({simulated_fitted_any_model.nb_cov})" ) - mse_covariance = MSE(simulated_fitted_any_pln.covariance - true_covariance) + mse_covariance = MSE(simulated_fitted_any_model.covariance - true_covariance) assert mse_covariance < 0.05 @pytest.mark.parametrize( - "real_fitted_and_loaded_pln", dict_fixtures["loaded_and_fitted_real_pln"] + "real_fitted_and_loaded_model", dict_fixtures["loaded_and_fitted_real_model"] ) -@filter_models(["Pln", "PlnPCA"]) -def test_right_covariance_shape(real_fitted_and_loaded_pln): - assert real_fitted_and_loaded_pln.covariance.shape == ( +@filter_models(single_models) +def test_right_covariance_shape(real_fitted_and_loaded_model): + assert real_fitted_and_loaded_model.covariance.shape == ( endog_real.shape[1], endog_real.shape[1], ) @pytest.mark.parametrize( - "simulated_fitted_any_pln", dict_fixtures["loaded_and_fitted_pln"] + "simulated_fitted_any_model", dict_fixtures["loaded_and_fitted_model"] ) -@filter_models(["Pln", "PlnPCA"]) -def test_find_right_coef(simulated_fitted_any_pln): - if simulated_fitted_any_pln.nb_cov == 2: +@filter_models(pln_and_plnpca) +def test_find_right_coef(simulated_fitted_any_model): + if simulated_fitted_any_model.nb_cov == 2: true_coef = true_sim_2cov["beta"] - mse_coef = MSE(simulated_fitted_any_pln.coef - true_coef) + mse_coef = MSE(simulated_fitted_any_model.coef - true_coef) assert mse_coef < 0.1 - elif simulated_fitted_any_pln.nb_cov == 0: - assert simulated_fitted_any_pln.coef is None + elif simulated_fitted_any_model.nb_cov == 0: + assert simulated_fitted_any_model.coef is None -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_fail_count_setter(pln): +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_fail_count_setter(model): wrong_endog = torch.randint(size=(10, 5), low=0, high=10) with pytest.raises(Exception): - pln.endog = wrong_endog + model.endog = wrong_endog @pytest.mark.parametrize("instance", dict_fixtures["instances"]) @@ -96,9 +99,9 @@ def test__print_end_of_fitting_message(instance): instance.fit(nb_max_iteration=4) -@pytest.mark.parametrize("pln", dict_fixtures["fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_fail_wrong_exog_prediction(pln): - X = torch.randn(pln.n_samples, pln.nb_cov + 1) +@pytest.mark.parametrize("model", dict_fixtures["fitted_model"]) +@filter_models(single_models) +def test_fail_wrong_exog_prediction(model): + X = torch.randn(model.n_samples, model.nb_cov + 1) with pytest.raises(Exception): - pln.predict(X) + model.predict(X) diff --git a/tests/test_pln_full.py b/tests/test_pln_full.py index 870114a0..1115e1ec 100644 --- a/tests/test_pln_full.py +++ b/tests/test_pln_full.py @@ -4,14 +4,14 @@ from tests.conftest import dict_fixtures from tests.utils import filter_models -@pytest.mark.parametrize("fitted_pln", dict_fixtures["fitted_pln"]) +@pytest.mark.parametrize("fitted_pln", dict_fixtures["fitted_model"]) @filter_models(["Pln"]) def 
test_number_of_iterations_pln_full(fitted_pln): - nb_iterations = len(fitted_pln._elbos_list) + nb_iterations = len(fitted_pln.elbos_list) assert 20 < nb_iterations < 1000 -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["Pln"]) def test_latent_var_full(pln): assert pln.transform().shape == pln.endog.shape -- GitLab From 13bf8f1e0c88f9d3522bdde106c39edbbc81cded Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 16 Oct 2023 15:10:08 +0200 Subject: [PATCH 098/167] right elbo, problem before. --- pyPLNmodels/elbos.py | 203 ++++++++++++++++++++++++------------------- 1 file changed, 113 insertions(+), 90 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index ec743430..cf235ec4 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -5,63 +5,6 @@ from ._closed_forms import _closed_formula_covariance, _closed_formula_coef from typing import Optional -def elbo_pln( - endog: torch.Tensor, - offsets: torch.Tensor, - exog: Optional[torch.Tensor], - latent_mean: torch.Tensor, - latent_sqrt_var: torch.Tensor, - covariance: torch.Tensor, - coef: torch.Tensor, -) -> torch.Tensor: - """ - Compute the ELBO (Evidence Lower Bound) for the Pln model. - - Parameters: - ---------- - endog : torch.Tensor - Counts with size (n, p). - offsets : torch.Tensor - Offset with size (n, p). - exog : torch.Tensor, optional - Covariates with size (n, d). - latent_mean : torch.Tensor - Variational parameter with size (n, p). - latent_sqrt_var : torch.Tensor - Variational parameter with size (n, p). - covariance : torch.Tensor - Model parameter with size (p, p). - coef : torch.Tensor - Model parameter with size (d, p). - - Returns: - ------- - torch.Tensor - The ELBO (Evidence Lower Bound), of size one. - """ - n_samples, dim = endog.shape - s_rond_s = torch.square(latent_sqrt_var) - offsets_plus_m = offsets + latent_mean - if exog is None: - XB = torch.zeros_like(endog) - else: - XB = exog @ coef - m_minus_xb = latent_mean - XB - d_plus_minus_xb2 = ( - torch.diag(torch.sum(s_rond_s, dim=0)) + m_minus_xb.T @ m_minus_xb - ) - elbo = -0.5 * n_samples * torch.logdet(covariance) - elbo += torch.sum( - endog * offsets_plus_m - - 0.5 * torch.exp(offsets_plus_m + s_rond_s) - + 0.5 * torch.log(s_rond_s) - ) - elbo -= 0.5 * torch.trace(torch.inverse(covariance) @ d_plus_minus_xb2) - elbo -= torch.sum(_log_stirling(endog)) - elbo += 0.5 * n_samples * dim - return elbo / n_samples - - def profiled_elbo_pln( endog: torch.Tensor, exog: torch.Tensor, @@ -172,6 +115,78 @@ def elbo_plnpca( ) / n_samples +def log1pexp(x): + # more stable version of log(1 + exp(x)) + return torch.where(x < 50, torch.log1p(torch.exp(x)), x) + + +def elbo_pln( + endog: torch.Tensor, + exog: Optional[torch.Tensor], + offsets: torch.Tensor, + latent_mean: torch.Tensor, + latent_sqrt_var: torch.Tensor, + covariance: torch.Tensor, + coef: torch.Tensor, +) -> torch.Tensor: + """ + Compute the ELBO (Evidence Lower Bound) for the Pln model. + + Parameters: + ---------- + endog : torch.Tensor + Counts with size (n, p). + offsets : torch.Tensor + Offset with size (n, p). + exog : torch.Tensor, optional + Covariates with size (n, d). + latent_mean : torch.Tensor + Variational parameter with size (n, p). + latent_sqrt_var : torch.Tensor + Variational parameter with size (n, p). + covariance : torch.Tensor + Model parameter with size (p, p). 
+ coef : torch.Tensor + Model parameter with size (d, p). + + Returns: + ------- + torch.Tensor + The ELBO (Evidence Lower Bound), of size one. + """ + n_samples, dim = endog.shape + s_rond_s = torch.square(latent_sqrt_var) + offsets_plus_m = offsets + latent_mean + Omega = torch.inverse(covariance) + if exog is None: + XB = torch.zeros_like(endog) + else: + XB = exog @ coef + # print('XB:', XB) + m_minus_xb = latent_mean - XB + m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) + A = torch.exp(offsets_plus_m + s_rond_s / 2) + first_a = torch.sum(endog * offsets_plus_m) + sec_a = -torch.sum(A) + third_a = -torch.sum(_log_stirling(endog)) + a = first_a + sec_a + third_a + diag = torch.diag(torch.sum(s_rond_s, dim=0)) + elbo = torch.clone(a) + b = -0.5 * n_samples * torch.logdet(covariance) + torch.sum( + -1 / 2 * Omega * m_moins_xb_outer + ) + elbo += b + d = n_samples * dim / 2 + torch.sum(+0.5 * torch.log(s_rond_s)) + elbo += d + f = -0.5 * torch.trace(torch.inverse(covariance) @ diag) + elbo += f + # print("a pln", a) + # print("b pln", b) + # print("d pln", d) + # print("f pln", f) + return elbo # / n_samples + + ## pb with trunc_log ## should rename some variables so that is is clearer when we see the formula def elbo_zi_pln( @@ -194,7 +209,7 @@ def elbo_zi_pln( 0: torch.tensor. Offset, size (n,p) exog: torch.tensor. Covariates, size (n,d) latent_mean: torch.tensor. Variational parameter with size (n,p) - latent_sqrt_var: torch.tensor. Variational parameter with size (n,p) + latent_var: torch.tensor. Variational parameter with size (n,p) pi: torch.tensor. Variational parameter with size (n,p) covariance: torch.tensor. Model parameter with size (p,p) coef: torch.tensor. Model parameter with size (d,p) @@ -202,52 +217,60 @@ def elbo_zi_pln( Returns: torch.tensor of size 1 with a gradient. 
""" - if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: - raise RuntimeError("Latent probability is not zero when it should be.") covariance = components @ (components.T) - diag_cov = torch.diag(covariance) - Omega = torch.inverse(covariance) - diag_omega = torch.diag(Omega) - un_moins_prob = 1 - latent_prob + if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: + print("Bug") + raise RuntimeError("rho error") n_samples, dim = endog.shape - s_rond_s = latent_sqrt_var * latent_sqrt_var + s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) o_plus_m = offsets + latent_mean if exog is None: XB = torch.zeros_like(endog) - xcoef_inflation = torch.zeros_like(endog) + x_coef_inflation = torch.zeros_like(endog) else: XB = exog @ coef - xcoef_inflation = exog @ coef_inflation + x_coef_inflation = exog @ coef_inflation + m_minus_xb = latent_mean - XB A = torch.exp(o_plus_m + s_rond_s / 2) - inside_a = un_moins_prob * (endog * o_plus_m - A - _log_stirling(endog)) + inside_a = torch.multiply( + 1 - latent_prob, torch.multiply(endog, o_plus_m) - A - _log_stirling(endog) + ) + Omega = torch.inverse(covariance) + m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) - un_moins_prob_m_moins_xb = un_moins_prob * m_minus_xb - un_moins_prob_m_moins_xb_outer = ( - un_moins_prob_m_moins_xb.T @ un_moins_prob_m_moins_xb + un_moins_rho = 1 - latent_prob + un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb + un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb + inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer + + inside_c = torch.multiply(latent_prob, x_coef_inflation) - torch.log( + 1 + torch.exp(x_coef_inflation) ) - inside_b = -1 / 2 * Omega * un_moins_prob_m_moins_xb_outer - inside_c = latent_prob * xcoef_inflation - torch.log(1 + torch.exp(xcoef_inflation)) - log_diag = torch.log(diag_cov) + log_diag = torch.log(torch.diag(covariance)) log_S_term = torch.sum( - un_moins_prob * torch.log(torch.abs(latent_sqrt_var)), axis=0 + torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 ) - sum_prob = torch.sum(latent_prob, axis=0) - covariance_term = 1 / 2 * torch.log(diag_cov) * sum_prob + y = torch.sum(latent_prob, axis=0) + covariance_term = 1 / 2 * torch.log(torch.diag(covariance)) * y inside_d = covariance_term + log_S_term - inside_e = torch.multiply( - latent_prob, _trunc_log(latent_prob) - ) + un_moins_prob * _trunc_log(un_moins_prob) - sum_un_moins_prob_s2 = torch.sum(un_moins_prob * s_rond_s, axis=0) - diag_sig_sum_prob = diag_cov * torch.sum(latent_prob, axis=0) - new = torch.sum(latent_prob * un_moins_prob * (m_minus_xb**2), axis=0) - K = sum_un_moins_prob_s2 + diag_sig_sum_prob + new - inside_f = -1 / 2 * diag_omega * K - full_diag_omega = diag_omega.expand(exog.shape[0], -1) - elbo = torch.sum(inside_a + inside_c + inside_d) - elbo += torch.sum(inside_b) - n_samples / 2 * torch.logdet(covariance) - elbo += n_samples * dim / 2 + torch.sum(inside_d + inside_f) - return elbo + inside_e = -torch.multiply(latent_prob, _trunc_log(latent_prob)) - torch.multiply( + 1 - latent_prob, _trunc_log(1 - latent_prob) + ) + sum_un_moins_rho_s2 = torch.sum(torch.multiply(1 - latent_prob, s_rond_s), axis=0) + diag_sig_sum_rho = torch.multiply( + torch.diag(covariance), torch.sum(latent_prob, axis=0) + ) + new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) + K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new + inside_f =-1 / 2 * torch.diag(Omega) * K + first = torch.sum(inside_a + inside_c + inside_e) + second = 
torch.sum(inside_b) + second -= n_samples / 2 * torch.logdet(covariance) + third = torch.sum(inside_d + inside_f) + third += n_samples*dim/2 + res = first + second + third + return res -- GitLab From dbc13d31c63bbe666c05748f93aa8800d4d1cdb8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 16 Oct 2023 17:06:43 +0200 Subject: [PATCH 099/167] tried to see why the zi bug with real data. The components have zero determinant. --- pyPLNmodels/_initialization.py | 4 ++-- pyPLNmodels/elbos.py | 14 +++++++++++--- pyPLNmodels/models.py | 13 ++++++++----- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index f5663746..ac4472a6 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -44,7 +44,7 @@ def _init_covariance(endog: torch.Tensor, exog: torch.Tensor) -> torch.Tensor: def _init_components( - endog: torch.Tensor, exog: torch.Tensor, rank: int + endog: torch.Tensor, rank: int ) -> torch.Tensor: """ Initialization for components for the Pln model. Get a first guess for covariance @@ -65,7 +65,7 @@ def _init_components( log_y = torch.log(endog + (endog == 0) * math.exp(-2)) pca = PCA(n_components=rank) pca.fit(log_y.detach().cpu()) - pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_) + pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_ + 0.001) return torch.from_numpy(pca_comp).to(DEVICE) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index cf235ec4..3862de9f 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -238,17 +238,16 @@ def elbo_zi_pln( 1 - latent_prob, torch.multiply(endog, o_plus_m) - A - _log_stirling(endog) ) Omega = torch.inverse(covariance) - m_moins_xb_outer = torch.mm(m_minus_xb.T, m_minus_xb) un_moins_rho = 1 - latent_prob un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer - inside_c = torch.multiply(latent_prob, x_coef_inflation) - torch.log( 1 + torch.exp(x_coef_inflation) ) + log_diag = torch.log(torch.diag(covariance)) log_S_term = torch.sum( torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 @@ -267,10 +266,19 @@ def elbo_zi_pln( new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new inside_f =-1 / 2 * torch.diag(Omega) * K + print("inside_a",torch.sum(inside_a)) + print("inside_b",torch.sum(inside_b)) + print("inside_c",torch.sum(inside_c)) + print("inside_d",torch.sum(inside_d)) + print("inside_e",torch.sum(inside_e)) + print("inside_f",torch.sum(inside_f)) first = torch.sum(inside_a + inside_c + inside_e) + print('first', first) second = torch.sum(inside_b) - second -= n_samples / 2 * torch.logdet(covariance) + second -= n_samples * torch.logdet(components) + print('logdet', torch.logdet(components)) third = torch.sum(inside_d + inside_f) third += n_samples*dim/2 + print('third', third) res = first + second + third return res diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index e9bcfd01..49298b4f 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -397,7 +397,6 @@ class _model(ABC): while self.nb_iteration_done < nb_max_iteration and not stop_condition: loss = self._trainstep() criterion = self._update_criterion_args(loss) - print("criterion", criterion) if abs(criterion) < tol: stop_condition = True if verbose and 
self.nb_iteration_done % 50 == 1: @@ -477,6 +476,8 @@ class _model(ABC): self._extract_batch(batch) self.optim.zero_grad() loss = -self._compute_elbo_b() + if torch.sum(torch.isnan(loss)): + raise ValueError("test") loss.backward() elbo += loss.item() self._update_parameters() @@ -3334,7 +3335,7 @@ class PlnPCA(_model): if not hasattr(self, "_coef"): super()._smart_init_coef() if not hasattr(self, "_components"): - self._components = _init_components(self._endog, self._exog, self._rank) + self._components = _init_components(self._endog, self._rank) @_add_doc(_model) def _random_init_latent_parameters(self): @@ -3490,7 +3491,7 @@ class ZIPln(_model): use_closed_form_prob: bool = True, ): """ - Create a model instance from a formula and data. + Create a ZIPln instance from a formula and data. Parameters ---------- @@ -3585,7 +3586,6 @@ class ZIPln(_model): return "with full covariance model and zero-inflation." def _random_init_model_parameters(self): - super()._random_init_model_parameters() self._coef_inflation = torch.randn(self.nb_cov, self.dim) self._coef = torch.randn(self.nb_cov, self.dim) self._components = torch.randn(self.nb_cov, self.dim) @@ -3595,7 +3595,10 @@ class ZIPln(_model): # init of _coef. super()._smart_init_coef() if not hasattr(self, "_covariance"): - self._components = _init_components(self._endog, self._exog, self.dim) + self._components = _init_components(self._endog, self.dim) + print('sum components', torch.sum(self._components)) + print('sum endog', torch.sum(self._endog)) + print('log det ', torch.logdet(self._components)) if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) -- GitLab From 843782828cc766b633fb085195bc4ff0fcdf050c Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 16 Oct 2023 19:46:36 +0200 Subject: [PATCH 100/167] fixed the elbo issu --- pyPLNmodels/_initialization.py | 2 +- pyPLNmodels/_utils.py | 13 +++++++++++++ pyPLNmodels/elbos.py | 12 ++---------- pyPLNmodels/models.py | 6 +++--- tests/conftest.py | 1 - 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index ac4472a6..410283bf 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -65,7 +65,7 @@ def _init_components( log_y = torch.log(endog + (endog == 0) * math.exp(-2)) pca = PCA(n_components=rank) pca.fit(log_y.detach().cpu()) - pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_ + 0.001) + pca_comp = pca.components_.T * np.sqrt(pca.explained_variance_) return torch.from_numpy(pca_comp).to(DEVICE) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index f5f02942..802e5f2e 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -1080,3 +1080,16 @@ def d_h_x3(a, x, y, dirac): rho = torch.sigmoid(a - torch.log(phi(x, y))) * dirac rho_prime = rho * (1 - rho) return -rho_prime * d_varpsi_x2(x, y) / phi(x, y) + +def vec_to_mat(C, p, q): + c = torch.zeros(p, q) + c[torch.tril_indices(p, q, offset=0).tolist()] = C + # c = C.reshape(p,q) + return c + + +def mat_to_vec(matc, p, q): + tril = torch.tril(matc) + # tril = matc.reshape(-1,1).squeeze() + return tril[torch.tril_indices(p, q, offset=0).tolist()] + diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 3862de9f..585b423d 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -266,19 +266,11 @@ def elbo_zi_pln( new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) K = sum_un_moins_rho_s2 + 
diag_sig_sum_rho + new inside_f =-1 / 2 * torch.diag(Omega) * K - print("inside_a",torch.sum(inside_a)) - print("inside_b",torch.sum(inside_b)) - print("inside_c",torch.sum(inside_c)) - print("inside_d",torch.sum(inside_d)) - print("inside_e",torch.sum(inside_e)) - print("inside_f",torch.sum(inside_f)) first = torch.sum(inside_a + inside_c + inside_e) - print('first', first) second = torch.sum(inside_b) - second -= n_samples * torch.logdet(components) - print('logdet', torch.logdet(components)) + _, logdet = torch.slogdet(components) + second -= n_samples *logdet third = torch.sum(inside_d + inside_f) third += n_samples*dim/2 - print('third', third) res = first + second + third return res diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 49298b4f..db723dba 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -32,6 +32,8 @@ from ._utils import ( _array2tensor, _handle_data, _add_doc, + vec_to_mat, + mat_to_vec, ) from ._initialization import ( @@ -3596,9 +3598,7 @@ class ZIPln(_model): super()._smart_init_coef() if not hasattr(self, "_covariance"): self._components = _init_components(self._endog, self.dim) - print('sum components', torch.sum(self._components)) - print('sum endog', torch.sum(self._endog)) - print('log det ', torch.logdet(self._components)) + if not hasattr(self, "_coef_inflation"): self._coef_inflation = torch.randn(self.nb_cov, self.dim) diff --git a/tests/conftest.py b/tests/conftest.py index e40558dd..85ca2aaf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,7 +37,6 @@ endog_real = data_real["endog"] endog_real = pd.DataFrame(endog_real) endog_real.columns = [f"var_{i}" for i in range(endog_real.shape[1])] - def add_fixture_to_dict(my_dict, string_fixture): my_dict[string_fixture] = [lf(string_fixture)] return my_dict -- GitLab From 41acf9e6788100bff4fa22f806aedb66492f5f4d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 17 Oct 2023 11:07:58 +0200 Subject: [PATCH 101/167] add tests for the zi model and implement some features to pass the tests for the zi --- pyPLNmodels/_utils.py | 2 +- pyPLNmodels/elbos.py | 7 +- pyPLNmodels/models.py | 144 +++++++++++++++++++++++++++++++++++++++--- tests/conftest.py | 6 ++ tests/test_common.py | 5 +- tests/test_zi.py | 73 +++++++++++++++++++++ 6 files changed, 222 insertions(+), 15 deletions(-) create mode 100644 tests/test_zi.py diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 802e5f2e..35b02806 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -1081,6 +1081,7 @@ def d_h_x3(a, x, y, dirac): rho_prime = rho * (1 - rho) return -rho_prime * d_varpsi_x2(x, y) / phi(x, y) + def vec_to_mat(C, p, q): c = torch.zeros(p, q) c[torch.tril_indices(p, q, offset=0).tolist()] = C @@ -1092,4 +1093,3 @@ def mat_to_vec(matc, p, q): tril = torch.tril(matc) # tril = matc.reshape(-1,1).squeeze() return tril[torch.tril_indices(p, q, offset=0).tolist()] - diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 585b423d..5a56bc3d 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -247,7 +247,6 @@ def elbo_zi_pln( 1 + torch.exp(x_coef_inflation) ) - log_diag = torch.log(torch.diag(covariance)) log_S_term = torch.sum( torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 @@ -265,12 +264,12 @@ def elbo_zi_pln( ) new = torch.sum(latent_prob * un_moins_rho * (m_minus_xb**2), axis=0) K = sum_un_moins_rho_s2 + diag_sig_sum_rho + new - inside_f =-1 / 2 * torch.diag(Omega) * K + inside_f = -1 / 2 * torch.diag(Omega) 
* K first = torch.sum(inside_a + inside_c + inside_e) second = torch.sum(inside_b) _, logdet = torch.slogdet(components) - second -= n_samples *logdet + second -= n_samples * logdet third = torch.sum(inside_d + inside_f) - third += n_samples*dim/2 + third += n_samples * dim / 2 res = first + second + third return res diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index db723dba..81d732b2 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -668,7 +668,6 @@ class _model(ABC): ) plt.show() - @property def _latent_var(self) -> torch.Tensor: """ @@ -1915,8 +1914,8 @@ class Pln(_model): return self.latent_mean.detach() @_add_doc( - _model, - example=""" + _model, + example=""" >>> from pyPLNmodels import Pln, get_real_count_data >>> endog, labels = get_real_count_data(return_labels = True) >>> pln = Pln(endog,add_const = True) @@ -1924,8 +1923,8 @@ class Pln(_model): >>> elbo = pln.compute_elbo() >>> print("elbo", elbo) >>> print("loglike/n", pln.loglike/pln.n_samples) - """ - ) + """, + ) def compute_elbo(self): return profiled_elbo_pln( self._endog, @@ -1934,6 +1933,7 @@ class Pln(_model): self._latent_mean, self._latent_sqrt_var, ) + @_add_doc(_model) def _compute_elbo_b(self): return profiled_elbo_pln( @@ -1943,6 +1943,7 @@ class Pln(_model): self._latent_mean_b, self._latent_sqrt_var_b, ) + @_add_doc(_model) def _smart_init_model_parameters(self): pass @@ -1952,6 +1953,7 @@ class Pln(_model): def _random_init_model_parameters(self): pass # no model parameters since we are doing a profiled ELBO + @_add_doc(_model) def _smart_init_latent_parameters(self): self._random_init_latent_sqrt_var() @@ -1969,6 +1971,7 @@ class Pln(_model): def _list_of_parameters_needing_gradient(self): return [self._latent_mean, self._latent_sqrt_var] + class PlnPCAcollection: """ A collection where value q corresponds to a PlnPCA object with rank q. @@ -3451,6 +3454,7 @@ class ZIPln(_model): >>> zi.fit() >>> print(zi) """ + self._use_closed_form_prob = use_closed_form_prob if exog is None and add_const is False: msg = "No covariates has been given. An intercept is added since " msg += "a ZIPln must have at least an intercept." @@ -3465,7 +3469,6 @@ class ZIPln(_model): take_log_offsets=take_log_offsets, add_const=add_const, ) - self._use_closed_form_prob = use_closed_form_prob def _extract_batch(self, batch): super()._extract_batch(batch) @@ -3490,7 +3493,7 @@ class ZIPln(_model): offsets_formula: str = "logsum", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, - use_closed_form_prob: bool = True, + use_closed_form_prob: bool = False, ): """ Create a ZIPln instance from a formula and data. @@ -3590,7 +3593,7 @@ class ZIPln(_model): def _random_init_model_parameters(self): self._coef_inflation = torch.randn(self.nb_cov, self.dim) self._coef = torch.randn(self.nb_cov, self.dim) - self._components = torch.randn(self.nb_cov, self.dim) + self._components = torch.randn(self.dim, self.dim) # should change the good initialization for _coef_inflation def _smart_init_model_parameters(self): @@ -3656,7 +3659,7 @@ class ZIPln(_model): @property def coef_inflation(self): """ - Property representing the coefficients of the zero inflated model. + Property representing the coefficients of the inflation. 
        Returns
        -------
        torch.Tensor or None
            The coefficients or None.
        """
        return self._cpu_attribute_or_none("_coef_inflation")

+    @coef_inflation.setter
+    @_array2tensor
+    def coef_inflation(
+        self, coef_inflation: Union[torch.Tensor, np.ndarray, pd.DataFrame]
+    ):
+        """
+        Setter for the coef_inflation property.
+
+        Parameters
+        ----------
+        coef_inflation : Union[torch.Tensor, np.ndarray, pd.DataFrame]
+            The coefficients of the inflation.
+
+        Raises
+        ------
+        ValueError
+            If the shape of coef_inflation is incorrect.
+        """
+        if coef_inflation.shape != (self.nb_cov, self.dim):
+            raise ValueError(
+                f"Wrong shape for coef_inflation. Expected {(self.nb_cov, self.dim)}, got {coef_inflation.shape}"
+            )
+        self._coef_inflation = coef_inflation
+
+    @_model.latent_sqrt_var.setter
+    @_array2tensor
+    def latent_sqrt_var(
+        self, latent_sqrt_var: Union[torch.Tensor, np.ndarray, pd.DataFrame]
+    ):
+        """
+        Setter for the latent square root variance property.
+
+        Parameters
+        ----------
+        latent_sqrt_var : Union[torch.Tensor, np.ndarray, pd.DataFrame]
+            The latent square root of the variance.
+
+        Raises
+        ------
+        ValueError
+            If the shape of the latent square root variance is incorrect.
+        """
+        if latent_sqrt_var.shape != (self.n_samples, self.dim):
+            raise ValueError(
+                f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_sqrt_var.shape}"
+            )
+        self._latent_sqrt_var = latent_sqrt_var
+
     def _update_parameters(self):
         super()._update_parameters()
         self._project_latent_prob()
@@ -3695,10 +3746,51 @@ class ZIPln(_model):
         """
         return self._cpu_attribute_or_none("_covariance")

+    @components.setter
+    @_array2tensor
+    def components(self, components: torch.Tensor):
+        """
+        Setter for the components.
+
+        Parameters
+        ----------
+        components : torch.Tensor
+            The components to set.
+
+        Raises
+        ------
+        ValueError
+            If the components have an invalid shape.
+        """
+        if components.shape != (self.dim, self.dim):
+            raise ValueError(
+                f"Wrong shape. Expected {self.dim, self.dim}, got {components.shape}"
+            )
+        self._components = components
+
     @property
     def latent_prob(self):
         return self._cpu_attribute_or_none("_latent_prob")

+    @latent_prob.setter
+    @_array2tensor
+    def latent_prob(self, latent_prob: Union[torch.Tensor, np.ndarray, pd.DataFrame]):
+        if self._use_closed_form_prob is True:
+            raise ValueError(
+                "Cannot set the latent prob when the closed form is used."
+            )
+        if latent_prob.shape != (self.n_samples, self.dim):
+            raise ValueError(
+                f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_prob.shape}"
+            )
+        if torch.max(latent_prob) > 1 or torch.min(latent_prob) < 0:
+            raise ValueError("Wrong value. All values should be between 0 and 1.")
+        if torch.norm(latent_prob * (self._endog == 0) - latent_prob) > 0.00000001:
+            raise ValueError(
+                "You cannot assign non-zero inflation probabilities to non-zero counts."
+            )
+        self._latent_prob = latent_prob
+
     @property
     def closed_formula_latent_prob(self):
         """
@@ -3781,6 +3873,40 @@ class ZIPln(_model):
             "coef_inflation": self.coef_inflation,
         }

+    def predict_prob_inflation(
+        self, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame]
+    ):
+        """
+        Method for estimating the probability of a zero coming from the zero-inflated component.
+
+        Parameters
+        ----------
+        exog : Union[torch.Tensor, np.ndarray, pd.DataFrame]
+            The exog.
+
+        Returns
+        -------
+        torch.Tensor
+            The predicted values.
+
+        Raises
+        ------
+        RuntimeError
+            If the shape of the exog is incorrect.
+
+        Notes
+        -----
+        - The mean sigmoid(exog @ coef_inflation) is returned.
+ - `exog` should have the shape `(_, nb_cov)`, where `nb_cov` is the number of exog variables. + """ + if exog is not None and self.nb_cov == 0: + raise AttributeError("No exog in the model, can't predict") + if exog.shape[-1] != self.nb_cov: + error_string = f"X has wrong shape ({exog.shape}). Should" + error_string += f" be (_, {self.nb_cov})." + raise RuntimeError(error_string) + return torch.sigmoid(exog @ self.coef_inflation) + @property @_add_doc(_model) def latent_parameters(self): diff --git a/tests/conftest.py b/tests/conftest.py index 85ca2aaf..93e50ab5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,6 +37,7 @@ endog_real = data_real["endog"] endog_real = pd.DataFrame(endog_real) endog_real.columns = [f"var_{i}" for i in range(endog_real.shape[1])] + def add_fixture_to_dict(my_dict, string_fixture): my_dict[string_fixture] = [lf(string_fixture)] return my_dict @@ -219,6 +220,11 @@ dict_fixtures = add_list_of_fixture_to_dict( dict_fixtures, "sim_model_0cov", sim_model_0cov ) +sim_model_0cov_fitted_and_loaded = sim_model_0cov_fitted + sim_model_0cov_loaded +dict_fixtures = add_list_of_fixture_to_dict( + dict_fixtures, "sim_model_0cov_fitted_and_loaded", sim_model_0cov_fitted_and_loaded +) + @pytest.fixture(params=params) @cache diff --git a/tests/test_common.py b/tests/test_common.py index cec97a72..0aa81d54 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -85,8 +85,11 @@ def test_find_right_coef(simulated_fitted_any_model): @filter_models(single_models) def test_fail_count_setter(model): wrong_endog = torch.randint(size=(10, 5), low=0, high=10) - with pytest.raises(Exception): + negative_endog = -model._endog + with pytest.raises(ValueError): model.endog = wrong_endog + with pytest.raises(ValueError): + model.endog = negative_endog @pytest.mark.parametrize("instance", dict_fixtures["instances"]) diff --git a/tests/test_zi.py b/tests/test_zi.py new file mode 100644 index 00000000..4ba5af04 --- /dev/null +++ b/tests/test_zi.py @@ -0,0 +1,73 @@ +import pytest +import torch + +from pyPLNmodels import get_simulation_parameters, sample_pln, ZIPln +from tests.conftest import dict_fixtures +from tests.utils import filter_models, MSE + + +@pytest.mark.parametrize("zi", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_properties(zi): + assert hasattr(zi, "latent_prob") + assert hasattr(zi, "coef_inflation") + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_predict(model): + X = torch.randn((model.n_samples, model.nb_cov)) + prediction = model.predict(X) + expected = X @ model.coef + assert torch.all(torch.eq(expected, prediction)) + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_predict_prob(model): + X = torch.randn((model.n_samples, model.nb_cov)) + prediction = model.predict_prob_inflation(X) + expected = torch.sigmoid(X @ model.coef_inflation) + assert torch.all(torch.eq(expected, prediction)) + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_fail_predict_prob(model): + X1 = torch.randn((model.n_samples, model.nb_cov + 1)) + X2 = torch.randn((model.n_samples, model.nb_cov - 1)) + with pytest.raises(RuntimeError): + model.predict_prob_inflation(X1) + with pytest.raises(RuntimeError): + model.predict_prob_inflation(X2) + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) 
+def test_fail_predict(model): + X1 = torch.randn((model.n_samples, model.nb_cov + 1)) + X2 = torch.randn((model.n_samples, model.nb_cov - 1)) + with pytest.raises(RuntimeError): + model.predict(X1) + with pytest.raises(RuntimeError): + model.predict(X2) + + +@pytest.mark.parametrize("model", dict_fixtures["sim_model_0cov_fitted_and_loaded"]) +@filter_models(["ZIPln"]) +def test_no_exog_not_possible(model): + assert model.nb_cov == 1 + assert model._coef_inflation.shape[0] == 1 + + +def test_find_right_covariance_and_coef(): + pln_param = get_simulation_parameters( + n_samples=300, dim=50, nb_cov=2, rank=5, add_const=True + ) + pln_param._coef += 5 + endog = sample_pln(pln_param, seed=0, return_latent=False) + zi = ZIPln(endog, exog=pln_param.exog, offsets=pln_param.offsets) + zi.fit() + mse_covariance = MSE(zi.covariance - pln_param.covariance) + mse_coef = MSE(zi.coef) + assert mse_covariance < 0.5 -- GitLab From 8db7068fb10946c76ce7f4259c442d0c37a4e388 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 17 Oct 2023 19:14:29 +0200 Subject: [PATCH 102/167] write the tests for the zi, the batch size and rewrite the tests for the collection. Need to pass the tests on the getting started. --- .gitignore | 1 + pyPLNmodels/_utils.py | 76 +++++++++-- pyPLNmodels/models.py | 142 +++++++++++--------- tests/conftest.py | 10 +- tests/test_common.py | 16 ++- tests/test_pln_full.py | 2 +- tests/test_plnpcacollection.py | 47 +++++-- tests/test_setters.py | 231 ++++++++++++++++++--------------- tests/test_viz.py | 108 +++++++-------- tests/test_zi.py | 72 ++++++++-- 10 files changed, 452 insertions(+), 253 deletions(-) diff --git a/.gitignore b/.gitignore index a95ada79..c00f1395 100644 --- a/.gitignore +++ b/.gitignore @@ -159,3 +159,4 @@ paper/* tests/test_models* tests/test_load* tests/test_readme* +Getting_started.py diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 35b02806..1082259e 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -494,8 +494,12 @@ def _get_simulation_components(dim: int, rank: int) -> torch.Tensor: return components.to("cpu") -def _get_simulation_coef_cov_offsets( - n_samples: int, nb_cov: int, dim: int, add_const: bool +def _get_simulation_coef_cov_offsets_coefzi( + n_samples: int, + nb_cov: int, + dim: int, + add_const: bool, + zero_inflated: bool, ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """ Get offsets, covariance coefficients with right shapes. @@ -513,6 +517,8 @@ def _get_simulation_coef_cov_offsets( Dimension required of the data. add_const : bool, optional If True, will add a vector of ones in the exog. + zero_inflated : bool + If True, will return a zero_inflated coefficient. Returns ------- @@ -537,14 +543,23 @@ def _get_simulation_coef_cov_offsets( if add_const is True: exog = torch.cat((exog, torch.ones(n_samples, 1)), axis=1) if exog is None: + if zero_inflated is True: + msg = "Can not instantiate a zero inflate model without covariates." 
+ msg += " Please give at least an intercept by setting add_const to True" + raise ValueError(msg) coef = None + coef_inflation = None else: coef = torch.randn(exog.shape[1], dim, device="cpu") + if zero_inflated is True: + coef_inflation = torch.randn(exog.shape[1], dim, device="cpu") + else: + coef_inflation = None offsets = torch.randint( low=0, high=2, size=(n_samples, dim), dtype=torch.float64, device="cpu" ) torch.random.set_rng_state(prev_state) - return coef, exog, offsets + return coef, exog, offsets, coef_inflation class PlnParameters: @@ -555,7 +570,7 @@ class PlnParameters: coef: Union[torch.Tensor, np.ndarray, pd.DataFrame], exog: Union[torch.Tensor, np.ndarray, pd.DataFrame], offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame], - coef_inflation=None, + coef_inflation: Union[torch.Tensor, np.ndarray, pd.DataFrame, None] = None, ): """ Instantiate all the needed parameters to sample from the PLN model. @@ -570,9 +585,8 @@ class PlnParameters: Covariates, size (n, d) or None offsets : : Union[torch.Tensor, np.ndarray, pd.DataFrame](keyword-only) Offset, size (n, p) - _coef_inflation : : Union[torch.Tensor, np.ndarray, pd.DataFrame] or None, optional(keyword-only) + coef_inflation : Union[torch.Tensor, np.ndarray, pd.DataFrame, None], optional(keyword-only) Coefficient for zero-inflation model, size (d, p) or None. Default is None. - """ self._components = _format_data(components) self._coef = _format_data(coef) @@ -713,6 +727,7 @@ def get_simulation_parameters( nb_cov: int = 1, rank: int = 5, add_const: bool = True, + zero_inflated: bool = False, ) -> PlnParameters: """ Generate simulation parameters for a Poisson-lognormal model. @@ -731,18 +746,26 @@ def get_simulation_parameters( The rank of the data components, by default 5. add_const : bool, optional(keyword-only) If True, will add a vector of ones in the exog. + zero_inflated : bool, optional(keyword-only) + If True, the model will be zero inflated. + Default is False. Returns ------- PlnParameters The generated simulation parameters. - """ - coef, exog, offsets = _get_simulation_coef_cov_offsets( - n_samples, nb_cov, dim, add_const + coef, exog, offsets, coef_inflation = _get_simulation_coef_cov_offsets_coefzi( + n_samples, nb_cov, dim, add_const, zero_inflated ) components = _get_simulation_components(dim, rank) - return PlnParameters(components=components, coef=coef, exog=exog, offsets=offsets) + return PlnParameters( + components=components, + coef=coef, + exog=exog, + offsets=offsets, + coef_inflation=coef_inflation, + ) def get_simulated_count_data( @@ -753,6 +776,7 @@ def get_simulated_count_data( nb_cov: int = 1, return_true_param: bool = False, add_const: bool = True, + zero_inflated=False, seed: int = 0, ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: """ @@ -772,19 +796,45 @@ def get_simulated_count_data( Number of exog, by default 1. return_true_param : bool, optional(keyword-only) Whether to return the true parameters of the model, by default False. + zero_inflated: bool, optional(keyword-only) + Whether to use a zero inflated model or not. + Default to False. seed : int, optional(keyword-only) Seed value for random number generation, by default 0. Returns ------- - Tuple[torch.Tensor, torch.Tensor, torch.Tensor] - Tuple containing endog, exog, and offsets. + if return_true_param is False: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor] + Tuple containing endog, exog, and offsets. 
+ else: + if zero_inflated is True: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor] + Tuple containing endog, exog, offsets, covariance, coef, coef_inflation . + else: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor] + Tuple containing endog, exog, offsets, covariance, coef. + """ pln_param = get_simulation_parameters( - n_samples=n_samples, dim=dim, nb_cov=nb_cov, rank=rank, add_const=add_const + n_samples=n_samples, + dim=dim, + nb_cov=nb_cov, + rank=rank, + add_const=add_const, + zero_inflated=zero_inflated, ) endog = sample_pln(pln_param, seed=seed, return_latent=False) if return_true_param is True: + if zero_inflated is True: + return ( + endog, + pln_param.exog, + pln_param.offsets, + pln_param.covariance, + pln_param.coef, + pln_param.coef_inflation, + ) return ( endog, pln_param.exog, diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 81d732b2..01f9353b 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -99,10 +99,6 @@ class _model(ABC): Whether to take the log of offsets. Defaults to False. add_const: bool, optional(keyword-only) Whether to add a column of one in the exog. Defaults to True. - Raises - ------ - ValueError - If the batch_size is greater than the number of samples, or not int. """ ( self._endog, @@ -116,6 +112,7 @@ class _model(ABC): self._criterion_args = _CriterionArgs() if dict_initialization is not None: self._set_init_parameters(dict_initialization) + self._dirac = self._endog == 0 @classmethod def from_formula( @@ -253,7 +250,10 @@ class _model(ABC): def _handle_batch_size(self, batch_size): if batch_size is None: - batch_size = self.n_samples + if hasattr(self, "batch_size"): + batch_size = self.batch_size + else: + batch_size = self.n_samples if batch_size > self.n_samples: raise ValueError( f"batch_size ({batch_size}) can not be greater than the number of samples ({self.n_samples})" @@ -385,6 +385,10 @@ class _model(ABC): batch_size: int, optional(keyword-only) The batch size when optimizing the elbo. If None, batch gradient descent will be performed (i.e. batch_size = n_samples). + Raises + ------ + ValueError + If the batch_size is greater than the number of samples, or not int. """ self._print_beginning_message() self._beginning_time = time.time() @@ -531,7 +535,7 @@ class _model(ABC): def sk_PCA(self, n_components=None): """ - Perform PCA on the latent variables. + Perform the scikit-learn PCA on the latent variables. 
Parameters ---------- @@ -553,8 +557,9 @@ class _model(ABC): raise ValueError( f"You ask more components ({n_components}) than variables ({self.dim})" ) + latent_variables = self.transform() pca = PCA(n_components=n_components) - pca.fit(self.latent_variables.cpu()) + pca.fit(latent_variables.cpu()) return pca @property @@ -595,9 +600,9 @@ class _model(ABC): f"You ask more components ({n_components}) than variables ({self.dim})" ) pca = self.sk_PCA(n_components=n_components) - proj_variables = pca.transform(self.latent_variables) + latent_variables = self.transform() + proj_variables = pca.transform(latent_variables) components = torch.from_numpy(pca.components_) - labels = { str(i): f"PC{i+1}: {np.round(pca.explained_variance_ratio_*100, 1)[i]}%" for i in range(n_components) @@ -655,7 +660,7 @@ class _model(ABC): n_components = 2 pca = self.sk_PCA(n_components=n_components) - variables = self.latent_variables + variables = self.transform() proj_variables = pca.transform(variables) ## the package is not correctly printing the variance ratio figure, correlation_matrix = plot_pca_correlation_graph( @@ -1808,51 +1813,6 @@ class Pln(_model): 1 / 2 * torch.ones((self.n_samples, self.dim)).to(DEVICE) ) - @property - @_add_doc( - _model, - example=""" - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> print(pln.latent_variables.shape) - """, - ) - def latent_variables(self): - return self.latent_mean.detach() - - @_add_doc( - _model, - example=""" - >>> from pyPLNmodels import Pln, get_real_count_data - >>> endog, labels = get_real_count_data(return_labels = True) - >>> pln = Pln(endog,add_const = True) - >>> pln.fit() - >>> elbo = pln.compute_elbo() - >>> print("elbo", elbo) - >>> print("loglike/n", pln.loglike/pln.n_samples) - """, - ) - def compute_elbo(self): - return profiled_elbo_pln( - self._endog, - self._exog, - self._offsets, - self._latent_mean, - self._latent_sqrt_var, - ) - - @_add_doc(_model) - def _compute_elbo_b(self): - return profiled_elbo_pln( - self._endog_b, - self._exog_b, - self._offsets_b, - self._latent_mean_b, - self._latent_sqrt_var_b, - ) - @_add_doc(_model) def _smart_init_model_parameters(self): pass @@ -2057,7 +2017,7 @@ class PlnPCAcollection: endog, exog, offsets, offsets_formula, take_log_offsets, add_const ) self._fitted = False - self._init_models(ranks, dict_of_dict_initialization) + self._init_models(ranks, dict_of_dict_initialization, add_const=add_const) @classmethod def from_formula( @@ -2129,6 +2089,18 @@ class PlnPCAcollection: """ return self[self.ranks[0]].exog + @property + def batch_size(self) -> torch.Tensor: + """ + Property representing the batch_size. + + Returns + ------- + torch.Tensor + The batch_size. + """ + return self[self.ranks[0]].batch_size + @property def endog(self) -> torch.Tensor: """ @@ -2203,6 +2175,19 @@ class PlnPCAcollection: for model in self.values(): model.endog = endog + @batch_size.setter + def batch_size(self, batch_size: int): + """ + Setter for the batch_size property. + + Parameters + ---------- + batch_size : int + The batch size. 
+ """ + for model in self.values(): + model.batch_size = batch_size + @coef.setter @_array2tensor def coef(self, coef: Union[torch.Tensor, np.ndarray, pd.DataFrame]): @@ -2258,7 +2243,10 @@ class PlnPCAcollection: model.offsets = offsets def _init_models( - self, ranks: Iterable[int], dict_of_dict_initialization: Optional[dict] + self, + ranks: Iterable[int], + dict_of_dict_initialization: Optional[dict], + add_const: bool, ): """ Method for initializing the models. @@ -2282,6 +2270,7 @@ class PlnPCAcollection: offsets=self._offsets, rank=rank, dict_initialization=dict_initialization, + add_const=add_const, ) else: raise TypeError( @@ -2389,6 +2378,10 @@ class PlnPCAcollection: batch_size: int, optional(keyword-only) The batch size when optimizing the elbo. If None, batch gradient descent will be performed (i.e. batch_size = n_samples). + Raises + ------ + ValueError + If the batch_size is greater than the number of samples, or not int. """ self._print_beginning_message() for i in range(len(self.values())): @@ -3606,7 +3599,6 @@ class ZIPln(_model): self._coef_inflation = torch.randn(self.nb_cov, self.dim) def _random_init_latent_parameters(self): - self._dirac = self._endog == 0 self._latent_mean = torch.randn(self.n_samples, self.dim) self._latent_sqrt_var = torch.randn(self.n_samples, self.dim) self._latent_prob = ( @@ -3621,6 +3613,17 @@ class ZIPln(_model): def _covariance(self): return self._components @ (self._components.T) + def _get_max_components(self): + """ + Method for getting the maximum number of components. + + Returns + ------- + int + The maximum number of components. + """ + return self.dim + @property def components(self) -> torch.Tensor: """ @@ -3656,6 +3659,27 @@ class ZIPln(_model): """ return self.latent_mean, self.latent_prob + def transform(self, return_latent_prob=False): + """ + Method for transforming the endog. Can be seen as a normalization of the endog. + + Parameters + ---------- + return_latent_prob: bool, optional + Wheter to return or not the latent_probability of zero inflation. + Returns + ------- + The latent mean if `return_latent_prob` is False and (latent_mean, latent_prob) else. 
+ """ + if return_latent_prob is True: + return self.latent_variables + return self.latent_mean + + def _endog_predictions(self): + return torch.exp( + self._offsets + self._latent_mean + 1 / 2 * self._latent_sqrt_var**2 + ) * (1 - self._latent_prob) + @property def coef_inflation(self): """ @@ -3730,7 +3754,7 @@ class ZIPln(_model): self._latent_prob_b, torch.tensor([0]), out=self._latent_prob_b ) self._latent_prob_b = torch.minimum( - self._latent_prob, torch.tensor([1]), out=self._latent_prob_b + self._latent_prob_b, torch.tensor([1]), out=self._latent_prob_b ) self._latent_prob_b *= self._dirac_b diff --git a/tests/conftest.py b/tests/conftest.py index 93e50ab5..22ea4307 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ import sys import glob -from functools import singledispatch import pytest import torch from pytest_lazyfixture import lazy_fixture as lf @@ -8,6 +7,7 @@ import pandas as pd from pyPLNmodels import load_model, load_plnpcacollection from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln +from pyPLNmodels import get_simulated_count_data sys.path.append("../") @@ -206,6 +206,7 @@ dict_fixtures = add_list_of_fixture_to_dict( dict_fixtures, "sim_model_0cov_fitted", sim_model_0cov_fitted ) + sim_model_0cov_loaded = [ "simulated_loaded_model_0cov_array", "simulated_loaded_model_0cov_formula", @@ -285,12 +286,17 @@ sim_model_2cov_instance = [ "simulated_model_2cov_array", "simulated_model_2cov_formula", ] +sim_model_instance = sim_model_0cov_instance + sim_model_2cov_instance + +dict_fixtures = add_list_of_fixture_to_dict( + dict_fixtures, "sim_model_instance", sim_model_instance +) instances = sim_model_2cov_instance + instances + dict_fixtures = add_list_of_fixture_to_dict( dict_fixtures, "sim_model_2cov_instance", sim_model_2cov_instance ) - sim_model_2cov_fitted = [ "simulated_fitted_model_2cov_array", "simulated_fitted_model_2cov_formula", diff --git a/tests/test_common.py b/tests/test_common.py index 0aa81d54..6cba2cf6 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -8,8 +8,8 @@ from tests.utils import MSE, filter_models from tests.import_data import true_sim_0cov, true_sim_2cov, endog_real -single_models = ["Pln", "PlnPCA", "ZIPln"] pln_and_plnpca = ["Pln", "PlnPCA"] +single_models = ["Pln", "PlnPCA", "ZIPln"] @pytest.mark.parametrize("any_model", dict_fixtures["loaded_and_fitted_model"]) @@ -108,3 +108,17 @@ def test_fail_wrong_exog_prediction(model): X = torch.randn(model.n_samples, model.nb_cov + 1) with pytest.raises(Exception): model.predict(X) + + +@pytest.mark.parametrize("model", dict_fixtures["sim_model_instance"]) +@filter_models(pln_and_plnpca) +def test_batch(model): + model.fit(batch_size=20) + print(model) + model.show() + if model.nb_cov == 2: + true_coef = true_sim_2cov["beta"] + mse_coef = MSE(model.coef - true_coef) + assert mse_coef < 0.1 + elif model.nb_cov == 0: + assert model.coef is None diff --git a/tests/test_pln_full.py b/tests/test_pln_full.py index 1115e1ec..6a8ced3a 100644 --- a/tests/test_pln_full.py +++ b/tests/test_pln_full.py @@ -13,5 +13,5 @@ def test_number_of_iterations_pln_full(fitted_pln): @pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["Pln"]) -def test_latent_var_full(pln): +def test_latent_variables(pln): assert pln.transform().shape == pln.endog.shape diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index 6634f2d2..19b49b18 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -6,16 
+6,17 @@ import numpy as np from tests.conftest import dict_fixtures from tests.utils import MSE, filter_models +from tests.import_data import true_sim_0cov, true_sim_2cov -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_best_model(plnpca): best_model = plnpca.best_model() print(best_model) -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_projected_variables(plnpca): best_model = plnpca.best_model() @@ -23,21 +24,20 @@ def test_projected_variables(plnpca): assert plv.shape[0] == best_model.n_samples and plv.shape[1] == best_model.rank -@pytest.mark.parametrize("fitted_pln", dict_fixtures["fitted_pln"]) -@filter_models(["PlnPCA"]) -def test_number_of_iterations_plnpca(fitted_pln): - nb_iterations = len(fitted_pln._elbos_list) - assert 100 < nb_iterations < 5000 +@pytest.mark.parametrize("plnpca", dict_fixtures["sim_model_instance"]) +@filter_models(["PlnPCAcollection"]) +def test_right_nbcov(plnpca): + assert plnpca.nb_cov == 0 or plnpca.nb_cov == 2 -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCA"]) def test_latent_var_pca(plnpca): assert plnpca.transform(project=False).shape == plnpca.endog.shape assert plnpca.transform().shape == (plnpca.n_samples, plnpca.rank) -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_additional_methods_pca(plnpca): plnpca.show() @@ -46,14 +46,14 @@ def test_additional_methods_pca(plnpca): plnpca.loglikes -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_wrong_criterion(plnpca): with pytest.raises(ValueError): plnpca.best_model("AIK") -@pytest.mark.parametrize("collection", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("collection", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_item(collection): print(collection[collection.ranks[0]]) @@ -62,3 +62,28 @@ def test_item(collection): assert collection.ranks[0] in collection assert collection.ranks[0] in list(collection.keys()) collection.get(collection.ranks[0], None) + + +@pytest.mark.parametrize("collection", dict_fixtures["sim_model_instance"]) +@filter_models(["PlnPCAcollection"]) +def test_batch(collection): + collection.fit(batch_size=20) + assert collection.nb_cov == 0 or collection.nb_cov == 2 + if collection.nb_cov == 0: + true_covariance = true_sim_0cov["Sigma"] + for model in collection.values(): + assert model.coef is None + true_coef = None + elif collection.nb_cov == 2: + true_covariance = true_sim_2cov["Sigma"] + true_coef = true_sim_2cov["beta"] + else: + raise ValueError(f"Not the right numbers of covariance({collection.nb_cov})") + for model in collection.values(): + mse_covariance = MSE(model.covariance - true_covariance) + if true_coef is not None: + mse_coef = MSE(model.coef - true_coef) + assert mse_coef < 0.35 + assert mse_covariance < 0.25 + collection.fit() + assert collection.batch_size == collection.n_samples 
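
The collection-level batch test above mirrors what a user would run directly. A minimal sketch of that workflow, using only calls that appear elsewhere in these patches (get_real_count_data, fit(batch_size=...), the batch_size property, best_model); the ranks and batch size are illustrative:

    from pyPLNmodels import PlnPCAcollection, get_real_count_data

    endog, labels = get_real_count_data(return_labels=True)
    collection = PlnPCAcollection(endog, add_const=True, ranks=[3, 5])
    collection.fit(batch_size=20)            # mini-batch gradient steps on the ELBO
    print(collection.batch_size)             # shared by every PlnPCA in the collection
    print(collection.best_model(criterion="BIC"))
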
diff --git a/tests/test_setters.py b/tests/test_setters.py index eb7814d7..b3012548 100644 --- a/tests/test_setters.py +++ b/tests/test_setters.py @@ -5,148 +5,169 @@ import torch from tests.conftest import dict_fixtures from tests.utils import MSE, filter_models - -@pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) -def test_data_setter_with_torch(pln): - pln.endog = pln.endog - pln.exog = pln.exog - pln.offsets = pln.offsets - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_parameters_setter_with_torch(pln): - pln.latent_mean = pln.latent_mean - pln.latent_sqrt_var = pln.latent_sqrt_var - if pln._NAME != "Pln": - pln.coef = pln.coef - if pln._NAME == "PlnPCA": - pln.components = pln.components - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) -def test_data_setter_with_numpy(pln): - np_endog = pln.endog.numpy() - if pln.exog is not None: - np_exog = pln.exog.numpy() +single_models = ["Pln", "PlnPCA", "ZIPln"] + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_data_setter_with_torch(model): + model.endog = model.endog + model.exog = model.exog + model.offsets = model.offsets + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_parameters_setter_with_torch(model): + model.latent_mean = model.latent_mean + model.latent_sqrt_var = model.latent_sqrt_var + if model._NAME != "Pln": + model.coef = model.coef + if model._NAME == "PlnPCA" or model._NAME == "ZIPln": + model.components = model.components + if model._NAME == "ZIPln": + model.coef_inflation = model.coef_inflation + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_data_setter_with_numpy(model): + np_endog = model.endog.numpy() + if model.exog is not None: + np_exog = model.exog.numpy() else: np_exog = None - np_offsets = pln.offsets.numpy() - pln.endog = np_endog - pln.exog = np_exog - pln.offsets = np_offsets - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_parameters_setter_with_numpy(pln): - np_latent_mean = pln.latent_mean.numpy() - np_latent_sqrt_var = pln.latent_sqrt_var.numpy() - if pln.coef is not None: - np_coef = pln.coef.numpy() + np_offsets = model.offsets.numpy() + model.endog = np_endog + model.exog = np_exog + model.offsets = np_offsets + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_parameters_setter_with_numpy(model): + np_latent_mean = model.latent_mean.numpy() + np_latent_sqrt_var = model.latent_sqrt_var.numpy() + if model.coef is not None: + np_coef = model.coef.numpy() else: np_coef = None - pln.latent_mean = np_latent_mean - pln.latent_sqrt_var = np_latent_sqrt_var - if pln._NAME != "Pln": - pln.coef = np_coef - if pln._NAME == "PlnPCA": - pln.components = pln.components.numpy() - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) -def test_data_setter_with_pandas(pln): - pd_endog = pd.DataFrame(pln.endog.numpy()) - if pln.exog is not None: - pd_exog = pd.DataFrame(pln.exog.numpy()) + model.latent_mean = np_latent_mean + model.latent_sqrt_var = np_latent_sqrt_var + if model._NAME != "Pln": + model.coef = np_coef + if model._NAME == "PlnPCA" or model._NAME == "ZIPln": + model.components = model.components.numpy() + if model._NAME == "ZIPln": + model.coef_inflation 
= model.coef_inflation.numpy() + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_batch_size_setter(model): + model.batch_size = 20 + model.fit(nb_max_iteration=3) + assert model.batch_size == 20 + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_fail_batch_size_setter(model): + with pytest.raises(ValueError): + model.batch_size = model.n_samples + 1 + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_data_setter_with_pandas(model): + pd_endog = pd.DataFrame(model.endog.numpy()) + if model.exog is not None: + pd_exog = pd.DataFrame(model.exog.numpy()) else: pd_exog = None - pd_offsets = pd.DataFrame(pln.offsets.numpy()) - pln.endog = pd_endog - pln.exog = pd_exog - pln.offsets = pd_offsets - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_parameters_setter_with_pandas(pln): - pd_latent_mean = pd.DataFrame(pln.latent_mean.numpy()) - pd_latent_sqrt_var = pd.DataFrame(pln.latent_sqrt_var.numpy()) - if pln.coef is not None: - pd_coef = pd.DataFrame(pln.coef.numpy()) + pd_offsets = pd.DataFrame(model.offsets.numpy()) + model.endog = pd_endog + model.exog = pd_exog + model.offsets = pd_offsets + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_parameters_setter_with_pandas(model): + pd_latent_mean = pd.DataFrame(model.latent_mean.numpy()) + pd_latent_sqrt_var = pd.DataFrame(model.latent_sqrt_var.numpy()) + if model.coef is not None: + pd_coef = pd.DataFrame(model.coef.numpy()) else: pd_coef = None - pln.latent_mean = pd_latent_mean - pln.latent_sqrt_var = pd_latent_sqrt_var - if pln._NAME != "Pln": - pln.coef = pd_coef - if pln._NAME == "PlnPCA": - pln.components = pd.DataFrame(pln.components.numpy()) - pln.fit() - - -@pytest.mark.parametrize("pln", dict_fixtures["all_pln"]) -def test_fail_data_setter_with_torch(pln): + model.latent_mean = pd_latent_mean + model.latent_sqrt_var = pd_latent_sqrt_var + if model._NAME != "Pln": + model.coef = pd_coef + if model._NAME == "PlnPCA": + model.components = pd.DataFrame(model.components.numpy()) + if model._NAME == "ZIPln": + model.coef_inflation = pd.DataFrame(model.coef_inflation.numpy()) + model.fit() + + +@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +def test_fail_data_setter_with_torch(model): with pytest.raises(ValueError): - pln.endog = pln.endog - 100 + model.endog = -model.endog - n, p = pln.endog.shape - if pln.exog is None: + n, p = model.endog.shape + if model.exog is None: d = 0 else: - d = pln.exog.shape[-1] + d = model.exog.shape[-1] with pytest.raises(ValueError): - pln.endog = torch.zeros(n + 1, p) + model.endog = torch.zeros(n + 1, p) with pytest.raises(ValueError): - pln.endog = torch.zeros(n, p + 1) + model.endog = torch.zeros(n, p + 1) with pytest.raises(ValueError): - pln.exog = torch.zeros(n + 1, d) + model.exog = torch.zeros(n + 1, d) with pytest.raises(ValueError): - pln.offsets = torch.zeros(n + 1, p) + model.offsets = torch.zeros(n + 1, p) with pytest.raises(ValueError): - pln.offsets = torch.zeros(n, p + 1) + model.offsets = torch.zeros(n, p + 1) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_fail_parameters_setter_with_torch(pln): - n, dim_latent = pln.latent_mean.shape - dim = pln.endog.shape[1] +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) 
+@filter_models(single_models) +def test_fail_parameters_setter_with_torch(model): + n, dim_latent = model.latent_mean.shape + dim = model.endog.shape[1] with pytest.raises(ValueError): - pln.latent_mean = torch.zeros(n + 1, dim_latent) + model.latent_mean = torch.zeros(n + 1, dim_latent) with pytest.raises(ValueError): - pln.latent_mean = torch.zeros(n, dim_latent + 1) + model.latent_mean = torch.zeros(n, dim_latent + 1) with pytest.raises(ValueError): - pln.latent_sqrt_var = torch.zeros(n + 1, dim_latent) + model.latent_sqrt_var = torch.zeros(n + 1, dim_latent) with pytest.raises(ValueError): - pln.latent_sqrt_var = torch.zeros(n, dim_latent + 1) + model.latent_sqrt_var = torch.zeros(n, dim_latent + 1) - if pln._NAME == "PlnPCA": + if model._NAME == "PlnPCA": with pytest.raises(ValueError): - pln.components = torch.zeros(dim, dim_latent + 1) + model.components = torch.zeros(dim, dim_latent + 1) with pytest.raises(ValueError): - pln.components = torch.zeros(dim + 1, dim_latent) + model.components = torch.zeros(dim + 1, dim_latent) - if pln.exog is None: + if model.exog is None: d = 0 else: - d = pln.exog.shape[-1] - if pln._NAME != "Pln": + d = model.exog.shape[-1] + if model._NAME != "Pln": with pytest.raises(ValueError): - pln.coef = torch.zeros(d + 1, dim) + model.coef = torch.zeros(d + 1, dim) with pytest.raises(ValueError): - pln.coef = torch.zeros(d, dim + 1) + model.coef = torch.zeros(d, dim + 1) diff --git a/tests/test_viz.py b/tests/test_viz.py index be24fcf1..d4f9a738 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -7,47 +7,49 @@ from tests.utils import MSE, filter_models from tests.import_data import true_sim_0cov, true_sim_2cov, labels_real +single_models = ["Pln", "PlnPCA", "ZIPln"] -@pytest.mark.parametrize("any_pln", dict_fixtures["loaded_and_fitted_pln"]) -def test_print(any_pln): - print(any_pln) - - -@pytest.mark.parametrize("any_pln", dict_fixtures["fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_show_coef_transform_covariance_pcaprojected(any_pln): - any_pln.show() - any_pln._plotargs._show_loss() - any_pln._plotargs._show_stopping_criterion() - assert hasattr(any_pln, "coef") - assert callable(any_pln.transform) - assert hasattr(any_pln, "covariance") - assert callable(any_pln.sk_PCA) - assert any_pln.sk_PCA(n_components=None) is not None + +@pytest.mark.parametrize("any_model", dict_fixtures["loaded_and_fitted_model"]) +def test_print(any_model): + print(any_model) + + +@pytest.mark.parametrize("any_model", dict_fixtures["fitted_model"]) +@filter_models(single_models) +def test_show_coef_transform_covariance_pcaprojected(any_model): + any_model.show() + any_model._criterion_args._show_loss() + any_model._criterion_args._show_stopping_criterion() + assert hasattr(any_model, "coef") + assert callable(any_model.transform) + assert hasattr(any_model, "covariance") + assert callable(any_model.sk_PCA) + assert any_model.sk_PCA(n_components=None) is not None with pytest.raises(Exception): - any_pln.sk_PCA(n_components=any_pln.dim + 1) + any_model.sk_PCA(n_components=any_model.dim + 1) -@pytest.mark.parametrize("pln", dict_fixtures["fitted_pln"]) -@filter_models(["Pln"]) -def test_scatter_pca_matrix_pln(pln): - pln.scatter_pca_matrix(n_components=8) +@pytest.mark.parametrize("model", dict_fixtures["fitted_model"]) +@filter_models(["Pln", "ZIPln"]) +def test_scatter_pca_matrix_pln(model): + model.scatter_pca_matrix(n_components=8) -@pytest.mark.parametrize("pln", dict_fixtures["fitted_pln"]) +@pytest.mark.parametrize("model", 
dict_fixtures["fitted_model"]) @filter_models(["PlnPCA"]) -def test_scatter_pca_matrix_plnpca(pln): - pln.scatter_pca_matrix(n_components=2) - pln.scatter_pca_matrix() +def test_scatter_pca_matrix_plnpca(model): + model.scatter_pca_matrix(n_components=2) + model.scatter_pca_matrix() -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_real_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_label_scatter_pca_matrix(pln): - pln.scatter_pca_matrix(n_components=4, color=labels_real) +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_real_model"]) +@filter_models(single_models) +def test_label_scatter_pca_matrix(model): + model.scatter_pca_matrix(n_components=4, color=labels_real) -@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_pln"]) +@pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_viz_pcacol(plnpca): for model in plnpca.values(): @@ -64,38 +66,38 @@ def test_viz_pcacol(plnpca): plt.show() -@pytest.mark.parametrize("pln", dict_fixtures["real_fitted_pln_intercept_array"]) -@filter_models(["Pln", "PlnPCA"]) -def test_plot_pca_correlation_graph_with_names_only(pln): - pln.plot_pca_correlation_graph([f"var_{i}" for i in range(8)]) +@pytest.mark.parametrize("model", dict_fixtures["real_fitted_model_intercept_array"]) +@filter_models(single_models) +def test_plot_pca_correlation_graph_with_names_only(model): + model.plot_pca_correlation_graph([f"var_{i}" for i in range(8)]) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_sim_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_fail_plot_pca_correlation_graph_without_names(pln): +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_sim_model"]) +@filter_models(single_models) +def test_fail_plot_pca_correlation_graph_without_names(model): with pytest.raises(ValueError): - pln.plot_pca_correlation_graph([f"var_{i}" for i in range(8)]) + model.plot_pca_correlation_graph([f"var_{i}" for i in range(8)]) with pytest.raises(ValueError): - pln.plot_pca_correlation_graph([f"var_{i}" for i in range(6)], [1, 2, 3]) + model.plot_pca_correlation_graph([f"var_{i}" for i in range(6)], [1, 2, 3]) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_sim_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_plot_pca_correlation_graph_without_names(pln): - pln.plot_pca_correlation_graph([f"var_{i}" for i in range(3)], [0, 1, 2]) +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_sim_model"]) +@filter_models(single_models) +def test_plot_pca_correlation_graph_without_names(model): + model.plot_pca_correlation_graph([f"var_{i}" for i in range(3)], [0, 1, 2]) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_expected_vs_true(pln): - pln.plot_expected_vs_true() +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(single_models) +def test_expected_vs_true(model): + model.plot_expected_vs_true() fig, ax = plt.subplots() - pln.plot_expected_vs_true(ax=ax) + model.plot_expected_vs_true(ax=ax) -@pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_real_pln"]) -@filter_models(["Pln", "PlnPCA"]) -def test_expected_vs_true_labels(pln): - pln.plot_expected_vs_true(colors=labels_real) +@pytest.mark.parametrize("model", dict_fixtures["loaded_and_fitted_real_model"]) +@filter_models(single_models) +def test_expected_vs_true_labels(model): + 
model.plot_expected_vs_true(colors=labels_real) fig, ax = plt.subplots() - pln.plot_expected_vs_true(ax=ax, colors=labels_real) + model.plot_expected_vs_true(ax=ax, colors=labels_real) diff --git a/tests/test_zi.py b/tests/test_zi.py index 4ba5af04..2016accf 100644 --- a/tests/test_zi.py +++ b/tests/test_zi.py @@ -6,6 +6,9 @@ from tests.conftest import dict_fixtures from tests.utils import filter_models, MSE +from pyPLNmodels import get_simulated_count_data + + @pytest.mark.parametrize("zi", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["ZIPln"]) def test_properties(zi): @@ -60,14 +63,67 @@ def test_no_exog_not_possible(model): assert model._coef_inflation.shape[0] == 1 -def test_find_right_covariance_and_coef(): - pln_param = get_simulation_parameters( - n_samples=300, dim=50, nb_cov=2, rank=5, add_const=True +def test_find_right_covariance_coef_and_infla(): + pln_param = get_simulation_parameters(zero_inflated=True, n_samples=1000) + # pln_param._coef += 5 + endog = sample_pln(pln_param, seed=0, return_latent=False) + exog = pln_param.exog + offsets = pln_param.offsets + covariance = pln_param.covariance + coef = pln_param.coef + coef_inflation = pln_param.coef_inflation + endog, exog, offsets, covariance, coef, coef_inflation = get_simulated_count_data( + zero_inflated=True, return_true_param=True, n_samples=1000 ) - pln_param._coef += 5 + zi = ZIPln(endog, exog=exog, offsets=offsets, use_closed_form_prob=False) + zi.fit() + mse_covariance = MSE(zi.covariance - covariance) + mse_coef = MSE(zi.coef - coef) + mse_coef_infla = MSE(zi.coef_inflation - coef_inflation) + assert mse_coef < 3 + assert mse_coef_infla < 3 + assert mse_covariance < 1 + + +@pytest.mark.parametrize("zi", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_latent_variables(zi): + z, w = zi.latent_variables + assert z.shape == zi.endog.shape + assert w.shape == zi.endog.shape + + +@pytest.mark.parametrize("zi", dict_fixtures["loaded_and_fitted_model"]) +@filter_models(["ZIPln"]) +def test_transform(zi): + z = zi.transform() + assert z.shape == zi.endog.shape + z, w = zi.transform(return_latent_prob=True) + assert z.shape == w.shape == zi.endog.shape + + +@pytest.mark.parametrize("model", dict_fixtures["sim_model_instance"]) +@filter_models(["ZIPln"]) +def test_batch(model): + pln_param = get_simulation_parameters(zero_inflated=True, n_samples=1000) + # pln_param._coef += 5 endog = sample_pln(pln_param, seed=0, return_latent=False) - zi = ZIPln(endog, exog=pln_param.exog, offsets=pln_param.offsets) + exog = pln_param.exog + offsets = pln_param.offsets + covariance = pln_param.covariance + coef = pln_param.coef + coef_inflation = pln_param.coef_inflation + endog, exog, offsets, covariance, coef, coef_inflation = get_simulated_count_data( + zero_inflated=True, return_true_param=True, n_samples=1000 + ) + zi = ZIPln(endog, exog=exog, offsets=offsets, use_closed_form_prob=False) + zi.fit(batch_size=20) + mse_covariance = MSE(zi.covariance - covariance) + mse_coef = MSE(zi.coef - coef) + mse_coef_infla = MSE(zi.coef_inflation - coef_inflation) + assert mse_coef < 3 + assert mse_coef_infla < 3 + assert mse_covariance < 1 + zi.show() + print(zi) zi.fit() - mse_covariance = MSE(zi.covariance - pln_param.covariance) - mse_coef = MSE(zi.coef) - assert mse_covariance < 0.5 -- GitLab From 2adadea67bebe5f70906bfe2a3c4a1642903db28 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 17 Oct 2023 19:53:42 +0200 Subject: [PATCH 103/167] add a file in the gitignore 
and blacked one file ??! --- .gitignore | 1 + pyPLNmodels/_initialization.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index c00f1395..08cb3cfe 100644 --- a/.gitignore +++ b/.gitignore @@ -160,3 +160,4 @@ tests/test_models* tests/test_load* tests/test_readme* Getting_started.py +new_model.py diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index 410283bf..fe649fe0 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -43,9 +43,7 @@ def _init_covariance(endog: torch.Tensor, exog: torch.Tensor) -> torch.Tensor: return sigma_hat -def _init_components( - endog: torch.Tensor, rank: int -) -> torch.Tensor: +def _init_components(endog: torch.Tensor, rank: int) -> torch.Tensor: """ Initialization for components for the Pln model. Get a first guess for covariance that is easier to estimate and then takes the rank largest eigenvectors to get components. -- GitLab From 434c05ee2185c0d73e13477b467d124a8c2d569d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 17 Oct 2023 22:44:07 +0200 Subject: [PATCH 104/167] fixed some tests. --- pyPLNmodels/models.py | 6 +++--- tests/test_plnpcacollection.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 01f9353b..8351056c 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3252,7 +3252,7 @@ class PlnPCA(_model): Parameters ---------- project : bool, optional - Whether to project the latent variables, by default True. + Whether to project the latent variables, by default False. """, returns=""" torch.Tensor @@ -3269,7 +3269,7 @@ class PlnPCA(_model): >>> print(transformed_endog_high_dim.shape) """, ) - def transform(self, project: bool = True) -> torch.Tensor: + def transform(self, project: bool = False) -> torch.Tensor: if project is True: return self.projected_latent_variables return self.latent_variables @@ -3297,7 +3297,7 @@ class PlnPCA(_model): >>> pca.fit() >>> elbo = pca.compute_elbo() >>> print("elbo", elbo) - >>> print("loglike/n", pln.loglike/pln.n_samples) + >>> print("loglike/n", pca.loglike/pca.n_samples) """, ) def compute_elbo(self) -> torch.Tensor: diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index 19b49b18..77016b73 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -33,8 +33,8 @@ def test_right_nbcov(plnpca): @pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCA"]) def test_latent_var_pca(plnpca): - assert plnpca.transform(project=False).shape == plnpca.endog.shape - assert plnpca.transform().shape == (plnpca.n_samples, plnpca.rank) + assert plnpca.transform().shape == plnpca.endog.shape + assert plnpca.transform(project=True).shape == (plnpca.n_samples, plnpca.rank) @pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) -- GitLab From 4c21846307ed65f80a44be868bb5a3101f124bfa Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 18 Oct 2023 14:27:29 +0200 Subject: [PATCH 105/167] fixed some tests for the zi. 
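
The zero-inflated path exercised in tests/test_zi.py can be run end to end in a few lines. A sketch assembled from the calls appearing in those tests; the sample size and the use_closed_form_prob switch are illustrative choices:

    from pyPLNmodels import get_simulated_count_data
    from pyPLNmodels.models import ZIPln

    # zero-inflated simulation; the true parameters are returned for a sanity check
    endog, exog, offsets, covariance, coef, coef_inflation = get_simulated_count_data(
        zero_inflated=True, return_true_param=True, n_samples=1000
    )
    zi = ZIPln(endog, exog=exog, offsets=offsets, use_closed_form_prob=False)
    zi.fit()
    # two latent layers: the Gaussian positions and the zero-inflation probabilities
    gaussian, prob_zero_inflation = zi.transform(return_latent_prob=True)
    print(gaussian.shape, prob_zero_inflation.shape)   # both match endog.shape
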
--- pyPLNmodels/elbos.py | 3 +-- pyPLNmodels/models.py | 37 +++++++++++----------------------- tests/conftest.py | 1 - tests/test_pln_full.py | 4 ++-- tests/test_plnpcacollection.py | 2 +- tests/test_setters.py | 2 +- 6 files changed, 17 insertions(+), 32 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 5a56bc3d..73e77028 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -219,8 +219,7 @@ def elbo_zi_pln( """ covariance = components @ (components.T) if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: - print("Bug") - raise RuntimeError("rho error") + raise RuntimeError("latent_prob error") n_samples, dim = endog.shape s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) o_plus_m = offsets + latent_mean diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 8351056c..d692a9e5 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -242,11 +242,8 @@ class _model(ABC): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) return ax - def _update_parameters(self): - """ - Update the parameters with a gradient step and project if necessary. - """ - self.optim.step() + def _project_parameters(self): + pass def _handle_batch_size(self, batch_size): if batch_size is None: @@ -486,8 +483,8 @@ class _model(ABC): raise ValueError("test") loss.backward() elbo += loss.item() - self._update_parameters() - self._update_closed_forms() + self.optim.step() + self._project_parameters() return elbo / self.nb_batches def _extract_batch(self, batch): @@ -736,12 +733,6 @@ class _model(ABC): self._criterion_args.update_criterion(-loss, current_running_time) return self._criterion_args.criterion - def _update_closed_forms(self): - """ - Update closed-form expressions. - """ - pass - def display_covariance(self, ax=None, savefig=False, name_file=""): """ Display the covariance matrix. 
@@ -3740,8 +3731,7 @@ class ZIPln(_model): ) self._latent_sqrt_var = latent_sqrt_var - def _update_parameters(self): - super()._update_parameters() + def _project_parameters(self): self._project_latent_prob() def _project_latent_prob(self): @@ -3750,13 +3740,13 @@ class ZIPln(_model): """ if self._use_closed_form_prob is False: with torch.no_grad(): - self._latent_prob_b = torch.maximum( - self._latent_prob_b, torch.tensor([0]), out=self._latent_prob_b + torch.maximum( + self._latent_prob, torch.tensor([0]), out=self._latent_prob ) - self._latent_prob_b = torch.minimum( - self._latent_prob_b, torch.tensor([1]), out=self._latent_prob_b + torch.minimum( + self._latent_prob, torch.tensor([1]), out=self._latent_prob ) - self._latent_prob_b *= self._dirac_b + self._latent_prob *= self._dirac @property def covariance(self) -> torch.Tensor: @@ -3878,16 +3868,13 @@ class ZIPln(_model): self._latent_sqrt_var, self._components, ] - if self._use_closed_form_prob: + if self._use_closed_form_prob is False: list_parameters.append(self._latent_prob) if self._exog is not None: list_parameters.append(self._coef) list_parameters.append(self._coef_inflation) return list_parameters - def _update_closed_forms(self): - pass - @property @_add_doc(_model) def model_parameters(self) -> Dict[str, torch.Tensor]: @@ -3938,7 +3925,7 @@ class ZIPln(_model): "latent_sqrt_var": self.latent_sqrt_var, "latent_mean": self.latent_mean, } - if self._use_closed_form_prob is True: + if self._use_closed_form_prob is False: latent_param["latent_prob"] = self.latent_prob return latent_param diff --git a/tests/conftest.py b/tests/conftest.py index 22ea4307..b77a5d67 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,7 +17,6 @@ sys.path.append("../") # for fixture_file in glob.glob("src/**/tests/fixtures/[!__]*.py", recursive=True) # ] - from tests.import_data import ( data_sim_0cov, data_sim_2cov, diff --git a/tests/test_pln_full.py b/tests/test_pln_full.py index 6a8ced3a..e5959b0e 100644 --- a/tests/test_pln_full.py +++ b/tests/test_pln_full.py @@ -7,8 +7,8 @@ from tests.utils import filter_models @pytest.mark.parametrize("fitted_pln", dict_fixtures["fitted_model"]) @filter_models(["Pln"]) def test_number_of_iterations_pln_full(fitted_pln): - nb_iterations = len(fitted_pln.elbos_list) - assert 20 < nb_iterations < 1000 + nb_iterations = len(fitted_pln._elbos_list) + assert 20 < nb_iterations < 2000 @pytest.mark.parametrize("pln", dict_fixtures["loaded_and_fitted_model"]) diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index 77016b73..0d982d60 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -86,4 +86,4 @@ def test_batch(collection): assert mse_coef < 0.35 assert mse_covariance < 0.25 collection.fit() - assert collection.batch_size == collection.n_samples + assert collection.batch_size == 20 diff --git a/tests/test_setters.py b/tests/test_setters.py index b3012548..f230d858 100644 --- a/tests/test_setters.py +++ b/tests/test_setters.py @@ -8,7 +8,7 @@ from tests.utils import MSE, filter_models single_models = ["Pln", "PlnPCA", "ZIPln"] -@pytest.mark.parametrize("model", dict_fixtures["all_model"]) +@pytest.mark.parametrize("model", dict_fixtures["loaded_model"]) def test_data_setter_with_torch(model): model.endog = model.endog model.exog = model.exog -- GitLab From 5ea42691a90cb660a360415e58bc4d5b9792e3ea Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 18 Oct 2023 15:19:51 +0200 Subject: [PATCH 106/167] change gitignore 
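
For reference, the projection applied to the zero-inflation probabilities in the previous patch (clamp to [0, 1], then keep mass only where a zero was actually observed) can be reproduced on a toy tensor. The names below are stand-ins for illustration, not the model's attributes:

    import torch

    endog = torch.tensor([[0.0, 3.0, 0.0], [1.0, 0.0, 2.0]])
    latent_prob = torch.tensor([[1.3, 0.4, -0.2], [0.6, 0.8, 0.1]])
    dirac = (endog == 0).float()   # zero inflation only makes sense on observed zeros

    with torch.no_grad():
        latent_prob = torch.clamp(latent_prob, 0.0, 1.0)  # same effect as the maximum/minimum pair
        latent_prob = latent_prob * dirac
    print(latent_prob)             # -> [[1.0, 0.0, 0.0], [0.0, 0.8, 0.0]]
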
--- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 08cb3cfe..b2d69b27 100644 --- a/.gitignore +++ b/.gitignore @@ -150,6 +150,7 @@ test.py ## directories that outputs when running the tests tests/Pln* +tests/ZIPln* slides/ index.html -- GitLab From 2734b4675cdb447c3f7e31d57c13142e52de91f8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 18 Oct 2023 19:32:30 +0200 Subject: [PATCH 107/167] fixed some bug --- pyPLNmodels/models.py | 31 ++++++++++++++++++++++--------- tests/conftest.py | 10 +++++----- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index d692a9e5..76f76fea 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -14,6 +14,7 @@ import plotly.express as px from mlxtend.plotting import plot_pca_correlation_graph import matplotlib from scipy import stats +from statsmodels.discrete.count_model import ZeroInflatedPoisson from ._closed_forms import ( _closed_formula_coef, @@ -430,11 +431,11 @@ class _model(ABC): indices = np.arange(self.n_samples) if shuffle: np.random.shuffle(indices) - for i in range(self._nb_full_batch): - yield self._return_batch( + batch = self._return_batch( indices, i * self._batch_size, (i + 1) * self._batch_size ) + yield batch # Last batch if self._last_batch_size != 0: yield self._return_batch(indices, -self._last_batch_size, self.n_samples) @@ -475,7 +476,7 @@ class _model(ABC): The loss value. """ elbo = 0 - for batch in self._get_batch(shuffle=True): + for batch in self._get_batch(shuffle=False): self._extract_batch(batch) self.optim.zero_grad() loss = -self._compute_elbo_b() @@ -1005,6 +1006,13 @@ class _model(ABC): os.makedirs(path, exist_ok=True) for key, value in self._dict_parameters.items(): filename = f"{path}/{key}.csv" + if key == "latent_prob": + if torch.max(value) > 1 or torch.min(value) < 0: + if ( + torch.norm(self.dirac * self.latent_prob - self.latent_prob) + > 0.0001 + ): + raise Exception("Error is here") if isinstance(value, torch.Tensor): pd.DataFrame(np.array(value.cpu().detach())).to_csv( filename, header=None, index=None @@ -3465,6 +3473,9 @@ class ZIPln(_model): to_take = torch.tensor(indices[beginning:end]).to(DEVICE) batch = pln_batch + (torch.index_select(self._dirac, 0, to_take),) if self._use_closed_form_prob is False: + to_return = torch.index_select(self._latent_prob, 0, to_take) + print("max latent_prbo", torch.max(self._latent_prob)) + print("max to return", torch.max(to_return)) return batch + (torch.index_select(self._latent_prob, 0, to_take),) return batch @@ -3587,6 +3598,12 @@ class ZIPln(_model): self._components = _init_components(self._endog, self.dim) if not hasattr(self, "_coef_inflation"): + # print('shape', self.exog.shape[1]) + # for j in range(self.exog.shape[1]): + # Y_j = self._endog[:,j].numpy() + # offsets_j = self.offsets[:,j].numpy() + # zip_training_results = ZeroInflatedPoisson(endog=Y_j,exog = self.exog.numpy(), exog_infl = self.exog.numpy(), inflation='logit', offsets = offsets_j).fit() + # print('params', zip_training_results.params) self._coef_inflation = torch.randn(self.nb_cov, self.dim) def _random_init_latent_parameters(self): @@ -3740,12 +3757,8 @@ class ZIPln(_model): """ if self._use_closed_form_prob is False: with torch.no_grad(): - torch.maximum( - self._latent_prob, torch.tensor([0]), out=self._latent_prob - ) - torch.minimum( - self._latent_prob, torch.tensor([1]), out=self._latent_prob - ) + self._latent_prob = 
torch.maximum(self._latent_prob, torch.tensor([0])) + self._latent_prob = torch.minimum(self._latent_prob, torch.tensor([1])) self._latent_prob *= self._dirac @property diff --git a/tests/conftest.py b/tests/conftest.py index b77a5d67..d89a919a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,4 @@ import sys -import glob import pytest import torch from pytest_lazyfixture import lazy_fixture as lf @@ -12,10 +11,6 @@ from pyPLNmodels import get_simulated_count_data sys.path.append("../") -# pytest_plugins = [ -# fixture_file.replace("/", ".").replace(".py", "") -# for fixture_file in glob.glob("src/**/tests/fixtures/[!__]*.py", recursive=True) -# ] from tests.import_data import ( data_sim_0cov, @@ -42,6 +37,11 @@ def add_fixture_to_dict(my_dict, string_fixture): return my_dict +# zi = ZIPln(endog_sim_2cov, exog = exog_sim_2cov) +# zi.fit() +# print(zi) + + def add_list_of_fixture_to_dict( my_dict, name_of_list_of_fixtures, list_of_string_fixtures ): -- GitLab From 1ef2b188044bb806d1ed1e18f80ecf58afcc8aef Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 08:21:21 +0200 Subject: [PATCH 108/167] add init of coef infla but useless --- pyPLNmodels/models.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 76f76fea..49c590d1 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -14,7 +14,6 @@ import plotly.express as px from mlxtend.plotting import plot_pca_correlation_graph import matplotlib from scipy import stats -from statsmodels.discrete.count_model import ZeroInflatedPoisson from ._closed_forms import ( _closed_formula_coef, @@ -3474,8 +3473,6 @@ class ZIPln(_model): batch = pln_batch + (torch.index_select(self._dirac, 0, to_take),) if self._use_closed_form_prob is False: to_return = torch.index_select(self._latent_prob, 0, to_take) - print("max latent_prbo", torch.max(self._latent_prob)) - print("max to return", torch.max(to_return)) return batch + (torch.index_select(self._latent_prob, 0, to_take),) return batch @@ -3598,13 +3595,14 @@ class ZIPln(_model): self._components = _init_components(self._endog, self.dim) if not hasattr(self, "_coef_inflation"): - # print('shape', self.exog.shape[1]) + self._coef_inflation = torch.randn(self.nb_cov, self.dim) # for j in range(self.exog.shape[1]): # Y_j = self._endog[:,j].numpy() # offsets_j = self.offsets[:,j].numpy() - # zip_training_results = ZeroInflatedPoisson(endog=Y_j,exog = self.exog.numpy(), exog_infl = self.exog.numpy(), inflation='logit', offsets = offsets_j).fit() - # print('params', zip_training_results.params) - self._coef_inflation = torch.randn(self.nb_cov, self.dim) + # exog = self.exog[:,j].unsqueeze(1).numpy() + # undzi = ZeroInflatedPoisson(endog=Y_j,exog = exog, exog_infl = exog, inflation='logit', offset = offsets_j) + # zip_training_results = undzi.fit() + # self._coef_inflation[:,j] = zip_training_results.params[1] def _random_init_latent_parameters(self): self._latent_mean = torch.randn(self.n_samples, self.dim) -- GitLab From 3ea7aea012bc456393569b993c0e1e9f83174e75 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 09:17:14 +0200 Subject: [PATCH 109/167] add a checker of getting started file. 
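
The mini-batch generator reworked two patches back follows a standard epoch pattern: shuffle the sample indices, yield full blocks, then yield the smaller remainder. A self-contained sketch of that pattern; the function and variable names here are illustrative, not the package's internals:

    import numpy as np

    def iterate_batches(n_samples, batch_size, shuffle=True):
        # yield index blocks covering every sample once; the last block may be smaller
        indices = np.arange(n_samples)
        if shuffle:
            np.random.shuffle(indices)
        nb_full_batches = n_samples // batch_size
        for i in range(nb_full_batches):
            yield indices[i * batch_size : (i + 1) * batch_size]
        if n_samples % batch_size != 0:
            yield indices[nb_full_batches * batch_size :]

    for batch_indices in iterate_batches(n_samples=10, batch_size=4):
        print(batch_indices)   # two blocks of 4, then one block of 2
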
--- .gitignore | 7 ++--- ...e_getting_started_and_docstrings_tests.py} | 26 +++++++++++++++---- 2 files changed, 25 insertions(+), 8 deletions(-) rename tests/{create_readme_and_docstrings_tests.py => create_readme_getting_started_and_docstrings_tests.py} (75%) diff --git a/.gitignore b/.gitignore index b2d69b27..85c56ae8 100644 --- a/.gitignore +++ b/.gitignore @@ -157,8 +157,9 @@ index.html paper/* -tests/test_models* -tests/test_load* -tests/test_readme* +tests/docstrings_examples/* +tests/getting_started/* +tests/readme_examples/* +tests/test_getting_started.py Getting_started.py new_model.py diff --git a/tests/create_readme_and_docstrings_tests.py b/tests/create_readme_getting_started_and_docstrings_tests.py similarity index 75% rename from tests/create_readme_and_docstrings_tests.py rename to tests/create_readme_getting_started_and_docstrings_tests.py index 63aecf9d..113bf841 100644 --- a/tests/create_readme_and_docstrings_tests.py +++ b/tests/create_readme_getting_started_and_docstrings_tests.py @@ -4,6 +4,7 @@ import os dir_docstrings = "docstrings_examples" dir_readme = "readme_examples" +dir_getting_started = "getting_started" def get_lines(path_to_file, filename, filetype=".py"): @@ -47,11 +48,11 @@ def get_example_readme(lines): return [example] -def write_examples(examples, filename): +def write_file(examples, filename, string_definer, dir): for i in range(len(examples)): example = examples[i] nb_example = str(i + 1) - example_filename = f"test_{filename}_example_{nb_example}.py" + example_filename = f"{dir}/test_{filename}_{string_definer}_{nb_example}.py" try: os.remove(example_filename) except FileNotFoundError: @@ -64,19 +65,34 @@ def write_examples(examples, filename): def filename_to_docstring_example_file(filename, dirname): lines = get_lines("../pyPLNmodels/", filename) examples = get_examples_docstring(lines) - write_examples(examples, filename) + write_file(examples, filename, "example", dir=dirname) def filename_to_readme_example_file(): lines = get_lines("../", "README", filetype=".md") examples = get_example_readme(lines) - write_examples(examples, "readme") + write_file(examples, "readme", "example", dir=dir_readme) + + +lines_getting_started = get_lines("./", "test_getting_started") +new_lines = [] +for line in lines_getting_started: + if len(line) > 20: + if line[0:11] != "get_ipython": + new_lines.append(line) + else: + new_lines.append(line) os.makedirs(dir_readme, exist_ok=True) +os.makedirs(dir_docstrings, exist_ok=True) +os.makedirs(dir_getting_started, exist_ok=True) + +write_file([new_lines], "getting_started", "", dir_getting_started) + filename_to_readme_example_file() -os.makedirs("docstrings_examples", exist_ok=True) + filename_to_docstring_example_file("_utils", dir_docstrings) filename_to_docstring_example_file("models", dir_docstrings) filename_to_docstring_example_file("elbos", dir_docstrings) -- GitLab From f6983d98c3f6dbfb8ccc87948cdd535c9c72dc8e Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 09:21:57 +0200 Subject: [PATCH 110/167] changed the cd. Goes to file to check the readme examples, docstrings examples etc. --- .gitlab-ci.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c95c78f1..bf2bf333 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -20,8 +20,16 @@ tests: pip install '.[tests]' script: - pip install . 
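
The checker added in the previous patch turns the >>> examples scattered through the docstrings into standalone scripts that the CI can run. A simplified sketch of that extraction idea, not the exact implementation above:

    def extract_doctest_snippets(lines):
        # gather consecutive ">>>" lines into runnable snippets
        snippets, current = [], []
        for line in lines:
            stripped = line.strip()
            if stripped.startswith(">>>"):
                current.append(stripped[3:].strip())
            elif current:
                snippets.append("\n".join(current))
                current = []
        if current:
            snippets.append("\n".join(current))
        return snippets

    docstring = [
        "    Examples",
        "    --------",
        "    >>> from pyPLNmodels import Pln, get_real_count_data",
        "    >>> endog = get_real_count_data()",
        "    >>> pln = Pln(endog, add_const=True)",
        "    >>> pln.fit()",
        "",
    ]
    print(extract_doctest_snippets(docstring))
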
+ - jupyter nbconvert Getting_started.ipynb --to python --output tests/test_getting_started - cd tests - - python create_readme_and_docstrings_tests.py + - python create_readme_getting_started_and_docstrings_tests.py + - cd readme_examples + - pytest . + - cd ../readme_examples + - pytest . + - cd ../getting_started + - pytest . + - cd .. - pytest . only: - main -- GitLab From 350fa49e2e4db13935010be0f3b61de4cefa9493 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 10:44:14 +0200 Subject: [PATCH 111/167] add my own image. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index bf2bf333..108e940b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -15,7 +15,7 @@ black: tests: stage: checks - image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + image: "registry.forgemia.inra.fr/bbatardiere/jbleger:main" before_script: pip install '.[tests]' script: -- GitLab From 58e247fc0a3fb6366e47765526171c9303ec6e57 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 10:46:02 +0200 Subject: [PATCH 112/167] fix the name of the image. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 108e940b..9c0bda6f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -15,7 +15,7 @@ black: tests: stage: checks - image: "registry.forgemia.inra.fr/bbatardiere/jbleger:main" + image: "registry.forgemia.inra.fr/bbatardiere/docker-image-pandas-torch-sphinx-jupyter:main" before_script: pip install '.[tests]' script: -- GitLab From cffb3ef1f56e70f55b03ea75f466bce9141ae76b Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 10:50:14 +0200 Subject: [PATCH 113/167] retry the ci --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 85c56ae8..c5443b2c 100644 --- a/.gitignore +++ b/.gitignore @@ -160,6 +160,6 @@ paper/* tests/docstrings_examples/* tests/getting_started/* tests/readme_examples/* -tests/test_getting_started.py +# tests/test_getting_started.py Getting_started.py new_model.py -- GitLab From 98cced733f8ac77a58e884a3b085feb801254d9d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 10:52:38 +0200 Subject: [PATCH 114/167] change the image name. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9c0bda6f..bba6aff6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -15,7 +15,7 @@ black: tests: stage: checks - image: "registry.forgemia.inra.fr/bbatardiere/docker-image-pandas-torch-sphinx-jupyter:main" + image: "registry.forgemia.inra.fr/bbatardiere/docker-image-pandas-torch-sphinx-jupyter" before_script: pip install '.[tests]' script: -- GitLab From cb1e835e08b47c3c0d82a62996717c434655dcb9 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 13:51:53 +0200 Subject: [PATCH 115/167] chang the gitlab and the running of examles since pytest was not testing it. 
--- .gitlab-ci.yml | 8 +- .../run_readme_docstrings_getting_started.sh | 15 ++ tests/test_getting_started.py | 131 ++++++++++++++---- 3 files changed, 119 insertions(+), 35 deletions(-) create mode 100755 tests/run_readme_docstrings_getting_started.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index bba6aff6..ba97ffea 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,13 +23,7 @@ tests: - jupyter nbconvert Getting_started.ipynb --to python --output tests/test_getting_started - cd tests - python create_readme_getting_started_and_docstrings_tests.py - - cd readme_examples - - pytest . - - cd ../readme_examples - - pytest . - - cd ../getting_started - - pytest . - - cd .. + - ./run_readme_docstrings_getting_started.sh - pytest . only: - main diff --git a/tests/run_readme_docstrings_getting_started.sh b/tests/run_readme_docstrings_getting_started.sh new file mode 100755 index 00000000..59489033 --- /dev/null +++ b/tests/run_readme_docstrings_getting_started.sh @@ -0,0 +1,15 @@ +#!/bin/sh +for file in docstrings_examples/* +do + python $file +done + +for file in readme_examples/* +do + python $file +done + +for file in getting_started/* +do + python $file +done diff --git a/tests/test_getting_started.py b/tests/test_getting_started.py index 69299741..605132a0 100644 --- a/tests/test_getting_started.py +++ b/tests/test_getting_started.py @@ -1,49 +1,63 @@ #!/usr/bin/env python # coding: utf-8 -# get_ipython().system('pip install pyPLNmodels') +# In[1]: + + +get_ipython().system('pip install pyPLNmodels') # ## pyPLNmodels # We assume the data comes from a PLN model: $ \text{counts} \sim \mathcal P(\exp(\text{Z}))$, where $Z$ are some unknown latent variables. -# -# -# The goal of the package is to retrieve the latent variables $Z$ given the counts. To do so, one can instantiate a Pln or PlnPCA model, fit it and then extract the latent variables. +# +# +# The goal of the package is to retrieve the latent variables $Z$ given the counts. To do so, one can instantiate a Pln or PlnPCA model, fit it and then extract the latent variables. 
# ### Import the needed functions -from pyPLNmodels import ( - get_real_count_data, - get_simulated_count_data, - load_model, - Pln, - PlnPCA, - PlnPCAcollection, -) +# In[2]: + + +from pyPLNmodels import get_real_count_data, get_simulated_count_data, load_model, Pln, PlnPCA, PlnPCAcollection import matplotlib.pyplot as plt # ### Load the data -counts, labels = get_real_count_data(return_labels=True) # np.ndarray +# In[3]: + + +counts, labels = get_real_count_data(return_labels=True) # np.ndarray # ### PLN model -pln = Pln(counts, add_const=True) +# In[4]: + + +pln = Pln(counts, add_const = True) pln.fit() +# In[5]: + + print(pln) # #### Once fitted, we can extract multiple variables: +# In[6]: + + gaussian = pln.latent_variables print(gaussian.shape) +# In[7]: + + model_param = pln.model_parameters print(model_param["coef"].shape) print(model_param["covariance"].shape) @@ -51,37 +65,61 @@ print(model_param["covariance"].shape) # ### PlnPCA model -pca = PlnPCA(counts, add_const=True, rank=5) +# In[8]: + + +pca = PlnPCA(counts, add_const = True, rank = 5) pca.fit() +# In[9]: + + print(pca) +# In[10]: + + print(pca.latent_variables.shape) +# In[11]: + + print(pca.model_parameters["components"].shape) print(pca.model_parameters["coef"].shape) # ### One can save the model in order to load it back after: +# In[13]: + + pca.save() dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") -loaded_pca = PlnPCA(counts, add_const=True, dict_initialization=dict_init) +loaded_pca = PlnPCA(counts, add_const = True, dict_initialization= dict_init) print(loaded_pca) # ### One can fit multiple PCA and choose the best rank with BIC or AIC criterion -pca_col = PlnPCAcollection(counts, add_const=True, ranks=[5, 15, 25, 40, 50]) +# In[14]: + + +pca_col = PlnPCAcollection(counts, add_const = True, ranks = [5,15,25,40,50]) pca_col.fit() +# In[15]: + + pca_col.show() +# In[16]: + + print(pca_col) @@ -89,53 +127,90 @@ print(pca_col) # #### AIC best model -print(pca_col.best_model(criterion="AIC")) +# In[17]: + + +print(pca_col.best_model(criterion = "AIC")) # #### BIC best model -print(pca_col.best_model(criterion="BIC")) +# In[18]: + + +print(pca_col.best_model(criterion = "BIC")) # #### Visualization of the individuals (sites) with PCA on the latent variables. +# In[19]: + + pln.viz(colors=labels) plt.show() +# In[20]: + + best_pca = pca_col.best_model() -best_pca.viz(colors=labels) +best_pca.viz(colors = labels) plt.show() -# ### What would give a PCA on the log normalize data ? +# ### What would give a PCA on the log normalize data ? 
+ +# In[21]: + from sklearn.decomposition import PCA import numpy as np import seaborn as sns -sk_pca = PCA(n_components=2) +# In[22]: + + +sk_pca = PCA(n_components = 2) pca_log_counts = sk_pca.fit_transform(np.log(counts + (counts == 0))) -sns.scatterplot(x=pca_log_counts[:, 0], y=pca_log_counts[:, 1], hue=labels) +sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels) # ### Visualization of the variables -pln.plot_pca_correlation_graph(["var_1", "var_2"], indices_of_variables=[0, 1]) +# In[23]: + + +pln.plot_pca_correlation_graph(["var_1","var_2"], indices_of_variables = [0,1]) plt.show() -best_pca.plot_pca_correlation_graph(["var_1", "var_2"], indices_of_variables=[0, 1]) +# In[24]: + + +best_pca.plot_pca_correlation_graph(["var_1","var_2"], indices_of_variables = [0,1]) plt.show() # ### Visualization of each components of the PCA -# +# -pln.scatter_pca_matrix(color=labels, n_components=5) +# In[25]: + + +pln.scatter_pca_matrix(color = labels, n_components = 5) plt.show() -best_pca.scatter_pca_matrix(color=labels, n_components=6) +# In[26]: + + +best_pca.scatter_pca_matrix(color = labels, n_components = 6) plt.show() + + +# In[ ]: + + + + -- GitLab From 1b9347f582042f16bd42661583bc563e3d657d02 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 13:54:16 +0200 Subject: [PATCH 116/167] remove useless file sinc eit is going to be removed --- tests/test_getting_started.py | 216 ---------------------------------- 1 file changed, 216 deletions(-) delete mode 100644 tests/test_getting_started.py diff --git a/tests/test_getting_started.py b/tests/test_getting_started.py deleted file mode 100644 index 605132a0..00000000 --- a/tests/test_getting_started.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -# In[1]: - - -get_ipython().system('pip install pyPLNmodels') - - -# ## pyPLNmodels - -# We assume the data comes from a PLN model: $ \text{counts} \sim \mathcal P(\exp(\text{Z}))$, where $Z$ are some unknown latent variables. -# -# -# The goal of the package is to retrieve the latent variables $Z$ given the counts. To do so, one can instantiate a Pln or PlnPCA model, fit it and then extract the latent variables. 
- -# ### Import the needed functions - -# In[2]: - - -from pyPLNmodels import get_real_count_data, get_simulated_count_data, load_model, Pln, PlnPCA, PlnPCAcollection -import matplotlib.pyplot as plt - - -# ### Load the data - -# In[3]: - - -counts, labels = get_real_count_data(return_labels=True) # np.ndarray - - -# ### PLN model - -# In[4]: - - -pln = Pln(counts, add_const = True) -pln.fit() - - -# In[5]: - - -print(pln) - - -# #### Once fitted, we can extract multiple variables: - -# In[6]: - - -gaussian = pln.latent_variables -print(gaussian.shape) - - -# In[7]: - - -model_param = pln.model_parameters -print(model_param["coef"].shape) -print(model_param["covariance"].shape) - - -# ### PlnPCA model - -# In[8]: - - -pca = PlnPCA(counts, add_const = True, rank = 5) -pca.fit() - - -# In[9]: - - -print(pca) - - -# In[10]: - - -print(pca.latent_variables.shape) - - -# In[11]: - - -print(pca.model_parameters["components"].shape) -print(pca.model_parameters["coef"].shape) - - -# ### One can save the model in order to load it back after: - -# In[13]: - - -pca.save() -dict_init = load_model("PlnPCA_nbcov_1_dim_200_rank_5") -loaded_pca = PlnPCA(counts, add_const = True, dict_initialization= dict_init) -print(loaded_pca) - - -# ### One can fit multiple PCA and choose the best rank with BIC or AIC criterion - -# In[14]: - - -pca_col = PlnPCAcollection(counts, add_const = True, ranks = [5,15,25,40,50]) -pca_col.fit() - - -# In[15]: - - -pca_col.show() - - -# In[16]: - - -print(pca_col) - - -# ### One can extract the best model found (according to AIC or BIC criterion). - -# #### AIC best model - -# In[17]: - - -print(pca_col.best_model(criterion = "AIC")) - - -# #### BIC best model - -# In[18]: - - -print(pca_col.best_model(criterion = "BIC")) - - -# #### Visualization of the individuals (sites) with PCA on the latent variables. - -# In[19]: - - -pln.viz(colors=labels) -plt.show() - - -# In[20]: - - -best_pca = pca_col.best_model() -best_pca.viz(colors = labels) -plt.show() - - -# ### What would give a PCA on the log normalize data ? 
- -# In[21]: - - -from sklearn.decomposition import PCA -import numpy as np -import seaborn as sns - - -# In[22]: - - -sk_pca = PCA(n_components = 2) -pca_log_counts = sk_pca.fit_transform(np.log(counts + (counts == 0))) -sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels) - - -# ### Visualization of the variables - -# In[23]: - - -pln.plot_pca_correlation_graph(["var_1","var_2"], indices_of_variables = [0,1]) -plt.show() - - -# In[24]: - - -best_pca.plot_pca_correlation_graph(["var_1","var_2"], indices_of_variables = [0,1]) -plt.show() - - -# ### Visualization of each components of the PCA -# - -# In[25]: - - -pln.scatter_pca_matrix(color = labels, n_components = 5) -plt.show() - - -# In[26]: - - -best_pca.scatter_pca_matrix(color = labels, n_components = 6) -plt.show() - - -# In[ ]: - - - - -- GitLab From 052da666f66e7cff0ef3f545c0e8c1a2c0bf4429 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 14:04:15 +0200 Subject: [PATCH 117/167] remove a file in the ci --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ba97ffea..61e51176 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,6 +23,7 @@ tests: - jupyter nbconvert Getting_started.ipynb --to python --output tests/test_getting_started - cd tests - python create_readme_getting_started_and_docstrings_tests.py + - rm test_getting_started.py - ./run_readme_docstrings_getting_started.sh - pytest . only: -- GitLab From f595aa0ecd3562852e0bdd8c61246975e9d37713 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 14:23:38 +0200 Subject: [PATCH 118/167] gpu support for the from formula. --- pyPLNmodels/_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 1082259e..05d13b00 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -929,6 +929,10 @@ def _extract_data_from_formula( A tuple containing the extracted endog, exog, and offsets. """ + # dmatrices can not deal with GPU matrices + for key,matrix in data.items(): + if isinstance(matrix, torch.Tensor): + data[key] = matrix.cpu() dmatrix = dmatrices(formula, data=data) endog = dmatrix[0] exog = dmatrix[1] -- GitLab From e073928bfb68c926e99bd19e65ca19046a338511 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 15:40:41 +0200 Subject: [PATCH 119/167] add GPU support. 
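
For context, the convention is that model and variational tensors stay on the GPU device during optimization and are only moved back to the CPU when handed to numpy, seaborn/matplotlib or scikit-learn. A minimal sketch of that pattern (illustrative only, with a made-up latent matrix; not the package's actual code):

```
import torch

DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"

# Model / variational tensors live on DEVICE while optimizing.
latent_mean = torch.randn(100, 5, device=DEVICE, requires_grad=True)
projection = latent_mean @ torch.randn(5, 2, device=DEVICE)

# seaborn/matplotlib and numpy only accept CPU arrays, hence .cpu().detach().
projection_np = projection.cpu().detach().numpy()
```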
--- pyPLNmodels/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 49c590d1..7dfe71a3 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -231,7 +231,7 @@ class _model(ABC): if self._get_max_components() < 2: raise RuntimeError("Can't perform visualization for dim < 2.") pca = self.sk_PCA(n_components=2) - proj_variables = pca.transform(self.latent_variables.detach().cpu()) + proj_variables = pca.transform(self.latent_variables) x = proj_variables[:, 0] y = proj_variables[:, 1] sns.scatterplot(x=x, y=y, hue=colors, ax=ax) @@ -1286,7 +1286,7 @@ class _model(ABC): raise RuntimeError("Please fit the model before.") if ax is None: ax = plt.gca() - predictions = self._endog_predictions().ravel().detach() + predictions = self._endog_predictions().ravel().cpu().detach() if colors is not None: colors = np.repeat(np.array(colors), repeats=self.dim).ravel() sns.scatterplot(x=self.endog.ravel(), y=predictions, hue=colors, ax=ax) @@ -3284,7 +3284,7 @@ class PlnPCA(_model): """, ) def latent_variables(self) -> torch.Tensor: - return torch.matmul(self._latent_mean, self._components.T).detach() + return torch.matmul(self.latent_mean, self.components.T) @_add_doc( _model, -- GitLab From 2b7208ce78b327f73f95d2f6a62e516ba56edea6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 19 Oct 2023 15:41:16 +0200 Subject: [PATCH 120/167] remove the shell script since otherwise it is running two times the test of the examples. --- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 61e51176..e45adf60 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -24,7 +24,6 @@ tests: - cd tests - python create_readme_getting_started_and_docstrings_tests.py - rm test_getting_started.py - - ./run_readme_docstrings_getting_started.sh - pytest . only: - main -- GitLab From 46752db4409e8031a0a85faaa6fbccce17f66d3e Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 20 Oct 2023 12:36:07 +0200 Subject: [PATCH 121/167] fix GPU support. 
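
The subtle part is that scikit-learn consumes and returns CPU numpy arrays, so anything it produces has to be converted back to a tensor on the right device before it is combined with GPU tensors. Roughly (a sketch with made-up shapes, not the package code itself):

```
import torch
from sklearn.decomposition import PCA

DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"

latent_variables = torch.randn(50, 10, device=DEVICE)

# PCA runs on a CPU copy of the data ...
pca = PCA(n_components=2)
proj = pca.fit_transform(latent_variables.cpu().numpy())

# ... and its components come back as numpy, so they are moved onto DEVICE
# before any further tensor algebra.
components = torch.from_numpy(pca.components_).to(DEVICE)
low_dim = latent_variables @ components.T
```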
--- pyPLNmodels/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 7dfe71a3..ce920b97 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -236,7 +236,7 @@ class _model(ABC): y = proj_variables[:, 1] sns.scatterplot(x=x, y=y, hue=colors, ax=ax) if show_cov is True: - sk_components = torch.from_numpy(pca.components_) + sk_components = torch.from_numpy(pca.components_).to(DEVICE) covariances = self._get_pca_low_dim_covariances(sk_components).detach() for i in range(covariances.shape[0]): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) @@ -3008,7 +3008,7 @@ class PlnPCA(_model): else: XB = 0 return torch.exp( - self._offsets + XB + self.latent_variables + 1 / 2 * covariance_a_posteriori + self._offsets + XB + self.latent_variables.to(DEVICE) + 1 / 2 * covariance_a_posteriori ) @latent_mean.setter -- GitLab From db2a910eee522795c19a375761271247ab5584c8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 20 Oct 2023 12:48:10 +0200 Subject: [PATCH 122/167] fixe one more gpu support bug --- pyPLNmodels/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index ce920b97..ac2af805 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -232,8 +232,8 @@ class _model(ABC): raise RuntimeError("Can't perform visualization for dim < 2.") pca = self.sk_PCA(n_components=2) proj_variables = pca.transform(self.latent_variables) - x = proj_variables[:, 0] - y = proj_variables[:, 1] + x = proj_variables[:, 0].cpu() + y = proj_variables[:, 1].cpu() sns.scatterplot(x=x, y=y, hue=colors, ax=ax) if show_cov is True: sk_components = torch.from_numpy(pca.components_).to(DEVICE) -- GitLab From cc802a77e3a63f557c30d6bedd3abfd0fbace239 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sat, 21 Oct 2023 11:51:49 +0200 Subject: [PATCH 123/167] add the doc for zipln --- docs/source/index.rst | 1 + docs/source/zipln.rst | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100644 docs/source/zipln.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 98f3e0a6..da418320 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -16,6 +16,7 @@ API documentation ./plnpcacollection.rst ./plnpca.rst ./pln.rst + ./zipln.rst .. toctree:: :maxdepth: 1 diff --git a/docs/source/zipln.rst b/docs/source/zipln.rst new file mode 100644 index 00000000..ae0e1e81 --- /dev/null +++ b/docs/source/zipln.rst @@ -0,0 +1,10 @@ + +ZIPln +=== + +.. 
autoclass:: pyPLNmodels.ZIPln + :members: + :inherited-members: + :special-members: __init__ + :undoc-members: + :show-inheritance: -- GitLab From b929138210ac47ee9eceff62352ff96f9c3a287a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sat, 21 Oct 2023 11:58:57 +0200 Subject: [PATCH 124/167] add GPU support --- pyPLNmodels/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index ac2af805..1249f14b 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -232,12 +232,12 @@ class _model(ABC): raise RuntimeError("Can't perform visualization for dim < 2.") pca = self.sk_PCA(n_components=2) proj_variables = pca.transform(self.latent_variables) - x = proj_variables[:, 0].cpu() - y = proj_variables[:, 1].cpu() + x = proj_variables[:, 0] + y = proj_variables[:, 1] sns.scatterplot(x=x, y=y, hue=colors, ax=ax) if show_cov is True: sk_components = torch.from_numpy(pca.components_).to(DEVICE) - covariances = self._get_pca_low_dim_covariances(sk_components).detach() + covariances = self._get_pca_low_dim_covariances(sk_components).cpu().detach() for i in range(covariances.shape[0]): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) return ax -- GitLab From ced911b2e2a02c321747856a7fde9c8bec34e5df Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sat, 21 Oct 2023 12:13:29 +0200 Subject: [PATCH 125/167] fixed gpu support. --- pyPLNmodels/_utils.py | 3 +-- pyPLNmodels/models.py | 31 ++++++++++--------------------- 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 05d13b00..b8fa8001 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -199,9 +199,8 @@ def _log_stirling(integer: torch.Tensor) -> torch.Tensor: integer_ / math.exp(1) ) - def _trunc_log(tens: torch.Tensor, eps: float = 1e-16) -> torch.Tensor: - integer = torch.min(torch.max(tens, torch.tensor([eps])), torch.tensor([1 - eps])) + integer = torch.min(torch.max(tens, torch.tensor([eps]).to(DEVICE)), torch.tensor([1 - eps]).to(DEVICE)) return torch.log(integer) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 1249f14b..b591a7a8 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -968,7 +968,7 @@ class _model(ABC): raise ValueError( f"Wrong shape. Expected {self.n_samples, self.dim}, got {latent_mean.shape}" ) - self._latent_mean = latent_mean + self._latent_mean = latent_mean.to(DEVICE) def _cpu_attribute_or_none(self, attribute_name): """ @@ -1821,17 +1821,6 @@ class Pln(_model): pass # no model parameters since we are doing a profiled ELBO - @_add_doc(_model) - def _smart_init_latent_parameters(self): - self._random_init_latent_sqrt_var() - if not hasattr(self, "_latent_mean"): - self._latent_mean = torch.log(self._endog + (self._endog == 0)) - - @_add_doc(_model) - def _random_init_latent_parameters(self): - self._random_init_latent_sqrt_var() - if not hasattr(self, "_latent_mean"): - self._latent_mean = torch.ones((self.n_samples, self.dim)).to(DEVICE) @property @_add_doc(_model) @@ -3583,9 +3572,9 @@ class ZIPln(_model): return "with full covariance model and zero-inflation." 
def _random_init_model_parameters(self): - self._coef_inflation = torch.randn(self.nb_cov, self.dim) - self._coef = torch.randn(self.nb_cov, self.dim) - self._components = torch.randn(self.dim, self.dim) + self._coef_inflation = torch.randn(self.nb_cov, self.dim).to(DEVICE) + self._coef = torch.randn(self.nb_cov, self.dim).to(DEVICE) + self._components = torch.randn(self.dim, self.dim).to(DEVICE) # should change the good initialization for _coef_inflation def _smart_init_model_parameters(self): @@ -3595,7 +3584,7 @@ class ZIPln(_model): self._components = _init_components(self._endog, self.dim) if not hasattr(self, "_coef_inflation"): - self._coef_inflation = torch.randn(self.nb_cov, self.dim) + self._coef_inflation = torch.randn(self.nb_cov, self.dim).to(DEVICE) # for j in range(self.exog.shape[1]): # Y_j = self._endog[:,j].numpy() # offsets_j = self.offsets[:,j].numpy() @@ -3605,12 +3594,12 @@ class ZIPln(_model): # self._coef_inflation[:,j] = zip_training_results.params[1] def _random_init_latent_parameters(self): - self._latent_mean = torch.randn(self.n_samples, self.dim) - self._latent_sqrt_var = torch.randn(self.n_samples, self.dim) + self._latent_mean = torch.randn(self.n_samples, self.dim).to(DEVICE) + self._latent_sqrt_var = torch.randn(self.n_samples, self.dim).to(DEVICE) self._latent_prob = ( torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE) * self._dirac - ).double() + ).double().to(DEVICE) def _smart_init_latent_parameters(self): self._random_init_latent_parameters() @@ -3755,8 +3744,8 @@ class ZIPln(_model): """ if self._use_closed_form_prob is False: with torch.no_grad(): - self._latent_prob = torch.maximum(self._latent_prob, torch.tensor([0])) - self._latent_prob = torch.minimum(self._latent_prob, torch.tensor([1])) + self._latent_prob = torch.maximum(self._latent_prob, torch.tensor([0]).to(DEVICE)) + self._latent_prob = torch.minimum(self._latent_prob, torch.tensor([1]).to(DEVICE)) self._latent_prob *= self._dirac @property -- GitLab From f75157076281358b9ca37f0dd7546820af204390 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 15:15:43 +0200 Subject: [PATCH 126/167] pass all the tests on GPU, going to merge on main. --- pyPLNmodels/models.py | 2 +- tests/import_data.py | 5 +++++ tests/test_common.py | 10 +++++----- tests/test_plnpcacollection.py | 5 ++--- tests/test_zi.py | 12 ++++++------ 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index b591a7a8..624bcf2a 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -3197,7 +3197,7 @@ class PlnPCA(_model): """ Orthogonal components of the model. 
""" - return torch.linalg.qr(self._components, "reduced")[0] + return torch.linalg.qr(self._components, "reduced")[0].cpu() @property def components(self) -> torch.Tensor: diff --git a/tests/import_data.py b/tests/import_data.py index 9ef5ef7e..9942db40 100644 --- a/tests/import_data.py +++ b/tests/import_data.py @@ -1,10 +1,15 @@ import os +import torch from pyPLNmodels import ( get_simulated_count_data, get_real_count_data, ) +if torch.cuda.is_available(): + DEVICE = "cuda:0" +else: + DEVICE = "cpu" ( endog_sim_0cov, diff --git a/tests/test_common.py b/tests/test_common.py index 6cba2cf6..df49f39c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -46,14 +46,14 @@ def test_verbose(any_instance_model): @filter_models(pln_and_plnpca) def test_find_right_covariance(simulated_fitted_any_model): if simulated_fitted_any_model.nb_cov == 0: - true_covariance = true_sim_0cov["Sigma"] + true_covariance = true_sim_0cov["Sigma"].cpu() elif simulated_fitted_any_model.nb_cov == 2: - true_covariance = true_sim_2cov["Sigma"] + true_covariance = true_sim_2cov["Sigma"].cpu() else: raise ValueError( f"Not the right numbers of covariance({simulated_fitted_any_model.nb_cov})" ) - mse_covariance = MSE(simulated_fitted_any_model.covariance - true_covariance) + mse_covariance = MSE(simulated_fitted_any_model.covariance.cpu() - true_covariance.cpu()) assert mse_covariance < 0.05 @@ -75,7 +75,7 @@ def test_right_covariance_shape(real_fitted_and_loaded_model): def test_find_right_coef(simulated_fitted_any_model): if simulated_fitted_any_model.nb_cov == 2: true_coef = true_sim_2cov["beta"] - mse_coef = MSE(simulated_fitted_any_model.coef - true_coef) + mse_coef = MSE(simulated_fitted_any_model.coef.cpu() - true_coef.cpu()) assert mse_coef < 0.1 elif simulated_fitted_any_model.nb_cov == 0: assert simulated_fitted_any_model.coef is None @@ -118,7 +118,7 @@ def test_batch(model): model.show() if model.nb_cov == 2: true_coef = true_sim_2cov["beta"] - mse_coef = MSE(model.coef - true_coef) + mse_coef = MSE(model.coef.cpu() - true_coef.cpu()) assert mse_coef < 0.1 elif model.nb_cov == 0: assert model.coef is None diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index 0d982d60..2c1db5a4 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -8,7 +8,6 @@ from tests.conftest import dict_fixtures from tests.utils import MSE, filter_models from tests.import_data import true_sim_0cov, true_sim_2cov - @pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_best_model(plnpca): @@ -80,9 +79,9 @@ def test_batch(collection): else: raise ValueError(f"Not the right numbers of covariance({collection.nb_cov})") for model in collection.values(): - mse_covariance = MSE(model.covariance - true_covariance) + mse_covariance = MSE(model.covariance.cpu() - true_covariance.cpu()) if true_coef is not None: - mse_coef = MSE(model.coef - true_coef) + mse_coef = MSE(model.coef.cpu() - true_coef.cpu()) assert mse_coef < 0.35 assert mse_covariance < 0.25 collection.fit() diff --git a/tests/test_zi.py b/tests/test_zi.py index 2016accf..acfaa5bd 100644 --- a/tests/test_zi.py +++ b/tests/test_zi.py @@ -77,9 +77,9 @@ def test_find_right_covariance_coef_and_infla(): ) zi = ZIPln(endog, exog=exog, offsets=offsets, use_closed_form_prob=False) zi.fit() - mse_covariance = MSE(zi.covariance - covariance) - mse_coef = MSE(zi.coef - coef) - mse_coef_infla = MSE(zi.coef_inflation - coef_inflation) + mse_covariance = 
MSE(zi.covariance.cpu() - covariance.cpu()) + mse_coef = MSE(zi.coef.cpu() - coef.cpu()) + mse_coef_infla = MSE(zi.coef_inflation.cpu() - coef_inflation.cpu()) assert mse_coef < 3 assert mse_coef_infla < 3 assert mse_covariance < 1 @@ -118,9 +118,9 @@ def test_batch(model): ) zi = ZIPln(endog, exog=exog, offsets=offsets, use_closed_form_prob=False) zi.fit(batch_size=20) - mse_covariance = MSE(zi.covariance - covariance) - mse_coef = MSE(zi.coef - coef) - mse_coef_infla = MSE(zi.coef_inflation - coef_inflation) + mse_covariance = MSE(zi.covariance.cpu() - covariance.cpu()) + mse_coef = MSE(zi.coef.cpu() - coef.cpu()) + mse_coef_infla = MSE(zi.coef_inflation.cpu() - coef_inflation.cpu()) assert mse_coef < 3 assert mse_coef_infla < 3 assert mse_covariance < 1 -- GitLab From 1902bc00063d137457fe4ff7b494ab07a772a108 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 15:24:05 +0200 Subject: [PATCH 127/167] balcked --- pyPLNmodels/_utils.py | 8 ++++++-- pyPLNmodels/models.py | 28 ++++++++++++++++++++-------- tests/test_common.py | 4 +++- tests/test_plnpcacollection.py | 1 + 4 files changed, 30 insertions(+), 11 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index b8fa8001..1cb9d2cd 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -199,8 +199,12 @@ def _log_stirling(integer: torch.Tensor) -> torch.Tensor: integer_ / math.exp(1) ) + def _trunc_log(tens: torch.Tensor, eps: float = 1e-16) -> torch.Tensor: - integer = torch.min(torch.max(tens, torch.tensor([eps]).to(DEVICE)), torch.tensor([1 - eps]).to(DEVICE)) + integer = torch.min( + torch.max(tens, torch.tensor([eps]).to(DEVICE)), + torch.tensor([1 - eps]).to(DEVICE), + ) return torch.log(integer) @@ -929,7 +933,7 @@ def _extract_data_from_formula( """ # dmatrices can not deal with GPU matrices - for key,matrix in data.items(): + for key, matrix in data.items(): if isinstance(matrix, torch.Tensor): data[key] = matrix.cpu() dmatrix = dmatrices(formula, data=data) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 624bcf2a..d5854adf 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -237,7 +237,9 @@ class _model(ABC): sns.scatterplot(x=x, y=y, hue=colors, ax=ax) if show_cov is True: sk_components = torch.from_numpy(pca.components_).to(DEVICE) - covariances = self._get_pca_low_dim_covariances(sk_components).cpu().detach() + covariances = ( + self._get_pca_low_dim_covariances(sk_components).cpu().detach() + ) for i in range(covariances.shape[0]): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) return ax @@ -1821,7 +1823,6 @@ class Pln(_model): pass # no model parameters since we are doing a profiled ELBO - @property @_add_doc(_model) def _list_of_parameters_needing_gradient(self): @@ -2997,7 +2998,10 @@ class PlnPCA(_model): else: XB = 0 return torch.exp( - self._offsets + XB + self.latent_variables.to(DEVICE) + 1 / 2 * covariance_a_posteriori + self._offsets + + XB + + self.latent_variables.to(DEVICE) + + 1 / 2 * covariance_a_posteriori ) @latent_mean.setter @@ -3597,9 +3601,13 @@ class ZIPln(_model): self._latent_mean = torch.randn(self.n_samples, self.dim).to(DEVICE) self._latent_sqrt_var = torch.randn(self.n_samples, self.dim).to(DEVICE) self._latent_prob = ( - torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE) - * self._dirac - ).double().to(DEVICE) + ( + torch.empty(self.n_samples, self.dim).uniform_(0, 1).to(DEVICE) + * self._dirac + ) + .double() + .to(DEVICE) + ) def _smart_init_latent_parameters(self): 
self._random_init_latent_parameters() @@ -3744,8 +3752,12 @@ class ZIPln(_model): """ if self._use_closed_form_prob is False: with torch.no_grad(): - self._latent_prob = torch.maximum(self._latent_prob, torch.tensor([0]).to(DEVICE)) - self._latent_prob = torch.minimum(self._latent_prob, torch.tensor([1]).to(DEVICE)) + self._latent_prob = torch.maximum( + self._latent_prob, torch.tensor([0]).to(DEVICE) + ) + self._latent_prob = torch.minimum( + self._latent_prob, torch.tensor([1]).to(DEVICE) + ) self._latent_prob *= self._dirac @property diff --git a/tests/test_common.py b/tests/test_common.py index df49f39c..bd5ca62c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -53,7 +53,9 @@ def test_find_right_covariance(simulated_fitted_any_model): raise ValueError( f"Not the right numbers of covariance({simulated_fitted_any_model.nb_cov})" ) - mse_covariance = MSE(simulated_fitted_any_model.covariance.cpu() - true_covariance.cpu()) + mse_covariance = MSE( + simulated_fitted_any_model.covariance.cpu() - true_covariance.cpu() + ) assert mse_covariance < 0.05 diff --git a/tests/test_plnpcacollection.py b/tests/test_plnpcacollection.py index 2c1db5a4..761afabc 100644 --- a/tests/test_plnpcacollection.py +++ b/tests/test_plnpcacollection.py @@ -8,6 +8,7 @@ from tests.conftest import dict_fixtures from tests.utils import MSE, filter_models from tests.import_data import true_sim_0cov, true_sim_2cov + @pytest.mark.parametrize("plnpca", dict_fixtures["loaded_and_fitted_model"]) @filter_models(["PlnPCAcollection"]) def test_best_model(plnpca): -- GitLab From 661319d163397ee9a48c48932b9e9c3593a72e1b Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 23:01:30 +0200 Subject: [PATCH 128/167] changed image in publishing package and build package. --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e45adf60..671bb811 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -32,7 +32,7 @@ tests: build_package: stage: build - image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + image: "registry.forgemia.inra.fr/bbatardiere/docker-image-pandas-torch-sphinx-jupyter" before_script: - pip install build script: @@ -59,7 +59,7 @@ publish_package: pages: stage: publish - image: "registry.forgemia.inra.fr/jbleger/docker-image-pandas-torch-sphinx:master" + image: "registry.forgemia.inra.fr/bbatardiere/docker-image-pandas-torch-sphinx-jupyter" before_script: - pip install '.[build-doc]' script: -- GitLab From 0c4e248b598ac65591ed1eb9e5e7f4c4c9f94df6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 23:05:36 +0200 Subject: [PATCH 129/167] add the ZIPln in the readme. 
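
For reference, a minimal runnable form of the zero-inflated quickstart (it assumes only the oaks data set shipped with the package; the fitted model is displayed with print(zi)):

```
from pyPLNmodels import ZIPln
from pyPLNmodels.oaks import load_oaks

oaks = load_oaks()
# Note: unlike PlnPCAcollection, ZIPln takes no `ranks` argument.
zi = ZIPln.from_formula(
    "counts ~ 1 + tree + dist2ground + orientation", data=oaks, take_log_offsets=True
)
zi.fit()
print(zi)
latent = zi.transform()  # normalized latent representation of the counts
```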
--- README.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f8adaa3f..71d47bbc 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyp The package comes with an ecological data set to present the functionality ``` import pyPLNmodels -from pyPLNmodels.models import PlnPCAcollection, Pln +from pyPLNmodels.models import PlnPCAcollection, Pln, ZIPln from pyPLNmodels.oaks import load_oaks oaks = load_oaks() ``` @@ -45,9 +45,15 @@ transformed_data = pln.transform() pca = PlnPCAcollection.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) pca.fit() print(pca) -transformed_data = pln.transform() +transformed_data = pca.transform() +``` +### Zero inflated Poisson Log normal Model (aka ZIPln) +``` +zi = ZIPln.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) +zi.fit() +zi(pca) +transformed_data = zi.transform() ``` - ## 🛠Installation -- GitLab From 21e4c9a2d174671d480a1070e0e1675db36cff58 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 23:08:56 +0200 Subject: [PATCH 130/167] not enough ===== under the ZIPLN --- docs/source/zipln.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/zipln.rst b/docs/source/zipln.rst index ae0e1e81..ccd9573c 100644 --- a/docs/source/zipln.rst +++ b/docs/source/zipln.rst @@ -1,6 +1,6 @@ ZIPln -=== +===== .. autoclass:: pyPLNmodels.ZIPln :members: -- GitLab From d2cce98920b8dbf147d9c41975d9377853936002 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 23:12:52 +0200 Subject: [PATCH 131/167] minor change --- docs/source/zipln.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/source/zipln.rst b/docs/source/zipln.rst index ccd9573c..17fbac45 100644 --- a/docs/source/zipln.rst +++ b/docs/source/zipln.rst @@ -1,4 +1,3 @@ - ZIPln ===== -- GitLab From 52318742764ccad219f6eb60b40c70d727e51019 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Mon, 23 Oct 2023 23:48:21 +0200 Subject: [PATCH 132/167] typo in th eREADME --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 71d47bbc..ad011628 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,7 @@ transformed_data = pln.transform() pca = PlnPCAcollection.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) pca.fit() print(pca) -transformed_data = pca.transform() +transformed_data = pca.best_model().transform() ``` ### Zero inflated Poisson Log normal Model (aka ZIPln) ``` -- GitLab From 010109b72b80f6a0c7a8b44431c3e77a1928b137 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 24 Oct 2023 00:01:20 +0200 Subject: [PATCH 133/167] useless seaborn --- pyPLNmodels/_initialization.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pyPLNmodels/_initialization.py b/pyPLNmodels/_initialization.py index fe649fe0..95b71246 100644 --- a/pyPLNmodels/_initialization.py +++ b/pyPLNmodels/_initialization.py @@ -4,7 +4,6 @@ from typing import Optional from ._utils import _log_stirling import time from sklearn.decomposition import PCA -import seaborn as sns import matplotlib.pyplot as plt import numpy as np -- GitLab From d6e63006e1f3e19151c28189a08816a27271ba1c Mon 
Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 24 Oct 2023 00:08:52 +0200 Subject: [PATCH 134/167] typo in the README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad011628..25afa02b 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ transformed_data = pca.best_model().transform() ``` zi = ZIPln.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) zi.fit() -zi(pca) +print(zi) transformed_data = zi.transform() ``` -- GitLab From a1bc467d954ddc07c704a231d014f1ef1b7f480d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Tue, 24 Oct 2023 00:11:36 +0200 Subject: [PATCH 135/167] another typo. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 25afa02b..267975de 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ transformed_data = pca.best_model().transform() ``` ### Zero inflated Poisson Log normal Model (aka ZIPln) ``` -zi = ZIPln.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True, ranks = [3,4,5]) +zi = ZIPln.from_formula("counts ~ 1 + tree + dist2ground + orientation ", data = oaks, take_log_offsets = True) zi.fit() print(zi) transformed_data = zi.transform() -- GitLab From 50e510e9e5abd651a3f3007e6cdb3c09188ee5b6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 26 Oct 2023 15:43:55 +0200 Subject: [PATCH 136/167] minor change --- pyPLNmodels/elbos.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 73e77028..7ebfbd43 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -180,11 +180,7 @@ def elbo_pln( elbo += d f = -0.5 * torch.trace(torch.inverse(covariance) @ diag) elbo += f - # print("a pln", a) - # print("b pln", b) - # print("d pln", d) - # print("f pln", f) - return elbo # / n_samples + return elbo ## pb with trunc_log -- GitLab From 8eea8480f39d45ee8e626b7126386a1261ca16aa Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 26 Oct 2023 15:43:55 +0200 Subject: [PATCH 137/167] minor change --- pyPLNmodels/elbos.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 73e77028..7ebfbd43 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -180,11 +180,7 @@ def elbo_pln( elbo += d f = -0.5 * torch.trace(torch.inverse(covariance) @ diag) elbo += f - # print("a pln", a) - # print("b pln", b) - # print("d pln", d) - # print("f pln", f) - return elbo # / n_samples + return elbo ## pb with trunc_log -- GitLab From 51e16bdf9da3f74008016db1e7ef00d92da7db55 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 01:14:14 +0100 Subject: [PATCH 138/167] minor change in the README. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 267975de..31fb242e 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ > though it has memory limitations. Possible fields of applications include > - Genomics (number of times a gene is expressed in a cell) > - Ecology (species abundances) +> > One main functionality is to normalize the count data to obtain more valuable > data. It also analyse the significance of each variable and their correlation as well as the weight of > covariates (if available). 
-- GitLab From 0a8e0bf4cdbcb95607a47556dd6ebdf9f44849f9 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 01:14:14 +0100 Subject: [PATCH 139/167] minor change in the README. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 267975de..31fb242e 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ > though it has memory limitations. Possible fields of applications include > - Genomics (number of times a gene is expressed in a cell) > - Ecology (species abundances) +> > One main functionality is to normalize the count data to obtain more valuable > data. It also analyse the significance of each variable and their correlation as well as the weight of > covariates (if available). -- GitLab From a825138e77f4c0d1fba373ebe21937e31e3320e4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 16:51:49 +0100 Subject: [PATCH 140/167] updated version in the package. --- pyPLNmodels/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index 6ed723c7..1e4c8c55 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -1,3 +1,5 @@ +import importlib.metadata + from .models import PlnPCAcollection, Pln, PlnPCA, ZIPln # pylint:disable=[C0114] from .oaks import load_oaks from .elbos import profiled_elbo_pln, elbo_plnpca, elbo_pln @@ -30,3 +32,4 @@ __all__ = ( "get_simulation_parameters", "PlnParameters", ) +__version__ = importlib.metadata.version("pyPLNmodels") -- GitLab From 52e02d60709863ef08100c6dbc0f17094bd35148 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 16:51:49 +0100 Subject: [PATCH 141/167] updated version in the package. --- pyPLNmodels/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyPLNmodels/__init__.py b/pyPLNmodels/__init__.py index 6ed723c7..1e4c8c55 100644 --- a/pyPLNmodels/__init__.py +++ b/pyPLNmodels/__init__.py @@ -1,3 +1,5 @@ +import importlib.metadata + from .models import PlnPCAcollection, Pln, PlnPCA, ZIPln # pylint:disable=[C0114] from .oaks import load_oaks from .elbos import profiled_elbo_pln, elbo_plnpca, elbo_pln @@ -30,3 +32,4 @@ __all__ = ( "get_simulation_parameters", "PlnParameters", ) +__version__ = importlib.metadata.version("pyPLNmodels") -- GitLab From be3b4a61511227968fd6a548a76f1ba5bfa12ba5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 16:52:15 +0100 Subject: [PATCH 142/167] remove useless line in ci. --- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 671bb811..0e380726 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -50,7 +50,6 @@ publish_package: before_script: - pip install twine script: - - echo ${pypln_token} - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker -- GitLab From ec0ee760079794dee6a012f14b8c65f56a618324 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 16:52:15 +0100 Subject: [PATCH 143/167] remove useless line in ci. 
--- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 671bb811..0e380726 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -50,7 +50,6 @@ publish_package: before_script: - pip install twine script: - - echo ${pypln_token} - TWINE_PASSWORD=${pypln_token} TWINE_USERNAME=__token__ python -m twine upload dist/* tags: - docker -- GitLab From bc9dd3a5e8fc6854b36287743baeb315d930226a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 18:58:41 +0100 Subject: [PATCH 144/167] updated README --- README.md | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 31fb242e..41758cfb 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,18 @@ ## Getting started The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see the quickstart next. +## 🛠Installation + +**pyPLNmodels** is available on +[pypi](https://pypi.org/project/pyPLNmodels/). The development +version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). + +### Package installation + +``` +pip install pyPLNmodels +``` + ## âš¡ï¸ Quickstart The package comes with an ecological data set to present the functionality @@ -56,17 +68,6 @@ print(zi) transformed_data = zi.transform() ``` -## 🛠Installation - -**pyPLNmodels** is available on -[pypi](https://pypi.org/project/pyPLNmodels/). The development -version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). - -### Package installation - -``` -pip install pyPLNmodels -``` ## 👠Contributing -- GitLab From 8efd36b843a38332a3624da487921714cd48e4ab Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Sun, 7 Jan 2024 18:58:41 +0100 Subject: [PATCH 145/167] updated README --- README.md | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 31fb242e..41758cfb 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,18 @@ ## Getting started The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see the quickstart next. +## 🛠Installation + +**pyPLNmodels** is available on +[pypi](https://pypi.org/project/pyPLNmodels/). The development +version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). + +### Package installation + +``` +pip install pyPLNmodels +``` + ## âš¡ï¸ Quickstart The package comes with an ecological data set to present the functionality @@ -56,17 +68,6 @@ print(zi) transformed_data = zi.transform() ``` -## 🛠Installation - -**pyPLNmodels** is available on -[pypi](https://pypi.org/project/pyPLNmodels/). The development -version is available on [GitHub](https://github.com/PLN-team/pyPLNmodels). - -### Package installation - -``` -pip install pyPLNmodels -``` ## 👠Contributing -- GitLab From b91066263aac01306011e3aad275e7dcc324bd06 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 17:54:51 +0100 Subject: [PATCH 146/167] add the right link in README to the getting started. Before it was a link in forgemia. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f8adaa3f..b15d0300 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ <!-- > comprehensive introduction. --> ## Getting started -The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see the quickstart next. +The getting started can be found [here](Getting_started.ipynb). If you need just a quick view of the package, see the quickstart next. ## âš¡ï¸ Quickstart -- GitLab From 723a29ca45d668c67ee11a961951485f2f3bd7fb Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 17:54:51 +0100 Subject: [PATCH 147/167] add the right link in README to the getting started. Before it was a link in forgemia. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f8adaa3f..b15d0300 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ <!-- > comprehensive introduction. --> ## Getting started -The getting started can be found [here](https://forgemia.inra.fr/bbatardiere/pyplnmodels/-/raw/dev/Getting_started.ipynb?inline=false). If you need just a quick view of the package, see the quickstart next. +The getting started can be found [here](Getting_started.ipynb). If you need just a quick view of the package, see the quickstart next. ## âš¡ï¸ Quickstart -- GitLab From 82be72f0e553594d7c418a7887192c631c4ed0fd Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 17:58:12 +0100 Subject: [PATCH 148/167] cleared output of the getting_started. --- Getting_started.ipynb | 45 ++++++------------------------------------- 1 file changed, 6 insertions(+), 39 deletions(-) diff --git a/Getting_started.ipynb b/Getting_started.ipynb index f5f30e8d..fda09530 100644 --- a/Getting_started.ipynb +++ b/Getting_started.ipynb @@ -2,45 +2,12 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "23df6e63", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: pyPLNmodels in /home/bastien/miniconda3/lib/python3.10/site-packages (0.0.57.dev1+g1f0e5e1.d20230605)\n", - "Requirement already satisfied: scipy in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (1.10.0)\n", - "Requirement already satisfied: matplotlib in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (3.6.3)\n", - "Requirement already satisfied: patsy in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (0.5.3)\n", - "Requirement already satisfied: pandas in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.5.3)\n", - "Requirement already satisfied: scikit-learn in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.2.1)\n", - "Requirement already satisfied: torch in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.13.1)\n", - "Requirement already satisfied: numpy in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (1.23.5)\n", - "Requirement already satisfied: seaborn in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (0.12.2)\n", - "Requirement already satisfied: packaging>=20.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (22.0)\n", - "Requirement already 
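
Stored outputs such as the long pip-install log below can be stripped programmatically; one possible way (a sketch using nbformat, not necessarily how it was done here):

```
import nbformat

nb = nbformat.read("Getting_started.ipynb", as_version=4)
for cell in nb.cells:
    if cell.cell_type == "code":
        cell.outputs = []            # drop stored outputs
        cell.execution_count = None  # reset the execution counter
nbformat.write(nb, "Getting_started.ipynb")
```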
satisfied: pyparsing>=2.2.1 in /home/bastien/.local/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (3.0.9)\n", - "Requirement already satisfied: pillow>=6.2.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (9.4.0)\n", - "Requirement already satisfied: fonttools>=4.22.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (4.38.0)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /home/bastien/.local/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (2.8.2)\n", - "Requirement already satisfied: contourpy>=1.0.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (1.0.7)\n", - "Requirement already satisfied: cycler>=0.10 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (0.11.0)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (1.4.4)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from pandas->pyPLNmodels) (2022.7.1)\n", - "Requirement already satisfied: six in /home/bastien/miniconda3/lib/python3.10/site-packages (from patsy->pyPLNmodels) (1.16.0)\n", - "Requirement already satisfied: joblib>=1.1.1 in /home/bastien/.local/lib/python3.10/site-packages (from scikit-learn->pyPLNmodels) (1.2.0)\n", - "Requirement already satisfied: threadpoolctl>=2.0.0 in /home/bastien/.local/lib/python3.10/site-packages (from scikit-learn->pyPLNmodels) (3.1.0)\n", - "Requirement already satisfied: nvidia-cuda-runtime-cu11==11.7.99 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (11.7.99)\n", - "Requirement already satisfied: nvidia-cublas-cu11==11.10.3.66 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (11.10.3.66)\n", - "Requirement already satisfied: nvidia-cudnn-cu11==8.5.0.96 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (8.5.0.96)\n", - "Requirement already satisfied: nvidia-cuda-nvrtc-cu11==11.7.99 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (11.7.99)\n", - "Requirement already satisfied: typing-extensions in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (4.4.0)\n", - "Requirement already satisfied: setuptools in /home/bastien/miniconda3/lib/python3.10/site-packages (from nvidia-cublas-cu11==11.10.3.66->torch->pyPLNmodels) (65.5.0)\n", - "Requirement already satisfied: wheel in /home/bastien/miniconda3/lib/python3.10/site-packages (from nvidia-cublas-cu11==11.10.3.66->torch->pyPLNmodels) (0.37.1)\n" - ] - } - ], + "metadata": { + "scrolled": true + }, + "outputs": [], "source": [ "!pip install pyPLNmodels" ] @@ -8307,7 +8274,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.8.13" } }, "nbformat": 4, -- GitLab From 65d08df2613f6a6dde23e4a17d804d4a236667f6 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 17:58:12 +0100 Subject: [PATCH 149/167] cleared output of the getting_started. 
--- Getting_started.ipynb | 45 ++++++------------------------------------- 1 file changed, 6 insertions(+), 39 deletions(-) diff --git a/Getting_started.ipynb b/Getting_started.ipynb index f5f30e8d..fda09530 100644 --- a/Getting_started.ipynb +++ b/Getting_started.ipynb @@ -2,45 +2,12 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "23df6e63", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: pyPLNmodels in /home/bastien/miniconda3/lib/python3.10/site-packages (0.0.57.dev1+g1f0e5e1.d20230605)\n", - "Requirement already satisfied: scipy in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (1.10.0)\n", - "Requirement already satisfied: matplotlib in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (3.6.3)\n", - "Requirement already satisfied: patsy in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (0.5.3)\n", - "Requirement already satisfied: pandas in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.5.3)\n", - "Requirement already satisfied: scikit-learn in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.2.1)\n", - "Requirement already satisfied: torch in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (1.13.1)\n", - "Requirement already satisfied: numpy in /home/bastien/miniconda3/lib/python3.10/site-packages (from pyPLNmodels) (1.23.5)\n", - "Requirement already satisfied: seaborn in /home/bastien/.local/lib/python3.10/site-packages (from pyPLNmodels) (0.12.2)\n", - "Requirement already satisfied: packaging>=20.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (22.0)\n", - "Requirement already satisfied: pyparsing>=2.2.1 in /home/bastien/.local/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (3.0.9)\n", - "Requirement already satisfied: pillow>=6.2.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (9.4.0)\n", - "Requirement already satisfied: fonttools>=4.22.0 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (4.38.0)\n", - "Requirement already satisfied: python-dateutil>=2.7 in /home/bastien/.local/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (2.8.2)\n", - "Requirement already satisfied: contourpy>=1.0.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (1.0.7)\n", - "Requirement already satisfied: cycler>=0.10 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (0.11.0)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from matplotlib->pyPLNmodels) (1.4.4)\n", - "Requirement already satisfied: pytz>=2020.1 in /home/bastien/miniconda3/lib/python3.10/site-packages (from pandas->pyPLNmodels) (2022.7.1)\n", - "Requirement already satisfied: six in /home/bastien/miniconda3/lib/python3.10/site-packages (from patsy->pyPLNmodels) (1.16.0)\n", - "Requirement already satisfied: joblib>=1.1.1 in /home/bastien/.local/lib/python3.10/site-packages (from scikit-learn->pyPLNmodels) (1.2.0)\n", - "Requirement already satisfied: threadpoolctl>=2.0.0 in /home/bastien/.local/lib/python3.10/site-packages (from scikit-learn->pyPLNmodels) (3.1.0)\n", - "Requirement already satisfied: nvidia-cuda-runtime-cu11==11.7.99 in /home/bastien/.local/lib/python3.10/site-packages 
(from torch->pyPLNmodels) (11.7.99)\n", - "Requirement already satisfied: nvidia-cublas-cu11==11.10.3.66 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (11.10.3.66)\n", - "Requirement already satisfied: nvidia-cudnn-cu11==8.5.0.96 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (8.5.0.96)\n", - "Requirement already satisfied: nvidia-cuda-nvrtc-cu11==11.7.99 in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (11.7.99)\n", - "Requirement already satisfied: typing-extensions in /home/bastien/.local/lib/python3.10/site-packages (from torch->pyPLNmodels) (4.4.0)\n", - "Requirement already satisfied: setuptools in /home/bastien/miniconda3/lib/python3.10/site-packages (from nvidia-cublas-cu11==11.10.3.66->torch->pyPLNmodels) (65.5.0)\n", - "Requirement already satisfied: wheel in /home/bastien/miniconda3/lib/python3.10/site-packages (from nvidia-cublas-cu11==11.10.3.66->torch->pyPLNmodels) (0.37.1)\n" - ] - } - ], + "metadata": { + "scrolled": true + }, + "outputs": [], "source": [ "!pip install pyPLNmodels" ] @@ -8307,7 +8274,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.8.13" } }, "nbformat": 4, -- GitLab From 6caf93652a785962fb7bdf2bc1f934bda2a40e37 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 18:05:42 +0100 Subject: [PATCH 150/167] change a little bit the getting_started. --- Getting_started.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Getting_started.ipynb b/Getting_started.ipynb index fda09530..b6b15f2f 100644 --- a/Getting_started.ipynb +++ b/Getting_started.ipynb @@ -292,7 +292,7 @@ "id": "31c211f1", "metadata": {}, "source": [ - "### One can save the model in order to load it back after:" + "### Save the model in order to load it back after:" ] }, { @@ -334,7 +334,7 @@ "id": "0cad573c", "metadata": {}, "source": [ - "### One can fit multiple PCA and choose the best rank with BIC or AIC criterion" + "### Fit multiple PCA and choose the best rank with BIC or AIC criterion" ] }, { @@ -456,7 +456,7 @@ "id": "bbee8a02", "metadata": {}, "source": [ - "### One can extract the best model found (according to AIC or BIC criterion)." + "### Extract the best model found (according to AIC or BIC criterion)." ] }, { @@ -638,7 +638,7 @@ "source": [ "sk_pca = PCA(n_components = 2)\n", "pca_log_counts = sk_pca.fit_transform(np.log(counts + (counts == 0)))\n", - "sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels)" + "_ = sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels)" ] }, { -- GitLab From fd38d4126405820b68f33fd098294fee493501ad Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 11 Jan 2024 18:05:42 +0100 Subject: [PATCH 151/167] change a little bit the getting_started. 
--- Getting_started.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Getting_started.ipynb b/Getting_started.ipynb index fda09530..b6b15f2f 100644 --- a/Getting_started.ipynb +++ b/Getting_started.ipynb @@ -292,7 +292,7 @@ "id": "31c211f1", "metadata": {}, "source": [ - "### One can save the model in order to load it back after:" + "### Save the model in order to load it back after:" ] }, { @@ -334,7 +334,7 @@ "id": "0cad573c", "metadata": {}, "source": [ - "### One can fit multiple PCA and choose the best rank with BIC or AIC criterion" + "### Fit multiple PCA and choose the best rank with BIC or AIC criterion" ] }, { @@ -456,7 +456,7 @@ "id": "bbee8a02", "metadata": {}, "source": [ - "### One can extract the best model found (according to AIC or BIC criterion)." + "### Extract the best model found (according to AIC or BIC criterion)." ] }, { @@ -638,7 +638,7 @@ "source": [ "sk_pca = PCA(n_components = 2)\n", "pca_log_counts = sk_pca.fit_transform(np.log(counts + (counts == 0)))\n", - "sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels)" + "_ = sns.scatterplot(x = pca_log_counts[:,0], y = pca_log_counts[:,1], hue = labels)" ] }, { -- GitLab From 24da34c23a8043c4d4d0a3021d1e5a08f09742c4 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 17:04:48 +0100 Subject: [PATCH 152/167] add better error messages, changed the default value of offsets from "logsum" to "zero". --- pyPLNmodels/_utils.py | 5 +++++ pyPLNmodels/elbos.py | 2 +- pyPLNmodels/models.py | 45 ++++++++++++++++++++++++------------------- 3 files changed, 31 insertions(+), 21 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 1cb9d2cd..10ce38c4 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -333,10 +333,15 @@ def _format_model_param( ------ ValueError If endog has negative values or offsets_formula is not None and not "logsum" or "zero" + If endog has one line that is full of zeros. """ endog = _format_data(endog) if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") + if torch.min(torch.sum(endog, axis=0)) < 0.5: + raise ValueError( + "Counts contains individuals containing only zero coutns. Remove it." + ) exog = _format_data(exog) if add_const is True: if exog is None: diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 73e77028..2dca93fb 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -219,7 +219,7 @@ def elbo_zi_pln( """ covariance = components @ (components.T) if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: - raise RuntimeError("latent_prob error") + raise RuntimeError("Latent probability error.") n_samples, dim = endog.shape s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) o_plus_m = offsets + latent_mean diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index d5854adf..32006437 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -74,7 +74,7 @@ class _model(ABC): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -91,8 +91,8 @@ class _model(ABC): offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. 
offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". Overriden if - offsets is not None. + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -120,7 +120,7 @@ class _model(ABC): formula: str, data: dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, ): @@ -135,7 +135,9 @@ class _model(ABC): The data dictionary. Each value can be either a torch.Tensor, a np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take + the logarithm of the sum (of each line) of the counts. + Overriden (useless) if data["offsets"] is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -482,7 +484,7 @@ class _model(ABC): self.optim.zero_grad() loss = -self._compute_elbo_b() if torch.sum(torch.isnan(loss)): - raise ValueError("test") + raise ValueError("The ELBO contains nan values.") loss.backward() elbo += loss.item() self.optim.step() @@ -1462,7 +1464,7 @@ class Pln(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -1499,7 +1501,7 @@ class Pln(_model): formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): @@ -1957,7 +1959,7 @@ class PlnPCAcollection: *, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, @@ -1975,7 +1977,8 @@ class PlnPCAcollection: offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets, by default None. offsets_formula : str, optional(keyword-only) - The formula for offsets, by default "logsum". + The formula for offsets, by default "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). dict_of_dict_initialization : dict, optional(keyword-only) @@ -2013,7 +2016,7 @@ class PlnPCAcollection: formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, @@ -2029,8 +2032,8 @@ class PlnPCAcollection: The data dictionary. 
Each value can be either a torch.Tensor, np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets, by default "logsum". - Overriden if data["offsets"] is not None. + The formula for offsets, by default "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if data["offsets"] is not None. ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). dict_of_dict_initialization : dict, optional(keyword-only) @@ -2802,7 +2805,7 @@ class PlnPCA(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", rank: int = 5, dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, @@ -2846,7 +2849,7 @@ class PlnPCA(_model): data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, rank: int = 5, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) @@ -3389,7 +3392,7 @@ class ZIPln(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -3407,8 +3410,8 @@ class ZIPln(_model): offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". Overriden if - offsets is not None. + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -3475,7 +3478,7 @@ class ZIPln(_model): formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, use_closed_form_prob: bool = False, @@ -3491,7 +3494,9 @@ class ZIPln(_model): The data dictionary. Each value can be either a torch.Tensor, a np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". + The formula for offsets. Defaults to "zero". Can be also "logsum" where + we take the logarithm of the sum (of each line) of the counts. Overriden (useless) + if data["offsets"] is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) -- GitLab From a577c741511350132a5f5881ca593b349681f47d Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 17:04:48 +0100 Subject: [PATCH 153/167] add better error messages, changed the default value of offsets from "logsum" to "zero". 
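The commit above (and this twin commit) only changes a default, but the two offset conventions are easy to confuse. The sketch below spells out what the docstrings describe, assuming "zero" simply stands for an all-zero offset matrix and "logsum" for the logarithm of each line's total count; the helper name and shapes are illustrative, not the package's internal code.

    import torch

    def compute_offsets(endog: torch.Tensor, offsets_formula: str = "zero") -> torch.Tensor:
        if offsets_formula == "zero":
            # No offset at all: a matrix of zeros with the same shape as the counts.
            return torch.zeros_like(endog)
        if offsets_formula == "logsum":
            # Logarithm of the sum of each line of the counts, repeated across the columns.
            return torch.log(endog.sum(dim=1, keepdim=True)).expand_as(endog)
        raise ValueError("offsets_formula should be either 'zero' or 'logsum'.")

    endog = torch.tensor([[1.0, 4.0, 5.0], [2.0, 0.0, 8.0]])
    print(compute_offsets(endog, "logsum"))  # both rows filled with log(10)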
--- pyPLNmodels/_utils.py | 5 +++++ pyPLNmodels/elbos.py | 2 +- pyPLNmodels/models.py | 45 ++++++++++++++++++++++++------------------- 3 files changed, 31 insertions(+), 21 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 1cb9d2cd..10ce38c4 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -333,10 +333,15 @@ def _format_model_param( ------ ValueError If endog has negative values or offsets_formula is not None and not "logsum" or "zero" + If endog has one line that is full of zeros. """ endog = _format_data(endog) if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") + if torch.min(torch.sum(endog, axis=0)) < 0.5: + raise ValueError( + "Counts contains individuals containing only zero coutns. Remove it." + ) exog = _format_data(exog) if add_const is True: if exog is None: diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 73e77028..2dca93fb 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -219,7 +219,7 @@ def elbo_zi_pln( """ covariance = components @ (components.T) if torch.norm(latent_prob * dirac - latent_prob) > 0.00000001: - raise RuntimeError("latent_prob error") + raise RuntimeError("Latent probability error.") n_samples, dim = endog.shape s_rond_s = torch.multiply(latent_sqrt_var, latent_sqrt_var) o_plus_m = offsets + latent_mean diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index d5854adf..32006437 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -74,7 +74,7 @@ class _model(ABC): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -91,8 +91,8 @@ class _model(ABC): offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". Overriden if - offsets is not None. + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -120,7 +120,7 @@ class _model(ABC): formula: str, data: dict[str : Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, ): @@ -135,7 +135,9 @@ class _model(ABC): The data dictionary. Each value can be either a torch.Tensor, a np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take + the logarithm of the sum (of each line) of the counts. + Overriden (useless) if data["offsets"] is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. 
take_log_offsets : bool, optional(keyword-only) @@ -482,7 +484,7 @@ class _model(ABC): self.optim.zero_grad() loss = -self._compute_elbo_b() if torch.sum(torch.isnan(loss)): - raise ValueError("test") + raise ValueError("The ELBO contains nan values.") loss.backward() elbo += loss.item() self.optim.step() @@ -1462,7 +1464,7 @@ class Pln(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -1499,7 +1501,7 @@ class Pln(_model): formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, ): @@ -1957,7 +1959,7 @@ class PlnPCAcollection: *, exog: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, offsets: Union[torch.Tensor, np.ndarray, pd.DataFrame] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, @@ -1975,7 +1977,8 @@ class PlnPCAcollection: offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets, by default None. offsets_formula : str, optional(keyword-only) - The formula for offsets, by default "logsum". + The formula for offsets, by default "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). dict_of_dict_initialization : dict, optional(keyword-only) @@ -2013,7 +2016,7 @@ class PlnPCAcollection: formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", ranks: Iterable[int] = range(3, 5), dict_of_dict_initialization: Optional[dict] = None, take_log_offsets: bool = False, @@ -2029,8 +2032,8 @@ class PlnPCAcollection: The data dictionary. Each value can be either a torch.Tensor, np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets, by default "logsum". - Overriden if data["offsets"] is not None. + The formula for offsets, by default "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if data["offsets"] is not None. ranks : Iterable[int], optional(keyword-only) The range of ranks, by default range(3, 5). 
dict_of_dict_initialization : dict, optional(keyword-only) @@ -2802,7 +2805,7 @@ class PlnPCA(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", rank: int = 5, dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, @@ -2846,7 +2849,7 @@ class PlnPCA(_model): data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, rank: int = 5, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, ): endog, exog, offsets = _extract_data_from_formula(formula, data) @@ -3389,7 +3392,7 @@ class ZIPln(_model): *, exog: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, offsets: Optional[Union[torch.Tensor, np.ndarray, pd.DataFrame]] = None, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, add_const: bool = True, @@ -3407,8 +3410,8 @@ class ZIPln(_model): offsets : Union[torch.Tensor, np.ndarray, pd.DataFrame], optional(keyword-only) The offsets data. Defaults to None. offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". Overriden if - offsets is not None. + The formula for offsets. Defaults to "zero". Can be also "logsum" where we take the logarithm of the sum (of each line) of the counts. + Overriden (useless) if offsets is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) @@ -3475,7 +3478,7 @@ class ZIPln(_model): formula: str, data: Dict[str, Union[torch.Tensor, np.ndarray, pd.DataFrame]], *, - offsets_formula: str = "logsum", + offsets_formula: str = "zero", dict_initialization: Optional[Dict[str, torch.Tensor]] = None, take_log_offsets: bool = False, use_closed_form_prob: bool = False, @@ -3491,7 +3494,9 @@ class ZIPln(_model): The data dictionary. Each value can be either a torch.Tensor, a np.ndarray or pd.DataFrame offsets_formula : str, optional(keyword-only) - The formula for offsets. Defaults to "logsum". + The formula for offsets. Defaults to "zero". Can be also "logsum" where + we take the logarithm of the sum (of each line) of the counts. Overriden (useless) + if data["offsets"] is not None. dict_initialization : dict, optional(keyword-only) The initialization dictionary. Defaults to None. take_log_offsets : bool, optional(keyword-only) -- GitLab From 45361ad49ca8d2c3b3d4dba713afeb6aa51ae8b7 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 17:32:57 +0100 Subject: [PATCH 154/167] add a test that see if the initialization contains zeros. 
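The ValueError introduced above inspects the per-individual totals of the count matrix, and this patch fixes the axis so the sum really runs over each row (axis=1, one individual) rather than each column. A small user-side sketch of the situation being rejected and of the remedy the error message suggests; the variable names are illustrative only.

    import torch

    # Simulated counts with one individual (row 4) containing only zeros.
    endog = torch.randint(0, 3, (100, 25), dtype=torch.float64)
    endog[4, :] = 0.0

    # This is the condition the check rejects: at least one row sum is (close to) zero.
    assert torch.min(torch.sum(endog, axis=1)) < 0.5

    # Remedy suggested by the error message: drop the all-zero individuals before fitting.
    keep = endog.sum(dim=1) > 0
    endog_filtered = endog[keep]
    print(endog.shape, "->", endog_filtered.shape)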
--- pyPLNmodels/_utils.py | 2 +- tests/conftest.py | 1 - tests/test_common.py | 4 ++++ tests/test_wrong_init.py | 19 +++++++++++++++++++ 4 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 tests/test_wrong_init.py diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 10ce38c4..d6c36c12 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -338,7 +338,7 @@ def _format_model_param( endog = _format_data(endog) if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") - if torch.min(torch.sum(endog, axis=0)) < 0.5: + if torch.min(torch.sum(endog, axis=1)) < 0.5: raise ValueError( "Counts contains individuals containing only zero coutns. Remove it." ) diff --git a/tests/conftest.py b/tests/conftest.py index d89a919a..677fd262 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ import sys import pytest -import torch from pytest_lazyfixture import lazy_fixture as lf import pandas as pd diff --git a/tests/test_common.py b/tests/test_common.py index bd5ca62c..748146e8 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -124,3 +124,7 @@ def test_batch(model): assert mse_coef < 0.1 elif model.nb_cov == 0: assert model.coef is None + + +def test_raise_error_on_zero_counts(): + model = Pln() diff --git a/tests/test_wrong_init.py b/tests/test_wrong_init.py new file mode 100644 index 00000000..22edcf46 --- /dev/null +++ b/tests/test_wrong_init.py @@ -0,0 +1,19 @@ +import pytest + +from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln +import torch + +from tests.import_data import ( + data_real, +) + + +endog_real = data_real["endog"] + + +@pytest.mark.parametrize("pln_model", [Pln, PlnPCA, PlnPCAcollection, ZIPln]) +def test_init_with_zeros_pln(pln_model): + endog_with_zeros = endog_real + endog_with_zeros[4, :] *= 0 + with pytest.raises(ValueError): + model = pln_model(endog_with_zeros) -- GitLab From 2f1dc1308a963e5c7a601325f5b40f833a1062ff Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 17:32:57 +0100 Subject: [PATCH 155/167] add a test that see if the initialization contains zeros. --- pyPLNmodels/_utils.py | 2 +- tests/conftest.py | 1 - tests/test_common.py | 4 ++++ tests/test_wrong_init.py | 19 +++++++++++++++++++ 4 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 tests/test_wrong_init.py diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index 10ce38c4..d6c36c12 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -338,7 +338,7 @@ def _format_model_param( endog = _format_data(endog) if torch.min(endog) < 0: raise ValueError("Counts should be only non negative values.") - if torch.min(torch.sum(endog, axis=0)) < 0.5: + if torch.min(torch.sum(endog, axis=1)) < 0.5: raise ValueError( "Counts contains individuals containing only zero coutns. Remove it." 
) diff --git a/tests/conftest.py b/tests/conftest.py index d89a919a..677fd262 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ import sys import pytest -import torch from pytest_lazyfixture import lazy_fixture as lf import pandas as pd diff --git a/tests/test_common.py b/tests/test_common.py index bd5ca62c..748146e8 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -124,3 +124,7 @@ def test_batch(model): assert mse_coef < 0.1 elif model.nb_cov == 0: assert model.coef is None + + +def test_raise_error_on_zero_counts(): + model = Pln() diff --git a/tests/test_wrong_init.py b/tests/test_wrong_init.py new file mode 100644 index 00000000..22edcf46 --- /dev/null +++ b/tests/test_wrong_init.py @@ -0,0 +1,19 @@ +import pytest + +from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln +import torch + +from tests.import_data import ( + data_real, +) + + +endog_real = data_real["endog"] + + +@pytest.mark.parametrize("pln_model", [Pln, PlnPCA, PlnPCAcollection, ZIPln]) +def test_init_with_zeros_pln(pln_model): + endog_with_zeros = endog_real + endog_with_zeros[4, :] *= 0 + with pytest.raises(ValueError): + model = pln_model(endog_with_zeros) -- GitLab From daca4e0c204de56e1eaa327ceb2b87a98b271930 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:26:10 +0100 Subject: [PATCH 156/167] forgot to remove '''pip install''' in the tests of the README. --- .gitlab-ci.yml | 2 +- ...create_readme_getting_started_and_docstrings_tests.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0e380726..62595af9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,7 @@ tests: - jupyter nbconvert Getting_started.ipynb --to python --output tests/test_getting_started - cd tests - python create_readme_getting_started_and_docstrings_tests.py - - rm test_getting_started.py + - rm test_getting_started.py # remove as it has been modified in a directory. - pytest . only: - main diff --git a/tests/create_readme_getting_started_and_docstrings_tests.py b/tests/create_readme_getting_started_and_docstrings_tests.py index 113bf841..624cccd4 100644 --- a/tests/create_readme_getting_started_and_docstrings_tests.py +++ b/tests/create_readme_getting_started_and_docstrings_tests.py @@ -9,7 +9,14 @@ dir_getting_started = "getting_started" def get_lines(path_to_file, filename, filetype=".py"): with open(f"{path_to_file}{filename}{filetype}") as file: - lines = [line.rstrip() for line in file] + lines = [] + for line in file: + rstrip_line = line.rstrip() + if len(rstrip_line) > 4: + if rstrip_line[0:3] != "pip": + lines.append(rstrip_line) + else: + lines.append(rstrip_line) return lines -- GitLab From 59a524a1e8f1ec86428bce4ac2c7a668fec253c8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:26:10 +0100 Subject: [PATCH 157/167] forgot to remove '''pip install''' in the tests of the README. 
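The filter added to get_lines above drops the "pip install ..." lines that the README contributes when its snippets are converted into tests. A behaviourally equivalent, slightly more compact sketch of the same rule, shown only for clarity:

    def get_lines(path_to_file, filename, filetype=".py"):
        # Same rule as the patched version above: keep every stripped line except the
        # ones longer than 4 characters that start with "pip" (install instructions
        # copied from the README, which must not be executed inside the test suite).
        with open(f"{path_to_file}{filename}{filetype}") as file:
            return [
                line.rstrip()
                for line in file
                if not (len(line.rstrip()) > 4 and line.rstrip().startswith("pip"))
            ]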
--- .gitlab-ci.yml | 2 +- ...create_readme_getting_started_and_docstrings_tests.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0e380726..62595af9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,7 @@ tests: - jupyter nbconvert Getting_started.ipynb --to python --output tests/test_getting_started - cd tests - python create_readme_getting_started_and_docstrings_tests.py - - rm test_getting_started.py + - rm test_getting_started.py # remove as it has been modified in a directory. - pytest . only: - main diff --git a/tests/create_readme_getting_started_and_docstrings_tests.py b/tests/create_readme_getting_started_and_docstrings_tests.py index 113bf841..624cccd4 100644 --- a/tests/create_readme_getting_started_and_docstrings_tests.py +++ b/tests/create_readme_getting_started_and_docstrings_tests.py @@ -9,7 +9,14 @@ dir_getting_started = "getting_started" def get_lines(path_to_file, filename, filetype=".py"): with open(f"{path_to_file}{filename}{filetype}") as file: - lines = [line.rstrip() for line in file] + lines = [] + for line in file: + rstrip_line = line.rstrip() + if len(rstrip_line) > 4: + if rstrip_line[0:3] != "pip": + lines.append(rstrip_line) + else: + lines.append(rstrip_line) return lines -- GitLab From a45a41f24465c79e2d7d488a33227951371a14be Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:29:01 +0100 Subject: [PATCH 158/167] renanmed the point fixe lambert function. --- pyPLNmodels/_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index d6c36c12..ca435734 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -1100,14 +1100,14 @@ def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also= return wrapper -def pf_lambert(x, y): +def point_fixe_lambert(x, y): return x - (1 - (y * torch.exp(-x) + 1) / (x + 1)) def lambert(y, nb_pf=10): x = torch.log(1 + y) for _ in range(nb_pf): - x = pf_lambert(x, y) + x = point_fixe_lambert(x, y) return x -- GitLab From 1045b8539d5549fedb993339432a20275539abb8 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:29:01 +0100 Subject: [PATCH 159/167] renanmed the point fixe lambert function. --- pyPLNmodels/_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index d6c36c12..ca435734 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -1100,14 +1100,14 @@ def _add_doc(parent_class, *, params=None, example=None, returns=None, see_also= return wrapper -def pf_lambert(x, y): +def point_fixe_lambert(x, y): return x - (1 - (y * torch.exp(-x) + 1) / (x + 1)) def lambert(y, nb_pf=10): x = torch.log(1 + y) for _ in range(nb_pf): - x = pf_lambert(x, y) + x = point_fixe_lambert(x, y) return x -- GitLab From 4d61c4eb43d352c50d8e49e6d6705bfd27cfd065 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:44:25 +0100 Subject: [PATCH 160/167] add a plt.show() and removed a "with" en trop. 
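The point_fixe_lambert / lambert pair renamed just above implements a fixed-point iteration: the update x <- x - (1 - (y * exp(-x) + 1)/(x + 1)) is stationary exactly when x * exp(x) = y, so lambert(y) approximates the Lambert W function. A standalone copy of the patched code, plus a quick numerical check of that property:

    import torch

    # Copied from the patch above; the fixed point of this map satisfies x * exp(x) = y.
    def point_fixe_lambert(x, y):
        return x - (1 - (y * torch.exp(-x) + 1) / (x + 1))

    def lambert(y, nb_pf=10):
        x = torch.log(1 + y)
        for _ in range(nb_pf):
            x = point_fixe_lambert(x, y)
        return x

    y = torch.tensor([0.5, 1.0, 5.0, 50.0])
    w = lambert(y)
    print(w * torch.exp(w))  # close to y, since the Lambert W function solves w * exp(w) = y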
--- pyPLNmodels/models.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 32006437..2e6c332c 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -244,6 +244,7 @@ class _model(ABC): ) for i in range(covariances.shape[0]): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) + plt.show() return ax def _project_parameters(self): @@ -504,7 +505,7 @@ class _model(ABC): """ return self.latent_variables - def qq_plots(self): + def _qq_plots(self): centered_latent = self.latent_variables - torch.mean( self.latent_variables, axis=0 ) @@ -1302,6 +1303,7 @@ class _model(ABC): ax.set_ylabel("Predicted values") ax.set_xlabel("Counts") ax.legend() + plt.show() return ax def _print_beginning_message(self): @@ -3578,7 +3580,7 @@ class ZIPln(_model): @property def _description(self): - return "with full covariance model and zero-inflation." + return "full covariance model and zero-inflation." def _random_init_model_parameters(self): self._coef_inflation = torch.randn(self.nb_cov, self.dim).to(DEVICE) -- GitLab From b33cd71b4adcf231464df1fbbdf9230a53feec0a Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:44:25 +0100 Subject: [PATCH 161/167] add a plt.show() and removed a "with" en trop. --- pyPLNmodels/models.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pyPLNmodels/models.py b/pyPLNmodels/models.py index 32006437..2e6c332c 100644 --- a/pyPLNmodels/models.py +++ b/pyPLNmodels/models.py @@ -244,6 +244,7 @@ class _model(ABC): ) for i in range(covariances.shape[0]): _plot_ellipse(x[i], y[i], cov=covariances[i], ax=ax) + plt.show() return ax def _project_parameters(self): @@ -504,7 +505,7 @@ class _model(ABC): """ return self.latent_variables - def qq_plots(self): + def _qq_plots(self): centered_latent = self.latent_variables - torch.mean( self.latent_variables, axis=0 ) @@ -1302,6 +1303,7 @@ class _model(ABC): ax.set_ylabel("Predicted values") ax.set_xlabel("Counts") ax.legend() + plt.show() return ax def _print_beginning_message(self): @@ -3578,7 +3580,7 @@ class ZIPln(_model): @property def _description(self): - return "with full covariance model and zero-inflation." + return "full covariance model and zero-inflation." def _random_init_model_parameters(self): self._coef_inflation = torch.randn(self.nb_cov, self.dim).to(DEVICE) -- GitLab From 7e804580fb71d6c1ca5c1db14ba5aa4fe15015ff Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:47:39 +0100 Subject: [PATCH 162/167] do not know how this file got there. --- pyPLNmodels/new_model.py | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 pyPLNmodels/new_model.py diff --git a/pyPLNmodels/new_model.py b/pyPLNmodels/new_model.py deleted file mode 100644 index 2d4acd45..00000000 --- a/pyPLNmodels/new_model.py +++ /dev/null @@ -1,9 +0,0 @@ -from pyPLNmodels import ZIPln, get_real_count_data - - -endog = get_real_count_data() -zi = ZIPln(endog, add_const = True) -zi.fit(nb_max_iteration = 10) -zi.show() - - -- GitLab From 091ae0db4fddfffafe7e68ff0a38964842963094 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Wed, 17 Jan 2024 18:47:39 +0100 Subject: [PATCH 163/167] do not know how this file got there. 
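Patches 160/161 above make the diagnostic plots call plt.show() so they actually appear when the model is used from a plain script rather than a notebook. A usage sketch; the function and method names (get_real_count_data, Pln, plot_expected_vs_true, viz) are taken from earlier commits in this history, so treat the exact signatures as indicative rather than guaranteed.

    from pyPLNmodels import Pln, get_real_count_data

    endog = get_real_count_data()
    pln = Pln(endog, add_const=True)
    pln.fit()
    pln.plot_expected_vs_true()  # scatter of predicted vs. observed counts, now displayed immediately
    pln.viz()                    # latent-space visualization, also shown right away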
--- pyPLNmodels/new_model.py | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 pyPLNmodels/new_model.py diff --git a/pyPLNmodels/new_model.py b/pyPLNmodels/new_model.py deleted file mode 100644 index 2d4acd45..00000000 --- a/pyPLNmodels/new_model.py +++ /dev/null @@ -1,9 +0,0 @@ -from pyPLNmodels import ZIPln, get_real_count_data - - -endog = get_real_count_data() -zi = ZIPln(endog, add_const = True) -zi.fit(nb_max_iteration = 10) -zi.show() - - -- GitLab From fad8682f3fded3f575be69d4c41bb0381766936c Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 18 Jan 2024 07:43:14 +0100 Subject: [PATCH 164/167] forgot to remove a test. --- tests/test_common.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/test_common.py b/tests/test_common.py index 748146e8..bd5ca62c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -124,7 +124,3 @@ def test_batch(model): assert mse_coef < 0.1 elif model.nb_cov == 0: assert model.coef is None - - -def test_raise_error_on_zero_counts(): - model = Pln() -- GitLab From 079d8d3390265fb547b63324eeda9308622f0118 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Thu, 18 Jan 2024 07:43:14 +0100 Subject: [PATCH 165/167] forgot to remove a test. --- tests/test_common.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/test_common.py b/tests/test_common.py index 748146e8..bd5ca62c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -124,7 +124,3 @@ def test_batch(model): assert mse_coef < 0.1 elif model.nb_cov == 0: assert model.coef is None - - -def test_raise_error_on_zero_counts(): - model = Pln() -- GitLab From ef05c213cdc9148aee8b919252e4310799b8bce5 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 19 Jan 2024 21:10:17 +0100 Subject: [PATCH 166/167] put to zero all the endog data created conflicts in the tests. Made a copy instead. --- tests/test_wrong_init.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_wrong_init.py b/tests/test_wrong_init.py index 22edcf46..f975c738 100644 --- a/tests/test_wrong_init.py +++ b/tests/test_wrong_init.py @@ -1,7 +1,7 @@ import pytest from pyPLNmodels.models import Pln, PlnPCA, PlnPCAcollection, ZIPln -import torch +import numpy as np from tests.import_data import ( data_real, @@ -13,7 +13,7 @@ endog_real = data_real["endog"] @pytest.mark.parametrize("pln_model", [Pln, PlnPCA, PlnPCAcollection, ZIPln]) def test_init_with_zeros_pln(pln_model): - endog_with_zeros = endog_real + endog_with_zeros = np.copy(endog_real) endog_with_zeros[4, :] *= 0 with pytest.raises(ValueError): model = pln_model(endog_with_zeros) -- GitLab From 9594766b656fe1784f1580809fc7d36fddd5cd39 Mon Sep 17 00:00:00 2001 From: bastien-mva <bastien.batardiere@gmail.com> Date: Fri, 19 Jan 2024 21:35:41 +0100 Subject: [PATCH 167/167] add stability when computing the elbo. --- pyPLNmodels/_utils.py | 12 +++++++++++- pyPLNmodels/elbos.py | 7 +++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/pyPLNmodels/_utils.py b/pyPLNmodels/_utils.py index ca435734..3e0c17a3 100644 --- a/pyPLNmodels/_utils.py +++ b/pyPLNmodels/_utils.py @@ -340,7 +340,7 @@ def _format_model_param( raise ValueError("Counts should be only non negative values.") if torch.min(torch.sum(endog, axis=1)) < 0.5: raise ValueError( - "Counts contains individuals containing only zero coutns. Remove it." + "Counts contains individuals containing only zero counts. Remove it." 
) exog = _format_data(exog) if add_const is True: @@ -1155,3 +1155,13 @@ def mat_to_vec(matc, p, q): tril = torch.tril(matc) # tril = matc.reshape(-1,1).squeeze() return tril[torch.tril_indices(p, q, offset=0).tolist()] + + +def _log1pexp(t): + mask = t > 10 + mask += t < -10 + return torch.where( + mask, + t, + torch.log(1 + torch.exp(t)), + ) diff --git a/pyPLNmodels/elbos.py b/pyPLNmodels/elbos.py index 7bb5b01f..937fa2df 100644 --- a/pyPLNmodels/elbos.py +++ b/pyPLNmodels/elbos.py @@ -1,5 +1,5 @@ import torch # pylint:disable=[C0114] -from ._utils import _log_stirling, _trunc_log +from ._utils import _log_stirling, _trunc_log, _log1pexp from ._closed_forms import _closed_formula_covariance, _closed_formula_coef from typing import Optional @@ -238,10 +238,9 @@ def elbo_zi_pln( un_moins_rho_m_moins_xb = un_moins_rho * m_minus_xb un_moins_rho_m_moins_xb_outer = un_moins_rho_m_moins_xb.T @ un_moins_rho_m_moins_xb inside_b = -1 / 2 * Omega * un_moins_rho_m_moins_xb_outer - inside_c = torch.multiply(latent_prob, x_coef_inflation) - torch.log( - 1 + torch.exp(x_coef_inflation) + inside_c = torch.multiply(latent_prob, x_coef_inflation) - _log1pexp( + x_coef_inflation ) - log_diag = torch.log(torch.diag(covariance)) log_S_term = torch.sum( torch.multiply(1 - latent_prob, torch.log(torch.abs(latent_sqrt_var))), axis=0 -- GitLab
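The stability fix in this last patch replaces a direct torch.log(1 + torch.exp(t)) in the zero-inflated ELBO with _log1pexp: for t beyond +/-10 the helper returns t itself, which for large positive t matches the asymptote log(1 + exp(t)) ~ t and avoids the inf produced by exp. A standalone re-implementation (using an explicit logical-or for the mask) and a quick check:

    import torch

    def _log1pexp(t):
        # Outside [-10, 10] return t itself; inside, use the exact formula.
        # For large positive t the exact branch would overflow: exp(t) = inf.
        mask = (t > 10) | (t < -10)
        return torch.where(mask, t, torch.log(1 + torch.exp(t)))

    t = torch.tensor([-5.0, 0.0, 5.0, 100.0])
    print(torch.log(1 + torch.exp(t)))  # last entry overflows to inf
    print(_log1pexp(t))                 # last entry is 100.0, the correct asymptotic value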