diff --git a/conda-envs/environment-dev.yml b/conda-envs/environment-dev.yml index 676ee30fdd..7155366c33 100644 --- a/conda-envs/environment-dev.yml +++ b/conda-envs/environment-dev.yml @@ -13,7 +13,7 @@ dependencies: - numpy>=1.15.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - networkx - scipy>=1.4.1 diff --git a/conda-envs/environment-docs.yml b/conda-envs/environment-docs.yml index 6aafe66db4..02e28bd3c8 100644 --- a/conda-envs/environment-docs.yml +++ b/conda-envs/environment-docs.yml @@ -11,7 +11,7 @@ dependencies: - numpy>=1.15.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - rich>=13.7.1 - scipy>=1.4.1 diff --git a/conda-envs/environment-jax.yml b/conda-envs/environment-jax.yml index f9c82b620d..cd2f63de23 100644 --- a/conda-envs/environment-jax.yml +++ b/conda-envs/environment-jax.yml @@ -20,7 +20,7 @@ dependencies: - numpyro>=0.8.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - networkx - rich>=13.7.1 diff --git a/conda-envs/environment-test.yml b/conda-envs/environment-test.yml index e81fb4742e..30f685bbdb 100644 --- a/conda-envs/environment-test.yml +++ b/conda-envs/environment-test.yml @@ -16,7 +16,7 @@ dependencies: - numpy>=1.15.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - networkx - rich>=13.7.1 diff --git a/conda-envs/windows-environment-dev.yml b/conda-envs/windows-environment-dev.yml index f892d737c5..75f7ffd9e0 100644 --- a/conda-envs/windows-environment-dev.yml +++ b/conda-envs/windows-environment-dev.yml @@ -13,7 +13,7 @@ dependencies: - numpy>=1.15.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - networkx - rich>=13.7.1 diff --git a/conda-envs/windows-environment-test.yml b/conda-envs/windows-environment-test.yml index e27fe46f2a..b572ef1a84 100644 --- a/conda-envs/windows-environment-test.yml +++ 
b/conda-envs/windows-environment-test.yml @@ -16,7 +16,7 @@ dependencies: - numpy>=1.15.0 - pandas>=0.24.0 - pip -- pytensor>=2.22.1,<2.23 +- pytensor>=2.23,<2.24 - python-graphviz - networkx - rich>=13.7.1 diff --git a/docs/source/contributing/implementing_distribution.md b/docs/source/contributing/implementing_distribution.md index 5e2627807b..8d0c1750ad 100644 --- a/docs/source/contributing/implementing_distribution.md +++ b/docs/source/contributing/implementing_distribution.md @@ -43,14 +43,9 @@ from typing import List, Tuple class BlahRV(RandomVariable): name: str = "blah" - # Provide the minimum number of (output) dimensions for this RV - # (e.g. `0` for a scalar, `1` for a vector, etc.) - ndim_supp: int = 0 - - # Provide the number of (input) dimensions for each parameter of the RV - # (e.g. if there's only one vector parameter, `[1]`; for two parameters, - # one a matrix and the other a scalar, `[2, 0]`; etc.) - ndims_params: List[int] = [0, 0] + # Provide a numpy-style signature for this RV, which indicates + # the number and core dimensionality of each input and output. + signature: "(),()->()" # The NumPy/PyTensor dtype for this RV (e.g. `"int32"`, `"int64"`). # The standard in the library is `"int64"` for discrete variables @@ -87,8 +82,8 @@ blah = BlahRV() Some important things to keep in mind: 1. Everything inside the `rng_fn` method is pure Python code (as are the inputs) and should __not__ make use of other `PyTensor` symbolic ops. The random method should make use of the `rng` which is a NumPy {class}`~numpy.random.RandomGenerator`, so that samples are reproducible. -1. Non-default `RandomVariable` dimensions will end up in the `rng_fn` via the `size` kwarg. The `rng_fn` will have to take this into consideration for correct output. `size` is the specification used by NumPy and SciPy and works like PyMC `shape` for univariate distributions, but is different for multivariate distributions. 
For multivariate distributions the __`size` excludes the `ndim_supp` support dimensions__, whereas the __`shape` of the resulting `TensorVariable` or `ndarray` includes the support dimensions__. For more context check {ref}`The dimensionality notebook `. -1. `PyTensor` can automatically infer the output shape of univariate `RandomVariable`s (`ndim_supp=0`). For multivariate distributions (`ndim_supp>=1`), the method `_supp_shape_from_params` must be implemented in the new `RandomVariable` class. This method returns the support dimensionality of an RV given its parameters. In some cases this can be derived from the shape of one of its parameters, in which case the helper {func}`pytensor.tensor.random.utils.supp_shape_from_ref_param_shape` cand be used as is in {class}`~pymc.DirichletMultinomialRV`. In other cases the argument values (and not their shapes) may determine the support shape of the distribution, as happens in the `~pymc.distributions.multivarite._LKJCholeskyCovRV`. In simpler cases they may be constant. +1. Non-default `RandomVariable` dimensions will end up in the `rng_fn` via the `size` kwarg. The `rng_fn` will have to take this into consideration for correct output. `size` is the specification used by NumPy and SciPy and works like PyMC `shape` for univariate distributions, but is different for multivariate distributions. For multivariate distributions the __`size` excludes the support dimensions__, whereas the __`shape` of the resulting `TensorVariable` or `ndarray` includes the support dimensions__. For more context check {ref}`The dimensionality notebook `. +1. `PyTensor` can automatically infer the output shape of univariate `RandomVariable`s. For multivariate distributions, the method `_supp_shape_from_params` must be implemented in the new `RandomVariable` class. This method returns the support dimensionality of an RV given its parameters. 
In some cases this can be derived from the shape of one of its parameters, in which case the helper {func}`pytensor.tensor.random.utils.supp_shape_from_ref_param_shape` can be used as is in {class}`~pymc.DirichletMultinomialRV`. In other cases the argument values (and not their shapes) may determine the support shape of the distribution, as happens in the `~pymc.distributions.multivariate._LKJCholeskyCovRV`. In simpler cases they may be constant. 1. It's okay to use the `rng_fn` `classmethods` of other PyTensor and PyMC `RandomVariables` inside the new `rng_fn`. For example if you are implementing a negative HalfNormal `RandomVariable`, your `rng_fn` can simply return `- halfnormal.rng_fn(rng, scale, size)`. *Note: In addition to `size`, the PyMC API also provides `shape`, `dims` and `observed` as alternatives to define a distribution dimensionality, but this is taken care of by {class}`~pymc.Distribution`, and should not require any extra changes.* diff --git a/docs/source/learn/core_notebooks/dimensionality.ipynb b/docs/source/learn/core_notebooks/dimensionality.ipynb index 13f98b7ff0..926de94d4e 100644 --- a/docs/source/learn/core_notebooks/dimensionality.ipynb +++ b/docs/source/learn/core_notebooks/dimensionality.ipynb @@ -402,17 +402,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "shape mismatch: objects cannot be broadcast to a single shape.
Mismatch is between arg 0 with shape (3,) and arg 1 with shape (2,).\n", - "Apply node that caused the error: normal_rv{0, (0, 0), floatX, True}(RandomGeneratorSharedVariable(), [], 11, [ 1 10 100], [0.1 0.1])\n", - "Toposort index: 0\n", - "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(0,)), TensorType(int64, shape=()), TensorType(int64, shape=(3,)), TensorType(float64, shape=(2,))]\n", - "Inputs shapes: ['No shapes', (0,), (), (3,), (2,)]\n", - "Inputs strides: ['No strides', (0,), (), (8,), (8,)]\n", - "Inputs values: [Generator(PCG64) at 0x7F6427F8CAC0, array([], dtype=int64), array(11), array([ 1, 10, 100]), array([0.1, 0.1])]\n", - "Outputs clients: [['output'], ['output']]\n", - "\n", - "HINT: Re-running with most PyTensor optimizations disabled could provide a back-trace showing when this node was created. This can be done by setting the PyTensor flag 'optimizer=fast_compile'. If that does not work, PyTensor optimizations can be disabled with 'optimizer=None'.\n", - "HINT: Use the PyTensor flag `exception_verbosity=high` for a debug print-out and storage map footprint of this Apply node.\n" + "Could not broadcast dimensions. 
Incompatible shapes were [(ScalarConstant(ScalarType(int64), data=3),), (ScalarConstant(ScalarType(int64), data=2),)].\n" ] } ], @@ -446,7 +436,7 @@ { "data": { "text/plain": [ - "array([-0.49526775, -0.94608062, 1.66397913])" + "array([ 0.06413633, 1.29893485, -0.48072495])" ] }, "execution_count": 13, @@ -474,10 +464,10 @@ { "data": { "text/plain": [ - "array([[ 2.22626513, 2.12938134, 0.49074886],\n", - " [ 0.08312601, 1.05049093, 1.91718083],\n", - " [-0.68191815, 1.43771096, 1.76780399],\n", - " [-0.59883241, 0.26954893, 2.74319335]])" + "array([[-0.49526775, -0.94608062, 1.66397913],\n", + " [ 0.703617 , 0.66713031, 0.80725231],\n", + " [ 0.19219926, 1.62987906, 2.30590873],\n", + " [ 1.83763939, -0.19878079, 1.46751553]])" ] }, "execution_count": 14, @@ -508,13 +498,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "shape mismatch: objects cannot be broadcast to a single shape. Mismatch is between arg 0 with shape (3, 4) and arg 1 with shape (3,).\n", - "Apply node that caused the error: normal_rv{0, (0, 0), floatX, True}(RandomGeneratorSharedVariable(), [3 4], 11, [0 1 2], 1.0)\n", + "shape mismatch: objects cannot be broadcast to a single shape. 
Mismatch is between arg 0 with shape (3, 4) and arg 1 with shape (1, 3).\n", + "Apply node that caused the error: normal_rv{\"(),()->()\"}(RNG(), [3 4], [[0 1 2]], [[1]])\n", "Toposort index: 0\n", - "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(2,)), TensorType(int64, shape=()), TensorType(int64, shape=(3,)), TensorType(float64, shape=())]\n", - "Inputs shapes: ['No shapes', (2,), (), (3,), ()]\n", - "Inputs strides: ['No strides', (8,), (), (8,), ()]\n", - "Inputs values: [Generator(PCG64) at 0x7F64280725E0, array([3, 4]), array(11), array([0, 1, 2]), array(1.)]\n", + "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(2,)), TensorType(int64, shape=(1, 3)), TensorType(int8, shape=(1, 1))]\n", + "Inputs shapes: ['No shapes', (2,), (1, 3), (1, 1)]\n", + "Inputs strides: ['No strides', (8,), (24, 8), (1, 1)]\n", + "Inputs values: [Generator(PCG64) at 0x7F9A2DA91000, array([3, 4]), array([[0, 1, 2]]), array([[1]], dtype=int8)]\n", "Outputs clients: [['output'], ['output']]\n", "\n", "HINT: Re-running with most PyTensor optimizations disabled could provide a back-trace showing when this node was created. This can be done by setting the PyTensor flag 'optimizer=fast_compile'. 
If that does not work, PyTensor optimizations can be disabled with 'optimizer=None'.\n", @@ -544,9 +534,9 @@ { "data": { "text/plain": [ - "array([[-0.73397401, -0.18717845, -0.78548049, 1.64478883],\n", - " [ 3.54543846, 1.22954216, 2.13674063, 1.94194106],\n", - " [ 0.85294471, 3.52041332, 2.94428975, 3.25944187]])" + "array([[ 1.36252056, 0.90337366, -1.83306938, -1.04031058],\n", + " [ 0.09757005, -0.03093604, 3.29729122, -0.86869013],\n", + " [ 3.51136436, -0.33437459, 1.93223367, 3.71535763]])" ] }, "execution_count": 16, @@ -585,8 +575,8 @@ { "data": { "text/plain": [ - "(array([-0.45755879, 1.59975702, 0.20546749]),\n", - " array([0.29866199, 0.29866199, 0.29866199]))" + "(array([-0.73397401, 2.54543846, -1.14705529]),\n", + " array([-0.45755879, -0.45755879, -0.45755879]))" ] }, "execution_count": 18, @@ -632,7 +622,7 @@ { "data": { "text/plain": [ - "(array([0.55390975, 2.17440418, 1.83014764]), 1)" + "(array([1.29866199, 1.01091254, 0.08414986]), 1)" ] }, "execution_count": 19, @@ -704,7 +694,7 @@ { "data": { "text/plain": [ - "(array([-0.68893796]), 1)" + "(array([0.55390975]), 1)" ] }, "execution_count": 21, @@ -752,7 +742,7 @@ { "data": { "text/plain": [ - "array([0.57262853, 0.34230354, 1.96818163])" + "array([-0.68893796, 1.10911095, -0.30443374])" ] }, "execution_count": 22, @@ -781,7 +771,7 @@ { "data": { "text/plain": [ - "array([1.0623799 , 0.84622693, 0.34046237])" + "array([0.57262853, 0.34230354, 1.96818163])" ] }, "execution_count": 23, @@ -828,11 +818,11 @@ { "data": { "text/plain": [ - "array([[2, 0, 3],\n", - " [1, 1, 3],\n", + "array([[0, 2, 3],\n", " [0, 2, 3],\n", + " [1, 0, 4],\n", " [0, 1, 4],\n", - " [1, 0, 4]])" + " [0, 1, 4]])" ] }, "execution_count": 24, @@ -864,11 +854,11 @@ { "data": { "text/plain": [ - "array([[0, 1, 4],\n", - " [0, 0, 5],\n", - " [3, 1, 1],\n", + "array([[2, 0, 3],\n", + " [1, 1, 3],\n", + " [0, 2, 3],\n", " [0, 1, 4],\n", - " [0, 2, 3]])" + " [1, 0, 4]])" ] }, "execution_count": 25, @@ -895,9 +885,9 @@ { 
"data": { "text/plain": [ - "array([[2, 0, 3],\n", - " [1, 3, 1],\n", - " [1, 1, 3]])" + "array([[0, 1, 4],\n", + " [0, 0, 5],\n", + " [3, 1, 1]])" ] }, "execution_count": 26, @@ -924,9 +914,9 @@ { "data": { "text/plain": [ - "array([[0, 0, 0, 0, 0],\n", - " [2, 2, 1, 0, 3],\n", - " [3, 3, 4, 5, 2]])" + "array([[2, 1, 1, 0, 2],\n", + " [0, 3, 1, 0, 1],\n", + " [3, 1, 3, 5, 2]])" ] }, "execution_count": 27, @@ -973,8 +963,8 @@ { "data": { "text/plain": [ - "array([[1, 2, 2],\n", - " [0, 3, 7]])" + "array([[0, 2, 3],\n", + " [1, 4, 5]])" ] }, "execution_count": 28, @@ -1010,7 +1000,7 @@ { "data": { "text/plain": [ - "array([[2, 2, 1],\n", + "array([[1, 2, 2],\n", " [0, 3, 7]])" ] }, @@ -1087,8 +1077,8 @@ { "data": { "text/plain": [ - "array([[1, 0, 4],\n", - " [1, 2, 7]])" + "array([[2, 2, 1],\n", + " [1, 1, 8]])" ] }, "execution_count": 31, @@ -1129,7 +1119,7 @@ { "data": { "text/plain": [ - "(0, 1)" + "[0, 1]" ] }, "execution_count": 32, @@ -1145,29 +1135,46 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Implicit batch dimensions must still respect broadcasting rules. The following example is not valid because `n` has batched dimensions of `shape=(2,)` and `p` has batched dimensions of `shape=(3,)` which cannot be broadcasted together." + "Both `ndim_supp` and `ndims_params` are actually extracted from a numpy-like signature" ] }, { "cell_type": "code", "execution_count": 33, "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'(),(p)->(p)'" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "multinomial_dist.owner.op.signature" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Implicit batch dimensions must still respect broadcasting rules. The following example is not valid because `n` has batched dimensions of `shape=(2,)` and `p` has batched dimensions of `shape=(3,)` which cannot be broadcasted together." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "operands could not be broadcast together with remapped shapes [original->remapped]: (2,) and requested shape (3,)\n", - "Apply node that caused the error: multinomial_rv{1, (0, 1), int64, True}(RandomGeneratorSharedVariable(), [], 4, [ 5 10], [[0.1 0.3 ... 0.3 0.6]])\n", - "Toposort index: 0\n", - "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(0,)), TensorType(int64, shape=()), TensorType(int64, shape=(2,)), TensorType(float64, shape=(3, 3))]\n", - "Inputs shapes: ['No shapes', (0,), (), (2,), (3, 3)]\n", - "Inputs strides: ['No strides', (0,), (), (8,), (24, 8)]\n", - "Inputs values: [Generator(PCG64) at 0x7F6425B8B060, array([], dtype=int64), array(4), array([ 5, 10]), 'not shown']\n", - "Outputs clients: [['output'], ['output']]\n", - "\n", - "HINT: Re-running with most PyTensor optimizations disabled could provide a back-trace showing when this node was created. This can be done by setting the PyTensor flag 'optimizer=fast_compile'. If that does not work, PyTensor optimizations can be disabled with 'optimizer=None'.\n", - "HINT: Use the PyTensor flag `exception_verbosity=high` for a debug print-out and storage map footprint of this Apply node.\n" + "Could not broadcast dimensions. 
Incompatible shapes were [(ScalarConstant(ScalarType(int64), data=2),), (ScalarConstant(ScalarType(int64), data=3),)].\n" ] } ], @@ -1202,7 +1209,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 35, "metadata": { "pycharm": { "name": "#%%\n" @@ -1212,11 +1219,11 @@ { "data": { "text/plain": [ - "array([[0, 1, 4],\n", - " [4, 1, 5]])" + "array([[1, 1, 3],\n", + " [2, 1, 7]])" ] }, - "execution_count": 34, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -1234,20 +1241,20 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 36, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "operands could not be broadcast together with remapped shapes [original->remapped]: (2,) and requested shape (2,4)\n", - "Apply node that caused the error: multinomial_rv{1, (0, 1), int64, True}(RandomGeneratorSharedVariable(), [2 4], 4, [ 5 10], [0.1 0.3 0.6])\n", + "operands could not be broadcast together with remapped shapes [original->remapped]: (1,2) and requested shape (2,4)\n", + "Apply node that caused the error: multinomial_rv{\"(),(p)->(p)\"}(RNG(), [2 4], [[ 5 10]], [[[0.1 0.3 0.6]]])\n", "Toposort index: 0\n", - "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(2,)), TensorType(int64, shape=()), TensorType(int64, shape=(2,)), TensorType(float64, shape=(3,))]\n", - "Inputs shapes: ['No shapes', (2,), (), (2,), (3,)]\n", - "Inputs strides: ['No strides', (8,), (), (8,), (8,)]\n", - "Inputs values: [Generator(PCG64) at 0x7F6425AC8120, array([2, 4]), array(4), array([ 5, 10]), array([0.1, 0.3, 0.6])]\n", + "Inputs types: [RandomGeneratorType, TensorType(int64, shape=(2,)), TensorType(int64, shape=(1, 2)), TensorType(float64, shape=(1, 1, 3))]\n", + "Inputs shapes: ['No shapes', (2,), (1, 2), (1, 1, 3)]\n", + "Inputs strides: ['No strides', (8,), (16, 8), (24, 24, 8)]\n", + "Inputs values: [Generator(PCG64) at 0x7F9A2DA91C40, array([2, 4]), array([[ 5, 10]]), 
array([[[0.1, 0.3, 0.6]]])]\n", "Outputs clients: [['output'], ['output']]\n", "\n", "HINT: Re-running with most PyTensor optimizations disabled could provide a back-trace showing when this node was created. This can be done by setting the PyTensor flag 'optimizer=fast_compile'. If that does not work, PyTensor optimizations can be disabled with 'optimizer=None'.\n", @@ -1282,7 +1289,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 37, "metadata": { "pycharm": { "name": "#%%\n" @@ -1323,7 +1330,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 38, "metadata": { "pycharm": { "name": "#%%\n" @@ -1336,62 +1343,62 @@ "\n", "\n", - "\n", "\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "cluster3\n", - "\n", - "3\n", + "\n", + "3\n", "\n", - "\n", + "\n", "\n", - "y\n", - "\n", - "y\n", - "~\n", - "Normal\n", + "x\n", + "\n", + "x\n", + "~\n", + "Normal\n", "\n", - "\n", + "\n", "\n", - "x\n", - "\n", - "x\n", - "~\n", - "Normal\n", + "y\n", + "\n", + "y\n", + "~\n", + "Normal\n", "\n", "\n", - "\n", + "\n", "x->y\n", - "\n", - "\n", + "\n", + "\n", "\n", "\n", "\n", "sigma\n", - "\n", - "sigma\n", - "~\n", - "HalfNormal\n", + "\n", + "sigma\n", + "~\n", + "HalfNormal\n", "\n", "\n", - "\n", + "\n", "sigma->y\n", - "\n", - "\n", + "\n", + "\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, - "execution_count": 37, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -1415,7 +1422,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 39, "metadata": { "pycharm": { "name": "#%%\n" @@ -1428,55 +1435,55 @@ "\n", "\n", - "\n", "\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "cluster3\n", - "\n", - "3\n", + "\n", + "3\n", "\n", "\n", "cluster4\n", - "\n", - "4\n", + "\n", + "4\n", "\n", "\n", "\n", "scalar (support)\n", - "\n", - "scalar (support)\n", - "~\n", - "Normal\n", + "\n", + "scalar (support)\n", + "~\n", + "Normal\n", "\n", "\n", "\n", 
"vector (implicit)\n", - "\n", - "vector (implicit)\n", - "~\n", - "Normal\n", + "\n", + "vector (implicit)\n", + "~\n", + "Normal\n", "\n", "\n", "\n", "vector (explicit)\n", - "\n", - "vector (explicit)\n", - "~\n", - "Normal\n", + "\n", + "vector (explicit)\n", + "~\n", + "Normal\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, - "execution_count": 38, + "execution_count": 39, "metadata": {}, "output_type": "execute_result" } @@ -1510,7 +1517,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 40, "metadata": { "pycharm": { "name": "#%%\n" @@ -1523,34 +1530,34 @@ "\n", "\n", - "\n", "\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "clusteryear (3)\n", - "\n", - "year (3)\n", + "\n", + "year (3)\n", "\n", "\n", "\n", "profit\n", - "\n", - "profit\n", - "~\n", - "Normal\n", + "\n", + "profit\n", + "~\n", + "Normal\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, - "execution_count": 39, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } @@ -1575,7 +1582,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 41, "metadata": { "pycharm": { "name": "#%%\n" @@ -1588,34 +1595,34 @@ "\n", "\n", - "\n", "\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "clusteryear (3)\n", - "\n", - "year (3)\n", + "\n", + "year (3)\n", "\n", "\n", "\n", "profit\n", - "\n", - "profit\n", - "~\n", - "Normal\n", + "\n", + "profit\n", + "~\n", + "Normal\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, - "execution_count": 40, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } @@ -1652,7 +1659,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 42, "metadata": { "pycharm": { "name": "#%%\n" @@ -1665,55 +1672,55 @@ "\n", "\n", - "\n", "\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "clustersupport (3)\n", - "\n", - "support (3)\n", + "\n", + "support (3)\n", "\n", "\n", "clusterbatch (4) x support (3)\n", - "\n", - "batch 
(4) x support (3)\n", + "\n", + "batch (4) x support (3)\n", "\n", "\n", "\n", "vector\n", - "\n", - "vector\n", - "~\n", - "MvNormal\n", + "\n", + "vector\n", + "~\n", + "MvNormal\n", "\n", "\n", "\n", "matrix (explicit)\n", - "\n", - "matrix (explicit)\n", - "~\n", - "MvNormal\n", + "\n", + "matrix (explicit)\n", + "~\n", + "MvNormal\n", "\n", "\n", "\n", "matrix (implicit)\n", - "\n", - "matrix (implicit)\n", - "~\n", - "MvNormal\n", + "\n", + "matrix (implicit)\n", + "~\n", + "MvNormal\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, - "execution_count": 41, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } @@ -1770,7 +1777,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 43, "metadata": { "pycharm": { "name": "#%%\n" @@ -1781,19 +1788,19 @@ "name": "stdout", "output_type": "stream", "text": [ - "Last updated: Mon Jul 17 2023\n", + "Last updated: Wed Jun 19 2024\n", "\n", "Python implementation: CPython\n", - "Python version : 3.10.9\n", - "IPython version : 8.11.0\n", + "Python version : 3.11.8\n", + "IPython version : 8.22.2\n", "\n", - "pytensor: 2.12.3\n", + "pytensor: 2.20.0+3.g66439d283.dirty\n", "\n", - "pymc : 5.2.0\n", - "numpy : None\n", - "pytensor: 2.12.3\n", + "numpy : 1.26.4\n", + "pymc : 5.15.0+1.g58927d608\n", + "pytensor: 2.20.0+3.g66439d283.dirty\n", "\n", - "Watermark: 2.3.1\n", + "Watermark: 2.4.3\n", "\n" ] } @@ -1832,7 +1839,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.11.8" }, "toc": { "base_numbering": 1, diff --git a/docs/source/learn/core_notebooks/pymc_pytensor.ipynb b/docs/source/learn/core_notebooks/pymc_pytensor.ipynb index 66a8c88cc2..3548478708 100644 --- a/docs/source/learn/core_notebooks/pymc_pytensor.ipynb +++ b/docs/source/learn/core_notebooks/pymc_pytensor.ipynb @@ -82,10 +82,10 @@ "output_type": "stream", "text": [ "\n", - "x type: TensorType(float64, ())\n", + "x type: Scalar(float64, 
shape=())\n", "x name = x\n", "---\n", - "y type: TensorType(float64, (?,))\n", + "y type: Vector(float64, shape=(?,))\n", "y name = y\n", "\n" ] @@ -159,17 +159,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{log,no_inplace} [id A] 'log(x + y)'\n", - " |Elemwise{add,no_inplace} [id B] 'x + y'\n", - " |InplaceDimShuffle{x} [id C]\n", - " | |x [id D]\n", - " |y [id E]\n" + "Log [id A] 'log(x + y)'\n", + " └─ Add [id B] 'x + y'\n", + " ├─ ExpandDims{axis=0} [id C]\n", + " │ └─ x [id D]\n", + " └─ y [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 5, @@ -303,15 +303,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{true_div,no_inplace} [id A] 'a / b'\n", - " |a [id B]\n", - " |b [id C]\n" + "True_div [id A] 'a / b'\n", + " ├─ a [id B]\n", + " └─ b [id C]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 10, @@ -346,17 +346,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{mul,no_inplace} [id A] 'b * c'\n", - " |b [id B]\n", - " |Elemwise{true_div,no_inplace} [id C] 'a / b'\n", - " |a [id D]\n", - " |b [id B]\n" + "Mul [id A] 'b * c'\n", + " ├─ b [id B]\n", + " └─ True_div [id C] 'a / b'\n", + " ├─ a [id D]\n", + " └─ b [id B]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 11, @@ -388,14 +388,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "DeepCopyOp [id A] 'a' 0\n", - " |a [id B]\n" + "DeepCopyOp [id A] 0\n", + " └─ a [id B]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 12, @@ -439,11 +439,11 @@ "output_type": "stream", "text": [ "\n", - "z type: TensorType(float64, (?,))\n", + "z type: Vector(float64, shape=(?,))\n", "z name = x + y\n", - "z owner = Elemwise{add,no_inplace}(InplaceDimShuffle{x}.0, y)\n", - "z owner inputs = [InplaceDimShuffle{x}.0, y]\n", - "z owner op = Elemwise{add,no_inplace}\n", + "z owner = Add(ExpandDims{axis=0}.0, y)\n", + "z owner inputs = [ExpandDims{axis=0}.0, y]\n", + 
"z owner op = Add\n", "z owner output = [x + y]\n", "\n" ] @@ -480,23 +480,23 @@ "output_type": "stream", "text": [ "---\n", - "Checking variable log(x + y) of type TensorType(float64, (?,))\n", - " > Op is Elemwise{log,no_inplace}\n", + "Checking variable log(x + y) of type Vector(float64, shape=(?,))\n", + " > Op is Log\n", " > Input 0 is x + y\n", "---\n", - "Checking variable x + y of type TensorType(float64, (?,))\n", - " > Op is Elemwise{add,no_inplace}\n", - " > Input 0 is InplaceDimShuffle{x}.0\n", + "Checking variable x + y of type Vector(float64, shape=(?,))\n", + " > Op is Add\n", + " > Input 0 is ExpandDims{axis=0}.0\n", " > Input 1 is y\n", "---\n", - "Checking variable InplaceDimShuffle{x}.0 of type TensorType(float64, (1,))\n", - " > Op is InplaceDimShuffle{x}\n", + "Checking variable ExpandDims{axis=0}.0 of type Vector(float64, shape=(1,))\n", + " > Op is ExpandDims{axis=0}\n", " > Input 0 is x\n", "---\n", - "Checking variable y of type TensorType(float64, (?,))\n", + "Checking variable y of type Vector(float64, shape=(?,))\n", " > y is a root variable\n", "---\n", - "Checking variable x of type TensorType(float64, ())\n", + "Checking variable x of type Scalar(float64, shape=())\n", " > x is a root variable\n" ] } @@ -537,17 +537,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{log,no_inplace} [id A] 'log(x + y)'\n", - " |Elemwise{add,no_inplace} [id B] 'x + y'\n", - " |InplaceDimShuffle{x} [id C]\n", - " | |x [id D]\n", - " |y [id E]\n" + "Log [id A] 'log(x + y)'\n", + " └─ Add [id B] 'x + y'\n", + " ├─ ExpandDims{axis=0} [id C]\n", + " │ └─ x [id D]\n", + " └─ y [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 15, @@ -626,17 +626,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{log,no_inplace} [id A] 'log(x + y)'\n", - " |Elemwise{add,no_inplace} [id B] 'x + y'\n", - " |InplaceDimShuffle{x} [id C]\n", - " | |x [id D]\n", - " |y [id E]\n" + "Log [id A] 'log(x + y)'\n", + " └─ 
Add [id B] 'x + y'\n", + " ├─ ExpandDims{axis=0} [id C]\n", + " │ └─ x [id D]\n", + " └─ y [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 18, @@ -665,18 +665,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{log,no_inplace} [id A] 'log(exp(x + y))'\n", - " |Elemwise{exp,no_inplace} [id B] 'exp(x + y)'\n", - " |Elemwise{add,no_inplace} [id C] 'x + y'\n", - " |InplaceDimShuffle{x} [id D]\n", - " | |x [id E]\n", - " |y [id F]\n" + "Log [id A] 'log(exp(x + y))'\n", + " └─ Exp [id B] 'exp(x + y)'\n", + " └─ Add [id C] 'x + y'\n", + " ├─ ExpandDims{axis=0} [id D]\n", + " │ └─ x [id E]\n", + " └─ y [id F]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 19, @@ -745,16 +745,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Elemwise{add,no_inplace} [id A] 'x + y' 1\n", - " |InplaceDimShuffle{x} [id B] 0\n", - " | |x [id C]\n", - " |y [id D]\n" + "Add [id A] 'x + y' 1\n", + " ├─ ExpandDims{axis=0} [id B] 0\n", + " │ └─ x [id C]\n", + " └─ y [id D]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 21, @@ -807,7 +807,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAArcAAAIQCAYAAACbhEYhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABCrklEQVR4nO3deVRV9f7/8dcBZHAAhBAkFdDKqbJyQNJyotDMKa28maKZ5pxSqXRT05tiZWmaQ/YttZs2aVpmYeZ4yzG9ZVlZJg7pBTUFFBMU9u+PFufnEVRAZMPH52OtvZb7s4fz3vts4OXnfM7eDsuyLAEAAAAGcLO7AAAAAKC4EG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQboES5nA49Pzzz5foa6akpKhbt24KDAyUw+HQtGnTSvT1cWX27dsnh8Oh+fPnF2n7+fPny+FwaN++fc62li1bqmXLlsVS3+VceM0///zzcjgcOnbsWIm8fnh4uHr37l0ir3W1XOk1AFxLCLcok3744Qd169ZNYWFh8vb21vXXX6977rlHM2bMsLu0UmnEiBFauXKl4uPj9e9//1tt27a1uySUQRs3btTzzz+v1NRUu0vJozTXBqBkedhdAFBYGzduVKtWrVSjRg3169dPISEhOnjwoDZv3qzXXntNQ4cOtbvEUmfNmjXq1KmTnn76abtLQSnx5ZdfFnqbjRs3avz48erdu7f8/f0LvN1ff/0lD4+r++fmUrXt3r1bbm5luy8nLCxMf/31l8qVK2d3KUCpR7hFmTNx4kT5+flp27Ztef6IHTlyxJ6iSrkjR44UKIxkZGSoQoUKV7+gMu7MmTPy9PQs04HJ09Pzqu4/JydHWVlZ8vb2lre391V9rcvx8vKy9fWLg8PhsP08AmVF2f3NjGvW77//rvr16+cb1qpUqeIyP2/ePLVu3VpVqlSRl5eX6tWrp9mzZ+fZLjw8XPfff7/WrVunRo0aycfHR7fccovWrVsnSfr44491yy23yNvbWw0bNtR///tfl+179+6tihUrau/evYqJiVGFChUUGhqqCRMmyLKsyx7ToUOH9Nhjjyk4OFheXl6qX7++3n777TzrzZgxQ/Xr11f58uVVuXJlNWrUSIsWLbrofnPHWlqWpZkzZ8rhcMjhcLgsW79+vQYNGqQqVaqoWrVqzm1nzZql+vXry8vLS6GhoRo8eHCej3xbtmypm2++WTt37lSLFi1Uvnx53XDDDVq8eLEkaf369YqMjJSPj49q166tr7766rLnIisrS2PHjlXDhg3l5+enChUq6K677tLatWsvu630/9/Lr7/+Wk2aNJG3t7dq1qypd955J8+6e/fu1YMPPqiAgACVL19eTZs21YoVK1zWWbdunRwOh95//30999xzuv7661W+fHmlp6c73/cDBw7o/vvvV8WKFXX99ddr5syZkv4ePtO6dWtVqFBBYWFhed6r48eP6+mnn9Ytt9yiihUrytfXV+3atdP3339foGPNz65du9S6dWv5+PioWrVqeuGFF5STk5NnvfzG3F7q+nr++ef1zDPPSJIiIiKc11LuOF6Hw6EhQ4Zo4cKFzusmMTHRuSy/cebHjh3TQw89JF9fXwUGBurJJ5/UmTNnnMsvNc70/H1errb8xtwW5r3/8MMPNXHiRFWrVk3e3t5q06aN9uzZk6emC/Xu3Vvh4eF52nPHHJ9v1apVat68ufz9/VWxYkXVrl1bzz777CXPRe71d+jQIXXu3FkVK1ZUUFCQnn76aWVnZ7vs/88//1TPnj3l6+srf39/xcbG6vvvvy/QON7c3xXffPON4uLiFBQUpAoVKqhLly46evSoy7oXe68vfA9y9/n1119r2LBhCgoKkr+/v5544gllZWUpNTVVvXr1UuXKlVW5cmWNHDnS5Xdp7vmYMmWKpk6dqrCwMPn4+KhFixb68ccfnevNmzdPDocjz+9
sSZo0aZLc3d116NChSx4/yh56blHmhIWFadOmTfrxxx918803X3Ld2bNnq379+urYsaM8PDy0fPlyDRo0SDk5ORo8eLDLunv27NEjjzyiJ554Qo8++qimTJmiDh06aM6cOXr22Wc1aNAgSVJCQoIeeuihPB91Zmdnq23btmratKleeuklJSYmaty4cTp37pwmTJhw0RpTUlLUtGlTZzgICgrSF198ob59+yo9PV3Dhw+XJL355psaNmyYunXr5gwBO3fu1JYtW/TII4/ku++7775b//73v9WzZ0/dc8896tWrV551Bg0apKCgII0dO1YZGRmS/v7jO378eEVHR2vgwIHavXu3Zs+erW3btumbb75x+Wj0xIkTuv/++9W9e3c9+OCDmj17trp3766FCxdq+PDhGjBggB555BG9/PLL6tatmw4ePKhKlSpd9Hykp6fr//7v//SPf/xD/fr108mTJ/XWW28pJiZGW7du1W233XbRbXPt2bNH3bp1U9++fRUbG6u3335bvXv3VsOGDVW/fn3neb/zzjt1+vRpDRs2TIGBgVqwYIE6duyoxYsXq0uXLi77/Ne//iVPT089/fTTyszMdPZ8Zmdnq127drr77rv10ksvaeHChRoyZIgqVKigf/7zn+rRo4ceeOABzZkzR7169VJUVJQiIiIk/R2wli1bpgcffFARERFKSUnRG2+8oRYtWuinn35SaGjoZY/1fMnJyWrVqpXOnTun0aNHq0KFCpo7d658fHwuu+3lrq8HHnhAv/76q9577z1NnTpV1113nSQpKCjIuY81a9boww8/1JAhQ3TdddflG+zO99BDDyk8PFwJCQnavHmzpk+frhMnTuT7H5FLKUht5yvsez958mS5ubnp6aefVlpaml566SX16NFDW7ZsKVSdF7Nr1y7df//9uvXWWzVhwgR5eXlpz549+uabby67bXZ2tmJiYhQZGakpU6boq6++0iuvvKJatWpp4MCBkv7uRe/QoYO2bt2qgQMHqk6dOvrkk08UGxtbqDqHDh2qypUra9y4cdq3b5+mTZumIUOG6IMPPijScefuMyQkROPHj9fmzZs1d+5c+fv7a+PGjapRo4YmTZqkzz//XC+//LJuvvnmPL/D3nnnHZ08eVKDBw/WmTNn9Nprr6l169b64YcfFBwcrG7dumnw4MFauHChbr/9dpdtFy5cqJYtW+r6668vcv0opSygjPnyyy8td3d3y93d3YqKirJGjhxprVy50srKysqz7unTp/O0xcTEWDVr1nRpCwsLsyRZGzdudLatXLnSkmT5+PhY+/fvd7a/8cYbliRr7dq1zrbY2FhLkjV06FBnW05OjtW+fXvL09PTOnr0qLNdkjVu3DjnfN++fa2qVatax44dc6mpe/fulp+fn/MYOnXqZNWvX/8yZyd/kqzBgwe7tM2bN8+SZDVv3tw6d+6cs/3IkSOWp6ende+991rZ2dnO9tdff92SZL399tvOthYtWliSrEWLFjnbfvnlF0uS5ebmZm3evNnZnns+582bd8laz507Z2VmZrq0nThxwgoODrYee+yxyx5r7nu5YcMGl2Py8vKynnrqKWfb8OHDLUnWf/7zH2fbyZMnrYiICCs8PNx57GvXrrUkWTVr1sxzPeW+75MmTXKp1cfHx3I4HNb777+f57yc/96fOXPG5RxblmUlJSVZXl5e1oQJE1zaCnLuco9py5YtLsfu5+dnSbKSkpKc7S1atLBatGjhnC/I9fXyyy/n2U+u3Pd8165d+S47/7jHjRtnSbI6duzost6gQYMsSdb3339vWdalj/vCfV6qtrCwMCs2NtY5X9j3vm7dui7X5GuvvWZJsn744Yc8r3W+2NhYKywsLE977vHnmjp1qiXJ5ffEhfI7F7nX3/nXimVZ1u233241bNjQOb9kyRJLkjVt2jRnW3Z2ttW6desCXVe5vyuio6OtnJwcZ/uIESMsd3d3KzU11dl24fuS68L3IHefMTE
xLvuMioqyHA6HNWDAAGfbuXPnrGrVqrlcr7nnw8fHx/rjjz+c7Vu2bLEkWSNGjHC2/eMf/7BCQ0NdftZ27NhRoGNH2cSwBJQ599xzjzZt2qSOHTvq+++/10svvaSYmBhdf/31+vTTT13WPb/HKi0tTceOHVOLFi20d+9epaWluaxbr149RUVFOecjIyMlSa1bt1aNGjXytO/duzdPbUOGDHH+O7cnNisr66Ifx1uWpSVLlqhDhw6yLEvHjh1zTjExMUpLS9OOHTskSf7+/vrjjz+0bdu2Ap2ngurXr5/c3d2d81999ZWysrI0fPhwl57pfv36ydfXN89HtxUrVlT37t2d87Vr15a/v7/q1q3rPFfSpc/b+dzd3Z29ojk5OTp+/LjOnTunRo0aOc/F5dSrV0933XWXcz4oKEi1a9d2ee3PP/9cTZo0UfPmzV2OpX///tq3b59++uknl33GxsZetAf08ccfd/7b399ftWvXVoUKFfTQQw8523PPy/k1eHl5Oc9xdna2/vzzT+dH0gU91vN9/vnnatq0qZo0aeJy7D169LjstsVxfbVo0UL16tUr8PoXfnqS+2XQzz//vMg1FERh3/s+ffq4jFHOvbYudy0XVO4Qq08++STfISSXM2DAAJf5u+66y6W2xMRElStXTv369XO2ubm55Tn/l9O/f3+X4RR33XWXsrOztX///kLXnKtv374u+4yMjJRlWerbt6+zzd3dXY0aNcr3fHfu3Nml57VJkyaKjIx0uYZ69eqlw4cPuwxtWrhwoXx8fNS1a9ci147Si3CLMqlx48b6+OOPdeLECW3dulXx8fE6efKkunXr5vKH6ZtvvlF0dLQqVKggf39/BQUFOcexXRhuzw+wkuTn5ydJql69er7tJ06ccGl3c3NTzZo1XdpuuukmSXK5v+j5jh49qtTUVM2dO1dBQUEuU58+fST9/y/JjRo1ShUrVlSTJk104403avDgwQX62PJycj8iz5X7h6p27dou7Z6enqpZs2aeP2TVqlXLM37Qz8+vwOctPwsWLNCtt94qb29vBQYGKigoSCtWrMjznl3Mhe+lJFWuXNnltffv35/nGCWpbt26zuXnu/A85fL29s7z8befn99Fz8v5NeTk5Gjq1Km68cYb5eXlpeuuu05BQUHauXNngY/1fPv379eNN96Ypz2/47xQcVxfFztHF3NhrbVq1ZKbm9tFf16KS2Hf+wuvp8qVK0sq2LVcEA8//LCaNWumxx9/XMHBwerevbs+/PDDAgXd/K6//K71qlWrqnz58i7r3XDDDYWq82qch8L83s3vdfK73m+66SaXa+iee+5R1apVtXDhQkl//9y999576tSp0yWHSKHsItyiTPP09FTjxo01adIkzZ49W2fPntVHH30k6e8vnrVp00bHjh3Tq6++qhUrVmjVqlUaMWKEJOX5w3F+72VB2q0CfFHscnJrePTRR7Vq1ap8p2bNmkn6+w/v7t279f7776t58+ZasmSJmjdvrnHjxl1RDQUZj3kpxX3e3n33XfXu3Vu1atXSW2+9pcTERK1atUqtW7cucK/W1XjPLnaeruT4J02apLi4ON1999169913tXLlSq1atUr169cvUg/elSiO6+tKr6UL/zNw4XyuC78sdbUV9XoqaP0+Pj7asGGDvvrqK/Xs2VM7d+7Uww8/rHvuueeyx3qx2q6GK/m5uthxFObnp6g/v+7u7nrkkUe0ZMkSnTlzRmvXrtXhw4f16KOPFml/KP34QhmM0ahRI0nS//73P0nS8uXLlZmZqU8//dSld6Cg37ovrJycHO3du9fZWytJv/76qyRd9Is1QUFBqlSpkrKzsxUdHX3Z16hQoYIefvhhPfzww8rKytIDDzygiRMnKj4+vthuExQWFibp73uDnt8TnZWVpaSkpALVeSUWL16smjVr6uOPP3YJB1ca4i8UFham3bt352n/5ZdfnMuvtsWLF6tVq1Z66623XNpTU1OdX4oqjLC
wMP3222952vM7zvxc7vq6WFgrqt9++82lt3fPnj3Kyclx/rzk9gxeeJeO/D4GL0xtJfXeV65cOd+HSuRXv5ubm9q0aaM2bdro1Vdf1aRJk/TPf/5Ta9euveKfubCwMK1du1anT5926b0tyB0fCiu/Y87KynL+Xi5u+V3vv/76a57fub169dIrr7yi5cuX64svvlBQUJBiYmKuSk2wHz23KHPWrl2b7//gc8dY5X7cmPs///PXTUtL07x5865aba+//rrz35Zl6fXXX1e5cuXUpk2bfNd3d3dX165dtWTJEpfb1+Q6/zY7f/75p8syT09P1atXT5Zl6ezZs8V0BFJ0dLQ8PT01ffp0l3P31ltvKS0tTe3bty+218pPfu/bli1btGnTpmJ9nfvuu09bt2512W9GRobmzp2r8PDwQo0dLSp3d/c81/JHH31U5FsT3Xfffdq8ebO2bt3qbDt69Kjz49hLKcj1lXsP5OJ6CljuLdNy5T5hsF27dpIkX19fXXfdddqwYYPLerNmzcqzr8LUVlLvfa1atZSWlqadO3c62/73v/9p6dKlLusdP348z7a5dwXJzMy84jpiYmJ09uxZvfnmm862nJycPOe/ONSqVSvP+zV37tyr1tu+bNkyl5+XrVu3asuWLc5rKNett96qW2+9Vf/3f/+nJUuWqHv37lf9wSKwD+8sypyhQ4fq9OnT6tKli+rUqaOsrCxt3LhRH3zwgcLDw51jVe+99155enqqQ4cOeuKJJ3Tq1Cm9+eabqlKlylXpRfD29lZiYqJiY2MVGRmpL774QitWrNCzzz570VsSSX/fZmjt2rWKjIxUv379VK9ePR0/flw7duzQV1995fzDd++99yokJETNmjVTcHCwfv75Z73++utq3759sY4bCwoKUnx8vMaPH6+2bduqY8eO2r17t2bNmqXGjRtf9Y/y7r//fn388cfq0qWL2rdvr6SkJM2ZM0f16tXTqVOniu11Ro8erffee0/t2rXTsGHDFBAQoAULFigpKUlLliwpkQc03H///ZowYYL69OmjO++8Uz/88IMWLlyYZ+x2QY0cOdL5eOUnn3zSeSuwsLAwl4CVn4JcXw0bNpQk/fOf/1T37t1Vrlw5dejQocgP/khKSlLHjh3Vtm1bbdq0Se+++64eeeQRNWjQwLnO448/rsmTJ+vxxx9Xo0aNtGHDBucnIucrTG0l9d53795do0aNUpcuXTRs2DCdPn1as2fP1k033eTyhcEJEyZow4YNat++vcLCwnTkyBHNmjVL1apVc/nSW1F17txZTZo00VNPPaU9e/aoTp06+vTTT52/W4qzR/7xxx/XgAED1LVrV91zzz36/vvvtXLlyiJ9ElEQN9xwg5o3b66BAwcqMzNT06ZNU2BgoEaOHJln3V69ejmf0siQBLMRblHmTJkyRR999JE+//xzzZ07V1lZWapRo4YGDRqk5557zvnN49q1a2vx4sV67rnn9PTTTyskJEQDBw5UUFCQHnvssWKvy93dXYmJiRo4cKCeeeYZVapUSePGjdPYsWMvuV1wcLC2bt2qCRMm6OOPP9asWbMUGBio+vXr68UXX3Su98QTT2jhwoV69dVXderUKVWrVk3Dhg3Tc889V+zH8vzzzysoKEivv/66RowYoYCAAPXv31+TJk266o//7N27t5KTk/XGG29o5cqVqlevnt5991199NFHzodqFIfg4GBt3LhRo0aN0owZM3TmzBndeuutWr58+VXvnc717LPPKiMjQ4sWLdIHH3ygO+64QytWrNDo0aOLtL+qVatq7dq1Gjp0qCZPnqzAwEANGDBAoaGhLt8+z09Brq/GjRvrX//6l+bMmaPExETl5OQoKSmpyOH2gw8+0NixYzV69Gh5eHhoyJAhevnll13WGTt2rI4eParFixfrww8/VLt27fTFF1/keWBLYWorqfc+MDBQS5cuVVxcnEaOHKmIiAglJCTot99+cwm3HTt21L59+/T222/
r2LFjuu6669SiRQuNHz/e+QWrK+Hu7q4VK1boySef1IIFC+Tm5qYuXbpo3LhxatasWbE++axfv35KSkpyjpe/6667tGrVqot+enWlevXqJTc3N02bNk1HjhxRkyZN9Prrr6tq1ap51u3Ro4dGjRqlWrVqudxRBOZxWMXxrRjgGte7d28tXry4WHsWAeBqWrZsmbp06aKvv/7a+cXVsmLfvn2KiIjQyy+/7OyNvZxjx46patWqGjt2rMaMGXOVK4SdGHMLAIDh/vrrL5f57OxszZgxQ76+vrrjjjtsqqpkzZ8/X9nZ2erZs6fdpeAqY1gCAACGGzp0qP766y9FRUUpMzNTH3/8sTZu3KhJkyZd8S3cSrs1a9bop59+0sSJE9W5c+fLPhYaZR/hFgAAw7Vu3VqvvPKKPvvsM505c0Y33HCDZsyY4fJURVNNmDBBGzduVLNmzZx35IDZGHMLAAAAYzDmFgAAAMYg3AIAAMAYjLnV309qOXz4sCpVqlTsj5cEAADAlbMsSydPnlRoaOglH7ZCuJV0+PBhVa9e3e4yAAAAcBkHDx5UtWrVLrqccCs5Hy158OBB+fr62lwNAAAALpSenq7q1atf9pHzhFv9/+dq+/r6Em4BAABKscsNIeULZQAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMW8Pthg0b1KFDB4WGhsrhcGjZsmV51vn555/VsWNH+fn5qUKFCmrcuLEOHDjgXH7mzBkNHjxYgYGBqlixorp27aqUlJQSPAoAAACUFraG24yMDDVo0EAzZ87Md/nvv/+u5s2bq06dOlq3bp127typMWPGyNvb27nOiBEjtHz5cn300Udav369Dh8+rAceeKCkDgEAAACliMOyLMvuIqS/b8i7dOlSde7c2dnWvXt3lStXTv/+97/z3SYtLU1BQUFatGiRunXrJkn65ZdfVLduXW3atElNmzYt0Gunp6fLz89PaWlpPMQBAACgFCpoXiu1Y25zcnK0YsUK3XTTTYqJiVGVKlUUGRnpMnRh+/btOnv2rKKjo51tderUUY0aNbRp0yYbqgYAAICdSm24PXLkiE6dOqXJkyerbdu2+vLLL9WlSxc98MADWr9+vSQpOTlZnp6e8vf3d9k2ODhYycnJF913Zmam0tPTXSYAAACUfR52F3AxOTk5kqROnTppxIgRkqTbbrtNGzdu1Jw5c9SiRYsi7zshIUHjx48vljoBAABQepTantvrrrtOHh4eqlevnkt73bp1nXdLCAkJUVZWllJTU13WSUlJUUhIyEX3HR8fr7S0NOd08ODBYq8fAAAAJa/UhltPT081btxYu3fvdmn/9ddfFRYWJklq2LChypUrp9WrVzuX7969WwcOHFBUVNRF9+3l5SVfX1+XCQAAAGWfrcMSTp06pT179jjnk5KS9N133ykgIEA1atTQM888o4cfflh33323WrVqpcTERC1fvlzr1q2TJPn5+alv376Ki4tTQECAfH19NXToUEVFRRX4TgkAAAAwh623Alu3bp1atWqVpz02Nlbz58+XJL399ttKSEjQH3/8odq1a2v8+PHq1KmTc90zZ87oqaee0nvvvafMzEzFxMRo1qxZlxyWcCFuBQYAAFC6FTSvlZr73NqJcAsAAFC6lfn73AIAAACFRbgFAACAMUrtfW4B4FoUPnqF3SXka9/k9naXAAAFQs8tAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGMPD7gIAwA7ho1fYXQI
A4Cqg5xYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAM7pYAALis0nh3iX2T29tdAoBSiJ5bAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMWwNtxs2bFCHDh0UGhoqh8OhZcuWXXTdAQMGyOFwaNq0aS7tx48fV48ePeTr6yt/f3/17dtXp06durqFAwAAoFSyNdxmZGSoQYMGmjlz5iXXW7p0qTZv3qzQ0NA8y3r06KFdu3Zp1apV+uyzz7Rhwwb179//apUMAACAUszW+9y2a9dO7dq1u+Q6hw4d0tChQ7Vy5Uq1b+96T8Off/5ZiYmJ2rZtmxo1aiRJmjFjhu677z5NmTIl3zAMAAAAc5XqMbc5OTnq2bOnnnnmGdWvXz/P8k2bNsnf398ZbCUpOjpabm5u2rJlS0mWCgAAgFKgVD+h7MUXX5SHh4eGDRuW7/Lk5GRVqVLFpc3Dw0MBAQFKTk6+6H4zMzOVmZnpnE9PTy+eggEAAGCrUttzu337dr322muaP3++HA5Hse47ISFBfn5+zql69erFun8AAADYo9SG2//85z86cuSIatSoIQ8PD3l4eGj//v166qmnFB4eLkkKCQnRkSNHXLY7d+6cjh8/rpCQkIvuOz4+Xmlpac7p4MGDV/NQAAAAUEJK7bCEnj17Kjo62qUtJiZGPXv2VJ8+fSRJUVFRSk1N1fbt29WwYUNJ0po1a5STk6PIyMiL7tvLy0teXl5Xr3gAAADYwtZwe+rUKe3Zs8c5n5SUpO+++04BAQGqUaOGAgMDXdYvV66cQkJCVLt2bUlS3bp11bZtW/Xr109z5szR2bNnNWTIEHXv3p07JQAAAFyDbB2W8O233+r222/X7bffLkmKi4vT7bffrrFjxxZ4HwsXLlSdOnXUpk0b3XfffWrevLnmzp17tUoGAABAKWZrz23Lli1lWVaB19+3b1+etoCAAC1atKgYqwIAAEBZVWq/UAYAAAAUFuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYHnYXAMB84aNX2F0CAOAaQc8tAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDFsDbcbNmxQhw4dFBoaKofDoWXLljmXnT17VqNGjdItt9yiChUqKDQ0VL169dLhw4dd9nH8+HH16NFDvr6+8vf3V9++fXXq1KkSPhIAAACUBraG24yMDDVo0EAzZ87Ms+z06dPasWOHxowZox07dujjjz/W7t271bFjR5f1evTooV27dmnVqlX67LPPtGHDBvXv37+kDgEAAACliMOyLMvuIiTJ4XBo6dKl6ty580XX2bZtm5o0aaL9+/erRo0a+vnnn1WvXj1t27ZNjRo1kiQlJibqvvvu0x9//KHQ0NACvXZ6err8/PyUlpYmX1/f4jgcAOcJH73C7hJgoH2T29tdAoASVNC8VqbG3KalpcnhcMjf31+StGnTJvn7+zuDrSRFR0fLzc1NW7Zsueh+MjMzlZ6e7jIBAACg7Csz4fbMmTMaNWqU/vGPfzjTenJysqpUqeKynoeHhwICApScnHzRfSUkJMjPz885Va9e/arWDgAAgJJRJsLt2bNn9dBDD8myLM2ePfuK9xcfH6+0tDTndPDgwWKoEgAAAHbzsLuAy8kNtvv379eaNWtcxliEhIToyJEjLuufO3dOx48
fV0hIyEX36eXlJS8vr6tWMwAAAOxRqntuc4Ptb7/9pq+++kqBgYEuy6OiopSamqrt27c729asWaOcnBxFRkaWdLkAAACwma09t6dOndKePXuc80lJSfruu+8UEBCgqlWrqlu3btqxY4c+++wzZWdnO8fRBgQEyNPTU3Xr1lXbtm3Vr18/zZkzR2fPntWQIUPUvXv3At8pAQAAAOawNdx+++23atWqlXM+Li5OkhQbG6vnn39en376qSTptttuc9lu7dq1atmypSRp4cKFGjJkiNq0aSM3Nzd17dpV06dPL5H6AQAAULrYGm5btmypS91mtyC34A0ICNCiRYuKsywAAACUUaV6zC0AAABQGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADG8LC7AAAAiiJ89Aq7S8jXvsnt7S4BuKbRcwsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxbA23GzZsUIcOHRQaGiqHw6Fly5a5LLcsS2PHjlXVqlXl4+Oj6Oho/fbbby7rHD9+XD169JCvr6/8/f3Vt29fnTp1qgSPAgAAAKWFreE2IyNDDRo00MyZM/Nd/tJLL2n69OmaM2eOtmzZogoVKigmJkZnzpxxrtOjRw/t2rVLq1at0meffaYNGzaof//+JXUIAAAAKEU87Hzxdu3aqV27dvkusyxL06ZN03PPPadOnTpJkt555x0FBwdr2bJl6t69u37++WclJiZq27ZtatSokSRpxowZuu+++zRlyhSFhoaW2LEAAADAfqV2zG1SUpKSk5MVHR3tbPPz81NkZKQ2bdokSdq0aZP8/f2dwVaSoqOj5ebmpi1btlx035mZmUpPT3eZAAAAUPaV2nCbnJwsSQoODnZpDw4Odi5LTk5WlSpVXJZ7eHgoICDAuU5+EhIS5Ofn55yqV69ezNUDAADADqU23F5N8fHxSktLc04HDx60uyQAAAAUg1IbbkNCQiRJKSkpLu0pKSnOZSEhITpy5IjL8nPnzun48ePOdfLj5eUlX19flwkAAABlX6kNtxEREQoJCdHq1audbenp6dqyZYuioqIkSVFRUUpNTdX27dud66xZs0Y5OTmKjIws8ZoBAABgL1vvlnDq1Cnt2bPHOZ+UlKTvvvtOAQEBqlGjhoYPH64XXnhBN954oyIiIjRmzBiFhoaqc+fOkqS6deuqbdu26tevn+bMmaOzZ89qyJAh6t69O3dKAAAAuAbZGm6//fZbtWrVyjkfFxcnSYqNjdX8+fM1cuRIZWRkqH///kpNTVXz5s2VmJgob29v5zYLFy7UkCFD1KZNG7m5ualr166aPn16iR8LAAAA7OewLMuyuwi7paeny8/PT2lpaYy/Ba6C8NEr7C4BKDH7Jre3uwTASAXNa6V2zC0AAABQWIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjOFhdwEAik/46BV2lwAAgK3ouQUAAIAxCLcAAAAwBuEWAAAAxihSuG3durVSU1PztKenp6t169ZXWhMAAABQJEUKt+vWrVNWVlae9jNnzug///nPFRcFAAA
AFEWh7pawc+dO579/+uknJScnO+ezs7OVmJio66+/vviqAwAAAAqhUOH2tttuk8PhkMPhyHf4gY+Pj2bMmFFsxQEAAACFUahwm5SUJMuyVLNmTW3dulVBQUHOZZ6enqpSpYrc3d2LvUgAAACgIAoVbsPCwiRJOTk5V6UYAAAA4EoU+Qllv/32m9auXasjR47kCbtjx4694sIAAACAwipSuH3zzTc1cOBAXXfddQoJCZHD4XAuczgchFsAAADYokjh9oUXXtDEiRM1atSo4q4HAAAAKLIi3ef2xIkTevDBB4u7FgAAAOCKFCncPvjgg/ryyy+LuxYAAADgihRpWMINN9ygMWPGaPPmzbrllltUrlw5l+XDhg0rluIAAACAwnBYlmUVdqOIiIiL79Dh0N69e6+oqJKWnp4uPz8/paWlydfX1+5ygCILH73C7hKAa96+ye3tLgEwUkHzWpF6bpOSkopcGAAAAHC1FGnMLQAAAFAaFann9rHHHrvk8rfffrtIxQAAAABXokjh9sSJEy7zZ8+e1Y8//qjU1FS1bt26WAoDAAAACqtI4Xbp0qV52nJycjRw4EDVqlXriosCAAAAiqLYxty6ubkpLi5OU6dOLa5dAgAAAIVSrF8o+/3333Xu3Lni3CUAAABQYEUalhAXF+cyb1mW/ve//2nFihWKjY0tlsIkKTs7W88//7zeffddJScnKzQ0VL1799Zzzz0nh8PhfO1x48bpzTffVGpqqpo1a6bZs2frxhtvLLY6AAAAUDYUKdz+97//dZl3c3NTUFCQXnnllcveSaEwXnzxRc2ePVsLFixQ/fr19e2336pPnz7y8/NzPgXtpZde0vTp07VgwQJFRERozJgxiomJ0U8//SRvb+9iqwUAAAClX5HC7dq1a4u7jnxt3LhRnTp1Uvv2fz/tJTw8XO+99562bt0q6e9e22nTpum5555Tp06dJEnvvPOOgoODtWzZMnXv3r1E6gQAAEDpcEVjbo8ePaqvv/5aX3/9tY4ePVpcNTndeeedWr16tX799VdJ0vfff6+vv/5a7dq1k/T3k9KSk5MVHR3t3MbPz0+RkZHatGnTRfebmZmp9PR0lwkAAABlX5F6bjMyMjR06FC98847ysnJkSS5u7urV69emjFjhsqXL18sxY0ePVrp6emqU6eO3N3dlZ2drYkTJ6pHjx6SpOTkZElScHCwy3bBwcHOZflJSEjQ+PHji6VGAAAAlB5F6rmNi4vT+vXrtXz5cqWmpio1NVWffPKJ1q9fr6eeeqrYivvwww+1cOFCLVq0SDt27NCCBQs0ZcoULViw4Ir2Gx8fr7S0NOd08ODBYqoYAAAAdipSz+2SJUu0ePFitWzZ0tl23333ycfHRw899JBmz55dLMU988wzGj16tHPs7C233KL9+/crISFBsbGxCgkJkSSlpKSoatWqzu1SUlJ02223XXS/Xl5e8vLyKpYaAQAAUHoUqef29OnTeYYCSFKVKlV0+vTpKy7q/Ndxc3Mt0d3d3TkUIiIiQiEhIVq9erVzeXp6urZs2aKoqKhiqwMAAABlQ5HCbVRUlMaNG6czZ8442/766y+NHz++WENlhw4dNHHiRK1YsUL79u3T0qVL9eqrr6pLly6SJIfDoeHDh+uFF17Qp59+qh9++EG9evVSaGioOnfuXGx1AAAAoGwo0rCEadOmqW3btqpWrZoaNGgg6e87GXh5eenLL78stuJmzJihMWPGaNCgQTpy5IhCQ0P1xBNPaOzYsc51Ro4cqYyMDPXv31+pqalq3ry5EhMTucctAADANchhWZZVlA1Pnz6thQsX6pdffpEk1a1bVz169JCPj0+xFlgS0tPT5efnp7S0NPn6+tpdDlBk4aNX2F0CcM3bN7m93SUARipoXitSz21CQoKCg4PVr18/l/a3335bR48e1ahRo4qyWwAAAOCKFGnM7RtvvKE6derkaa9fv77mzJlzxUUBAAAARVGkcJucnOxy661cQUF
B+t///nfFRQEAAABFUaRwW716dX3zzTd52r/55huFhoZecVEAAABAURRpzG2/fv00fPhwnT17Vq1bt5YkrV69WiNHjizWJ5QBAAAAhVGkcPvMM8/ozz//1KBBg5SVlSVJ8vb21qhRoxQfH1+sBQIAAAAFVaRw63A49OKLL2rMmDH6+eef5ePjoxtvvJFH2gIAAMBWRQq3uSpWrKjGjRsXVy0AAADAFSnSF8oAAACA0ohwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGB52FwAAgEnCR6+wu4Q89k1ub3cJQImh5xYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAY5T6cHvo0CE9+uijCgwMlI+Pj2655RZ9++23zuWWZWns2LGqWrWqfHx8FB0drd9++83GigEAAGCXUh1uT5w4oWbNmqlcuXL64osv9NNPP+mVV15R5cqVneu89NJLmj59uubMmaMtW7aoQoUKiomJ0ZkzZ2ysHAAAAHbwsLuAS3nxxRdVvXp1zZs3z9kWERHh/LdlWZo2bZqee+45derUSZL0zjvvKDg4WMuWLVP37t1LvGYAAADYp1T33H766adq1KiRHnzwQVWpUkW333673nzzTefypKQkJScnKzo62tnm5+enyMhIbdq06aL7zczMVHp6ussEAACAsq9Uh9u9e/dq9uzZuvHGG7Vy5UoNHDhQw4YN04IFCyRJycnJkqTg4GCX7YKDg53L8pOQkCA/Pz/nVL169at3EAAAACgxpTrc5uTk6I477tCkSZN0++23q3///urXr5/mzJlzRfuNj49XWlqaczp48GAxVQwAAAA7lepwW7VqVdWrV8+lrW7dujpw4IAkKSQkRJKUkpLisk5KSopzWX68vLzk6+vrMgEAAKDsK9XhtlmzZtq9e7dL26+//qqwsDBJf3+5LCQkRKtXr3YuT09P15YtWxQVFVWitQIAAMB+pfpuCSNGjNCdd96pSZMm6aGHHtLWrVs1d+5czZ07V5LkcDg0fPhwvfDCC7rxxhsVERGhMWPGKDQ0VJ07d7a3eAAAAJS4Uh1uGzdurKVLlyo+Pl4TJkxQRESEpk2bph49ejjXGTlypDIyMtS/f3+lpqaqefPmSkxMlLe3t42VAwAAwA4Oy7Isu4uwW3p6uvz8/JSWlsb4W5Rp4aNX2F0CgFJo3+T2dpcAXLGC5rVSPeYWAAAAKAzCLQAAAIxBuAUAAIAxCLcAAAAwRqm+WwJQmvHlLQAASh96bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjlKlwO3nyZDkcDg0fPtzZdubMGQ0ePFiBgYGqWLGiunbtqpSUFPuKBAAAgG3KTLjdtm2b3njjDd16660u7SNGjNDy5cv10Ucfaf369Tp8+LAeeOABm6oEAACAncpEuD116pR69OihN998U5UrV3a2p6Wl6a233tK
rr76q1q1bq2HDhpo3b542btyozZs321gxAAAA7FAmwu3gwYPVvn17RUdHu7Rv375dZ8+edWmvU6eOatSooU2bNl10f5mZmUpPT3eZAAAAUPZ52F3A5bz//vvasWOHtm3blmdZcnKyPD095e/v79IeHBys5OTki+4zISFB48ePL+5SAQAAYLNS3XN78OBBPfnkk1q4cKG8vb2Lbb/x8fFKS0tzTgcPHiy2fQMAAMA+pTrcbt++XUeOHNEdd9whDw8PeXh4aP369Zo+fbo8PDwUHBysrKwspaamumyXkpKikJCQi+7Xy8tLvr6+LhMAAADKvlI9LKFNmzb64YcfXNr69OmjOnXqaNSoUapevbrKlSun1atXq2vXrpKk3bt368CBA4qKirKjZAAAANioVIfbSpUq6eabb3Zpq1ChggIDA53tffv2VVxcnAICAuTr66uhQ4cqKipKTZs2taNkAAAA2KhUh9uCmDp1qtzc3NS1a1dlZmYqJiZGs2bNsrssAAAA2MBhWZZldxF2S09Pl5+fn9LS0hh/iwILH73C7hIAoED2TW5vdwnAFStoXivVXygDAAAACoNwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjeNhdAAAAuLrCR6+wu4R87Zvc3u4SYCB6bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDE87C4AAABcm8JHr7C7hDz2TW5vdwm4QvTcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDFKfbhNSEhQ48aNValSJVWpUkWdO3fW7t27XdY5c+aMBg8erMDAQFWsWFFdu3ZVSkqKTRUDAADALqU+3K5fv16DBw/W5s2btWrVKp09e1b33nuvMjIynOuMGDFCy5cv10cffaT169fr8OHDeuCBB2ysGgAAAHYo9Q9xSExMdJmfP3++qlSpou3bt+vuu+9WWlqa3nrrLS1atEitW7eWJM2bN09169bV5s2b1bRpUzvKBgAAgA1Kfc/thdLS0iRJAQEBkqTt27fr7Nmzio6Odq5Tp04d1ahRQ5s2bcp3H5mZmUpPT3eZAAAAUPaVqXCbk5Oj4cOHq1mzZrr55pslScnJyfL09JS/v7/LusHBwUpOTs53PwkJCfLz83NO1atXv9qlAwAAoASUqXA7ePBg/fjjj3r//fevaD/x8fFKS0tzTgcPHiymCgEAAGCnUj/mNteQIUP02WefacOGDapWrZqzPSQkRFlZWUpNTXXpvU1JSVFISEi++/Ly8pKXl9fVLhkAAAAlrNT33FqWpSFDhmjp0qVas2aNIiIiXJY3bNhQ5cqV0+rVq51tu3fv1oEDBxQVFVXS5QIAAMBGpb7ndvDgwVq0aJE++eQTVapUyTmO1s/PTz4+PvLz81Pfvn0VFxengIAA+fr6aujQoYqKiuJOCQAAANeYUh9uZ8+eLUlq2bKlS/u8efPUu3dvSdLUqVPl5uamrl27KjMzUzExMZo1a1YJVwoAAAC7OSzLsuwuwm7p6eny8/NTWlqafH197S4HFwgfvcLuEgAA14h9k9vbXQIuoqB5rdSPuQUAAAAKinALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGB52F4DSJXz0CrtLAAAAKDJ6bgEAAGAMwi0AAACMQbg
FAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABjDw+4CrlXho1fYXQIAALhAaf37vG9ye7tLKDPouQUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDuyUAAACUctzFoeDouQUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjGBNuZ86cqfDwcHl7eysyMlJbt261uyQAAACUMCPC7QcffKC4uDiNGzdOO3bsUIMGDRQTE6MjR47YXRoAAABKkBHh9tVXX1W/fv3Up08f1atXT3PmzFH58uX19ttv210aAAAASlCZf0JZVlaWtm/frvj4eGebm5uboqOjtWnTpny3yczMVGZmpnM+LS1NkpSenn51iz1PTubpEnstAACAq6Eks1Pua1mWdcn1yny4PXbsmLKzsxUcHOzSHhwcrF9++SXfbRISEjR+/Pg87dWrV78qNQIAAJjIb1rJv+bJkyfl5+d30eVlPtwWRXx8vOLi4pzzOTk5On78uAIDA+VwOIr1tdLT01W9enUdPHhQvr6+xbpv5I9zXvI45yWPc17yOOf24LyXvNJ6zi3L0smTJxUaGnrJ9cp8uL3uuuvk7u6ulJQUl/aUlBSFhITku42Xl5e8vLxc2vz9/a9WiZIkX1/fUnWBXAs45yWPc17yOOclj3NuD857ySuN5/xSPba5yvwXyjw9PdWwYUOtXr3a2ZaTk6PVq1crKirKxsoAAABQ0sp8z60kxcXFKTY2Vo0aNVKTJk00bdo0ZWRkqE+fPnaXBgAAgBJkRLh9+OGHdfToUY0dO1bJycm67bbblJiYmOdLZnbw8vLSuHHj8gyDwNXDOS95nPOSxzkveZxze3DeS15ZP+cO63L3UwAAAADKiDI/5hYAAADIRbgFAACAMQi3AAAAMAbhFgAAAMYg3Jagjh07qkaNGvL29lbVqlXVs2dPHT582O6yjLZv3z717dtXERER8vHxUa1atTRu3DhlZWXZXZrRJk6cqDvvvFPly5e/6g9IuVbNnDlT4eHh8vb2VmRkpLZu3Wp3SUbbsGGDOnTooNDQUDkcDi1btszukoyWkJCgxo0bq1KlSqpSpYo6d+6s3bt3212W0WbPnq1bb73V+eCGqKgoffHFF3aXVSSE2xLUqlUrffjhh9q9e7eWLFmi33//Xd26dbO7LKP98ssvysnJ0RtvvKFdu3Zp6tSpmjNnjp599lm7SzNaVlaWHnzwQQ0cONDuUoz0wQcfKC4uTuPGjdOOHTvUoEEDxcTE6MiRI3aXZqyMjAw1aNBAM2fOtLuUa8L69es1ePBgbd68WatWrdLZs2d17733KiMjw+7SjFWtWjVNnjxZ27dv17fffqvWrVurU6dO2rVrl92lFRq3ArPRp59+qs6dOyszM1PlypWzu5xrxssvv6zZs2dr7969dpdivPnz52v48OFKTU21uxSjREZGqnHjxnr99dcl/f1UxurVq2vo0KEaPXq0zdWZz+FwaOnSpercubPdpVwzjh49qipVqmj9+vW6++677S7nmhEQEKCXX35Zffv2tbuUQqHn1ibHjx/XwoULdeeddxJsS1haWpoCAgLsLgMokqysLG3fvl3R0dHONjc3N0VHR2vTpk02VgZcPWlpaZLE7+4Skp2drffff18ZGRmKioqyu5xCI9yWsFGjRqlChQoKDAzUgQMH9Mknn9hd0jVlz549mjFjhp544gm7SwGK5NixY8rOzs7zBMbg4GAlJyfbVBVw9eTk5Gj48OFq1qyZbr75ZrvLMdoPP/ygihUrysvLSwMGDNDSpUtVr149u8sqNMLtFRo9erQcDsclp19++cW5/jPPPKP//ve/+vLLL+Xu7q5evXqJkSGFV9jzLkmHDh1S27Zt9eCDD6pfv342VV52FeWcA8CVGjx4sH788Ue
9//77dpdivNq1a+u7777Tli1bNHDgQMXGxuqnn36yu6xCY8ztFTp69Kj+/PPPS65Ts2ZNeXp65mn/448/VL16dW3cuLFMdvvbqbDn/fDhw2rZsqWaNm2q+fPny82N/9cVVlGudcbcFr+srCyVL19eixcvdhnzGRsbq9TUVD4NKgGMuS05Q4YM0SeffKINGzYoIiLC7nKuOdHR0apVq5beeOMNu0spFA+7CyjrgoKCFBQUVKRtc3JyJEmZmZnFWdI1oTDn/dChQ2rVqpUaNmyoefPmEWyL6EqudRQfT09PNWzYUKtXr3aGq5ycHK1evVpDhgyxtzigmFiWpaFDh2rp0qVat24dwdYmOTk5ZTKjEG5LyJYtW7Rt2zY1b95clStX1u+//64xY8aoVq1a9NpeRYcOHVLLli0VFhamKVOm6OjRo85lISEhNlZmtgMHDuj48eM6cOCAsrOz9d1330mSbrjhBlWsWNHe4gwQFxen2NhYNWrUSE2aNNG0adOUkZGhPn362F2asU6dOqU9e/Y455OSkvTdd98pICBANWrUsLEyMw0ePFiLFi3SJ598okqVKjnHk/v5+cnHx8fm6swUHx+vdu3aqUaNGjp58qQWLVqkdevWaeXKlXaXVngWSsTOnTutVq1aWQEBAZaXl5cVHh5uDRgwwPrjjz/sLs1o8+bNsyTlO+HqiY2Nzfecr1271u7SjDFjxgyrRo0alqenp9WkSRNr8+bNdpdktLVr1+Z7TcfGxtpdmpEu9nt73rx5dpdmrMcee8wKCwuzPD09raCgIKtNmzbWl19+aXdZRcKYWwAAABiDwYcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGOP/AUPbzNadgBFpAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArcAAAIQCAYAAACbhEYhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAABCyElEQVR4nO3deVwVdf///+cBZAkFhBAkZdHKLbMuFyTNlUIzt7SyLLFMyzWlUulKTa8UM0tLTbOrtD5pm6ZlFmbmcpWKpu2LaeJSBmoGKCYozO+PfpyvJ1ABgYF3j/vtdm435z3Lec2cAZ6+z3tmHJZlWQIAAAAM4GZ3AQAAAEBZIdwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIVzOFw6PHHH6/Q90xPT1ffvn0VFBQkh8Oh2bNnV+j74+Ls27dPDodDixcvLtX6ixcvlsPh0L59+5xtHTp0UIcOHcqkvgv5+zn/+OOPy+Fw6OjRoxXy/pGRkRo4cGCFvFd5udhzAPgnIdyiSvrmm2/Ut29fRUREyNvbW5dddpluuOEGzZkzx+7SKqUxY8ZozZo1SkxM1P/93/+pS5cudpeEKmjz5s16/PHHlZGRYXcphVTm2gBULA+7CwBKavPmzerYsaPCw8M1ePBghYaG6uDBg9q6daueffZZjRw50u4SK51PPvlEPXv21MMPP2x3KagkPvrooxKvs3nzZk2ePFkDBw5UQEBAsdf7888/5eFRvn9uzlfbrl275OZWtftyIiIi9Oeff6patWp2lwJUeoRbVDlTp06Vv7+/tm/fXuiP2OHDh+0pqpI7fPhwscJIdna2fH19y7+gKu7UqVPy9PSs0oHJ09OzXLefn5+v3NxceXt7y9vbu1zf60K8vLxsff+y4HA4bD+OQFVRdX8z4x/r559/VpMmTYoMa7Vq1XKZXrRokTp16qRatWrJy8tLjRs31vz58wutFxkZqZtvvlkbNmxQixYt5OPj
o6ZNm2rDhg2SpHfeeUdNmzaVt7e3mjdvri+++MJl/YEDB6p69erau3ev4uLi5Ovrq7CwME2ZMkWWZV1wn3799Vfde++9CgkJkZeXl5o0aaKXX3650HJz5sxRkyZNdMkll6hmzZpq0aKFli5des7tFoy1tCxL8+bNk8PhkMPhcJm3ceNGDRs2TLVq1VKdOnWc6z7//PNq0qSJvLy8FBYWpuHDhxf6yrdDhw666qqr9PXXX6t9+/a65JJLdPnll2vZsmWSpI0bNyo6Olo+Pj5q0KCBPv744wsei9zcXE2cOFHNmzeXv7+/fH19df3112v9+vUXXFf6f5/lp59+qlatWsnb21v16tXTq6++WmjZvXv36tZbb1VgYKAuueQStW7dWqtXr3ZZZsOGDXI4HHrjjTf02GOP6bLLLtMll1yirKws5+d+4MAB3Xzzzapevbouu+wyzZs3T9Jfw2c6deokX19fRUREFPqsjh07pocfflhNmzZV9erV5efnp65du+qrr74q1r4W5bvvvlOnTp3k4+OjOnXq6IknnlB+fn6h5Yoac3u+8+vxxx/XI488IkmKiopynksF43gdDodGjBihJUuWOM+b5ORk57yixpkfPXpUt912m/z8/BQUFKQHH3xQp06dcs4/3zjTs7d5odqKGnNbks/+rbfe0tSpU1WnTh15e3urc+fO2rNnT6Ga/m7gwIGKjIws1F4w5vhsa9euVdu2bRUQEKDq1aurQYMGevTRR897LArOv19//VW9evVS9erVFRwcrIcfflh5eXku2//999919913y8/PTwEBAYqPj9dXX31VrHG8Bb8rPvvsMyUkJCg4OFi+vr7q3bu3jhw54rLsuT7rv38GBdv89NNPNWrUKAUHBysgIED333+/cnNzlZGRoQEDBqhmzZqqWbOmxo4d6/K7tOB4zJw5U7NmzVJERIR8fHzUvn17ffvtt87lFi1aJIfDUeh3tiRNmzZN7u7u+vXXX8+7/6h66LlFlRMREaEtW7bo22+/1VVXXXXeZefPn68mTZqoR48e8vDw0KpVqzRs2DDl5+dr+PDhLsvu2bNHd955p+6//37dddddmjlzprp3764FCxbo0Ucf1bBhwyRJSUlJuu222wp91ZmXl6cuXbqodevWmjFjhpKTkzVp0iSdOXNGU6ZMOWeN6enpat26tTMcBAcH68MPP9SgQYOUlZWl0aNHS5JefPFFjRo1Sn379nWGgK+//lopKSm68847i9x2u3bt9H//93+6++67dcMNN2jAgAGFlhk2bJiCg4M1ceJEZWdnS/rrj+/kyZMVGxuroUOHateuXZo/f762b9+uzz77zOWr0T/++EM333yz+vXrp1tvvVXz589Xv379tGTJEo0ePVoPPPCA7rzzTj311FPq27evDh48qBo1apzzeGRlZem///2v7rjjDg0ePFjHjx/XSy+9pLi4OG3btk3XXHPNOdctsGfPHvXt21eDBg1SfHy8Xn75ZQ0cOFDNmzdXkyZNnMf9uuuu08mTJzVq1CgFBQXplVdeUY8ePbRs2TL17t3bZZv/+c9/5OnpqYcfflg5OTnOns+8vDx17dpV7dq104wZM7RkyRKNGDFCvr6++ve//63+/fvrlltu0YIFCzRgwADFxMQoKipK0l8Ba+XKlbr11lsVFRWl9PR0vfDCC2rfvr2+//57hYWFXXBfz5aWlqaOHTvqzJkzGj9+vHx9fbVw4UL5+PhccN0LnV+33HKLfvrpJ73++uuaNWuWLr30UklScHCwcxuffPKJ3nrrLY0YMUKXXnppkcHubLfddpsiIyOVlJSkrVu36rnnntMff/xR5H9Ezqc4tZ2tpJ/99OnT5ebmpocffliZmZmaMWOG+vfvr5SUlBLVeS7fffedbr75Zl199dWaMmWKvLy8tGfPHn322WcXXDcvL09xcXGKjo7WzJkz9fHHH+vpp59W/fr1NXToUEl/9aJ3795d27Zt09ChQ9WwYUO9++67
io+PL1GdI0eOVM2aNTVp0iTt27dPs2fP1ogRI/Tmm2+War8LthkaGqrJkydr69atWrhwoQICArR582aFh4dr2rRp+uCDD/TUU0/pqquuKvQ77NVXX9Xx48c1fPhwnTp1Ss8++6w6deqkb775RiEhIerbt6+GDx+uJUuW6Nprr3VZd8mSJerQoYMuu+yyUtePSsoCqpiPPvrIcnd3t9zd3a2YmBhr7Nix1po1a6zc3NxCy548ebJQW1xcnFWvXj2XtoiICEuStXnzZmfbmjVrLEmWj4+PtX//fmf7Cy+8YEmy1q9f72yLj4+3JFkjR450tuXn51vdunWzPD09rSNHjjjbJVmTJk1yTg8aNMiqXbu2dfToUZea+vXrZ/n7+zv3oWfPnlaTJk0ucHSKJskaPny4S9uiRYssSVbbtm2tM2fOONsPHz5seXp6WjfeeKOVl5fnbJ87d64lyXr55Zedbe3bt7ckWUuXLnW2/fjjj5Yky83Nzdq6dauzveB4Llq06Ly1njlzxsrJyXFp++OPP6yQkBDr3nvvveC+FnyWmzZtctknLy8v66GHHnK2jR492pJk/e9//3O2HT9+3IqKirIiIyOd+75+/XpLklWvXr1C51PB5z5t2jSXWn18fCyHw2G98cYbhY7L2Z/9qVOnXI6xZVlWamqq5eXlZU2ZMsWlrTjHrmCfUlJSXPbd39/fkmSlpqY629u3b2+1b9/eOV2c8+upp54qtJ0CBZ/5d999V+S8s/d70qRJliSrR48eLssNGzbMkmR99dVXlmWdf7//vs3z1RYREWHFx8c7p0v62Tdq1MjlnHz22WctSdY333xT6L3OFh8fb0VERBRqL9j/ArNmzbIkufye+LuijkXB+Xf2uWJZlnXttddazZs3d04vX77ckmTNnj3b2ZaXl2d16tSpWOdVwe+K2NhYKz8/39k+ZswYy93d3crIyHC2/f1zKfD3z6Bgm3FxcS7bjImJsRwOh/XAAw84286cOWPVqVPH5XwtOB4+Pj7WL7/84mxPSUmxJFljxoxxtt1xxx1WWFiYy8/azp07i7XvqJoYloAq54YbbtCWLVvUo0cPffXVV5oxY4bi4uJ02WWX6b333nNZ9uweq8zMTB09elTt27fX3r17lZmZ6bJs48aNFRMT45yOjo6WJHXq1Enh4eGF2vfu3VuothEjRjj/XdATm5ube86v4y3L0vLly9W9e3dZlqWjR486X3FxccrMzNTOnTslSQEBAfrll1+0ffv2Yh2n4ho8eLDc3d2d0x9//LFyc3M1evRol57pwYMHy8/Pr9BXt9WrV1e/fv2c0w0aNFBAQIAaNWrkPFbS+Y/b2dzd3Z29ovn5+Tp27JjOnDmjFi1aOI/FhTRu3FjXX3+9czo4OFgNGjRwee8PPvhArVq1Utu2bV32ZciQIdq3b5++//57l23Gx8efswf0vvvuc/47ICBADRo0kK+vr2677TZne8FxObsGLy8v5zHOy8vT77//7vxKurj7erYPPvhArVu3VqtWrVz2vX///hdctyzOr/bt26tx48bFXv7v354UXAz6wQcflLqG4ijpZ3/PPfe4jFEuOLcudC4XV8EQq3fffbfIISQX8sADD7hMX3/99S61JScnq1q1aho8eLCzzc3NrdDxv5AhQ4a4DKe4/vrrlZeXp/3795e45gKDBg1y2WZ0dLQsy9KgQYOcbe7u7mrRokWRx7tXr14uPa+tWrVSdHS0yzk0YMAAHTp0yGVo05IlS+Tj46M+ffqUunZUXoRbVEktW7bUO++8oz/++EPbtm1TYmKijh8/rr59+7r8Yfrss88UGxsrX19fBQQEKDg42DmO7e/h9uwAK0n+/v6SpLp16xbZ/scff7i0u7m5qV69ei5tV155pSS53F/0bEeOHFFGRoYWLlyo4OBgl9c999wj6f9dJDdu3DhVr15drVq10hVXXKHhw4cX62vLCyn4irxAwR+qBg0auLR7enqqXr16hf6Q1alT
p9D4QX9//2Ift6K88soruvrqq+Xt7a2goCAFBwdr9erVhT6zc/n7ZylJNWvWdHnv/fv3F9pHSWrUqJFz/tn+fpwKeHt7F/r629/f/5zH5ewa8vPzNWvWLF1xxRXy8vLSpZdequDgYH399dfF3tez7d+/X1dccUWh9qL28+/K4vw61zE6l7/XWr9+fbm5uZ3z56WslPSz//v5VLNmTUnFO5eL4/bbb1ebNm103333KSQkRP369dNbb71VrKBb1PlX1Lleu3ZtXXLJJS7LXX755SWqszyOQ0l+7xb1PkWd71deeaXLOXTDDTeodu3aWrJkiaS/fu5ef/119ezZ87xDpFB1EW5RpXl6eqply5aaNm2a5s+fr9OnT+vtt9+W9NeFZ507d9bRo0f1zDPPaPXq1Vq7dq3GjBkjSYX+cJzde1mcdqsYF4pdSEENd911l9auXVvkq02bNpL++sO7a9cuvfHGG2rbtq2WL1+utm3batKkSRdVQ3HGY55PWR+31157TQMHDlT9+vX10ksvKTk5WWvXrlWnTp2K3atVHp/ZuY7Txez/tGnTlJCQoHbt2um1117TmjVrtHbtWjVp0qRUPXgXoyzOr4s9l/7+n4G/Txf4+8VS5a2051Nx6/fx8dGmTZv08ccf6+6779bXX3+t22+/XTfccMMF9/VctZWHi/m5Otd+lOTnp7Q/v+7u7rrzzju1fPlynTp1SuvXr9ehQ4d01113lWp7qPy4oAzGaNGihSTpt99+kyStWrVKOTk5eu+991x6B4p71X1J5efna+/evc7eWkn66aefJOmcF9YEBwerRo0aysvLU2xs7AXfw9fXV7fffrtuv/125ebm6pZbbtHUqVOVmJhYZrcJioiIkPTXvUHP7onOzc1Vampqseq8GMuWLVO9evX0zjvvuISDiw3xfxcREaFdu3YVav/xxx+d88vbsmXL1LFjR7300ksu7RkZGc6LokoiIiJCu3fvLtRe1H4W5ULn17nCWmnt3r3bpbd3z549ys/Pd/68FPQM/v0uHUV9DV6S2irqs69Zs2aRD5Uoqn43Nzd17txZnTt31jPPPKNp06bp3//+t9avX3/RP3MRERFav369Tp486dJ7W5w7PpRUUfucm5vr/L1c1oo633/66adCv3MHDBigp59+WqtWrdKHH36o4OBgxcXFlUtNsB89t6hy1q9fX+T/4AvGWBV83VjwP/+zl83MzNSiRYvKrba5c+c6/21ZlubOnatq1aqpc+fORS7v7u6uPn36aPny5S63rylw9m12fv/9d5d5np6eaty4sSzL0unTp8toD6TY2Fh5enrqueeeczl2L730kjIzM9WtW7cye6+iFPW5paSkaMuWLWX6PjfddJO2bdvmst3s7GwtXLhQkZGRJRo7Wlru7u6FzuW333671Lcmuummm7R161Zt27bN2XbkyBHn17HnU5zzq+AeyGX1FLCCW6YVKHjCYNeuXSVJfn5+uvTSS7Vp0yaX5Z5//vlC2ypJbRX12devX1+ZmZn6+uuvnW2//fabVqxY4bLcsWPHCq1bcFeQnJyci64jLi5Op0+f1osvvuhsy8/PL3T8y0L9+vULfV4LFy4st972lStXuvy8bNu2TSkpKc5zqMDVV1+tq6++Wv/973+1fPly9evXr9wfLAL78Mmiyhk5cqROnjyp3r17q2HDhsrNzdXmzZv15ptvKjIy0jlW9cYbb5Snp6e6d++u+++/XydOnNCLL76oWrVqlUsvgre3t5KTkxUfH6/o6Gh9+OGHWr16tR599NFz3pJI+us2Q+vXr1d0dLQGDx6sxo0b69ixY9q5c6c+/vhj5x++G2+8UaGhoWrTpo1CQkL0ww8/aO7cuerWrVuZjhsLDg5WYmKiJk+erC5duqhHjx7atWuXnn/+ebVs2bLcv8q7+eab9c4776h3797q1q2bUlNTtWDBAjVu3FgnTpwos/cZP368Xn/9dXXt2lWjRo1SYGCgXnnlFaWmpmr5
8uUV8oCGm2++WVOmTNE999yj6667Tt98842WLFlSaOx2cY0dO9b5eOUHH3zQeSuwiIgIl4BVlOKcX82bN5ck/fvf/1a/fv1UrVo1de/evdQP/khNTVWPHj3UpUsXbdmyRa+99pruvPNONWvWzLnMfffdp+nTp+u+++5TixYttGnTJuc3ImcrSW0V9dn369dP48aNU+/evTVq1CidPHlS8+fP15VXXulyweCUKVO0adMmdevWTRERETp8+LCef/551alTx+Wit9Lq1auXWrVqpYceekh79uxRw4YN9d577zl/t5Rlj/x9992nBx54QH369NENN9ygr776SmvWrCnVNxHFcfnll6tt27YaOnSocnJyNHv2bAUFBWns2LGFlh0wYIDzKY0MSTAb4RZVzsyZM/X222/rgw8+0MKFC5Wbm6vw8HANGzZMjz32mPPK4wYNGmjZsmV67LHH9PDDDys0NFRDhw5VcHCw7r333jKvy93dXcnJyRo6dKgeeeQR1ahRQ5MmTdLEiRPPu15ISIi2bdumKVOm6J133tHzzz+voKAgNWnSRE8++aRzufvvv19LlizRM888oxMnTqhOnToaNWqUHnvssTLfl8cff1zBwcGaO3euxowZo8DAQA0ZMkTTpk0r98d/Dhw4UGlpaXrhhRe0Zs0aNW7cWK+99prefvtt50M1ykJISIg2b96scePGac6cOTp16pSuvvpqrVq1qtx7pws8+uijys7O1tKlS/Xmm2/qX//6l1avXq3x48eXanu1a9fW+vXrNXLkSE2fPl1BQUF64IEHFBYW5nL1eVGKc361bNlS//nPf7RgwQIlJycrPz9fqamppQ63b775piZOnKjx48fLw8NDI0aM0FNPPeWyzMSJE3XkyBEtW7ZMb731lrp27aoPP/yw0ANbSlJbRX32QUFBWrFihRISEjR27FhFRUUpKSlJu3fvdgm3PXr00L59+/Tyyy/r6NGjuvTSS9W+fXtNnjzZeYHVxXB3d9fq1av14IMP6pVXXpGbm5t69+6tSZMmqU2bNmX65LPBgwcrNTXVOV7++uuv19q1a8/57dXFGjBggNzc3DR79mwdPnxYrVq10ty5c1W7du1Cy/bv31/jxo1T/fr1Xe4oAvM4rLK4Kgb4hxs4cKCWLVtWpj2LAFCeVq5cqd69e+vTTz91XrhaVezbt09RUVF66qmnnL2xF3L06FHVrl1bEydO1IQJE8q5QtiJMbcAABjuzz//dJnOy8vTnDlz5Ofnp3/96182VVWxFi9erLy8PN199912l4JyxrAEAAAMN3LkSP3555+KiYlRTk6O3nnnHW3evFnTpk276Fu4VXaffPKJvv/+e02dOlW9evW64GOhUfURbgEAMFynTp309NNP6/3339epU6d0+eWXa86cOS5PVTTVlClTtHnzZrVp08Z5Rw6YjTG3AAAAMAZjbgEAAGAMwi0AAACMwZhb/fWklkOHDqlGjRpl/nhJAAAAXDzLsnT8+HGFhYWd92ErhFtJhw4dUt26de0uAwAAABdw8OBB1alT55zzCbeS89GSBw8elJ+fn83VAAAA4O+ysrJUt27dCz5ynnCr//dcbT8/P8ItAABAJXahIaRcUAYAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIzhYXcBAGCHyPGr7S6hSPumd7O7BACo0ui5BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiD
cAsAAABjEG4BAABgDFvD7aZNm9S9e3eFhYXJ4XBo5cqVhZb54Ycf1KNHD/n7+8vX11ctW7bUgQMHnPNPnTql4cOHKygoSNWrV1efPn2Unp5egXsBAACAysLWcJudna1mzZpp3rx5Rc7/+eef1bZtWzVs2FAbNmzQ119/rQkTJsjb29u5zJgxY7Rq1Sq9/fbb2rhxow4dOqRbbrmlonYBAAAAlYiHnW/etWtXde3a9Zzz//3vf+umm27SjBkznG3169d3/jszM1MvvfSSli5dqk6dOkmSFi1apEaNGmnr1q1q3bp1+RUPAACASqfSjrnNz8/X6tWrdeWVVyouLk61atVSdHS0y9CFHTt26PTp04qNjXW2NWzYUOHh4dqyZYsNVQMAAMBOlTbcHj58WCdOnND06dPVpUsXffTRR+rdu7duueUWbdy4UZKUlpYmT09PBQQEuKwbEhKitLS0c247JydHWVlZLi8AAABUfbYOSzif/Px8SVLPnj01ZswYSdI111yjzZs3a8GCBWrfvn2pt52UlKTJkyeXSZ0AAACoPCptz+2ll14qDw8PNW7c2KW9UaNGzrslhIaGKjc3VxkZGS7LpKenKzQ09JzbTkxMVGZmpvN18ODBMq8fAAAAFa/ShltPT0+1bNlSu3btcmn/6aefFBERIUlq3ry5qlWrpnXr1jnn79q1SwcOHFBMTMw5t+3l5SU/Pz+XFwAAAKo+W4clnDhxQnv27HFOp6am6ssvv1RgYKDCw8P1yCOP6Pbbb1e7du3UsWNHJScna9WqVdqwYYMkyd/fX4MGDVJCQoICAwPl5+enkSNHKiYmhjslAAAA/APZGm4///xzdezY0TmdkJAgSYqPj9fixYvVu3dvLViwQElJSRo1apQaNGig5cuXq23bts51Zs2aJTc3N/Xp00c5OTmKi4vT888/X+H7AgAAAPs5LMuy7C7CbllZWfL391dmZiZDFIB/iMjxq+0uoUj7pnezuwQAqJSKm9cq7ZhbAAAAoKQItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxvCwuwAAQOUXOX613SUUsm96N7tLAFAJ0XMLAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxuKAMQLmrjBcjAQDMRM8tAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAxbw+2mTZvUvXt3hYWFyeFwaOXKledc9oEHHpDD4dDs2bNd2o8dO6b+/fvLz89PAQEBGjRokE6cOFG+hQMAAKBSsjXcZmdnq1mzZpo3b955l1uxYoW2bt2qsLCwQvP69++v7777TmvXrtX777+vTZs2aciQIeVVMgAAACoxDzvfvGvXruratet5l/n11181cuRIrVmzRt26dXOZ98MPPyg5OVnbt29XixYtJElz5szRTTfdpJkzZxYZhgEAAGCuSj3mNj8/X3fffbceeeQRNWnSpND8LVu2KCAgwBlsJSk2NlZubm5KSUk553ZzcnKUlZXl8gIAAEDVV6nD7ZNPPikPDw+NGjWqyPlpaWmqVauWS5uHh4cCAwOVlpZ2zu0mJSXJ39/f+apbt26Z1g0AAAB7VNpwu2PHDj377LNavHixHA5HmW47MTFRmZmZztfBgwfLdPsAAACwR6UNt//73/90+PBhhYeHy8PDQx4eHtq/f78eeughRUZGSpJCQ0N1+PBhl/XOnDmjY8eOKTQ09Jzb9vLykp+fn8sLAAAAVZ+tF5Sdz913363Y2FiXtri4ON1999265557JEkxMTHKyMjQjh07
1Lx5c0nSJ598ovz8fEVHR1d4zQAAALCXreH2xIkT2rNnj3M6NTVVX375pQIDAxUeHq6goCCX5atVq6bQ0FA1aNBAktSoUSN16dJFgwcP1oIFC3T69GmNGDFC/fr1404JAAAA/0C2Dkv4/PPPde211+raa6+VJCUkJOjaa6/VxIkTi72NJUuWqGHDhurcubNuuukmtW3bVgsXLiyvkgEAAFCJ2dpz26FDB1mWVezl9+3bV6gtMDBQS5cuLcOqAAAAUFVV2gvKAAAAgJKqtBeUAcA/UeT41XaXAABVGj23AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDFvD7aZNm9S9e3eFhYXJ4XBo5cqVznmnT5/WuHHj1LRpU/n6+iosLEwDBgzQoUOHXLZx7Ngx9e/fX35+fgoICNCgQYN04sSJCt4TAAAAVAa2htvs7Gw1a9ZM8+bNKzTv5MmT2rlzpyZMmKCdO3fqnXfe0a5du9SjRw+X5fr376/vvvtOa9eu1fvvv69NmzZpyJAhFbULAAAAqEQclmVZdhchSQ6HQytWrFCvXr3Oucz27dvVqlUr7d+/X+Hh4frhhx/UuHFjbd++XS1atJAkJScn66abbtIvv/yisLCwYr13VlaW/P39lZmZKT8/v7LYHQBniRy/2u4SYKB907vZXQKAClTcvFalxtxmZmbK4XAoICBAkrRlyxYFBAQ4g60kxcbGys3NTSkpKTZVCQAAALt42F1AcZ06dUrjxo3THXfc4UzraWlpqlWrlstyHh4eCgwMVFpa2jm3lZOTo5ycHOd0VlZW+RQNAACAClUlem5Pnz6t2267TZZlaf78+Re9vaSkJPn7+ztfdevWLYMqAQAAYLdKH24Lgu3+/fu1du1alzEWoaGhOnz4sMvyZ86c0bFjxxQaGnrObSYmJiozM9P5OnjwYLnVDwAAgIpTqYclFATb3bt3a/369QoKCnKZHxMTo4yMDO3YsUPNmzeXJH3yySfKz89XdHT0Obfr5eUlLy+vcq0dAAAAFc/WcHvixAnt2bPHOZ2amqovv/xSgYGBql27tvr27audO3fq/fffV15ennMcbWBgoDw9PdWoUSN16dJFgwcP1oIFC3T69GmNGDFC/fr1K/adEgAAAGAOW8Pt559/ro4dOzqnExISJEnx8fF6/PHH9d5770mSrrnmGpf11q9frw4dOkiSlixZohEjRqhz585yc3NTnz599Nxzz1VI/QAAAKhcbA23HTp00Plus1ucW/AGBgZq6dKlZVkWAAAAqqhKf0EZAAAAUFyEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAY3jYXQCAshM5frXdJQAAYCt6bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGsDXcbtq0Sd27d1dYWJgcDodWrlzpMt+yLE2cOFG1a9eW
j4+PYmNjtXv3bpdljh07pv79+8vPz08BAQEaNGiQTpw4UYF7AQAAgMrC1nCbnZ2tZs2aad68eUXOnzFjhp577jktWLBAKSkp8vX1VVxcnE6dOuVcpn///vruu++0du1avf/++9q0aZOGDBlSUbsAAACASsTDzjfv2rWrunbtWuQ8y7I0e/ZsPfbYY+rZs6ck6dVXX1VISIhWrlypfv366YcfflBycrK2b9+uFi1aSJLmzJmjm266STNnzlRYWFiF7QsAAADsV2nH3KampiotLU2xsbHONn9/f0VHR2vLli2SpC1btiggIMAZbCUpNjZWbm5uSklJOee2c3JylJWV5fICAABA1Vdpw21aWpokKSQkxKU9JCTEOS8tLU21atVyme/h4aHAwEDnMkVJSkqSv7+/81W3bt0yrh4AAAB2qLThtjwlJiYqMzPT+Tp48KDdJQEAAKAMVNpwGxoaKklKT093aU9PT3fOCw0N1eHDh13mnzlzRseOHXMuUxQvLy/5+fm5vAAAAFD1VdpwGxUVpdDQUK1bt87ZlpWVpZSUFMXExEiSYmJilJGRoR07djiX+eSTT5Sfn6/o6OgKrxkAAAD2svVuCSdOnNCePXuc06mpqfryyy8VGBio8PBwjR49Wk888YSuuOIKRUVFacKECQoLC1OvXr0kSY0aNVKXLl00ePBgLViwQKdPn9aIESPUr18/7pQAAADwD2RruP3888/VsWNH53RCQoIkKT4+XosXL9bYsWOVnZ2tIUOGKCMjQ23btlVycrK8vb2d6yxZskQjRoxQ586d5ebmpj59+ui5556r8H0BAACA/RyWZVl2F2G3rKws+fv7KzMzk/G3qNIix6+2uwSgwuyb3s3uEgBUoOLmtUo75hYAAAAoKcItAAAAjFGqcNupUydlZGQUas/KylKnTp0utiYAAACgVEoVbjds2KDc3NxC7adOndL//ve/iy4KAAAAKI0S3S3h66+/dv77+++/d3nEbV5enpKTk3XZZZeVXXUAAABACZQo3F5zzTVyOBxyOBxFDj/w8fHRnDlzyqw4AAAAoCRKFG5TU1NlWZbq1aunbdu2KTg42DnP09NTtWrVkru7e5kXCQAAABRHicJtRESEJCk/P79cigEAAAAuRqmfULZ7926tX79ehw8fLhR2J06ceNGFAQAAACVVqnD74osvaujQobr00ksVGhoqh8PhnOdwOAi3AAAAsEWpwu0TTzyhqVOnaty4cWVdDwAAAFBqpbrP7R9//KFbb721rGsBAAAALkqpwu2tt96qjz76qKxrAQAAAC5KqYYlXH755ZowYYK2bt2qpk2bqlq1ai7zR40aVSbFAQAAACXhsCzLKulKUVFR596gw6G9e/deVFEVLSsrS/7+/srMzJSfn5/d5QClFjl+td0lABVm3/RudpcAoAIVN6+Vquc2NTW11IUBAAAA5aVUY24BAACAyqhUPbf33nvveee//PLLpSoGAAAAuBilCrd//PGHy/Tp06f17bffKiMjQ506dSqTwgAAAICSKlW4XbFiRaG2/Px8DR06VPXr17/oogAAAIDSKLMxt25ubkpISNCsWbPKapMAAABAiZTpBWU///yzzpw5U5abBAAAAIqtVMMSEhISXKYty9Jvv/2m1atXKz4+vkwKAwAAAEqqVOH2iy++cJl2c3NTcHCwnn766QveSQEAAAAoL6UKt+vXry/rOgAAAICLVqpwW+DIkSPatWuXJKlBgwYKDg4uk6IAAACA0ijVBWXZ2dm69957Vbt2bbVr107t2rVTWFiYBg0apJMnT5Z1jQAAAECxlCrcJiQkaOPGjVq1apUyMjKUkZGhd999Vxs3btRDDz1U1jUCAAAAxVKqYQnLly/XsmXL1KFDB2fbTTfdJB8fH912222aP39+WdUHAAAAFFupem5PnjypkJCQQu21atViWAIAAABsU6pwGxMTo0mTJunUqVPOtj///FOTJ09WTExMmRUHAAAAlESphiXM
nj1bXbp0UZ06ddSsWTNJ0ldffSUvLy999NFHZVogAAAAUFylCrdNmzbV7t27tWTJEv3444+SpDvuuEP9+/eXj49PmRYIAAAAFFepwm1SUpJCQkI0ePBgl/aXX35ZR44c0bhx48qkOAAAAKAkSjXm9oUXXlDDhg0LtTdp0kQLFiy46KIAAACA0ihVuE1LS1Pt2rULtQcHB+u333676KIAAACA0ihVuK1bt64+++yzQu2fffaZwsLCLrooAAAAoDRKNeZ28ODBGj16tE6fPq1OnTpJktatW6exY8fyhDIAAADYplTh9pFHHtHvv/+uYcOGKTc3V5Lk7e2tcePGKTExsUwLBAAAAIqrVOHW4XDoySef1IQJE/TDDz/Ix8dHV1xxhby8vMq6PgAAAKDYSjXmtkD16tXVsmVLXXXVVeUSbPPy8jRhwgRFRUXJx8dH9evX13/+8x9ZluVcxrIsTZw4UbVr15aPj49iY2O1e/fuMq8FAAAAld9Fhdvy9uSTT2r+/PmaO3eufvjhBz355JOaMWOG5syZ41xmxowZeu6557RgwQKlpKTI19dXcXFxLo8GBgAAwD9DqYYlVJTNmzerZ8+e6tatmyQpMjJSr7/+urZt2ybpr17b2bNn67HHHlPPnj0lSa+++qpCQkK0cuVK9evXz7baAQAAUPEqdc/tddddp3Xr1umnn36SJH311Vf69NNP1bVrV0lSamqq0tLSFBsb61zH399f0dHR2rJlyzm3m5OTo6ysLJcXAAAAqr5K3XM7fvx4ZWVlqWHDhnJ3d1deXp6mTp2q/v37S/rrYRKSFBIS4rJeSEiIc15RkpKSNHny5PIrHAAAALao1D23b731lpYsWaKlS5dq586deuWVVzRz5ky98sorF7XdxMREZWZmOl8HDx4so4oBAABgp0rdc/vII49o/PjxzrGzTZs21f79+5WUlKT4+HiFhoZKktLT010eB5yenq5rrrnmnNv18vLitmUAAAAGqtQ9tydPnpSbm2uJ7u7uys/PlyRFRUUpNDRU69atc87PyspSSkqKYmJiKrRWAAAA2K9S99x2795dU6dOVXh4uJo0aaIvvvhCzzzzjO69915Jfz1MYvTo0XriiSd0xRVXKCoqShMmTFBYWJh69eplb/EAAACocJU63M6ZM0cTJkzQsGHDdPjwYYWFhen+++/XxIkTncuMHTtW2dnZGjJkiDIyMtS2bVslJyfL29vbxsoBAABgB4d19uO+/qGysrLk7++vzMxM+fn52V0OUGqR41fbXQJQYfZN72Z3CQAqUHHzWqUecwsAAACUBOEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMIaH3QUAAFAakeNX211CkfZN72Z3CcA/Gj23AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBk8oA0qpsj4dCQCAfzJ6bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwRqUPt7/++qvuuusuBQUFycfHR02bNtXnn3/unG9ZliZOnKjatWvLx8dHsbGx2r17t40VAwAAwC6VOtz+8ccfatOmjapVq6YPP/xQ33//vZ5++mnVrFnTucyMGTP03HPPacGCBUpJSZGvr6/i4uJ06tQpGysHAACAHTzsLuB8nnzySdWtW1eLFi1ytkVFRTn/bVmWZs+erccee0w9e/aUJL366qsKCQnRypUr1a9fvwqvGQAAAPap1D237733nlq0aKFbb71VtWrV0rXXXqsXX3zROT81NVVpaWmKjY11tvn7+ys6Olpbtmyxo2QA
AADYqFKH271792r+/Pm64oortGbNGg0dOlSjRo3SK6+8IklKS0uTJIWEhLisFxIS4pxXlJycHGVlZbm8AAAAUPVV6mEJ+fn5atGihaZNmyZJuvbaa/Xtt99qwYIFio+PL/V2k5KSNHny5LIqEwAAAJVEpe65rV27tho3buzS1qhRIx04cECSFBoaKklKT093WSY9Pd05ryiJiYnKzMx0vg4ePFjGlQMAAMAOlTrctmnTRrt27XJp++mnnxQRESHpr4vLQkNDtW7dOuf8rKwspaSkKCYm5pzb9fLykp+fn8sLAAAAVV+lHpYwZswYXXfddZo2bZpuu+02bdu2TQsXLtTChQslSQ6HQ6NHj9YTTzyhK664QlFRUZowYYLCwsLUq1cve4sHAABAhavU4bZly5ZasWKFEhMTNWXKFEVFRWn27Nnq37+/c5mxY8cqOztbQ4YMUUZGhtq2bavk5GR5e3vbWDkAAADs4LAsy7K7CLtlZWXJ399fmZmZDFFAsUWOX213CQAqoX3Tu9ldAmCk4ua1Sj3mFgAAACgJwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMD7sLAADAJJHjV9tdQiH7pnezuwSgwtBzCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBhVKtxOnz5dDodDo0ePdradOnVKw4cPV1BQkKpXr64+ffooPT3dviIBAABgmyoTbrdv364XXnhBV199tUv7mDFjtGrVKr399tvauHGjDh06pFtuucWmKgEAAGCnKhFuT5w4of79++vFF19UzZo1ne2ZmZl66aWX9Mwzz6hTp05q3ry5Fi1apM2bN2vr1q02VgwAAAA7VIlwO3z4cHXr1k2xsbEu7Tt27NDp06dd2hs2bKjw8HBt2bLlnNvLyclRVlaWywsAAABVn4fdBVzIG2+8oZ07d2r79u2F5qWlpcnT01MBAQEu7SEhIUpLSzvnNpOSkjR58uSyLhUAAAA2q9Q9twcPHtSDDz6oJUuWyNvbu8y2m5iYqMzMTOfr4MGDZbZtAAAA2KdSh9sdO3bo8OHD+te//iUPDw95eHho48aNeu655+Th4aGQkBDl5uYqIyPDZb309HSFhoaec7teXl7y8/NzeQEAAKDqq9TDEjp37qxvvvnGpe2ee+5Rw4YNNW7cONWtW1fVqlXTunXr1KdPH0nSrl27dODAAcXExNhRMgAAAGxUqcNtjRo1dNVVV7m0+fr6KigoyNk+aNAgJSQkKDAwUH5+fho5cqRiYmLUunVrO0oGAACAjSp1uC2OWbNmyc3NTX369FFOTo7i4uL0/PPP210WAAAAbOCwLMuyuwi7ZWVlyd/fX5mZmYy/RbFFjl9tdwkAUCz7pnezuwTgohU3r1XqC8oAAACAkiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjFHl73ML83HLLQAAUFz03AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAw
BuEWAAAAxiDcAgAAwBiVPtwmJSWpZcuWqlGjhmrVqqVevXpp165dLsucOnVKw4cPV1BQkKpXr64+ffooPT3dpooBAABgl0ofbjdu3Kjhw4dr69atWrt2rU6fPq0bb7xR2dnZzmXGjBmjVatW6e2339bGjRt16NAh3XLLLTZWDQAAADt42F3AhSQnJ7tML168WLVq1dKOHTvUrl07ZWZm6qWXXtLSpUvVqVMnSdKiRYvUqFEjbd26Va1bt7ajbAAAANig0vfc/l1mZqYkKTAwUJK0Y8cOnT59WrGxsc5lGjZsqPDwcG3ZssWWGgEAAGCPSt9ze7b8/HyNHj1abdq00VVXXSVJSktLk6enpwICAlyWDQkJUVpaWpHbycnJUU5OjnM6Kyur3GoGAABAxalSPbfDhw/Xt99+qzfeeOOitpOUlCR/f3/nq27dumVUIQAAAOxUZcLtiBEj9P7772v9+vWqU6eOsz00NFS5ubnKyMhwWT49PV2hoaFFbisxMVGZmZnO18GDB8uzdAAAAFSQSj8swbIsjRw5UitWrNCGDRsUFRXlMr958+aqVq2a1q1bpz59+kiSdu3apQMHDigmJqbIbXp5ecnLy6vcawcAoDKIHL/a7hKKtG96N7tLgIEqfbgdPny4li5dqnfffVc1atRwjqP19/eXj4+P/P39NWjQICUkJCgwMFB+fn4aOXKkYmJiuFMCAADAP0ylD7fz58+XJHXo0MGlfdGiRRo4cKAkadasWXJzc1OfPn2Uk5OjuLg4Pf/88xVcKQAAAOxW6cOtZVkXXMbb21vz5s3TvHnzKqAiAAAAVFZV5oIyAAAA4EIItwAAADBGpR+WgIpVWa+oBQAAKA56bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxPOwuAAAA/DNFjl9tdwmF7Jveze4ScJHouQUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDO5za5PKeG8/AACAqo6eWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGIMnlAEAAPz/KusTRPdN72Z3CVUGPbcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDGMCbfz5s1TZGSkvL29FR0drW3bttldEgAAACqYEeH2zTffVEJCgiZNmqSdO3eqWbNmiouL0+HDh+0uDQAAABXIYVmWZXcRFys6OlotW7bU3LlzJUn5+fmqW7euRo4cqfHjx19w/aysLPn7+yszM1N+fn7lXa6kyvt4PwAAgOKqyMcCFzeveVRYReUkNzdXO3bsUGJiorPNzc1NsbGx2rJlS5Hr5OTkKCcnxzmdmZkp6a+DVlHyc05W2HsBAACUh4rMTgXvdaF+2Sofbo8ePaq8vDyFhIS4tIeEhOjHH38scp2kpCRNnjy5UHvdunXLpUYAAAAT+c+u+Pc8fvy4/P39zzm/yofb0khMTFRCQoJzOj8/X8eOHVNQUJAcDkeJt5eVlaW6devq4MGDFTas4Z+E41u+OL7li+Nbvji+5YvjW744viVjWZaOHz+usLCw8y5X5cPtpZdeKnd3d6Wnp7u0p6enKzQ0tMh1vLy85OXl5dIWEBBw0bX4+flxcpYjjm/54viWL45v+eL4li+Ob/ni+Bbf+XpsC1T5uyV4enqqefPmWrdunbMtPz9f69atU0xMjI2VAQAAoKJV+Z5bSUpISFB8fLxatGihVq1aafbs2crOztY999xjd2kAAACoQEaE29tvv11HjhzRxIkTlZaWpmuuuUbJycmFLjIrL15eXpo0aVKhoQ4oGxzf8sXxLV8c3/LF8S1fHN/yxfEtH0bc5xYAAACQDBhzCwAAABQg3AIAAMAY
hFsAAAAYg3ALAAAAYxBuy1iPHj0UHh4ub29v1a5dW3fffbcOHTpkd1lG2LdvnwYNGqSoqCj5+Piofv36mjRpknJzc+0uzRhTp07Vddddp0suuaRMHmzyTzdv3jxFRkbK29tb0dHR2rZtm90lGWPTpk3q3r27wsLC5HA4tHLlSrtLMkZSUpJatmypGjVqqFatWurVq5d27dpld1nGmD9/vq6++mrngxtiYmL04Ycf2l2WUQi3Zaxjx4566623tGvXLi1fvlw///yz+vbta3dZRvjxxx+Vn5+vF154Qd99951mzZqlBQsW6NFHH7W7NGPk5ubq1ltv1dChQ+0upcp78803lZCQoEmTJmnnzp1q1qyZ4uLidPjwYbtLM0J2draaNWumefPm2V2KcTZu3Kjhw4dr69atWrt2rU6fPq0bb7xR2dnZdpdmhDp16mj69OnasWOHPv/8c3Xq1Ek9e/bUd999Z3dpxuBWYOXsvffeU69evZSTk6Nq1arZXY5xnnrqKc2fP1979+61uxSjLF68WKNHj1ZGRobdpVRZ0dHRatmypebOnSvprycn1q1bVyNHjtT48eNtrs4sDodDK1asUK9evewuxUhHjhxRrVq1tHHjRrVr187ucowUGBiop556SoMGDbK7FCPQc1uOjh07piVLlui6664j2JaTzMxMBQYG2l0G4CI3N1c7duxQbGyss83NzU2xsbHasmWLjZUBJZeZmSlJ/K4tB3l5eXrjjTeUnZ2tmJgYu8sxBuG2HIwbN06+vr4KCgrSgQMH9O6779pdkpH27NmjOXPm6P7777e7FMDF0aNHlZeXV+gpiSEhIUpLS7OpKqDk8vPzNXr0aLVp00ZXXXWV3eUY45tvvlH16tXl5eWlBx54QCtWrFDjxo3tLssYhNtiGD9+vBwOx3lfP/74o3P5Rx55RF988YU++ugjubu7a8CAAWL0x7mV9PhK0q+//qouXbro1ltv1eDBg22qvGoozfEFAEkaPny4vv32W73xxht2l2KUBg0a6Msvv1RKSoqGDh2q+Ph4ff/993aXZQzG3BbDkSNH9Pvvv593mXr16snT07NQ+y+//KK6detq8+bNfOVwDiU9vocOHVKHDh3UunVrLV68WG5u/B/tfEpz/jLm9uLk5ubqkksu0bJly1zGgcbHxysjI4Nvc8oYY27Lx4gRI/Tuu+9q06ZNioqKsrsco8XGxqp+/fp64YUX7C7FCB52F1AVBAcHKzg4uFTr5ufnS5JycnLKsiSjlOT4/vrrr+rYsaOaN2+uRYsWEWyL4WLOX5SOp6enmjdvrnXr1jkDV35+vtatW6cRI0bYWxxwAZZlaeTIkVqxYoU2bNhAsK0A+fn55IQyRLgtQykpKdq+fbvatm2rmjVr6ueff9aECRNUv359em3LwK+//qoOHTooIiJCM2fO1JEjR5zzQkNDbazMHAcOHNCxY8d04MAB5eXl6csvv5QkXX755apevbq9xVUxCQkJio+PV4sWLdSqVSvNnj1b2dnZuueee+wuzQgnTpzQnj17nNOpqan68ssvFRgYqPDwcBsrq/qGDx+upUuX6t1331WNGjWc48T9/f3l4+Njc3VVX2Jiorp27arw8HAdP35cS5cu1YYNG7RmzRq7SzOHhTLz9ddfWx07drQCAwMtLy8vKzIy0nrggQesX375xe7SjLBo0SJLUpEvlI34+Pgij+/69evtLq1KmjNnjhUeHm55enparVq1srZu3Wp3ScZYv359kedqfHy83aVVeef6Pbto0SK7SzPCvffea0VERFienp5WcHCw1blzZ+ujjz6yuyyjMOYWAAAAxmDAIgAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADG+P8AvT6jf874v00AAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -840,7 +840,7 @@ { "data": { "text/plain": [ - "TensorType(float64, ())" + "TensorType(float64, shape=())" ] }, "execution_count": 24, @@ -870,18 +870,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "normal_rv{0, (0, 0), floatX, False}.1 [id A] 'y'\n", - " |RandomGeneratorSharedVariable() [id B]\n", - " |TensorConstant{[]} [id C]\n", - " |TensorConstant{11} [id D]\n", - " |TensorConstant{0} [id E]\n", - " |TensorConstant{1} [id F]\n" + "normal_rv{\"(),()->()\"}.1 [id A] 'y'\n", + " ├─ RNG() [id B]\n", + " ├─ NoneConst{None} [id C]\n", + " ├─ 0 [id D]\n", + " └─ 1 [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 25, @@ -901,7 +900,6 @@ "The inputs are always in the following order:\n", "1. `rng` shared variable\n", "2. `size`\n", - "3. `dtype` (number code)\n", "4. `arg1`\n", "5. `arg2`\n", "6. `argn`" @@ -923,7 +921,7 @@ { "data": { "text/plain": [ - "array(-1.4186441)" + "array(0.13049757)" ] }, "execution_count": 26, @@ -952,16 +950,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sample 0: -1.4186441029543038\n", - "Sample 1: -1.4186441029543038\n", - "Sample 2: -1.4186441029543038\n", - "Sample 3: -1.4186441029543038\n", - "Sample 4: -1.4186441029543038\n", - "Sample 5: -1.4186441029543038\n", - "Sample 6: -1.4186441029543038\n", - "Sample 7: -1.4186441029543038\n", - "Sample 8: -1.4186441029543038\n", - "Sample 9: -1.4186441029543038\n" + "Sample 0: 0.13049756565216164\n", + "Sample 1: 0.13049756565216164\n", + "Sample 2: 0.13049756565216164\n", + "Sample 3: 0.13049756565216164\n", + "Sample 4: 0.13049756565216164\n", + "Sample 5: 0.13049756565216164\n", + "Sample 6: 0.13049756565216164\n", + "Sample 7: 0.13049756565216164\n", + "Sample 8: 0.13049756565216164\n", + "Sample 9: 0.13049756565216164\n" ] } ], @@ -1013,18 +1011,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "normal_rv{0, (0, 0), floatX, False}.1 [id A]\n", - " |RandomGeneratorSharedVariable() [id B]\n", - " 
|TensorConstant{[]} [id C]\n", - " |TensorConstant{11} [id D]\n", - " |TensorConstant{0} [id E]\n", - " |TensorConstant{1.0} [id F]\n" + "normal_rv{\"(),()->()\"}.1 [id A]\n", + " ├─ RNG() [id B]\n", + " ├─ NoneConst{None} [id C]\n", + " ├─ 0 [id D]\n", + " └─ 1 [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 28, @@ -1062,16 +1059,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sample 0: 1.3064743941879295\n", - "Sample 1: 1.3064743941879295\n", - "Sample 2: 1.3064743941879295\n", - "Sample 3: 1.3064743941879295\n", - "Sample 4: 1.3064743941879295\n", - "Sample 5: 1.3064743941879295\n", - "Sample 6: 1.3064743941879295\n", - "Sample 7: 1.3064743941879295\n", - "Sample 8: 1.3064743941879295\n", - "Sample 9: 1.3064743941879295\n" + "Sample 0: 1.563007625068052\n", + "Sample 1: 1.563007625068052\n", + "Sample 2: 1.563007625068052\n", + "Sample 3: 1.563007625068052\n", + "Sample 4: 1.563007625068052\n", + "Sample 5: 1.563007625068052\n", + "Sample 6: 1.563007625068052\n", + "Sample 7: 1.563007625068052\n", + "Sample 8: 1.563007625068052\n", + "Sample 9: 1.563007625068052\n" ] } ], @@ -1095,7 +1092,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAArcAAAIQCAYAAACbhEYhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABCc0lEQVR4nO3deVyU9f7//+egMiCrKGsiopamqZUmUuaeu2appZnhkpaipdTJ6Jzczik82mKZS/V16XzSY1kuZSfLBbUSzTQzLT0ulJbiLigmKFy/P/wxxxGURWDg3eN+u83txvW+3nPN67rmYnjynvdcY7MsyxIAAABgADdXFwAAAAAUF8ItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi1QRthsNk2YMKFUH/Po0aPq3bu3qlatKpvNpmnTppXq4+PG/PLLL7LZbJo/f36R7j9//nzZbDb98ssvjrbWrVurdevWxVJffq4+5ydMmCCbzaYTJ06UyuPXrFlTAwcOLJXHKik3eg4AJiLcwig//vijevfurYiICHl4eOimm27Sfffdp+nTp7u6tDJpzJgx+uKLLxQfH6//+7//U6dOnVxdEsqhjRs3asKECTpz5oyrS8mlLNcGoGRUdHUBQHHZuHGj2rRpoxo1amjo0KEKCQnRoUOHtGnTJr3xxhsaNWqUq0ssc9auXav7779fzz77rKtLQRnx5ZdfFvo+Gzdu1MSJEzVw4ED5+/sX+H5//PGHKlYs2T9D16ttz549cnMr32M8ERER+uOPP1SpUiVXlwKUGYRbGOOll16Sn5+ftmzZkuuP2LFjx1xTVBl37NixAoWR9PR0eXl5lXxB5dyFCxfk7u5ergOTu7t7iW4/OztbmZmZ8vDwkIeHR4k+Vn7sdrtLH7842Gw2lx9HoKwpv6/AwFX279+vBg0a5BnWgoKCnJbnzZuntm3bKigoSHa7XfXr19esWbNy3a9mzZrq1q2b1q1bp6ZNm8rT01MNGzbUunXrJElLlixRw4YN5eHhoSZNmuj77793uv/AgQPl7e2tAwcOqGPHjvLy8lJYWJgmTZoky7Ly3afff/9dgwcPVnBwsOx2uxo0aKC5c+fm6jd9+nQ1aNBAlStXVpUqVdS0aVMtXLjwmtvNmWtpWZZmzJghm80mm83mtG79+vUaMWKEgoKCVL16dcd9Z86cqQYNGshutyssLEyxsbG53vJt3bq1brvtNu3YsUOtWrVS5cqVVadOHX300UeSpPXr1ysqKkqenp6qW7euVq9ene+xyMzM1Lhx49SkSRP5+fnJy8tL9957rxITE/O9r/S/5/Lrr79Ws2bN5OHhoVq1aulf//pXrr4HDhxQnz59FBAQoMqVK6t58+b67LPPnPqsW7dONptNixYt0t/+9jfddNNNqly5stLS0hzP+8GDB9WtWzd5e3vrpptu0owZMyRdnj7Ttm1beXl5KSIiItdzderUKT377LNq2LChvL295evrq86dO+uHH34o0L7mZdeuXWrbtq08PT1VvXp1/eMf/1B2dnaufnnNub3e+TVhwgT95S9/kSRFRkY6zqWcebw2m00jR47UggULHOfNypUrHevymmd+4sQJPfTQQ/L19VXVqlX19NNP68KFC47115tneuU286strzm3hXnuP/zwQ7300kuqXr26PDw81K5dO+3bty9XTVcbOHCgatasmas9Z87xlVatWqUWLVrI399f3t7eqlu3rl544YXrHouc8+/3339Xz5495e3trcDAQD377LPKyspy2v7Jkyc1YMAA+fr6yt/fXzExMfrhhx8KNI8357Viw4YNeuKJJ1S1alX5+vrqscce0+nTpx39YmJiVK1aNV28eDHXNjp06KC6des6lnPOl8WLF6t+/fry9PRUdHS0fvzxR0nS22+/rTp16sjDw0OtW7d2mi+eY/PmzerSpYuqVKkiLy8vNWrUSG+88cZ19wV
mYeQWxoiIiFBSUpJ27typ22677bp9Z82apQYNGqhHjx6qWLGiPv30U40YMULZ2dmKjY116rtv3z498sgjeuKJJ/Too4/qlVdeUffu3TV79my98MILGjFihCQpISFBDz30UK63OrOystSpUyc1b95cU6ZM0cqVKzV+/HhdunRJkyZNumaNR48eVfPmzR0v9oGBgfr88881ZMgQpaWlafTo0ZKkd999V0899ZR69+7tCAE7duzQ5s2b9cgjj+S57ZYtW+r//u//NGDAAN1333167LHHcvUZMWKEAgMDNW7cOKWnp0u6/Md34sSJat++vYYPH649e/Zo1qxZ2rJli7755hunt0ZPnz6tbt26qW/fvurTp49mzZqlvn37asGCBRo9erSefPJJPfLII5o6dap69+6tQ4cOycfH55rHIy0tTf/v//0/9evXT0OHDtXZs2c1Z84cdezYUd9++61uv/32a943x759+9S7d28NGTJEMTExmjt3rgYOHKgmTZqoQYMGjuN+99136/z583rqqadUtWpVvffee+rRo4c++ugjPfDAA07b/Pvf/y53d3c9++yzysjIcIx8ZmVlqXPnzmrZsqWmTJmiBQsWaOTIkfLy8tJf//pX9e/fXw8++KBmz56txx57TNHR0YqMjJR0OWAtW7ZMffr0UWRkpI4ePaq3335brVq10k8//aSwsLB89/VKKSkpatOmjS5duqTnn39eXl5eeuedd+Tp6ZnvffM7vx588EH997//1b///W+9/vrrqlatmiQpMDDQsY21a9fqww8/1MiRI1WtWrU8g92VHnroIdWsWVMJCQnatGmT3nzzTZ0+fTrPf0SupyC1Xamwz/3kyZPl5uamZ599VqmpqZoyZYr69++vzZs3F6rOa9m1a5e6deumRo0aadKkSbLb7dq3b5+++eabfO+blZWljh07KioqSq+88opWr16tV199VbVr19bw4cMlXR5F7969u7799lsNHz5c9erV0/LlyxUTE1OoOkeOHCl/f39NmDDB8Zrw66+/Ov4JGDBggP71r3/piy++ULdu3Rz3S0lJ0dq1azV+/Hin7X311Vf65JNPHK/FCQkJ6tatm5577jnNnDlTI0aM0OnTpzVlyhQNHjxYa9euddx31apV6tatm0JDQ/X0008rJCREP//8s1asWKGnn366UPuFcswCDPHll19aFSpUsCpUqGBFR0dbzz33nPXFF19YmZmZufqeP38+V1vHjh2tWrVqObVFRERYkqyNGzc62r744gtLkuXp6Wn9+uuvjva3337bkmQlJiY62mJiYixJ1qhRoxxt2dnZVteuXS13d3fr+PHjjnZJ1vjx4x3LQ4YMsUJDQ60TJ0441dS3b1/Lz8/PsQ/333+/1aBBg3yOTt4kWbGxsU5t8+bNsyRZLVq0sC5duuRoP3bsmOXu7m516NDBysrKcrS/9dZbliRr7ty5jrZWrVpZkqyFCxc62nbv3m1Jstzc3KxNmzY52nOO57x5865b66VLl6yMjAynttOnT1vBwcHW4MGD893XnOdyw4YNTvtkt9utZ555xtE2evRoS5L11VdfOdrOnj1rRUZGWjVr1nTse2JioiXJqlWrVq7zKed5f/nll51q9fT0tGw2m7Vo0aJcx+XK5/7ChQtOx9iyLCs5Odmy2+3WpEmTnNoKcuxy9mnz5s1O++7n52dJspKTkx3trVq1slq1auVYLsj5NXXq1FzbyZHznO/atSvPdVfu9/jx4y1JVo8ePZz6jRgxwpJk/fDDD5ZlXX+/r97m9WqLiIiwYmJiHMuFfe5vvfVWp3PyjTfesCRZP/74Y67HulJMTIwVERGRqz1n/3O8/vrrliSn14mr5XUscs6/K88Vy7KsO+64w2rSpIlj+eOPP7YkWdOmTXO0ZWVlWW3bti3QeZXzWtGkSROn19kpU6ZYkqzly5c7tlm9enXr4Ycfdrr/a6+9ZtlsNuvAgQOONkmW3W53er5yXltDQkKstLQ
0R3t8fLzTc3vp0iUrMjLSioiIsE6fPu30WNnZ2dfdF5iFaQkwxn333aekpCT16NFDP/zwg6ZMmaKOHTvqpptu0ieffOLU98oRq9TUVJ04cUKtWrXSgQMHlJqa6tS3fv36io6OdixHRUVJktq2basaNWrkaj9w4ECu2kaOHOn4OWckNjMz85pvx1uWpY8//ljdu3eXZVk6ceKE49axY0elpqZq27ZtkiR/f3/99ttv2rJlS4GOU0ENHTpUFSpUcCyvXr1amZmZGj16tNPI9NChQ+Xr65vrrVtvb2/17dvXsVy3bl35+/vr1ltvdRwr6frH7UoVKlRwjIpmZ2fr1KlTunTpkpo2beo4FvmpX7++7r33XsdyYGCg6tat6/TY//nPf9SsWTO1aNHCaV+GDRumX375RT/99JPTNmNiYq45Avr44487fvb391fdunXl5eWlhx56yNGec1yurMFutzuOcVZWlk6ePOl4S7qg+3ql//znP2revLmaNWvmtO/9+/fP977FcX61atVK9evXL3D/q989yfkw6H/+858i11AQhX3uBw0a5DRHOefcyu9cLqicKVbLly/PcwpJfp588kmn5XvvvdeptpUrV6pSpUoaOnSoo83NzS3X8c/PsGHDnN61GT58uCpWrOh4vtzc3NS/f3998sknOnv2rKPfggULdPfddzvescjRrl07p9H9nNeIXr16Ob27c/Vrx/fff6/k5GSNHj061/S0q6d7wGyEWxjlrrvu0pIlS3T69Gl9++23io+P19mzZ9W7d2+nP0zffPON2rdvLy8vL/n7+yswMNAxj+3qcHtlgJUkPz8/SVJ4eHie7VfONZMuv7DXqlXLqe2WW26RpDzni0nS8ePHdebMGb3zzjsKDAx0ug0aNEjS/z4kN3bsWHl7e6tZs2a6+eabFRsbW6C3LfNz9R+cX3/9VZKc5sdJlz+AVKtWLcf6HNWrV8/1B8XPz6/Axy0v7733nho1aiQPDw9VrVpVgYGB+uyzz3I9Z9dy9XMpSVWqVHF67F9//TXXPkrSrbfe6lh/pauPUw4PD49cb3/7+fld87hcWUN2drZef/113XzzzbLb7apWrZoCAwO1Y8eOAu/rlX799VfdfPPNudrz2s+rFcf5da1jdC1X11q7dm25ubld8/eluBT2ub/6fKpSpYqkgp3LBfHwww/rnnvu0eOPP67g4GD17dtXH374YYGCbl7nX17nemhoqCpXruzUr06dOoWq8+rny9vbW6GhoU7P12OPPaY//vhDS5culXT5ShVbt27VgAEDcm2vqK+5+/fvl6R8p6XBfIRbGMnd3V133XWXXn75Zc2aNUsXL17U4sWLJV1+AWzXrp1OnDih1157TZ999plWrVqlMWPGSFKuPxxXjl4WpN0qwAfF8pNTw6OPPqpVq1blebvnnnskXf7Du2fPHi1atEgtWrTQxx9/rBYtWuSax1ZYBZmPeT3Ffdzef/99DRw4ULVr19acOXO0cuVKrVq1Sm3bti3wqFZJPGfXOk43sv8vv/yy4uLi1LJlS73//vv64osvtGrVKjVo0KBII3g3ojjOrxs9l67+Z+Bao3BXf1iqpBX1fCpo/Z6entqwYYNWr16tAQMGaMeOHXr44Yd133335buv16rNVerXr68mTZro/fffl3T599nd3d3pXYwcrnjNhVn4QBmM17RpU0nSkSNHJEmffvqpMjIy9MknnziNEBT0U/eFlZ2drQMHDjhGayXpv//9ryRd84M1gYGB8vHxUVZWltq3b5/vY3h5eenhhx/Www8/rMzMTD344IN66aWXFB8fX2yXCYqIiJB0ecTlypHozMxMJScnF6jOG/HRRx+pVq1aWrJkiVM4uNEQf7WIiAjt2bMnV/vu3bsd60vaRx99pDZt2mjOnDlO7WfOnHF8KKowIiIitHfv3lztee1nXvI7v4r7Ld+9e/c6jfbu27dP2dnZjt+XnBHSq6/ScfXIqlS4t6NL67mvUqVKnl8
qkVf9bm5uateundq1a6fXXntNL7/8sv76178qMTHxhn/nIiIilJiYqPPnzzuN3hbkig9X2rt3r9q0aeNYPnfunI4cOaIuXbo49XvssccUFxenI0eOaOHCheratavjuSwOtWvXliTt3LmzxF+PULYxcgtjJCYm5vkffM68r5y3G3P++7+yb2pqqubNm1ditb311luOny3L0ltvvaVKlSqpXbt2efavUKGCevXqpY8//lg7d+7Mtf748eOOn0+ePOm0zt3dXfXr15dlWXleeqeo2rdvL3d3d7355ptOx27OnDlKTU1V165di+2x8pLX87Z582YlJSUV6+N06dJF3377rdN209PT9c4776hmzZqFmjtaVBUqVMh1Li9evFi///57kbbXpUsXbdq0Sd9++62j7fjx41qwYEG+9y3I+ZVzDeTi+hawnEum5cj5hsHOnTtLknx9fVWtWjVt2LDBqd/MmTNzbaswtZXWc1+7dm2lpqZqx44djrYjR4443rLPcerUqVz3zbkqSEZGxg3X0bFjR128eFHvvvuuoy07OzvX8c/PO++84/RaM2vWLF26dMnxfOXo16+fbDabnn76aR04cECPPvroje3AVe68805FRkZq2rRpuZ5vRnf/XBi5hTFGjRql8+fP64EHHlC9evWUmZmpjRs36oMPPlDNmjUdc1U7dOggd3d3de/eXU888YTOnTund999V0FBQY7R3eLk4eGhlStXKiYmRlFRUfr888/12Wef6YUXXrjmJYmky5cZSkxMVFRUlIYOHar69evr1KlT2rZtm1avXu34w9ehQweFhITonnvuUXBwsH7++We99dZb6tq163UvrVVYgYGBio+P18SJE9WpUyf16NFDe/bs0cyZM3XXXXcV+x+qq3Xr1k1LlizRAw88oK5duyo5OVmzZ89W/fr1de7cuWJ7nOeff17//ve/1blzZz311FMKCAjQe++9p+TkZH388cel8gUN3bp106RJkzRo0CDdfffd+vHHH7VgwYJcc7cL6rnnnnN8vfLTTz/tuBRYRESEU8DKS0HOryZNmkiS/vrXv6pv376qVKmSunfvXuQv/khOTlaPHj3UqVMnJSUl6f3339cjjzyixo0bO/o8/vjjmjx5sh5//HE1bdpUGzZscLwjcqXC1FZaz33fvn01duxYPfDAA3rqqad0/vx5zZo1S7fccovTBwYnTZqkDRs2qGvXroqIiNCxY8c0c+ZMVa9e3elDb0XVs2dPNWvWTM8884z27dunevXq6ZNPPnG8thR01DszM1Pt2rVzXApx5syZatGihXr06OHULzAwUJ06ddLixYvl7+9f7P8Qu7m5adasWerevbtuv/12DRo0SKGhodq9e7d27dqlL774olgfD2VYqV+fASghn3/+uTV48GCrXr16lre3t+Xu7m7VqVPHGjVqlHX06FGnvp988onVqFEjy8PDw6pZs6b1z3/+05o7d26uSwZFRERYXbt2zfVYyuMSWjmX5Jk6daqjLSYmxvLy8rL2799vdejQwapcubIVHBxsjR8/PtelnnTVJYwsy7KOHj1qxcbGWuHh4ValSpWskJAQq127dtY777zj6PP2229bLVu2tKpWrWrZ7Xardu3a1l/+8hcrNTU132OW137kXN5ny5Yted7nrbfesurVq2dVqlTJCg4OtoYPH57rsjutWrXK8/JRhTmeV8vOzrZefvllKyIiwrLb7dYdd9xhrVix4pqXVSroY1996SvLsqz9+/dbvXv3tvz9/S0PDw+rWbNm1ooVK5z65FwOavHixbm2mfO85/VYBTkuFy5csJ555hkrNDTU8vT0tO655x4rKSkpV60FvRSYZVnWjh07rFatWlkeHh7WTTfdZP3973+35syZk++lwAp6fv3973+3brrpJsvNzc1pm9d7bq8+53MuhfXTTz9ZvXv3tnx8fKwqVapYI0eOtP744w+n+54/f94aMmSI5efnZ/n4+FgPPfSQdezYsTx/j65
V29WXArOsG3vuC/N8fPnll9Ztt91mubu7W3Xr1rXef//9XJcCW7NmjXX//fdbYWFhlru7uxUWFmb169fP+u9//3vdx7zW+Xf19i3Lso4fP2498sgjlo+Pj+Xn52cNHDjQ+uabbyxJTpesy0vOa8X69eutYcOGWVWqVLG8vb2t/v37WydPnszzPh9++KElyRo2bFie6wv62mpZ134evv76a+u+++6zfHx8LC8vL6tRo0bW9OnTr7svMIvNshirB0rKwIED9dFHHxXryCIAlKRly5bpgQce0Ndff+344Gpe5s+fr0GDBmnLli2OzzbkZ/ny5erZs6c2bNjgdFk+oDgx5xYAgD+pP/74w2k5KytL06dPl6+vr+68885if7x3331XtWrVKpZpFcC1MOcWAIA/qVGjRumPP/5QdHS0MjIytGTJEm3cuFEvv/zyDV/C7UqLFi3Sjh079Nlnn+mNN97gSxVQogi3AAD8SbVt21avvvqqVqxYoQsXLqhOnTqaPn2607cqFod+/frJ29tbQ4YM0YgRI4p128DVmHMLAAAAYzDnFgAAAMYg3AIAAMAYzLnV5W9kOXz4sHx8fJjkDgAAUAZZlqWzZ88qLCzsul+qQriVdPjwYYWHh7u6DAAAAOTj0KFDql69+jXXE24lx1dIHjp0SL6+vi6uBgAAAFdLS0tTeHh4vl8tT7jV/74/29fXl3ALAABQhuU3hZQPlAEAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGNUdOWDJyQkaMmSJdq9e7c8PT11991365///Kfq1q3r6HPhwgU988wzWrRokTIyMtSxY0fNnDlTwcHBjj4HDx7U8OHDlZiYKG9vb8XExCghIUEVK7p09wCg8Cb4ubqC3CakuroCACgwl47crl+/XrGxsdq0aZNWrVqlixcvqkOHDkpPT3f0GTNmjD799FMtXrxY69ev1+HDh/Xggw861mdlZalr167KzMzUxo0b9d5772n+/PkaN26cK3YJAAAALmSzLMtydRE5jh8/rqCgIK1fv14tW7ZUamqqAgMDtXDhQvXu3VuStHv3bt16661KSkpS8+bN9fnnn6tbt246fPiwYzR39uzZGjt2rI4fPy53d/d8HzctLU1+fn5KTU2Vr69vie4jAFwXI7cAkKeC5rUyNec2NfXyC2hAQIAkaevWrbp48aLat2/v6FOvXj3VqFFDSUlJkqSkpCQ1bNjQaZpCx44dlZaWpl27dpVi9QAAAHC1MjMpNTs7W6NHj9Y999yj2267TZKUkpIid3d3+fv7O/UNDg5WSkqKo8+VwTZnfc66vGRkZCgjI8OxnJaWVly7AQAAABcqMyO3sbGx2rlzpxYtWlTij5WQkCA/Pz/HLTw8vMQfEwAAACWvTITbkSNHasWKFUpMTFT16tUd7SEhIcrMzNSZM2ec+h89elQhISGOPkePHs21PmddXuLj45Wamuq4HTp0qBj3BgAAAK7i0nBrWZZGjhyppUuXau3atYqMjHRa36RJE1WqVElr1qxxtO3Zs0cHDx5UdHS0JCk6Olo//vijjh075uizatUq+fr6qn79+nk+rt1ul6+vr9MNAAAA5Z9L59zGxsZq4cKFWr58uXx8fBxzZP38/OTp6Sk/Pz8NGTJEcXFxCggIkK+vr0aNGqXo6Gg1b95cktShQwfVr19fAwYM0JQpU5SSkqK//e1vio2Nld1ud+XuAQAAoJS5NNzOmjVLktS6dWun9nnz5mngwIGSpNdff11ubm7q1auX05c45KhQoYJWrFih4cOHKzo6Wl5eXoqJidGkSZNKazcAAABQRpSp69y6Cte5BVBmcJ1bAMhTubzOLQAAAHAjCLcAAAAwBuEWAAAAxiD
cAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMEZFVxcAAC4xwc/VFQAASgAjtwAAADCGS8Pthg0b1L17d4WFhclms2nZsmVO6202W563qVOnOvrUrFkz1/rJkyeX8p4AAACgLHBpuE1PT1fjxo01Y8aMPNcfOXLE6TZ37lzZbDb16tXLqd+kSZOc+o0aNao0ygcAAEAZ49I5t507d1bnzp2vuT4kJMRpefny5WrTpo1q1arl1O7j45OrLwAAAP58ys2c26NHj+qzzz7TkCFDcq2bPHmyqlatqjvuuENTp07VpUuXrrutjIwMpaWlOd0AAABQ/pWbqyW899578vHx0YMPPujU/tRTT+nOO+9UQECANm7cqPj4eB05ckSvvfbaNbeVkJCgiRMnlnTJAAAAKGU2y7IsVxchXf7w2NKlS9WzZ88819erV0/33Xefpk+fft3tzJ07V0888YTOnTsnu92eZ5+MjAxlZGQ4ltPS0hQeHq7U1FT5+voWeR8AlCNcCqzgJqS6ugIAUFpamvz8/PLNa+Vi5Parr77Snj179MEHH+TbNyoqSpcuXdIvv/yiunXr5tnHbrdfM/gCAACg/CoXc27nzJmjJk2aqHHjxvn23b59u9zc3BQUFFQKlQEAAKAscenI7blz57Rv3z7HcnJysrZv366AgADVqFFD0uUh6MWLF+vVV1/Ndf+kpCRt3rxZbdq0kY+Pj5KSkjRmzBg9+uijqlKlSqntBwAAAMoGl4bb7777Tm3atHEsx8XFSZJiYmI0f/58SdKiRYtkWZb69euX6/52u12LFi3ShAkTlJGRocjISI0ZM8axHQAAAPy5lJkPlLlSQScoAzAIHygrOD5QBqAMKGheKxdzbgEAAICCINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABjDpeF2w4YN6t69u8LCwmSz2bRs2TKn9QMHDpTNZnO6derUyanPqVOn1L9/f/n6+srf319DhgzRuXPnSnEvAAAAUFa4NNymp6ercePGmjFjxjX7dOrUSUeOHHHc/v3vfzut79+/v3bt2qVVq1ZpxYoV2rBhg4YNG1bSpQMAAKAMqujKB+/cubM6d+583T52u10hISF5rvv555+1cuVKbdmyRU2bNpUkTZ8+XV26dNErr7yisLCwYq8ZAP50Jvi5uoK8TUh1dQUAyqAyP+d23bp1CgoKUt26dTV8+HCdPHnSsS4pKUn+/v6OYCtJ7du3l5ubmzZv3uyKcgEAAOBCLh25zU+nTp304IMPKjIyUvv379cLL7ygzp07KykpSRUqVFBKSoqCgoKc7lOxYkUFBAQoJSXlmtvNyMhQRkaGYzktLa3E9gEAAAClp0yH275
9+zp+btiwoRo1aqTatWtr3bp1ateuXZG3m5CQoIkTJxZHiQAAAChDyvy0hCvVqlVL1apV0759+yRJISEhOnbsmFOfS5cu6dSpU9ecpytJ8fHxSk1NddwOHTpUonUDAACgdJSrcPvbb7/p5MmTCg0NlSRFR0frzJkz2rp1q6PP2rVrlZ2draioqGtux263y9fX1+kGAACA8s+l0xLOnTvnGIWVpOTkZG3fvl0BAQEKCAjQxIkT1atXL4WEhGj//v167rnnVKdOHXXs2FGSdOutt6pTp04aOnSoZs+erYsXL2rkyJHq27cvV0oAAAD4E3LpyO13332nO+64Q3fccYckKS4uTnfccYfGjRunChUqaMeOHerRo4duueUWDRkyRE2aNNFXX30lu93u2MaCBQtUr149tWvXTl26dFGLFi30zjvvuGqXAAAA4EI2y7IsVxfhamlpafLz81NqaipTFIA/i7J67VYUHNe5Bf5UCprXytWcWwAAAOB6CLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGCMiq4uAMCfwAQ/V1cAAPiTYOQWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjuDTcbtiwQd27d1dYWJhsNpuWLVvmWHfx4kWNHTtWDRs2lJeXl8LCwvTYY4/p8OHDTtuoWbOmbDab023y5MmlvCcAAAAoC1wabtPT09W4cWPNmDEj17rz589r27ZtevHFF7Vt2zYtWbJEe/bsUY8ePXL1nTRpko4cOeK4jRo1qjTKBwAAQBnj0kuBde7cWZ07d85znZ+fn1atWuXU9tZbb6lZs2Y6ePCgatSo4Wj38fFRSEhIidYKAACAsq9czblNTU2VzWaTv7+/U/vkyZNVtWpV3XHHHZo6daouXbp03e1kZGQoLS3N6QYAAIDyr9x8icOFCxc0duxY9evXT76+vo72p556SnfeeacCAgK0ceNGxcfH68iRI3rttdeuua2EhARNnDixNMoGAABAKbJZlmW5ughJstlsWrp0qXr27Jlr3cWLF9WrVy/99ttvWrdunVO4vdrcuXP1xBNP6Ny5c7Lb7Xn2ycjIUEZGhmM5LS1N4eHhSk1Nve62ARQR31CGkjAh1dUVAChFaWlp8vPzyzevlfmR24sXL+qhhx7Sr7/+qrVr1+YbPqOionTp0iX98ssvqlu3bp597Hb7NYMvAAAAyq8yHW5zgu3evXuVmJioqlWr5nuf7du3y83NTUFBQaVQIQAAAMoSl4bbc+fOad++fY7l5ORkbd++XQEBAQoNDVXv3r21bds2rVixQllZWUpJSZEkBQQEyN3dXUlJSdq8ebPatGkjHx8fJSUlacyYMXr00UdVpUoVV+0WAAAAXMSlc27XrVunNm3a5GqPiYnRhAkTFBkZmef9EhMT1bp1a23btk0jRozQ7t27lZGRocjISA0YMEBxcXGFmnZQ0DkcAIqIObcoCcy5Bf5UysWc29atW+t62Tq/3H3nnXdq06ZNxV0WAAAAyqlydZ1bAAAA4HoItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxihRu27ZtqzNnzuRqT0tLU9u2bW+0JgAAAKBIihRu161
bp8zMzFztFy5c0FdffXXDRQEAAABFUbEwnXfs2OH4+aefflJKSopjOSsrSytXrtRNN91UfNUBAAAAhVCocHv77bfLZrPJZrPlOf3A09NT06dPL7biAAAAgMIoVLhNTk6WZVmqVauWvv32WwUGBjrWubu7KygoSBUqVCj2IgEAAICCKFS4jYiIkCRlZ2eXSDEAAADAjShUuL3S3r17lZiYqGPHjuUKu+PGjbvhwgAAuK4Jfq6uIG8TUl1dAfCnVqRw++6772r48OGqVq2aQkJCZLPZHOtsNhvhFgAAAC5RpHD7j3/8Qy+99JLGjh1b3PUAAAAARVak69yePn1affr0Ke5aAAAAgBtSpHDbp08fffnll8VdCwAAAHBDijQtoU6dOnrxxRe1adMmNWzYUJUqVXJa/9RTTxVLcQAAAEBh2CzLsgp7p8jIyGtv0GbTgQMHbqio0paWliY/Pz+lpqbK19fX1eUA5imrn2oHSgJXSwBKREHzWpFGbpOTk4tcGAAAAFBSijTnFgAAACiLijRyO3jw4Ouunzt3bpGKAQAAAG5EkcLt6dOnnZYvXryonTt36syZM2rbtm2xFAYAAAAUVpHC7dKlS3O1ZWdna/jw4apdu/YNFwUAAAAURbHNuXVzc1NcXJxef/314tokAAAAUCjF+oGy/fv369KlS8W5SQAAAKDAijQtIS4uzmnZsiwdOXJEn332mWJiYoqlMAAAAKCwihRuv//+e6dlNzc3BQYG6tVXX833SgoAAABASSlSuE1MTCzuOgAAAIAbVqRwm+P48ePas2ePJKlu3boKDAwslqIAAACAoijSB8rS09M1ePBghYaGqmXLlmrZsqXCwsI0ZMgQnT9/vrhrBAAAAAqkSOE2Li5O69ev16effqozZ87ozJkzWr58udavX69nnnmmuGsEAAAACqRI4fbjjz/WnDlz1LlzZ/n6+srX11ddunTRu+++q48++qjA29mwYYO6d++usLAw2Ww2LVu2zGm9ZVkaN26cQkND5enpqfbt22vv3r1OfU6dOqX+/fvL19dX/v7+GjJkiM6dO1eU3QIAAEA5V6Rwe/78eQUHB+dqDwoKKtS0hPT0dDVu3FgzZszIc/2UKVP05ptvavbs2dq8ebO8vLzUsWNHXbhwwdGnf//+2rVrl1atWqUVK1Zow4YNGjZsWOF3CgAAAOWezbIsq7B3ateunapWrap//etf8vDwkCT98ccfiomJ0alTp7R69erCF2KzaenSperZs6eky6O2YWFheuaZZ/Tss89KklJTUxUcHKz58+erb9+++vnnn1W/fn1t2bJFTZs2lSStXLlSXbp00W+//aawsLACPXZaWpr8/PyUmpoqX1/fQtcOIB8T/FxdAVB6JqS6ugLASAXNa0W6WsK0adPUqVMnVa9eXY0bN5Yk/fDDD7Lb7fryyy+LVvFVkpOTlZKSovbt2zva/Pz8FBUVpaSkJPXt21dJSUny9/d3BFtJat++vdzc3LR582Y98MADeW47IyNDGRkZjuW0tLRiqRkAAACuVaRw27BhQ+3du1cLFizQ7t27JUn9+vVT//795enpWSyFpaSkSFKu6Q/BwcGOdSkpKQoKCnJaX7FiRQUEBDj65CUhIUETJ04sljoBAABQdhQp3CYkJCg4OFhDhw51ap87d66OHz+usWPHFktxJSU+Pt7pK4TT0tIUHh7uwooAAABQHIr0gbK3335b9erVy9XeoEEDzZ49+4aLkqSQkBBJ0tGjR53ajx496lgXEhKiY8eOOa2/dOmSTp065eiTF7vd7rjKQ84NAAAA5V+Rwm1KSopCQ0NztQcGBurIkSM3XJQkRUZGKiQkRGvWrHG0paWlafPmzYqOjpYkRUdH68yZM9q6daujz9q1a5Wdna2oqKhiqQMAAADlR5GmJYSHh+ubb75RZGSkU/s333xT4CsUSNK5c+e0b98+x3JycrK2b9+ugIAA1ahRQ6NHj9Y//vEP3XzzzYqMjNSLL76
osLAwxxUVbr31VnXq1ElDhw7V7NmzdfHiRY0cOVJ9+/YtVB0AAAAwQ5HC7dChQzV69GhdvHhRbdu2lSStWbNGzz33XKG+oey7775TmzZtHMs582BjYmI0f/58Pffcc0pPT9ewYcN05swZtWjRQitXrnRcfkySFixYoJEjR6pdu3Zyc3NTr1699OabbxZltwAAAFDOFek6t5Zl6fnnn9ebb76pzMxMSZKHh4fGjh2rcePGFXuRJY3r3AIljOvc4s+E69wCJaKgea1I4TbHuXPn9PPPP8vT01M333yz7HZ7UTflUoRboIQRbvFnQrgFSkSJfolDDm9vb9111103sgkAAACg2BTpagkAAABAWUS4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxR5sNtzZo1ZbPZct1iY2MlSa1bt8617sknn3Rx1QAAAHCFiq4uID9btmxRVlaWY3nnzp2677771KdPH0fb0KFDNWnSJMdy5cqVS7VGAAAAlA1lPtwGBgY6LU+ePFm1a9dWq1atHG2VK1dWSEhIaZcGAACAMqbMh9srZWZm6v3331dcXJxsNpujfcGCBXr//fcVEhKi7t2768UXX2T0Fn9OE/xcXQEAAC5VrsLtsmXLdObMGQ0cONDR9sgjjygiIkJhYWHasWOHxo4dqz179mjJkiXX3E5GRoYyMjIcy2lpaSVZNgAAAEpJuQq3c+bMUefOnRUWFuZoGzZsmOPnhg0bKjQ0VO3atdP+/ftVu3btPLeTkJCgiRMnlni9AAAAKF1l/moJOX799VetXr1ajz/++HX7RUVFSZL27dt3zT7x8fFKTU113A4dOlSstQIAAMA1ys3I7bx58xQUFKSuXbtet9/27dslSaGhodfsY7fbZbfbi7M8AAAAlAHlItxmZ2dr3rx5iomJUcWK/yt5//79Wrhwobp06aKqVatqx44dGjNmjFq2bKlGjRq5sGIAAAC4QrkIt6tXr9bBgwc1ePBgp3Z3d3etXr1a06ZNU3p6usLDw9WrVy/97W9/c1GlAAAAcKVyEW47dOggy7JytYeHh2v9+vUuqAgAAABlUbn5QBkAAACQH8ItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGqOjqAgAAMMoEP1dXkNuEVFdXAJQaRm4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYZTrcTpgwQTabzelWr149x/oLFy4oNjZWVatWlbe3t3r16qWjR4+6sGIAAAC4UpkOt5LUoEEDHTlyxHH7+uuvHevGjBmjTz/9VIsXL9b69et1+PBhPfjggy6sFgAAAK5U0dUF5KdixYoKCQnJ1Z6amqo5c+Zo4cKFatu2rSRp3rx5uvXWW7Vp0yY1b968tEsFAACAi5X
5kdu9e/cqLCxMtWrVUv/+/XXw4EFJ0tatW3Xx4kW1b9/e0bdevXqqUaOGkpKSrrvNjIwMpaWlOd0AAABQ/pXpcBsVFaX58+dr5cqVmjVrlpKTk3Xvvffq7NmzSklJkbu7u/z9/Z3uExwcrJSUlOtuNyEhQX5+fo5beHh4Ce4FAAAASkuZnpbQuXNnx8+NGjVSVFSUIiIi9OGHH8rT07PI242Pj1dcXJxjOS0tjYALAABggDI9cns1f39/3XLLLdq3b59CQkKUmZmpM2fOOPU5evRonnN0r2S32+Xr6+t0AwAAQPlXrsLtuXPntH//foWGhqpJkyaqVKmS1qxZ41i/Z88eHTx4UNHR0S6sEgAAAK5SpqclPPvss+revbsiIiJ0+PBhjR8/XhUqVFC/fv3k5+enIUOGKC4uTgEBAfL19dWoUaMUHR3NlRIAAAD+pMp0uP3tt9/Ur18/nTx5UoGBgWrRooU2bdqkwMBASdLrr78uNzc39erVSxkZGerYsaNmzpzp4qoBAADgKjbLsixXF+FqaWlp8vPzU2pqKvNvUb5N8HN1BQDKogmprq4AuGEFzWvlas4tAAAAcD2EWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGNUdHUBQLk1wc/VFQAAgKswcgsAAABjEG4BAABgDMItAAAAjMGcWwAATFdWPyMwIdXVFcBAjNwCAADAGIRbAAAAGINwCwAAAGOU6XCbkJCgu+66Sz4+PgoKClLPnj21Z88epz6tW7eWzWZzuj355JMuqhgAAACuVKbD7fr16xUbG6tNmzZp1apVunjxojp06KD09HSnfkOHDtWRI0cctylTprioYgAAALhSmb5awsqVK52W58+fr6CgIG3dulUtW7Z0tFeuXFkhISGlXR4AAADKmDI9cnu11NTLlwwJCAhwal+wYIGqVaum2267TfHx8Tp//rwrygMAAICLlemR2ytlZ2dr9OjRuueee3Tbbbc52h955BFFREQoLCxMO3bs0NixY7Vnzx4tWbLkmtvKyMhQRkaGYzktLa1EawcAAEDpKDfhNjY2Vjt37tTXX3/t1D5s2DDHzw0bNlRoaKjatWun/fv3q3bt2nluKyEhQRMnTizRegEAAFD6ysW0hJEjR2rFihVKTExU9erVr9s3KipKkrRv375r9omPj1dqaqrjdujQoWKtFwAAAK5RpkduLcvSqFGjtHTpUq1bt06RkZH53mf79u2SpNDQ0Gv2sdvtstvtxVUmAAAAyogyHW5jY2O1cOFCLV++XD4+PkpJSZEk+fn5ydPTU/v379fChQvVpUsXVa1aVTt27NCYMWPUsmVLNWrUyMXVAwAAoLSV6XA7a9YsSZe/qOFK8+bN08CBA+Xu7q7Vq1dr2rRpSk9PV3h4uHr16qW//e1vLqgWAAAArlamw61lWdddHx4ervXr15dSNQAAACjrysUHygAAAICCINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjFHR1QUA+Zrg5+oKAABAOcHILQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDS4EBAADXKIuXepy
Q6uoKcIMYuQUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAGX7/rKnzlIAAAZU9Z/Pss8Te6EBi5BQAAgDEYucX/lNX/VgEAAAqIkVsAAAAYw5iR2xkzZmjq1KlKSUlR48aNNX36dDVr1szVZQEAANy4svjuahmdB2zEyO0HH3yguLg4jR8/Xtu2bVPjxo3VsWNHHTt2zNWlAQAAoBQZEW5fe+01DR06VIMGDVL9+vU1e/ZsVa5cWXPnznV1aQAAAChF5X5aQmZmprZu3ar4+HhHm5ubm9q3b6+kpKQ875ORkaGMjAzHcmrq5WH1tLS0ki3WqQir9B4LAACguJVmbtL/cpplXT9Dlftwe+LECWVlZSk4ONipPTg4WLt3787zPgkJCZo4cWKu9vDw8BKpEQAAwDiTXTMP+OzZs/Lzu/Zjl/twWxTx8fGKi4tzLGdnZ+vUqVOqWrWqbDabCysrfWlpaQoPD9ehQ4fk6+vr6nLKJY5h8eA4Fg+OY/HgOBYPjuON4xj+j2VZOnv2rMLCwq7br9yH22rVqqlChQo6evSoU/vRo0cVEhKS533sdrvsdrtTm7+/f0mVWC74+vr+6X9pbhTHsHhwHIsHx7F4cByLB8fxxnEML7veiG2Ocv+BMnd3dzVp0kRr1qxxtGVnZ2vNmjWKjo52YWUAAAAobeV+5FaS4uLiFBMTo6ZNm6pZs2aaNm2a0tPTNWjQIFeXBgAAgFJkRLh9+OGHdfz4cY0bN04pKSm6/fbbtXLlylwfMkNudrtd48ePzzVNAwXHMSweHMfiwXEsHhzH4sFxvHEcw8KzWfldTwEAAAAoJ8r9nFsAAAAgB+EWAAAAxiDcAgAAwBiEWwAAABiDcAuHHj16qEaNGvLw8FBoaKgGDBigw4cPu7qscuWXX37RkCFDFBkZKU9PT9WuXVvjx49XZmamq0srV1566SXdfffdqly58p/+C1YKY8aMGapZs6Y8PDwUFRWlb7/91tUllTsbNmxQ9+7dFRYWJpvNpmXLlrm6pHInISFBd911l3x8fBQUFKSePXtqz549ri6r3Jk1a5YaNWrk+PKG6Ohoff75564uq1wg3MKhTZs2+vDDD7Vnzx59/PHH2r9/v3r37u3qssqV3bt3Kzs7W2+//bZ27dql119/XbNnz9YLL7zg6tLKlczMTPXp00fDhw93dSnlxgcffKC4uDiNHz9e27ZtU+PGjdWxY0cdO3bM1aWVK+np6WrcuLFmzJjh6lLKrfXr1ys2NlabNm3SqlWrdPHiRXXo0EHp6emuLq1cqV69uiZPnqytW7fqu+++U9u2bXX//fdr165dri6tzONSYLimTz75RD179lRGRoYqVark6nLKralTp2rWrFk6cOCAq0spd+bPn6/Ro0frzJkzri6lzIuKitJdd92lt956S9Llb2oMDw/XqFGj9Pzzz7u4uvLJZrNp6dKl6tmzp6tLKdeOHz+uoKAgrV+/Xi1btnR1OeVaQECApk6dqiFDhri6lDKNkVvk6dSpU1qwYIHuvvtugu0NSk1NVUBAgKvLgMEyMzO1detWtW/f3tHm5uam9u3bKykpyYWVAZdfAyXxOngDsrKytGjRIqWnpys6OtrV5ZR5hFs4GTt2rLy8vFS1alUdPHhQy5cvd3VJ5dq+ffs0ffp0PfHEE64uBQY7ceKEsrKycn0rY3BwsFJSUlxUFXD5HYTRo0frnnvu0W233ebqcsqdH3/8Ud7e3rLb7XryySe1dOlS1a9f39VllXmEW8M9//zzstls173t3r3b0f8vf/mLvv/+e3355ZeqUKGCHnvsMTFzpfDHUZJ+//13derUSX369NHQoUNdVHnZUZRjCKB8i42N1c6dO7Vo0SJXl1Iu1a1bV9u3b9fmzZs1fPhwxcTE6KeffnJ1WWUec24Nd/z4cZ08efK6fWrVqiV3d/dc7b/99pvCw8O
1cePGP/3bIIU9jocPH1br1q3VvHlzzZ8/X25u/B9ZlHORObcFk5mZqcqVK+ujjz5ymh8aExOjM2fO8A5METHn9saMHDlSy5cv14YNGxQZGenqcozQvn171a5dW2+//barSynTKrq6AJSswMBABQYGFum+2dnZkqSMjIziLKlcKsxx/P3339WmTRs1adJE8+bNI9j+/27kXMT1ubu7q0mTJlqzZo0jiGVnZ2vNmjUaOXKka4vDn45lWRo1apSWLl2qdevWEWyLUXZ2Nn+TC4BwC0nS5s2btWXLFrVo0UJVqlTR/v379eKLL6p27dp/+lHbwvj999/VunVrRURE6JVXXtHx48cd60JCQlxYWfly8OBBnTp1SgcPHlRWVpa2b98uSapTp468vb1dW1wZFRcXp5iYGDVt2lTNmjXTtGnTlJ6erkGDBrm6tHLl3Llz2rdvn2M5OTlZ27dvV0BAgGrUqOHCysqP2NhYLVy4UMuXL5ePj49j3refn588PT1dXF35ER8fr86dO6tGjRo6e/asFi5cqHXr1umLL75wdWllnwVYlrVjxw6rTZs2VkBAgGW3262aNWtaTz75pPXbb7+5urRyZd68eZakPG8ouJiYmDyPYWJioqtLK9OmT59u1ahRw3J3d7eaNWtmbdq0ydUllTuJiYl5nnsxMTGuLq3cuNZr4Lx581xdWrkyePBgKyIiwnJ3d7cCAwOtdu3aWV9++aWryyoXmHMLAAAAYzAZEAAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABj/H/TPNJABGeeJwAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArcAAAIQCAYAAACbhEYhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA/k0lEQVR4nO3dd3RUdf7/8dcEyCSEFAKpEkJABUFApEQU6dJBlCJFDYKgNIVYMO6uBHY1fMGCS1V/FHeFZS00YUXpUQlFECMoLAEUVHqZQJAEkvv7w5NZhgRSSDLJh+fjnDkn93PLvG/J5JXPfOaOzbIsSwAAAIABPNxdAAAAAFBUCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEIt0ApYbPZFB8fX6LPeezYMfXu3VtVqlSRzWbT1KlTS/T5cWN++ukn2Ww2zZ8/v1Drz58/XzabTT/99JOzrXXr1mrdunWR1JeXq6/5+Ph42Ww2nTx5skSev0aNGho0aFCJPFdxudFrADAR4RZG+f7779W7d29FRkbKy8tLt9xyix544AFNmzbN3aWVSmPHjtXnn3+uuLg4/fOf/1SnTp3cXRLKoE2bNik+Pl5nz551dyk5lObaABSP8u4uACgqmzZtUps2bVS9enUNHTpUoaGhOnz4sDZv3qy3335bo0ePdneJpc66dev04IMP6vnnn3d3KSglvvjiiwKvs2nTJk2YMEGDBg1SQEBAvtf7/fffVb588f4Zul5te/fulYdH2e7jiYyM1O+//64KFSq4uxSg1CDcwhivvvqq/P39tW3bthx/xI4fP+6eokq548eP5yuMpKWlycfHp/gLKuMuXrwoT0/PMh2YPD09i3X7WVlZysjIkJeXl7y8vIr1ufJit9vd+vxFwWazuf04AqVN2X0FBq6yf/9+1atXL9ewFhwc7DI9b948tW3bVsHBwbLb7apbt65mzZqVY70aNWqoW7du2rBhg5o0aSJvb2/Vr19fGzZskCQtXrxY9evXl5eXlxo3bqxvv/3WZf1BgwapUqVKOnDggDp27CgfHx+Fh4dr4sSJsiwr
z3369ddfNXjwYIWEhMhut6tevXqaO3dujuWmTZumevXqqWLFiqpcubKaNGmihQsXXnO72WMtLcvSjBkzZLPZZLPZXOZt3LhRI0aMUHBwsKpVq+Zcd+bMmapXr57sdrvCw8M1cuTIHG/5tm7dWnfeeaeSk5PVqlUrVaxYUbfeeqs+/vhjSdLGjRsVHR0tb29v1a5dW2vWrMnzWGRkZOiVV15R48aN5e/vLx8fH91///1av359nutK/zuXX331lZo1ayYvLy/VrFlT//jHP3Ise+DAAfXp00eBgYGqWLGi7rnnHq1cudJlmQ0bNshms2nRokX685//rFtuuUUVK1ZUamqq87wfOnRI3bp1U6VKlXTLLbdoxowZkv4YPtO2bVv5+PgoMjIyx7k6ffq0nn/+edWvX1+VKlWSn5+fOnfurO+++y5f+5qb3bt3q23btvL29la1atX0t7/9TVlZWTmWy23M7fWur/j4eL3wwguSpKioKOe1lD2O12azadSoUVqwYIHzulm1apVzXm7jzE+ePKm+ffvKz89PVapU0bPPPquLFy86519vnOmV28yrttzG3Bbk3H/44Yd69dVXVa1aNXl5ealdu3ZKSUnJUdPVBg0apBo1auRozx5zfKXVq1erRYsWCggIUKVKlVS7dm29/PLL1z0W2dffr7/+qp49e6pSpUoKCgrS888/r8zMTJftnzp1So899pj8/PwUEBCgmJgYfffdd/kax5v9WpGYmKinnnpKVapUkZ+fnx5//HGdOXPGuVxMTIyqVq2qS5cu5dhGhw4dVLt2bed09vXy0UcfqW7duvL29lbz5s31/fffS5Leeecd3XrrrfLy8lLr1q1dxotn27Jli7p06aLKlSvLx8dHDRo00Ntvv33dfYFZ6LmFMSIjI5WUlKRdu3bpzjvvvO6ys2bNUr169dSjRw+VL19en376qUaMGKGsrCyNHDnSZdmUlBQNGDBATz31lB599FG9/vrr6t69u2bPnq2XX35ZI0aMkCQlJCSob9++Od7qzMzMVKdOnXTPPfdo8uTJWrVqlcaPH6/Lly9r4sSJ16zx2LFjuueee5wv9kFBQfrss880ZMgQpaamasyYMZKk9957T88884x69+7tDAHJycnasmWLBgwYkOu2W7ZsqX/+85967LHH9MADD+jxxx/PscyIESMUFBSkV155RWlpaZL++OM7YcIEtW/fXsOHD9fevXs1a9Ysbdu2TV9//bXLW6NnzpxRt27d1K9fP/Xp00ezZs1Sv379tGDBAo0ZM0ZPP/20BgwYoClTpqh37946fPiwfH19r3k8UlNT9f/+3/9T//79NXToUJ07d05z5sxRx44dtXXrVt11113XXDdbSkqKevfurSFDhigmJkZz587VoEGD1LhxY9WrV8953O+9915duHBBzzzzjKpUqaL3339fPXr00Mcff6yHHnrIZZt//etf5enpqeeff17p6enOns/MzEx17txZLVu21OTJk7VgwQKNGjVKPj4++tOf/qSBAwfq4Ycf1uzZs/X444+refPmioqKkvRHwFq6dKn69OmjqKgoHTt2TO+8845atWqlH374QeHh4Xnu65WOHj2qNm3a6PLly3rppZfk4+Ojd999V97e3nmum9f19fDDD+u///2v/vWvf+mtt95S1apVJUlBQUHObaxbt04ffvihRo0apapVq+Ya7K7Ut29f1ahRQwkJCdq8ebP+/ve/68yZM7n+I3I9+antSgU995MmTZKHh4eef/55ORwOTZ48WQMHDtSWLVsKVOe17N69W926dVODBg00ceJE2e12paSk6Ouvv85z3czMTHXs2FHR0dF6/fXXtWbNGr3xxhuqVauWhg8fLumPXvTu3btr69atGj58uOrUqaNly5YpJiamQHWOGjVKAQEBio+Pd74m/Pzzz85/Ah577DH94x//0Oeff65u3bo51zt69KjWrVun8ePHu2zvyy+/1PLly52vxQkJCerWrZtefPFF
zZw5UyNGjNCZM2c0efJkDR48WOvWrXOuu3r1anXr1k1hYWF69tlnFRoaqh9//FErVqzQs88+W6D9QhlmAYb44osvrHLlylnlypWzmjdvbr344ovW559/bmVkZORY9sKFCznaOnbsaNWsWdOlLTIy0pJkbdq0ydn2+eefW5Isb29v6+eff3a2v/POO5Yka/369c62mJgYS5I1evRoZ1tWVpbVtWtXy9PT0zpx4oSzXZI1fvx45/SQIUOssLAw6+TJky419evXz/L393fuw4MPPmjVq1cvj6OTO0nWyJEjXdrmzZtnSbJatGhhXb582dl+/Phxy9PT0+rQoYOVmZnpbJ8+fbolyZo7d66zrVWrVpYka+HChc62PXv2WJIsDw8Pa/Pmzc727OM5b96869Z6+fJlKz093aXtzJkzVkhIiDV48OA89zX7XCYmJrrsk91ut5577jln25gxYyxJ1pdffulsO3funBUVFWXVqFHDue/r16+3JFk1a9bMcT1ln/fXXnvNpVZvb2/LZrNZixYtynFcrjz3Fy9edDnGlmVZBw8etOx2uzVx4kSXtvwcu+x92rJli8u++/v7W5KsgwcPOttbtWpltWrVyjmdn+trypQpObaTLfuc7969O9d5V+73+PHjLUlWjx49XJYbMWKEJcn67rvvLMu6/n5fvc3r1RYZGWnFxMQ4pwt67u+44w6Xa/Ltt9+2JFnff/99jue6UkxMjBUZGZmjPXv/s7311luWJJfXiavldiyyr78rrxXLsqxGjRpZjRs3dk5/8sknliRr6tSpzrbMzEyrbdu2+bqusl8rGjdu7PI6O3nyZEuStWzZMuc2q1WrZj3yyCMu67/55puWzWazDhw44GyTZNntdpfzlf3aGhoaaqWmpjrb4+LiXM7t5cuXraioKCsyMtI6c+aMy3NlZWVdd19gFoYlwBgPPPCAkpKS1KNHD3333XeaPHmyOnbsqFtuuUXLly93WfbKHiuHw6GTJ0+qVatWOnDggBwOh8uydevWVfPmzZ3T0dHRkqS2bduqevXqOdoPHDiQo7ZRo0Y5f87uic3IyLjm2/GWZemTTz5R9+7dZVmWTp486Xx07NhRDodDO3bskCQFBATol19+0bZt2/J1nPJr6NChKleunHN6zZo1ysjI0JgxY1x6pocOHSo/P78cb91WqlRJ/fr1c07Xrl1bAQEBuuOOO5zHSrr+cbtSuXLlnL2iWVlZOn36tC5fvqwmTZo4j0Ve6tatq/vvv985HRQUpNq1a7s893/+8x81a9ZMLVq0cNmXYcOG6aefftIPP/zgss2YmJhr9oA++eSTzp8DAgJUu3Zt+fj4qG/fvs727ONyZQ12u915jDMzM3Xq1CnnW9L53dcr/ec//9E999yjZs2auez7wIED81y3KK6vVq1aqW7duvle/up3T7I/DPqf//yn0DXkR0HP/RNPPOEyRjn72srrWs6v7CFWy5Yty3UISV6efvppl+n777/fpbZVq1apQoUKGjp0qLPNw8Mjx/HPy7Bhw1zetRk+fLjKly/vPF8eHh4aOHCgli9frnPnzjmXW7Bgge69917nOxbZ2rVr59K7n/0a0atXL5d3d65+7fj222918OBBjRkzJsfwtKuHe8BshFsYpWnTplq8eLHOnDmjrVu3Ki4uTufOnVPv3r1d/jB9/fXXat++vXx8fBQQEKCgoCDnOLarw+2VAVaS/P39JUkRERG5tl851kz644W9Zs2aLm233367JOU6XkySTpw4obNnz+rdd99VUFCQy+OJJ56Q9L8PyY0bN06VKlVSs2bNdNttt2nkyJH5etsyL1f/wfn5558lyWV8nPTHB5Bq1qzpnJ+tWrVqOf6g+Pv75/u45eb9999XgwYN5OXlpSpVqigoKEgrV67Mcc6u5epzKUmVK1d2ee6ff/45xz5K0h133OGcf6Wrj1M2Ly+vHG9/+/v7X/O4XFlDVlaW3nrrLd12222y2+2qWrWqgoKClJyc
nO99vdLPP/+s2267LUd7bvt5taK4vq51jK7l6lpr1aolDw+Pa/6+FJWCnvurr6fKlStLyt+1nB+PPPKI7rvvPj355JMKCQlRv3799OGHH+Yr6OZ2/eV2rYeFhalixYouy916660FqvPq81WpUiWFhYW5nK/HH39cv//+u5YsWSLpjztVbN++XY899liO7RX2NXf//v2SlOewNJiPcAsjeXp6qmnTpnrttdc0a9YsXbp0SR999JGkP14A27Vrp5MnT+rNN9/UypUrtXr1ao0dO1aScvzhuLL3Mj/tVj4+KJaX7BoeffRRrV69OtfHfffdJ+mPP7x79+7VokWL1KJFC33yySdq0aJFjnFsBZWf8ZjXU9TH7YMPPtCgQYNUq1YtzZkzR6tWrdLq1avVtm3bfPdqFcc5u9ZxupH9f+211xQbG6uWLVvqgw8+0Oeff67Vq1erXr16herBuxFFcX3d6LV09T8D1+qFu/rDUsWtsNdTfuv39vZWYmKi1qxZo8cee0zJycl65JFH9MADD+S5r9eqzV3q1q2rxo0b64MPPpD0x++zp6eny7sY2dzxmguz8IEyGK9JkyaSpCNHjkiSPv30U6Wnp2v58uUuPQT5/dR9QWVlZenAgQPO3lpJ+u9//ytJ1/xgTVBQkHx9fZWZman27dvn+Rw+Pj565JFH9MgjjygjI0MPP/ywXn31VcXFxRXZbYIiIyMl/dHjcmVPdEZGhg4ePJivOm/Exx9/rJo1a2rx4sUu4eBGQ/zVIiMjtXfv3hzte/bscc4vbh9//LHatGmjOXPmuLSfPXvW+aGogoiMjNS+fftytOe2n7nJ6/oq6rd89+3b59Lbm5KSoqysLOfvS3YP6dV36bi6Z1Uq2NvRJXXuK1eunOuXSuRWv4eHh9q1a6d27drpzTff1GuvvaY//elPWr9+/Q3/zkVGRmr9+vW6cOGCS+9tfu74cKV9+/apTZs2zunz58/ryJEj6tKli8tyjz/+uGJjY3XkyBEtXLhQXbt2dZ7LolCrVi1J0q5du4r99QilGz23MMb69etz/Q8+e9xX9tuN2f/9X7msw+HQvHnziq226dOnO3+2LEvTp09XhQoV1K5du1yXL1eunHr16qVPPvlEu3btyjH/xIkTzp9PnTrlMs/T01N169aVZVm53nqnsNq3by9PT0/9/e9/dzl2c+bMkcPhUNeuXYvsuXKT23nbsmWLkpKSivR5unTpoq1bt7psNy0tTe+++65q1KhRoLGjhVWuXLkc1/JHH32kX3/9tVDb69KlizZv3qytW7c6206cOKEFCxbkuW5+rq/seyAX1beAZd8yLVv2Nwx27txZkuTn56eqVasqMTHRZbmZM2fm2FZBaiupc1+rVi05HA4lJyc7244cOeJ8yz7b6dOnc6ybfVeQ9PT0G66jY8eOunTpkt577z1nW1ZWVo7jn5d3333X5bVm1qxZunz5svN8Zevfv79sNpueffZZHThwQI8++uiN7cBV7r77bkVFRWnq1Kk5zje9uzcXem5hjNGjR+vChQt66KGHVKdOHWVkZGjTpk3697//rRo1ajjHqnbo0EGenp7q3r27nnrqKZ0/f17vvfeegoODnb27RcnLy0urVq1STEyMoqOj9dlnn2nlypV6+eWXr3lLIumP2wytX79e0dHRGjp0qOrWravTp09rx44dWrNmjfMPX4cOHRQaGqr77rtPISEh+vHHHzV9+nR17dr1urfWKqigoCDFxcVpwoQJ6tSpk3r06KG9e/dq5syZatq0aZH/obpat27dtHjxYj300EPq2rWrDh48qNmzZ6tu3bo6f/58kT3PSy+9pH/961/q3LmznnnmGQUGBur999/XwYMH9cknn5TIFzR069ZNEydO1BNPPKF7771X33//vRYsWJBj7HZ+vfjii86vV3722WedtwKLjIx0CVi5yc/11bhxY0nSn/70J/Xr108VKlRQ9+7dC/3FHwcPHlSPHj3UqVMnJSUl6YMP
PtCAAQPUsGFD5zJPPvmkJk2apCeffFJNmjRRYmKi8x2RKxWktpI69/369dO4ceP00EMP6ZlnntGFCxc0a9Ys3X777S4fGJw4caISExPVtWtXRUZG6vjx45o5c6aqVavm8qG3wurZs6eaNWum5557TikpKapTp46WL1/ufG3Jb693RkaG2rVr57wV4syZM9WiRQv16NHDZbmgoCB16tRJH330kQICAor8H2IPDw/NmjVL3bt311133aUnnnhCYWFh2rNnj3bv3q3PP/+8SJ8PpViJ358BKCafffaZNXjwYKtOnTpWpUqVLE9PT+vWW2+1Ro8ebR07dsxl2eXLl1sNGjSwvLy8rBo1alj/93//Z82dOzfHLYMiIyOtrl275ngu5XILrexb8kyZMsXZFhMTY/n4+Fj79++3OnToYFWsWNEKCQmxxo8fn+NWT7rqFkaWZVnHjh2zRo4caUVERFgVKlSwQkNDrXbt2lnvvvuuc5l33nnHatmypVWlShXLbrdbtWrVsl544QXL4XDkecxy24/s2/ts27Yt13WmT59u1alTx6pQoYIVEhJiDR8+PMdtd1q1apXr7aMKcjyvlpWVZb322mtWZGSkZbfbrUaNGlkrVqy45m2V8vvcV9/6yrIsa//+/Vbv3r2tgIAAy8vLy2rWrJm1YsUKl2Wybwf10Ucf5dhm9nnP7bnyc1wuXrxoPffcc1ZYWJjl7e1t3XfffVZSUlKOWvN7KzDLsqzk5GSrVatWlpeXl3XLLbdYf/3rX605c+bkeSuw/F5ff/3rX61bbrnF8vDwcNnm9c7t1dd89q2wfvjhB6t3796Wr6+vVblyZWvUqFHW77//7rLuhQsXrCFDhlj+/v6Wr6+v1bdvX+v48eO5/h5dq7arbwVmWTd27gtyPr744gvrzjvvtDw9Pa3atWtbH3zwQY5bga1du9Z68MEHrfDwcMvT09MKDw+3+vfvb/33v/+97nNe6/q7evuWZVknTpywBgwYYPn6+lr+/v7WoEGDrK+//tqS5HLLutxkv1Zs3LjRGjZsmFW5cmWrUqVK1sCBA61Tp07lus6HH35oSbKGDRuW6/z8vrZa1rXPw1dffWU98MADlq+vr+Xj42M1aNDAmjZt2nX3BWaxWRZ99UBxGTRokD7++OMi7VkEgOK0dOlSPfTQQ/rqq6+cH1zNzfz58/XEE09o27Ztzs825GXZsmXq2bOnEhMTXW7LBxQlxtwCAHCT+v33312mMzMzNW3aNPn5+enuu+8u8ud77733VLNmzSIZVgFcC2NuAQC4SY0ePVq///67mjdvrvT0dC1evFibNm3Sa6+9dsO3cLvSokWLlJycrJUrV+rtt9/mSxVQrAi3AADcpNq2bas33nhDK1as0MWLF3Xrrbdq2rRpLt+qWBT69++vSpUqaciQIRoxYkSRbhu4GmNuAQAAYAzG3AIAAMAYhFsAAAAYgzG3+uMbWX777Tf5+voyyB0AAKAUsixL586dU3h4+HW/VIVwK+m3335TRESEu8sAAABAHg4fPqxq1apdcz7hVnJ+heThw4fl5+fn5moAAABwtdTUVEVEROT51fKEW/3v+7P9/PwItwAAAKVYXkNI+UAZAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwRnl3FwAAuEK8v7sryCne4e4KACDf6LkFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAx3BpuExMT1b17d4WHh8tms2np0qUu8202W66PKVOmOJepUaNGjvmTJk0q4T0BAABAaeDWcJuWlqaGDRtqxowZuc4/cuSIy2Pu3Lmy2Wzq1auXy3ITJ050
WW706NElUT4AAABKmfLufPLOnTurc+fO15wfGhrqMr1s2TK1adNGNWvWdGn39fXNsSwAAABuPmVmzO2xY8e0cuVKDRkyJMe8SZMmqUqVKmrUqJGmTJmiy5cvu6FCAAAAuJtbe24L4v3335evr68efvhhl/ZnnnlGd999twIDA7Vp0ybFxcXpyJEjevPNN6+5rfT0dKWnpzunU1NTi61uAAAAlJwyE27nzp2rgQMHysvLy6U9NjbW+XODBg3k6empp556SgkJCbLb7bluKyEhQRMmTCjWegEAAFDyysSwhC+//FJ79+7Vk08+meey0dHRunz5sn766adrLhMXFyeHw+F8HD58uAirBQAAgLuUiZ7bOXPmqHHjxmrYsGGey+7cuVMeHh4KDg6+5jJ2u/2avboAAAAou9wabs+fP6+UlBTn9MGDB7Vz504FBgaqevXqkv4YD/vRRx/pjTfeyLF+UlKStmzZojZt2sjX11dJSUkaO3asHn30UVWuXLnE9gMAAAClg1vD7TfffKM2bdo4p7PHz8bExGj+/PmSpEWLFsmyLPXv3z/H+na7XYsWLVJ8fLzS09MVFRWlsWPHuozDBQAAwM3DZlmW5e4i3C01NVX+/v5yOBzy8/NzdzkAbmbx/u6uIKd4h7srAIB857Uy8YEyAAAAID8ItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxR3t0FAIBbxPu7uwIAQDGg5xYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMZwa7hNTExU9+7dFR4eLpvNpqVLl7rMHzRokGw2m8ujU6dOLsucPn1aAwcOlJ+fnwICAjRkyBCdP3++BPcCAAAApYVbw21aWpoaNmyoGTNmXHOZTp066ciRI87Hv/71L5f5AwcO1O7du7V69WqtWLFCiYmJGjZsWHGXDgAAgFKovDufvHPnzurcufN1l7Hb7QoNDc113o8//qhVq1Zp27ZtatKkiSRp2rRp6tKli15//XWFh4cXec0AAAAovUr9mNsNGzYoODhYtWvX1vDhw3Xq1CnnvKSkJAUEBDiDrSS1b99eHh4e2rJlizvKBQAAgBu5tec2L506ddLDDz+sqKgo7d+/Xy+//LI6d+6spKQklStXTkePHlVwcLDLOuXLl1dgYKCOHj16ze2mp6crPT3dOZ2amlps+wAAAICSU6rDbb9+/Zw/169fXw0aNFCtWrW0YcMGtWvXrtDbTUhI0IQJE4qiRAAAAJQipX5YwpVq1qypqlWrKiUlRZIUGhqq48ePuyxz+fJlnT59+prjdCUpLi5ODofD+Th8+HCx1g0AAICSUabC7S+//KJTp04pLCxMktS8eXOdPXtW27dvdy6zbt06ZWVlKTo6+prbsdvt8vPzc3kAAACg7HPrsITz5887e2El6eDBg9q5c6cCAwMVGBioCRMmqFevXgoNDdX+/fv14osv6tZbb1XHjh0lSXfccYc6deqkoUOHavbs2bp06ZJGjRqlfv36cacEAACAm5Bbe26/+eYbNWrUSI0aNZIkxcbGqlGjRnrllVdUrlw5JScnq0ePHrr99ts1ZMgQNW7cWF9++aXsdrtzGwsWLFCdOnXUrl07denSRS1atNC7777rrl0CAACAG9ksy7LcXYS7paamyt/f
Xw6HgyEKwM0i3t/dFZQd8Q53VwAA+c5rZWrMLQAAAHA9hFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjlHd3AQCAUi7e390V5C7e4e4KAJRC9NwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYfEMZgOJXWr/hCgBgHHpuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwhlvDbWJiorp3767w8HDZbDYtXbrUOe/SpUsaN26c6tevLx8fH4WHh+vxxx/Xb7/95rKNGjVqyGazuTwmTZpUwnsCAACA0sCt4TYtLU0NGzbUjBkzcsy7cOGCduzYob/85S/asWOHFi9erL1796pHjx45lp04caKOHDnifIwePbokygcAAEAp49ZbgXXu3FmdO3fOdZ6/v79Wr17t0jZ9+nQ1a9ZMhw4dUvXq1Z3tvr6+Cg0NLdZaAQAAUPqVqTG3DodDNptNAQEBLu2TJk1SlSpV1KhRI02ZMkWXL1++7nbS09OVmprq8gAAAEDZV2a+xOHixYsaN26c+vfvLz8/P2f7M888o7vvvluBgYHatGmT4uLidOTIEb355pvX3FZCQoImTJhQEmUDAACgBNksy7LcXYQk2Ww2LVmyRD179swx79KlS+rVq5d++eUXbdiwwSXcXm3u3Ll66qmndP78ednt9lyXSU9PV3p6unM6NTVVERERcjgc1902gELiG8pQHOId7q4AQAlKTU2Vv79/nnmt1PfcXrp0SX379tXPP/+sdevW5Rk+o6OjdfnyZf3000+qXbt2rsvY7fZrBl8AAACUXaU63GYH23379mn9+vWqUqVKnuvs3LlTHh4eCg4OLoEKAQAAUJq4NdyeP39eKSkpzumDBw9q586dCgwMVFhYmHr37q0dO3ZoxYoVyszM1NGjRyVJgYGB8vT0VFJSkrZs2aI2bdrI19dXSUlJGjt2rB599FFVrlzZXbsFAAAAN3HrmNsNGzaoTZs2OdpjYmIUHx+vqKioXNdbv369WrdurR07dmjEiBHas2eP0tPTFRUVpccee0yxsbEFGnaQ3zEcAAqJMbcoDoy5BW4qZWLMbevWrXW9bJ1X7r777ru1efPmoi4LAAAAZVSZus8tAAAAcD2EWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiFCrdt27bV2bNnc7Snpqaqbdu2N1oTAAAAUCiFCrcbNmxQRkZGjvaLFy/qyy+/zPd2EhMT1b17d4WHh8tms2np0qUu8y3L0iuvvKKwsDB5e3urffv22rdvn8syp0+f1sCBA+Xn56eAgAANGTJE58+f
L8xuAQAAoIwrULhNTk5WcnKyJOmHH35wTicnJ+vbb7/VnDlzdMstt+R7e2lpaWrYsKFmzJiR6/zJkyfr73//u2bPnq0tW7bIx8dHHTt21MWLF53LDBw4ULt379bq1au1YsUKJSYmatiwYQXZLQAAABjCZlmWld+FPTw8ZLPZJP3Rq3o1b29vTZs2TYMHDy54ITablixZop49ezq3Hx4erueee07PP/+8JMnhcCgkJETz589Xv3799OOPP6pu3bratm2bmjRpIklatWqVunTpol9++UXh4eH5eu7U1FT5+/vL4XDIz8+vwLUDyEO8v7srAEpOvMPdFQBGym9eK1+QjR48eFCWZalmzZraunWrgoKCnPM8PT0VHByscuXKFb7qq57r6NGjat++vbPN399f0dHRSkpKUr9+/ZSUlKSAgABnsJWk9u3by8PDQ1u2bNFDDz2U67bT09OVnp7unE5NTS2SmgEAAOBeBQq3kZGRkqSsrKxiKeZKR48elSSFhIS4tIeEhDjnHT16VMHBwS7zy5cvr8DAQOcyuUlISNCECROKuGIAAAC4W4HC7ZX27dun9evX6/jx4znC7iuvvHLDhRWnuLg4xcbGOqdTU1MVERHhxooAAABQFAoVbt977z0NHz5cVatWVWhoqHMcrvTH2NmiCLehoaGSpGPHjiksLMzZfuzYMd11113OZY4fP+6y3uXLl3X69Gnn+rmx2+2y2+03XCMAAABKl0KF27/97W969dVXNW7cuKKuxykqKkqhoaFau3atM8ympqZqy5YtGj58uCSpefPmOnv2rLZv367GjRtLktatW6esrCxFR0cXW20AAAAonQoVbs+cOaM+ffrc8JOfP39eKSkpzumDBw9q586dCgwMVPXq1TVmzBj97W9/02233aaoqCj95S9/UXh4uPOOCnfccYc6deqkoUOHavbs2bp06ZJGjRqlfv365ftOCQAAADBHob7EoU+fPvriiy9u+Mm/+eYbNWrUSI0aNZIkxcbGqlGjRs5hDS+++KJGjx6tYcOGqWnTpjp//rxWrVolLy8v5zYWLFigOnXqqF27durSpYtatGihd99994ZrAwAAQNlToPvcZktISNCbb76prl27qn79+qpQoYLL/GeeeabICiwJ3OcWKGbc5xY3E+5zCxSL/Oa1QoXbqKioa2/QZtOBAwcKukm3ItwCxYxwi5sJ4RYoFsXyJQ7ZDh48WOjCAAAAgOJSqDG3AAAAQGlUqJ7bwYMHX3f+3LlzC1UMAAAAcCMKfSuwK126dEm7du3S2bNn1bZt2yIpDAAAACioQoXbJUuW5GjLysrS8OHDVatWrRsuCgAAACiMIhtz6+HhodjYWL311ltFtUkAAACgQIr0A2X79+/X5cuXi3KTAAAAQL4ValhCbGysy7RlWTpy5IhWrlypmJiYIikMAAAAKKhChdtvv/3WZdrDw0NBQUF644038ryTAgAAAFBcChVu169fX9R1AAAAADesUOE224kTJ7R3715JUu3atRUUFFQkRQEAAACFUagPlKWlpWnw4MEKCwtTy5Yt1bJlS4WHh2vIkCG6cOFCUdcIAAAA5Euhwm1sbKw2btyoTz/9VGfPntXZs2e1bNkybdy4Uc8991xR1wgAAADkS6GGJXzyySf6+OOP1bp1a2dbly5d5O3trb59+2rWrFlFVR8AAACQb4Xqub1w4YJCQkJytAcHBzMsAQAAAG5TqHDbvHlzjR8/XhcvXnS2/f7775owYYKaN29eZMUBAAAABVGoYQlTp05Vp06dVK1aNTVs2FCS9N1338lut+uLL74o0gIBAACA/CpUuK1fv7727dunBQsWaM+ePZKk/v37a+DAgfL29i7SAgEAAID8KlS4TUhIUEhIiIYOHerSPnfuXJ04cULjxo0rkuIAAACAgijUmNt33nlHderUydFer149zZ49+4aLAgAAAAqjUOH26NGjCgsLy9EeFBSkI0eO3HBRAAAAQGEUKtxGRETo
66+/ztH+9ddfKzw8/IaLAgAAAAqjUGNuhw4dqjFjxujSpUtq27atJGnt2rV68cUX+YYyAAAAuE2hwu0LL7ygU6dOacSIEcrIyJAkeXl5ady4cYqLiyvSAgEAAID8slmWZRV25fPnz+vHH3+Ut7e3brvtNtnt9qKsrcSkpqbK399fDodDfn5+7i4HME+8v7srAEpOvMPdFQBGym9eK1TPbbZKlSqpadOmN7IJAAAAoMjcULgFUMrQQwoAuMkV6m4JAAAAQGlEuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYo9SH2xo1ashms+V4jBw5UpLUunXrHPOefvppN1cNAAAAdyjv7gLysm3bNmVmZjqnd+3apQceeEB9+vRxtg0dOlQTJ050TlesWLFEawQAAEDpUOrDbVBQkMv0pEmTVKtWLbVq1crZVrFiRYWGhpZ0aQAAAChlSv2whCtlZGTogw8+0ODBg2Wz2ZztCxYsUNWqVXXnnXcqLi5OFy5cuO520tPTlZqa6vIAAABA2Vfqe26vtHTpUp09e1aDBg1ytg0YMECRkZEKDw9XcnKyxo0bp71792rx4sXX3E5CQoImTJhQAhUDAACgJNksy7LcXUR+dezYUZ6envr000+vucy6devUrl07paSkqFatWrkuk56ervT0dOd0amqqIiIi5HA45OfnV+R1AyUm3t/dFQCId7i7AsBIqamp8vf3zzOvlZme259//llr1qy5bo+sJEVHR0vSdcOt3W6X3W4v8hoBAADgXmVmzO28efMUHBysrl27Xne5nTt3SpLCwsJKoCoAAACUJmWi5zYrK0vz5s1TTEyMypf/X8n79+/XwoUL1aVLF1WpUkXJyckaO3asWrZsqQYNGrixYgAAALhDmQi3a9as0aFDhzR48GCXdk9PT61Zs0ZTp05VWlqaIiIi1KtXL/35z392U6UAAABwpzIRbjt06KDcPvcWERGhjRs3uqEiAAAAlEZlZswtAAAAkBfCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAY5d1dAAAARon3d3cFOcU73F0BUGLouQUAAIAxCLcAAAAwRqkelhAfH68JEya4tNWuXVt79uyRJF28eFHPPfecFi1apPT0dHXs2FEzZ85USEiIO8rFzaY0vvUIAMBNrtT33NarV09HjhxxPr766ivnvLFjx+rTTz/VRx99pI0bN+q3337Tww8/7MZqAQAA4E6luudWksqXL6/Q0NAc7Q6HQ3PmzNHChQvVtm1bSdK8efN0xx13aPPmzbrnnntKulQAAAC4Wanvud23b5/Cw8NVs2ZNDRw4UIcOHZIkbd++XZcuXVL79u2dy9apU0fVq1dXUlKSu8oFAACAG5Xqntvo6GjNnz9ftWvX1pEjRzRhwgTdf//92rVrl44ePSpPT08FBAS4rBMSEqKjR49ed7vp6elKT093TqemphZH+QAAAChhpTrcdu7c2flzgwYNFB0drcjI
SH344Yfy9vYu9HYTEhJyfFANAAAAZV+pH5ZwpYCAAN1+++1KSUlRaGioMjIydPbsWZdljh07lusY3SvFxcXJ4XA4H4cPHy7GqgEAAFBSylS4PX/+vPbv36+wsDA1btxYFSpU0Nq1a53z9+7dq0OHDql58+bX3Y7dbpefn5/LAwAAAGVfqR6W8Pzzz6t79+6KjIzUb7/9pvHjx6tcuXLq37+//P39NWTIEMXGxiowMFB+fn4aPXq0mjdvzp0SAAAAblKlOtz+8ssv6t+/v06dOqWgoCC1aNFCmzdvVlBQkCTprbfekoeHh3r16uXyJQ4AAAC4Odksy7LcXYS7paamyt/fXw6HgyEKyD++oQxAWRHvcHcFwA3Lb14rU2NuAQAAgOsh3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGOUd3cBAACgmMX7u7uC3MU73F0BDETPLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGINwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBilOtwmJCSoadOm8vX1VXBwsHr27Km9e/e6LNO6dWvZbDaXx9NPP+2migEAAOBOpTrcbty4USNHjtTmzZu1evVqXbp0SR06dFBaWprLckOHDtWRI0ecj8mTJ7upYgAAALhTeXcXcD2rVq1ymZ4/f76Cg4O1fft2tWzZ0tlesWJFhYaGlnR5AAAAKGVKdc/t1RwOhyQpMDDQpX3BggWqWrWq7rzzTsXFxenChQvuKA8AAABuVqp7bq+UlZWlMWPG6L777tOdd97pbB8wYIAiIyMVHh6u5ORkjRs3Tnv37tXixYuvua309HSlp6c7p1NTU4u1dgAAAJSMMhNuR44cqV27dumrr75yaR82bJjz5/r16yssLEzt2rXT/v37VatWrVy3lZCQoAkTJhRrvShC8f7urgAAAJQRZWJYwqhRo7RixQqtX79e1apVu+6y0dHRkqSUlJRrLhMXFyeHw+F8HD58uEjrBQAAgHuU6p5by7I0evRoLVmyRBs2bFBUVFSe6+zcuVOSFBYWds1l7Ha77HZ7UZUJAACAUqJUh9uRI0dq4cKFWrZsmXx9fXX06FFJkr+/v7y9vbV//34tXLhQXbp0UZUqVZScnKyxY8eqZcuWatCggZurBwAAQEkr1eF21qxZkv74ooYrzZs3T4MGDZKnp6fWrFmjqVOnKi0tTREREerVq5f+/Oc/u6FaAAAAuFupDreWZV13fkREhDZu3FhC1QAAAKC0KxMfKAMAAADyg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGKO/uAgAAwE0q3t/dFeQU73B3BbhB9NwCAADAGIRbAAAAGINwCwAAAGMQbgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYg3ALAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDEItwAAADAG4RYAAADGKO/uAm5a8f7urgAAAFyttP59jne4u4Iyg55bAAAAGINwCwAAAGMQ
bgEAAGAMwi0AAACMQbgFAACAMQi3AAAAMAbhFgAAAMYg3AIAAMAYhFsAAAAYw5hwO2PGDNWoUUNeXl6Kjo7W1q1b3V0SAAAASpgR4fbf//63YmNjNX78eO3YsUMNGzZUx44ddfz4cXeXBgAAgBJksyzLcncRNyo6OlpNmzbV9OnTJUlZWVmKiIjQ6NGj9dJLL+W5fmpqqvz9/eVwOOTn51fc5f6htH53NQAAQH7EO0r06fKb18qXYE3FIiMjQ9u3b1dcXJyzzcPDQ+3bt1dSUlKu66Snpys9Pd057XD8cXJSU1OLt1iXIsr8/xQAAOBmVpK5Sf/LaXn1y5b5cHvy5EllZmYqJCTEpT0kJER79uzJdZ2EhARNmDAhR3tERESx1AgAAGCcSe55F/rcuXPy97/2c5f5cFsYcXFxio2NdU5nZWXp9OnTqlKlimw2W6G2mZqaqoiICB0+fLjkhjbcxDjeJYdjXbI43iWL411yONYly8TjbVmWzp07p/Dw8OsuV+bDbdWqVVWuXDkdO3bMpf3YsWMKDQ3NdR273S673e7SFhAQUCT1+Pn5GXMRlQUc75LDsS5ZHO+SxfEuORzrkmXa8b5ej222Mn+3BE9PTzVu3Fhr1651tmVlZWnt2rVq3ry5GysDAABASSvzPbeSFBsbq5iYGDVp0kTNmjXT1KlTlZaWpieeeMLdpQEAAKAEGRFuH3nkEZ04cUKvvPKKjh49qrvuukurVq3K8SGz4mS32zV+/Pgcwx1QPDjeJYdjXbI43iWL411yONYl62Y+3kbc5xYAAACQDBhzCwAAAGQj3AIAAMAYhFsAAAAYg3ALAAAAYxBui0mPHj1UvXp1eXl5KSwsTI899ph+++03d5dlnJ9++klDhgxRVFSUvL29VatWLY0fP14ZGRnuLs1Yr776qu69915VrFixyL78BP8zY8YM1ahRQ15eXoqOjtbWrVvdXZKREhMT1b17d4WHh8tms2np0qXuLslYCQkJatq0qXx9fRUcHKyePXtq79697i7LWLNmzVKDBg2cX97QvHlzffbZZ+4uq0QRbotJmzZt9OGHH2rv3r365JNPtH//fvXu3dvdZRlnz549ysrK0jvvvKPdu3frrbfe0uzZs/Xyyy+7uzRjZWRkqE+fPho+fLi7SzHOv//9b8XGxmr8+PHasWOHGjZsqI4dO+r48ePuLs04aWlpatiwoWbMmOHuUoy3ceNGjRw5Ups3b9bq1at16dIldejQQWlpae4uzUjVqlXTpEmTtH37dn3zzTdq27atHnzwQe3evdvdpZUYbgVWQpYvX66ePXsqPT1dFSpUcHc5RpsyZYpmzZqlAwcOuLsUo82fP19jxozR2bNn3V2KMaKjo9W0aVNNnz5d0h/fthgREaHRo0frpZdecnN15rLZbFqyZIl69uzp7lJuCidOnFBwcLA2btyoli1burucm0JgYKCmTJmiIUOGuLuUEkHPbQk4ffq0FixYoHvvvZdgWwIcDocCAwPdXQZQIBkZGdq+fbvat2/vbPPw8FD79u2VlJTkxsqAouVwOCSJ1+kSkJmZqUWLFiktLU3Nmzd3dzklhnBbjMaNGycfHx9VqVJFhw4d0rJly9xdkvFSUlI0bdo0PfXUU+4uBSiQkydPKjMzM8c3K4aEhOjo0aNuqgooWllZWRozZozuu+8+3Xnnne4ux1jff/+9KlWqJLvdrqefflpLlixR3bp13V1WiSHcFsBLL70km8123ceePXucy7/wwgv69ttv9cUXX6hcuXJ6/PHHxSiQ/CnosZakX3/9VZ06dVKfPn00dOhQN1VeNhXmeANAQY0cOVK7du3SokWL3F2K0WrXrq2dO3dqy5YtGj58uGJiYvTDDz+4u6wSw5jbAjhx4oROnTp13WVq1qwpT0/PHO2//PKLIiIitGnTppvqrYHCKuix/u2339S6dWvdc889mj9/vjw8+L+tIApzbTPmtmhlZGSoYsWK+vjjj13GfsbExOjs
2bO881OMGHNbMkaNGqVly5YpMTFRUVFR7i7nptK+fXvVqlVL77zzjrtLKRHl3V1AWRIUFKSgoKBCrZuVlSVJSk9PL8qSjFWQY/3rr7+qTZs2aty4sebNm0ewLYQbubZRNDw9PdW4cWOtXbvWGbKysrK0du1ajRo1yr3FATfAsiyNHj1aS5Ys0YYNGwi2bpCVlXVT5Q/CbTHYsmWLtm3bphYtWqhy5crav3+//vKXv6hWrVr02haxX3/9Va1bt1ZkZKRef/11nThxwjkvNDTUjZWZ69ChQzp9+rQOHTqkzMxM7dy5U5J06623qlKlSu4troyLjY1VTEyMmjRpombNmmnq1KlKS0vTE0884e7SjHP+/HmlpKQ4pw8ePKidO3cqMDBQ1atXd2Nl5hk5cqQWLlyoZcuWydfX1zmG3N/fX97e3m6uzjxxcXHq3LmzqlevrnPnzmnhwoXasGGDPv/8c3eXVnIsFLnk5GSrTZs2VmBgoGW3260aNWpYTz/9tPXLL7+4uzTjzJs3z5KU6wPFIyYmJtfjvX79eneXZoRp06ZZ1atXtzw9Pa1mzZpZmzdvdndJRlq/fn2u13FMTIy7SzPOtV6j582b5+7SjDR48GArMjLS8vT0tIKCgqx27dpZX3zxhbvLKlGMuQUAAIAxGJwIAAAAYxBuAQAAYAzCLQAAAIxBuAUAAIAxCLcAAAAwBuEWAAAAxiDcAgAAwBiEWwAAABiDcAsAAABjEG4BAABgDMItAAAAjEG4BQAAgDH+P7+Lub6X1atYAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -1143,18 +1140,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "normal_rv{0, (0, 0), floatX, False}.1 [id A] 'z'\n", - " |RandomGeneratorSharedVariable() [id B]\n", - " |TensorConstant{[]} [id C]\n", - " |TensorConstant{11} [id D]\n", - " |TensorConstant{(2,) of 0} [id E]\n", - " |TensorConstant{[1. 2.]} [id F]\n" + "normal_rv{\"(),()->()\"}.1 [id A] 'z'\n", + " ├─ RNG() [id B]\n", + " ├─ NoneConst{None} [id C]\n", + " ├─ [0 0] [id D]\n", + " └─ [1 2] [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 31, @@ -1185,7 +1181,7 @@ { "data": { "text/plain": [ - "[z ~ N(, )]" + "[z ~ Normal(, )]" ] }, "execution_count": 32, @@ -1206,18 +1202,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "normal_rv{0, (0, 0), floatX, False}.1 [id A] 'z'\n", - " |RandomGeneratorSharedVariable() [id B]\n", - " |TensorConstant{[]} [id C]\n", - " |TensorConstant{11} [id D]\n", - " |TensorConstant{(2,) of 0} [id E]\n", - " |TensorConstant{[1. 2.]} [id F]\n" + "normal_rv{\"(),()->()\"}.1 [id A] 'z'\n", + " ├─ RNG() [id B]\n", + " ├─ NoneConst{None} [id C]\n", + " ├─ [0 0] [id D]\n", + " └─ [1 2] [id E]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 33, @@ -1246,16 +1241,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sample 0: [-0.30775592 1.21469108]\n", - "Sample 1: [-0.30775592 1.21469108]\n", - "Sample 2: [-0.30775592 1.21469108]\n", - "Sample 3: [-0.30775592 1.21469108]\n", - "Sample 4: [-0.30775592 1.21469108]\n", - "Sample 5: [-0.30775592 1.21469108]\n", - "Sample 6: [-0.30775592 1.21469108]\n", - "Sample 7: [-0.30775592 1.21469108]\n", - "Sample 8: [-0.30775592 1.21469108]\n", - "Sample 9: [-0.30775592 1.21469108]\n" + "Sample 0: [-0.52267608 0.39548652]\n", + "Sample 1: [-0.52267608 0.39548652]\n", + "Sample 2: [-0.52267608 0.39548652]\n", + "Sample 3: [-0.52267608 0.39548652]\n", + "Sample 4: [-0.52267608 0.39548652]\n", + "Sample 5: [-0.52267608 0.39548652]\n", + "Sample 6: [-0.52267608 
0.39548652]\n", + "Sample 7: [-0.52267608 0.39548652]\n", + "Sample 8: [-0.52267608 0.39548652]\n", + "Sample 9: [-0.52267608 0.39548652]\n" ] } ], @@ -1281,16 +1276,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sample 0: [-1.2390824 0.3744465]\n", - "Sample 1: [0.76748461 0.95086347]\n", - "Sample 2: [-1.11985098 -1.94744586]\n", - "Sample 3: [-0.62003335 0.10075427]\n", - "Sample 4: [-0.75744869 0.69140323]\n", - "Sample 5: [-0.95472672 -1.0814984 ]\n", - "Sample 6: [-0.81052179 -2.05414581]\n", - "Sample 7: [0.37456894 1.76040717]\n", - "Sample 8: [-0.61006854 -0.05034957]\n", - "Sample 9: [1.19039658 1.10460999]\n" + "Sample 0: [-2.58733016 -3.81858679]\n", + "Sample 1: [-0.69680903 -2.28542543]\n", + "Sample 2: [-1.18813193 -0.27770537]\n", + "Sample 3: [-0.77909163 -2.80947154]\n", + "Sample 4: [ 0.69670727 -0.96752983]\n", + "Sample 5: [-0.45281936 -0.92540881]\n", + "Sample 6: [-0.8067077 3.32004609]\n", + "Sample 7: [-0.0540222 0.4704397]\n", + "Sample 8: [0.71531685 2.08470485]\n", + "Sample 9: [-0.49200616 -2.02573444]\n" ] } ], @@ -1306,7 +1301,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAp4AAAKqCAYAAACTnV4oAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA4iElEQVR4nO3deZhcZZ03/F91d7rT2ZoQOmxpsoCiCQjzJCGyDCaQYXlZnxnQR0FDZOJCwqqOCTMaUDE4cDlKkIC8CswrDKCIMIMsARM3tkCEETHBCJEYhCQEOntvdd4/fOixTQgJcN/VaT6f6zpXUqdP3b+7zqk69a27Tp1TKoqiCAAASKyq0h0AAOCdQfAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAE3nFKpVJcdNFFle7GVp1xxhkxbNiwSncD4G0leAJvyq9//es45ZRTYujQodG7d+/Yc8894+/+7u9i9uzZle5at7N06dIolUpx+eWXb/HvF110UZRKpVi1atVbqvP000/HRRddFEuXLn1L7QCkIngC2+3BBx+MMWPGxJNPPhlTpkyJK6+8Mv7xH/8xqqqq4pvf/Galu9cjXHvttbF48eLtus/TTz8dF198seAJdFs1le4AsOO55JJLoqGhIRYsWBA77bRTl7+tWLGiMp3qYXr16lXpLmy3TZs2RW1tbVRVGdMAtszeAdhuv//972PUqFGbhc6IiMGDB3e5fd1118URRxwRgwcPjrq6uhg5cmTMmTNns/sNGzYsjj/++Jg/f36MGTMm6uvrY//994/58+dHRMQPf/jD2H///aN3794xevTo+NWvftXl/meccUb069cvnn322Tj66KOjb9++sccee8SXvvSlKIriDR/T8uXL4+Mf/3jsuuuuUVdXF6NGjYrvfve7my03e/bsGDVqVPTp0ycGDhwYY8aMiZtuuukN299eWzrG8+abb47Ro0dH//79Y8CAAbH//vt3jjBff/31ceqpp0ZExIQJE6JUKkWpVOpcfxERV111VYwaNSrq6upijz32iKlTp8arr766We1vfetbMWLEiKivr4+DDjoofv7zn8f48eNj/PjxncvMnz8/SqVS3HzzzfEv//Ivseeee0afPn1izZo1sXr16vjsZz8b+++/f/Tr1y8GDBgQxx57bDz55JNd6rzWxq233hoXX3xx7LnnntG/f/845ZRTorm5OVpaWuK8886LwYMHR79+/WLy5MnR0tLytqxfoDKMeALbbejQofHQQw/FU089Ffvtt99Wl50zZ06MGjUqTjzxxKipqYn//M//jLPOOivK5XJMnTq1y7JLliyJj3zkI/HJT34yTj/99Lj88svjhBNOiKuvvjouvPDCOOussyIiYtasWfHBD34wFi9e3GV0raOjI4455ph4//vfH//6r/8a99xzT8ycOTPa29vjS1/60uv28aWXXor3v//9USqVYtq0adHY2Bh33313nHnmmbFmzZo477zzIuLPX3+fc845ccopp8S5554bmzZtiv/+7/+ORx55JD7ykY+84XrbsGHDFo/j3LBhwxved+7cufHhD384jjzyyPja174WERG//e1v45e//GWce+65cfjhh8c555wTV1xxRVx44YXx3ve+NyKi89+LLrooLr744pg4cWJ8+tOfjsWLF8ecOXNiwYIF8ctf/rJzhHXOnDkxbdq0+Nu//ds4//zzY+nSpXHyySfHwIEDY8iQIZv168tf/nLU1tbGZz/72WhpaYna2tp4+umn40c/+lGceuqpMXz48HjppZfimmuuiQ984APx9NNPxx577NGljVmzZkV9fX1Mnz49lixZErNnz45evXpFVVVVvPLKK3HRRRfFww8/HNdff30MHz48vvjFL77h+gK6qQJgO913331FdXV1UV1dXRx88MHFP/3TPxX33ntv0drautmyGzZs2Gze0UcfXYwYMaLLvKFDhxYRUTz44IOd8+69994
iIor6+vriD3/4Q+f8a665poiIYt68eZ3zJk2aVEREcfbZZ3fOK5fLxXHHHVfU1tYWK1eu7JwfEcXMmTM7b5955pnF7rvvXqxatapLn/7P//k/RUNDQ+djOOmkk4pRo0a9wdrZ3HPPPVdExBtOf9nHSZMmFUOHDu28fe655xYDBgwo2tvbX7fO97///c3WS1EUxYoVK4ra2triqKOOKjo6OjrnX3nllUVEFN/97neLoiiKlpaWYtCgQcXYsWOLtra2zuWuv/76IiKKD3zgA53z5s2bV0REMWLEiM228aZNm7rUeW0d1NXVFV/60pc2a2O//fbr8tz58Ic/XJRKpeLYY4/t0sbBBx/cZZ0AOx5ftQPb7e/+7u/ioYceihNPPDGefPLJ+Nd//dc4+uijY88994w777yzy7L19fWd/29ubo5Vq1bFBz7wgXj22Wejubm5y7IjR46Mgw8+uPP2uHHjIiLiiCOOiL322muz+c8+++xmfZs2bVrn/18bwWxtbY37779/i4+lKIq47bbb4oQTToiiKGLVqlWd09FHHx3Nzc2xcOHCiIjYaaed4o9//GMsWLBgm9bTX/vEJz4Rc+fO3Wz66Ec/+ob33WmnnWL9+vUxd+7c7a57//33R2tra5x33nldRoinTJkSAwYMiLvuuisiIh577LF4+eWXY8qUKVFT8z9fiJ122mkxcODALbY9adKkLts4IqKurq6zTkdHR7z88svRr1+/2HfffTvX5V/62Mc+1uWY1nHjxkVRFPHxj3+8y3Ljxo2LZcuWRXt7+3auAaC78FU78KaMHTs2fvjDH0Zra2s8+eSTcfvtt8e//du/xSmnnBJPPPFEjBw5MiIifvnLX8bMmTPjoYce2uwr5ebm5mhoaOi8/ZfhMiI6/9bU1LTF+a+88kqX+VVVVTFixIgu89797ndHRLzuL71XrlwZr776anz729+Ob3/721tc5rUfTH3+85+P+++/Pw466KDYZ5994qijjoqPfOQjceihh27xfn/tXe96V0ycOHGz+b/4xS/e8L5nnXVW3HrrrXHsscfGnnvuGUcddVR88IMfjGOOOeYN7/uHP/whIiL23XffLvNra2tjxIgRnX9/7d999tmny3I1NTWve07R4cOHbzavXC7HN7/5zbjqqqviueeei46Ojs6/DRo0aLPlt2e7l8vlaG5u3mI7QPdnxBN4S2pra2Ps2LHx1a9+NebMmRNtbW3x/e9/PyL+/COkI488MlatWhVf//rX46677oq5c+fG+eefHxF/Dih/qbq6eos1Xm9+sQ0/Gnojr/Xh9NNP3+Jo5Ny5czuD5Xvf+95YvHhx3HzzzXHYYYfFbbfdFocddljMnDnzLffjjQwePDieeOKJuPPOO+PEE0+MefPmxbHHHhuTJk1KXntr/nq0MyLiq1/9alxwwQVx+OGHx/e+97249957Y+7cuTFq1KjNtnlEZbY7UBlGPIG3zZgxYyIi4k9/+lNERPznf/5ntLS0xJ133tllVGvevHlJ6pfL5Xj22Wc7RzkjIp555pmIiNcdsWtsbIz+/ftHR0fHFkcj/1rfvn3jQx/6UHzoQx+K1tbW+Pu///u45JJLYsaMGdG7d++35XG8ntra2jjhhBPihBNOiHK5HGeddVZcc8018YUvfCH22WefKJVKW7zf0KFDIyJi8eLFXUaEW1tb47nnnut83K8tt2TJkpgwYULncu3t7bF06dJ43/vet039/MEPfhATJkyI73znO13mv/rqq7HLLrts+wMGehwjnsB2mzdv3hZHnX784x9HxP98pfvaiNVfLtvc3BzXXXddsr5deeWVnf8viiKuvPLK6NWrVxx55JFbXL66ujr+4R/+IW677bZ46qmnNvv7ypUrO///8ssvd/lbbW1tjBw5MoqiiLa2trfpEWzZX9euqqrqDIKvnWKob9++ERGbnSJp4sSJUVtbG1dccUWXbfGd73wnmpub47jjjouIP39wGDRoUFx77bVdjqO
88cYbNzusYWuqq6s3e358//vfj+XLl29zG0DPZMQT2G5nn312bNiwIf73//7f8Z73vCdaW1vjwQcfjFtuuSWGDRsWkydPjoiIo446qnOU7pOf/GSsW7curr322hg8eHDnqOjbqXfv3nHPPffEpEmTYty4cXH33XfHXXfdFRdeeGE0Nja+7v0uvfTSmDdvXowbNy6mTJkSI0eOjNWrV8fChQvj/vvvj9WrV3c+nt122y0OPfTQ2HXXXeO3v/1tXHnllXHcccdF//793/bH85f+8R//MVavXh1HHHFEDBkyJP7whz/E7Nmz48ADD+w8ZdKBBx4Y1dXV8bWvfS2am5ujrq6u8xyqM2bMiIsvvjiOOeaYOPHEE2Px4sVx1VVXxdixY+P000+PiD8H6YsuuijOPvvsOOKII+KDH/xgLF26NK6//vrYe++9X3dE9a8df/zx8aUvfSkmT54chxxySPz617+OG2+8cbPjb4F3HiOewHa7/PLLY8KECfHjH/84Lrjggrjgggvi0UcfjbPOOiseeeSRzhPL77vvvvGDH/wgSqVSfPazn42rr746PvGJT8S5556bpF/V1dVxzz33xIsvvhif+9znYsGCBTFz5sz48pe/vNX77brrrvHoo4/G5MmT44c//GFMmzYtvvnNb8bq1as7z5kZEZ3h+etf/3pMnTo1fvSjH8U555wT3/ve95I8nr90+umnR+/eveOqq66Ks846K2644Yb40Ic+FHfffXfnL8h32223uPrqq2PFihVx5plnxoc//OF4+umnI+LP5/G88sor4/nnn4/zzz8/br311vjEJz4R9913X5dflE+bNi2uuOKKeP755+Ozn/1s/PznP48777wzdtppp20+lODCCy+Mz3zmM3HvvffGueeeGwsXLoy77rprsx8LAe88pcJR2kAPcMYZZ8QPfvCDWLduXaW70uOUy+VobGyMv//7v49rr7220t0BdmBGPAHotGnTps2Oz/z3f//3WL16dZdLZgK8GY7xBKDTww8/HOeff36ceuqpMWjQoFi4cGF85zvfif3226/zWvAAb5bgCUCnYcOGRVNTU1xxxRWxevXq2HnnneNjH/tYXHrppVFbW1vp7gE7OMd4AgCQhWM8AQDIQvAEACCLbn2MZ7lcjhdeeCH69++/zScuBgAgn6IoYu3atbHHHnt0nlf49XTr4PnCCy844TAAwA5g2bJlMWTIkK0u062D52uXoDss/p+oiV5vsDQAb1opw5FXRTl9DSC79miLX8SPt+nSwd06eL729XpN9IqakuAJkEyO4BmCJ/RI//f8SNtyWKQfFwEAkIXgCQBAFoInAABZCJ4AAGSRNHh2dHTEF77whRg+fHjU19fH3nvvHV/+8pfDVToBAN55kv6q/Wtf+1rMmTMnbrjhhhg1alQ89thjMXny5GhoaIhzzjknZWkAALqZpMHzwQcfjJNOOimOO+64iIgYNmxY/Md//Ec8+uijKcsCANANJf2q/ZBDDokHHnggnnnmmYiIePLJJ+MXv/hFHHvssSnLAgDQDSUd8Zw+fXqsWbMm3vOe90R1dXV0dHTEJZdcEqeddtoWl29paYmWlpbO22vWrEnZPQAAMko64nnrrbfGjTfeGDfddFMsXLgwbrjhhrj88svjhhtu2OLys2bNioaGhs7JddoBAHqOUpHwJ+ZNTU0xffr0mDp1aue8r3zlK/G9730vFi1atNnyWxrxbGpqivFxkktmAqTkWu3Am9RetMX8uCOam5tjwIABW1026VftGzZsiKqqrjuz6urqKJe3vPOpq6uLurq6lF0CAKBCkgbPE044IS655JLYa6+9YtSoUfGrX/0qvv71r8fHP/7xlGUBAOiGkgbP2bNnxxe+8IU466yzYsWKFbHHHnvEJz/5yfjiF7+YsiwAAN1Q0mM836o1a9ZEQ0ODYzwBUnOMJ/Ambc8xnq7VDgBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGS
R9ATyAOwgnGOze0l9XlXbmwox4gkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJBFTaU7ALmVanolr1G0tyWvEaXEnxuLctr2I9I/BrZdhu2d5bXX0ZG8RpbXRo4aUAH2+gAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWNZXuAORWtLelL1JK/5muVF2duELq9vNsi1JNr+Q1cig6OtIWyPCcBbCnAQAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALJIHjyXL18ep59+egwaNCjq6+tj//33j8ceeyx1WQAAupmkJ5B/5ZVX4tBDD40JEybE3XffHY2NjfG73/0uBg4cmLIsAADdUNLg+bWvfS2ampriuuuu65w3fPjwlCUBAOimkn7Vfuedd8aYMWPi1FNPjcGDB8ff/M3fxLXXXvu6y7e0tMSaNWu6TAAA9AxJg+ezzz4bc+bMiXe9611x7733xqc//ek455xz4oYbbtji8rNmzYqGhobOqampKWX3AADIqFQURZGq8dra2hgzZkw8+OCDnfPOOeecWLBgQTz00EObLd/S0hItLS2dt9esWRNNTU0xPk6KmlKvVN2Et18p/QkjStXVyWukVrS3Ja9RqukZ+46io6PSXXjLcjxns6ynopy+BuxA2ou2mB93RHNzcwwYMGCryyZ9d9x9991j5MiRXea9973vjeeff36Ly9fV1cWAAQO6TAAA9AxJg+ehhx4aixcv7jLvmWeeiaFDh6YsCwBAN5Q0eJ5//vnx8MMPx1e/+tVYsmRJ3HTTTfHtb387pk6dmrIsAADdUNLgOXbs2Lj99tvjP/7jP2K//faLL3/5y/GNb3wjTjvttJRlAQDohpKexzMi4vjjj4/jjz8+dRkAALo512oHACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCySn06JHiTD9cezyHCd5RzXpC5VldK2X1ubtP2IiI71O/71xyMiqvr1TV4j9XOqvHZt0vYjIkp9+iSvUWzYkLxGltfGunXJa0Al9JAkAQBAdyd4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWNZXuwDtGKUPGL8pJm6/qlf7pUqqtTV6jaG1NXqPc1p68RlV936Tt59gW1TU9YxdUqu+dvkhV2n1IVe+6pO1HRBRr1qavUS6S14iOjuQlqhK//nLso0rV1clrFBm2Rer31oiIUk2vpO0X7W1J298eRjwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyqKl0B94xinLyEtX9+iVtv2htTdp+RER5U0vyGlX1vZPXqGncJXmNKCd+TvXtk7b9iIhe6XdB7YPSP45eL61NXiM2bEzbfnX6cYjSgP7Ja1QPbEheo/ziiuQ1inKRukDa9iOiVJX+9V20p38cORTtbWkLlFK/vqsitvEpa8QTAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyCJb8Lz00kujVCrFeee
dl6skAADdSJbguWDBgrjmmmvife97X45yAAB0Q8mD57p16+K0006La6+9NgYOHJi6HAAA3VTy4Dl16tQ47rjjYuLEiW+4bEtLS6xZs6bLBABAz5D0Qqo333xzLFy4MBYsWLBNy8+aNSsuvvjilF0CAKBCko14Llu2LM4999y48cYbo3fv3tt0nxkzZkRzc3PntGzZslTdAwAgs2Qjno8//nisWLEi/tf/+l+d8zo6OuJnP/tZXHnlldHS0hLV1dVd7lNXVxd1dXWpugQAQAUlC55HHnlk/PrXv+4yb/LkyfGe97wnPv/5z28WOgEA6NmSBc/+/fvHfvvt12Ve3759Y9CgQZvNBwCg53PlIgAAskj6q/a/Nn/+/JzlAADoRox4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWWU+n9KaVqv487cCqeqe/FGjH+g1J26/u2ydp+xERpVIpfY1+fZPXiPreyUuU+yauUZ1+W5Rr0++CNuyefltU7ZL+9d1rXUfS9osM27v2lU3Ja1Rtak9fY2P6x5Faed365DWKtvTbIkc2KGW40mLRkfb1HUW527S/Y6c5AAB2GIInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGRRU+kObJOiHBHldO2X0ufvoq09eY2eoFRbm7xGsdOA5DXK/euS12hprE/a/sadq5O2HxHRUVtKXmPj4OQlsui9KvH2SL8pohjeK3mNnX+7KXmNmlL6J1XV+rSPo5ThPalUnX4fUrS2Jq9RzlAjueQ5pyqi2OYlAQAgPcETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyKKm0h14pyj1Sr+qq3rXpS1QKqVtPyKiKn2Njob65DXWDUtfY+POaT83tvdJ2nxERGzYs0heozywNXmN2j5tyWtsqulI2n7r4oak7UdE1K9MXiJaBvZKXqPcK/2YTV1H2u1d2q0xafsREeVn/5C8Rqku8fteRERbe/oaqRXlbtO+EU8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgi6TBc9asWTF27Njo379/DB48OE4++eRYvHhxypIAAHRTSYPnT3/605g6dWo8/PDDMXfu3Ghra4ujjjoq1q9fn7IsAADdUNLL6dxzzz1dbl9//fUxePDgePzxx+Pwww9PWRoAgG4m6zGezc3NERGx88475ywLAEA3kO1a7eVyOc4777w49NBDY7/99tviMi0tLdHS0tJ5e82aNbm6BwBAYtlGPKdOnRpPPfVU3Hzzza+7zKxZs6KhoaFzampqytU9AAASyxI8p02bFv/1X/8V8+bNiyFDhrzucjNmzIjm5ubOadmyZTm6BwBABkm/ai+KIs4+++y4/fbbY/78+TF8+PCtLl9XVxd1dXUpuwQAQIUkDZ5Tp06Nm266Ke64447o379/vPjiixER0dDQEPX19SlLAwDQzST9qn3OnDnR3Nwc48ePj913371zuuWWW1KWBQCgG0r+VTsAAES4VjsAAJkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZJH0dEo7ilJ1dfIa5U0tyWvUDN4lbYHe6a8qVR40IHmNtoba5DWqN6U/ldi6prTtFzUZToc2eFPyEjsN2Ji8Rmtb+n3IGe9+JGn7V714ZNL2IyJaMqynDYPT16jeKf2YTc263mnbX9GctP2IiFKOKxFmeP/OkRGK9rbkNboLI54AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAI
AkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFjWV7kB3UHR0JK9R1Sv9qi42bEzafqkq/eeUqrWbktco79k3eY1X9q1OXiNKaZtvOvCFtAUi4tVN9clrnD7i0eQ1epXS70Nayr2Stv/RQ3+ZtP2IiNt+f0DyGuuLhuQ1Gn6fvESUyuW0BWrTPp8iIkq1tclrFC0tyWuUqhLvbCOiSF6h+zDiCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkEVNpTuwTUpVf55SNV9dnaztzhq1telr9K5LW6Bf37TtR0THwD7pa9T2jM9b7QPbk7b/wisNSduPiFh02P+XvEY5yslrPNe+IXmNteW0u+v/d9XhSduPiOhdm/Y5GxHxar8ieY1SR/IS0d437XtG9YuvJG0/IiKqSslLdKxP/9qr6pUhKiXMOHlURWzjS29Hf6QAAOwgBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyCJL8PzWt74Vw4YNi969e8e4cePi0UcfzVEWAIBuJHnwvOWWW+KCCy6ImTNnxsKFC+OAAw6Io48+OlasWJG6NAAA3Ujy4Pn1r389pkyZEpMnT46RI0fG1VdfHX369Invfve7qUsDANCNJA2era2t8fjjj8fEiRP/p2BVVUycODEeeuihlKUBAOhmkl6AdNWqVdHR0RG77rprl/m77rprLFq0aLPlW1paoqWlpfP2mjVrUnYPAICMutWv2mfNmhUNDQ2dU1NTU6W7BADA2yRp8Nxll12iuro6XnrppS7zX3rppdhtt902W37GjBnR3NzcOS1btixl9wAAyChp8KytrY3Ro0fHAw880DmvXC7HAw88EAcffPBmy9fV1cWAAQO6TAAA9AxJj/GMiLjgggti0qRJMWbMmDjooIPiG9/4Rqxfvz4mT56cujQAAN1I8uD5oQ99KFauXBlf/OIX48UXX4wDDzww7rnnns1+cAQAQM+WPHhGREybNi2mTZuWoxQAAN1Ut/pVOwAAPZfgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFllOp/SWFeWIKKdrviNZ0/9To7U1eY1SXV3aAhkeQ9Wm2uQ1ajaley511tiQ4TNduZS+RmKf+OMhyWscMmBJ8hrPtTQmr/Hshl2Stl9TSv+6WJ/h9Z1jOKW6tUheo6hO/PquzbAtWjK8Z/RO/L4XEeWNG5PXSK7UfcYZu09PAADo0QRPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMiiptId2Balml5RKvVK1n7R0ZGs7ZyK1ta0BV5uSdt+RJR6pdvOr2mvG5C8Rjn9w4g+z1cnbX9DfW3S9iMifvH8iOQ12oakXU8REWvaeievsXhlY9L2e9e2J20/IiJ+0z95ieoM72q9NqR/z6jZkHZ7lPumf85Wrd+QvEYOVbXp94Xltgyvv27CiCcAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZCF4AgCQheAJAEAWNZXuwLYo2tuiKKVrv6q2Nl3j/1e5rT15jarq6rTtNw5K2n5ERNSkfQwREX2
f35C8Ru2auuQ1Xnl32udt7YL0r4tX35N+F/TTNe9OXqOqtiN5jY6NaddVyyvpt0XDH5OXiPqX02+L2ldak9eofiXxfupPK9K2HxFFe/r3vfLGjclrRCnDGF1RTl8jpe3ovxFPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIItkwXPp0qVx5plnxvDhw6O+vj723nvvmDlzZrS2pj//GQAA3U+yMwYvWrQoyuVyXHPNNbHPPvvEU089FVOmTIn169fH5ZdfnqosAADdVLLgecwxx8QxxxzTeXvEiBGxePHimDNnjuAJAPAOlPUYz+bm5th5551zlgQAoJvIdq32JUuWxOzZs7c62tnS0hItLS2dt9esWZOjawAAZLDdI57Tp0+PUqm01WnRokVd7rN8+fI45phj4tRTT40pU6a8btuzZs2KhoaGzqmpqWn7HxEAAN1SqSiKYnvusHLlynj55Ze3usyIESOitrY2IiJeeOGFGD9+fLz//e+P66+/PqqqXj/rbmnEs6mpKcbHSVFT6rU93dwuVf+3rymV29qT16jqXZe2/cZBSduPiIg+9clLdPTrnbxG205pt0VExCvvTvy8LaVtPiLi1feU0xfpm+G1V9uRvEbHxrRfUNW8kv4LsIbfpX9S1b+cflv0ebHljRd6i6pf2ZC2wJ9WpG0/IqI9/WuvY9265DWilOGoxCLDvjCh9qIt5scd0dzcHAMGDNjqstu9p2lsbIzGxsZtWnb58uUxYcKEGD16dFx33XVbDZ0REXV1dVFXl/4NGwCA/JJ9xF2+fHmMHz8+hg4dGpdffnmsXLmy82+77bZbqrIAAHRTyYLn3LlzY8mSJbFkyZIYMmRIl79t57f7AAD0AMkOXDjjjDOiKIotTgAAvPO4VjsAAFkIngAAZCF4AgCQheAJAEAWgicAAFkIngAAZJH+Gmlvh1JV0ktWlVtbk7XdkxRr0l+arJTh8qXVq9emr7FuU/IaA6MhafsvjU2/LRqeSf/Zt6MuwyVx013Rt1Ov9WnbH7A0/eUN1w5J/5bTe1X6/XnV+vQ1Sqkvs9wr/bboWJf4ScsOyYgnAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkEVNpTuwTYpyRJQr3Yu3ppQ+45c3bkzaflVHR9L2IyJiVfoSRUtL8hpVQ4ckr1G9qT1p+7s+lrT5iIjo1Zx+W7Q11CWv0dq/OnmNcm0pafu1r7YlbT8iYuDG9PvxXqs3JK9R2pR+XcXadWnbLxdp24+IUq/0EaPI8b5U7OD5o5sx4gkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJCF4AkAQBaCJwAAWQieAABkIXgCAJBFTaU7sE1KVX+eUinK6drOWSOxolwkr1FuXpO8RlXvuuQ14pXm5CV6bdiYtv0+9Unbj4goatPvgmpbOpLXqFlfm7xGkXiYoNeL6V97sXFT+hrlDPvaXumft8X6DWnb70j/uogcNdjhGPEEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIIkvwbGlpiQMPPDBKpVI88cQTOUoCANDNZAme//RP/xR77LFHjlIAAHRTyYPn3XffHffdd19cfvnlqUsBANCNJb3g7EsvvRRTpkyJH/3oR9GnT583XL6lpSVaWlo6b69Zk+HawQAAZJFsxLMoijjjjDPiU5/6VIwZM2ab7jNr1qxoaGjonJqamlJ1DwCAzLY7eE6fPj1KpdJWp0WLFsXs2bNj7dq1MWP
GjG1ue8aMGdHc3Nw5LVu2bHu7BwBAN7XdX7V/5jOfiTPOOGOry4wYMSJ+8pOfxEMPPRR1dXVd/jZmzJg47bTT4oYbbtjsfnV1dZstDwBAz7DdwbOxsTEaGxvfcLkrrrgivvKVr3TefuGFF+Loo4+OW265JcaNG7e9ZQEA2MEl+3HRXnvt1eV2v379IiJi7733jiFDhqQqCwBAN+XKRQAAZJH0dEp/adiwYVEURa5yAAB0M0Y8AQDIQvAEACALwRMAgCwETwAAssj246J3vFKGjF+U0zbf3pa0/YjIsp6KtvbkNTpefiV5jeo9d0tbYOXqtO1HRKmqlL5Gfe/kNar+uDF5jaK1NW37GX78mfoxRESU+vRJXqN4eUPyGlFdnbT58sb0z9me8L4XET3ncXQTRjwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyEDwBAMhC8AQAIAvBEwCALARPAACyqKl0B7ZJUY6IcqV78dYUO3j/e5BSVanSXXhblP/0UtL2i3KRtP2IiFKvDLugdeuTlyjV7Bi70neCcobtXbS3Ja9Rqum1Q7cfEVF0dCSvkYX377eVEU8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsBE8AALIQPAEAyELwBAAgC8ETAIAsairdAXYgpZ7xOaXc2pq+SI51VZTTtp/hMRQdHclrVPWuS16jvHFT8hqlXml310Vbe9L2I/Js7+Svi4g8r432trQFesI+ih1Sz0gSAAB0e4InAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkEXS4HnXXXfFuHHjor6+PgYOHBgnn3xyynIAAHRjyS6Fcdttt8WUKVPiq1/9ahxxxBHR3t4eTz31VKpyAAB0c0mCZ3t7e5x77rlx2WWXxZlnntk5f+TIkSnKAQCwA0jyVfvChQtj+fLlUVVVFX/zN38Tu+++exx77LFvOOLZ0tISa9as6TIBANAzJAmezz77bEREXHTRRfEv//Iv8V//9V8xcODAGD9+fKxevfp17zdr1qxoaGjonJqamlJ0DwCACtiu4Dl9+vQolUpbnRYtWhTlcjkiIv75n/85/uEf/iFGjx4d1113XZRKpfj+97//uu3PmDEjmpubO6dly5a9tUcHAEC3sV3HeH7mM5+JM844Y6vLjBgxIv70pz9FRNdjOuvq6mLEiBHx/PPPv+596+rqoq6ubnu6BADADmK7gmdjY2M0Nja+4XKjR4+Ourq6WLx4cRx22GEREdHW1hZLly6NoUOHvrmeAgCwQ0vyq/YBAwbEpz71qZg5c2Y0NTXF0KFD47LLLouIiFNPPTVFSQAAurlk5/G87LLLoqamJj760Y/Gxo0bY9y4cfGTn/wkBg4cmKokAADdWKkoiqLSnXg9a9asiYaGhhgfJ0VNqVelu0Oph1xhtSinr5FjXaV+HD1ke1f1Tn/ceNHWnrxGqVeycYKIyPMYio6O5DW8vrdRT3gMdBvtRVvMjzuiubk5BgwYsNVle8Y7CwAA3Z7gCQBAFoInAABZCJ4AAGSR9mh1epYecqB4qSb9D9WK9rbkNXrKj39SK2/cWOkuvC2yPKdSy/Gc9brYNn6ERYV4hQIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGRRU+kOQG5Fe1ulu/D2KMp
p2y9l+Fya+jHQvdje7yy2N1tgxBMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACCLmkp3AOiminKle8BfKiUeJ7C9t13qbZFDju2dYz153u5wesCrBwCAHYHgCQBAFoInAABZCJ4AAGQheAIAkIXgCQBAFoInAABZCJ4AAGSRLHg+88wzcdJJJ8Uuu+wSAwYMiMMOOyzmzZuXqhwAAN1csuB5/PHHR3t7e/zkJz+Jxx9/PA444IA4/vjj48UXX0xVEgCAbixJ8Fy1alX87ne/i+nTp8f73ve+eNe73hWXXnppbNiwIZ566qkUJQEA6OaSBM9BgwbFvvvuG//+7/8e69evj/b29rjmmmti8ODBMXr06BQlAQDo5mpSNFoqleL++++Pk08+Ofr37x9VVVUxePDguOeee2LgwIGve7+WlpZoaWnpvL1mzZoU3QMAoAK2a8Rz+vTpUSqVtjotWrQoiqKIqVOnxuDBg+PnP/95PProo3HyySfHCSecEH/6059et/1Zs2ZFQ0ND59TU1PSWHyAAAN1DqSiKYlsXXrlyZbz88stbXWbEiBHx85//PI466qh45ZVXYsCAAZ1/e9e73hVnnnlmTJ8+fYv33dKIZ1NTU4yPk6Km1GtbuwnQ85QSn/2uKKdtvydJvS1yyLG9c6wnz9tuob1oi/lxRzQ3N3fJfVuyXV+1NzY2RmNj4xsut2HDhoiIqKrq+qSrqqqKcvn1nyR1dXVRV1e3PV0CAGAHkeTjyMEHHxwDBw6MSZMmxZNPPhnPPPNMfO5zn4vnnnsujjvuuBQlAQDo5pIEz1122SXuueeeWLduXRxxxBExZsyY+MUvfhF33HFHHHDAASlKAgDQzSX5VXtExJgxY+Lee+9N1TwAADuYHnCENAAAOwLBEwCALARPAACySHaM59vhtVOMtkdbxDafbRSgJ3Iez+6jB4zZZNnezuP5TtEebRHxP7lta7p18Fy7dm1ERPwiflzhngBUmA/f3YdtsW2sp3ectWvXRkNDw1aX2a4rF+VWLpfjhRdeiP79+0epVKp0dzbz2pWVli1b9oZn6mf7Wb9pWb9pWb9pWb9pWb9p9bT1WxRFrF27NvbYY4/NLh7017r1iGdVVVUMGTKk0t14QwMGDOgRT5zuyvpNy/pNy/pNy/pNy/pNqyet3zca6XxNDzhQBQCAHYHgCQBAFoLnW1BXVxczZ86Murq6SnelR7J+07J+07J+07J+07J+03onr99u/eMiAAB6DiOeAABkIXgCAJCF4AkAQBaCJwAAWQieb5MTTzwx9tprr+jdu3fsvvvu8dGPfjReeOGFSnerR1i6dGmceeaZMXz48Kivr4+99947Zs6cGa2trZXuWo9xySWXxCGHHBJ9+vSJnXbaqdLd6RG+9a1vxbBhw6J3794xbty4ePTRRyvdpR7hZz/7WZxwwgmxxx57RKlUih/96EeV7lKPMmvWrBg7dmz0798/Bg8eHCeffHIsXry40t3qMebMmRPve9/7Ok8cf/DBB8fdd99d6W5lJXi+TSZMmBC33nprLF68OG677bb4/e9/H6ecckqlu9UjLFq0KMrlclxzzTXxm9/8Jv7t3/4trr766rjwwgsr3bUeo7W1NU499dT49Kc/Xemu9Ai33HJLXHDBBTFz5sxYuHBhHHDAAXH00UfHihUrKt21Hd769evjgAMOiG9961uV7kqP9NOf/jSmTp0aDz/8cMydOzfa2triqKOOivXr11e6az3CkCFD4tJLL43HH388HnvssTjiiCPipJNOit/85jeV7lo2TqeUyJ133hknn3xytLS0RK9evSrdnR7nsssuizlz5sSzzz5b6a70KNdff32cd9558eqrr1a6Kzu0ceP
GxdixY+PKK6+MiIhyuRxNTU1x9tlnx/Tp0yvcu56jVCrF7bffHieffHKlu9JjrVy5MgYPHhw//elP4/DDD690d3qknXfeOS677LI488wzK92VLIx4JrB69eq48cYb45BDDhE6E2lubo6dd9650t2AzbS2tsbjjz8eEydO7JxXVVUVEydOjIceeqiCPYPt19zcHBFhf5tAR0dH3HzzzbF+/fo4+OCDK92dbATPt9HnP//56Nu3bwwaNCief/75uOOOOyrdpR5pyZIlMXv27PjkJz9Z6a7AZlatWhUdHR2x6667dpm/6667xosvvlihXsH2K5fLcd5558Whhx4a++23X6W702P8+te/jn79+kVdXV186lOfittvvz1GjhxZ6W5lI3huxfTp06NUKm11WrRoUefyn/vc5+JXv/pV3HfffVFdXR0f+9jHwpEMr297129ExPLly+OYY46JU089NaZMmVKhnu8Y3sz6BXjN1KlT46mnnoqbb7650l3pUfbdd9944okn4pFHHolPf/rTMWnSpHj66acr3a1sHOO5FStXroyXX355q8uMGDEiamtrN5v/xz/+MZqamuLBBx98Rw2hb4/tXb8vvPBCjB8/Pt7//vfH9ddfH1VVPjdtzZt5/jrG861rbW2NPn36xA9+8IMuxx5OmjQpXn31Vd+EvI0c45nOtGnT4o477oif/exnMXz48Ep3p0ebOHFi7L333nHNNddUuitZ1FS6A91ZY2NjNDY2vqn7lsvliIhoaWl5O7vUo2zP+l2+fHlMmDAhRo8eHdddd53QuQ3eyvOXN6+2tjZGjx4dDzzwQGcgKpfL8cADD8S0adMq2zl4A0VRxNlnnx233357zJ8/X+jMoFwuv6OyguD5NnjkkUdiwYIFcdhhh8XAgQPj97//fXzhC1+Ivffe22jn22D58uUxfvz4GDp0aFx++eWxcuXKzr/ttttuFexZz/H888/H6tWr4/nnn4+Ojo544oknIiJin332iX79+lW2czugCy64ICZNmhRjxoyJgw46KL7xjW/E+vXrY/LkyZXu2g5v3bp1sWTJks7bzz33XDzxxBOx8847x1577VXBnvUMU6dOjZtuuinuuOOO6N+/f+dxyQ0NDVFfX1/h3u34ZsyYEccee2zstddesXbt2rjpppti/vz5ce+991a6a/kUvGX//d//XUyYMKHYeeedi7q6umLYsGHFpz71qeKPf/xjpbvWI1x33XVFRGxx4u0xadKkLa7fefPmVbprO6zZs2cXe+21V1FbW1scdNBBxcMPP1zpLvUI8+bN2+JzddKkSZXuWo/wevva6667rtJd6xE+/vGPF0OHDi1qa2uLxsbG4sgjjyzuu+++SncrK8d4AgCQhQPlAADIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAshA8AQDIQvAEACALwRMAgCwETwAAsvj/AXkj2vArP/8bAAAAAElFTkSuQmCC", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAqYAAAKqCAYAAADouZzkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/H5lhTAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA4lklEQVR4nO3de5xVdb0//veeGWa4zog4iBfkon4tvPYARdJvgpKXH946R+pbWUAeKgUDqY7gt3NQO4qVj06pheQp6DxOHrXMS8e8oAc6lZomR9M8YKSIgRcQnUGQGWb2+v3R18kJECjWXp/B5/Px2A/Ya6/9eb/32pd57c9ee+1SlmVZAABAwaqKbgAAACIEUwAAEiGYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFOArSiVSnHppZcW3cY7mjhxYgwePLjoNgB2GcEUyM2TTz4Z55xzTgwaNCi6d+8e++23X3zwgx+Ma6+9tujWkrNixYoolUpx9dVXb/XySy+9NEqlUqxdu/avqvP000/HpZdeGitWrPirxgHIg2AK5OLBBx+MESNGxBNPPBGTJ0+O6667Lv7u7/4uqqqq4pvf/GbR7e0Wbrjhhli2bNlOXefpp5+Oyy67TDAFklRTdAPA7umKK66IhoaGePTRR2OPPfbodNkrr7xSTFO7mW7duhXdwk7btGlT1NbWRlWVeRFgS14ZgFz8/ve/j0MPPXSLUBoR0b9//07n58+fHyeeeGL0798/6urqYtiwYTF37twtrjd48OA4/fTTY/HixTFixIjo0aNHHH744bF48eKIiPjxj38chx9+eHTv3j2GDx8e//3f/93p+hMnTozevXvHs88+G6ecckr06tUr9t1337j88ssjy7Lt3qZVq1bFpz71qdh7772jrq4uDj300Pje9763xXrXXnttHHroodGzZ8/o27dvjBgxIm688cbtjr+ztraP6U033RTDhw+PPn36RH19fRx++OEdM9QLFiyI8ePHR0TEmDFjolQqRalU6th+ERHf/va349BDD426urrYd999Y8qUKfH6669vUftb3/pWDB06NHr06BHHHHNM/PznP4/Ro0fH6NGjO9ZZvHhxlEqluOmmm+JLX/pS7LffftGzZ89obm6OdevWxRe+8IU4/PDDo3fv3lFfXx+nnXZaPPHEE53qvDXGLbfcEpdddlnst99+0adPnzjnnHOiqakpWlpaYvr06dG/f//o3bt3TJo0KVpaWnbJ9gUqz4wpkItBgwbFQw89FE899VQcdthh77ju3Llz49BDD40zzzwzampq4ic/+UlccMEFUS6XY8qUKZ3WXb58eXzsYx+Lz3zmM3HuuefG1VdfHWeccUZcf/31cckll8QFF1wQERFz5syJD3/4w7Fs2bJOs3Pt7e1x6qmnxrHHHhtf/epX45577onZs2dHW1tbXH755dvs8eWXX45jjz02SqVSTJ06NRobG+Puu++O8847L5qbm2P69OkR8ceP1z/3uc/FOeecE9OmTYtNmzbFb37zm/jVr34VH/vYx7a73TZu3LjV/Ug3bty43esuXLgwPvrRj8ZJJ50UX/nKVyIi4n/+53/il7/8ZUybNi0+8IEPxOc+97m45ppr4pJLLon3vve9EREd/1566aVx2WWXxdixY+P888+PZcuWxdy5c+PRRx+NX/7ylx0ztHPnzo2pU6fG//7f/zsuuuiiWLFiRZx99tnRt2/f2H///bfo68tf/nLU1tbGF77whWhpaYna2tp4+umn4/bbb4/x48fHkCFD4uWXX4558+bFCSecEE8//XTsu+++ncaYM2dO9OjRI2bOnBnLly+Pa6+9Nrp16xZVVVXx2muvxaWXXhoPP/xwLFiwIIYMGRL/+I//uN3tBSQoA8jBfffdl1VXV2fV1dXZqFGjsr//+7/P7r333qy1tXWLdTdu3LjFslNOOSUbOnRop2WDBg3KIiJ78MEHO5bde++9WURkPXr
0yJ5//vmO5fPmzcsiIlu0aFHHsgkTJmQRkV144YUdy8rlcjZu3ListrY2W7NmTcfyiMhmz57dcf68887L9tlnn2zt2rWdevo//+f/ZA0NDR234ayzzsoOPfTQ7WydLT333HNZRGz39PYeJ0yYkA0aNKjj/LRp07L6+vqsra1tm3V++MMfbrFdsizLXnnllay2tjY7+eSTs/b29o7l1113XRYR2fe+970sy7KspaUl69evX3b00Udnmzdv7lhvwYIFWURkJ5xwQseyRYsWZRGRDR06dIv7eNOmTZ3qvLUN6urqsssvv3yLMQ477LBOj52PfvSjWalUyk477bROY4waNarTNgG6Fh/lA7n44Ac/GA899FCceeaZ8cQTT8RXv/rVOOWUU2K//faLO++8s9O6PXr06Ph/U1NTrF27Nk444YR49tlno6mpqdO6w4YNi1GjRnWcHzlyZEREnHjiiXHAAQdssfzZZ5/dorepU6d2/P+tGdDW1ta4//77t3pbsiyLW2+9Nc4444zIsizWrl3bcTrllFOiqakplixZEhERe+yxR/zhD3+IRx99dIe205/79Kc/HQsXLtzi9IlPfGK7191jjz1iw4YNsXDhwp2ue//990dra2tMnz690wzz5MmTo76+Pu66666IiPj1r38dr776akyePDlqav70odvHP/7x6Nu371bHnjBhQqf7OCKirq6uo057e3u8+uqr0bt37zjkkEM6tuXbffKTn+y0T+3IkSMjy7L41Kc+1Wm9kSNHxgsvvBBtbW07uQWAFPgoH8jN0UcfHT/+8Y+jtbU1nnjiibjtttvin//5n+Occ86Jxx9/PIYNGxYREb/85S9j9uzZ8dBDD23xkXVTU1M0NDR0nH97+IyIjssGDhy41eWvvfZap+VVVVUxdOjQTsv+1//6XxER2/ym+po1a+L111+P73znO/Gd73xnq+u89YWuiy++OO6///445phj4qCDDoqTTz45Pvaxj8Vxxx231ev9uYMPPjjGjh27xfJf/OIX273uBRdcELfcckucdtppsd9++8XJJ58cH/7wh+PUU0/d7nWff/75iIg45JBDOi2vra2NoUOHdlz+1r8HHXRQp/Vqamq2eUzVIUOGbLGsXC7HN7/5zfj2t78dzz33XLS3t3dc1q9fvy3W35n7vVwuR1NT01bHAdJmxhTIXW1tbRx99NFx5ZVXxty5c2Pz5s3xwx/+MCL++CWpk046KdauXRtf//rX46677oqFCxfGRRddFBF/DDBvV11dvdUa21qe7cCXmrbnrR7OPffcrc5mLly4sCN4vve9741ly5bFTTfdFMcff3zceuutcfzxx8fs2bP/6j62p3///vH444/HnXfeGWeeeWYsWrQoTjvttJgwYULutd/Jn8+WRkRceeWVMWPGjPjABz4Q//Zv/xb33ntvLFy4MA499NAt7vOIYu53oPLMmAIVNWLEiIiIePHFFyMi4ic/+Um0tLTEnXfe2WlWbNGiRbnUL5fL8eyzz3bMkkZEPPPMMxER25zxa2xsjD59+kR7e/tWZzP/XK9eveIjH/lIfOQjH4nW1tb4m7/5m7jiiiti1qxZ0b17911yO7altrY2zjjjjDjjjDOiXC7HBRdcEPPmzYt/+Id/iIMOOihKpdJWrzdo0KCIiFi2bFmnGeXW1tZ47rnnOm73W+stX748xowZ07FeW1tbrFixIo444ogd6vNHP/pRjBkzJr773e92Wv7666/HXnvtteM3GNitmDEFcrFo0aKtzlr99Kc/jYg/fWT81ozX29dtamqK+fPn59bbdddd1/H/LMviuuuui27dusVJJ5201fWrq6vjb//2b+PWW2+Np556aovL16xZ0/H/V199tdNltbW1MWzYsMiyLDZv3ryLbsHW/XntqqqqjqD41iGUevXqFRGxxSGgxo4dG7W1tXHNNdd0ui+++93vRlNTU4wbNy4i/vjGol+/fnHDDTd02o/zBz/4wRa7TbyT6ur
qLR4fP/zhD2PVqlU7PAaw+zFjCuTiwgsvjI0bN8aHPvSheM973hOtra3x4IMPxs033xyDBw+OSZMmRUTEySef3DHL95nPfCbeeOONuOGGG6J///4ds6q7Uvfu3eOee+6JCRMmxMiRI+Puu++Ou+66Ky655JJobGzc5vWuuuqqWLRoUYwcOTImT54cw4YNi3Xr1sWSJUvi/vvvj3Xr1nXcngEDBsRxxx0Xe++9d/zP//xPXHfddTFu3Ljo06fPLr89b/d3f/d3sW7dujjxxBNj//33j+effz6uvfbaOOqoozoOCXXUUUdFdXV1fOUrX4mmpqaoq6vrOIbsrFmz4rLLLotTTz01zjzzzFi2bFl8+9vfjqOPPjrOPffciPhj0L700kvjwgsvjBNPPDE+/OEPx4oVK2LBggVx4IEHbnNG9s+dfvrpcfnll8ekSZPi/e9/fzz55JPxgx/8YIv9f4F3FzOmQC6uvvrqGDNmTPz0pz+NGTNmxIwZM+KRRx6JCy64IH71q191HHj/kEMOiR/96EdRKpXiC1/4Qlx//fXx6U9/OqZNm5ZLX9XV1XHPPffESy+9FF/84hfj0UcfjdmzZ8eXv/zld7ze3nvvHY888khMmjQpfvzjH8fUqVPjm9/8Zqxbt67jmKER0RGuv/71r8eUKVPi9ttvj8997nPxb//2b7ncnrc799xzo3v37vHtb387Lrjggvj+978fH/nIR+Luu+/u+Ab8gAED4vrrr49XXnklzjvvvPjoRz8aTz/9dET88Tim1113XaxcuTIuuuiiuOWWW+LTn/503HfffZ2+ET916tS45pprYuXKlfGFL3whfv7zn8edd94Ze+yxxw7vqnDJJZfE5z//+bj33ntj2rRpsWTJkrjrrru2+DIT8O5SyuwhDrxLTJw4MX70ox/FG2+8UXQru51yuRyNjY3xN3/zN3HDDTcU3Q7QRZkxBWCnbNq0aYv9Q//1X/811q1b1+knSQF2ln1MAdgpDz/8cFx00UUxfvz46NevXyxZsiS++93vxmGHHRbjx48vuj2gCxNMAdgpgwcPjoEDB8Y111wT69atiz333DM++clPxlVXXRW1tbVFtwd0YfYxBQAgCfYxBQAgCYIpAABJ6PL7mJbL5Vi9enX06dNnhw/sDABA5WRZFuvXr499992347jKW9Plg+nq1asdkBkAoAt44YUXYv/999/m5V0+mL71E3/Hx/8XNdFtO2vDu0ipAnvqZOX8a+wu8r4/KnFfVOIxtbvw3IBO2mJz/CJ+ut2fZu7ywfStj+9rolvUlART6FCREOGP7w7L/f4QTNPiuQGd/L9jQG1vt0uvMgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSUFN0A/CuVcr5fWFWznd8dk7O90epujrX8SMisvb23GvsNjy/4S9ixhQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgoPpqtWrYpzzz03+vXrFz169IjDDz88fv3rXxfdFgAAFVZTZPHXXnstjjvuuBgzZkzcfffd0djYGL/73e+ib9++RbYFAEABCg2mX/nKV2LgwIExf/78jmVDhgwpsCPYfVT17Jl7jaylJfcapZpuudfI2jbnXiNKhX9A9dcrZfnXyMr519gdVOLx5L6gAIW+Ut55550xYsSIGD9+fPTv3z/e9773xQ033FBkSwAAFKTQYPrss8/G3Llz4+CDD4577703zj///Pjc5z4X3//+97d5nZaWlmhubu50AgCg6yv
0o/xyuRwjRoyIK6+8MiIi3ve+98VTTz0V119/fUyYMGGr15kzZ05cdtlllWwTAIAKKHTGdJ999olhw4Z1Wvbe9743Vq5cuc3rzJo1K5qamjpOL7zwQt5tAgBQAYXOmB533HGxbNmyTsueeeaZGDRo0DavU1dXF3V1dXm3BgBAhRU6Y3rRRRfFww8/HFdeeWUsX748brzxxvjOd74TU6ZMKbItAAAKUGgwPfroo+O2226Lf//3f4/DDjssvvzlL8c3vvGN+PjHP15kWwAAFKDQj/IjIk4//fQ4/fTTi24DAICC7QZHfAYAYHcgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEhCTdEN8DalnN8nZOV8x4+IUnV17jVy304RkbW3516jqntdvgXa87+/q3r3zr1GlEr512itwHv0LMt1+FJtba7jR0RkmzfnX6O1AjUq8PyuxOst7I7MmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJJQU3QDvE1WLrqDv1qpplvuNbL29txrVNXW5l4jb1X775N7jey113OvUdqzb/41mtfnXiPb+Gau45ca++U6fkRErHk1/xpZln+NiqjOdfRKvA7uDn+T6HrMmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkJBVMr7rqqiiVSjF9+vSiWwEAoMKSCaaPPvpozJs3L4444oiiWwEAoABJBNM33ngjPv7xj8cNN9wQffv2LbodAAAKkEQwnTJlSowbNy7Gjh1bdCsAABSkpugGbrrppliyZEk8+uijO7R+S0tLtLS0dJxvbm7OqzUAACqo0GD6wgsvxLRp02LhwoXRvXv3HbrOnDlz4rLLLsu5s4KU8p3ALlVX5zp+RESpW/4PqVL3uvxr9Omde42oKuU6fNZrx55Tf1WNPfbLvUa5Ot/tFBHRvt8eudeInG9Gt7Ub8i0QEaXGfvnXeHNT7jWirS33Eu2vvpZ7jbxV4m9G1t6eew26llKWZVlRxW+//fb40Ic+FNVve/C3t7dHqVSKqqqqaGlp6XRZxNZnTAcOHBij46yoKXWrWO+52A2CaVUFQmNUImDvDsF0jz65jh8RkXWrwB+uSgTTHhV47dgdgummzbnXCMF0h1Qi0JVyfo2KEEzfTdqyzbE47oimpqaor6/f5nqFzpiedNJJ8eSTT3ZaNmnSpHjPe94TF1988RahNCKirq4u6uoqEH4AAKioQoNpnz594rDDDuu0rFevXtGvX78tlgMAsHtL4lv5AABQ+Lfy/9zixYuLbgEAgAKYMQUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEhCTdENdBmlrp/hs/b23GuUN7XkXqO6X9/ca0SW5V6ifcCeuY7f2rd7ruNHRGwYkP9LSO36cu41NvWtzr1GXVO+t+PNvWpzHT8ioq6pLfca3V6rwGvIi6/mXqOqd69cx8/e3JTr+BERWdvm3Guwg3aDDBJRFbEDf1p3h1sKAMBuQDAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAm
CKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJNQU3QCVU6oq5V6jqnev3GtEluVeYvOQ/rnXKHerznX8piHdch0/IqL5oPzvi6wm/8dt7Wu5l4iNA/KdB+ixJtfhIyKipiX/uYz2fXrkXqPnht651yhtasm3QLf8/3yXWzblXiNKFZgfy8r518jbu+g2mDEFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQhMKD6Zw5c+Loo4+OPn36RP/+/ePss8+OZcuWFd0WAAAVVngw/dnPfhZTpkyJhx9+OBYuXBibN2+Ok08+OTZs2FB0awAAVFBN0Q3cc889nc4vWLAg+vfvH4899lh84AMfKKgrAAAqrfAZ0z/X1NQUERF77rlnwZ0AAFBJhc+Yvl25XI7p06fHcccdF4cddthW12lpaYmWlpaO883NzZVqDwCAHCU1YzplypR46qmn4qabbtrmOnPmzImGhoaO08CBAyvYIQAAeUkmmE6dOjX+4z/+IxYtWhT777//NtebNWtWNDU1dZxeeOGFCnYJAEBeCv8oP8uyuPDCC+O2226LxYsXx5AhQ95x/bq6uqirq6tQdwAAVErhwXTKlClx4403xh133BF9+vSJl156KSIiGhoaokePHgV3BwBApRQeTOfOnRsREaNHj+60fP78+TFx4sQdH6hU9cdTF1aqKuU6flVDfa7jR0SUunXLvUZ5wF6513jjgPzfFJVzfva9dlh7vgUiovuAjbnX6FaT/+2of++m3GusfnmPXMdvPayc6/gRES1LeuZeo+G5/G9Hub577jWqm3P+ZC/LfztV1eW/ncqtrbnXoGspPJhmWVZ0CwAAJKBrTzECALDbEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJKGm6AaonGzDm7nXKO3dK/calVDVmuVe44198n1fuOfg13MdPyJiRP8Xcq9xat/f5F7j4TcOyr3GLS+OyLlCOefxI944uC33GvUrKzBfkuX//I5SKf8aeavO/74oVeW/nbJyJR5T+T//3i3MmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJJQU3QDu0xWjohy0V38dUrd8h2+V49cx4+IyJrfyL1Gqb5X7jWq2rLca7R3z3f8dWv65FsgInrt25J7jX9/+djca9w89IHca7z8nvpcx1/X0jPX8SMinnxyUO412utKudfIqvOfk8n26J3r+KU3N+U6fkRE/q+CEVHK/74oVeWfDbL23Eu8a5gxBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASUgimH7rW9+KwYMHR/fu3WPkyJHxyCOPFN0SAAAVVngwvfnmm2PGjBkxe/bsWLJkSRx55JFxyimnxCuvvFJ0awAAVFDhwfTrX/96TJ48OSZNmhTDhg2L66+/Pnr27Bnf+973im4NAIAKKjSYtra2xmOPPRZjx47tWFZVVRVjx46Nhx56qMDOAACotJoii69duzba29tj77337rR87733jqVLl271Oi0tLdHS0tJxvrm5OdceAQCojMI/yt9Zc+bMiYaGho7TwIEDi24JAIBdoNBgutdee0V1dXW8/PLLnZa//PLLMWDAgK1eZ9asWdHU1NRxeuGFFyrRKgAAOSs0mNbW1sbw4cPjgQc
e6FhWLpfjgQceiFGjRm31OnV1dVFfX9/pBABA11foPqYRETNmzIgJEybEiBEj4phjjolvfOMbsWHDhpg0aVLRrQEAUEGFB9OPfOQjsWbNmvjHf/zHeOmll+Koo46Ke+65Z4svRAEAsHsrPJhGREydOjWmTp1adBsAABSoy30rHwCA3ZNgCgBAEpL4KL9LKFUgw2flfMcvZ/mOHxFRVcq9RNatOvcaG/vnX6M259+GKPdp2f5Kf6UNbXW51zi0fnXuNb6y7qDca/y+uV+u4/epzf/+jj5tuZdo75b/Y6qtd7fca1Q3v5lzgfz/JmUt+T+msgr8XSpV4O8Su44ZUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBJqim6gy8jKFSiR8/uELMt3/Igo1XXPvUZ7926516h/fnPuNZqG5Hs7Wlf1ynX8iIhFy47KvcYeR63Jvca4gb/NvUZ7zs/vp3+3X67jR0T0XJH/c6+uOf/X2po38n9+l15/I9fxy+vzHT8ioqpHj9xrtG/YmHuNrJz/374o5f33O//nRSrMmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJJQU3QDXUWpurroFv5q2ebNudcoRY/ca1RvaM29RmlzOfcaPerzfUxtXpn/+85N/bPca7yyqm/uNeavPi73GqXWfO+P+uX5v0bVNudeIiL/h1RFXkOiuuvP+5Q3tRTdwq6R5f96zq7T9Z85AADsFgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYUF0xUrVsR5550XQ4YMiR49esSBBx4Ys2fPjtbWChzGAwCA5BR2HNOlS5dGuVyOefPmxUEHHRRPPfVUTJ48OTZs2BBXX311UW0BAFCQwoLpqaeeGqeeemrH+aFDh8ayZcti7ty5gikAwLtQUvuYNjU1xZ577ll0GwAAFCCZnyRdvnx5XHvttdudLW1paYmWlj/9TFpzcyV+Iw8AgLzt8hnTmTNnRqlUesfT0qVLO11n1apVceqpp8b48eNj8uTJ7zj+nDlzoqGhoeM0cODAXX0TAAAoQCnLsmxXDrhmzZp49dVX33GdoUOHRm1tbURErF69OkaPHh3HHntsLFiwIKqq3jkrb23GdODAgTE6zoqaUre//gZsQ6m6OrexK6VUV5d7jaqG+txrZI19c69Rrs3/w4Q3hvTKdfzmA/LfU2dT/1368rFVm/u2514jSvnfjlJrvvdHn+X5v0bVVuADqrqmcu416p9pyr1G6fX1uY5fXvPOf2d3hWxzW/412ivw/M7yf0yxfW3Z5lgcd0RTU1PU1287K+zyv76NjY3R2Ni4Q+uuWrUqxowZE8OHD4/58+dvN5RGRNTV1UVdBQIWAACVVdg+pqtWrYrRo0fHoEGD4uqrr441a9Z0XDZgwICi2gIAoCCFBdOFCxfG8uXLY/ny5bH//vt3umwX710AAEAXUNjhoiZOnBhZlm31BADAu09SxzEFAODdSzAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSUNhxTLuaSvxsWt4/e5q1bs51/IiI7M1NudcovZz/T/FV1+T/1Gh47Y1cx6/a3D/X8SMimrL8fwaz/vf53xfl/H7NuENVzk+/cgVezfsu25h7jW6rX8+9RlTiZzDLOR/6MO/xo0I/Fwp/xowpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQB
IgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCTVFN8CfZO3tuY5fqs51+IiIyFpacq9Rqu2We42oqcTGynIdvvey13IdPyKixys9c6+RVeDtc7lb/vd3Kef7u/rNtlzHj4ioat6Ue43I+XUwIiI25f86VX69Odfx8/57ERFRqirlXiMr5/u8oOsxYwoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEhCTdENUDlZe3v+NTa15F6jVFube41o3Zx/jba2XIcv9emd6/gRETVZlnuNaC/nX6MStyPv50ZVKd/xI/K/DRGRbXwz9xrlN/OvEaWuP++TlSvwvMgq8PymS+n6zxwAAHYLgikAAElIIpi2tLTEUUcdFaVSKR5//PGi2wEAoABJBNO///u/j3333bfoNgAAKFDhwfTuu++O++67L66++uqiWwEAoECFfiv/5ZdfjsmTJ8ftt98ePXv2LLIVAAAKVlgwzbIsJk6cGJ/97GdjxIgRsWLFih26XktLS7S0/OmQJc3NzTl1CABAJe3yj/JnzpwZpVLpHU9Lly6Na6+9NtavXx+zZs3aqfHnzJkTDQ0NHaeBAwfu6psAAEABSlm2a48svWbNmnj11VffcZ2hQ4fGhz/84fjJT34SpdKfDgrd3t4e1dXV8fGPfzy+//3vb/W6W5sxHThwYIyOs6Km1G3X3Aj+chU4qHR1fZ/ca0SpAgcr3w0OsB8NFbgvHGB/xzjA/g5zgP0dU4kfZXGA/XePtmxzLI47oqmpKerr67e53i7/KL+xsTEaGxu3u94111wT//RP/9RxfvXq1XHKKafEzTffHCNHjtzm9erq6qKurm6X9AoAQDoK28f0gAMO6HS+d+8/zu4ceOCBsf/++xfREgAABer6nzUAALBbKPRwUW83ePDg2MW7uwIA0IWYMQUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAElI5idJ2U1k5dxLlDdszL1GJW5HqaZbvuO3t+c6fkRE9uIrudeI6t3k/XM5359cLtXm+3iKiCg3rc+9RiVkOd8XfyyyOd/xSxV4XlTgdRD+3G7yig8AQFcnmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASaopugN1MKf/3Olnb5txrlKqrc6+RtbfnOn7bmldzHT8iolRVyr1GVs5yr1HVo3vuNaK9nOvw5Tc25Dp+RP6P2YrJ8r0vKmJ3uA2wFWZMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAk1RTdw1113xeWXXx6/+c1vonv37nHCCSfE7bffXnRbu6dSBd6HZOX8a1RA1t6ef5FSlu/4Fbgvsgpspopor8C2atuc7/jlnB9PEVGqKuVeoxJ2m8ct7IYKDaa33nprTJ48Oa688so48cQTo62tLZ566qkiWwIAoCCFBdO2traYNm1afO1rX4vzzjuvY/mwYcOKagkAgAIVto/pkiVLYtWqVVFVVRXve9/7Yp999onTTjvNjCkAwLtUYcH02WefjYiISy+
9NL70pS/Ff/zHf0Tfvn1j9OjRsW7dum1er6WlJZqbmzudAADo+nZ5MJ05c2aUSqV3PC1dujTK5T9+2eD//t//G3/7t38bw4cPj/nz50epVIof/vCH2xx/zpw50dDQ0HEaOHDgrr4JAAAUYJfvY/r5z38+Jk6c+I7rDB06NF588cWI6LxPaV1dXQwdOjRWrly5zevOmjUrZsyY0XG+ublZOAUA2A3s8mDa2NgYjY2N211v+PDhUVdXF8uWLYvjjz8+IiI2b94cK1asiEGDBm3zenV1dVFXV7fL+gUAIA2FfSu/vr4+PvvZz8bs2bNj4MCBMWjQoPja174WERHjx48vqi0AAApS6HFMv/a1r0VNTU184hOfiDfffDNGjhwZ//mf/xl9+/Ytsi0AAApQyrIs/58LyVFzc3M0NDTE6Dgrakrdim4nbX75KS153x/uix1WVdc99xp++SkdFfllN6CTtmxzLI47oqmpKerr67e5XmGHiwIAgLcTTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAk1BTdABWUlYvuYNcoVeD9VCW21e5yf+wGyq2t+RfZDe7vrL3oDoDdnRlTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEmqKbgB2WlYuugPeUqrAe9tK3N8eUwBJMGMKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEwBAEiCYAoAQBIEUwAAkiCYAgCQhEKD6TPPPBNnnXVW7LXXXlFfXx/HH398LFq0qMiWAAAoSKHB9PTTT4+2trb4z//8z3jsscfiyCOPjNNPPz1eeumlItsCAKAAhQXTtWvXxu9+97uYOXNmHHHEEXHwwQfHVVddFRs3boynnnqqqLYAAChIYcG0X79+ccghh8S//uu/xoYNG6KtrS3mzZsX/fv3j+HDhxfVFgAABakpqnCpVIr7778/zj777OjTp09UVVVF//7945577om+fftu83otLS3R0tLScb65ubkS7QJbk5WL7oBKKlVgLsNjCt7VdvmrzMyZM6NUKr3jaenSpZFlWUyZMiX69+8fP//5z+ORRx6Js88+O84444x48cUXtzn+nDlzoqGhoeM0cODAXX0TAAAoQCnLsmxXDrhmzZp49dVX33GdoUOHxs9//vM4+eST47XXXov6+vqOyw4++OA477zzYubMmVu97tZmTAcOHBij46yoKXXbNTcCgC2ZMQX+Qm3Z5lgcd0RTU1On3PfndvlH+Y2NjdHY2Ljd9TZu3BgREVVVnV/oqqqqolze9gtTXV1d1NXV/XVNAgCQnMK+/DRq1Kjo27dvTJgwIZ544ol45pln4otf/GI899xzMW7cuKLaAgCgIIUF07322ivuueeeeOONN+LEE0+MESNGxC9+8Yu444474sgjjyyqLQAAClLYt/IjIkaMGBH33ntvkS0AAJCIQn/5CQAA3iKYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJCEmqIbAKCLyMpFdwDs5syYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCYIpAABJEEw
BAEiCYAoAQBIEUwAAklBTdAMA7AKlCswzZOX8awDvamZMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJAimAAAkQTAFACAJgikAAEkQTAEASIJgCgBAEgRTAACSIJgCAJAEwRQAgCQIpgAAJEEwBQAgCbkF0yuuuCLe//73R8+ePWOPPfbY6jorV66McePGRc+ePaN///7xxS9+Mdra2vJqCQCAhNXkNXBra2uMHz8+Ro0aFd/97ne3uLy9vT3GjRsXAwYMiAcffDBefPHF+OQnPxndunWLK6+8Mq+2AABIVCnLsizPAgsWLIjp06fH66+/3mn53XffHaeffnqsXr069t5774iIuP766+Piiy+ONWvWRG1t7Q6N39zcHA0NDTE6zoqaUrdd3T5A11CqwJ5ZWTn/GsBuqS3bHIvjjmhqaor6+vptrlfYPqYPPfRQHH744R2hNCLilFNOiebm5vjtb39bVFsAABQkt4/yt+ell17qFEojouP8Sy+9tM3rtbS0REtLS8f55ubmfBoEAKCidmrGdObMmVEqld7xtHTp0rx6jYiIOXPmRENDQ8dp4MCBudYDAKAydmrG9POf/3xMnDjxHdcZOnToDo01YMCAeOSRRzote/nllzsu25ZZs2bFjBkzOs43NzcLpwAAu4GdCqaNjY3R2Ni4SwqPGjUqrrjiinjllVeif//+ERGxcOHCqK+vj2HDhm3zenV1dVFXV9dx/q3vbrXF5ohcv8YFkDJffgLS1RabI+JPuW1bctvHdOXKlbFu3bpYuXJltLe3x+OPPx4REQcddFD07t07Tj755Bg2bFh84hOfiK9+9avx0ksvxZe+9KWYMmVKp+C5PevXr4+IiF/ET/O4GQBdgzfmQBewfv36aGho2ObluR0uauLEifH9739/i+WLFi2K0aNHR0TE888/H+eff34sXrw4evXqFRMmTIirrroqamp2PC+Xy+VYvXp19OnTJ0ql0q5qv2Le2hXhhRdeeMfDJ/AnttnOs812nm2282yznWeb7TzbbOelsM2yLIv169fHvvvuG1VV2/6EJ/fjmPLO3joO6/aO68Wf2GY7zzbbebbZzrPNdp5ttvNss53XlbZZYccxBQCAtxNMAQBIgmBasLq6upg9e/ZOfeHr3c4223m22c6zzXaebbbzbLOdZ5vtvK60zexjCgBAEsyYAgCQBMEUAIAkCKYAACRBMAUAIAmCaULOPPPMOOCAA6J79+6xzz77xCc+8YlYvXp10W0la8WKFXHeeefFkCFDokePHnHggQfG7Nmzo7W1tejWknbFFVfE+9///ujZs2fsscceRbeTrG9961sxePDg6N69e4wcOTIeeeSRoltK1n/913/FGWecEfvuu2+USqW4/fbbi24peXPmzImjjz46+vTpE/3794+zzz47li1bVnRbSZs7d24cccQRUV9fH/X19TFq1Ki4++67i26ry7jqqquiVCrF9OnTi27lHQmmCRkzZkzccsstsWzZsrj11lvj97//fZxzzjlFt5WspUuXRrlcjnnz5sVvf/vb+Od//ue4/vrr45JLLim6taS1trbG+PHj4/zzzy+6lWTdfPPNMWPGjJg9e3YsWbIkjjzyyDjllFPilVdeKbq1JG3YsCGOPPLI+Na3vlV0K13Gz372s5gyZUo8/PDDsXDhwti8eXOcfPLJsWHDhqJbS9b+++8fV111VTz22GPx61//Ok488cQ466yz4re//W3RrSXv0UcfjXnz5sURRxxRdCvbl5GsO+64IyuVSllra2vRrXQZX/3qV7MhQ4YU3UaXMH/+/KyhoaHoNpJ0zDHHZFOmTOk4397enu27777ZnDlzCuyqa4iI7Lbbbiu6jS7nlVdeySIi+9nPflZ0K11K3759s3/5l38puo2krV+/Pjv44IOzhQsXZieccEI2bdq0olt6R2ZME7Vu3br4wQ9+EO9///ujW7duRbfTZTQ1NcWee+5ZdBt0Ya2trfHYY4/F2LFjO5Z
VVVXF2LFj46GHHiqwM3ZnTU1NERFev3ZQe3t73HTTTbFhw4YYNWpU0e0kbcqUKTFu3LhOr2kpE0wTc/HFF0evXr2iX79+sXLlyrjjjjuKbqnLWL58eVx77bXxmc98puhW6MLWrl0b7e3tsffee3davvfee8dLL71UUFfszsrlckyfPj2OO+64OOyww4puJ2lPPvlk9O7dO+rq6uKzn/1s3HbbbTFs2LCi20rWTTfdFEuWLIk5c+YU3coOE0xzNnPmzCiVSu94Wrp0acf6X/ziF+O///u/47777ovq6ur45Cc/Gdm77Me5dnabRUSsWrUqTj311Bg/fnxMnjy5oM6L85dsMyANU6ZMiaeeeipuuummoltJ3iGHHBKPP/54/OpXv4rzzz8/JkyYEE8//XTRbSXphRdeiGnTpsUPfvCD6N69e9Ht7DA/SZqzNWvWxKuvvvqO6wwdOjRqa2u3WP6HP/whBg4cGA8++OC76qOKnd1mq1evjtGjR8exxx4bCxYsiKqqd9/7rb/kcbZgwYKYPn16vP766zl317W0trZGz54940c/+lGcffbZHcsnTJgQr7/+uk8xtqNUKsVtt93WaduxbVOnTo077rgj/uu//iuGDBlSdDtdztixY+PAAw+MefPmFd1Kcm6//fb40Ic+FNXV1R3L2tvbo1QqRVVVVbS0tHS6LBU1RTewu2tsbIzGxsa/6LrlcjkiIlpaWnZlS8nbmW22atWqGDNmTAwfPjzmz5//rgylEX/d44zOamtrY/jw4fHAAw90hKtyuRwPPPBATJ06tdjm2G1kWRYXXnhh3HbbbbF48WKh9C9ULpffdX8jd9RJJ50UTz75ZKdlkyZNive85z1x8cUXJxlKIwTTZPzqV7+KRx99NI4//vjo27dv/P73v49/+Id/iAMPPPBdNVu6M1atWhWjR4+OQYMGxdVXXx1r1qzpuGzAgAEFdpa2lStXxrp162LlypXR3t4ejz/+eEREHHTQQdG7d+9im0vEjBkzYsKECTFixIg45phj4hvf+EZs2LAhJk2aVHRrSXrjjTdi+fLlHeefe+65ePzxx2PPPfeMAw44oMDO0jVlypS48cYb44477og+ffp07L/c0NAQPXr0KLi7NM2aNStOO+20OOCAA2L9+vVx4403xuLFi+Pee+8turUk9enTZ4t9lt/6DkvS+zIXe1AA3vKb3/wmGzNmTLbnnntmdXV12eDBg7PPfvaz2R/+8IeiW0vW/Pnzs4jY6oltmzBhwla32aJFi4puLSnXXnttdsABB2S1tbXZMccckz388MNFt5SsRYsWbfUxNWHChKJbS9a2Xrvmz59fdGvJ+tSnPpUNGjQoq62tzRobG7OTTjopu++++4puq0vpCoeLso8pAABJeHfukAcAQHIEUwAAkiCYAgCQBMEUAIAkCKYAACRBMAUAIAmCKQAASRBMAQBIgmAKAEASBFMAAJIgmAIAkATBFACAJPz/xbB5EEri6yIAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -1348,12 +1343,12 @@ "text/latex": [ "$$\n", " \\begin{array}{rcl}\n", - " \\text{z} &\\sim & \\operatorname{N}(\\text{},~\\text{})\n", + " \\text{z} &\\sim & \\operatorname{Normal}(\\text{},~\\text{})\n", " \\end{array}\n", " $$" ], "text/plain": [ - "z ~ N(, )" + "z ~ Normal(, )" ] }, "execution_count": 37, @@ -1401,38 +1396,38 @@ "output_type": "stream", "text": [ "Check{sigma > 0} [id A] 'z_logprob'\n", - " |Elemwise{sub,no_inplace} [id B]\n", - " | |Elemwise{sub,no_inplace} [id C]\n", - " | | |Elemwise{mul,no_inplace} [id D]\n", - " | | | |InplaceDimShuffle{x} [id E]\n", - " | | | | |TensorConstant{-0.5} [id F]\n", - " | | | |Elemwise{pow,no_inplace} [id G]\n", - " | | | |Elemwise{true_div,no_inplace} [id H]\n", - " | | | | |Elemwise{sub,no_inplace} [id I]\n", - " | | | | | |z [id J]\n", - " | | | | | |TensorConstant{(2,) of 0} [id K]\n", - " | | | | |TensorConstant{[1. 2.]} [id L]\n", - " | | | |InplaceDimShuffle{x} [id M]\n", - " | | | |TensorConstant{2} [id N]\n", - " | | |InplaceDimShuffle{x} [id O]\n", - " | | |Elemwise{log,no_inplace} [id P]\n", - " | | |Elemwise{sqrt,no_inplace} [id Q]\n", - " | | |TensorConstant{6.283185307179586} [id R]\n", - " | |Elemwise{log,no_inplace} [id S]\n", - " | |TensorConstant{[1. 2.]} [id L]\n", - " |All [id T]\n", - " |MakeVector{dtype='bool'} [id U]\n", - " |All [id V]\n", - " |Elemwise{gt,no_inplace} [id W]\n", - " |TensorConstant{[1. 
2.]} [id L]\n", - " |InplaceDimShuffle{x} [id X]\n", - " |TensorConstant{0} [id Y]\n" + " ├─ Sub [id B]\n", + " │ ├─ Sub [id C]\n", + " │ │ ├─ Mul [id D]\n", + " │ │ │ ├─ ExpandDims{axis=0} [id E]\n", + " │ │ │ │ └─ -0.5 [id F]\n", + " │ │ │ └─ Pow [id G]\n", + " │ │ │ ├─ True_div [id H]\n", + " │ │ │ │ ├─ Sub [id I]\n", + " │ │ │ │ │ ├─ z [id J]\n", + " │ │ │ │ │ └─ [0 0] [id K]\n", + " │ │ │ │ └─ [1 2] [id L]\n", + " │ │ │ └─ ExpandDims{axis=0} [id M]\n", + " │ │ │ └─ 2 [id N]\n", + " │ │ └─ ExpandDims{axis=0} [id O]\n", + " │ │ └─ Log [id P]\n", + " │ │ └─ Sqrt [id Q]\n", + " │ │ └─ 6.283185307179586 [id R]\n", + " │ └─ Log [id S]\n", + " │ └─ [1 2] [id L]\n", + " └─ All{axes=None} [id T]\n", + " └─ MakeVector{dtype='bool'} [id U]\n", + " └─ All{axes=None} [id V]\n", + " └─ Gt [id W]\n", + " ├─ [1 2] [id L]\n", + " └─ ExpandDims{axis=0} [id X]\n", + " └─ 0 [id Y]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 39, @@ -1527,38 +1522,38 @@ "output_type": "stream", "text": [ "Check{sigma > 0} [id A] 'z_logprob'\n", - " |Elemwise{sub,no_inplace} [id B]\n", - " | |Elemwise{sub,no_inplace} [id C]\n", - " | | |Elemwise{mul,no_inplace} [id D]\n", - " | | | |InplaceDimShuffle{x} [id E]\n", - " | | | | |TensorConstant{-0.5} [id F]\n", - " | | | |Elemwise{pow,no_inplace} [id G]\n", - " | | | |Elemwise{true_div,no_inplace} [id H]\n", - " | | | | |Elemwise{sub,no_inplace} [id I]\n", - " | | | | | |z [id J]\n", - " | | | | | |TensorConstant{(2,) of 0} [id K]\n", - " | | | | |TensorConstant{[1. 2.]} [id L]\n", - " | | | |InplaceDimShuffle{x} [id M]\n", - " | | | |TensorConstant{2} [id N]\n", - " | | |InplaceDimShuffle{x} [id O]\n", - " | | |Elemwise{log,no_inplace} [id P]\n", - " | | |Elemwise{sqrt,no_inplace} [id Q]\n", - " | | |TensorConstant{6.283185307179586} [id R]\n", - " | |Elemwise{log,no_inplace} [id S]\n", - " | |TensorConstant{[1. 
2.]} [id L]\n", - " |All [id T]\n", - " |MakeVector{dtype='bool'} [id U]\n", - " |All [id V]\n", - " |Elemwise{gt,no_inplace} [id W]\n", - " |TensorConstant{[1. 2.]} [id L]\n", - " |InplaceDimShuffle{x} [id X]\n", - " |TensorConstant{0} [id Y]\n" + " ├─ Sub [id B]\n", + " │ ├─ Sub [id C]\n", + " │ │ ├─ Mul [id D]\n", + " │ │ │ ├─ ExpandDims{axis=0} [id E]\n", + " │ │ │ │ └─ -0.5 [id F]\n", + " │ │ │ └─ Pow [id G]\n", + " │ │ │ ├─ True_div [id H]\n", + " │ │ │ │ ├─ Sub [id I]\n", + " │ │ │ │ │ ├─ z [id J]\n", + " │ │ │ │ │ └─ [0 0] [id K]\n", + " │ │ │ │ └─ [1 2] [id L]\n", + " │ │ │ └─ ExpandDims{axis=0} [id M]\n", + " │ │ │ └─ 2 [id N]\n", + " │ │ └─ ExpandDims{axis=0} [id O]\n", + " │ │ └─ Log [id P]\n", + " │ │ └─ Sqrt [id Q]\n", + " │ │ └─ 6.283185307179586 [id R]\n", + " │ └─ Log [id S]\n", + " │ └─ [1 2] [id L]\n", + " └─ All{axes=None} [id T]\n", + " └─ MakeVector{dtype='bool'} [id U]\n", + " └─ All{axes=None} [id V]\n", + " └─ Gt [id W]\n", + " ├─ [1 2] [id L]\n", + " └─ ExpandDims{axis=0} [id X]\n", + " └─ 0 [id Y]\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1654,7 +1649,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 46, @@ -1677,7 +1672,7 @@ { "data": { "text/plain": [ - "array([-1.41907251, -1.01111034, -0.16152042])" + "array([-0.19787136, 0.50478153, -0.1464596 ])" ] }, "execution_count": 47, @@ -1747,7 +1742,9 @@ { "data": { "text/plain": [ - "{mu ~ N(0, 2): mu, sigma ~ N**+(0, 3): sigma_log__, x ~ N(mu, sigma): x}" + "{mu ~ Normal(0, 2): mu,\n", + " sigma ~ HalfNormal(0, 3): sigma_log__,\n", + " x ~ Normal(mu, sigma): x}" ] }, "execution_count": 50, @@ -1803,7 +1800,7 @@ { "data": { "text/plain": [ - "array([ -1.61208571, -11.32440364, 9.08106147])" + "array([ -1.61208572, -11.32440366, 9.08106147])" ] }, "execution_count": 52, @@ -1883,7 +1880,7 @@ { "data": { "text/plain": [ - "[array(-1.61208571), array(-11.32440364), array(9.08106147)]" + "[array(-1.61208572), array(-11.32440366), 
array(9.08106147)]" ] }, "execution_count": 54, @@ -1920,21 +1917,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "Last updated: Tue Dec 06 2022\n", + "Last updated: Wed Jun 19 2024\n", "\n", "Python implementation: CPython\n", - "Python version : 3.11.0\n", - "IPython version : 8.7.0\n", + "Python version : 3.11.8\n", + "IPython version : 8.22.2\n", "\n", - "pytensor: 2.8.10\n", + "pytensor: 2.20.0+3.g66439d283.dirty\n", "\n", - "numpy : 1.23.4\n", - "scipy : 1.9.3\n", - "pymc : 4.4.0+207.g7c3068a1c\n", - "pytensor : 2.8.10\n", - "matplotlib: 3.6.2\n", + "pymc : 5.15.0+1.g58927d608\n", + "matplotlib: 3.8.3\n", + "scipy : 1.12.0\n", + "numpy : 1.26.4\n", + "pytensor : 2.20.0+3.g66439d283.dirty\n", "\n", - "Watermark: 2.3.1\n", + "Watermark: 2.4.3\n", "\n" ] } @@ -1976,9 +1973,9 @@ }, "hide_input": false, "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "pymc", "language": "python", - "name": "python3" + "name": "pymc" }, "language_info": { "codemirror_mode": { @@ -1990,7 +1987,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.11.8" }, "toc": { "base_numbering": 1, @@ -2012,5 +2009,5 @@ } }, "nbformat": 4, - "nbformat_minor": 1 + "nbformat_minor": 4 } diff --git a/pymc/distributions/censored.py b/pymc/distributions/censored.py index 14963d0517..0d33f06b39 100644 --- a/pymc/distributions/censored.py +++ b/pymc/distributions/censored.py @@ -36,7 +36,7 @@ class CensoredRV(SymbolicRandomVariable): """Censored random variable""" inline_logprob = True - signature = "(),(),()->()" + extended_signature = "(),(),()->()" _print_name = ("Censored", "\\operatorname{Censored}") @classmethod diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py index 99b56da019..a66c4e04e2 100644 --- a/pymc/distributions/continuous.py +++ b/pymc/distributions/continuous.py @@ -30,7 +30,8 @@ from pytensor.graph.op import Op from pytensor.raise_op import Assert 
from pytensor.tensor import gamma as gammafn -from pytensor.tensor import gammaln +from pytensor.tensor import gammaln, get_underlying_scalar_constant_value +from pytensor.tensor.exceptions import NotScalarConstantError from pytensor.tensor.extra_ops import broadcast_shape from pytensor.tensor.math import betaincinv, gammaincinv, tanh from pytensor.tensor.random.basic import ( @@ -182,16 +183,20 @@ def transform_params(*args): upper = args[bound_args_indices[1]] if lower is not None: - if isinstance(lower, TensorConstant) and np.all(lower.value == -np.inf): - lower = None - else: - lower = pt.as_tensor_variable(lower) + lower = pt.as_tensor_variable(lower) + try: + if get_underlying_scalar_constant_value(lower) == -np.inf: + lower = None + except NotScalarConstantError: + pass if upper is not None: - if isinstance(upper, TensorConstant) and np.all(upper.value == np.inf): - upper = None - else: - upper = pt.as_tensor_variable(upper) + upper = pt.as_tensor_variable(upper) + try: + if get_underlying_scalar_constant_value(upper) == np.inf: + upper = None + except NotScalarConstantError: + pass return lower, upper @@ -294,7 +299,7 @@ class Uniform(BoundedContinuous): """ rv_op = uniform - bound_args_indices = (3, 4) # Lower, Upper + bound_args_indices = (2, 3) # Lower, Upper @classmethod def dist(cls, lower=0, upper=1, **kwargs): @@ -352,8 +357,7 @@ def uniform_default_transform(op, rv): class FlatRV(RandomVariable): name = "flat" - ndim_supp = 0 - ndims_params = [] + signature = "->()" dtype = "floatX" _print_name = ("Flat", "\\operatorname{Flat}") @@ -379,7 +383,7 @@ def dist(cls, **kwargs): return res def support_point(rv, size): - return pt.zeros(size) + return pt.zeros(() if rv_size_is_none(size) else size) def logp(value): return pt.zeros_like(value) @@ -392,8 +396,7 @@ def logcdf(value): class HalfFlatRV(RandomVariable): name = "half_flat" - ndim_supp = 0 - ndims_params = [] + signature = "->()" dtype = "floatX" _print_name = ("HalfFlat", 
"\\operatorname{HalfFlat}") @@ -416,7 +419,7 @@ def dist(cls, **kwargs): return res def support_point(rv, size): - return pt.ones(size) + return pt.ones(() if rv_size_is_none(size) else size) def logp(value): return pt.switch(pt.lt(value, 0), -np.inf, pt.zeros_like(value)) @@ -537,8 +540,7 @@ def icdf(value, mu, sigma): class TruncatedNormalRV(RandomVariable): name = "truncated_normal" - ndim_supp = 0 - ndims_params = [0, 0, 0, 0] + signature = "(),(),(),()->()" dtype = "floatX" _print_name = ("TruncatedNormal", "\\operatorname{TruncatedNormal}") @@ -645,7 +647,7 @@ class TruncatedNormal(BoundedContinuous): """ rv_op = truncated_normal - bound_args_indices = (5, 6) # indexes for lower and upper args + bound_args_indices = (4, 5) # indexes for lower and upper args @classmethod def dist( @@ -872,8 +874,7 @@ def icdf(value, loc, sigma): class WaldRV(RandomVariable): name = "wald" - ndim_supp = 0 - ndims_params = [0, 0, 0] + signature = "(),(),()->()" dtype = "floatX" _print_name = ("Wald", "\\operatorname{Wald}") @@ -1229,7 +1230,7 @@ def icdf(value, alpha, beta): class KumaraswamyRV(SymbolicRandomVariable): name = "kumaraswamy" - signature = "[rng],[size],(),()->[rng],()" + extended_signature = "[rng],[size],(),()->[rng],()" _print_name = ("Kumaraswamy", "\\operatorname{Kumaraswamy}") @classmethod @@ -1532,7 +1533,7 @@ def icdf(value, mu, b): class AsymmetricLaplaceRV(SymbolicRandomVariable): name = "asymmetriclaplace" - signature = "[rng],[size],(),(),()->[rng],()" + extended_signature = "[rng],[size],(),(),()->[rng],()" _print_name = ("AsymmetricLaplace", "\\operatorname{AsymmetricLaplace}") @classmethod @@ -1900,8 +1901,7 @@ def icdf(value, nu, mu, sigma): class SkewStudentTRV(RandomVariable): name = "skewstudentt" - ndim_supp = 0 - ndims_params = [0, 0, 0, 0] + signature = "(),(),(),()->()" dtype = "floatX" _print_name = ("SkewStudentT", "\\operatorname{SkewStudentT}") @@ -2078,7 +2078,7 @@ class Pareto(BoundedContinuous): """ rv_op = pareto - bound_args_indices 
= (4, None) # lower-bounded by `m` + bound_args_indices = (3, None) # lower-bounded by `m` @classmethod def dist(cls, alpha, m, **kwargs): @@ -2611,8 +2611,7 @@ def dist(cls, nu, **kwargs): class WeibullBetaRV(RandomVariable): name = "weibull" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "floatX" _print_name = ("Weibull", "\\operatorname{Weibull}") @@ -2734,7 +2733,7 @@ def icdf(value, alpha, beta): class HalfStudentTRV(SymbolicRandomVariable): name = "halfstudentt" - signature = "[rng],[size],(),()->[rng],()" + extended_signature = "[rng],[size],(),()->[rng],()" _print_name = ("HalfStudentT", "\\operatorname{HalfStudentT}") @classmethod @@ -2848,7 +2847,7 @@ def logp(value, nu, sigma): class ExGaussianRV(SymbolicRandomVariable): name = "exgaussian" - signature = "[rng],[size],(),(),()->[rng],()" + extended_signature = "[rng],[size],(),(),()->[rng],()" _print_name = ("ExGaussian", "\\operatorname{ExGaussian}") @classmethod @@ -3066,8 +3065,7 @@ def logp(value, mu, kappa): class SkewNormalRV(RandomVariable): name = "skewnormal" - ndim_supp = 0 - ndims_params = [0, 0, 0] + signature = "(),(),()->()" dtype = "floatX" _print_name = ("SkewNormal", "\\operatorname{SkewNormal}") @@ -3233,7 +3231,7 @@ class Triangular(BoundedContinuous): """ rv_op = triangular - bound_args_indices = (3, 5) # lower, upper + bound_args_indices = (2, 4) # lower, upper @classmethod def dist(cls, lower=0, upper=1, c=0.5, *args, **kwargs): @@ -3404,8 +3402,7 @@ def icdf(value, mu, beta): class RiceRV(RandomVariable): name = "rice" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "floatX" _print_name = ("Rice", "\\operatorname{Rice}") @@ -3622,7 +3619,7 @@ def icdf(value, mu, s): class LogitNormalRV(SymbolicRandomVariable): name = "logit_normal" - signature = "[rng],[size],(),()->[rng],()" + extended_signature = "[rng],[size],(),()->[rng],()" _print_name = ("logitNormal", "\\operatorname{logitNormal}") @classmethod @@ -3724,6 +3721,15 @@ def 
logp(value, mu, sigma): def _interpolated_argcdf(p, pdf, cdf, x): + if np.prod(cdf.shape[:-1]) != 1 or np.prod(pdf.shape[:-1]) != 1 or np.prod(x.shape[:-1]) != 1: + raise NotImplementedError( + "Function not implemented for batched points. " + "Open an issue in https://github.com/pymc-devs/pymc if you need this functionality" + ) + cdf = cdf.squeeze(tuple(range(cdf.ndim - 1))) + pdf = pdf.squeeze(tuple(range(pdf.ndim - 1))) + x = x.squeeze(tuple(range(x.ndim - 1))) + index = np.searchsorted(cdf, p) - 1 slope = (pdf[index + 1] - pdf[index]) / (x[index + 1] - x[index]) @@ -3745,8 +3751,7 @@ def _interpolated_argcdf(p, pdf, cdf, x): class InterpolatedRV(RandomVariable): name = "interpolated" - ndim_supp = 0 - ndims_params = [1, 1, 1] + signature = "(x),(x),(x)->()" dtype = "floatX" _print_name = ("Interpolated", "\\operatorname{Interpolated}") @@ -3836,7 +3841,9 @@ def support_point(rv, size, x_points, pdf_points, cdf_points): Estimates the expectation integral using the trapezoid rule; cdf_points are not used. """ x_fx = pt.mul(x_points, pdf_points) # x_i * f(x_i) for all xi's in x_points - support_point = pt.sum(pt.mul(pt.diff(x_points), x_fx[1:] + x_fx[:-1])) / 2 + support_point = ( + pt.sum(pt.mul(pt.diff(x_points, axis=-1), x_fx[..., 1:] + x_fx[..., :-1])) / 2 + ) if not rv_size_is_none(size): support_point = pt.full(size, support_point) @@ -3847,7 +3854,7 @@ def logp(value, x_points, pdf_points, cdf_points): # x_points and pdf_points are expected to be non-symbolic arrays wrapped # within a tensor.constant. 
We use the .data method to retrieve them interp = InterpolatedUnivariateSpline(x_points.data, pdf_points.data, k=1, ext="zeros") - Z = interp.integral(x_points.data[0], x_points.data[-1]) + Z = interp.integral(x_points.data[..., 0], x_points.data[..., -1]) # interp and Z are converted to symbolic variables here interp_op = SplineWrapper(interp) @@ -3859,16 +3866,15 @@ def logp(value, x_points, pdf_points, cdf_points): @_default_transform.register(Interpolated) def interpolated_default_transform(op, rv): def transform_params(*params): - _, _, _, x_points, _, _ = params - return x_points[0], x_points[-1] + _, _, x_points, _, _ = params + return x_points[..., 0], x_points[..., -1] return transforms.Interval(bounds_fn=transform_params) class MoyalRV(RandomVariable): name = "moyal" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "floatX" _print_name = ("Moyal", "\\operatorname{Moyal}") @@ -3977,8 +3983,7 @@ class PolyaGammaRV(RandomVariable): """Polya-Gamma random variable.""" name = "polyagamma" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "floatX" _print_name = ("PG", "\\operatorname{PG}") @@ -3992,14 +3997,7 @@ def rng_fn(cls, rng, h, z, size=None) -> np.ndarray: Parameters ---------- - rng : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator} - A seed to initialize the random number generator. If None, then fresh, - unpredictable entropy will be pulled from the OS. If an ``int`` or - ``array_like[ints]`` is passed, then it will be passed to - `SeedSequence` to derive the initial `BitGenerator` state. One may also - pass in a `SeedSequence` instance. - Additionally, when passed a `BitGenerator`, it will be wrapped by - `Generator`. If passed a `Generator`, it will be returned unaltered. + rng : Generator h : scalar or sequence The shape parameter of the distribution. 
z : scalar or sequence @@ -4012,10 +4010,11 @@ def rng_fn(cls, rng, h, z, size=None) -> np.ndarray: to the largest integer smaller than its value (e.g (2.1, 1) -> (2, 1)). This parameter only applies if `h` and `z` are scalars. """ - # handle the kind of rng passed to the sampler - bg = rng._bit_generator if isinstance(rng, np.random.RandomState) else rng + # random_polyagamma needs explicit size to work correctly + if size is None: + size = np.broadcast_shapes(h.shape, z.shape) return np.asarray( - random_polyagamma(h, z, size=size, random_state=bg).astype(pytensor.config.floatX) + random_polyagamma(h, z, size=size, random_state=rng).astype(pytensor.config.floatX) ) diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py index 958c9785e9..caa957326a 100644 --- a/pymc/distributions/discrete.py +++ b/pymc/distributions/discrete.py @@ -392,7 +392,7 @@ def logcdf(value, p): class DiscreteWeibullRV(SymbolicRandomVariable): name = "discrete_weibull" - signature = "[rng],[size],(),()->[rng],()" + extended_signature = "[rng],[size],(),()->[rng],()" _print_name = ("dWeibull", "\\operatorname{dWeibull}") @classmethod @@ -971,8 +971,7 @@ def logcdf(value, good, bad, n): class DiscreteUniformRV(ScipyRandomVariable): name = "discrete_uniform" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "int64" _print_name = ("DiscreteUniform", "\\operatorname{DiscreteUniform}") @@ -1158,23 +1157,18 @@ def support_point(rv, size, p): def logp(value, p): k = pt.shape(p)[-1] - p_ = p value_clip = pt.clip(value, 0, k - 1) - if p.ndim > 1: - if p.ndim > value_clip.ndim: - value_clip = pt.shape_padleft(value_clip, p_.ndim - value_clip.ndim) - elif p.ndim < value_clip.ndim: - p = pt.shape_padleft(p, value_clip.ndim - p_.ndim) - pattern = (p.ndim - 1, *range(p.ndim - 1)) - a = pt.log( - pt.take_along_axis( - p.dimshuffle(pattern), - value_clip, - ) - ) - else: - a = pt.log(p[value_clip]) + # In the standard case p has one more dimension than value + 
dim_diff = p.type.ndim - value.type.ndim + if dim_diff > 1: + # p brodacasts implicitly beyond value + value_clip = pt.shape_padleft(value_clip, dim_diff - 1) + elif dim_diff < 1: + # value broadcasts implicitly beyond p + p = pt.shape_padleft(p, 1 - dim_diff) + + a = pt.log(pt.take_along_axis(p, value_clip[..., None], axis=-1).squeeze(-1)) res = pt.switch( pt.or_(pt.lt(value, 0), pt.gt(value, k - 1)), @@ -1184,8 +1178,8 @@ def logp(value, p): return check_parameters( res, - 0 <= p_, - p_ <= 1, + 0 <= p, + p <= 1, pt.isclose(pt.sum(p, axis=-1), 1), msg="0 <= p <=1, sum(p) = 1", ) diff --git a/pymc/distributions/distribution.py b/pymc/distributions/distribution.py index d4063470fa..e5c2e68684 100644 --- a/pymc/distributions/distribution.py +++ b/pymc/distributions/distribution.py @@ -187,11 +187,14 @@ def _random(*args, **kwargs): if rv_type is not None: # Create dispatch functions - signature = getattr(rv_type, "signature", None) size_idx: int | None = None params_idxs: tuple[int] | None = None - if signature is not None: - _, size_idx, params_idxs = SymbolicRandomVariable.get_idxs(signature) + if issubclass(rv_type, SymbolicRandomVariable): + extended_signature = getattr(rv_type, "extended_signature", None) + if extended_signature is not None: + [_, size_idx, params_idxs], _ = ( + SymbolicRandomVariable.get_input_output_type_idxs(extended_signature) + ) class_change_dist_size = clsdict.get("change_dist_size") if class_change_dist_size: @@ -206,7 +209,7 @@ def change_dist_size(op, rv, new_size, expand): @_logprob.register(rv_type) def logp(op, values, *dist_params, **kwargs): if isinstance(op, RandomVariable): - rng, size, dtype, *dist_params = dist_params + rng, size, *dist_params = dist_params elif params_idxs: dist_params = [dist_params[i] for i in params_idxs] [value] = values @@ -218,7 +221,7 @@ def logp(op, values, *dist_params, **kwargs): @_logcdf.register(rv_type) def logcdf(op, value, *dist_params, **kwargs): if isinstance(op, RandomVariable): - rng, size, 
dtype, *dist_params = dist_params + rng, size, *dist_params = dist_params elif params_idxs: dist_params = [dist_params[i] for i in params_idxs] return class_logcdf(value, *dist_params) @@ -229,7 +232,7 @@ def logcdf(op, value, *dist_params, **kwargs): @_icdf.register(rv_type) def icdf(op, value, *dist_params, **kwargs): if isinstance(op, RandomVariable): - rng, size, dtype, *dist_params = dist_params + rng, size, *dist_params = dist_params elif params_idxs: dist_params = [dist_params[i] for i in params_idxs] return class_icdf(value, *dist_params) @@ -250,7 +253,7 @@ def icdf(op, value, *dist_params, **kwargs): @_support_point.register(rv_type) def support_point(op, rv, *dist_params): if isinstance(op, RandomVariable): - rng, size, dtype, *dist_params = dist_params + rng, size, *dist_params = dist_params return class_support_point(rv, size, *dist_params) elif params_idxs and size_idx is not None: size = dist_params[size_idx] @@ -301,7 +304,7 @@ class SymbolicRandomVariable(OpFromGraph): classmethod `cls.rv_op`, taking care to clone and resize random inputs, if needed. """ - signature: str = None + extended_signature: str = None """Numpy-like vectorized signature of the distribution. It allows tokens [rng], [size] to identify the special inputs. 
@@ -320,28 +323,21 @@ class SymbolicRandomVariable(OpFromGraph): _print_name: tuple[str, str] = ("Unknown", "\\operatorname{Unknown}") """Tuple of (name, latex name) used for for pretty-printing variables of this type""" - @staticmethod - def _parse_signature(signature: str) -> tuple[str, str]: - """Parse signature as if special tokens were vector elements""" - # Regex to split across commas not inside parenthesis - # Copied from https://stackoverflow.com/a/26634150 - fake_signature = signature.replace("[rng]", "(rng)").replace("[size]", "(size)") - return _parse_gufunc_signature(fake_signature) + @_class_or_instancemethod + @property + def signature(cls_or_self) -> None | str: + # Convert "expanded" signature into "vanilla" signature that has no rng and size tokens + extended_signature = cls_or_self.extended_signature + if extended_signature is None: + return None - @staticmethod - def _parse_params_signature(signature): - """Parse the signature of the distribution's parameters, ignoring rng and size tokens.""" + # Remove special tokens special_tokens = r"|".join((r"\[rng\],?", r"\[size\],?")) - params_signature = re.sub(special_tokens, "", signature) + signature = re.sub(special_tokens, "", extended_signature) # Remove dandling commas - params_signature = re.sub(r",(?=[->])|,$", "", params_signature) + signature = re.sub(r",(?=[->])|,$", "", signature) - # Numpy gufunc signature doesn't accept empty inputs - if params_signature.startswith("->"): - # Pretent there was at least one scalar input and then discard that - return [], _parse_gufunc_signature("()" + params_signature)[1] - else: - return _parse_gufunc_signature(params_signature) + return signature @_class_or_instancemethod @property @@ -350,7 +346,7 @@ def ndims_params(cls_or_self) -> Sequence[int] | None: signature = cls_or_self.signature if signature is None: return None - inputs_signature, _ = cls_or_self._parse_params_signature(signature) + inputs_signature, _ = _parse_gufunc_signature(signature) 
return [len(sig) for sig in inputs_signature] @_class_or_instancemethod @@ -363,51 +359,99 @@ def ndim_supp(cls_or_self) -> int | None: signature = cls_or_self.signature if signature is None: return None - _, outputs_params_signature = cls_or_self._parse_params_signature(signature) + _, outputs_params_signature = _parse_gufunc_signature(signature) return max(len(out_sig) for out_sig in outputs_params_signature) + @_class_or_instancemethod + def _parse_extended_signature(cls_or_self) -> tuple[tuple[str, ...], tuple[str, ...]] | None: + extended_signature = cls_or_self.extended_signature + if extended_signature is None: + return None + + fake_signature = extended_signature.replace("[rng]", "(rng)").replace("[size]", "(size)") + return _parse_gufunc_signature(fake_signature) + @_class_or_instancemethod @property def default_output(cls_or_self) -> int | None: - signature = cls_or_self.signature - if signature is None: + extended_signature = cls_or_self.extended_signature + if extended_signature is None: return None - _, outputs_signature = cls_or_self._parse_signature(signature) + _, [_, candidate_default_output] = cls_or_self.get_input_output_type_idxs( + extended_signature + ) - # If there is a single non `[rng]` outputs, that is the default one! 
- candidate_default_output = [ - i for i, out_sig in enumerate(outputs_signature) if out_sig != ("rng",) - ] if len(candidate_default_output) == 1: return candidate_default_output[0] else: return None @staticmethod - def get_idxs(signature: str) -> tuple[tuple[int], int | None, tuple[int]]: - """Parse signature and return indexes for *[rng], [size] and parameters""" - inputs_signature, outputs_signature = SymbolicRandomVariable._parse_signature(signature) - rng_idxs = [] + def get_input_output_type_idxs( + extended_signature: str | None, + ) -> tuple[tuple[tuple[int], int | None, tuple[int]], tuple[tuple[int], tuple[int]]]: + """Parse extended_signature and return indexes for *[rng], [size] and parameters as well as outputs""" + if extended_signature is None: + raise ValueError("extended_signature must be provided") + + fake_signature = extended_signature.replace("[rng]", "(rng)").replace("[size]", "(size)") + inputs_signature, outputs_signature = _parse_gufunc_signature(fake_signature) + + input_rng_idxs = [] size_idx = None - params_idxs = [] + input_params_idxs = [] for i, inp_sig in enumerate(inputs_signature): if inp_sig == ("size",): size_idx = i elif inp_sig == ("rng",): - rng_idxs.append(i) + input_rng_idxs.append(i) else: - params_idxs.append(i) - return tuple(rng_idxs), size_idx, tuple(params_idxs) + input_params_idxs.append(i) + + output_rng_idxs = [] + output_params_idxs = [] + for i, out_sig in enumerate(outputs_signature): + if out_sig == ("rng",): + output_rng_idxs.append(i) + else: + output_params_idxs.append(i) + + return ( + (tuple(input_rng_idxs), size_idx, tuple(input_params_idxs)), + (tuple(output_rng_idxs), tuple(output_params_idxs)), + ) + + def rng_params(self, node) -> tuple[Variable, ...]: + """Extract the rng parameters from the node's inputs""" + [rng_args_idxs, _, _], _ = self.get_input_output_type_idxs(self.extended_signature) + return tuple(node.inputs[i] for i in rng_args_idxs) + + def size_param(self, node) -> Variable | None: + 
"""Extract the size parameter from the node's inputs""" + [_, size_arg_idx, _], _ = self.get_input_output_type_idxs(self.extended_signature) + return node.inputs[size_arg_idx] if size_arg_idx is not None else None + + def dist_params(self, node) -> tuple[Variable, ...]: + """Extract distribution parameters from the node's inputs""" + [_, _, param_args_idxs], _ = self.get_input_output_type_idxs(self.extended_signature) + return tuple(node.inputs[i] for i in param_args_idxs) def __init__( self, *args, + extended_signature: str | None = None, **kwargs, ): """Initialize a SymbolicRandomVariable class.""" + if extended_signature is not None: + self.extended_signature = extended_signature + if "signature" in kwargs: - self.signature = kwargs.pop("signature") + self.extended_signature = kwargs.pop("signature") + warnings.warn( + "SymbolicRandomVariables signature argument was renamed to extended_signature." + ) if "ndim_supp" in kwargs: # For backwards compatibility we allow passing ndim_supp without signature @@ -437,26 +481,25 @@ def batch_ndim(self, node: Apply) -> int: @_change_dist_size.register(SymbolicRandomVariable) -def change_symbolic_rv_size(op, rv, new_size, expand) -> TensorVariable: - if op.signature is None: +def change_symbolic_rv_size(op: SymbolicRandomVariable, rv, new_size, expand) -> TensorVariable: + extended_signature = op.extended_signature + if extended_signature is None: raise NotImplementedError( f"SymbolicRandomVariable {op} without signature requires custom `_change_dist_size` implementation." ) - inputs_signature = op.signature.split("->")[0].split(",") - if "[size]" not in inputs_signature: + + size = op.size_param(rv.owner) + if size is None: raise NotImplementedError( - f"SymbolicRandomVariable {op} without [size] in signature requires custom `_change_dist_size` implementation." + f"SymbolicRandomVariable {op} without [size] in extended_signature requires custom `_change_dist_size` implementation." 
) - size_arg_idx = inputs_signature.index("[size]") - size = rv.owner.inputs[size_arg_idx] + + params = op.dist_params(rv.owner) if expand: new_size = tuple(new_size) + tuple(size) - numerical_inputs = [ - inp for inp, sig in zip(rv.owner.inputs, inputs_signature) if sig not in ("[size]", "[rng]") - ] - return op.rv_op(*numerical_inputs, size=new_size) + return op.rv_op(*params, size=new_size) class Distribution(metaclass=DistributionMeta): @@ -627,10 +670,12 @@ def dist( shape = convert_shape(shape) size = convert_size(size) - # SymbolicRVs don't always have `ndim_supp` until they are created - ndim_supp = getattr(cls.rv_type, "ndim_supp", None) + # `ndim_supp` may be available at the class level or at the instance level + ndim_supp = getattr(cls.rv_op, "ndim_supp", getattr(cls.rv_type, "ndim_supp", None)) if ndim_supp is None: + # Initialize Ops and check the ndim_supp that is now required to exist ndim_supp = cls.rv_op(*dist_params, **kwargs).owner.op.ndim_supp + create_size = find_size(shape=shape, size=size, ndim_supp=ndim_supp) rv_out = cls.rv_op(*dist_params, size=create_size, **kwargs) @@ -774,7 +819,6 @@ def dist( default_support_point, rv_name=class_name, has_fallback=random is not None, - ndim_supp=ndim_supp, ) if random is None: @@ -824,15 +868,15 @@ def rv_op( # Dispatch custom methods @_logprob.register(rv_type) - def custom_dist_logp(op, values, rng, size, dtype, *dist_params, **kwargs): + def custom_dist_logp(op, values, rng, size, *dist_params, **kwargs): return logp(values[0], *dist_params) @_logcdf.register(rv_type) - def custom_dist_logcdf(op, value, rng, size, dtype, *dist_params, **kwargs): + def custom_dist_logcdf(op, value, rng, size, *dist_params, **kwargs): return logcdf(value, *dist_params, **kwargs) @_support_point.register(rv_type) - def custom_dist_support_point(op, rv, rng, size, dtype, *dist_params): + def custom_dist_support_point(op, rv, rng, size, *dist_params): return support_point(rv, size, *dist_params) rv_op = rv_type() @@ 
-895,8 +939,8 @@ def dist( if ndims_params is None: ndims_params = [0] * len(dist_params) signature = safe_signature( - core_inputs=[pt.tensor(shape=(None,) * ndim_param) for ndim_param in ndims_params], - core_outputs=[pt.tensor(shape=(None,) * ndim_supp)], + core_inputs_ndim=ndims_params, + core_outputs_ndim=[ndim_supp], ) return super().dist( @@ -923,7 +967,8 @@ def rv_op( class_name: str, ): size = normalize_size_param(size) - dummy_size_param = size.type() + # If it's NoneConst, just use that as the dummy + dummy_size_param = size.type() if isinstance(size, TensorVariable) else size dummy_dist_params = [dist_param.type() for dist_param in dist_params] with new_or_existing_block_model_access( error_msg_on_access="Model variables cannot be created in the dist function. Use the `.dist` API" @@ -1003,7 +1048,11 @@ def change_custom_dist_size(op, rv, new_size, expand): return new_rv - rngs, rngs_updates = zip(*dummy_updates_dict.items()) + if dummy_updates_dict: + rngs, rngs_updates = zip(*dummy_updates_dict.items()) + else: + rngs, rngs_updates = (), () + inputs = [*dummy_params, *rngs] outputs = [dummy_rv, *rngs_updates] signature = cls._infer_final_signature( @@ -1437,9 +1486,11 @@ def func(*args, **kwargs): return func -def default_support_point(rv, size, *rv_inputs, rv_name=None, has_fallback=False, ndim_supp=0): - if ndim_supp == 0: - return pt.zeros(size, dtype=rv.dtype) +def default_support_point(rv, size, *rv_inputs, rv_name=None, has_fallback=False): + if None not in rv.type.shape: + return pt.zeros(rv.type.shape) + elif rv.owner.op.ndim_supp == 0 and not rv_size_is_none(size): + return pt.zeros(size) elif has_fallback: return pt.zeros_like(rv) else: @@ -1450,24 +1501,26 @@ def default_support_point(rv, size, *rv_inputs, rv_name=None, has_fallback=False ) -class DiracDeltaRV(RandomVariable): +class DiracDeltaRV(SymbolicRandomVariable): name = "diracdelta" - ndim_supp = 0 - ndims_params = [0] + extended_signature = "[size],()->()" _print_name = 
("DiracDelta", "\\operatorname{DiracDelta}") - def make_node(self, rng, size, dtype, c): - c = pt.as_tensor_variable(c) - return super().make_node(rng, size, c.dtype, c) + def do_constant_folding(self, fgraph: "FunctionGraph", node: Apply) -> bool: + # Because the distribution does not have RNGs we have to prevent constant-folding + return False @classmethod - def rng_fn(cls, rng, c, size=None): - if size is None: - return c.copy() - return np.full(size, c) + def rv_op(cls, c, *, size=None, rng=None): + size = normalize_size_param(size) + c = pt.as_tensor(c) + if rv_size_is_none(size): + out = c.copy() + else: + out = pt.full(size, c) -diracdelta = DiracDeltaRV() + return cls(inputs=[size, c], outputs=[out])(size, c) class DiracDelta(Discrete): @@ -1482,7 +1535,8 @@ class DiracDelta(Discrete): that use DiracDelta, such as Mixtures. """ - rv_op = diracdelta + rv_type = DiracDeltaRV + rv_op = DiracDeltaRV.rv_op @classmethod def dist(cls, c, *args, **kwargs): @@ -1598,7 +1652,7 @@ def create_partial_observed_rv( # Make a clone of the observedRV, with a distinct rng so that observed and # unobserved are never treated as equivalent (and mergeable) nodes by pytensor. 
- _, size, _, *inps = observed_rv.owner.inputs + _, size, *inps = observed_rv.owner.inputs observed_rv = observed_rv.owner.op(*inps, size=size) # For all other cases use the more general PartialObservedRV diff --git a/pymc/distributions/mixture.py b/pymc/distributions/mixture.py index 1b3e1c57bb..667ac5e693 100644 --- a/pymc/distributions/mixture.py +++ b/pymc/distributions/mixture.py @@ -134,15 +134,15 @@ def rv_op(cls, weights, *components, size=None): s = ",".join(f"s{i}" for i in range(components[0].owner.op.ndim_supp)) if len(components) == 1: comp_s = ",".join((*s, "w")) - signature = f"[rng],(w),({comp_s})->[rng],({s})" + extended_signature = f"[rng],(w),({comp_s})->[rng],({s})" else: comps_s = ",".join(f"({s})" for _ in components) - signature = f"[rng],(w),{comps_s}->[rng],({s})" + extended_signature = f"[rng],(w),{comps_s}->[rng],({s})" return MarginalMixtureRV( inputs=[mix_indexes_rng, weights, *components], outputs=[mix_indexes_rng_next, mix_out], - signature=signature, + extended_signature=extended_signature, )(mix_indexes_rng, weights, *components) @classmethod diff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py index 359b0743dd..56bd7c5fe3 100644 --- a/pymc/distributions/multivariate.py +++ b/pymc/distributions/multivariate.py @@ -24,11 +24,19 @@ import pytensor.tensor as pt import scipy -from pytensor.graph.basic import Apply, Constant, Variable +from pytensor.graph.basic import Apply, Variable from pytensor.graph.op import Op from pytensor.raise_op import Assert -from pytensor.sparse.basic import sp_sum -from pytensor.tensor import TensorConstant, gammaln, sigmoid +from pytensor.sparse.basic import DenseFromSparse, sp_sum +from pytensor.tensor import ( + TensorConstant, + TensorVariable, + gammaln, + get_underlying_scalar_constant_value, + sigmoid, +) +from pytensor.tensor.elemwise import DimShuffle +from pytensor.tensor.exceptions import NotScalarConstantError from pytensor.tensor.linalg import cholesky, det, eigh, 
solve_triangular, trace from pytensor.tensor.linalg import inv as matrix_inverse from pytensor.tensor.random.basic import dirichlet, multinomial, multivariate_normal @@ -36,7 +44,6 @@ from pytensor.tensor.random.utils import ( broadcast_params, normalize_size_param, - supp_shape_from_ref_param_shape, ) from pytensor.tensor.type import TensorType from scipy import stats @@ -62,7 +69,6 @@ ) from pymc.distributions.shape_utils import ( _change_dist_size, - broadcast_dist_samples_shape, change_dist_size, get_support_shape, implicit_size_from_params, @@ -98,6 +104,15 @@ solve_upper = partial(solve_triangular, lower=False) +def _squeeze_to_ndim(var: TensorVariable | np.ndarray, ndim: int): + squeeze = pt.squeeze if isinstance(var, TensorVariable) else np.squeeze + extra_dims = var.ndim - ndim + if extra_dims: + return squeeze(var, axis=tuple(range(extra_dims))) + else: + return var + + class SimplexContinuous(Continuous): """Base class for simplex continuous distributions""" @@ -280,19 +295,10 @@ def logp(value, mu, cov): class MvStudentTRV(RandomVariable): name = "multivariate_studentt" - ndim_supp = 1 - ndims_params = [0, 1, 2] + signature = "(),(n),(n,n)->(n)" dtype = "floatX" _print_name = ("MvStudentT", "\\operatorname{MvStudentT}") - def _supp_shape_from_params(self, dist_params, param_shapes=None): - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=1, - ) - @classmethod def rng_fn(cls, rng, nu, mu, cov, size): if size is None: @@ -596,7 +602,7 @@ def logp(value, n, p): class DirichletMultinomialRV(SymbolicRandomVariable): name = "dirichlet_multinomial" - signature = "[rng],[size],(),(p)->[rng],(p)" + extended_signature = "[rng],[size],(),(p)->[rng],(p)" _print_name = ("DirichletMultinomial", "\\operatorname{DirichletMultinomial}") @classmethod @@ -802,7 +808,7 @@ class OrderedMultinomial: def __new__(cls, name, *args, compute_p=True, **kwargs): out_rv = 
_OrderedMultinomial(name, *args, **kwargs) if compute_p: - pm.Deterministic(f"{name}_probs", out_rv.owner.inputs[4], dims=kwargs.get("dims")) + pm.Deterministic(f"{name}_probs", out_rv.owner.inputs[-1], dims=kwargs.get("dims")) return out_rv @classmethod @@ -861,23 +867,14 @@ def __str__(self): class WishartRV(RandomVariable): name = "wishart" - ndim_supp = 2 - ndims_params = [0, 2] + signature = "(),(p,p)->(p,p)" dtype = "floatX" _print_name = ("Wishart", "\\operatorname{Wishart}") - def _supp_shape_from_params(self, dist_params, param_shapes=None): - # The shape of second parameter `V` defines the shape of the output. - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=1, - ) - @classmethod def rng_fn(cls, rng, nu, V, size): scipy_size = size if size else 1 # Default size for Scipy's wishart.rvs is 1 + V = _squeeze_to_ndim(V, 2) result = stats.wishart.rvs(int(nu), V, size=scipy_size, random_state=rng) if size == (1,): return result[np.newaxis, ...] 
@@ -1094,26 +1091,25 @@ def _lkj_normalizing_constant(eta, n): class _LKJCholeskyCovBaseRV(RandomVariable): name = "_lkjcholeskycovbase" - ndim_supp = 1 - ndims_params = [0, 0, 1] + signature = "(),(),(d)->(n)" dtype = "floatX" _print_name = ("_lkjcholeskycovbase", "\\operatorname{_lkjcholeskycovbase}") - def make_node(self, rng, size, dtype, n, eta, D): + def make_node(self, rng, size, n, eta, D): n = pt.as_tensor_variable(n) - if not n.ndim == 0: - raise ValueError("n must be a scalar (ndim=0).") + if not all(n.type.broadcastable): + raise ValueError("n must be a scalar.") eta = pt.as_tensor_variable(eta) - if not eta.ndim == 0: - raise ValueError("eta must be a scalar (ndim=0).") + if not all(eta.type.broadcastable): + raise ValueError("eta must be a scalar.") D = pt.as_tensor_variable(D) - return super().make_node(rng, size, dtype, n, eta, D) + return super().make_node(rng, size, n, eta, D) def _supp_shape_from_params(self, dist_params, param_shapes): - n = dist_params[0] + n = dist_params[0].squeeze() return ((n * (n + 1)) // 2,) def rng_fn(self, rng, n, eta, D, size): @@ -1122,6 +1118,9 @@ def rng_fn(self, rng, n, eta, D, size): size = D.shape[:-1] flat_size = np.prod(size).astype(int) + n = n.squeeze() + eta = eta.squeeze() + C = LKJCorrRV._random_corr_matrix(rng=rng, n=n, eta=eta, flat_size=flat_size) D = D.reshape(flat_size, n) C *= D[..., :, np.newaxis] * D[..., np.newaxis, :] @@ -1144,7 +1143,7 @@ def rng_fn(self, rng, n, eta, D, size): # _LKJCholeskyCovBaseRV requires a properly shaped `D`, which means the variable can't # be safely resized. 
Because of this, we add the thin SymbolicRandomVariable wrapper class _LKJCholeskyCovRV(SymbolicRandomVariable): - signature = "[rng],(),(),(n)->[rng],(n)" + extended_signature = "[rng],(),(),(n)->[rng],(n)" _print_name = ("_lkjcholeskycov", "\\operatorname{_lkjcholeskycov}") @classmethod @@ -1256,13 +1255,15 @@ def _LKJCholeksyCovRV_logp(op, values, rng, n, eta, sd_dist, **kwargs): det_invjac = det_invjac.sum() # TODO: _lkj_normalizing_constant currently requires `eta` and `n` to be constants - if not isinstance(n, Constant): + try: + n = int(get_underlying_scalar_constant_value(n)) + except NotScalarConstantError: raise NotImplementedError("logp only implemented for constant `n`") - n = int(n.data) - if not isinstance(eta, Constant): + try: + eta = float(get_underlying_scalar_constant_value(eta)) + except NotScalarConstantError: raise NotImplementedError("logp only implemented for constant `eta`") - eta = float(eta.data) norm = _lkj_normalizing_constant(eta, n) @@ -1445,24 +1446,23 @@ def helper_deterministics(cls, n, packed_chol): class LKJCorrRV(RandomVariable): name = "lkjcorr" - ndim_supp = 1 - ndims_params = [0, 0] + signature = "(),()->(n)" dtype = "floatX" _print_name = ("LKJCorrRV", "\\operatorname{LKJCorrRV}") - def make_node(self, rng, size, dtype, n, eta): + def make_node(self, rng, size, n, eta): n = pt.as_tensor_variable(n) - if not n.ndim == 0: - raise ValueError("n must be a scalar (ndim=0).") + if not all(n.type.broadcastable): + raise ValueError("n must be a scalar.") eta = pt.as_tensor_variable(eta) - if not eta.ndim == 0: - raise ValueError("eta must be a scalar (ndim=0).") + if not all(eta.type.broadcastable): + raise ValueError("eta must be a scalar.") - return super().make_node(rng, size, dtype, n, eta) + return super().make_node(rng, size, n, eta) def _supp_shape_from_params(self, dist_params, **kwargs): - n = dist_params[0] + n = dist_params[0].squeeze() dist_shape = ((n * (n - 1)) // 2,) return dist_shape @@ -1472,8 +1472,10 @@ def 
rng_fn(cls, rng, n, eta, size): if size is None: flat_size = 1 else: - flat_size = np.prod(size) + flat_size = np.prod(size).astype(int) + n = n.squeeze() + eta = eta.squeeze() C = cls._random_corr_matrix(rng=rng, n=n, eta=eta, flat_size=flat_size) triu_idx = np.triu_indices(n, k=1) @@ -1550,10 +1552,11 @@ def logp(value, n, eta): # TODO: PyTensor does not have a `triu_indices`, so we can only work with constant # n (or else find a different expression) - if not isinstance(n, Constant): + try: + n = int(get_underlying_scalar_constant_value(n)) + except NotScalarConstantError: raise NotImplementedError("logp only implemented for constant `n`") - n = int(n.data) shape = n * (n - 1) // 2 tri_index = np.zeros((n, n), dtype="int32") tri_index[np.triu_indices(n, k=1)] = np.arange(shape) @@ -1563,9 +1566,10 @@ def logp(value, n, eta): value = pt.fill_diagonal(value, 1) # TODO: _lkj_normalizing_constant currently requires `eta` and `n` to be constants - if not isinstance(eta, Constant): + try: + eta = float(get_underlying_scalar_constant_value(eta)) + except NotScalarConstantError: raise NotImplementedError("logp only implemented for constant `eta`") - eta = float(eta.data) result = _lkj_normalizing_constant(eta, n) result += (eta - 1.0) * pt.log(det(value)) return check_parameters( @@ -1670,38 +1674,18 @@ def vec_to_corr_mat(cls, vec, n): class MatrixNormalRV(RandomVariable): name = "matrixnormal" - ndim_supp = 2 - ndims_params = [2, 2, 2] + signature = "(m,n),(m,m),(n,n)->(m,n)" dtype = "floatX" _print_name = ("MatrixNormal", "\\operatorname{MatrixNormal}") - def _supp_shape_from_params(self, dist_params, param_shapes=None): - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=0, - ) - @classmethod def rng_fn(cls, rng, mu, rowchol, colchol, size=None): - size = to_tuple(size) - dist_shape = to_tuple([rowchol.shape[0], colchol.shape[0]]) + if size is None: + size = 
np.broadcast_shapes(mu.shape[:-2], rowchol.shape[:-2], colchol.shape[:-2]) + dist_shape = (rowchol.shape[-2], colchol.shape[-2]) output_shape = size + dist_shape - - # Broadcasting all parameters - shapes = [mu.shape, output_shape] - broadcastable_shape = broadcast_dist_samples_shape(shapes, size=size) - mu = np.broadcast_to(mu, shape=broadcastable_shape) - rowchol = np.broadcast_to(rowchol, shape=size + rowchol.shape[-2:]) - - colchol = np.broadcast_to(colchol, shape=size + colchol.shape[-2:]) - colchol = np.swapaxes(colchol, -1, -2) # Take transpose - standard_normal = rng.standard_normal(output_shape) - samples = mu + np.matmul(rowchol, np.matmul(standard_normal, colchol)) - - return samples + return mu + np.matmul(rowchol, np.matmul(standard_normal, np.swapaxes(colchol, -1, -2))) matrixnormal = MatrixNormalRV() @@ -1892,35 +1876,30 @@ def logp(value, mu, rowchol, colchol): return norm - 0.5 * trquaddist - m * half_collogdet - n * half_rowlogdet -class KroneckerNormalRV(RandomVariable): - name = "kroneckernormal" +class KroneckerNormalRV(SymbolicRandomVariable): ndim_supp = 1 - ndims_params = [1, 0, 2] - dtype = "floatX" _print_name = ("KroneckerNormal", "\\operatorname{KroneckerNormal}") - def _supp_shape_from_params(self, dist_params, param_shapes=None): - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=0, - ) - - def rng_fn(self, rng, mu, sigma, *covs, size=None): - size = size if size else covs[-1] - covs = covs[:-1] if covs[-1] == size else covs - - cov = reduce(scipy.linalg.kron, covs) - - if sigma: - cov = cov + sigma**2 * np.eye(cov.shape[0]) + @classmethod + def rv_op(cls, mu, sigma, *covs, size=None, rng=None): + mu = pt.as_tensor(mu) + sigma = pt.as_tensor(sigma) + covs = [pt.as_tensor(cov) for cov in covs] + rng = normalize_rng_param(rng) + size = normalize_size_param(size) - x = multivariate_normal.rng_fn(rng=rng, mean=mu, cov=cov, size=size) - return x + cov 
= reduce(pt.linalg.kron, covs) + cov = cov + sigma**2 * pt.eye(cov.shape[-2]) + next_rng, draws = multivariate_normal(mean=mu, cov=cov, size=size, rng=rng).owner.outputs + covs_sig = ",".join(f"(a{i},b{i})" for i in range(len(covs))) + extended_signature = f"[rng],[size],(m),(),{covs_sig}->[rng],(m)" -kroneckernormal = KroneckerNormalRV() + return KroneckerNormalRV( + inputs=[rng, size, mu, sigma, *covs], + outputs=[next_rng, draws], + extended_signature=extended_signature, + )(rng, size, mu, sigma, *covs) class KroneckerNormal(Continuous): @@ -2011,7 +1990,8 @@ class KroneckerNormal(Continuous): .. [1] Saatchi, Y. (2011). "Scalable inference for structured Gaussian process models" """ - rv_op = kroneckernormal + rv_type = KroneckerNormalRV + rv_op = KroneckerNormalRV.rv_op @classmethod def dist(cls, mu, covs=None, chols=None, evds=None, sigma=None, *args, **kwargs): @@ -2036,14 +2016,10 @@ def dist(cls, mu, covs=None, chols=None, evds=None, sigma=None, *args, **kwargs) return super().dist([mu, sigma, *covs], **kwargs) - def support_point(rv, size, mu, covs, chols, evds): - mean = mu - if not rv_size_is_none(size): - support_point_size = pt.concatenate([size, mu.shape]) - mean = pt.full(support_point_size, mu) - return mean + def support_point(rv, rng, size, mu, sigma, *covs): + return pt.full_like(rv, mu) - def logp(value, mu, sigma, *covs): + def logp(value, rng, size, mu, sigma, *covs): """ Calculate log-probability of Multivariate Normal distribution with Kronecker-structured covariance at specified value. 
@@ -2093,57 +2069,51 @@ def logp(value, mu, sigma, *covs): class CARRV(RandomVariable): name = "car" - ndim_supp = 1 - ndims_params = [1, 2, 0, 0] + signature = "(m),(m,m),(),(),()->(m)" dtype = "floatX" _print_name = ("CAR", "\\operatorname{CAR}") - def make_node(self, rng, size, dtype, mu, W, alpha, tau): + def make_node(self, rng, size, mu, W, alpha, tau, W_is_valid): mu = pt.as_tensor_variable(mu) - W = pytensor.sparse.as_sparse_or_tensor_variable(W) - if not W.ndim == 2: - raise ValueError("W must be a matrix (ndim=2).") - - sparse = isinstance(W.type, pytensor.sparse.SparseTensorType) - msg = "W must be a symmetric adjacency matrix." - if sparse: - abs_diff = pytensor.sparse.basic.mul(pytensor.sparse.sign(W - W.T), W - W.T) - W = Assert(msg)(W, pt.isclose(pytensor.sparse.sp_sum(abs_diff), 0)) - else: - W = Assert(msg)(W, pt.allclose(W, W.T)) - tau = pt.as_tensor_variable(tau) - alpha = pt.as_tensor_variable(alpha) + W_is_valid = pt.as_tensor_variable(W_is_valid, dtype=bool) - return super().make_node(rng, size, dtype, mu, W, alpha, tau) + if not (W.ndim >= 2 and all(W.type.broadcastable[:-2])): + raise TypeError("W must be a matrix") + if not all(tau.type.broadcastable): + raise TypeError("tau must be a scalar") + if not all(alpha.type.broadcastable): + raise TypeError("alpha must be a scalar") - def _supp_shape_from_params(self, dist_params, param_shapes=None): - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=0, - ) + return super().make_node(rng, size, mu, W, alpha, tau, W_is_valid) @classmethod - def rng_fn(cls, rng: np.random.RandomState, mu, W, alpha, tau, size): + def rng_fn(cls, rng: np.random.RandomState, mu, W, alpha, tau, W_is_valid, size): """ Implementation of algorithm from paper Havard Rue, 2001. "Fast sampling of Gaussian Markov random fields," Journal of the Royal Statistical Society Series B, Royal Statistical Society, vol. 63(2), pages 325-338. 
DOI: 10.1111/1467-9868.00288 """ + if not W_is_valid.all(): + raise ValueError("W must be a valid adjacency matrix") + if np.any(alpha >= 1) or np.any(alpha <= -1): raise ValueError("the domain of alpha is: -1 < alpha < 1") + # TODO: If there are batch dims, even if W was already sparse, + # we will have some expensive dense_from_sparse and sparse_from_dense + # operations that we should avoid. See https://github.com/pymc-devs/pytensor/issues/839 + W = _squeeze_to_ndim(W, 2) if not scipy.sparse.issparse(W): W = scipy.sparse.csr_matrix(W) + tau = scipy.sparse.csr_matrix(_squeeze_to_ndim(tau, 0)) + alpha = scipy.sparse.csr_matrix(_squeeze_to_ndim(alpha, 0)) + s = np.asarray(W.sum(axis=0))[0] D = scipy.sparse.diags(s) - tau = scipy.sparse.csr_matrix(tau) - alpha = scipy.sparse.csr_matrix(alpha) Q = tau.multiply(D - alpha.multiply(W)) @@ -2222,12 +2192,21 @@ class CAR(Continuous): @classmethod def dist(cls, mu, W, alpha, tau, *args, **kwargs): - return super().dist([mu, W, alpha, tau], **kwargs) + # This variable has an expensive validation check, that we want to constant-fold if possible + # So it's passed as an explicit input + W = pytensor.sparse.as_sparse_or_tensor_variable(W) + if isinstance(W.type, pytensor.sparse.SparseTensorType): + abs_diff = pytensor.sparse.basic.mul(pytensor.sparse.sign(W - W.T), W - W.T) + W_is_valid = pt.isclose(pytensor.sparse.sp_sum(abs_diff), 0) + else: + W_is_valid = pt.allclose(W, W.T) + + return super().dist([mu, W, alpha, tau, W_is_valid], **kwargs) - def support_point(rv, size, mu, W, alpha, tau): + def support_point(rv, size, mu, W, alpha, tau, W_is_valid): return pt.full_like(rv, mu) - def logp(value, mu, W, alpha, tau): + def logp(value, mu, W, alpha, tau, W_is_valid): """ Calculate log-probability of a CAR-distributed vector at specified value. 
This log probability function differs from @@ -2244,8 +2223,22 @@ def logp(value, mu, W, alpha, tau): TensorVariable """ - sparse = isinstance(W, pytensor.sparse.SparseConstant | pytensor.sparse.SparseVariable) - + # If expand_dims were added to (a potentially sparse) W, retrieve the non-expanded W + extra_dims = W.type.ndim - 2 + if extra_dims: + if ( + W.owner + and isinstance(W.owner.op, DimShuffle) + and W.owner.op.new_order == (*("x",) * extra_dims, 0, 1) + ): + W = W.owner.inputs[0] + else: + W = pt.squeeze(W, axis=tuple(range(extra_dims))) + + if W.owner and isinstance(W.owner.op, DenseFromSparse): + W = W.owner.inputs[0] + + sparse = isinstance(W, pytensor.sparse.SparseVariable) if sparse: D = sp_sum(W, axis=0) Dinv_sqrt = pt.diag(1 / pt.sqrt(D)) @@ -2261,7 +2254,7 @@ def logp(value, mu, W, alpha, tau): if value.ndim == 1: value = value[None, :] - logtau = d * pt.log(tau).sum() + logtau = d * pt.log(tau).sum(axis=-1) logdet = pt.log(1 - alpha.T * lam[:, None]).sum() delta = value - mu @@ -2277,30 +2270,22 @@ def logp(value, mu, W, alpha, tau): -1 < alpha, alpha < 1, tau > 0, - msg="-1 < alpha < 1, tau > 0", + W_is_valid, + msg="-1 < alpha < 1, tau > 0, W is a symmetric adjacency matrix.", ) class ICARRV(RandomVariable): name = "icar" - ndim_supp = 1 - ndims_params = [2, 1, 1, 0, 0, 0] + signature = "(m,m),(),()->(m)" dtype = "floatX" _print_name = ("ICAR", "\\operatorname{ICAR}") - def __call__(self, W, node1, node2, N, sigma, zero_sum_stdev, size=None, **kwargs): - return super().__call__(W, node1, node2, N, sigma, zero_sum_stdev, size=size, **kwargs) - - def _supp_shape_from_params(self, dist_params, param_shapes=None): - return supp_shape_from_ref_param_shape( - ndim_supp=self.ndim_supp, - dist_params=dist_params, - param_shapes=param_shapes, - ref_param_idx=0, - ) + def __call__(self, W, sigma, zero_sum_stdev, size=None, **kwargs): + return super().__call__(W, sigma, zero_sum_stdev, size=size, **kwargs) @classmethod - def rng_fn(cls, rng, size, W, 
node1, node2, N, sigma, zero_sum_stdev): + def rng_fn(cls, rng, size, W, sigma, zero_sum_stdev): raise NotImplementedError("Cannot sample from ICAR prior") @@ -2396,6 +2381,7 @@ class ICAR(Continuous): @classmethod def dist(cls, W, sigma=1, zero_sum_stdev=0.001, **kwargs): + # Note: These checks are forcing W to be non-symbolic if not W.ndim == 2: raise ValueError("W must be matrix with ndim=2") @@ -2408,6 +2394,16 @@ def dist(cls, W, sigma=1, zero_sum_stdev=0.001, **kwargs): if np.any((W != 0) & (W != 1)): raise ValueError("W must be composed of only 1s and 0s") + W = pt.as_tensor_variable(W, dtype=int) + sigma = pt.as_tensor_variable(sigma) + zero_sum_stdev = pt.as_tensor_variable(zero_sum_stdev) + return super().dist([W, sigma, zero_sum_stdev], **kwargs) + + def support_point(rv, size, W, sigma, zero_sum_stdev): + N = pt.shape(W)[-2] + return pt.zeros(N) + + def logp(value, W, sigma, zero_sum_stdev): # convert adjacency matrix to edgelist representation # An edgelist is a pair of lists. # If node i and node j are connected then one list @@ -2415,26 +2411,9 @@ def dist(cls, W, sigma=1, zero_sum_stdev=0.001, **kwargs): # index value. # We only use the lower triangle here because adjacency # is a undirected connection. 
+ N = pt.shape(W)[-2] + node1, node2 = pt.eq(pt.tril(W), 1).nonzero() - node1, node2 = np.where(np.tril(W) == 1) - - node1 = pt.as_tensor_variable(node1, dtype=int) - node2 = pt.as_tensor_variable(node2, dtype=int) - - W = pt.as_tensor_variable(W, dtype=int) - - N = pt.shape(W)[0] - N = pt.as_tensor_variable(N) - - sigma = pt.as_tensor_variable(sigma) - zero_sum_stdev = pt.as_tensor_variable(zero_sum_stdev) - - return super().dist([W, node1, node2, N, sigma, zero_sum_stdev], **kwargs) - - def support_point(rv, size, W, node1, node2, N, sigma, zero_sum_stdev): - return pt.zeros(N) - - def logp(value, W, node1, node2, N, sigma, zero_sum_stdev): pairwise_difference = (-1 / (2 * sigma**2)) * pt.sum(pt.square(value[node1] - value[node2])) zero_sum = ( -0.5 * pt.pow(pt.sum(value) / (zero_sum_stdev * N), 2) @@ -2447,26 +2426,26 @@ def logp(value, W, node1, node2, N, sigma, zero_sum_stdev): class StickBreakingWeightsRV(RandomVariable): name = "stick_breaking_weights" - ndim_supp = 1 - ndims_params = [0, 0] + signature = "(),()->(k)" dtype = "floatX" _print_name = ("StickBreakingWeights", "\\operatorname{StickBreakingWeights}") - def make_node(self, rng, size, dtype, alpha, K): + def make_node(self, rng, size, alpha, K): alpha = pt.as_tensor_variable(alpha) K = pt.as_tensor_variable(K, dtype=int) - if K.ndim > 0: + if not all(K.type.broadcastable): raise ValueError("K must be a scalar.") - return super().make_node(rng, size, dtype, alpha, K) + return super().make_node(rng, size, alpha, K) def _supp_shape_from_params(self, dist_params, param_shapes): K = dist_params[1] - return (K + 1,) + return (K.squeeze() + 1,) @classmethod def rng_fn(cls, rng, alpha, K, size): + K = K.squeeze() if K < 0: raise ValueError("K needs to be positive.") @@ -2542,6 +2521,7 @@ def dist(cls, alpha, K, *args, **kwargs): return super().dist([alpha, K], **kwargs) def support_point(rv, size, alpha, K): + K = K.squeeze() alpha = alpha[..., np.newaxis] support_point = (alpha / (1 + alpha)) ** 
pt.arange(K) support_point *= 1 / (1 + alpha) @@ -2634,11 +2614,11 @@ def rv_op(cls, sigma, support_shape, *, size=None, rng=None): zerosum_rv -= zerosum_rv.mean(axis=-axis - 1, keepdims=True) support_str = ",".join([f"d{i}" for i in range(n_zerosum_axes)]) - signature = f"[rng],(),(s),[size]->[rng],({support_str})" + extended_signature = f"[rng],(),(s),[size]->[rng],({support_str})" return ZeroSumNormalRV( inputs=[rng, sigma, support_shape, size], outputs=[next_rng, zerosum_rv], - signature=signature, + extended_signature=extended_signature, )(rng, sigma, support_shape, size) diff --git a/pymc/distributions/shape_utils.py b/pymc/distributions/shape_utils.py index 1d0fee588d..220abac80d 100644 --- a/pymc/distributions/shape_utils.py +++ b/pymc/distributions/shape_utils.py @@ -28,18 +28,18 @@ from pytensor import config from pytensor import tensor as pt -from pytensor.graph.basic import Variable +from pytensor.graph.basic import Constant, Variable from pytensor.graph.op import Op, compute_test_value from pytensor.raise_op import Assert from pytensor.tensor.random.op import RandomVariable from pytensor.tensor.shape import SpecifyShape +from pytensor.tensor.type_other import NoneTypeT from pytensor.tensor.variable import TensorVariable from pymc.model import modelcontext from pymc.pytensorf import convert_observed_data __all__ = [ - "broadcast_dist_samples_shape", "to_tuple", "rv_size_is_none", "change_dist_size", @@ -89,86 +89,6 @@ def _check_shape_type(shape): return tuple(out) -def broadcast_dist_samples_shape(shapes, size=None): - """Apply shape broadcasting to shape tuples but assuming that the shapes - correspond to draws from random variables, with the `size` tuple possibly - prepended to it. The `size` prepend is ignored to consider if the supplied - `shapes` can broadcast or not. It is prepended to the resulting broadcasted - `shapes`, if any of the shape tuples had the `size` prepend. 
- - Parameters - ---------- - shapes: Iterable of tuples holding the distribution samples shapes - size: None, int or tuple (optional) - size of the sample set requested. - - Returns - ------- - tuple of the resulting shape - - Examples - -------- - .. code-block:: python - - size = 100 - shape0 = (size,) - shape1 = (size, 5) - shape2 = (size, 4, 5) - out = broadcast_dist_samples_shape([shape0, shape1, shape2], - size=size) - assert out == (size, 4, 5) - - .. code-block:: python - - size = 100 - shape0 = (size,) - shape1 = (5,) - shape2 = (4, 5) - out = broadcast_dist_samples_shape([shape0, shape1, shape2], - size=size) - assert out == (size, 4, 5) - - .. code-block:: python - - size = 100 - shape0 = (1,) - shape1 = (5,) - shape2 = (4, 5) - out = broadcast_dist_samples_shape([shape0, shape1, shape2], - size=size) - assert out == (4, 5) - """ - if size is None: - broadcasted_shape = np.broadcast_shapes(*shapes) - if broadcasted_shape is None: - tmp = ", ".join([f"{s}" for s in shapes]) - raise ValueError(f"Cannot broadcast provided shapes {tmp} given size: {size}") - return broadcasted_shape - shapes = [_check_shape_type(s) for s in shapes] - _size = to_tuple(size) - # samples shapes without the size prepend - sp_shapes = [s[len(_size) :] if _size == s[: min([len(_size), len(s)])] else s for s in shapes] - try: - broadcast_shape = np.broadcast_shapes(*sp_shapes) - except ValueError: - tmp = ", ".join([f"{s}" for s in shapes]) - raise ValueError(f"Cannot broadcast provided shapes {tmp} given size: {size}") - broadcastable_shapes = [] - for shape, sp_shape in zip(shapes, sp_shapes): - if _size == shape[: len(_size)]: - # If size prepends the shape, then we have to add broadcasting axis - # in the middle - p_shape = ( - shape[: len(_size)] - + (1,) * (len(broadcast_shape) - len(sp_shape)) - + shape[len(_size) :] - ) - else: - p_shape = shape - broadcastable_shapes.append(p_shape) - return np.broadcast_shapes(*broadcastable_shapes) - - # User-provided can be lazily 
specified as scalars Shape: TypeAlias = int | TensorVariable | Sequence[int | Variable] Dims: TypeAlias = str | Sequence[str | None] @@ -197,7 +117,7 @@ def convert_dims(dims: Dims | None) -> StrongDims | None: def convert_shape(shape: Shape) -> StrongShape | None: """Process a user-provided shape variable into None or a valid shape object.""" - if shape is None: + if shape is None or (isinstance(shape, Variable) and isinstance(shape.type, NoneTypeT)): return None elif isinstance(shape, int) or (isinstance(shape, TensorVariable) and shape.ndim == 0): shape = (shape,) @@ -215,21 +135,19 @@ def convert_shape(shape: Shape) -> StrongShape | None: def convert_size(size: Size) -> StrongSize | None: """Process a user-provided size variable into None or a valid size object.""" - if size is None: + if size is None or (isinstance(size, Variable) and isinstance(size.type, NoneTypeT)): return None elif isinstance(size, int) or (isinstance(size, TensorVariable) and size.ndim == 0): - size = (size,) + return (size,) elif isinstance(size, TensorVariable) and size.ndim == 1: - size = tuple(size) + return tuple(size) elif isinstance(size, list | tuple): - size = tuple(size) + return tuple(size) else: raise ValueError( f"The `size` parameter must be a tuple, TensorVariable, int or list. Actual: {type(size)}" ) - return size - def shape_from_dims(dims: StrongDims, model) -> StrongShape: """Determines shape from a `dims` tuple. 
@@ -291,11 +209,11 @@ def find_size( return None -def rv_size_is_none(size: Variable | None) -> bool: - """Check whether an rv size is None (ie., pt.Constant([]))""" +def rv_size_is_none(size: TensorVariable | Constant | None) -> bool: + """Check whether an rv size is None (i.e., NoneConst)""" if size is None: return True - return size.type.shape == (0,) # type: ignore [attr-defined] + return isinstance(size.type, NoneTypeT) @singledispatch @@ -350,8 +268,8 @@ def change_dist_size( else: new_size = tuple(new_size) # type: ignore - # TODO: Get rid of unused expand argument - new_dist = _change_dist_size(dist.owner.op, dist, new_size=new_size, expand=expand) + op = dist.owner.op + new_dist = _change_dist_size(op, dist, new_size=new_size, expand=expand) _add_future_warning_tag(new_dist) new_dist.name = dist.name @@ -368,7 +286,7 @@ def change_dist_size( def change_rv_size(op, rv, new_size, expand) -> TensorVariable: # Extract the RV node that is to be resized rv_node = rv.owner - old_rng, old_size, dtype, *dist_params = rv_node.inputs + old_rng, old_size, *dist_params = rv_node.inputs if expand: shape = tuple(rv_node.op._infer_shape(old_size, dist_params)) @@ -379,7 +297,7 @@ def change_rv_size(op, rv, new_size, expand) -> TensorVariable: # to not unnecessarily pick up a `Cast` in some cases (see #4652). 
new_size = pt.as_tensor(new_size, ndim=1, dtype="int64") - new_rv = rv_node.op(*dist_params, size=new_size, dtype=dtype) + new_rv = rv_node.op(*dist_params, size=new_size, dtype=rv.type.dtype) # Replicate "traditional" rng default_update, if that was set for old_rng default_update = getattr(old_rng, "default_update", None) diff --git a/pymc/distributions/simulator.py b/pymc/distributions/simulator.py index fa2d0af08f..02c76e2c6d 100644 --- a/pymc/distributions/simulator.py +++ b/pymc/distributions/simulator.py @@ -20,6 +20,7 @@ from pytensor.graph.op import Apply, Op from pytensor.tensor.random.op import RandomVariable +from pytensor.tensor.utils import safe_signature from pytensor.tensor.variable import TensorVariable from scipy.spatial import cKDTree @@ -39,8 +40,6 @@ class SimulatorRV(RandomVariable): """ name = "SimulatorRV" - ndim_supp = None - ndims_params = None dtype = "floatX" _print_name = ("Simulator", "\\operatorname{Simulator}") @@ -153,7 +152,8 @@ def dist( # type: ignore distance="gaussian", sum_stat="identity", epsilon=1, - ndim_supp=0, + signature=None, + ndim_supp=None, ndims_params=None, dtype="floatX", class_name: str = "Simulator", @@ -199,13 +199,19 @@ def dist( # type: ignore if unnamed_params: raise ValueError("Cannot pass both unnamed parameters and `params`") - # Assume scalar ndims_params - if ndims_params is None: - ndims_params = [0] * len(params) + if signature is None: + # Assume scalar ndims_params + temp_ndims_params = ndims_params if ndims_params is not None else [0] * len(params) + # Assume scalar ndim_supp + temp_ndim_supp = ndim_supp if ndim_supp is not None else 0 + signature = safe_signature( + core_inputs_ndim=temp_ndims_params, core_outputs_ndim=[temp_ndim_supp] + ) return super().dist( params, fn=fn, + signature=signature, ndim_supp=ndim_supp, ndims_params=ndims_params, dtype=dtype, @@ -228,6 +234,7 @@ def rv_op( sum_stat, epsilon, class_name, + signature, **kwargs, ): sim_op = type( @@ -237,6 +244,7 @@ def rv_op( 
name=class_name, ndim_supp=ndim_supp, ndims_params=ndims_params, + signature=signature, dtype=dtype, inplace=False, fn=fn, @@ -250,7 +258,7 @@ def rv_op( @_support_point.register(SimulatorRV) # type: ignore def simulator_support_point(op, rv, *inputs): - sim_inputs = inputs[3:] + sim_inputs = op.dist_params(rv.owner) # Take the mean of 10 draws multiple_sim = rv.owner.op(*sim_inputs, size=pt.concatenate([[10], rv.shape])) return pt.mean(multiple_sim, axis=0) diff --git a/pymc/distributions/timeseries.py b/pymc/distributions/timeseries.py index d48b734ae2..dcac2708e9 100644 --- a/pymc/distributions/timeseries.py +++ b/pymc/distributions/timeseries.py @@ -108,13 +108,15 @@ def rv_op(cls, init_dist, innovation_dist, steps, size=None): innov_supp_dims = [f"d{i}" for i in range(dist_ndim_supp)] innov_supp_str = ",".join(innov_supp_dims) out_supp_str = ",".join(["t", *innov_supp_dims]) - signature = f"({innov_supp_str}),({innov_supp_str}),(s),[rng]->({out_supp_str}),[rng]" + extended_signature = ( + f"({innov_supp_str}),({innov_supp_str}),(s),[rng]->({out_supp_str}),[rng]" + ) return RandomWalkRV( [init_dist, innovation_dist, steps], # We pass steps_ through just so we can keep a reference to it, even though # it's no longer needed at this point [grw], - signature=signature, + extended_signature=extended_signature, )(init_dist, innovation_dist, steps) @@ -419,7 +421,7 @@ def get_dists( class AutoRegressiveRV(SymbolicRandomVariable): """A placeholder used to specify a log-likelihood for an AR sub-graph.""" - signature = "(o),(),(o),(s),[rng]->[rng],(t)" + extended_signature = "(o),(),(o),(s),[rng]->[rng],(t)" ar_order: int constant_term: bool _print_name = ("AR", "\\operatorname{AR}") @@ -713,7 +715,7 @@ def ar_support_point(op, rv, rhos, sigma, init_dist, steps, noise_rng): class GARCH11RV(SymbolicRandomVariable): """A placeholder used to specify a GARCH11 graph.""" - signature = "(),(),(),(),(),(s),[rng]->[rng],(t)" + extended_signature = 
"(),(),(),(),(),(s),[rng]->[rng],(t)" _print_name = ("GARCH11", "\\operatorname{GARCH11}") @classmethod @@ -913,7 +915,7 @@ def step(*prev_args): outputs=[noise_next_rng, sde_out], dt=dt, sde_fn=sde_fn, - signature=f"(),(s),{','.join('()' for _ in sde_pars)},[rng]->[rng],(t)", + extended_signature=f"(),(s),{','.join('()' for _ in sde_pars)},[rng]->[rng],(t)", )(init_dist, steps, *sde_pars, noise_rng) def update(self, node: Node): diff --git a/pymc/distributions/transforms.py b/pymc/distributions/transforms.py index d8998889cf..0c2a43b1f1 100644 --- a/pymc/distributions/transforms.py +++ b/pymc/distributions/transforms.py @@ -216,7 +216,7 @@ class Interval(IntervalTransform): .. code-block:: python - def get_bounds(rng, size, dtype, mu, sigma): + def get_bounds(rng, size, mu, sigma): return 0, None with pm.Model(): @@ -227,7 +227,7 @@ def get_bounds(rng, size, dtype, mu, sigma): .. code-block:: python - def get_bounds(rng, size, dtype, mu, sigma): + def get_bounds(rng, size, mu, sigma): return mu - 1, None interval = pm.distributions.transforms.Interval(bounds_fn=get_bounds) diff --git a/pymc/distributions/truncated.py b/pymc/distributions/truncated.py index 1ef4a1da32..1b7fd7ddce 100644 --- a/pymc/distributions/truncated.py +++ b/pymc/distributions/truncated.py @@ -457,7 +457,7 @@ def truncated_logcdf(op: TruncatedRV, value, *inputs, **kwargs): @_truncated.register(NormalRV) -def _truncated_normal(op, lower, upper, size, rng, old_size, dtype, mu, sigma): +def _truncated_normal(op, lower, upper, size, rng, old_size, mu, sigma): return TruncatedNormal.dist( mu=mu, sigma=sigma, @@ -465,5 +465,5 @@ def _truncated_normal(op, lower, upper, size, rng, old_size, dtype, mu, sigma): upper=upper, rng=None, # Do not reuse rng to avoid weird dependencies size=size, - dtype=dtype, + dtype=op.dtype, ) diff --git a/pymc/logprob/order.py b/pymc/logprob/order.py index fb19370bfc..f15506712f 100644 --- a/pymc/logprob/order.py +++ b/pymc/logprob/order.py @@ -95,8 +95,8 @@ def 
find_measurable_max(fgraph: FunctionGraph, node: Apply) -> list[TensorVariab return None # univariate i.i.d. test which also rules out other distributions - for params in base_var.owner.inputs[3:]: - if params.type.ndim != 0: + for params in base_var.owner.op.dist_params(base_var.owner): + if not all(params.type.broadcastable): return None # Check whether axis covers all dimensions @@ -107,7 +107,7 @@ def find_measurable_max(fgraph: FunctionGraph, node: Apply) -> list[TensorVariab # distinguish measurable discrete and continuous (because logprob is different) measurable_max: Max - if base_var.owner.op.dtype.startswith("int"): + if base_var.type.dtype.startswith("int"): measurable_max = MeasurableMaxDiscrete(list(axis)) else: measurable_max = MeasurableMax(list(axis)) @@ -202,8 +202,8 @@ def find_measurable_max_neg(fgraph: FunctionGraph, node: Apply) -> list[TensorVa return None # univariate i.i.d. test which also rules out other distributions - for params in base_rv.owner.inputs[3:]: - if params.type.ndim != 0: + for params in base_rv.owner.op.dist_params(base_rv.owner): + if not all(params.type.broadcastable): return None # Check whether axis is supported or not @@ -217,7 +217,7 @@ def find_measurable_max_neg(fgraph: FunctionGraph, node: Apply) -> list[TensorVa # distinguish measurable discrete and continuous (because logprob is different) measurable_min: Max - if base_rv.owner.op.dtype.startswith("int"): + if base_rv.type.dtype.startswith("int"): measurable_min = MeasurableDiscreteMaxNeg(list(axis)) else: measurable_min = MeasurableMaxNeg(list(axis)) diff --git a/pymc/logprob/tensor.py b/pymc/logprob/tensor.py index c489ed23ff..4b18b22da8 100644 --- a/pymc/logprob/tensor.py +++ b/pymc/logprob/tensor.py @@ -104,7 +104,7 @@ def naive_bcast_rv_lift(fgraph, node): _, lifted_rv = size_lift_res lifted_node = lifted_rv.owner - rng, size, dtype, *dist_params = lifted_node.inputs + rng, size, *dist_params = lifted_node.inputs new_dist_params = [ pt.broadcast_to( @@ -113,7 
+113,7 @@ def naive_bcast_rv_lift(fgraph, node): ) for param in dist_params ] - bcasted_node = lifted_node.op.make_node(rng, size, dtype, *new_dist_params) + bcasted_node = lifted_node.op.make_node(rng, size, *new_dist_params) if pytensor.config.compute_test_value != "off": compute_test_value(bcasted_node) diff --git a/pymc/model/core.py b/pymc/model/core.py index ea22375d4d..2a57dde280 100644 --- a/pymc/model/core.py +++ b/pymc/model/core.py @@ -1853,7 +1853,7 @@ def first_line(exc): def debug_parameters(rv): if isinstance(rv.owner.op, RandomVariable): - inputs = rv.owner.inputs[3:] + inputs = rv.owner.op.dist_params(rv.owner) else: inputs = [inp for inp in rv.owner.inputs if not isinstance(inp.type, RandomType)] rv_inputs = pytensor.function( diff --git a/pymc/model_graph.py b/pymc/model_graph.py index 2910e49d42..30b79bb194 100644 --- a/pymc/model_graph.py +++ b/pymc/model_graph.py @@ -172,8 +172,8 @@ def _filter_non_parameter_inputs(var): # Don't show shape-related dependencies return [] if isinstance(node.op, RandomVariable): - # Filter out rng, dtype and size parameters or RandomVariable nodes - return node.inputs[3:] + # Filter out rng and size parameters or RandomVariable nodes + return node.op.dist_params(node) else: # Otherwise return all inputs return node.inputs diff --git a/pymc/printing.py b/pymc/printing.py index f1a34c6f95..6695cf38fc 100644 --- a/pymc/printing.py +++ b/pymc/printing.py @@ -20,10 +20,8 @@ from pytensor.tensor.basic import TensorVariable, Variable from pytensor.tensor.elemwise import DimShuffle from pytensor.tensor.random.basic import RandomVariable -from pytensor.tensor.random.var import ( - RandomGeneratorSharedVariable, - RandomStateSharedVariable, -) +from pytensor.tensor.random.type import RandomType +from pytensor.tensor.type_other import NoneTypeT from pymc.model import Model @@ -41,16 +39,18 @@ def str_for_dist( LaTeX or plain, optionally with distribution parameter values included.""" if include_params: - # first 3 args are 
always (rng, size, dtype), rest is relevant for distribution - if isinstance(dist.owner.op, RandomVariable): + if isinstance(dist.owner.op, RandomVariable) or getattr( + dist.owner.op, "extended_signature", None + ): dist_args = [ - _str_for_input_var(x, formatting=formatting) for x in dist.owner.inputs[3:] + _str_for_input_var(x, formatting=formatting) + for x in dist.owner.op.dist_params(dist.owner) ] else: dist_args = [ _str_for_input_var(x, formatting=formatting) for x in dist.owner.inputs - if not isinstance(x, RandomStateSharedVariable | RandomGeneratorSharedVariable) + if not isinstance(x.type, RandomType | NoneTypeT) ] print_name = dist.name diff --git a/pymc/pytensorf.py b/pymc/pytensorf.py index 3bb03f2cca..2d6910fc4c 100644 --- a/pymc/pytensorf.py +++ b/pymc/pytensorf.py @@ -43,10 +43,7 @@ from pytensor.tensor.elemwise import Elemwise from pytensor.tensor.random.op import RandomVariable from pytensor.tensor.random.type import RandomType -from pytensor.tensor.random.var import ( - RandomGeneratorSharedVariable, - RandomStateSharedVariable, -) +from pytensor.tensor.random.var import RandomGeneratorSharedVariable from pytensor.tensor.rewriting.shape import ShapeFeature from pytensor.tensor.sharedvar import SharedVariable, TensorSharedVariable from pytensor.tensor.subtensor import AdvancedIncSubtensor, AdvancedIncSubtensor1 @@ -762,12 +759,10 @@ def largest_common_dtype(tensors): def find_rng_nodes( variables: Iterable[Variable], -) -> list[RandomStateSharedVariable | RandomGeneratorSharedVariable]: - """Return RNG variables in a graph""" +) -> list[RandomGeneratorSharedVariable]: + """Return shared RNG variables in a graph""" return [ - node - for node in graph_inputs(variables) - if isinstance(node, RandomStateSharedVariable | RandomGeneratorSharedVariable) + node for node in graph_inputs(variables) if isinstance(node, RandomGeneratorSharedVariable) ] @@ -784,14 +779,7 @@ def replace_rng_nodes(outputs: Sequence[TensorVariable]) -> list[TensorVariable] 
return outputs graph = FunctionGraph(outputs=outputs, clone=False) - new_rng_nodes: list[np.random.RandomState | np.random.Generator] = [] - for rng_node in rng_nodes: - rng_cls: type - if isinstance(rng_node, pt.random.var.RandomStateSharedVariable): - rng_cls = np.random.RandomState - else: - rng_cls = np.random.Generator - new_rng_nodes.append(pytensor.shared(rng_cls(np.random.PCG64()))) + new_rng_nodes = [pytensor.shared(np.random.Generator(np.random.PCG64())) for _ in rng_nodes] graph.replace_all(zip(rng_nodes, new_rng_nodes), import_missing=True) return cast(list[TensorVariable], graph.outputs) @@ -808,12 +796,7 @@ def reseed_rngs( np.random.PCG64(sub_seed) for sub_seed in np.random.SeedSequence(seed).spawn(len(rngs)) ] for rng, bit_generator in zip(rngs, bit_generators): - new_rng: np.random.RandomState | np.random.Generator - if isinstance(rng, pt.random.var.RandomStateSharedVariable): - new_rng = np.random.RandomState(bit_generator) - else: - new_rng = np.random.Generator(bit_generator) - rng.set_value(new_rng, borrow=True) + rng.set_value(np.random.Generator(bit_generator), borrow=True) def collect_default_updates_inner_fgraph(node: Apply) -> dict[Variable, Variable]: diff --git a/pymc/sampling/forward.py b/pymc/sampling/forward.py index 13696c8c49..c8f08afdd0 100644 --- a/pymc/sampling/forward.py +++ b/pymc/sampling/forward.py @@ -38,10 +38,7 @@ walk, ) from pytensor.graph.fg import FunctionGraph -from pytensor.tensor.random.var import ( - RandomGeneratorSharedVariable, - RandomStateSharedVariable, -) +from pytensor.tensor.random.var import RandomGeneratorSharedVariable from pytensor.tensor.sharedvar import SharedVariable from rich.console import Console from rich.progress import BarColumn, TextColumn, TimeElapsedColumn, TimeRemainingColumn @@ -107,7 +104,7 @@ def compile_forward_sampling_function( compiled function or after inference has been run. 
These variables are: - Variables in the outputs list - - ``SharedVariable`` instances that are not ``RandomStateSharedVariable`` or ``RandomGeneratorSharedVariable``, and whose values changed with respect to what they were at inference time + - ``SharedVariable`` instances that are not ``RandomGeneratorSharedVariable``, and whose values changed with respect to what they were at inference time - Variables that are in the `basic_rvs` list but not in the ``vars_in_trace`` list - Variables that are keys in the ``givens_dict`` - Variables that have volatile inputs @@ -207,7 +204,7 @@ def shared_value_matches(var): or node in givens_dict or ( # SharedVariables, except RandomState/Generators isinstance(node, SharedVariable) - and not isinstance(node, RandomStateSharedVariable | RandomGeneratorSharedVariable) + and not isinstance(node, RandomGeneratorSharedVariable) and not shared_value_matches(node) ) or ( # Basic RVs that are not in the trace diff --git a/pymc/step_methods/metropolis.py b/pymc/step_methods/metropolis.py index a816728cef..d752999ec1 100644 --- a/pymc/step_methods/metropolis.py +++ b/pymc/step_methods/metropolis.py @@ -426,11 +426,11 @@ def competence(var): if isinstance(distribution, CategoricalRV): # TODO: We could compute the initial value of `k` # if we had a model object. - # k_graph = var.owner.inputs[3].shape[-1] + # k_graph = var.owner.inputs[-1].shape[-1] # (k_graph,), _ = rvs_to_value_vars((k_graph,), apply_transforms=True) # k = model.fn(k_graph)(initial_point) try: - k = var.owner.inputs[3].shape[-1].eval() + k = var.owner.inputs[-1].shape[-1].eval() if k == 2: return Competence.COMPATIBLE except MissingInputError: @@ -533,11 +533,11 @@ def competence(var): if isinstance(distribution, CategoricalRV): # TODO: We could compute the initial value of `k` # if we had a model object. 
- # k_graph = var.owner.inputs[3].shape[-1] + # k_graph = var.owner.inputs[-1].shape[-1] # (k_graph,), _ = rvs_to_value_vars((k_graph,), apply_transforms=True) # k = model.fn(k_graph)(initial_point) try: - k = var.owner.inputs[3].shape[-1].eval() + k = var.owner.inputs[-1].shape[-1].eval() if k == 2: return Competence.IDEAL except MissingInputError: @@ -580,7 +580,7 @@ def __init__(self, vars, proposal="uniform", order="random", model=None): distr = getattr(rv_var.owner, "op", None) if isinstance(distr, CategoricalRV): - k_graph = rv_var.owner.inputs[3].shape[-1] + k_graph = rv_var.owner.inputs[-1].shape[-1] (k_graph,) = model.replace_rvs_by_values((k_graph,)) k = model.compile_fn(k_graph, inputs=model.value_vars, on_unused_input="ignore")( initial_point @@ -696,11 +696,11 @@ def competence(var): if isinstance(distribution, CategoricalRV): # TODO: We could compute the initial value of `k` # if we had a model object. - # k_graph = var.owner.inputs[3].shape[-1] + # k_graph = var.owner.inputs[-1].shape[-1] # (k_graph,), _ = rvs_to_value_vars((k_graph,), apply_transforms=True) # k = model.fn(k_graph)(initial_point) try: - k = var.owner.inputs[3].shape[-1].eval() + k = var.owner.inputs[-1].shape[-1].eval() if k > 2: return Competence.IDEAL except MissingInputError: diff --git a/pymc/testing.py b/pymc/testing.py index 57e1697cc7..a2a985581f 100644 --- a/pymc/testing.py +++ b/pymc/testing.py @@ -893,7 +893,7 @@ def test_distribution(self): def get_random_state(self, reset=False): if self.random_state is None or reset: - self.random_state = nr.RandomState(20160911) + self.random_state = nr.default_rng(20160911) return self.random_state def _instantiate_pymc_rv(self, dist_params=None): @@ -912,16 +912,15 @@ def check_pymc_draws_match_reference(self): def check_pymc_params_match_rv_op(self): op = self.pymc_rv.owner.op if isinstance(op, RandomVariable): - _, _, _, *pytensor_dist_inputs = self.pymc_rv.owner.inputs + pytensor_dist_inputs = op.dist_params(self.pymc_rv.owner) 
else: - inputs_signature, _ = op.signature.split("->") - pytensor_dist_inputs = [ - inp - for inp, inp_signature in zip( - self.pymc_rv.owner.inputs, inputs_signature.split(",") - ) - if inp_signature not in ("[rng]", "[size]") - ] + extended_signature = op.extended_signature + if extended_signature is None: + raise NotImplementedError("Op requires extended signature to be tested") + [_, _, dist_params_idxs], _ = op.get_input_output_type_idxs(extended_signature) + dist_inputs = self.pymc_rv.owner.inputs + pytensor_dist_inputs = [dist_inputs[i] for i in dist_params_idxs] + assert len(self.expected_rv_op_params) == len(pytensor_dist_inputs) for (expected_name, expected_value), actual_variable in zip( self.expected_rv_op_params.items(), pytensor_dist_inputs @@ -930,6 +929,9 @@ def check_pymc_params_match_rv_op(self): if isinstance(expected_value, pytensor.tensor.Variable): expected_value = expected_value.eval() + # RVs introduce expand_dims on the parameters, but the tests do not expect this + implicit_expand_dims = actual_variable.type.ndim - np.ndim(expected_value) + actual_variable = actual_variable.squeeze(tuple(range(implicit_expand_dims))) npt.assert_almost_equal(expected_value, actual_variable.eval(), decimal=self.decimal) def check_rv_size(self): @@ -937,18 +939,15 @@ def check_rv_size(self): sizes_to_check = self.sizes_to_check or [None, (), 1, (1,), 5, (4, 5), (2, 4, 2)] sizes_expected = self.sizes_expected or [(), (), (1,), (1,), (5,), (4, 5), (2, 4, 2)] for size, expected in zip(sizes_to_check, sizes_expected): - pymc_rv = self.pymc_dist.dist(**self.pymc_dist_params, size=size) - expected_symbolic = tuple(pymc_rv.shape.eval()) - actual = pymc_rv.eval().shape + rv = self.pymc_dist.dist(**self.pymc_dist_params, size=size) + expected_symbolic = tuple(rv.shape.eval()) + actual = rv.eval().shape assert actual == expected_symbolic assert expected_symbolic == expected, (size, expected_symbolic, expected) # test multi-parameters sampling for univariate 
distributions (with univariate inputs) - if ( - self.pymc_dist.rv_type.ndim_supp == 0 - and self.pymc_dist.rv_type.ndims_params - and sum(self.pymc_dist.rv_type.ndims_params) == 0 - ): + rv_op = rv.owner.op + if rv_op.ndim_supp == 0 and sum(rv_op.ndims_params) == 0: params = { k: p * np.ones(self.repeated_params_shape) for k, p in self.pymc_dist_params.items() } @@ -959,9 +958,9 @@ def check_rv_size(self): (5, self.repeated_params_shape), ] for size, expected in zip(sizes_to_check, sizes_expected): - pymc_rv = self.pymc_dist.dist(**params, size=size) - expected_symbolic = tuple(pymc_rv.shape.eval()) - actual = pymc_rv.eval().shape + rv = self.pymc_dist.dist(**params, size=size) + expected_symbolic = tuple(rv.shape.eval()) + actual = rv.eval().shape assert actual == expected_symbolic == expected def validate_tests_list(self): @@ -975,9 +974,7 @@ def seeded_scipy_distribution_builder(dist_name: str) -> Callable: def seeded_numpy_distribution_builder(dist_name: str) -> Callable: - return lambda self: ft.partial( - getattr(np.random.RandomState, dist_name), self.get_random_state() - ) + return lambda self: getattr(self.get_random_state(), dist_name) def assert_no_rvs(vars: Sequence[Variable]) -> None: diff --git a/requirements-dev.txt b/requirements-dev.txt index 942fc5df16..b3e7370b1d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -17,7 +17,7 @@ numpydoc pandas>=0.24.0 polyagamma pre-commit>=2.8.0 -pytensor>=2.22.1,<2.23 +pytensor>=2.23,<2.24 pytest-cov>=2.5 pytest>=3.0 rich>=13.7.1 diff --git a/requirements.txt b/requirements.txt index 8fd2f6e091..c330cd56dd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ cachetools>=4.2.1 cloudpickle numpy>=1.15.0 pandas>=0.24.0 -pytensor>=2.22.1,<2.23 +pytensor>=2.23,<2.24 rich>=13.7.1 scipy>=1.4.1 threadpoolctl>=3.1.0,<4.0.0 diff --git a/tests/distributions/test_censored.py b/tests/distributions/test_censored.py index 2a4e6481d8..9ce836cfc8 100644 --- a/tests/distributions/test_censored.py +++
b/tests/distributions/test_censored.py @@ -93,8 +93,8 @@ def test_censored_invalid_dist(self): def test_change_dist_size(self): base_dist = pm.Censored.dist(pm.Normal.dist(), -1, 1, size=(3, 2)) - new_dist = change_dist_size(base_dist, (4,)) - assert new_dist.eval().shape == (4,) + new_dist = change_dist_size(base_dist, (4, 1)) + assert new_dist.eval().shape == (4, 1) new_dist = change_dist_size(base_dist, (4,), expand=True) assert new_dist.eval().shape == (4, 3, 2) diff --git a/tests/distributions/test_continuous.py b/tests/distributions/test_continuous.py index 9c9ec3a4ec..e68de7732b 100644 --- a/tests/distributions/test_continuous.py +++ b/tests/distributions/test_continuous.py @@ -84,7 +84,7 @@ def test_upper_bounded(self): with pm.Model() as model: pm.TruncatedNormal(bounded_rv_name, mu=1, sigma=2, lower=None, upper=3) ( - (_, _, _, _, _, lower, upper), + (_, _, _, _, lower, upper), lower_interval, upper_interval, ) = self.get_dist_params_and_interval_bounds(model, bounded_rv_name) @@ -98,7 +98,7 @@ def test_lower_bounded(self): with pm.Model() as model: pm.TruncatedNormal(bounded_rv_name, mu=1, sigma=2, lower=-2, upper=None) ( - (_, _, _, _, _, lower, upper), + (_, _, _, _, lower, upper), lower_interval, upper_interval, ) = self.get_dist_params_and_interval_bounds(model, bounded_rv_name) @@ -118,14 +118,14 @@ def test_lower_bounded_vector(self): upper=None, ) ( - (_, _, _, _, _, lower, upper), + (_, _, _, _, lower, upper), lower_interval, upper_interval, ) = self.get_dist_params_and_interval_bounds(model, bounded_rv_name) - assert np.array_equal(lower.value, [-1, 0]) - assert upper.value == np.inf - assert np.array_equal(lower_interval.value, [-1, 0]) + assert np.array_equal(lower.eval(), [-1, 0]) + assert np.array_equal(upper.eval(), [np.inf]) + assert np.array_equal(lower_interval.eval(), [-1, 0]) assert upper_interval is None def test_lower_bounded_broadcasted(self): @@ -139,14 +139,14 @@ def test_lower_bounded_broadcasted(self): upper=np.array([np.inf, 
np.inf]), ) ( - (_, _, _, _, _, lower, upper), + (_, _, _, _, lower, upper), lower_interval, upper_interval, ) = self.get_dist_params_and_interval_bounds(model, bounded_rv_name) - assert lower.value == -1 - assert np.array_equal(upper.value, [np.inf, np.inf]) - assert lower_interval.value == -1 + assert np.array_equal(lower.eval(), [-1]) + assert np.array_equal(upper.eval(), [np.inf, np.inf]) + assert np.array_equal(lower_interval.eval(), [-1]) assert upper_interval is None @@ -1844,9 +1844,7 @@ def asymmetriclaplace_rng_fn(self, b, kappa, mu, size, uniform_rng_fct): return draws def seeded_asymmetriclaplace_rng_fn(self): - uniform_rng_fct = ft.partial( - getattr(np.random.RandomState, "uniform"), self.get_random_state() - ) + uniform_rng_fct = self.get_random_state().uniform return ft.partial(self.asymmetriclaplace_rng_fn, uniform_rng_fct=uniform_rng_fct) pymc_dist = pm.AsymmetricLaplace @@ -1880,12 +1878,8 @@ def exgaussian_rng_fn(self, mu, sigma, nu, size, normal_rng_fct, exponential_rng return normal_rng_fct(mu, sigma, size=size) + exponential_rng_fct(scale=nu, size=size) def seeded_exgaussian_rng_fn(self): - normal_rng_fct = ft.partial( - getattr(np.random.RandomState, "normal"), self.get_random_state() - ) - exponential_rng_fct = ft.partial( - getattr(np.random.RandomState, "exponential"), self.get_random_state() - ) + normal_rng_fct = self.get_random_state().normal + exponential_rng_fct = self.get_random_state().exponential return ft.partial( self.exgaussian_rng_fn, normal_rng_fct=normal_rng_fct, @@ -1977,9 +1971,7 @@ def kumaraswamy_rng_fn(self, a, b, size, uniform_rng_fct): return (1 - (1 - uniform_rng_fct(size=size)) ** (1 / b)) ** (1 / a) def seeded_kumaraswamy_rng_fn(self): - uniform_rng_fct = ft.partial( - getattr(np.random.RandomState, "uniform"), self.get_random_state() - ) + uniform_rng_fct = self.get_random_state().uniform return ft.partial(self.kumaraswamy_rng_fn, uniform_rng_fct=uniform_rng_fct) pymc_dist = pm.Kumaraswamy @@ -2049,7 +2041,7 @@ 
class TestTruncatedNormalUpperTau(BaseTestDistributionRandom): class TestTruncatedNormalUpperArray(BaseTestDistributionRandom): pymc_dist = pm.TruncatedNormal lower, upper, mu, tau = ( - np.array([-np.inf, -np.inf]), + np.array([-np.inf]), np.array([3, 2]), np.array([0, 0]), np.array( @@ -2416,9 +2408,7 @@ def weibull_rng_fn(self, size, alpha, beta, std_weibull_rng_fct): return beta * std_weibull_rng_fct(alpha, size=size) def seeded_weibul_rng_fn(self): - std_weibull_rng_fct = ft.partial( - getattr(np.random.RandomState, "weibull"), self.get_random_state() - ) + std_weibull_rng_fct = self.get_random_state().weibull return ft.partial(self.weibull_rng_fn, std_weibull_rng_fct=std_weibull_rng_fct) pymc_dist = pm.Weibull diff --git a/tests/distributions/test_discrete.py b/tests/distributions/test_discrete.py index d81dd1d228..ecf86370e2 100644 --- a/tests/distributions/test_discrete.py +++ b/tests/distributions/test_discrete.py @@ -374,6 +374,37 @@ def test_categorical(self, n): lambda value, p: categorical_logpdf(value, p), ) + def test_categorical_logp_batch_dims(self): + # Core case + p = np.array([0.2, 0.3, 0.5]) + value = np.array(2.0) + logp_expr = logp(pm.Categorical.dist(p=p, shape=value.shape), value) + assert logp_expr.type.ndim == 0 + np.testing.assert_allclose(logp_expr.eval(), np.log(0.5)) + + # Explicit batched value broadcasts p + bcast_p = p[None] # shape (1, 3) + batch_value = np.array([0, 1]) # shape(3,) + logp_expr = logp(pm.Categorical.dist(p=bcast_p, shape=batch_value.shape), batch_value) + assert logp_expr.type.ndim == 1 + np.testing.assert_allclose(logp_expr.eval(), np.log([0.2, 0.3])) + + # Explicit batched value and batched p + batch_p = np.array([p[::-1], p]) + logp_expr = logp(pm.Categorical.dist(p=batch_p, shape=batch_value.shape), batch_value) + assert logp_expr.type.ndim == 1 + np.testing.assert_allclose(logp_expr.eval(), np.log([0.5, 0.3])) + + # Implicit batch value broadcasts p + logp_expr = logp(pm.Categorical.dist(p=p, shape=()), 
batch_value) + assert logp_expr.type.ndim == 1 + np.testing.assert_allclose(logp_expr.eval(), np.log([0.2, 0.3])) + + # Implicit batch p broadcasts value + logp_expr = logp(pm.Categorical.dist(p=batch_p, shape=None), value) + assert logp_expr.type.ndim == 1 + np.testing.assert_allclose(logp_expr.eval(), np.log([0.2, 0.5])) + @pytensor.config.change_flags(compute_test_value="raise") def test_categorical_bounds(self): with pm.Model(): @@ -407,7 +438,7 @@ def test_categorical_p_not_normalized(self): with pytest.warns(UserWarning, match="They will be automatically rescaled"): with pm.Model() as m: x = pm.Categorical("x", p=[1, 1, 1, 1, 1]) - assert np.isclose(m.x.owner.inputs[3].sum().eval(), 1.0) + assert np.isclose(m.x.owner.inputs[-1].sum().eval(), 1.0) def test_categorical_negative_p_symbolic(self): value = np.array([[1, 1, 1]]) @@ -476,9 +507,9 @@ def test_orderedlogistic_dimensions(shape): clogp = pm.logp(c, np.ones_like(obs)).sum().eval() * loge expected = -np.prod((size, *shape)) - assert c.owner.inputs[3].ndim == (len(shape) + 1) + assert c.owner.inputs[-1].type.shape == (1, *shape, 10) assert np.allclose(clogp, expected) - assert ol.owner.inputs[3].ndim == (len(shape) + 1) + assert ol.owner.inputs[-1].type.shape == (1, *shape, 10) assert np.allclose(ologp, expected) @@ -654,9 +685,7 @@ def discrete_weibul_rng_fn(self, size, q, beta, uniform_rng_fct): return np.ceil(np.power(np.log(1 - uniform_rng_fct(size=size)) / np.log(q), 1.0 / beta)) - 1 def seeded_discrete_weibul_rng_fn(self): - uniform_rng_fct = ft.partial( - getattr(np.random.RandomState, "uniform"), self.get_random_state() - ) + uniform_rng_fct = self.get_random_state().uniform return ft.partial(self.discrete_weibul_rng_fn, uniform_rng_fct=uniform_rng_fct) pymc_dist = pm.DiscreteWeibull @@ -759,8 +788,8 @@ class TestLogitCategorical(BaseTestDistributionRandom): expected_rv_op_params = { "p": sp.softmax(np.array([[0.28, 0.62, 0.10], [0.28, 0.62, 0.10]]), axis=-1) } - sizes_to_check = [None, (), (2,), 
(4, 2), (1, 2)] - sizes_expected = [(2,), (2,), (2,), (4, 2), (1, 2)] + sizes_to_check = [None, (2,), (4, 2), (1, 2)] + sizes_expected = [(2,), (2,), (4, 2), (1, 2)] checks_to_run = [ "check_pymc_params_match_rv_op", @@ -841,7 +870,7 @@ def test_implied_degenerate_shape(self): class TestOrderedLogistic: def test_expected_categorical(self): categorical = OrderedLogistic.dist(eta=0, cutpoints=np.array([-2, 0, 2])) - p = categorical.owner.inputs[3].eval() + p = categorical.owner.inputs[-1].eval() expected_p = np.array([0.11920292, 0.38079708, 0.38079708, 0.11920292]) np.testing.assert_allclose(p, expected_p) @@ -888,7 +917,7 @@ def test_compute_p(self): class TestOrderedProbit: def test_expected_categorical(self): categorical = OrderedProbit.dist(eta=0, cutpoints=np.array([-2, 0, 2])) - p = categorical.owner.inputs[3].eval() + p = categorical.owner.inputs[-1].eval() expected_p = np.array([0.02275013, 0.47724987, 0.47724987, 0.02275013]) np.testing.assert_allclose(p, expected_p) diff --git a/tests/distributions/test_distribution.py b/tests/distributions/test_distribution.py index 4a48bf4319..c87336d409 100644 --- a/tests/distributions/test_distribution.py +++ b/tests/distributions/test_distribution.py @@ -48,7 +48,7 @@ create_partial_observed_rv, support_point, ) -from pymc.distributions.shape_utils import change_dist_size, to_tuple +from pymc.distributions.shape_utils import change_dist_size, rv_size_is_none, to_tuple from pymc.distributions.transforms import log from pymc.exceptions import BlockModelAccessError from pymc.logprob.basic import conditional_logp, logcdf, logp @@ -296,7 +296,7 @@ def logp(value, mu): [ (None, None, 0.0), (None, 5, np.zeros(5)), - ("custom_support_point", None, 5), + ("custom_support_point", (), 5), ("custom_support_point", (2, 5), np.full((2, 5), 5)), ], ) @@ -314,7 +314,7 @@ def test_custom_dist_moment_future_warning(self): with pytest.warns( FutureWarning, match="`moment` argument is deprecated. Use `support_point` instead." 
): - x = CustomDist("x", moment=moment) + x = CustomDist("x", moment=moment, size=()) assert_support_point_is_expected(model, 5, check_finite_logp=False) @pytest.mark.parametrize("size", [(), (2,), (3, 2)], ids=str) @@ -459,9 +459,7 @@ def custom_dist(mu, sigma, size): (2, np.ones(5)), None, np.exp(2 + np.ones(5)), - lambda mu, sigma, size: pt.exp( - pm.Normal.dist(mu, sigma, size=size) + pt.ones(size) - ), + lambda mu, sigma, size: pt.exp(pm.Normal.dist(mu, sigma, size=size) + 1.0), ), ( (1, 2), @@ -563,6 +561,8 @@ def custom_dist(mu, sigma, size): def test_random_multiple_rngs(self): def custom_dist(p, sigma, size): idx = pm.Bernoulli.dist(p=p) + if rv_size_is_none(size): + size = pt.broadcast_shape(p, sigma) comps = pm.Normal.dist([-sigma, sigma], 1e-1, size=(*size, 2)).T return comps[idx] @@ -656,6 +656,9 @@ def old_random(size): def test_scan(self): def trw(nu, sigma, steps, size): + if rv_size_is_none(size): + size = () + def step(xtm1, nu, sigma): x = pm.StudentT.dist(nu=nu, mu=xtm1, sigma=sigma, shape=size) return x, collect_default_updates([x]) @@ -749,25 +752,25 @@ def dist(p, size): out = CustomDist.dist([0.25, 0.75], dist=dist, signature="(p)->()") # Size and updates are added automatically to the signature - assert out.owner.op.signature == "[size],(p),[rng]->(),[rng]" + assert out.owner.op.extended_signature == "[size],(p),[rng]->(),[rng]" assert out.owner.op.ndim_supp == 0 assert out.owner.op.ndims_params == [1] # When recreated internally, the whole signature may already be known out = CustomDist.dist([0.25, 0.75], dist=dist, signature="[size],(p),[rng]->(),[rng]") - assert out.owner.op.signature == "[size],(p),[rng]->(),[rng]" + assert out.owner.op.extended_signature == "[size],(p),[rng]->(),[rng]" assert out.owner.op.ndim_supp == 0 assert out.owner.op.ndims_params == [1] # A safe signature can be inferred from ndim_supp and ndims_params out = CustomDist.dist([0.25, 0.75], dist=dist, ndim_supp=0, ndims_params=[1]) - assert out.owner.op.signature == 
"[size],(i00),[rng]->(),[rng]" + assert out.owner.op.extended_signature == "[size],(i00),[rng]->(),[rng]" assert out.owner.op.ndim_supp == 0 assert out.owner.op.ndims_params == [1] # Otherwise be default we assume everything is scalar, even though it's wrong in this case out = CustomDist.dist([0.25, 0.75], dist=dist) - assert out.owner.op.signature == "[size],(),[rng]->(),[rng]" + assert out.owner.op.extended_signature == "[size],(),[rng]->(),[rng]" assert out.owner.op.ndim_supp == 0 assert out.owner.op.ndims_params == [0] diff --git a/tests/distributions/test_multivariate.py b/tests/distributions/test_multivariate.py index 5d2eb0db56..2848fa2989 100644 --- a/tests/distributions/test_multivariate.py +++ b/tests/distributions/test_multivariate.py @@ -640,7 +640,7 @@ def test_multinomial_p_not_normalized(self): with pm.Model() as m: x = pm.Multinomial("x", n=5, p=[1, 1, 1, 1, 1]) # test stored p-vals have been normalised - assert np.isclose(m.x.owner.inputs[4].sum().eval(), 1.0) + assert np.isclose(m.x.owner.inputs[-1].sum().eval(), 1.0) def test_multinomial_negative_p_symbolic(self): # Passing symbolic negative p does not raise an immediate error, but evaluating @@ -898,15 +898,15 @@ def test_car_matrix_check(sparse): W = pytensor.sparse.csr_from_dense(W) car_dist = pm.CAR.dist(mu, W, alpha, tau) - with pytest.raises(AssertionError, match="W must be a symmetric adjacency matrix"): + with pytest.raises(ParameterValueError, match="W is a symmetric adjacency matrix"): logp(car_dist, xs).eval() # W.ndim != 2 if not sparse: W = np.array([0.0, 1.0, 2.0, 0.0]) W = pytensor.tensor.as_tensor_variable(W) - with pytest.raises(ValueError, match="W must be a matrix"): - car_dist = pm.CAR.dist(mu, W, alpha, tau) + with pytest.raises(TypeError, match="W must be a matrix"): + pm.CAR.dist(mu, W, alpha, tau) @pytest.mark.parametrize("alpha", [1, -1]) @@ -926,7 +926,7 @@ def test_car_alpha_bounds(alpha): with pytest.raises(ValueError, match="the domain of alpha is: -1 < alpha < 1"): 
pm.draw(car_dist) - with pytest.raises(ValueError, match="-1 < alpha < 1, tau > 0"): + with pytest.raises(ParameterValueError, match="-1 < alpha < 1, tau > 0"): pm.logp(car_dist, values).eval() @@ -2245,8 +2245,8 @@ def check_rv_size(self): def check_draws_match_expected(self): # TODO: Find better comparison: - rng = self.get_random_state(reset=True) - x = _LKJCholeskyCov.dist(n=2, eta=10_000, sd_dist=pm.DiracDelta.dist([0.5, 2.0])) + rng = np.random.default_rng(2248) + x = _LKJCholeskyCov.dist(n=2, eta=100_000, sd_dist=pm.DiracDelta.dist([0.5, 2.0])) assert np.all(np.abs(draw(x, random_seed=rng) - np.array([0.5, 0, 2.0])) < 0.01) @@ -2255,9 +2255,6 @@ class TestICAR(BaseTestDistributionRandom): pymc_dist_params = {"W": np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), "sigma": 2} expected_rv_op_params = { "W": np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), - "node1": np.array([1, 2, 2]), - "node2": np.array([0, 0, 1]), - "N": 3, "sigma": 2, "zero_sum_strength": 0.001, } @@ -2418,56 +2415,56 @@ def test_mvnormal_blockwise_solve_opt(): def test_mvnormal_mu_convenience(): """Test that mu is broadcasted to the length of cov and provided a default of zero""" x = pm.MvNormal.dist(cov=np.eye(3)) - mu = x.owner.inputs[3] + mu = x.owner.inputs[2] np.testing.assert_allclose(mu.eval(), np.zeros((3,))) x = pm.MvNormal.dist(mu=1, cov=np.eye(3)) - mu = x.owner.inputs[3] + mu = x.owner.inputs[2] np.testing.assert_allclose(mu.eval(), np.ones((3,))) x = pm.MvNormal.dist(mu=np.ones((1, 1)), cov=np.eye(3)) - mu = x.owner.inputs[3] + mu = x.owner.inputs[2] np.testing.assert_allclose( mu.eval(), np.ones((1, 3)), ) x = pm.MvNormal.dist(mu=np.ones((10, 1)), cov=np.eye(3)) - mu = x.owner.inputs[3] + mu = x.owner.inputs[2] np.testing.assert_allclose( mu.eval(), np.ones((10, 3)), ) x = pm.MvNormal.dist(mu=np.ones((10, 1, 1)), cov=np.full((2, 3, 3), np.eye(3))) - mu = x.owner.inputs[3] + mu = x.owner.inputs[2] np.testing.assert_allclose(mu.eval(), np.ones((10, 2, 3))) def 
test_mvstudentt_mu_convenience(): """Test that mu is broadcasted to the length of scale and provided a default of zero""" x = pm.MvStudentT.dist(nu=4, scale=np.eye(3)) - mu = x.owner.inputs[4] + mu = x.owner.inputs[3] np.testing.assert_allclose(mu.eval(), np.zeros((3,))) x = pm.MvStudentT.dist(nu=4, mu=1, scale=np.eye(3)) - mu = x.owner.inputs[4] + mu = x.owner.inputs[3] np.testing.assert_allclose(mu.eval(), np.ones((3,))) x = pm.MvStudentT.dist(nu=4, mu=np.ones((1, 1)), scale=np.eye(3)) - mu = x.owner.inputs[4] + mu = x.owner.inputs[3] np.testing.assert_allclose( mu.eval(), np.ones((1, 3)), ) x = pm.MvStudentT.dist(nu=4, mu=np.ones((10, 1)), scale=np.eye(3)) - mu = x.owner.inputs[4] + mu = x.owner.inputs[3] np.testing.assert_allclose( mu.eval(), np.ones((10, 3)), ) x = pm.MvStudentT.dist(nu=4, mu=np.ones((10, 1, 1)), scale=np.full((2, 3, 3), np.eye(3))) - mu = x.owner.inputs[4] + mu = x.owner.inputs[3] np.testing.assert_allclose(mu.eval(), np.ones((10, 2, 3))) diff --git a/tests/distributions/test_shape_utils.py b/tests/distributions/test_shape_utils.py index e6be429c99..951772570d 100644 --- a/tests/distributions/test_shape_utils.py +++ b/tests/distributions/test_shape_utils.py @@ -29,7 +29,6 @@ from pymc import ShapeError from pymc.distributions.shape_utils import ( - broadcast_dist_samples_shape, change_dist_size, convert_dims, convert_shape, @@ -37,7 +36,6 @@ get_support_shape, get_support_shape_1d, rv_size_is_none, - to_tuple, ) from pymc.model import Model @@ -100,28 +98,6 @@ def test_broadcasting(self, fixture_shapes): out = np.broadcast_shapes(*shapes) assert out == expected_out - def test_broadcast_dist_samples_shape(self, fixture_sizes, fixture_shapes): - size = fixture_sizes - shapes = fixture_shapes - size_ = to_tuple(size) - shapes_ = [ - s if s[: min([len(size_), len(s)])] != size_ else s[len(size_) :] for s in shapes - ] - try: - expected_out = np.broadcast(*(np.empty(s) for s in shapes_)).shape - except ValueError: - expected_out = None - if 
expected_out is not None and any( - s[: min([len(size_), len(s)])] == size_ for s in shapes - ): - expected_out = size_ + expected_out - if expected_out is None: - with pytest.raises(ValueError): - broadcast_dist_samples_shape(shapes, size=size) - else: - out = broadcast_dist_samples_shape(shapes, size=size) - assert out == expected_out - class TestSizeShapeDimsObserved: @pytest.mark.parametrize("param_shape", [(), (2,)]) @@ -384,7 +360,7 @@ def test_rv_size_is_none(): assert rv_size_is_none(rv.owner.inputs[1]) rv = pm.Normal.dist(0, 1, size=()) - assert rv_size_is_none(rv.owner.inputs[1]) + assert not rv_size_is_none(rv.owner.inputs[1]) rv = pm.Normal.dist(0, 1, size=1) assert not rv_size_is_none(rv.owner.inputs[1]) diff --git a/tests/distributions/test_simulator.py b/tests/distributions/test_simulator.py index 928582968a..bddf440a1e 100644 --- a/tests/distributions/test_simulator.py +++ b/tests/distributions/test_simulator.py @@ -21,10 +21,7 @@ from pytensor.graph import ancestors from pytensor.tensor.random.op import RandomVariable -from pytensor.tensor.random.var import ( - RandomGeneratorSharedVariable, - RandomStateSharedVariable, -) +from pytensor.tensor.random.var import RandomGeneratorSharedVariable from pytensor.tensor.sort import SortOp import pymc as pm @@ -257,7 +254,7 @@ def test_upstream_rngs_not_in_compiled_logp(self, seeded_test): shared_rng_vars = [ node for node in ancestors(compiled_graph) - if isinstance(node, RandomStateSharedVariable | RandomGeneratorSharedVariable) + if isinstance(node, RandomGeneratorSharedVariable) ] assert len(shared_rng_vars) == 1 diff --git a/tests/distributions/test_transform.py b/tests/distributions/test_transform.py index 8d464f206a..f1d71504ce 100644 --- a/tests/distributions/test_transform.py +++ b/tests/distributions/test_transform.py @@ -385,7 +385,7 @@ def test_beta(self, a, b, size): ) def test_uniform(self, lower, upper, size): def transform_params(*inputs): - _, _, _, lower, upper = inputs + _, _, lower, 
upper = inputs lower = pt.as_tensor_variable(lower) if lower is not None else None upper = pt.as_tensor_variable(upper) if upper is not None else None return lower, upper @@ -406,7 +406,7 @@ def transform_params(*inputs): ) def test_triangular(self, lower, c, upper, size): def transform_params(*inputs): - _, _, _, lower, _, upper = inputs + _, _, lower, _, upper = inputs lower = pt.as_tensor_variable(lower) if lower is not None else None upper = pt.as_tensor_variable(upper) if upper is not None else None return lower, upper @@ -502,7 +502,7 @@ def test_beta_ordered(self, a, b, size): ) def test_uniform_ordered(self, lower, upper, size): def transform_params(*inputs): - _, _, _, lower, upper = inputs + _, _, lower, upper = inputs lower = pt.as_tensor_variable(lower) if lower is not None else None upper = pt.as_tensor_variable(upper) if upper is not None else None return lower, upper diff --git a/tests/distributions/test_truncated.py b/tests/distributions/test_truncated.py index e67ca598dd..cf4824df74 100644 --- a/tests/distributions/test_truncated.py +++ b/tests/distributions/test_truncated.py @@ -125,8 +125,8 @@ def test_truncation_specialized_op(shape_info): # Test RNG is not reused assert xt.owner.inputs[0] is not rng - lower_upper = pt.stack(xt.owner.inputs[5:]) - assert np.all(lower_upper.eval() == [5, 15]) + lower_upper = pt.stack(xt.owner.inputs[4:]) + assert np.all(lower_upper.eval().squeeze() == [5, 15]) @pytest.mark.parametrize("lower, upper", [(-1, np.inf), (-1, 1.5), (-np.inf, 1.5)]) diff --git a/tests/logprob/test_basic.py b/tests/logprob/test_basic.py index c2f9635fa9..cba014a98e 100644 --- a/tests/logprob/test_basic.py +++ b/tests/logprob/test_basic.py @@ -291,7 +291,7 @@ def test_joint_logp_incsubtensor(indices, size): mu = pm.floatX(np.power(10, np.arange(np.prod(size)))).reshape(size) data = mu[indices] sigma = 0.001 - rng = np.random.RandomState(232) + rng = np.random.default_rng(232) a_val = rng.normal(mu, sigma, 
size=size).astype(pytensor.config.floatX) rng = pytensor.shared(rng, borrow=False) diff --git a/tests/logprob/test_scan.py b/tests/logprob/test_scan.py index 30a76680e7..6c731ad5bd 100644 --- a/tests/logprob/test_scan.py +++ b/tests/logprob/test_scan.py @@ -76,7 +76,7 @@ def test_convert_outer_out_to_in_sit_sot(): This should be a single SIT-SOT replacement. """ - rng_state = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(1234))) + rng_state = np.random.default_rng(123) rng_tt = pytensor.shared(rng_state, name="rng", borrow=True) rng_tt.tag.is_rng = True rng_tt.default_update = rng_tt diff --git a/tests/logprob/test_transform_value.py b/tests/logprob/test_transform_value.py index 52cbe0a006..2490ab61e7 100644 --- a/tests/logprob/test_transform_value.py +++ b/tests/logprob/test_transform_value.py @@ -193,7 +193,7 @@ def test_original_values_output_dict(): pt.random.dirichlet, (np.array([[0.7, 0.3], [0.9, 0.1]]),), lambda alpha: DirichletScipyDist(alpha), - (), + None, ), pytest.param( pt.random.dirichlet, diff --git a/tests/logprob/test_utils.py b/tests/logprob/test_utils.py index 47cb65f195..3192d0c586 100644 --- a/tests/logprob/test_utils.py +++ b/tests/logprob/test_utils.py @@ -42,6 +42,7 @@ from pytensor import tensor as pt from pytensor.compile import get_default_mode from pytensor.graph.basic import ancestors, equal_computations +from pytensor.tensor.random.basic import NormalRV from pytensor.tensor.random.op import RandomVariable import pymc as pm @@ -184,8 +185,8 @@ def test_unvalued_rv_model(self): res_y = res.owner.inputs[1] # Graph should have be cloned, and therefore y and res_y should have different ids assert res_y is not y - assert res_y.owner.op == pt.random.normal - assert res_y.owner.inputs[3] is x_value + assert isinstance(res_y.owner.op, NormalRV) + assert res_y.owner.inputs[2] is x_value def test_no_change_inplace(self): # Test that calling rvs_to_value_vars in models with nested transformations diff --git 
a/tests/model/test_core.py b/tests/model/test_core.py index 565f199f85..95ce3265eb 100644 --- a/tests/model/test_core.py +++ b/tests/model/test_core.py @@ -1644,7 +1644,7 @@ def test_invalid_parameter(self, fn, capfd): # var dlogp is 0 or 1 without a likelihood assert "No problems found" in out else: - assert "The parameters evaluate to:\n0: 0.0\n1: [ 1. -1. 1.]" in out + assert "The parameters evaluate to:\n0: [0.]\n1: [ 1. -1. 1.]" in out if fn == "logp": assert "This does not respect one of the following constraints: sigma > 0" in out else: diff --git a/tests/model/test_fgraph.py b/tests/model/test_fgraph.py index a964f1faf6..9a65be36b7 100644 --- a/tests/model/test_fgraph.py +++ b/tests/model/test_fgraph.py @@ -22,6 +22,7 @@ import pymc as pm +from pymc.distributions.shape_utils import rv_size_is_none from pymc.model.fgraph import ( ModelDeterministic, ModelFreeRV, @@ -109,7 +110,7 @@ def test_data(inline_views): y = pm.Data("y", [10.0, 11.0, 12.0], dims=("test_dim",)) sigma = pm.MutableData("sigma", [1.0], shape=(1,)) b0 = pm.Data("b0", np.zeros((1,)), shape=((1,))) - b1 = pm.DiracDelta("b1", 1.0) + b1 = pm.Normal("b1", 1.0, sigma=1e-8) mu = pm.Deterministic("mu", b0 + b1 * x, dims=("test_dim",)) obs = pm.Normal("obs", mu=mu, sigma=sigma, observed=y, dims=("test_dim",)) @@ -127,12 +128,12 @@ def test_data(inline_views): # ObservedRV(obs, y, *dims) not ObservedRV(obs, Named(y), *dims) assert obs.owner.inputs[1] is memo[y].owner.inputs[0] # ObservedRV(Normal(..., sigma), ...) not ObservedRV(Normal(..., Named(sigma)), ...) 
- assert obs.owner.inputs[0].owner.inputs[4] is memo[sigma].owner.inputs[0] + assert obs.owner.inputs[0].owner.inputs[-1] is memo[sigma].owner.inputs[0] else: assert mu_inp.owner.inputs[0] is memo[b0] assert mu_inp.owner.inputs[1].owner.inputs[1] is memo[x] assert obs.owner.inputs[1] is memo[y] - assert obs.owner.inputs[0].owner.inputs[4] is memo[sigma] + assert obs.owner.inputs[0].owner.inputs[-1] is memo[sigma] m_new = model_from_fgraph(m_fgraph) @@ -180,14 +181,14 @@ def test_shared_variable(): with pm.Model() as m_old: test = pm.Normal("test", mu=mu, sigma=sigma, observed=obs) - assert test.owner.inputs[3] is mu - assert test.owner.inputs[4] is sigma + assert test.owner.inputs[2] is mu + assert test.owner.inputs[3] is sigma assert m_old.rvs_to_values[test] is obs m_new = clone_model(m_old) test_new = m_new["test"] # Shared Variables are cloned but still point to the same memory - mu_new, sigma_new = test_new.owner.inputs[3:5] + mu_new, sigma_new = test_new.owner.op.dist_params(test_new.owner) obs_new = m_new.rvs_to_values[test_new] assert mu_new is not mu assert sigma_new is not sigma @@ -224,8 +225,8 @@ def test_deterministics(inline_views): z = pm.Normal("z", y__) # Deterministic mu is in the graph of x to y but not sigma - assert m["y"].owner.inputs[3] is m["mu"] - assert m["y"].owner.inputs[4] is not m["sigma"] + assert m["y"].owner.inputs[2] is m["mu"] + assert m["y"].owner.inputs[3] is not m["sigma"] fg, _ = fgraph_from_model(m, inlined_views=inline_views) @@ -234,27 +235,27 @@ def test_deterministics(inline_views): # [Det(mu), Det(sigma)] mu = det_mu.owner.inputs[0] sigma = det_sigma.owner.inputs[0] - assert y.owner.inputs[0].owner.inputs[4] is sigma + assert y.owner.inputs[0].owner.inputs[3] is sigma assert det_y_ is not det_y__ assert det_y_.owner.inputs[0] is y if not inline_views: # FreeRV(y(mu, sigma)) not FreeRV(y(Det(mu), Det(sigma))) - assert y.owner.inputs[0].owner.inputs[3] is mu + assert y.owner.inputs[0].owner.inputs[2] is mu # FreeRV(z(y)) 
not FreeRV(z(Det(Det(y)))) - assert z.owner.inputs[0].owner.inputs[3] is y + assert z.owner.inputs[0].owner.inputs[2] is y # Det(y), not Det(Det(y)) assert det_y__.owner.inputs[0] is y else: - assert y.owner.inputs[0].owner.inputs[3] is det_mu - assert z.owner.inputs[0].owner.inputs[3] is det_y__ + assert y.owner.inputs[0].owner.inputs[2] is det_mu + assert z.owner.inputs[0].owner.inputs[2] is det_y__ assert det_y__.owner.inputs[0] is det_y_ # Both mu and sigma deterministics are now in the graph of x to y m = model_from_fgraph(fg) - assert m["y"].owner.inputs[3] is m["mu"] - assert m["y"].owner.inputs[4] is m["sigma"] + assert m["y"].owner.inputs[2] is m["mu"] + assert m["y"].owner.inputs[3] is m["sigma"] # But not y_* in y to z, since there was no real Op in between - assert m["z"].owner.inputs[3] is m["y"] + assert m["z"].owner.inputs[2] is m["y"] assert m["y_"].owner.inputs[0] is m["y"] assert m["y__"].owner.inputs[0] is m["y"] @@ -303,10 +304,10 @@ def non_centered_param(fgraph: FunctionGraph, node): rv, value, *dims = node.inputs if not isinstance(rv.owner.op, pm.Normal): return - rng, size, dtype, loc, scale = rv.owner.inputs + rng, size, loc, scale = rv.owner.inputs # Only apply rewrite if size information is explicit - if size.ndim == 0: + if rv_size_is_none(size): return None try: diff --git a/tests/sampling/test_forward.py b/tests/sampling/test_forward.py index 619ae74384..92925b33ad 100644 --- a/tests/sampling/test_forward.py +++ b/tests/sampling/test_forward.py @@ -27,6 +27,7 @@ from arviz.tests.helpers import check_multiple_attrs from pytensor import Mode, shared from pytensor.compile import SharedVariable +from pytensor.graph import graph_inputs from scipy import stats import pymc as pm @@ -114,8 +115,8 @@ class TestCompileForwardSampler: def get_function_roots(function): return [ var - for var in pytensor.graph.basic.graph_inputs(function.maker.fgraph.outputs) - if var.name + for var in graph_inputs(function.maker.fgraph.outputs) + if var.name not 
in (None, "NoneConst") ] @staticmethod @@ -212,7 +213,7 @@ def test_volatile_parameters(self): vars_in_trace=[mu, nested_mu, sigma], basic_rvs=model.basic_RVs, givens_dict={ - mu: np.array(1.0) + mu: pytensor.shared(np.array(1.0), name="mu") }, # mu will be considered volatile because it's in givens ) assert volatile_rvs == {nested_mu, obs} diff --git a/tests/test_initial_point.py b/tests/test_initial_point.py index b2f5d501fb..8aaeee879d 100644 --- a/tests/test_initial_point.py +++ b/tests/test_initial_point.py @@ -263,8 +263,7 @@ def test_support_point_from_dims(self, rv_cls): def test_support_point_not_implemented_fallback(self): class MyNormalRV(RandomVariable): name = "my_normal" - ndim_supp = 0 - ndims_params = [0, 0] + signature = "(),()->()" dtype = "floatX" @classmethod diff --git a/tests/test_pytensorf.py b/tests/test_pytensorf.py index ef7c3b9385..f6084718f8 100644 --- a/tests/test_pytensorf.py +++ b/tests/test_pytensorf.py @@ -27,7 +27,6 @@ from pytensor.compile.builders import OpFromGraph from pytensor.graph.basic import Variable, equal_computations from pytensor.tensor.random.basic import normal, uniform -from pytensor.tensor.random.var import RandomStateSharedVariable from pytensor.tensor.subtensor import AdvancedIncSubtensor, AdvancedIncSubtensor1 from pytensor.tensor.variable import TensorVariable @@ -638,22 +637,13 @@ def test_reseed_rngs(): bit_generators = [default_rng(sub_seed) for sub_seed in np.random.SeedSequence(seed).spawn(2)] - rngs = [ - pytensor.shared(rng_type(default_rng())) - for rng_type in (np.random.Generator, np.random.RandomState) - ] + rngs = [pytensor.shared(np.random.Generator(default_rng())) for _ in range(2)] for rng, bit_generator in zip(rngs, bit_generators): - if isinstance(rng, RandomStateSharedVariable): - assert rng.get_value()._bit_generator.state != bit_generator.state - else: - assert rng.get_value().bit_generator.state != bit_generator.state + assert rng.get_value().bit_generator.state != bit_generator.state 
reseed_rngs(rngs, seed) for rng, bit_generator in zip(rngs, bit_generators): - if isinstance(rng, RandomStateSharedVariable): - assert rng.get_value()._bit_generator.state == bit_generator.state - else: - assert rng.get_value().bit_generator.state == bit_generator.state + assert rng.get_value().bit_generator.state == bit_generator.state def test_constant_fold():