diff --git a/baseline.py b/baseline.py new file mode 100644 index 0000000..c22620e --- /dev/null +++ b/baseline.py @@ -0,0 +1,128 @@ +import numpy as np +import networkx as nx +import copy +import pandas as pd +import xlwt +import torch +from torch import nn +import torch.optim as optim +from torch_geometric.utils import from_networkx +from torch.utils.data import Dataset, DataLoader +from torch_geometric.data import Data, Batch +from torch_geometric.nn.conv import MessagePassing +from torch_sparse import SparseTensor, matmul +import torch.nn.functional as F + + +def tgrad_qp(A, b, x): + # A: nodes * k * n + # X: nodes * n + # Y: nodes * k + '''grad_A = np.zeros(x.shape) + for i in range(x.shape[0]): + grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i]) + return grad_A''' + x_ = torch.unsqueeze(x, axis = -1) + b_ = torch.unsqueeze(b, axis = -1) + + A_t = A.transpose(2,3) + grad_A = A_t @ (A @ x_ - b_) + # print(A.shape, x.shape, b.shape) + grad_A = torch.squeeze(grad_A, axis = -1) + return grad_A + +def torch_soft(x, tau): + return F.relu(x - tau) - F.relu( - x - tau) + +def opt_distance(x,opt): + error = 0 + batch_size = x.shape[0] + num_of_nodes = x.shape[1] + error = np.linalg.norm(x-opt)**2 + return error/num_of_nodes/batch_size + +def hist_nmse(x_hist,opt): + error = [] + iteration = len(x_hist) + #print(iteration) + for k in range(iteration): + error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt))) + return error + + +######################################################### +# PGEXTRA +######################################################### + + +def torch_PGEXTRA(W, A, b, max_iter, step_size,tau): + (batch_size, num_of_nodes, _, dim) = A.shape + init_x = torch.zeros((batch_size, num_of_nodes, dim)) + + + (batch_size, num_of_nodes, dim) = init_x.shape + I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0) + I = I.repeat(batch_size, 1, 1) + + W_hat = (W + I)/2 + + #initialization + k = 1 + x_0 = init_x + x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0) + 
x_1 = torch_soft(x_12, tau*step_size) + + x_hist = [init_x,x_1] #add for plot + while (k < max_iter): + + x_32 = W@x_1 + x_12 - W_hat@x_0 - \ + step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)) + x_2 = torch_soft(x_32, tau*step_size) + + x_0 = x_1 + x_1 = x_2 + x_12 = x_32 + + k = k + 1 + + x_hist.append(x_2) + + return x_2,x_hist + +######################################################### +# Prox-DGD +######################################################### +def torchProx_DGD(W, A, b, max_iter, step_size,tau): + (batch_size, num_of_nodes, _, dim) = A.shape + init_x = torch.zeros((batch_size, num_of_nodes, dim)) + + + (batch_size, num_of_nodes, dim) = init_x.shape + I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0) + I = I.repeat(batch_size, 1, 1) + + W_hat = (W + I)/2 + + #initialization + k = 1 + x_0 = init_x + x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0) + x_1 = torch_soft(x_12, tau*step_size) + + x_hist = [init_x,x_1] #add for plot + while (k < max_iter): + + x_32 = W@x_1 - step_size*tgrad_qp(A, b, x_1) + x_2 = torch_soft(x_32, tau * step_size) + + x_0 = x_1 + x_1 = x_2 + x_12 = x_32 + + k = k + 1 + + x_hist.append(x_2) + + return x_2,x_hist + + diff --git a/convergence30L.ipynb b/convergence30L.ipynb deleted file mode 100644 index d0fea68..0000000 --- a/convergence30L.ipynb +++ /dev/null @@ -1,1270 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/xiezhq/.wanghe_env/lib/python3.7/site-packages/torch_sparse/tensor.py:46: UserWarning: This overload of nonzero is deprecated:\n", - "\tnonzero()\n", - "Consider using one of the following signatures instead:\n", - "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:882.)\n", - " index = mat.nonzero()\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import networkx as nx\n", - "import copy\n", - "import pandas as 
pd\n", - "import xlwt\n", - "import torch\n", - "from torch import nn\n", - "import torch.optim as optim\n", - "from torch_geometric.utils import from_networkx\n", - "from torch.utils.data import Dataset, DataLoader\n", - "from torch_geometric.data import Data, Batch\n", - "from torch_geometric.nn.conv import MessagePassing\n", - "from torch_sparse import SparseTensor, matmul\n", - "import torch.nn.functional as F\n", - "import matplotlib.pyplot as plt\n", - "\n", - "num_nodes = 5\n", - "num_edges = 6\n", - "n = 100\n", - "m = 300\n", - "k = 60\n", - "train_num = 1000\n", - "test_num = 100\n", - "num_layers = 50\n", - "nnz = 30\n", - "\n", - "#less nnz =5; m = 50; k = 10\n", - "\n", - "def metropolis(adjacency_matrix):\n", - " num_of_nodes = adjacency_matrix.shape[0]\n", - " metropolis=np.zeros((num_of_nodes,num_of_nodes))\n", - " for i in range(num_of_nodes):\n", - " for j in range(num_of_nodes):\n", - " if adjacency_matrix[i,j]==1:\n", - " d_i = np.sum(adjacency_matrix[i,:])\n", - " d_j = np.sum(adjacency_matrix[j,:])\n", - " metropolis[i,j]=1/(1+max(d_i,d_j))\n", - " metropolis[i,i]=1-sum(metropolis[i,:])\n", - " return metropolis\n", - "\n", - "class SynDataset(Dataset):\n", - " def __init__(self, samples):\n", - " self.samples = samples\n", - " self.A = []; \n", - " self.y = []; \n", - " self.x_true = []\n", - " self.pyg_data=[]\n", - " self.process()\n", - " \n", - " \n", - " def gen_func(self, num_of_nodes, n, m, k):\n", - " A_all = np.random.randn(m, n)\n", - " x = np.random.randn(n)\n", - " x_norm = 0\n", - "\n", - " while(x_norm < 1e-2):\n", - " x_mask = np.random.rand(n)\n", - " x_mask[x_mask < 1 - nnz/100] = 0\n", - " x_mask[x_mask > 0] = 1\n", - " x_norm = np.linalg.norm(x * x_mask)\n", - "\n", - " x = x * x_mask\n", - " x = x/np.linalg.norm(x)\n", - " \n", - " SNR_db = 30\n", - " SNR = 10**(SNR_db/10)\n", - " \n", - " noise = np.random.randn(m) * np.sqrt(1/SNR)\n", - " y_all = A_all@x + noise\n", - "\n", - " A = np.zeros((num_of_nodes, k , n))\n", - " 
y = np.zeros((num_of_nodes, k))\n", - " for ii in range(num_of_nodes):\n", - " start = (k*ii) % m; end = (k*(ii+1) )%m\n", - " if(start > end):\n", - " A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0)\n", - " y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), \n", - " np.expand_dims(y_all[:end], axis = 0)), axis = 1)\n", - " else:\n", - " A[ii,:,:] = A_all[start:end,:]\n", - " y[ii,:] = np.expand_dims(y_all[start:end], axis = 0)\n", - " \n", - " x = np.expand_dims(x, axis = 0)\n", - " x = x.repeat(num_of_nodes, axis = 0)\n", - " \n", - " return A, y, x\n", - "\n", - " def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True):\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k = 0\n", - " while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False:\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k += 1\n", - " # print(\"Check if connected: \", nx.is_connected(G))\n", - " # nx.draw(G)\n", - " \n", - " edge_index = from_networkx(G).edge_index\n", - " adj = nx.to_numpy_matrix(G)\n", - " return G, adj,edge_index\n", - " \n", - " def process(self):\n", - " _, adj,edge_index = self.gen_graph(num_nodes, num_edges)\n", - " self.edge_index = edge_index\n", - " W = metropolis(adj)\n", - " self.W = [torch.tensor(W, dtype = torch.float)] * self.samples\n", - " \n", - " \n", - " for ii in range(self.samples):\n", - " A, y, x_true = self.gen_func(num_nodes, n, m, k)\n", - " self.A.append(torch.tensor(A, dtype = torch.float) ); \n", - " self.y.append(torch.tensor(y, dtype = torch.float) ); \n", - " self.x_true.append(torch.tensor(x_true, dtype = torch.float) )\n", - " \n", - " edge_weight=torch.tensor(W,dtype=torch.float)\n", - " self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) \n", - " \n", - " \n", - "\n", - " def __getitem__(self, idx):\n", - " return self.W[idx], self.A[idx], self.y[idx], 
self.x_true[idx], self.pyg_data[idx]\n", - "\n", - " def __len__(self):\n", - " \"\"\"Number of graphs in the dataset\"\"\"\n", - " return len(self.A)\n", - " \n", - " \n", - "def collate(samples):\n", - " # The input `samples` is a list of pairs\n", - " # (graph, label).\n", - " W, A, y, x_true, pyg_data = map(list, zip(*samples))\n", - " W = torch.stack(W)\n", - " A = torch.stack(A)\n", - " y = torch.stack(y)\n", - " x_true = torch.stack(x_true)\n", - " pyg_data = Batch.from_data_list(pyg_data)\n", - " return W, A, y, x_true, pyg_data\n", - "class MetropolisConv(MessagePassing):\n", - " def __init__(self):\n", - " super(MetropolisConv, self).__init__(aggr='add') # \"Add\" aggregation.\n", - "\n", - " def forward(self, x, pyg_data):\n", - " (B, N, D)=x.shape\n", - " out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1)\n", - " return out.view(B,N,D)\n", - "\n", - " def message_and_aggregate(self, adj_t, x):\n", - " return matmul(adj_t, x, reduce=self.aggr)\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - " di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 0, -1))\n", - " gamma_a = di ** power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - "\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - "\n", - "\n", - "train_data = SynDataset(train_num)\n", - "val_data = SynDataset(test_num)\n", - "test_data = SynDataset(test_num)\n", - "train_loader = DataLoader(train_data, batch_size=32, shuffle=True, collate_fn=collate)\n", - "val_loader = DataLoader(val_data, batch_size=100, shuffle=False, collate_fn=collate)\n", - "test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate)" - ] - }, - { - "cell_type": "markdown", - "metadata": 
{}, - "source": [ - "# GNN-PGEXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.00015240458333209972 tensor(0.0080, grad_fn=) tensor(0.0007, grad_fn=)\n", - "1.5373161836862437e-06 tensor(0.0050, grad_fn=) tensor(0.0026, grad_fn=)\n", - "7.83289729966441e-07 tensor(0.0044, grad_fn=) tensor(0.0030, grad_fn=)\n", - "6.118405604382815e-07 tensor(0.0037, grad_fn=) tensor(0.0033, grad_fn=)\n", - "5.260294031472768e-07 tensor(0.0032, grad_fn=) tensor(0.0035, grad_fn=)\n", - "4.6372197815003346e-07 tensor(0.0028, grad_fn=) tensor(0.0037, grad_fn=)\n", - "4.20715747573297e-07 tensor(0.0024, grad_fn=) tensor(0.0039, grad_fn=)\n", - "3.883507488922078e-07 tensor(0.0021, grad_fn=) tensor(0.0041, grad_fn=)\n", - "3.4693574679778294e-07 tensor(0.0019, grad_fn=) tensor(0.0042, grad_fn=)\n", - "3.213548982472503e-07 tensor(0.0018, grad_fn=) tensor(0.0044, grad_fn=)\n", - "3.1172257397571457e-07 tensor(0.0018, grad_fn=) tensor(0.0044, grad_fn=)\n", - "3.1235354658321057e-07 tensor(0.0020, grad_fn=) tensor(0.0045, grad_fn=)\n", - "3.0961734154288933e-07 tensor(0.0023, grad_fn=) tensor(0.0045, grad_fn=)\n", - "3.13946010521704e-07 tensor(0.0027, grad_fn=) tensor(0.0045, grad_fn=)\n", - "3.265478962788393e-07 tensor(0.0029, grad_fn=) tensor(0.0045, grad_fn=)\n", - "3.0418221541594903e-07 tensor(0.0026, grad_fn=) tensor(0.0046, grad_fn=)\n", - "3.1065748196112963e-07 tensor(0.0029, grad_fn=) tensor(0.0046, grad_fn=)\n", - "3.1601884487031384e-07 tensor(0.0032, grad_fn=) tensor(0.0046, grad_fn=)\n", - "3.3754516426398595e-07 tensor(0.0033, grad_fn=) tensor(0.0047, grad_fn=)\n", - "3.1284611079485103e-07 tensor(0.0029, grad_fn=) tensor(0.0046, grad_fn=)\n", - "3.049301540158922e-07 tensor(0.0026, grad_fn=) tensor(0.0047, grad_fn=)\n", - "2.9688041180975233e-07 tensor(0.0023, grad_fn=) tensor(0.0048, grad_fn=)\n", - "3.032886670695234e-07 tensor(0.0026, grad_fn=) 
tensor(0.0048, grad_fn=)\n", - "2.902714948405105e-07 tensor(0.0026, grad_fn=) tensor(0.0049, grad_fn=)\n", - "3.298082065228414e-07 tensor(0.0041, grad_fn=) tensor(0.0046, grad_fn=)\n", - "5.506631106655391e-07 tensor(0.0050, grad_fn=) tensor(0.0036, grad_fn=)\n", - "3.704209454724605e-07 tensor(0.0039, grad_fn=) tensor(0.0043, grad_fn=)\n", - "3.264226648980184e-07 tensor(0.0035, grad_fn=) tensor(0.0047, grad_fn=)\n", - "3.103840588991602e-07 tensor(0.0031, grad_fn=) tensor(0.0048, grad_fn=)\n", - "2.999576818041305e-07 tensor(0.0029, grad_fn=) tensor(0.0050, grad_fn=)\n", - "3.0347825230592207e-07 tensor(0.0029, grad_fn=) tensor(0.0050, grad_fn=)\n", - "2.9623767350273056e-07 tensor(0.0029, grad_fn=) tensor(0.0050, grad_fn=)\n", - "2.9641843291017267e-07 tensor(0.0030, grad_fn=) tensor(0.0050, grad_fn=)\n", - "2.9198075157665926e-07 tensor(0.0029, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.8723161449306644e-07 tensor(0.0027, grad_fn=) tensor(0.0051, grad_fn=)\n", - "3.097361336301674e-07 tensor(0.0033, grad_fn=) tensor(0.0049, grad_fn=)\n", - "2.9133521639579385e-07 tensor(0.0031, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.842473882935792e-07 tensor(0.0031, grad_fn=) tensor(0.0052, grad_fn=)\n", - "2.8145143193825106e-07 tensor(0.0036, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.8482853942080055e-07 tensor(0.0032, grad_fn=) tensor(0.0052, grad_fn=)\n", - "3.2854422826744667e-07 tensor(0.0048, grad_fn=) tensor(0.0048, grad_fn=)\n", - "2.82457824241078e-07 tensor(0.0034, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.913078347432929e-07 tensor(0.0031, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.8635842408419876e-07 tensor(0.0026, grad_fn=) tensor(0.0052, grad_fn=)\n", - "2.901639941654821e-07 tensor(0.0029, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.876583815591971e-07 tensor(0.0026, grad_fn=) tensor(0.0051, grad_fn=)\n", - "2.8022408038452795e-07 tensor(0.0022, grad_fn=) tensor(0.0052, grad_fn=)\n", - "2.9243085730712437e-07 tensor(0.0037, grad_fn=) 
tensor(0.0050, grad_fn=)\n", - "2.7858472595454487e-07 tensor(0.0024, grad_fn=) tensor(0.0052, grad_fn=)\n", - "2.809495027733533e-07 tensor(0.0025, grad_fn=) tensor(0.0053, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_PGEXTRA(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_PGEXTRA, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = 
x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - " \n", - "###main\n", - "model_PGEXTRA = Net_PGEXTRA(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_PGEXTRA.parameters(), lr=1e-4)\n", - "model_PGEXTRA.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.83,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_PGEXTRA.lam[1], model_PGEXTRA.step_size[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0005299581494000449 tensor(0.0075, grad_fn=) tensor(0.0033, grad_fn=)\n", - "7.238334751491493e-05 tensor(0.0070, grad_fn=) tensor(0.0035, grad_fn=)\n", - "5.440667541734001e-05 tensor(0.0068, grad_fn=) tensor(0.0038, grad_fn=)\n", - "4.628787928595557e-05 tensor(0.0066, grad_fn=) tensor(0.0040, grad_fn=)\n", - "4.289864750717243e-05 tensor(0.0064, grad_fn=) tensor(0.0043, grad_fn=)\n", - "4.171779494299699e-05 tensor(0.0064, grad_fn=) tensor(0.0045, grad_fn=)\n", - "4.090801394340815e-05 tensor(0.0063, grad_fn=) tensor(0.0048, grad_fn=)\n", - "4.00582073325495e-05 tensor(0.0062, grad_fn=) tensor(0.0052, grad_fn=)\n", - "3.9135704128057114e-05 tensor(0.0061, grad_fn=) tensor(0.0057, grad_fn=)\n", - "3.736674648280314e-05 tensor(0.0059, grad_fn=) tensor(0.0064, grad_fn=)\n", - "3.5070178512341954e-05 tensor(0.0056, grad_fn=) tensor(0.0072, grad_fn=)\n", - "3.411230312622138e-05 
tensor(0.0056, grad_fn=) tensor(0.0080, grad_fn=)\n", - "3.344333163113333e-05 tensor(0.0059, grad_fn=) tensor(0.0088, grad_fn=)\n", - "3.2310661481460556e-05 tensor(0.0064, grad_fn=) tensor(0.0098, grad_fn=)\n", - "3.122570370805988e-05 tensor(0.0068, grad_fn=) tensor(0.0108, grad_fn=)\n", - "2.9104821521741542e-05 tensor(0.0073, grad_fn=) tensor(0.0121, grad_fn=)\n", - "2.6016351398538973e-05 tensor(0.0078, grad_fn=) tensor(0.0134, grad_fn=)\n", - "2.2591991239551135e-05 tensor(0.0081, grad_fn=) tensor(0.0149, grad_fn=)\n", - "1.934232034273009e-05 tensor(0.0086, grad_fn=) tensor(0.0164, grad_fn=)\n", - "1.6076565941602894e-05 tensor(0.0092, grad_fn=) tensor(0.0177, grad_fn=)\n", - "1.4498555543696057e-05 tensor(0.0103, grad_fn=) tensor(0.0189, grad_fn=)\n", - "1.3564983646574547e-05 tensor(0.0116, grad_fn=) tensor(0.0199, grad_fn=)\n", - "1.2781853911292274e-05 tensor(0.0134, grad_fn=) tensor(0.0208, grad_fn=)\n", - "1.2254147776502577e-05 tensor(0.0155, grad_fn=) tensor(0.0215, grad_fn=)\n", - "1.1883150222047334e-05 tensor(0.0180, grad_fn=) tensor(0.0222, grad_fn=)\n", - "1.1518768815221847e-05 tensor(0.0207, grad_fn=) tensor(0.0228, grad_fn=)\n", - "1.1252920160131907e-05 tensor(0.0237, grad_fn=) tensor(0.0234, grad_fn=)\n", - "1.100952400179267e-05 tensor(0.0269, grad_fn=) tensor(0.0239, grad_fn=)\n", - "1.0879225641247103e-05 tensor(0.0304, grad_fn=) tensor(0.0245, grad_fn=)\n", - "1.0685557896294995e-05 tensor(0.0341, grad_fn=) tensor(0.0250, grad_fn=)\n", - "1.0495871038074256e-05 tensor(0.0382, grad_fn=) tensor(0.0256, grad_fn=)\n", - "1.0257936992275063e-05 tensor(0.0425, grad_fn=) tensor(0.0262, grad_fn=)\n", - "1.0058390273570694e-05 tensor(0.0471, grad_fn=) tensor(0.0269, grad_fn=)\n", - "9.933126762007305e-06 tensor(0.0520, grad_fn=) tensor(0.0277, grad_fn=)\n", - "9.79466301487264e-06 tensor(0.0571, grad_fn=) tensor(0.0285, grad_fn=)\n", - "9.639087721780015e-06 tensor(0.0625, grad_fn=) tensor(0.0295, grad_fn=)\n", - "9.552644115728981e-06 
tensor(0.0681, grad_fn=) tensor(0.0305, grad_fn=)\n", - "9.423503001926292e-06 tensor(0.0739, grad_fn=) tensor(0.0316, grad_fn=)\n", - "9.343192203914441e-06 tensor(0.0799, grad_fn=) tensor(0.0327, grad_fn=)\n", - "9.255932411633694e-06 tensor(0.0861, grad_fn=) tensor(0.0338, grad_fn=)\n", - "9.180420789789423e-06 tensor(0.0924, grad_fn=) tensor(0.0347, grad_fn=)\n", - "9.11575509121576e-06 tensor(0.0986, grad_fn=) tensor(0.0354, grad_fn=)\n", - "9.052671373410703e-06 tensor(0.1047, grad_fn=) tensor(0.0360, grad_fn=)\n", - "8.985739611944155e-06 tensor(0.1104, grad_fn=) tensor(0.0366, grad_fn=)\n", - "8.948941484732131e-06 tensor(0.1157, grad_fn=) tensor(0.0370, grad_fn=)\n", - "8.957309518109469e-06 tensor(0.1201, grad_fn=) tensor(0.0374, grad_fn=)\n", - "8.932085222568276e-06 tensor(0.1240, grad_fn=) tensor(0.0378, grad_fn=)\n", - "8.913578028568736e-06 tensor(0.1271, grad_fn=) tensor(0.0382, grad_fn=)\n", - "8.893165187373597e-06 tensor(0.1299, grad_fn=) tensor(0.0384, grad_fn=)\n", - "8.85833776465006e-06 tensor(0.1320, grad_fn=) tensor(0.0387, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_DGD(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_DGD, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " 
#print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " #x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " # self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1)\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "\n", - "\n", - "model_DGD = Net_DGD(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_DGD.parameters(), lr=1e-4)\n", - "model_DGD.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_DGD(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.93,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_DGD.lam[1], model_DGD.step_size[1])" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\nclass Net_NIDS(torch.nn.Module):\\n def __init__(self, step_size, 
num_layers, num_nodes):\\n super(Net_NIDS, self).__init__()\\n self.step_size = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\\n self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\\n self.c = nn.Parameter(torch.ones(num_layers)*step_size)\\n self.num_layers = num_layers\\n self.conv=MetropolisConv()\\n \\n def tgrad_qp(self, A, b, x):\\n # A: nodes * k * n\\n # X: nodes * n\\n # Y: nodes * k\\n grad_A = np.zeros(x.shape)\\n for i in range(x.shape[0]):\\n grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\\n return grad_A\\n x_ = torch.unsqueeze(x, axis = -1)\\n b_ = torch.unsqueeze(b, axis = -1)\\n\\n A_t = A.transpose(2,3)\\n grad_A = A_t @ (A @ x_ - b_)\\n grad_A = torch.squeeze(grad_A, axis = -1)\\n return grad_A\\n \\n def act(self, x, ii):\\n tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* self.step_size[ii]\\n return F.relu(x - tau) - F.relu( - x - tau)\\n \\n def forward(self, W, A, b,pyg_data, max_iter):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n ret_z = []\\n \\n k = 1\\n x_0 = init_x\\n x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\\n x_1 = self.act(x_12, 0)\\n \\n x_hist = [init_x,x_1]\\n \\n while (k < max_iter):\\n c = self.c[k]/(2*torch.max(self.step_size[k]))\\n #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n #print(W_hat)\\n temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\\n conv_result = self.conv(temp,pyg_data)\\n x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\\n #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\\n #x_32 =x_12 - x_1 + w@temp\\n x_2 = self.act(x_32, k)\\n \\n ret_z.append(x_2)\\n\\n x_0 = x_1\\n x_1 = x_2\\n x_12 = x_32\\n \\n\\n k = k + 1\\n x_hist.append(x_2)\\n \\n ret_z = torch.stack(ret_z)\\n return ret_z, 
x_2,x_hist\\nmodel_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\\noptimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\\nmodel_NIDS.train()\\nepoch_losses = []\\nfor epoch in range(500):\\n epoch_loss = 0\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\\n z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\\n loss = step_loss(0.83,z, x_true)\\n \\n optimizer.zero_grad()\\n loss.backward()\\n optimizer.step()\\n epoch_loss += loss.detach().item()\\n epoch_loss /= (iter + 1)\\n if(epoch % 10 == 0):\\n print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\\n'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "class Net_NIDS(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers, num_nodes):\n", - " super(Net_NIDS, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\n", - " self.c = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " \n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " 
ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " \n", - " while (k < max_iter):\n", - " c = self.c[k]/(2*torch.max(self.step_size[k]))\n", - " #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " #print(W_hat)\n", - " temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " conv_result = self.conv(temp,pyg_data)\n", - " x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\n", - " #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\n", - " #x_32 =x_12 - x_1 + w@temp\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "model_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\n", - "optimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\n", - "model_NIDS.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.83,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\n", - "'''" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Origin Methods" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "def tgrad_qp(A, b, x):\n", - " 
# A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - " \n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " # print(A.shape, x.shape, b.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - "\n", - "def torch_soft(x, tau):\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - "\n", - "def opt_distance(x,opt):\n", - " error = 0\n", - " batch_size = x.shape[0]\n", - " num_of_nodes = x.shape[1]\n", - " error = np.linalg.norm(x-opt)**2\n", - " return error/num_of_nodes/batch_size\n", - "\n", - "def hist_nmse(x_hist,opt):\n", - " error = []\n", - " iteration = len(x_hist)\n", - " #print(iteration)\n", - " for k in range(iteration):\n", - " error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt)))\n", - " return error\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin PG-EXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.13372513105541475 \t 0.0408734134671704\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.13343716019589918 \t 0.04000160481608373\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.13313904180978897 \t 0.03903774684386417\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.1332816909508274 \t 0.03621758377271363\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.13905003238760583 \t 0.04155931941443169\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.2927638932213376 \t 0.2069211997606617\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - 
"0.0007 \t 0.01 \t 0.07927415251129605 \t 0.018730914407643583\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.0786370414547896 \t 0.017819617515559116\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.07792730932453014 \t 0.016858828401515098\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.0757836815533683 \t 0.015276610518412894\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.08049656483295439 \t 0.022012851087541207\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.24006546234137385 \t 0.19087335745296333\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.0402854335657612 \t 0.006722754912862457\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.03941448216283289 \t 0.005966144229831371\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.038454413292958636 \t 0.005273334669776688\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.035625497970542715 \t 0.006016077719681363\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.040880536000739084 \t 0.013685644006236657\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.20604657200159454 \t 0.18433393092727784\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.00640948351305451 \t 0.0006975805647377982\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.0056579419007235 \t 0.0004622503986997817\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.004976148909411251 \t 0.0004187880421213937\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.005759805915488414 \t 0.00299922179167379\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.013389664507599377 \t 0.010879030340820918\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.18410002416756652 \t 0.18238959756942497\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 
553573731.2077812 \t 1.498902167069106e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 523961906.721125 \t 1.4234469312620988e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 488094292.00453323 \t 1.3315703121084944e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 251891251.2540703 \t 7.119442274838458e+21\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 75580921.81069532 \t 2.2457501815977896e+21\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.18236803234503168 \t 0.18236708525291762\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t inf \t nan\n" - ] - } - ], - "source": [ - "def torch_PGEXTRA(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 + x_12 - W_hat@x_0 - \\\n", - " step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", 
- " \n", - " return x_2,x_hist\n", - "\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_PGEXTRA(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " \n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.002, 'tau': 0.1}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.14528994270335716 \t 0.04625664968772798\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.1450785847578736 \t 0.04537581772543672\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.14487721806014453 \t 0.04440891480121036\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.14568550992533166 \t 0.041621361089528366\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.15203932291467206 \t 0.047143680175759525\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.30615092567439023 \t 0.21510140790953483\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.09096051111738962 \t 
0.02306828007093327\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.09037740510981213 \t 0.022086736343633673\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.08974000033638002 \t 0.021051808206449207\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.08812911430411806 \t 0.019172873314535537\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.09338265026763111 \t 0.026173103356469368\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.25482204687753257 \t 0.1997921530275562\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.05098040640773251 \t 0.009619757754708302\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.050092768744111255 \t 0.008712170114104197\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.04912033375982037 \t 0.007854652986114274\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.046343878285515984 \t 0.00822684159900848\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.05201389910466969 \t 0.016532563354827404\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.22173487649929485 \t 0.19507110703946817\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.012490944118953962 \t 0.0013244014998036846\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.011447349706195837 \t 0.0009096930317537328\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.010445024514360284 \t 0.0007770026989783503\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.010436722595697574 \t 0.00393510351715679\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.019242669186503917 \t 0.013333906185225827\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.2040197457123595 \t 0.2008579279701298\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 435370023672891.4 \t 
1.0030094976498447e+34\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 419536852270696.5 \t 9.694775894821206e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 400228842401964.0 \t 9.283946974566845e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 265330132756531.2 \t 6.37501409675475e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 143127086611662.84 \t 3.6349477271791455e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.22072704625538792 \t 0.2207067688026309\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t inf \t nan\n" - ] - } - ], - "source": [ - "def torch_DGD(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 - step_size*tgrad_qp(A, b, x_1)\n", - " x_2 = torch_soft(x_32, tau * step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,7e-4,1e-3, 
2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_DGD(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.002, 'tau': 0.1}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin NIDS" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\ndef torch_NIDS(W, A, b, max_iter, step_size,tau):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n c = 1/(2*step_size)\\n \\n (batch_size, num_of_nodes, dim) = init_x.shape\\n I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\\n I = I.repeat(batch_size, 1, 1)\\n \\n \\n #initialization\\n k = 1\\n x_0 = init_x\\n #print(alpha.unsqueeze(-1).shape)\\n x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\\n x_1 = torch_soft(x_12, tau*step_size)\\n \\n x_hist = [init_x,x_1] #add for plot\\n while (k < max_iter):\\n W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\\n x_2 = torch_soft(x_32, tau*step_size)\\n \\n x_0 = x_1\\n x_1 = x_2\\n x_12 = 
x_32\\n \\n k = k + 1\\n \\n x_hist.append(x_2)\\n \\n return x_2,x_hist\\nlams = [5e-4,1e-3, 5e-3,1e-2]\\ntaus = [1e-2, 5e-1, 1, 5]\\nbest_error = 100\\nbest_par = {}\\n#cs = [ 5e-1, 1,10,20,50,200]\\nfor lam in lams:\\n for tau in taus:\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\\n loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\\n loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\\n \\n print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\\n print(lam,\\'\\t\\', tau, \\'\\t\\',1/(2*lam),\\'\\t\\',loss1,\\'\\t\\',loss2)\\n if loss2 < best_error:\\n best_par[\\'lam\\'] = lam\\n best_par[\\'tau\\'] = tau\\n best_par[\\'c\\'] = 1/(2*lam)\\n best_error = loss2\\n'" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "def torch_NIDS(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " c = 1/(2*step_size)\n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " #print(alpha.unsqueeze(-1).shape)\n", - " x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return 
x_2,x_hist\n", - "lams = [5e-4,1e-3, 5e-3,1e-2]\n", - "taus = [1e-2, 5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "#cs = [ 5e-1, 1,10,20,50,200]\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\n", - " print(lam,'\\t', tau, '\\t',1/(2*lam),'\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_par['c'] = 1/(2*lam)\n", - " best_error = loss2\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "#print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# PLOT" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"\\nfor iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\\n _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\\n \\n original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, num_layers,0.002 \\t 2 )\\n original_DGD, original_DGD_hist = torch_DGD(W, A, y, num_layers,0.001,0.05)\\n original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\\n\\n\\norigin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\\norigin_DGD_error = hist_nmse(original_DGD_hist,x_true)\\norigin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\\npred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\\npred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\\n\\n#plt.rc('text',usetex=True)nn\\n\\nx = [i for i in 
range(num_layers+1)]\\nplt.plot(x,origin_DGD_error[:num_layers+1])\\nplt.plot(x,origin_PGEXTRA_error[:num_layers+1])\\nplt.plot(x,origin_NIDS_error[:num_layers+1])\\n\\nplt.plot(x,pred_DGD_error[:num_layers+1])\\nplt.plot(x,pred_PGEXTRA_error[:num_layers+1])\\n\\n\\nplt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \\nplt.xlabel('iterations',fontsize= 'x-large')\\nplt.ylabel('NMSE',fontsize= 'x-large')\\n\\nplt.show()\\n\"" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, num_layers,0.002 \t 2 )\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, num_layers,0.001,0.05)\n", - " original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "\n", - "#plt.rc('text',usetex=True)nn\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,origin_DGD_error[:num_layers+1])\n", - "plt.plot(x,origin_PGEXTRA_error[:num_layers+1])\n", - "plt.plot(x,origin_NIDS_error[:num_layers+1])\n", - "\n", - "plt.plot(x,pred_DGD_error[:num_layers+1])\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1])\n", - "\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \n", - "plt.xlabel('iterations',fontsize= 
'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "plt.show()\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 300,0.002,0.1 )\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 300,0.002,0.1)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 300,0.005,0.01)\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "writer_error=pd.ExcelWriter(\"./error_fig/noise1/\"+figure_name+\".xls\")\n", - "df_error= pd.DataFrame({'PG-EXTRA':origin_PGEXTRA_error,'DGD':origin_DGD_error})\n", - "df_error.to_excel(writer_error,sheet_name='Origin')\n", - " \n", - "df_feasibility= pd.DataFrame({'PG-EXTRA':pred_PGEXTRA_error,'DGD':pred_DGD_error})\n", - "df_feasibility.to_excel(writer_error,sheet_name='GNN')\n", - "writer_error.save() " - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnWd4VEUXgN9J7wkpEBJCDU2qEJXekQ7SQQURRRERFAQVLGD7RJpKF0GqgCC9d6QISG/Se0JJAuk9me/HTbaElE1IZ97nuQ935t47c3bZ7NmZ04SUEoVCoVAoTMUsvwVQKBQKReFCKQ6FQqFQZAmlOBQKhUKRJZTiUCgUCkWWUIpDoVAoFFlCKQ6FQqFQZAmlOBQKhUKRJZTiUCgUCkWWUIpDoVAoFFnCIr8FyA3c3d1l2bJl81sMhUKhKFQcP348SErpkdl9RVJxlC1blmPHjuW3GAqFQlGoEELcMuU+tVWlUCgUiiyhFIdCoVAosoRSHAqFQqHIEkpxKBQKhSJLFEnjuEKh0AgLC+Phw4fEx8fntyiKAoK9vT2lSpXCzCz764ZCoziEEG2BnwFz4Dcp5Q/5LJJCUaAJCwvjwYMHeHt7Y2trixAiv0VS5DNJSUn4+/sTFBRE8eLFsz1OodiqEkKYAzOAdsBzQF8hxHM5PU/wwzus+qI3K796PaeHVijynIcPH+Lt7Y2dnZ1SGgoAzMzMKFGiBKGhoU81TmFZcbwIXJVSXgcQQiwHugAXcmqCvX9Mwvl/86gWD8FOED82Dksrq5waXqHIc+Lj47G1tc1vMRQFDEtLSxISEp5qjEKx4gC8gTsG7bvJfTqEEO8IIY4JIY4FBgZmeYLqLfqQmPxuuIXB7t8+z760CkUBQa00FKnJic9EYVEcmSKl/FVK6Sel9PPwyDRi/gncPUtxvaL+11n8vk3IpKScFFGhUCiKBIVFcfgDPgbtUsl9OYpNu/a6c/cbiZzZszKnp1AoFMmULVsWW1tbHBwcKFGiBAMGDCAiIiLX5xVCYG9vj4ODA25ubrRs2ZIVK1Y8cd+OHTto3rw5jo6OuLm5Ubt2bSZMmEBMTAwA48aNw9LSEkdHRxwdHalUqRJDhw7l3r17uf4a8pvCojj+BSoKIcoJIayAPsD6nJ6kXo8hRFpr585hArlpolp1KBS5yIYNG4iIiODEiRMcO3aMb7/91ui6lJKkXPgbPH36NBEREVy6dIkBAwYwdOhQxo8fr7u+cuVKevTowauvvsqtW7cIDg5mxYoV3L17lzt39LvmvXv3Jjw8nEePHrFmzRru379P3bp1i7zyKBSKQ0qZAAwFtgH/AX9KKc/n9DzFnb249Jyjrh13J5jzhzbm9DQKhSIV3t7etGvXjnPnztGsWTPGjh1Lw4YNsbOz4/r16wQEBNC5c2dcXV3x9fVl7ty5umfbt2/PyJEjde0+ffowcOBAk+Z1d3enX79+zJo1i//9738EBwcjpWTEiBF8+eWXDBo0CFdXVwAqV67MtGnTqFix4hPjWFpaUq1aNVasWIGHhweTJ09+ynekYFNYvKqQUm4GNuf2PKJFQzi5VTu/ZYXN7i9IrNcec4tC81YpFIWOO3fusHnzZrp168b+/ftZvHgxW7ZsoXLlykgpadmyJdWrVycgIICLFy/SunVrKlSoQIsWLZg/fz41a9akQ4cO3Lt3j6NHj3L69Okszd+lSxcSEhI4evQo5cqV4+7du3Tv3j3Lr8Pc3JwuXbqwbdu2LD9bmFDfhqmo1KYXUdO3YhcLDmFmeAf5c2zNT7zU8+P8Fk2heCrKfropz+a6+UMHk+575ZVXsLCwwNnZmQ4dOjBmzBjatWvHgAEDqFatGqAplYMHD7Jp0yZsbGyoXbs2b7/9NosWLaJFixZ4enoya9Ys3njjDaKjo1m7di2Ojo6ZzGyMpaUl7u7uPHr0SPesp6en7nqfPn3YunUrcXFxzJkzh379+qU7lpeXF48
ePcrS/IWNQrFVlZfUKuXHqUqWuvbte3ZUOv8ToY+y7uKrUCgyZu3atYSEhHDr1i1mzpypizvx8dH7wgQEBODq6mqkDMqUKYO/v94/plOnTiQmJlK5cmUaNWqk669WrRoODg44ODiwf//+dOWIj48nMDAQV1dX3NzcAIzsFMuXLyckJIQ6deqQmJiY4Wvy9/fXbW8VVZTiSIWlmSVR9arp2mH3bShGOBeXfZqPUikUzxaGsQYpv+DDw8N1fbdv38bbWx/KNXbsWKpWrcq9e/dYtmyZrv/8+fNEREQQERFB48aN051v3bp1WFhY8OKLL1K5cmW8vb1ZvXp1luVOSkpiw4YNGc5VFFBbVWlQsmkbkn47hZkE+0AzEmLN8Hv4FxdPv02VWvXzWzyFIluYun1U0PDx8aFBgwZ89tlnTJo0icuXLzNv3jyWLl0KwN9//83vv//O6dOnuX79Ol27dqVJkyZGiiU9Hj16xJYtWxgxYgSffPKJbrUxefJkBg0ahJOTEz169MDFxYWrV6/y4MGDNMdJSEjgypUrjBs3jvv37zNixIicewMKIGrFkQbVy9fjipd2LhBE3rfCXEji148kLj7jZapCoch5li1bxs2bN/Hy8qJr166MHz+eVq1aERYWRv/+/Zk+fTre3t40btyYt956izfffBMpZbrj1apVCwcHB3x9ffntt9+YOnUqX3/9te567969+fPPP1myZAk+Pj64u7vTq1cv3nnnHXr27Km7b8WKFTg4OODs7Eznzp1xc3Pj+PHjeHl55er7kd+IjN7cwoqfn598mprjCUkJ/Di4Ll3/jgPAonwMFV/UjF3bK3/Dy32H5YicCkVu8t9//1G1atX8FkNRAEnvsyGEOC6l9MvsebXiSAMLMwsi61bStSMfOpCiX2tfnMy1uwH5JJlCoVDkP0pxpINHnfpE2Gjn1hEJPAzX9j6LixDOLBlDfKKKKFcoFM8mSnGkQ80StTlbVu/ZEWXXTnfeKXodf6zfmh9iKRQKRb6jFEc61PSoyanyesUh7kYS4KJt/VmIJKqcHM/JW0U7yEehUCjSQimOdHC3dedB9ZK6dvThI7i8MJIEzAF4yewim5b+RFiMquWsUCieLZTiyICyvnX5t6J+1RG8dBNRdd7Rtd+N/Z1vVh7K0O1PoVAoihpKcWRATY+arGhiRooZPGL3bixLdiHapgQAHiKMmpd+Yfm/d9IfRKFQKIoYSnFkQG2P2twuLvinqn7VETjrN2w7T9S1XzPfxV/r13E+4OmKvysUCkVhQSmODPAt5gvAn43NSErWHZGHDhEVU5rECq0AMBOSr8x+4/3FRwmJissvURUKhSLPUIojA6zNrXG1ceWem2B/Nf2qI2zjRsw7TCLJXAv0qGF2k5Zhaxi+/BSJScreoVCYQkalYzMr25qamzdvIoTQZcJNOVJKwnbv3p1BgwYZPdO1a1eGDh3K999/r7vfxsYGc3NzXTsltbthuVlvb29GjBiRZpbcAQMGYGFhoSoAPuuUsNPsGXtqGiiO7duRzqUxazpK1zfSYhXXrpxnyo5LeS6jQlFYSat0rKllW9MiJCRElw03IiKC3r17AzBjxgxWr17Nnj17AC3H1IkTJ/jhhx8YM2aM7v7Zs2dTv359Xfv8eX2h0ZRys/v27WPFihXMnz/faO7IyEj++usvnJ2dWbJkSQ6/UwWLAq84hBDjhBD+QohTyUf7vJy/pL3mkvufjyChmFYPIDEoiKh//4WGw6G49ovETsTyncV8Zuy5yrpT/umOp1AoniSldOzZs2ezXLbVFDw9PXUZb2/fvs2wYcOYM2cODg4OWR7L19eXhg0bcurUKaP+v/76CxcXF7788ksWLlyYLTkLCwVecSQzVUpZO/nI9fKxhnjaa1XApJkgqJ7+Qxu2eQuYW0LnX5Boq5Gm5mfoanaAUavOcOpOSF6KqVAUalJKx9rZ2WW7bGtmDBgwgAoVKlCnTh3atm1L27ZtszXOxYsX2b9
/P76+vkb9CxcupG/fvvTp04eLFy9y/PjxnBC7QKLqcWRCiuIAuFKnOJ5btPPw7dvx/PILRCk/xEuD4cgsAMZZLuRgbHXeXniMNUMa4ONqlx9iKxRPMs45D+cyzcswdenYvn37smrVqmyXbXV3dzdq//PPP0ZZYBs3bsz27dt5/fXXs/iC0FX/i4qKok+fPgwZMkR37fbt2+zZs4fJkydTokQJWrZsyaJFi6hbt26W5ykMFJYVx1AhxBkhxHwhRLG0bhBCvCOEOCaEOBYYmHNlXg0VxwWvJCxKaDaPxJAQIg8f0S60+BxcygDgLKL4wXIuQRExvLngX0KjVGS5QpEeqUvHpnzxZ1a21dAAfvv2bd29QUFBhISE6A5DpXHlyhUmTZrEkCFDGDlyJPHxWfvbPHHiBBEREaxYsYIjR44QGRmpu7Z48WKqVq1K7dq1AXjttdf4448/sjxHYaFAKA4hxE4hxLk0ji7ALKACUBu4B0xOawwp5a9SSj8ppZ+Hh0eOyWaoOO5HP8CpbRtdO2xL8q6ZtQN0maHrb2F+ip7m+7j6MIJBi48Ro4o/KRQmYWrZVkMDeOnSpTMdV0rJ22+/zYcffsi0adOwt7dnwoQJWZZPCEGvXr2oX7++UeGnRYsWcf36dTw9PfH09GTEiBEEBQWxeXOe7qznGQViq0pK2cqU+4QQc4GNuSyOESnGcYD7kfdxavcpjxYuAiB8x07kV18hrKygXGN4aTAcmQ3AlxaLOZxUlaM34MPlp5jxWh3MzUSacygUeYKJ20f5iZmZWZbLtprCrFmzCAoKYsyYMZiZmTFv3jzq1atHjx49qFKlSpbH+/TTT6lXrx6ffvopN27c4Nq1a5w8eRLDH60jR45k0aJFdOnSJdtyF1QKxIojI4QQJQ2aXYFzeTm/u607ZkJ7m4JjgjGrXhXL5LKQSWFhRBw8qL+55VfgWgEARxHNz5YzMCeRrefv8/nasyqnlUJhAqaWbU0LFxcXo22sKVOmcPv2bcaMGcO8efOwsrIC4LnnnmPkyJEMGjQoW3+XNWrUoEmTJkycOJGFCxfSpUsXatSooVtxeHp6Mnz4cDZu3MijR0Uvi3aBLx0rhFiMtk0lgZvAu1LKDKNrnrZ0bGparWzFgyjt187mrpuxnvsnwXN/A8CpUye8J/6ov/nucZj/MiQlAPBzQlemJmgf9veaVeCTtln/daNQZAdVOlaRHkW+dKyUsp+UsoaUsqaUsnNmSiM3MLJzRN3Hqb0+lCRi1y6SoqP1N5eqC83H6pofWKzjRfEfALP2XmP67iu5L7BCoVDkIgVecRQEUts5rKtUwapcOQCSoqKI2Pe38QMNh0PZxgCYkcSvdjNxQ9tfnrT9Mr/tv543gisUCkUuoBSHCRitOCLvI4QwWnWEpfacMDOHbr+CnVan3CUxmAXOv5KSoP3bTf+x8NDNXJdboVAocgOlOEwgteIAcGqvr0EesXcvicnJ2XQ4eWnKIzmqvEbsSX5w19cp/2r9eRYcvJF7QisUCkUuoRSHCXja6RXHvUjNxGJdoQLWyW58Mi6OiH37nnzQtxU0+VjX7BmxlLc9r+ra4zZcYN4BpTwUCkXhQikOE/B0MDaOp+DU5mXdefiOnWk/3Owznb1DIBkbM4X23vrU0N9svMCMPVfTflahUCgKIEpxmIDhiiNlqwrAsZU+bjHi779Jio198mEzc+jxOzh5AyBiQphmMZUGpW11t0zcdonJ2y+pOA+FQlEoUIrDBFxtXLEy0wKHwuPCiYqPAsDK1xersmUBkFFRRB48lPYADh7QaxGYWQJg/uAsi9yX0qC8q+6WabuvMn7DBZJUISiFQlHAUYrDBIQQlLAvoWunrDqEEDi21q86wnfsSH+QUn7QXh8oaHFhFQurHqFZZX2KggWHbjJy5WniE5NyUHqFQqHIWZTiMBFDz6q7EXd1546tW+vOI3bvRiYkpD+I30CoO0DXtNw9nrkNQulQUx8nsuakP+8sOkZ
UXAbjKBRFhOXLl/PSSy9hb29P8eLFeemll5g5cyZSSgYMGIAQgqNHj+ruv3r1KkLoc741a9YMGxsbo8qAO3fupGzyTkBajBs3DktLSxwcHHBxcaFBgwb8888/ufL6UjNgwACsrKxwdHTE0dGR6tWr89lnnxEaapxH7N69ewwaNAgvLy8cHBwoX748AwYM4OLFi8CTpXJLlChBx44d2ZHRj9ccRCkOEynnVE53vvLSSt25TfXqWCTXDkgMDSUqs1Qn7SaCTz3tXCZhufotfmllT98X9Rk+91wKpO/cIzyKjMu5F6BQFDAmT57M8OHDGTVqFPfv3+fBgwfMnj2bgwcPEhenffZdXV35/PPPMxzH3t6eb775Jktz9+7dm4iICAIDA2nUqBHdunVL08aYkNEPwWwyevRowsPDCQwM5Pfff+fw4cM0bNhQl6Y9ODiYBg0aEBUVxf79+wkPD+fEiRM0bdr0CcWQUir39OnTtG7dmq5du7JgwYIclzk1SnGYSO8qvRHJMRl77+7l5MOTAAgzMxxbttTdF749E41vYQW9F+uM5cSGYr68D9+39WZoc31FsdN3Qug+6xC3giPTGUihKLyEhoby5ZdfMnPmTHr06IGjoyNCCJ5//nmWLl2KtbU1AG+88QZnzpxhX1ru7skMGzaMZcuWce3atSzLYWlpyRtvvMH9+/cJDg5mwYIFNGzYkI8++gg3NzfGjRtHUlIS3377LWXKlKF48eL0799ft0JYsWIF5cqVIywsDIAtW7bg6emJKTWBbGxseOGFF1i/fj3BwcH8/vvvAEydOhUnJycWL15MhQoVEELg4uLCm2++yQcffJDmWClJFceNG8cnn3xCUlLubncrxWEilYpVon15fbT4zyd+1v1CMbJz7NqFzOw/zaE49F0GlsnVAR9dR/zZn49bluObLtVIWYnfCIqk28xDnLz9OEdfi0KR3/zzzz/ExsZmmnLczs6OMWPGMHbs2HTv8fb2ZtCgQXz11VdZliM2NpYFCxbosvACHDlyhPLly/PgwQPGjh3LggULWLBgAXv27OH69etEREQwdOhQQFu5NGjQgGHDhhEcHMxbb73Fb7/9RlZqAjk6OtK6dWv2798PaFttXbt2xcws61/P3bp14+HDh1y6dCnLz2aFAlGPo7Dwfq332XZjGwkygeMPjnMw4CCNvBth5+eHubMziaGhJDx4QMz589jWqJHxYCVrQdc58GdyGcyb+2HDcPq9MhMPR2uGLz9FbEISwZFx9Pn1MD/1rk27GiUzHlOhyIAaCzP5TOYgZ984m+H1oKAg3N3dsbDQfwU1aNCACxcuEBsby7Zt23T97777LpMmTWLLli1UrFgxzfE+++wzfH19OX/+vEny/fnnn2zcuBErKyuqV6/OmjVrdNe8vLx0v+wtLCxYunQpI0aMoHz58gD873//o3r16vz+++9YWFgwY8YMatasSbNmzejUqRMdO3Y0SQZDvLy8dDXKg4KCjErnrl+/nv79+5OYmEj9+vXZvn17huMAuZ7KXa04soCPkw/dK3XXtReeXwiAsLDAoXlzXX+6wYCpea4ztPhC3z79B+z9gbbVS/LHoHoUs9Pcd2MTknhv6Qlm7b2mYj0URQI3NzeCgoKMbAiHDh0iJCQENzc3o60Wa2trvvjiC7744ou0hgLAw8ODoUOH8uWXXxr1L126VGdAbtdOnyaoV69ehISE8PDhQ3bv3m1UG9zHx8dojICAAMqUKaNrlylThoSEBF1hKRcXF3r27Mm5c+cYOXKk7r7vv/9eN/fgwYMzfD/8/f1xdXXVvTeGpXM7d+5MSEgIU6dO1dl+MhoH0I2VWyjFkUUGVh+oOz8deJqE5LobRttVO01UHACNR8Lz/fTtfT/AicXULVOMNUMaUtbNTndpwtaLjF51htgEVYpWUbipX78+1tbWrFu3zqT733zzTUJCQjIsKTtq1Cj27Nmj++UOWu3vlBKzW7ZsMWkuQ68t0H7F37p1S9e
+ffs2FhYWlCihueifOnWK+fPn07dvX4YNG6a7b8yYMbq5Z8+ene58ERER7Ny5k8aNtQwTLVu2ZO3atdmyU6xZs4bixYtTuXLlLD+bFdRWVRbxcvDC096T+5H3iU6I5mrIVaq4VuGAVwQ+1haYxyYQd/06sdevY528tM0QIaDjVAjzh2u7tb4Nw8DOlbJVOrBmSEMGLznOkRva0nPl8bvcCIpkdr+6uDtY5+IrVRQ1Mts+yktcXFz46quvGDJkCFJK2rRpg729PWfOnNF5FxliYWHB+PHjjb6Y0xpz5MiR/Pjjjzg6OuaYrH379mXChAm0a9cODw8PxowZQ+/evbGwsCAmJobXX3+d77//noEDB+Ln58fMmTMZMmRIpuPGxsZy7tw5PvnkE4oVK8abb74JwIgRI1iyZAn9+vXj66+/pnz58kRERHDq1Kl0x3rw4AErV65k/Pjx/Pzzz9myj2SFArHiEEL0FEKcF0IkCSH8Ul37TAhxVQhxSQjRJr9kNKSme03d+ZnAM/x7/19GH/2cY2X1KwGTt6sAzC2h50LwTN6Dlkmw8k24eYBi9lYsfusletYtpbv92K3HdJl+kPMBBb+GtEKRHqNHj2bKlCn8+OOPlChRghIlSvDuu+8yYcIEGjRo8MT9ffv2pWTJjO18w4cPx9zcPEflHDhwIP369aNJkyaUK1cOGxsbpk2bBmi2FR8fH9577z2sra1ZsmQJn3/+OVeupF+wLUWxubm50b9/f+rWrcuhQ4ewt7cHwN3dncOHD2NjY0OjRo1wdHSkdu3ahIeHM2vWLKOxXFxcsLe3p0aNGmzevJmVK1cycODAtKbNUQpE6VghRFUgCZgDfCylPJbc/xywDHgR8AJ2ApWklBnu1eR06djULDy/kEnHJgHQuUJnrMytWHV5FY3PJfHBBm15aVOjBuVW/pm1gSMewryX4XFyxlwrR+i/DkrVRUrJb/tv8P2W/0j5L7OxNGNij1p0quWVUy9NUYRQpWMV6VEkSsdKKf+TUqblP9YFWC6ljJVS3gCuoimRfKWWRy3d+enA0+y7o/mYn6ggSEx+R2POniU+ICBrAzsUh35rwCE5vUlcOCzpCvdOI4RgUJPyzH/jBRyttR3GmPgkPlh2kv9t+Y8ElaZEoVDkEQVCcWSAN3DHoH03uS9fqeJaBQsz7cv7VtgtAqO1YJ9IW8HlCja6+8K2bE3z+QxxLQf91oJtsldETCgsegUeaG6GzasUZ837DSnvbq97ZM6+6wz4/V8Vaa5QKPKEPFMcQoidQohzaRwZRwCZPv47QohjQohjpkRtPg02FjZUKVYlzWsHDbrDTPTieIISz0H/tWDjrLWjH8GCjnBfM276FndgzfsNaW6QIPHA1SA6TTvA6Tsh2ZtToVAoTCTPFIeUspWUsnoaR0b+eP6AoVN1qeS+tMb/VUrpJ6X0y0rUZnapVbxWmv0HK8RDclBTzLlzxN2+nb0JStaC19eAtZPWjn4ECztBgOZZ4Wxrybw3XmB4S31AlH9IND1n/8MfR26reA+FQpFrFPStqvVAHyGEtRCiHFAROJrJM3mCoWeVIZG2Aov6ettStrarUihVV9u2sk5ZeTyGhZ3hjvYWmJkJPmpdid/6++FooymruMQkxqw5y8iVp1WGXYVCkSsUCMUhhOgqhLgL1Ac2CSG2AUgpzwN/AheArcD7mXlU5RU1PdJWHABxzV7QnWd7uyqFUnWNt61ik20e1/VJ31o9V4INQxtRxVPvu776hD+vzDjI1YcRTze/QqFQpKJAKA4p5RopZSkppbWUsoSUso3Bte+klBWklJWllE/5LZxzeDt442qTdlj/I78KCCutYmDsxYvEXr/xlJPVgTc2gp2WhI34SFjaEy7p346y7vasGdKQ7nX08R6XH0TQefoB1py8m3pEhUKhyDYFQnEURoQQtC6jFXFytHKkfsn6umvB5tE4NG2qa4dt2vT0E5asCW9uAcfkAKjEWFj+GpxeobvF1sq
cST1r8mP3mlhbaP+1UXGJfLTiNKNXnSY6rkAs1hQKRSFHKY6nYNQLo/ip+U8s77Ac32L6WhqPYx7j1KGDrh26dm3mqdZNwaOSpjyKldXaMhHWvAOH9XlwhBD0esGHNUOMXXb/PHaXztMPcOl++NPLoVAonmmU4ngKrM2taVm6JaWdShttWz2OeYxDi+aYOWt2iXh/f6L+zaFIdtdyMHAbFK+m79v6Cez6Ggw8qZ7zcmL9B414pbY+qvzKQ23raumRW8rrSlEgKIilYzMr25oWKXKkZMN1cHCgU6dOAGzYsAFPT0+jVOfr1q3D29ubW7duGT0jhMDe3l7X3r9/v67crIODA66urrRu3TpNWfbu3YsQggkTJmT8pucASnHkEMWsi+nOg2OCMbOywtlw1WGQ7/+pcfSENzeBz0v6vv2TYf0HkKj3pHKwtmBq79r82L0mNpbaf3VsQhJj15zjvSUnCIlSAYOK/KMglo7NStnW1EyfPl2XDTciIoINGzYA0KlTJ1q0aMFHH30EaOVe33vvPWbNmkWZMmWMngE4ffq0rp2SMXf06NFERETg7++Pt7c3b7311hPzL1y4EFdXVxYtWpSl9yI7KMWRQxSz0SuOxzFaxT7nrl11fWHbt5OURtbPbGNbTHPVrWiQ9/HkYljxGsTp50nZutr4gbHX1dbz92n3834OXw/OOZkUChMpqKVjs1O21RR++eUXtmzZwrZt2/joo49o2rQpnTt3zvI4tra29OrV64lMuZGRkaxatYoZM2Zw5coVcjNXHyjFkWOk3qoCsKleDeuKmu1DRkURti39yl3ZwsoO+iyFWq/q+y5v1QIFI4OMbvUt7sja9xvSv76+IM290Bj6zj3Mj1svEq9yXSnykIJaOvZpyrZmhLu7Oz///DOvvfYaGzdu5JdffsnWOJGRkSxbtgxfX1+j/tWrV+Pg4EDPnj1p06YNCxcuzAmx00XV48ghjBRHrKY4hBA4v9KVhxMnAhC6ejUu3bqm+Xy2MbeEV2aCU0ltuwrA/zj81gpe/wvcKuhutbE05+su1Wnk687ov84QEhWPlDBz7zUOXA1iau/aVPBwyFn5FAWG/6rkXabcqhf/y/B6QS0d+zRlW4c2+9XsAAAgAElEQVQNG8bHH3+sa3/wwQdGW2j16tUjNDSUXr16ZakmOcCkSZOYPn06YWFhlClT5okCWAsXLqR3796Ym5vz6quvMmzYMKZMmYKlpWWW5jEVteLIIQy3qh7F6I1gzp07QXJ9gKhjx4gzqCSWYwgBLb+E9pOAZMPh4xua8rh95InbX67mybYPm9DQ103Xd+ZuKB1/OcCSw8pwrsh9CmrpWFPKtg4ePFg35vfff6+795dffiEkJER3pLa7vPPOO/Tv35/NmzcbGeNN4eOPPyYkJISbN29ia2vLpUv6ZOJ37txhz549vPbaawB06dKFmJgYNuVEGEA6KMWRQzhYOmBppmn36IRoohOiAbDw8MChSRPdfSGrc9BInpoXB2lbVxa2Wjv6ESzqDOefnLOEkw2LB77E2PZVsTLXPgbR8Yl8vvYcAxf8y8PwmNyTU/HMU1BLx5pStnX27Nm6MceMGWOS/PPmzePOnTvMnDmT77//nrfffjvT+uFpUbp0aX7++WeGDx9OdLT2HbN48WKSkpLo1KkTnp6elC9fnpiYmFzdrlJbVTmEEIJiNsV4GPUQ0Owctg7aF7hL925E7NkDaN5VHh8MRVjk0ltfpQMM2Ah/9IaoIEiIgZUD4PFNaPihtjpJxsxMq/HR0NedD1ec5PIDzatjz6VA2kz9m++71qBdjYwrrikKD5ltH+UlBbV0bHbKtmZGQEAAo0aNYt26dVhbWzN48GCWLVvGd999x/jx47M8XuvWrfHy8uLXX39l+PDhLFy4kK+++orBgwfr7jl69Cg9e/YkODgYNze3DEbLHmrFkYMY2jkMt6scmjbFPPk/L+HhQyIPHsxdQUr5wds7wM3AgLZznOaum/Dkr5z
nvJxYP7QRbzUqp+t7HBXPe0tP8NGKU4RGx+euvIpnkoJYOjYrZVtTM3ToUKOYjJTtryFDhtCnTx+da60Qgrlz5/LTTz+ZbJNJzahRo/jxxx/Zt28ft27d4v3338fT01N3dO7cGV9fX5YtW5at8TOjQJSOzWlyu3Rseryz/R3+uaftXc5oOYMmpfRbVA9+nMij+fMBcGzdmlLTsudVkSWiHsGK1+GWgaIq2xh6L9bcedPg4NUgPl55mnuh+q0qTycbfuxRkyaVcj9dvSLnUKVjFelRJErHFhVcbZ90yU3BpXs33Xn4nj0kGESR5hp2rlop2lp99X0392tG8+C0fd4b+rqz9cMmdHteX2jxflgM/ecfZcyas0TEqlTtCsWzjlIcOYhh9LjhVhWAdYUK2NaurTUSEnI2kjwjLKzhlVnQwsAjJfgqzG1hlJrdEGdbS6b0rs3s1+vgZm+l6//jyG3a/vQ3/1xTQYMKxbOMUhw5SFpBgIa49Oypv758Rc4kPjQFIaDJx9BzAVgk10SPCYEl3eDY/HQfa1u9JNs+asLLz5XQ9d19HE3fuYf5at05ItXqQ6F4JlGKIwdJzzieglP7dvrEh3fu5L6RPDXVusKbm8EhOcApKQE2fgSbRxvluDLE3cGaOf3qMrV3LZxt9cFEC/+5Rduf1epDoXgWUYojBzHKVxX75IrDzNYWl1de0d+zbHmeyGWEd10YtBs8DSoYHp0DS3topWnTQAhB1+dLsf2jJrSsUlzXf+eRtvr4Yu05ZftQKJ4hCoTiEEL0FEKcF0IkCSH8DPrLCiGihRCnko/ZGY2T3xitOKLTNn679OmtO4/Yu5f4gIBcl+sJnL1h4FZ4ziBP0PU9MLclBF5O97ESTjb89oYfU3rVwslGH4ey+PAt2kz9m/1XAnNTakU2yCiQTfFskhOetAVCcQDngG7A32lcuyalrJ18DE7jeoEhrXxVqbEuVw77BsnVApOSePznn3kh2pNY2UOPBdD0U33fo2vwW0u4nH4+HiEE3eqUYseIprSqql99+IdE02/eUUavOq3iPgoI9vb2+Pv7ExcXp9LIKAB0qeNtbGyeapwCETkupfwPMCrQUhhJL19Valz69iXykBbvEbpmLR7DhiFyOBunSZiZQfPPoHgVWPMeJERDbBj80QtaffVEpLkhJZxsmNvfj3WnAhi34TwhUZqy+PPYXfZeCuTbV6rzcjXPNJ9V5A2lSpUiKCiIW7duGeWEUjzb2NjYUKpUqacao0AojkwoJ4Q4CYQBn0sp9+e3QOnhYOmAhZkFCUkJRCdEExUfhZ2l3RP3OTZvjrmLC4khISQ8eED0yZPYJUeZ5gvVuoJreVj2KoTdBaQWaX7/HHSepqVvTwMhBK88701DX3fGrT/PprNacriH4bG8s/g4HWqWZFynang4Wufda1HoMDMzo3jx4hQvXjzzmxWKLJBnP3OFEDuFEOfSODJKyH8PKC2lfB4YAfwhhHBKZ/x3hBDHhBDHAgPzZ69dCIG3gz5w7vC9w0bXQ2JCGHtgLFNO/Yx961a6/rDNmSdfy3VK1oJ39kLp+vq+c6tg/ssQcjvDRz0crZnxWh1mv14Hdwe9kth05h6tpuxj5bE7aqtEoShC5JnikFK2klJWT+NINz2mlDJWShmcfH4cuAZUSufeX6WUflJKv6zmus9JXi7zsu583VXjlzbt5DTWX1vPgvMLuO6nz7kTtm0bMjExz2RMFwcP6L8e/Abq++6fhTlN4freTB9vW70ku0Y0pWdd/TI4NDqeUavO0G/eUW4F52AFRIVCkW8UFON4mgghPIQQ5snn5YGKwPX8lSpjOlfQl4P8++7fOltHkkxi1+1dumuXylhg7u4OQGJQEFH//pu3gqaHhRV0nAqdfobkNPFEP4LFXeHgz5DJysHZzpKJPWux+K0XKVXMVtd/4GoQbX76mzn7rpGgqg0qFIWaAqE4hBBdhRB3gfrAJiFESvmvJsAZIcQpYBUwWEqZB0m
esk9Z57LU9tBSiyTIBDZf3wzA+aDzBMfog+WC4h7h1EZfL7xAbFcZUneAcbCgTIIdX2op2mMjMn28cUUPtn/UhLcblcMs2b4eE5/E/7ZcpMuMg5y9G5proisUitzFJMUhhHDN5Lq5EKJOdoWQUq6RUpaSUlpLKUtIKdsk9/8lpayW7IpbR0q5Ibtz5CVdfPVmm3XXtO2qPXf2GN0TGBWIU3t9RbLw7duR8QXMjdXnRXh3H/i8pO+7sFZz2Q26kunjdlYWfN7xOdYMaUjVknrT1PmAMLrMOMA3Gy+otCUKRSHE1BVHoBBC55ohhDgphDD053IHCsheS/7TpmwbrM01I/HFRxe5+Ogi++4aJxQMig7C9vnnsSih5YFKDAkpONtVhjh6whsb4YW39X2BF+HX5nBhvUlD1PJxYf3QhnzStgrWFtpHLknCvAM3eHnq3+z670FuSK5QKHIJUxVHamd+X8Aqk3ueWRytHGlZuqWu/dn+z7j82DgiOyg6CGFmhmPr1rq+8D1780rErGFhBR0mwyuz9UkS48Lhz36w/Yt081wZYmluxnvNKrD9oyY08nXX9fuHRPPWwmMMWXqcB2GqXK1CURjISRuH8rc0oN9z/TDX7PpcDbn6xPWg6CAAHJo30/VF7NlTsN1Wa/eFt7aDSxl936FftLrm4fdNGqKMmz2L33qRKb1q4WqQsn3z2fu0mryPRf/cJDGpAL8HCoWiYBjHiyLV3avzUd2P0r0elRClBQi+8AJmdlqAXfzdu8RdS7vAUoGhZC3N7lFRb9jn1kGY3RhuHjBpiJS0JTtHNKWHgetueGwCX647T7eZBznnr4znCkVBxVTFITFeUaRuK9Kg/3P9aVO2jVGfvaW97jwoOggzKyvsGzXS9UXs3ZtX4mUf22LQdzm0+BxE8kco8iEs7AT7J4OJifVc7a2Y1LMWywbVo7y7/n05fTeUztM147nKuqtQFDyyYuM4LIS4LIS4DNgDuw3ah3JNwkKMEIKvG3xNNbdqALQt25YKLhV01wOjtQh3h+bNdX0F1s6RGjMzaDJKK01rl2yzkEmw62tY1kerd24i9Su4seXDxnzUqhJWqYznrSbvY8vZewV7C0+heMYQpvxBCiG+MmUwKeX4p5YoB/Dz85PHjh3LbzF0JCQlcD30OuWcyzFq3yhdIOCkppNoU7YNCcHBXGnUWAuuMzOj4sEDWBQrlsmoBYiwAFj5JtwxSLHi7KNVHCyVad17I24ERfL52rMcvGpcIKpZZQ/Gd65GGTf7dJ5UKBRPixDiuJQy0z9ak5IcFhSFUFixMLOgUjEtU4q7rd6jKMVAbuHmhm2tWkSfOgVJSUTu349z585pjlUgcfKCARth13g4NE3rC70D89tC66+h3nvpZtlNTTl3e5a89RLrTwfwzcYLBEXEAbD3UiAvT/2b95v78m7T8lhbmOfWq1EoFJnwVMZxIURjIUT3zAIEFXrSUhxgvF0VNOdXEkMLmXHY3BJe/hb6/AE2WnlckuJh22ew4vV0qwumhRCCLrW92TWiGa/XK63TObEJSUzZcZl2P+3nwJWgjAdRKBS5hqmR40OFEJ+n6lsH7ANWAleEEFVyQb4ih6HiCIzSZ/F1at8OYanlhoq7do07779PUmxsnsv31FTpAO/+DV7P6/suboQ5TeDu8SwN5Wxnybev1GDtkIZU99ZHnl8PiuT1eUcY+scJ7oeq2A+FIq8xdcXRH9Dl1k5Ohd4e6Ae8AFwBxuS4dEUQD1t95t6gGP2vZisfH0p+/72uHX3sOPfGjM1T2XKMYmVh4DZ4yaBgY8htmN8G/pmRaaLE1NTycWHd+40Y37kajtb63dWNZ+7RcvJeftt/nXiVOFGhyDNMVRwVgJMG7fbARinl0uR052PREhIqMsFoqyrKeLvFuVNHio8apWuHbdpEzIULeSZbjmJhDe0mQK9FYG24dTUGlr+aJa8rAHMzwRsNyrLr46a8UttL1x8Zl8i3m/6j4y8HOHI
9OIMRFApFTmGq4rBFq8CXQj2M64NfAVSZMRNIz8aRguvAN43SkIRu2pQncuUaz3XRAga9DHJgXtqsBQzePpLl4Yo72vBTn+f5Y9BL+BZ30A/5IJzevx5mxIpTPAxX21cKRW5iquK4C9QEEEIUA6oB/xhc98BYsSjSwdXWFZGc1utx7GMSk4wLOAkhcO7WVdcO27IFaWJAXYHFtZy2dVVviL4v7C783i5LAYOGNKjgzuZhjfmsXRXsrPQeVqtP+tNy0j7mH7ih6n4oFLmEqYpjBfCLEGIIsBC4Axw1uO4HXMph2YoklmaWFLPRYjSSZBKPYh5xK+wWUfFRunscGjbEzFnb3kkIuKe56RZ2LKyg7f+gzzKwcdH6ZKIWMLikG0Q8zPKQVhZmvNu0AjtHNKVDDX1FxfDYBL7eeIGO09T2lUKRG5iqOL5D86D6Di0z7mtSSsOfc32BQr6nkne42brpzqccn0LHNR3puKYjkfFaaVVhZYXTy/oStGEbi9BbW6U9DD5gXOPj+h6Y1RCu7c7WkF4utsx4rQ6L33rRKHXJxfva9tWHy0+qzLsKRQ5ikuKQUsZIKQdIKYtJKZ+TUh5Kdb2ZlPLH3BGx6GHoWbXx+kZASz9y9J5+EefUob3uPGzbNmRCEcrZ5OIDAzZBoxHosvFHPoTF3WDnOEjMXkGrxhU92PJhY0a3rYytpX77au2pAFpM2suvf19T3lcKRQ5QILLjCiEmCiEuCiHOCCHWCCFcDK59JoS4KoS4JIRok9E4hQVDA7kh/hH+unO7F17AwkNTMInBwUQeybohuUBjbgmtvoJ+q8E+xa9CwoGpmu3j8c1sDWttYc6QZr7sGtmUDjX121eRcYl8v/ki7X5WwYMKxdNiagDgr6YcTyHHDqC6lLImcBn4LHne54A+aMb4tsBMIUShzzVhiuIQ5uY4tmura4etN63aXqGjQgtt66q8PnKeu/9qXlfn/sr2sF4utsx4tQ5L3zb2vrr6MILX5x3hvSXHufs4KoMRFApFepi64ngbeBnNvlExncM3u0JIKbdLKVP2Yg4DKUUaugDLpZSxUsobwFXgxezOU1BIT3Hcjbhr1HbupM9XFbZtO4kREbkqV77hWAJeXw2txoNZcoBfbBisGgjr3oe4yGwP3dDXnS3DG/N5h6o4GAQPbjl3n1ZT9vHzzivExCdmMIJCoUiNqYpjHVASSARmAa2llM1THS1ySKaBwJbkc280D64U7ib3FWoMbRyGGK44AGyqV8O6kpYcUcbEELZ5c67Llm+YmUGjDzW3XcMKgyeXwJymcO90toe2NDfj7cbl2T2yKd2e1398YuKTmLrzMi0n72PrOZW6XaEwFVON412BssBe4AfAXwjxoxCioqkTCSF2CiHOpXF0MbhnLJAALM3Ki0h+9h0hxDEhxLHAwMDMH8hHDL2qDAmICDD68hJC4NKju64d8lf2t24KDaX8YPB+qN5D3xd8BX5rpaUreYqYluJONkzpXZtVg+tTzUuf+8o/JJrBS07Qb95RrjwIfxrpFYpnApON41LKe1LK79DSj7yR/O9ZIcQeIYSNCc+3klJWT+NYByCEGAB0RHP1Tfn29Ad8DIYpldyX1vi/Sin9pJR+Hh5p/6IvKJRzLoeF0LZNqrpWxc5CKx0bGR9JaKxxVlynTp0gOflhzOkzxF65krfC5gc2ztD9N+gyE1IqJibGaelK/uiZrZgPQ/zKurJ+aCO+61qdYnaWuv4DV4No+/N+vt5wgdDo7Hl2KRTPAln2qpIaW4HZaPaIxkCmiiMjhBBtgdFAZymlocVyPdBHCGEthCiHZks5mtYYhQl3W3cmNJlAr0q9mNx0Ml4O+txLqberLIoVw7GFfhcwZPWaPJMzXxECnn9NW32UrK3vv7oTZjWAKzueanhzM8FrL5Vhz8fN6F+/DGbJXsGJSZL5B2/QYtJelh29TWKS2r5SKFKTJcUhhCgphBgjhLgOLEZTHJWllCFPKcd0wBHYIYQ
4JYSYDSClPA/8CVwAtgLvSymLhCXz5bIv80X9L/Bx8qGUQyldf2oDOYBL926689B165Dxz9CvYbcK8NYOaDBM3xcZCEt7wObREP90gX0udlZ83aU6m4Y1pl55fVmZ4Mg4Plt9ls7TD3D0RtYSMioURR1T3XE7CiHWAjeAZsAngI+U8lMp5bWnFUJK6Sul9JFS1k4+Bhtc+05KWUFKWVlKuSWjcQor3o56g23qFQeAfcOGWJQoAUDio0dEHDyYZ7IVCCys4OVvoN9acPDU9x+dA3Obw4PzTz1F1ZJOLBtUjxmv1sHbxVbXfz4gjF5z/uGDZSfxD4l+6nkUiqKAqSuO9UAttJQjCwBLoKcQ4lXDI5dkLPJ4OxgojvAnFYcwN8e5U0ddO3TdujyRq8BRoTm8dwgq66PqeXgBfm0Oh2c9leEcNGeEDjVLsnNEUz5sVREbS/2fx4bTAbScvJepOy4THVckFr0KRbYRprggCiFM+YuUUsoCEZzn5+cnjx07lt9imMzu27sZvmc4AA29GzK71ewn7om5fJkbnTUHNGFlRcWDBzB3dMxTOQsMUsKx+bBtLCQYrAIqtIRXZoKjZ/rPZgH/kGj+t/k/Np65Z9Tv5WzDJ+2q0LmWF8LEWuoKRWFACHFcSumX2X2muuOamXAUCKVRGEm94th6cyvt/mrHlGNTdP02lSphXbUqADIujvBt2/JczgKDEPDCW1qdD88a+v5ru2BmffhvY45M4+1iy/RX67DinXpG7rsBoTEMX36KHrP/4fSdpzXvKRSFD1NXHCZV95NS/p35XblPYVtxRMRFUH9ZfQCszKywNrcmPF6LJ9jefTslHbScS8ELFvDwhwkA2Pn5UWbJ4vwRuCCREAu7v4VD0wCDz/Lz/aDtD2DtkO6jWSExSbLy2B0mbrtEcGSc0bVudbz5pG0VSjg9lXOhQpHvmLriyMpWlUSXytSIlAGklNIijet5TmFTHACNljd6IoYDYE7rOTTwagBAQmAgV5o20+3lV9i5E6tShT6QPme48TesGQxhBjaiYuWg26/gk3NZasJi4pm++yq/H7xBfKL+b8fW0pz3mlVgUOPy2FqpxbeicJKjW1VoQXilk/81PMqhRZLHkE5gnsI0DLerDLkbrnfPtfDwwL5RQ107ZNXKXJer0FCuCbx3EKrpXZd5fAPmt9FWJNlM1Z4aJxtLxrSvyo6PmtL6uRK6/uj4RKbsuEzLyXtZd8pfpS9RFGlMtXH4pz6A54HNwBDga6BSLspZ5DFFcQC4dNen4ghZvoKkGFWgSIdtMegxH7rNBetkm4RMgr8nwrzWEJRzUfdl3e2Z29+PpW+/RBVPvZNCiv2j68xDHL/1OMfmUygKElmOHBdC1BFC7AZWA7uBilLKCVLK2ByX7hnCMAjQkNQBgY4tW2DppUWaJ4aEEFpU061nFyGgZi/NbbdsY31/wEktVfuRXzWvrByioa87m4Y15vuuNXCzt9L1n7oTQvdZh/hg2UmVvl1R5DBZcQghfIQQS4B/gRCgmpTyAymlqoqTA5i64hAWFhTr10/XfrRokdoWSQsXH+i/Hlp/A+bJX+gJ0bBlFCzuCmEBOTaVuZng1ZdKs2dUM95tWh4rc+P4jxaT9zFh60XCY56hiH9FkcbUyPEfgEtoiQ2bSCm7SSmfgWx7eUdl18q68+pu1XXnd8LvPKEYXHp0x8xOS4wYd/UakQeNKvkqUjAzg4bDYNAeKF5N3399D8ysB2dX5ejqw8nGks/aVWXniKa0r6GPJYlLSGLW3ms0n7SXJYdvkaDK1yoKOVnxqooGDmHk82iMlPLlnBMt+xRGryopJfPPzedG6A2G1xlOhzUdiE4Obtvfez8uNi5G99//7nseL9bcce0bNaL0b3PzXOZCRXpuu9W6QocpYOea7qPZ5d+bj/hm4wXO3DX2lqtY3IEx7avSrLKHCiBUFChy2h13ARkojBSklG+aJF0uUxgVR2q6re/Glcfaom5Zh2VUd69
udD3u9m2utWmr+8VcduVKbGtUf2IcRSpuHYI170LIbX2fQwnoPA0q5XxJ+6QkyfrTAUzYepF7ocaODA193RjTvirVvJxzfF6FIjvkdOT4ACnlm5kdTy+2IgWjjLnhT2bMtSpdGieDmuRBc55MU6JIgzINNMN5nf76vogH8EcvrUxtTFiOTmdmJnjleW/2fNyMUW0qY28Q43HwajAdpx1g5J+nuReqEigqCg9Z9qpS5A2lHPWK4074nTTvcXtXl0SYiJ27iLl0KdflKhJYO2orjL4rwL64vv/kEq3Wx/V9OT6ljaU57zf3Ze+o5rz2UmnMkwuASAl/nbhLs4l7+XHrRcKUAV1RCFCKo4Di46gvfJhWjQ4Am8qVcGzdStcOnjMn1+UqUlRuC+8f0ewcKYTegUWdYdPHEBeZ41N6OFrzXdcabPuwMS2r6JVWbEISM/deo9nEvSw4eIO4BGVAVxRclOIooGS2VZWC4aojbMtWYq89dXmUZws7V+i5QAsctC2m7/93LsxqCLf+yZVpfYs7Mm/ACywbVI8a3nobx6PIOMZtuEDrqfvYcDpAuVorCiRKcRRQTNmqArCtXg37psk5KKUk8Odfclu0okn17jDkCFRqp+97fAN+bwdbP4O43Aniq1/BjXXvN+Sn3rWNCkjdCo7ig2Un6TLjIIeuqVApRcGiQCgOIcREIcRFIcQZIcQaIYRLcn9ZIUR0cjlZXUnZZwEvBy9Eck7J+5H3ic8g15LHB/qyquHbtxN99lyuy1ckcSwBfZfBK7PAOmUVIOHwTJjdKNdWHykG9F0jmzK2fVWcbS11187cDeXVuUcY8PtRLgTkrOFeocguBUJxADuA6lLKmsBl4DODa9fSKilb1LE2t6a4nbYHLpEERKYf6WxbvRqObfUeVoFTp+a6fEUWIaD2qzDkH60wVAqPruX66sPG0pxBTcrz96jmWgS6hf7Pc++lQDpM28+Hy09y55FKYaLIXwqE4pBSbpdSJiQ3DwNpJ256xjA0kGe0XQXgMWwYmGuunpGHDhF5+HCuylbkcfaG1//SvK9SEiamrD5mNYCbB3JvajstAn3vx83oWbcUKTGCUsLaUwG0mLyXcevPExiu0sMp8ocCoThSMRDYYtAuJ4Q4KYTYJ4RonN5DRRFDO8fJhydJkul72liXL4dz11d07aDZysPqqRFCi/cY8g/46r3XeHwDFnSATSMhNjzXpvdysWViz1psHd6EVlX1HljxiZIFh27SdOIeJm+/pFx4FXmOSZHjOTKREDuBtIpBj5VSrku+ZyzgB3STUkohhDXgIKUMFkLUBdaiJVd8YrNXCPEO8A5A6dKl6966dSu3XkqeMef0HKafmq5rl3YszegXRtPUpykAhwIOccD/AL0q9aKsc1niAwK42qq1vtDTju1Y+fikObYii0gJp5bC1jFgWHDL2Qc6/WSsWHKJozce8ePWixxLla7dxc6S95pWoH/9sqqIlOKpyNGUI3mBEGIA8C7QUkqZ5iauEGIv8LGUMsN8IkUh5QjApUeXeG3za8Qm6rck7C3t2d1zN+Fx4XRY04HYxFieL/48i9otAuD2u+8SuU+r4Os2+F2Kf/hhvsheZAkLgI0fweWtxv21XoU23+VKzitDpJTsvviQidsucfG+8WqnuKM1H7TwpfcLpY3sIwqFqeR0BcBcRQjRFhgNdDZUGkIIDyGEefJ5eaAicD1/pMx7KrtWZk3nNbxe9XXsLLRsuJHxkey+s5tNNzbpFMrZwLM6ryuX7t11z4euWYtMTMx7wYsyTl7Qdzl0+w1sDZTE6T9gxotwbnWOZtxNjRCCllVLsGlYY37qXZvSrna6aw/DY/li3XlaTN7Ln8fuqCy8ilyjQKw4hBBXAWsgOLnrsJRysBCiO1p1wXggCfhKSrkhs/GKyorDkLln5vLLSS1Go6F3Qx5EPuBqyFXd9dWdV1OxWEVkXBxXmjUn8dEjAHzmzMahadN8kbnIExEIW0bD+dXG/ZX
aQYfJmoE9l4lLSOLPY3f4ZdcVHqYylpd3t2d4q4p0rOmlS3GiUGREoduqyti9umcAACAASURBVEmKouLwj/Cn7V9t073+Y5MfaVdOC1578ONEHs2fD4Bj69aUmqaCAnOVi5s1Q3m4gcu0lSO0+gr8BoJZ7tsdYuITWfTPTWbtvcbjKGNjeaUSDgxvWYl21T0xUwpEkQGFaqtKkTneDt7UKV4n3euGqw+XHvrtqrDdu4i/fz9XZXvmqdIe3j+sKYkU4sJh88cw72W4n/sBmTaW5rzTpAL7P2nByNaVcLSx0F27/CCC9/84Qftf9rP13D2Skorej0VF3qIURyGiQ/kO6V67+livOK47x3LJR/uVKxKTuDr5u1yX7ZnHxhk6ToU3t4B7JX2//zH4tSns+CrXAgcNcbC24IOWFTnwSQuGtfA1SuN+8X44g5ecoMO0A0qBKJ4KpTgKEW3KtsHCTP9L0kLoz1NWHAERAQzZNYQVjfRfCnLjLmKv6hWLIhcp0wAGH4Bmn+lrnSclwMGftHK1V3bkiRjOtpaMeLkyBz5pwXvNKmBnoED+uxfG4CXaCmTTGaVAFFlHKY5ChLO1M028m+ja3Sp20+WzuhN+h9DYUIbsHEJQdBDnyppxqpx2TUjJQ5WGJO+wsIZmn2oKpHQDfX/ILVjaA1b0g1D/PBGlmL0Vn7Stwv7RWhoTW0vjFcj7f5ygzU9/s+6UP4lKgShMRCmOQsZ7td+jmHUxvB28GVRzEKWdSgNaPqvZp2dzLVRLq25pZslfLfXZViN27SbqxMkMx04MD0fGxeWe8M8aHpVhwCboPB0Ma8b/t15z3T00HTJIXpmTuDlY81m7quz/RFMghiuQKw8jGL78FK2m7OPPY3eIV268ikxQXlWFkMSkRMyEGUIIhu8ezu47uwFNWcQnaV9EQ2sP5W7EXUpP/otGF7T/Y4dWLfGZPj3NMSP27ePO0A+wcHWl/Lq1mLu4pHmfIptEBsH2L7R4D0OKV9Ncd8vUz1NxHkXGMXf/dRYduklknHGsj7eLLe80KU/vF3ywsVSR6M8SyquqCGNuZo5IznznW8xX15+iNASCLr5daFe2Hasa6f+LI/buIyEwMM0xHy1dCvHxJDx4QPiuXbko/TOKvTt0nQUDNoNHFX3/w/Pwe1tY/S6EP8gzcVyTt7AOftqCYS0r4mTgheUfEs1X68/TaMJuZuy5Smi0yoWlMEYpjkJORZeKT/S9WPJFPO09ebHki0R7uXIhJV1VQgIha9emOU7sfxd15/H+ebP//kxStiG8ux9ajQdLfdQ3Z5bDtLpwaFqebV8BuNhZMaJ1JQ5+2oLRbSvjZm+luxYUEcfEbZdo+MNu/rflPx6ExeSZXIqCjVIchZwKLhWe6OtUvhMAFmYWtC7Tmt219P/NIatWPVGONCE42GglEu+ffu0PRQ5gYQWNPoSh/8JzXfT9ceGw/XMtbfu13XkqkqONJUOa+XLgkxaM6/QcXs42umsRsQnM2XedRhN2M3rVaa48yL2MwIrCgVIchZyyTmWN3HJtzG1oVUafqfXFki9yuIogylprx9+6zf/bO+/4uKpr33/XVPXereIm27hh44apLkBwQjAkEAgQShL4JKEkPG5IuNwX7r28EFJeQsglPJNGSUJIjCkxhOIGBuMKuOEmWS6SrC6NrD6a2e+PfaSZUbEt25JG0v5+Pvsz5+yz58w6c2bmN7ustZq2bAk5R8vevSH7pscxQMRnw1eeh6+9AslBPceq/fDCtfDiV6FmYEOzRbrs3H7hGNZ9fyG/uP5cxqfFdB7z+hR/31rM5b96n68/u4UNhVUmJ/oIxQjHEMdpd5IXl9e5vyh3EdHO6M799Kh02pzC+smBUBN1y5eHnKO1i3C0lRrhGFDGLYJvb4Ar/o8OVdLBvjfhqXm6F9Li6f35/YDLYeO6Wdm8871LeOZrszgvN3SxxJq9Fdz0u01c9ZsPeOWTYtrazUqskYQRjmHAzPSZndtfyv9
SyLH0qHQA1swI3Orjb71Ne3V1537LnlDhaC8rR3nNhOiA4nDBBffCvdtgxi2Bel+bnvd4ciZs+T342ns/Rz9gswlXTMlgxXcuZPm35nPF5PTOjIQAu0vruf+l7Vz00zX8ZvUBqhtMVsKRgFmOOwyobq7m2d3PMjZ+LNfmXxtyzOv3MuuFWSgUP/mTj3Fl+n6nfOfbOt0sUHjVVbQVFIY8b9yqVbiy+z+6q6EXSrbp/OZHN4XWp06Cy/8b8q8g5Bd8ADlY2cAfPyxi+bZiWryhPQ2Xw8bV52Zx+wWjmToqflDsM5w+JjruCBKOk7HgpQVUt1Qzf4+f+1/VX3RbfDz5a1aD3c6+WbOhS96O3OeeI3re3MEw19CBUjpk+7uPgKdLzvnRF2sBGdV74Mv+praxjb9sOszzHx3uFtIdYFZeIrfOz+PKqRm4HcYfZChg/DgMnaRH6+GqTRMFlaVzV/s9HupeXkHrgYJuogFmgjwsEIGpX9arrxb/KHT+49B6+N1C+McdUF3Y+zn6kcRoF/cs0gEVn7hhBudmh/Ywth2u5bt/+5QLH1/Dz97aS3Ft/wd5NAwMRjhGAGlRlljYhLprA7Guqp/9Ey27dvb4HCMcYYQzEi5+AO77BOZ8EyTo3/vuFTp8ycr/BfXHBsU8l8PGNTNH8do9F/HKdy5g6YwsnPbAMFpVQxu/XVfIxT9by+1/2sw7u8tMdsIhjhGOEUDHBDlA0UVjsScmAtBeeozK/3mq85gjK7Nz21tqfDnCjphUHZ7k7k0w6apAvb8dtv5BT6C/+yNoqhk0E2fmJvLrG2ey4YeLeeDyCWQG+YMoBev2VXLXC9u44PE1/PztvRypNr2QoUjYCIeIPCoiO0TkUxF5R0SyrHoRkSdFpMA6PniDukOUjh4HQLmvlqTbbu3c9wWtropdHPD/MD2OMCYlH278C3xjFeRdFKhvb4YPfw1PTIM1P4bmukEzMTXWzb2L81n/4EKWfW0WF+enhByvON7KU2sLueTna/nqMxt55ZNimtu6D5kawpOwEQ7g50qp6UqpGcBK4EdW/RIg3yp3AU8Pkn1DluAeR1lTGclf/zoxixd3axe7eFHnthGOIUDOHLh9JdzyMmSeG6hva4D3fwZPTIe1PxlUAXHYbXxuSgYvfGMe739/Id9eMI6UGHdIm48OVnP/S9uZ8+NV/GD5DjYX1RjHwjAnbIRDKVUftBsNdHxylgLPK81GIEFEMrudwNArwT2OiqYKxOUi+9dPEL80EO7CkZ5O5LmBHx9vWRmqfWB9BgyngQiMvwzuXAfXPxcaQLHVA+89rnsgqx+FxupeTzMQ5CZH8YMrJ/HRQ4tY9rVZLJyYSnAK9IbWdl7aepSvLPuIS36+ll++s4/CyobBM9jQK46TNxk4ROTHwK2AB1hoVY8CgtciFlt1gzMTOAQJ7nFUNFUAIA4HmT95DNe4cTSsXUvyXXdii4zEnpKCr6oKfD7ay8txjjK+HEMCmw2mXAPnfBF2rYD3fgrVB/Sx1npY/wvY+DTMvgPm3w1xWYNmqtPqhXxuSgZlnhZe/riYl7cVc7CqsbPN0ZpmnlxTwJNrCpg2Kp6rz83iqnMzyYyPPMGZDQPFgPpxiMgqIKOHQw8rpV4LavcQEKGUekREVgKPK6U+sI6tBn6glApx1BCRu9BDWeTm5s46fPhwf13GkKOhrYH5L+p8D267my03b+kMy96VohtuoGX7DgDyXnieqDlzBsxOw1nE74NdL8N7PwsISAd2F5x7I1xwn54vCQOUUnx8pI7l24p5Y0cp9S0993bnjk7iC9MzWTI1g7S4iB7bGE6fIe0AKCK5wJtKqakisgxYp5R60Tq2D1iglOq1x2EcALtz/l/Pp9Gr/9Gtv2E9CRE9J2oqvv9+jv/rLQAyH/8JCddcM2A2GvoBvw8+ew3e/4XO/RGCwMTPwwX3QO78QfNE70qL18favRW8+mkJa/dW0tbD0l0RmJ2XyJKpmVw5NYOsBNM
TORucqnCEzVCViOQrpTr+Gi0FOgIovQ7cIyJ/A+YBnhOJhqFn0qLSKPIUAVDeVN6rcLiChqbMBPkwwGaHqV+CKdfC/rfhg18GhTFRsO8NXbJmwvl36zDvDtcJT9nfRDjtLJmWyZJpmXiavLy9u4zXt5eyobCKjrToSsGWQ7VsOVTLf6/8jGmj4rlicjqXT0lnYnpsrz1qw9khbIQDeFxEJgJ+4DDwLav+TeDzQAHQBNwxOOYNbYKFo6KpgolJE3ts5wwRDuPLMWwQgYlXwoTPwZGN8OETsP+twPHST2DFN3Uk3jnfgPNug9j03s83QMRHOfnKnBy+MieHqoZW3tpVxhs7jrGpqLpTRAB2lnjYWeLh/767n+zESC47J52Fk9KYNybJpL/tB8JGOJRSX+6lXgF3D7A5w46eJsh7wml6HMMbEZ3fPG8+VOyFjU/B9pfAZ8WaaiiDtT/WcyOTl2oRCZNhrJQYN7ecn8ct5+dR1dDKO7vLeWt3GR8VVuH1BVSkuLaZZzcc4tkNh4h02rlwfDKXTkzj0vxUcpOjTvAKhlMlbITD0L8EC0d5U++5rZ3Z2Z3b3qNHe21nGAakTYKrfwOLfgTb/qTDtjdYnw2/F3Yt1yVlol6NNf0GiEoaXJstUmLc3DQvl5vm5eJp9rJuXwXvfFbOe/sqaWgNTKw3e32s2lPBqj36z9Lo5Cguzk/lwvEpzB+XTHykc7AuYUhjhGOE0NWXozec2dn636VSeI8dw9/ais3t7rW9YRgQkwqXPggXfk9PpG/5XWg496p98NYPdTiTSVfBzJth7EI9fxIGxEc6WTpjFEtnjKKt3c/mohrW7K1gzd5yDnUJaXKouolD1Yd5YeNhbALTRsVz/rhk5o9NZvboJGLc5ifxVDDv0gghJOzICXocNpcLZ2amjlWlFN7iYtzjuuc1NwxDHC6Yfr0ux7bD1j/CzuXaEx10UqndK3SJzYJzb9C9kLRzBtfuIFwOGxflp3BRfgo/+uJkiqoaWbevgvf2V7LpYA3N3kBYE7+C7cUethd7WPbeQew2YWpWHHNGJzFnTBKz8xJJjjF/mnoiLJfjnilmOW53dlft5sY3bgRgQuIEXr765V7bHvn612nc8BEA2b/9LbGLFvba1jDMaT2uxePj56H0457bZEyDqdfpEPAJOQNrXx9obfex7VAtHxRU8UFBFTtLPJzs529MSjQzcxM4LzeRmbkJTEyPxWEPm4AbZ50htxzX0L905OSAE/c4AJy5uWAJR5txpBzZuGP1/MbsO6BsF3z6V9jxEjRVBdqU7dRl1SOQPUcv/T3n6rATEbfDzgXjU7hgfAoPAp4mL5uKqvnoYDWbDtawp6y+m5AUVTVSVNXIio/1QpEIp41po+KZnp3A9Ox4po2KZ3RyNDbb4C8eGEiMcIwQkiKScIiDdtWOp9WDp9VDvLvn1J6u3LzO7bYjJxeOhg8+pP6f/yThhq8QdZ4JXjxsyZgKVz4Gl/8XFKzSArL3zcCKLIDiLbq8/e/aN2TSVTDpCzqGVhiszAomPsrJFVMyuGKKDmbhafKy9XANmw/VsKWohl0l9d2cD1u8/k7/kQ5i3A4mZ8YxOSuOyZlxnJMZR356zLBeBmyEY4RgExuTkiaxq3oXAB+VfsSVY67ssa1rdEA4vIePnPC8/rY2Sh54AL/HQ8OGD8lfuxZxmI/VsMbuhIlLdGnxwN43dHiTwrWggkKjl36iy5pHIXEMTLgSJlwBeReCI/zmDuKjnCw+J53F5+jeeYvXx64SD58ereOTI3V8fKSWY56Wbs9raG1n8yEtOB3YRA9zTcrQIjIxPZbxaTHkJUfjcgz9oS7zDR9BXJJ9SadwvF/8fu/CkZvbuX2yoSpvcTF+jwcAX2UVzdu3EzVr1lmy2BD2RMTDjJt0aaqBvSth9ytQ9L5OMNVBbRFseloXZ5TOmT5+MYxbBMnjw643AtqDffboJGaPDixBrqhvYXuxh53Fdewo8bCz2EN1Y1u
35/oVFFY2UljZCEFJNh02IS85inGpMYxNjWFsSjRjU6PJS44mJcY1ZDzejXCMIC7JvoTfbv8tAOtL1uPz+7D3sKTSmZMTsiS3vbWF3+x6muLjxdw7815Gx4/ubNtVWBrWvWeEY6QSlQTn3apLc50OcbLvDShYHViZBeBtggNv6wIQNwrGXApjLtaCEmZzI8GkxUVw+eQILp+seyVKKcrrW9lV4mHPsXo+O1bP3rLjHKpu7HHivd2vAoJC6FxjjNtBblIUeclR5CZHkZMYRU5SFDmJkWQlRIbV0JdZVTWC8Cs/i/+xmKpmPbH5wpIXmJE2o8e2BxYtor1UhwQ78sz3+beiXwGQFpnGs0ueJSdWf7lrnn+e8sd+0vk8d34+Y//5en9ehmGo0d4Khz6AA+9qsag5eOL28bnasz13PuTM0/MjtqE1vNPU1s7+8gb2lx9nf9lxDlQ0UFDRQEld82mfMzXWTVZCJNkJkWTGR5CZEElWfATp8RFkxkeQEuPGeYYrvsyqKkM3bGLj4lEX80rBK4AerupNOFy5eZ3C8em2f4HVW69oruDOd+7kuSufIz06nbYjod7lrQcO4C0txZk1ePkeDGGGw62HpcYvhiWPa+EoWK3nRA6t1/lCgvEcgR1H9OQ7gDsesmfpFVujZutJ95jUgb+OPhDlcjAjJ4EZOaHBRBtb2ymqaqSgooGD1oqtg5UNHK5uCvF474nK461UHm9l+9GeMzqKaI/69Dg3t80fzfWz+6/nZoRjhHFJ9iWdwrG+ZD33nXdfj+1cubk0bdwIQF3hHkgKjL2WNJRw39r7ePELL9J2tPvkecN775H41a/2g/WGYUHSWJg7FubeCb527R9yaD0Urdce695Qb29aPVC4RpcO4rIha4ZOmZsxDdKnQnx2WM6VBBPtdjB1VDxTR4WuaFRKUdXQxpGaJo7UNHKkupmjtU0cqWmipLaZY57mkKCOPaFUQFwaTyJCZ4oRjhHG/Kz5OGwO2v3t7K3ZS1ljGRnR3XNrufICK6vSavyAnczoTCqbKmlX7XxW/RmFdYXYelh1dXzdOiMchlPD7oCcubpc/AD4vNpr/fAGLSJHN0FjZffn1RfrsndloC4iAdImQ/pk7c2eOknH2YpOCXtBERFSY92kxrqZlZfY7bjX56e8voXSuhZK6poorWvhmKeZMk8LZfUtlHlaqG5s65xXSe/nJFdGOEYY0c5oZqfPZuMx3Zv4oOQDrptwXbd2rrzAyqoMa8n6HVPv4KPSj1h7dC0AO8u3c04PEXSbNm7C39yMLdIk1zH0EbsTsmfrAvpvdG0RFG+Dkq1Q8jGU7YD27stiaamDIxt0CSYyEVImQHI+JI+FpHG615M0Rjs4DgGcdhvZiVFkJ0bROW7cBa/PT+XxVsrrWxidHN2v9hjhGIFcPOriTuHYdGxTj8LhzA0WDoVd7FyRdwWN3sZO4SjYv4lzvF4A7MnJ2BMSaCssRLW20rhpE7ELFvT/xRiGNyLWj/xYHUMLdK+kcp8WkGPbLc/1XXpIqyeaawO9l65EpUDiaEjMg4RciM+xHrP1aq+IuH67tLON024jKyFyQLIhGuEYgczLnNe5vblsM0opRIQmbxNuuxu7zR7iy5HmgQtS55Icmcy0lGmd9VUHAgvUXTk5REydSlthIQCt+w8Y4TD0D3an9mLPmKr9R0D3TDzFUPEZlO/WwlK5B6oOdJ8zCaapSpeSXlZhuuMgLgtiM63HDIjJ0EmuYtIhJg2iU8EVE/bDYWcTIxwjkPzEfBLdidS21lLTUkNBXQHHGo/xvbXfIzUylb9+4a8kRybjiXcS7/FiU3B1xBwApiRPQRAUivbi4s5zuvJyQ+ZFTC4Pw4Aiov0/EnJ0lsMO/H44XgpV+6G6EKoLoKZIr+yqPaTzjpyI1nqorIfKvSdu54jUAhKdrB+jknWJTNT+LZFJejsyESIT9HyMO3bIik1YCIeIPIrOM+4HKoDblVKlIrIAeA0ospquUEr
99+BYOXywiY3ZGbN59/C7gB6uWlGwAq/fS2ljKcv3L+eL475IQUo7s6ze/5SNFbAQYlwxjEsYR0FdAWm1gTg+zpxcXLmB5X9tRjgM4YDNpoed4rO1l3owfh8cP6YFpO4I1B7Wj/XFuvdSX9rzXEpPtDfrZcSeE4foCUFsukcTEa+HxNwdj3FaVNwxuifjjtWPruigxyhwdjxG6Tr7wCWlCgvhAH6ulPrfACJyH/AjAjnH1yulrho0y4Yp8zLmdQrHn/f8mZKGwCT3W4feIsYVw7szhVmFeplG0/JX8N19L/a4OKamTKWgroD0QJw3XLk52uPcwvQ4DGGPzR4QlZ5QSodRqS/RAlNfqjMkHi+DhgqdZrehQq/6OlWBCTm/X0/ot/Tsl9FnbA4tIs4oWPjvMOu2s3PeHggL4VBKBXsARQPDz509zJibObdzO1g0AArqCnjhsxcoHS8cTYGcKvA3NlL70kuk3Hkn01Km8WrBq2TUBm6TKzdX5ysPClWivF7EaVJzGoYoItbQUzJkTu+9nVI6b0lTFTRWayFpqtb7zbVafJprdRiW5hodGLK5DryNZ9def7seWmutP/kQ3BkSFsIBICI/Bm4FPEBw5qD5IrIdKAX+TSm1ezDsG26MjhtNWmQaFc09p5EtaSgBEVbOs/PtN3TE05rnnyfpttuYnjodlCI96I+SMzcXm9uNIz2d9rIy8PvxlpaGzHsYDMMSET3EFBGnV3+dKj4vtNRbvQ6P9aN/vHtpa4C2Rr3tbYLWBv3Y1gDeZl3aGkMjEzujzv51BjFgwiEiq4DunmbwsFLqNaXUw8DDIvIQcA/wCPAxkKeUahCRzwOvAvm9nP8u4C6A3KAVQYaeERHmZs5l5cGAA1WkI5Lm9tBYOp5Lp+PYUkJ7RQW+yiqKv/0dMr5yHVnNEUS26X9MEh2FPVE7Lbmys7VwAG1Hi41wGAy9YXcGejRng/Y2LSje5n73TxmwyGFKqcuUUlN7KK91afoX4MvWc+qVUg3W9puAU0RSejn/M0qp2Uqp2amp4R3HJlyYmzE3ZP+eGfcQ6wz9wC0YezlJt93aud/44Ycc++79PL4sMKbblpncGQ462P/D20M4EoPB0E84XHrFVlymnljvR8Ii5KSIBPcilgJ7rfoMsX6RRGQu2t7qgbdweBLszxHpiOTa/GtZnLc4pM2i3EUk3nILcZ9fElIf0RLoFlclBTqurpzARGPb0WIMBsPwIyyEA3hcRHaJyA7gCuC7Vv11wC5rjuNJ4EY1HOPADxJZMVl869xvMSpmFA/NfYhYVyxLxgQEYnzCePLi8rC53Yz65S8Z99a/SP72t7Cnhnb6DsQFHKyc2cErq0yPw2AYjph8HIYQlFL8bMvP2Fq+lQfnPMicjDnd2vjb2ih/5e9sfuYx/KJ48honr9/1IbGuWJq3b+fQDTcC4J40ibGvvjLQl2AwGE4Tk4/DcFqICD+Y+4MTtrG5XGTecAsvRq9kd/VuQLGtfBsLchZ08+XoCGdiMBiGD+EyVGUYggT7gmw6pgPI2RMTsUXryJz+xkZ8tbU9PtdgMAxdjHAYTpt5GaHBEkH3WIwHucEwvDHCYThtZqbNxCF6tHN/7X5qWmqALiurjhjhMBiGG0Y4DKdNlDOKaamBMOtbyrYAOuBhB95iIxwGw3DDTI4bzoi5GXP5pOITADYf28znRn8upMdxfM1afMcbUO1eRAR7UjLx11yDMz1tsEw2GAxniBEOwxkxL3Mey3YsA+D1wteJdkVzU/rkzuMtO3fSsnNnyHOqnn6apFtuJvHmm3FmZg6ovQaD4cwxfhyGM6LV18rCvy/keNvxzrq0Zhe/fqoVu9d3gmdqHFmZRM2YQcSUKURMnowzJxdnWiricqG8XpTfj83tPul5lFKsPLiSVYdXcVneZXxx3BfP6LoMhpHIqfpxGOEwnDHbyrfx2KbH2F+7v7NuZoGfefsUvig3juRkomMSiZUocjYfJuZg+clPareBz0o
UlZKE5GUjiQngdCBuN5IQBwlxKL8fb1MDm0o3srftKE1u8NngwuyLuGrCNdgjI7G5XEhHcTr1o9uNLTISR/JZCjBnMAwDjHAY4RhQ/MrP6iOrWbZ9Gftq9/XaTpRi3l7F5Z8o8ksVEf2bNuCENOalMvvt9wfPAIMhzDCe44YBxSY2Ls+7nMtyL2Nr+Vb+uuevbCjdQFN7U0g7JcLGc4SN54Ddp8irgHHHFGPLFLkViuTjkNCgl/v5LIdzez/9t6lTZzmRjsEwQjDCYTiriAhzMuYwJ2MOSinKm8o5WHeQ8qZyqluqqW2ppdHbSJO3iVZfK625rdT72tni9/KRasfv96Pa21F+H14H4PWRUNtGcmUr7lY/dp/C1eYnutFHVJMfZRO8LhuxjhhmRU8iod3F3qrPOFxXhN0PznZwtYPDp3C0g8Oqc/qgPs58/A2G08F8cwz9hoiQEZ1BRnRP+bv6j2y0Q+K+mn34lA+f34dX+WhVes7EJtp9KTnCzG8YDKeDEQ7DsGRC4gQmJE4YbDMMhmGJ8Rw3GAwGQ58wwmEwGAyGPmGEw2AwGAx9IuyEQ0QeEBElIinWvojIkyJSICI7ROS8wbbRYDAYRjJhJRwikoPOOR6crHoJkG+Vu4CnB8E0g8FgMFiElXAAvwIeBIJdvpYCzyvNRiBBRExkPIPBYBgkwkY4RGQpUKKU2t7l0CggOKlDsVVnMBgMhkFgQP04RGQV0JM32MPAv6OHqU733Hehh7IAGkSk94BJJycFqDqD5w82Q91+MNcQDgx1+8FcQ1/JO5VGYRHkUESmAauBjsBG2UApMBf4L2CdUupFq+0+YIFS6lg/2rP1VAJ9hStD3X4w1xAODHX7wVxDfxEWQ1VKqZ1KqTSl1Gil1Gj0cNR5Sqky4HXgVmt11fmApz9Fw2AwGAwnZiiEHHkT+DxQgO6R3DG45hgMBsPIJiyFw+p1dGwr4O4BNuGZAX69s81Qtx/MNYQDQ91+MNfQdt623AAAB7xJREFUL4TFHIfBYDAYhg5hMcdhMBgMhqGDEY4gRORKEdlnhTf54WDbcyqISI6IrBWRz0Rkt4h816pPEpF3ReSA9Zg42LaeCBGxi8gnIrLS2h8jIpuse/GSiLgG28YTISIJIrJcRPaKyB4RmT8E78H91mdol4i8KCIR4X4fROSPIlIhIruC6np838MxfFEv9v/c+hztEJFXRCQh6NhDlv37RORzg2O1EY5ORMQOPIUOcTIZ+KqITB5cq06JduABpdRk4HzgbsvuHwKrlVL56KXO4S6E3wX2BO3/FPiVUmo8UAt8Y1CsOnV+DbyllJoEnIu+liFzD0RkFHAfMFspNRWwAzcS/vfhWeDKLnW9ve/hGL7oWbrb/y4wVSk1HdgPPARgfa9vBKZYz/mt9bs14BjhCDAXKFBKHVRKtQF/Q4c7CWuUUseUUh9b28fRP1ij0LY/ZzV7DrhmcCw8OSKSDXwB+L21L8AiYLnVJNztjwcuAf4AoJRqU0rVMYTugYUDiBQRBxAFHCPM74NS6n2gpkt1b+972IUv6sl+pdQ7Sql2a3cj2q8NtP1/U0q1KqWK0CtN5w6YsUEY4Qgw5EObiMhoYCawCUgP8ncpA9IHyaxT4Ql0jDK/tZ8M1AV9ecL9XowBKoE/WcNtvxeRaIbQPVBKlQC/QAcYPQZ4gG0MrfvQQW/v+1D8jn8d+Je1HTb2G+EYJohIDPAy8D2lVH3wMWtJc1gunxORq4AKpdS2wbblDHAA5wFPK6VmAo10GZYK53sAYM0DLEWLYBYQTfchlCFHuL/vJ0JEHkYPRf9lsG3pihGOACVATtB+tlUX9oiIEy0af1FKrbCqyzu64dZjxWDZdxIuBK4WkUPo4cFF6PmCBGvIBML/XhQDxUqpTdb+crSQDJV7AHAZUKSUqlRKeYEV6HszlO5DB72970PmOy4itwNXATergM9E2NhvhCPAFiDfWkXiQk9CvT7INp0Uaz7gD8AepdQvgw6
9Dtxmbd8GvDbQtp0KSqmHlFLZltPnjcAapdTNwFrgOqtZ2NoPYIXGOSoiE62qxcBnDJF7YHEEOF9EoqzPVMc1DJn7EERv7/uQCF8kIleih26vVko1BR16HbhRRNwiMgY9yb95MGxEKWWKVdChTfYDhcDDg23PKdp8EborvgP41CqfR88TrAYOAKuApMG29RSuZQGw0toei/5SFAD/ANyDbd9JbJ8BbLXuw6tA4lC7B+iAonuBXcALgDvc7wPwInpOxovu+X2jt/cdEPTKyUJgJ3oFWTjaX4Cey+j4Pv+/oPYPW/bvA5YMlt3Gc9xgMBgMfcIMVRkMBoOhTxjhMBgMBkOfMMJhMBgMhj5hhMNgMBgMfcIIh8FgMBj6hBEOw4hCRJ4VkVWDbUdXRGSdiPx+sO0wGE4FsxzXMKKwAhLalFK11g/1eKXUggF8/f8AvqmCslxa9UlAu+oSLsZgCEfCMnWswdBfKKU8/XFeEXEpHVX5tFBKdY3wajCELWaoyjCi6BiqEpH/RHvpXioiyiq3W21iROTXIlIiIk1WxNsvBZ1jtNX+ZhF5U0QagUetUBa/E5FCEWkWkYMi8piIuK3n3Q48CuQFveZ/WsdChqpExCkij1s2tIlO1HVTl2tRIvIdEXlBRI6LSLGIPNSlzVLL/iYRqRORzSIysx/eWsMIwvQ4DCOVX6Bj/YwBOkTBY8Vp+ic6PMUNQCk6AODfRGSJUmp10Dl+CvwAuNvaF3RAvZuAcmA6sAwdTuIR4CVgEnAzMMd6TkMv9j2GDqn9LWA7Ol7Un0WkvIsNjwD/AfwnOprt/4jIZqXUahHJQIcJ+Q/rMQIddr8dg+EMMMJhGJEopRpEpBloUzpIIQAisgCYj87p0DGs9YwVFO9edAykDpYppbqGvH44aPuQiIwDvgM8opRqFpEGwBf8ml0RkSh0Nr77lVL/sKofE5E51vmDbXhJKfU7a/spEbkHLXSrgUzACfxdKXXIahOcZdFgOC2McBgMocwBXECJ7nx04kIHzQumW2RSEbkT+CYwGp3TwkHfh4THW6/3fpf697DSiAbxaZf9UgKJi3YAbwO7RORdYB2wQil1FIPhDDDCYTCEYkNnv5vTw7Guk9+NwTsicj06+uoP0T/y9cD1wI/Pvpm92qSwhEop5RORJehruQz4MvC4iFyvlFrZjzYZhjlGOAwjmTbA3qVuK5AARCildvXxfJcAn6igvChWOt+TvWZXCoBW63zBNlzaZf+kKL3efrNVHhORt4A7ACMchtPGCIdhJFMEXC8iU9CT2ceBNegcDitE5EH0cE8icAHQEjSf0BP7gG+IyFL0D/xVBCbeg18zQ0Tmo4e+mlRosh6UUk0i8iR6pVYlgcnxpcDlp3pxInIBOiHTO+icD/noCfs/nOo5DIaeMMtxDSOZP6AzP24AKoGvWv/Qr0anTv0VOrHRG8AX0Al0TsQydAKkPwGfAPPQq52CeRW9wukN6zUf7OVcDwO/A55Ai9AtwC1dVlSdDA96ov81tEj9EZ2/+tE+nMNg6IbxHDcYDAZDnzA9DoPBYDD0CSMcBoPBYOgTRjgMBoPB0CeMcBgMBoOhTxjhMBgMBkOfMMJhMBgMhj5hhMNgMBgMfcIIh8FgMBj6hBEOg8FgMPSJ/w/9OnntO6CP6AAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "#plt.rc('text',usetex=True)nn\n", - "#plt.xscale('log')\n", - "long_end = 125\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=3)\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=3)\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=3)\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=3)\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "plt.savefig(\"./error_fig/noise1/\"+figure_name+\".eps\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd4U9UbwPHvSdK9924pe1sBAVFQQRQVRZGNyEZEBAVxgAP0pwgOQJEpWwRUliA4UFSUvTcyW1pW9945vz/ShhY60tI2KZzP8+Sxubm5903l6Zt7zznvK6SUKIqiKIqpNOYOQFEURaleVOJQFEVRykQlDkVRFKVMVOJQFEVRykQlDkVRFKVMVOJQFEVRykQlDkVRFKVMVOJQFEVRykQlDkVRFKVMdOYOoDJ4enrKGjVqmDsMRVGUamXfvn0xUkqv0va7LRNHjRo12Lt3r7nDUBRFqVaEEOGm7KduVSmKoihlohKHoiiKUiYqcSiKoihlohKHoiiKUia35eC4oigGSUlJXLt2jezsbHOHolgIBwcHAgMD0WjKf91QbRKHEKITMAPQAl9LKT82c0iKYtGSkpK4evUqAQEB2NnZIYQwd0iKmen1eqKiooiJicHb27vcx6kWt6qEEFrgK+AxoCHQWwjR0LxRKYplu3btGgEBAdjb26ukoQCg0Wjw8fEhMTHx1o5TQfFUtpbAGSnlOSllFrAS6FLRJ1m56Cc6vbKE5Qs2VPShFaXKZWdnY2dnZ+4wFAtjZWVFTk7OLR2juiSOAOBigeeReduMhBDDhBB7hRB7o6Ojy3WSs8fPk5mZyY87zpQ/UkWxIOpKQ7lRRfybqC6Jo1RSynlSyhZSyhZeXqWumC9S4waBnHcJIMLWncy0jAqOUFEU5fZQXRJHFBBU4Hlg3rYK1aHz/Wj1uVx1cOf3Tdsr+vCKohRQo0YN7OzscHR0xMfHhwEDBpCSklLp5xVC4ODggKOjIx4eHnTo0IFVq1bdtN9vv/3GQw89hJOTEx4eHoSFhTFlyhQyMgxfKidOnIiVlRVOTk44OTlRt25dRo4cyeXLlyv9M5hbdUkce4A6QohQIYQ10Av4saJP4ujtSXDKNfRCw6Yd/1X04RVFucGGDRtISUlh//797N27l//973+FXpdSotfrK/y8hw4dIiUlhVOnTjFgwABGjhzJpEmTjK9///33dOvWjT59+hAeHk5sbCyrVq0iMjKSixev3zXv2bMnycnJxMXFsXbtWq5cuULz5s1v++RRLRKHlDIHGAn8ApwAvpNSHquMc9XRJwFwJkVfKf9gFUW5WUBAAI899hhHjx7lwQcfZMKECdx3333Y29tz7tw5Ll26xFNPPYW7uzu1a9dm/vz5xvc+/vjjjB071vi8V69eDBo0yKTzenp60q9fP2bPns3kyZOJjY1FSsmYMWN49913GTp0KO7u7gDUq1ePL7/8kjp16tx0HCsrKxo1asSqVavw8vLis88+u8XfiGWrNus4pJSbgE2VfZ5mfo78mg5pWhv2/3OQFu2aVfYpFeWOd/HiRTZt2kTXrl3Ztm0by5YtY/PmzdSrVw8pJR06dKBx48ZcunSJkydP0rFjR2rVqkX79u1ZuHAhTZs25YknnuDy5cvs3r2bQ4cOlen8Xbp0IScnh927dxMaGkpkZCTPPvtsmT+HVqulS5cu/PLLL2V+b3VSbRJHVbm7jg8chmRre77bckQlDuW2caJ+gyo5T4OTJ0ze9+mnn0an0+Hi4sITTzzB+PHjeeyxxxgwYACNGjUCDEnl33//5aeffsLW1pawsDCGDBnC0qVLad++Pb6+vsyePZv+/fuTnp7OunXrcHJyKlPMVlZWeHp6EhcXZ3yvr6+v8fVevXrx888/k5WVxdy5c+nXr1+xx/L39ycuLq5M569uqsWtqqrUqGUTOmVE4JmewM+p9mRmZJk7JEW5ba1bt46EhATCw8OZNWuWcd1JUND1uTC
XLl3C3d29UDIICQkhKur6/Jgnn3yS3Nxc6tWrx/3332/c3qhRIxwdHXF0dGTbtm3FxpGdnU10dDTu7u54eHgAFBqnWLlyJQkJCTRr1ozc3NwSP1NUVJTx9tbtSl1x3MCxdk1mff4CHV79hiQre375cRtP9ehg7rAU5ZaV5UrA3AquNcj/Bp+cnGxMHhEREQQEXF/KNWHCBBo0aMD58+dZsWIFvXv3BuDYMdOGQtevX49Op6Nly5a4ubkREBDAmjVrCo2dmEKv17NhwwYefvjhMr2vulFXHEXQaDQ85a8F4PvdJjXEUhSlkgQFBdGmTRveeustMjIyOHz4MAsWLOC5554D4O+//2bRokUsXbqUJUuW8PLLLxe6GilJXFwcy5cv56WXXuKNN97Aw8MDjUbDZ599xqRJk5g/fz7x8fFIKTl9+jRXr14t8jg5OTmcOHGC3r17c+XKFcaMGVNhn98SqSuOIlw+cQZnOys8ouLZYevB1fOR+IQGmjssRbljrVixguHDh+Pv74+bmxuTJk3i4YcfJikpieeff56ZM2cSEBBAQEAAgwcPZuDAgfzyyy/FrpK+6667EEJgbW3NXXfdxbRp0+jTp4/x9Z49e+Li4sLkyZN59dVXsbGxITg4mGHDhtG9e3fjfqtWrWLdunVIKfH396djx47s27cPf3//Sv+dmJOQUpo7hgrXokULeSs9x9e/N53RmXUISIslyt6D15xjGDm+fwVGqCiV78SJEzRoUDUD4kr1Uty/DSHEPilli9Ler25VFaFeoGFgK8HWcD91/aVspOpnoCiKAqjEUST/moE4ZaWSqrHGLjeT045+HFj/m7nDUhRFsQgqcRTBJiSYkKQrADRzMdwj/W7rcXOGpCiKYjFU4iiCVWAgNfISR826wQD8qvMn9ZSqX6UoiqISRxE0trbUwVCl83J8GkEigzg7F375ZqOZI1MURTE/lTiKcZ9dJlp9LlnpGTxzt2Gh0brwTLKvXTNzZIqiKOal1nEUI2z6x+zX2uDibM+lhHS+3Pc7230acm7RN9R74/Ze3KMoilISdcVRDJ2bGy7O9gD4u9pxf4AdOVoda3acIzchwczRKYqimI9KHKXQ6yX7I+Lp8YBhscxm/7uJ/Wa5maNSFEUxH5U4ipF+6BDnu/eg+4QVdJ21HR9nGzxsBBHOvmxf/wf61FRzh6go1VpJrWNLa9t6owsXLiCEMFbCzX/kt4R99tlnGTp0aKH3PPPMM4wcOZKPPvrIuL+trS1ardb4PL+0e8F2swEBAYwZM6bIKrkDBgxAp9OpDoB3KpmrJ+PIEXySogE4GpVEt1ahAGzybET8qu/MGZ6i3BaKah1ratvWoiQkJJCSkmJ89OzZE4CvvvqKNWvWsHXrVsBQY2r//v18/PHHjB8/3rj/nDlzuPfee43PC1bXzW83+9dff7Fq1SoWLlxY6NypqamsXr0aFxcXvvnmmwr+TVkWi08cQoiJQogoIcTBvMfjVXFerasLAPUSDP9QD0Um0PMeQ4+AvwLuJmLRUnJT1FWHolSE/NaxR44cKXPbVlP4+vry2WefMXToUCIiIhg1ahRz587F0dGxzMeqXbs29913HwcPHiy0ffXq1bi6uvLuu++yZMmScsVZXVh84sgzTUoZlveo9PaxAFoXQ+Koe/UsAIcuJlDTy5GWoe5k6qzZah9C3OLFVRGKotz28lvH2tvbl7tta2kGDBhArVq1aNasGZ06daJTp07lOs7JkyfZtm0btWvXLrR9yZIl9O7dm169enHy5En27dtXEWFbJDUdtxhaZ2cAAi+fwdZKw4XYNOJTs+jdMojd5+PYGNqGxxfOxa13L3R5HcMUxZLVePOnKjnPhY+fMHnfG1vH9u7dmx9++KHcbVs9PT0LPd+xY0ehKrBt27bl119/NfbyKIv87n9paWn06tWLESNGGF+LiIhg69atfPbZZ/j4+NChQweWLl1K8+bNy3ye6qC6XHGMFEIcFkIsFEK4FbW
DEGKYEGKvEGJvdHT0LZ9QWFmhcXBAl5tDIx/D5eyhyAQea+yHp6M151wDOGznS8zcubd8LkW5U93YOjb/D39pbVsLDoBHREQY942JiSEhIcH4KJg0Tp8+zaeffsqIESMYO3Ys2WWseL1//35SUlJYtWoVu3btIrXABJlly5bRoEEDwsLCAOjbty/ffvttmc9RXVjEFYcQYgvgW8RLE4DZwAeAzPvvZ8CgG3eUUs4D5oGhH0dFxKV1cUGfmkoTD2v2RcLhyEQerOdNn1YhfPH7adbVakvTFd/i/nx/rAMDSj+gophRWa4EzKVevXomtW3Nn32V78KFCyUeV0rJkCFDeOWVV3j33Xe57777mDJlCm+//XaZ4hNC0KNHD9avX8/777/P9OnTAVi6dCkRERHGK6WcnBxiY2PZtGkTXbp0KdM5qgOLSBxSSpMa9Aoh5gNVVjDK5eku6FNTea65Hz0eaEDdvCuP51oHM/vPM+z0a8yVo444zZhBwCdTqyosRblt5bdtHTp0KM7OznTr1g1XV1fOnDlTbNtWU8yePZuYmBjGjx+PRqNhwYIFtG7dmm7dulG/fv0yH+/NN9+kdevWvPnmm5w/f56zZ89y4MABvLy8jPuMHTuWpUuXqsRhDkIIPyll/nXrM8DRqjq316hRAPjcsN3byZbOTf1ZeyCKDbXbMWTDetx698a+2d1VFZqi3LZMbdtaFFdX10LP33//fbp168b48eP5+eefsba2BqBhw4aMHTuWoUOH8vfffxfbYrY4TZo0oV27dnzyySekpqbSpUsXmjRpUmif0aNH07ZtW+Li4oyzw24XFt86VgixDAjDcKvqAvBCgURSpFttHWuKI5GJPDnzHxxFLkt/fAe3erWp8f13CK22Us+rKKZSrWOV4tz2rWOllP2klE2klE2llE+VljQqUva1a6Tt309WZBTf7b1I9znb2XzEcPomgS60CHEjRWr5vUkHMo4fJ2H16qoKTVEUxWwsPnGYU/zybwnv05fEH9dzNTGDPRfi2XU+zvj6oPsNK8nX1+9ArtAQPW06uYmJ5gpXURSlSqjEUQJt3v3S3IQEwoINPx+4eL0y7qONfAn1dCAyA3a060pufDzRM74wS6yKoihVRSWOEuSvHtcnJtI00JA4TlxKIjPHMJdcqxG8+EAtAFaFtkOv1RG/YgXpN5QiUBRFuZ2oxFGC/HpVuQmJuNhZUcvLgaxcPScvJxv3efruAPxdbDmTkMXRvi+DlFx+511kVpa5wlYURalUKnGUIP+KI3/cIv+q40jU9XEMa52G4Q8arjqW2tdHFxJM5unTxC5YUMXRKoqiVA2VOEpQcIwDoHGAIZEcjSo8AN6jRRCejjYcu5LM2RETAIiZNZvMc+erMFpFUZSqoRJHCW684mgV6k7fVsG0r+9daD9bKy1D2xpmWM2L0uL8TFdkdjaX334bWUSzF0VRlOpMJY4SaN3cCF27htDVPwCGK44Pn2nCI41uLqv1XOsQ3B2s2R+RwMluQ9B6eZK+fz9xi2/vuvyKotx5VOIogdBqsW3QACt//1L3dbDRGWdYTd8ehe/77wMQPX06madPV2qcilJdrVy5klatWuHg4IC3tzetWrVi1qxZSCkZMGAAQgh2795t3P/MmTOFyoM8+OCD2NraFuoMuGXLFmrUqFHsOSdOnIiVlRWOjo64urrSpk0bduzYUSmf70YDBgzA2toaJycnnJycaNy4MW+99RaJN6z/unz5MkOHDsXf3x9HR0dq1qzJgAEDOHnyJHBzq1wfHx86d+7Mb7/9ViWfQyUOE+WXZrmcmM76g1HsC4+/aZ/nWofg5WTDkahEdng3wKXbs8jsbC698SbyNi2vrCjl9dlnnzF69GjGjRvHlStXuHr1KnPmzOHff/8lK29Woru7e6kVbB0cHPjggw/KdO6ePXuSkpJCdHQ0999/P127dqWo8ks5OTllOq4pXn/9dZKTk4mOjmbRokXs3LmT++67z1imPTY
2ljZt2pCWlsa2bdtITk5m//79PPDAAzclhvxWuYcOHaJjx44888wzLK6CBnMqcZQi/cgRIgYP4epHkwH46fBlRq88yA/7bu59bGet5aW8GVaf//ofXm+8gVVAABnHjxMze3aVxq0oliwxMZF3332XWbNm0a1bN5ycnBBCcPfdd7N8+XJsbGwA6N+/P4cPH+avv/4q9lijRo1ixYoVnD17tsxxWFlZ0b9/f65cuUJsbCyLFy/mvvvu49VXX8XDw4OJEyei1+v53//+R0hICN7e3jz//PPGK4RVq1YRGhpKUlISAJs3b8bX1xdTegLZ2tpyzz338OOPPxIbG8uiRYsAmDZtGs7OzixbtoxatWohhMDV1ZWBAwfy8ssvF3ksX19fRo8ezcSJE3njjTfQ6/Vl/l2UhUocpRBaLan//kvS5s3I3NwCM6uSity/d6tg/F1sOXU1mZ/PJeM3+SMQgpg5c0nbs6cqQ1cUi7Vjxw4yMzNLLTlub2/P+PHjmTBhQrH7BAQEMHToUN57770yx5GZmcnixYsJCgoyNpHatWsXNWvW5OrVq0yYMIHFixezePFitm7dyrlz50hJSWHkyJGA4cqlTZs2jBo1itjYWAYPHszXX39dqLx6aZycnOjYsSPbtm0DDLfannnmGTSasv957tq1K9euXePUqVNlfm9ZqMRRCpsGDbAKDiY3Joa0ffto5G9oKXvqSjJZOTdndRudlpHt6wDw+W//YdWsBR5Dh4JeT9Rr48iJv/kWl6JUlRP1GxT7iF/1nXG/+FXflbhvQee7Plvk9pLExMTg6emJTne9s0ObNm1wdXXFzs6Ov//+27j9hRdeICIigs2bNxd7vLfeeosNGzZw7Ngxk87/3Xff4erqSlBQEPv27WPt2rXG1/z9/Xn55ZfR6XTY2dmxfPlyxowZQ82aNXF0dGTy5MmsXLnSeBvrq6++4o8//uDBBx/kySefpHPnzib/HgqeMy7OUAcvJiamUOvcH3/8EVdXV5ycnHjkkUdKPQ5gPFZlUYmjFEIInB81/M9K3rIFJ1sranoaVpD/dzW5yPd0bxFITU8HzseksnxXOF4vj8QuLIycq1e5/Nb4Iu+lKsqdxMPDg5iYmEJjCNu3bychIQEPD49Ct1psbGx45513eOedd4o9npeXFyNHjuTdd98ttH358uXGAeTHHnvMuL1Hjx4kJCRw7do1/vjjj0K9wYOCggod49KlS4SEhBifh4SEkJOTY2ws5erqSvfu3Tl69GihroUfffSR8dzDhw8v8fcRFRVl7Nnh4eFRqHXuU089RUJCAtOmTTOO/ZR0HKDS+3+oxGEC+xaG8vT5s6MaFbMQMJ+VVsObjxm6is34/TRJ2RDw2adonJ1J+fNP4pctq4KoFeVmDU6eKPbh1rOHcT+3nj1K3Leg0DWri9xeknvvvRcbGxvWr19v0v4DBw4kISGBNWvWFLvPuHHj2Lp1K/v27TNu69u3LykpKaSkpJR4xVLQjU2d/P39CQ8PNz6PiIhAp9Ph42No8Xbw4EEWLlxI7969GZXX/A1g/PjxxnPPmTOn2POlpKSwZcsW2rZtC0CHDh1Yt25ducYp1q5di7e3N/Xq1Svze8tCJQ4TWAUFA5AdYRgQb+zjAMDRS8WXUO/Y0IfWNd1JSMtm5tbTWAUE4Pc/w8yPq598SvpR0y6pFeV25OrqynvvvceIESP44YcfSE5ORq/Xc/DgQePsooJ0Oh2TJk1iypQpJR5z7NixTJ1asW2ce/fuzbRp0zh//jwpKSmMHz+enj17otPpyMjI4LnnnuOjjz5i0aJFREVFMWvWLJOOm5mZyb59+3j66adxc3Nj4MCBAIwZM4b4+Hj69evH2bNnkVKSnJzMwRKKp169epWZM2cyadIkJk+eXK7xkbKwiMQhhOguhDgmhNALIVrc8NpbQogzQohTQohHzRGfVWAACEH25cukHzqEy6Rx6JCkZRW/KlwIwdtPNEQIWLz9AuGxqTg/8gh
ufXpDdjZRY8eQm5JShZ9CUSzL66+/zueff87UqVPx8fHBx8eHF154gSlTptCmTZub9u/duzd+fn4lHnP06NFoK7gL56BBg+jXrx/t2rUjNDQUW1tbvvzyS8AwthIUFMSLL76IjY0N33zzDW+//TanS1i7NXXqVJycnPDw8OD555+nefPmbN++HQcHwxdST09Pdu7cia2tLffffz9OTk6EhYWRnJzM7BtmZ7q6uuLg4ECTJk3YtGkT33//PYMGDarQz18Ui2gdK4RoAOiBucBrUsq9edsbAiuAloA/sAWoK6UssY5HZbSOjf5yJlo3NxLXriXl+An0QnDX8dLbn4/57iBr9kfxWGNfZj/XHH1mJhd69CTz1CmcHnmEgBnTy9zvWFFMoVrHKsW5LVrHSilPSCmLmj/WBVgppcyUUp4HzmBIIlXO6+WRuD/XF5u6ddFJPdZ602pQjXu0HnZWWjYfvcKfp66hsbEhYPo0NI6OJP/6K3Gqiq6iKNWMRSSOEgQABVfaReZtMxu35/oChmm6er0kNbPklaV+Lna82tEwPfed9UdJz8rFJjQU/6mGe7XXPp9G6vbtlRu0oihKBaqyxCGE2CKEOFrEo+QVQKYff5gQYq8QYq8pqzbLKvvqVRI3bCT90CEA/tT5ctf7v/LBxuOlvnfgfaHU93XiYlw6M7ca7n06tW+P54gXDes7xowlO28anaIoiqWrssQhpXxYStm4iEdJ8/GigIKTqgPzthV1/HlSyhZSyhZlWbVpqvQDB7g0bhzRn34GgFtyLMkZORws0IO8OFZaDR8+0wSAeX+f43Te+g/Pl17CoV1bchMSiHx5FPqMjAqPW1EUpaJZ+q2qH4FeQggbIUQoUAfYXcp7KoV1sGFKrj4tDYB7h/VGqxH8dzW51NtVAM1D3OjdMpjsXMn4tUfQ6yVCqyVg6lSsgoLIOH6cK5PeV4sDFUWxeBaROIQQzwghIoF7gZ+EEL8ASCmPAd8Bx4GfgZdKm1FVWazyEgeA95tv4Nv1aer7OqGXhVvJluSNTvXwdLRhz4V4Fm+/ABi6DAZ++QXC1pbEtWtV/w5FUSyeRSQOKeVaKWWglNJGSukjpXy0wGsfSilrSSnrSSlNW/pZCbSOjsafrQIM4/NhQYbWsodMuF0F4GpvzUfPNAZg6i8nORdtWMdhW78+/h9/DMC1qVNJ3rq1wuJWFEWpaBaROKoLr1dG4/jAA5CdTdy339LU01D6+UCEaYkD4JFGvnS9O4CMbD2vfX+IXL3h1pRzp0fxHPUySMmlsa+Rceq/SvkMiqIot0oljjLwHD6coLlziJ41i6vvf0ATW0PBsX0R8WUam3jvyUZ4O9mwPyKBBf+cu378F1/E+fHH0aelEfnii+TExlb4Z1AURblVKnGUg9bBcNsqRJPFB083ZtGAe8r0fhd7K6Y82xSAT3/9jzPXDLOshBD4ffQhtk2bkn3pEpEjX0ZfSjVMRanOLLF1bGltW4uSH0d+NVxHR0eefPJJADZs2ICvr2+hUufr168nICCA8PDwQu8RQuDg4GB8vm3bNmO7WUdHR9zd3enYsWORsfz5558IIUqs51VRVOIoB01eTRmZlka/1iE0DnApc9mQh+p706NFIFk5esZ+f5icXEMlTI2tLYEzv0Tn60v6gQNcHj8BWcndvBTFHCyxdWxZ2rbeaObMmcZquCkpKWzYsAGAJ598kvbt2/Pqq68ChnavL774IrNnzyYkJKTQewAOHTpkfJ5fMff1118nJSWFqKgoAgICGDx48E3nX7JkCe7u7ixdurRMv4vyUImjHPITh76IKp5l8Xbnhvi52HLoYgKz/rze9tLK25ug2bPQ2NuTtHEj0dOm39J5FMXSWGrr2PK0bTXFF198webNm/nll1949dVXeeCBB3jqqafKfBw7Ozt69OhxU6Xc1NRUfvjhB7766itOnz5NRdfqu5FKHOVQMHEkZ2TzyS8nGbOq+JLHxXG2teKTbncBhr4d+yO
udwe0bdCAgBkzQKsldv584r79tmKCVxQLYKmtY2+lbWtJPD09mTFjBn379mXjxo188cUX5TpOamoqK1asoHbt2oW2r1mzBkdHR7p3786jjz7KkiWVO61fJY5y0ORNzdWnpmCj07Lgn/OsORBFXGrZxyPur+PJkPtDydVLXll5kJQCiwkd296P3/vvA3D1fx+S/McfFfMBlDtWjTd/Kvbx7a4I437f7ooocd+COn+5rcjtJbHU1rG30rZ11KhRuLq6Gh83dixs3bo1iYmJPPLII2XqSQ7w6aefGuP4559/WHZDM7glS5bQs2dPtFotffr0YeXKlWRnZ5fpHGWhEkc5aBwcQAj0mZlY6zTG9Rx7LpSvz++4TvVo6OdMRFwa760v/A/f9dmueI4caaxplV8rS1GqM0ttHWtK29bhw4cbj/nRRx8Z9/3iiy9ISEgwPm4cdxk2bBjPP/88mzZtKjQYb4rXXnuNhIQELly4gJ2dHadOXS8mfvHiRbZu3UrfvoYCrF26dCEjI4OffjI9kZeVrvRdlBt5jXwJr9GjEHmXsy1ruLPzXBx7zsfxaCPfUt59Mxudli96h/HEF/+wen8kD9bz4sm7/I2ve740guzLl0hcvYaLw1+kxsoVWBfogawoprrw8RMm7denVTB9WgWXviOw8eW2ZY6jYOvYZ599ttT9Bw4cyJQpU0ptHVuzZk1atrzeeaFv377GP6imyG/b+t577xV7u2rOnDkltoItyoIFC7h48SIbN26kRYsWDBkyhAMHDmBtbV2m4wQHBzNjxgz69+9P586dsbOzY9myZej1euMsLoCMjAyWLFnC008/Xabjm0pdcZSDsLIyJg2Ae0INjeHLe8UBUNvbibc7NwRg/NojRManXT+fEPhNnIhD27bkxscTMXQYOXHlP5eimJulto4tT9vW0ly6dIlx48Yxf/58bGxsGD58OB4eHnz44YflOl7Hjh3x9/dn3rx5gOE21XvvvcfBgweNj9WrV7Np0yZiK2ktmEocFaBZsBtajeDopaRCYxRl9VyrYB5u4ENyRg5jVh0yTtEFQ7IKnD4N24YNyY6I4OKwF8hNubVZXYpiTpbYOrYsbVtvNHLkyEJrMvJvf43JE0mgAAAgAElEQVQYMYJevXoZp9YKIZg/fz7Tp083eUzmRuPGjWPq1Kn89ddfhIeH89JLL+Hr62t8PPXUU9SuXZsVK1aU6/ilsYjWsRWtMlrHFpS2bx9XP/wI26ZN8Js4EYAuX/3LoYsJLB3UknZ1y1/WPTYlk8dmbONaciYvt6/N2EfqFXo9JzqaC336kn3xIvatWxM0dw6avKmLilKQah2rFOe2aB1b3cjsbDKOHyfr3Hnjts5N/OjZIgh3h7Lds7yRh6MN03uFoREwc+sZtp0u3JRK5+VF8MIFaL08Sdu5k0uvvYbMKf9VjqIoSlmpxFEORS0AHNquJlO6NaVxgMstH79NLU9Gd6iLlPDKyoNcTSrc4Mk6KIjgrxegcXYm+bctXH7vPdXHQ1GUKqMSRzlU1MrxkoxsX5s2tTyITc1i9MoDxiq6+Wzr1SVozhxDH4/Va7j26aeVFouiKEpBKnGUgyavyGFuakqh7TEpmfx46BKnriTf8jm0GsH0XmF4Otqw81wcM34/fdM+9s3uJvDLL0CnI27BQmLmz7/l8yqKopRGJY5yuH7FkVZo+/xt5xi14gDrDhbZFr3MvJ1s+aJXGELAl3+c5p/TMTft49i2Lf5TPgYhiP7sc+JXfVch51YURSmORSQOIUR3IcQxIYReCNGiwPYaQoh0IcTBvEfZVt1UEo29HWCojluwcm2bWp4AbD9bcXOn29T2ZFT7OobxjlUHuZaccdM+Lk88ge+7hlW1VyZOJHH9+go7v1K96VVlZeUGFTEeahGJAzgKdAX+LuK1s1LKsLzH8CqOq0hCo8G1dy/c+z8PBWY03VPDDZ1GcCQygcT0iqsTM6pDHe6t6UFMSiajVxy8abwDwK13b7x
fG2voIPjWeJJ+/rnCzq9UTw4ODkRFRZGVlaUmTygAxtLxtra2t3Qciyg5IqU8AZS5p4U5+RVRidPeWkdYkCt7w+PZeyGODg18KuRcWo1gRu8wHp/xDzvOxfLZr6d4vVP9m/bzGDIEfUYmMTNnEvXaOIS1NU7t21dIDEr1ExgYSExMDOHh4YVqQil3NltbWwIDA2/pGBaROEoRKoQ4ACQBb0spt5k7oJI0D3Fjb3g8hy4mVFjiAMN4x5e97+a5BbuY9edZ7gpyLbIuludLI5CZGcTO/5qo0a8QOGsWjm3vr7A4lOpDo9Hg7e2Nt7e3uUNRbjNVdqtKCLFFCHG0iEdJBfkvA8FSyruBMcC3QgjnYo4/TAixVwixNzo6uqhdKlRuSgop2/4hs0DzmMxz52niYw/AwcjECj/nvbU8eKOTYSX5a98d4lx0yk37CCHwGjMGt379kNnZRI4cSequ3TftpyiKUl5VljiklA9LKRsX8Sh2JFdKmSmljM37eR9wFqhbzL7zpJQtpJQtylrrvjxi583n4tChJK5bB0D64cOce/xxfGZNQasR5FbSoOTQtjV5vIkvyZk5DP9mH6lF1MYSQuAz/i1cu3dHZmZy8cUXSdt/oFLiURTlzmMpg+NFEkJ4CSG0eT/XBOoA58wblYF982YApO3bD0D6ocMA+Hk4cnTioywf0rpSziuEYGq3u6jl5cB/V1N4Y/XhIgc+hRD4TpqIS5enkGlpXBw2jPQjRyslJkVR7iwWkTiEEM8IISKBe4GfhBC/5L3UDjgshDgI/AAMl1JaRD1xu7AwEIKMI0fQZ2aSeeaMYXvDhthZl786pykcbXTM7dccB2stGw9fZtG/F4rcT2g0+H34IU6dOqFPSSFiyBAyTpyo1NgURbn9mZQ4hBDupbyuFUI0K28QUsq1UspAKaWNlNJHSvlo3vbVUspGeVNxm0kpN5T3HBVN6+KCTZ06hoKHR48a/yDb1jdUnJRSkpBW9laypqrt7cQn3Q39yj/adILd54vOp0KnI+CTqTi2b48+MZGIAQPJOH680uJSFOX2Z+oVR7QQwjg1QwhxQAhRcD6XJ7CnQiOrBuzyb1ft3k3myZMAXH77bY5FJXL3B78xYFHl/koeb+LHsHY1ydFLXvp2/03FEPMJKysCpk/D8aGHyE1MJGLgIJU8FEUpN1MTx40LLGoDN9YPrz6LMCqIfTNDo5a4xUuQef2IsyMj8bORJKRlc/JKUpGL9SrS64/Wo3VNd6KTMxm2dC8Z2blF7qextiZgxnRj8ghXyUNRlHKqyDGOO25pquNDD6JxdiY3sfDUW6ecDAJc7cjI1nM+pnK79Om0Gr7q04xANzsORSby1pojxa4S1lhbE5iXPPR5ySO9nB3IFEW5c1nE4Hh1pXV0pObaNQTNm4vD/dcX2emTk2ngZ1hucvxyUqXH4eFow/znW2BvrWXtgSjmbyt+4pnITx75Yx6DBqvkoShKmZiaOCSFryhufH7HsgoIwLFdO4K/no/dXYbB6tzkFBr65yWOS5WfOAAa+DnzeQ/D+SdvPsnWk9eK3VdYWxM4fZpKHoqilEtZxjh2CiH+E0L8BzgAfxR4vr3SIqxGNE5OAOhTkmlYhVcc+To19uPVhw2dA0etOMCZazevLM9nTB4dOqjkoShKmZhaq2pSpUZxm9A45TV4SkqmUWND4jhRhYkD4OX2tTl5JYnNR68wbOle1o64Dxd7qyL3FdbWBE77nMhXx5Dy++9EDBxE8MKF2DVuVKUxK4pSvYjbsdxyixYt5N69e6v8vPGrviPj6BFcunbFLiyM5bsiaODnRLNgtyqt/JuWlcOzs3dw4nIS7ep6sbB/C3Ta4i8uZVYWkWPGkLLldzTOzgQv+Bq7Jk2qLF5FUSyDEGKflLJFqfvdSuIQQrQFvIGtlrKiG8yXOCxJZHwaXWb+S2xqFkPuD+Xtzg1L3F9mZRE1dizJv21B4+hI0Ly52Dcr95pORVGqIVMTh6krx0c
KId6+Ydt64C/ge+C0EOLmBhEKYLgCqOz1HDcKdLNn9nPN0WkEX/9znh/2RZa4v7C2JuDzzwuUJxlK6s5dVRStoijViamD488DEflP8kqhPw70A+4BTgPjKzy6aiY3OZmM48fJunDBuO3no5d56NM/+WHfxSqPp2WoO+93aQzAW2sOF1uWJJ+wsiLg009w6dLFUBjxhRdI2WbR7U8URTEDUxNHLaBgXe7HgY1SyuV55c4nYChIeEdL2bqV812fJXrmV8ZtWbmSq0mZzPvbPEV9+7QKZkCbGmTnSl5YtpcLpSxIFDodfpM/wrVHD0NJ9hEvkbxlSxVFqyhKdWBq4rDD0IEvX2sK9wc/jWGs446mcTRMx81Nvv6reqyxLzY6DWejU4lPrbyihyV5p3NDHqrnRXxaNoOW7CExreR+6EKjwXfSRNz69YPsbCJHv0LSpk1VFK2iKJbO1MQRCTQFEEK4AY2AHQVe96JwYrkjafOm4+qTr6+fsNJqaBroAsDByATzxKURfNmnGfV9nTgXncqLy/eRnVtyo6n8ZlAeQ4dCbi5Rr40jYe26KopYURRLZmriWAV8IYQYASwBLgIF+5G2AE5VcGzVjsbZsHZDn5xcaHtYkCsAByLMkzjA0MNjwYB78HS0YfvZWN5Zd7TYmlb5DG1oX8Vz1Mug13P5rbeIX7myiiJWFMVSmZo4PsQwg+pDDJVx+0opC35l7Q38VMGxVTtaR8MVR05CPFcnf2wcGwgLcgPg4EXzJQ6AAFc7vu7fAhudhpV7LpZY0yqfEAKvESPwHjcOgCsTJxG3ZEllh6ooigUzKXFIKTOklAOklG5SyoZSyu03vP6glHJq5YRYfeSXHMmNjiFuyRIuvWWYaHZ3sOGK42BEPPoqnpZ7o7AgVz7vEQYYalr9cuyKSe/zGDwIn3cMM7KvTv6YmLnzKi1GRVEsm0VUxxVCfCKEOCmEOCyEWCuEcC3w2ltCiDNCiFNCiEfNGWdpNI6OUGCFuFvfPgD4udgy/vH6fNW3mUVUhnyiqR/jHq2HlPDKyoMcjUos/U2Ae9+++H34PxCC6GnTuDZjRqm3uxRFuf2YtHJcCGHS10sp5bByBSHEI8AfUsocIcSUvGO9IYRoCKwAWgL+wBagrpSy6G5Fecy5cjz94EHCn++PzMoiePEiHFq3NkscpZFS8tr3h1m9PxIfZxvWv3Q/vi62Jr03ccNGLr35JuTm4tavHz5vvYnQWMR3EEVRbkGFrhwHhgCPYBjfqFPMo3b5QgUp5a9Sypy8pzuB/La0XYCVUspMKeV54AyGJGKxrEJCkNmG6a5Wfn7InJxS3mEeQgg+6tqYljXcuZqUyeAle0jNNC1Wlyc7EzB9GsLKivhly7g8foLFfk5FUSqeqYljPeAH5AKzgY5SyodueLSvoJgGAZvzfg7AMIMrX2TeNouVtmcP5F3FnX20E1kXDeGnZeUw7++zjF97xJzhFWKj0zKnX3NCPOw5dimJ0SsPmlwaxbljR4LmzkHY25O4bh2Rr7yCPjOzkiNWFMUSmDo4/gxQA/gT+BiIEkJMFULUMfVEQogtQoijRTy6FNhnApADLC/Lh8h77zAhxF4hxN7o6Oiyvr3CRI0ZW+h5brxhJpWVVsP0Laf5dlcE15IzzBFakdwdrFk44B6cbXVsOXGV9zccM3ncwqFNG0IWLkDj7EzKlt+5OHw4+tTKbZWrKIr5mXxjWkp5WUr5IYbyI/3z/ntECLFVCFHqzXEp5cNSysZFPNYDCCEGAJ0xTPXN/8sVBQQVOExg3raijj9PStlCStnCy8vL1I9V4dyfew4AXV4MufGG+lBWWg3Ngg3TcvecjzdPcMWo5eXIvOdbYK3VsGRHOF9vO2/ye+3CwghZthStpydpO3YSPmgQuQnmnXasKErlKvOIpjT4GZiDYTyiLWDaqGoxhBCdgNeBp6SUaQVe+hHoJYSwEUKEYhhL2V3UMSyF97jXqLtnt7EHeW789STRMtQdgD0
XLKYCvVHrmh58mtd69sNNJ/jp8GWT32tbrx41ln+Dlb8/GYcOE97vebKvFd+6VlGU6q1MiUMI4SeEGC+EOAcsw5A46kkpb/Ur5kzACfhNCHFQCDEHQEp5DPgOOA78DLxU2owqcxNaLVonJ7TuhquLnLjrieOeGobEsauUKrXm8tRd/rz5mKE6/qvfHSxTgrMOCSFkxbdY16pF5unThD/Xj6zIkku5K4pSPZnaj6OzEGIdcB54EHgDCJJSvimlPHurQUgpa0spg6SUYXmP4QVe+1BKWUtKWU9Kubmk41gSnZshcRS84rg72BWdRnDqShLJGSUXGjSXF9rV5LnWwWTl6BmyZG+JfctvZOXjQ8g3y7Bt1IjsiAjC+/Ql88yZSoxWURRzMPWK40fgLgwlRxYDVkB3IUSfgo9KirFa0roZri5y465/a7e10tIowAW9NH/5keIIIZj4ZCMebuBNYno2AxbtJjrZ9NlSOjc3gpcsxv6ee8i5do3w5/qRfsRyZpIpinLrTF0AWHIpVQMppdTeeki3zhJax2ZHRZF+7BjWITWwrVfXuP2L309zNCqRFx6oSfMQdzNGWLK0rBx6z9vJochEmga6sHJYa+ytdSa/X5+RQdToV0j56y809vYEzpqFQ+tWlRixoii3qkp6jlsqS0gcBaXu2o3O2wub0FBzh1Im0cmZdJ39Lxfj0ulQ35u5/Zqj05o+LCazs7n01niSNm5EWFnh/+mnOD/6SCVGrCjKrajQxCGEMKm7n5Ty79L3qnyWlDhyk5KIGDwEYWNNjW++MXc4ZXY2OoVnZ28nIS2bvq2C+d/TjREF6nGVRur1XP3wI+KXLwch8H3vXdx69arEiBVFKS9TE4ep9x7+BCRQ1F8MWeC/pt/LuM3JnBxiFywkKyKcjCNH0Li4GF9LyczhYEQCdXwc8XG+pZnMla6WlyNfP9+CPl/vYvmuCALc7BjxoOnVZYRGg8/bE9B5eRI9fQZXJk4iJzoGz5EvlSkBKYpiOUy97xAEBOf9t+AjFMNK8gyKWZh3x9JqiZ45k8TVawDQJyYaa1i9vfYIzy3YZXJJc3NrUcOd6T3DEAKm/nyK1fvKNs1WCIHn8OH4fvA+aDTEfPUVVyZOQuZa9MxqRVGKYWrJkagbH8DdwCZgBPA+ULfEg9xhhBBo7e0LbcvJm5p7by0PALadjqnyuMrr8SZ+vPNEQwBeX32YrSfLvsDPrXt3Ar/8AmFjQ8KqVUS98qqqb6Uo1VCZV44LIZoJIf4A1gB/AHWklFOklOovwA1yEwv3uchf09G2jqEcyY6zsaX2/rYkg+4P5cUHa5Grl7y4fB/7I8peOsWpQweCF3yNxtmZ5N9+4+KQoeQm3fHt6hWlWjE5cQghgoQQ3wB7gASgkZTyZSll9fnabGb5azr8Xe2o7e1ISmaOWfuQl8frj9ajR4tAMrL1DFq8hzPXkkt/0w3sW7QgZNkydN7epO3ZYyhRclWVKFGU6sLUleMfA6cwFDZsJ6XsKqU8XamR3QbcBw8CMA6M58ReXwzYto4nANtOm6+Sb3kIIfjomSZ0qO9NQlo2zy/YzaWE9DIfx7ZeXWqs+Bbr0FAyT50ivE8fMs+bXlxRURTzMfWK43UMs6ZSgPeEEL8W9ai8MKsn79Gjqf3Xn7g+8ww6X19jnw6Adnm3q/7+r3olDgCdVsPMPs1oHuLGpcQM+i/cTUJaVpmPYxUQQMi3y7Ft2pTsqCjC+/RVq8wVpRowNXEsxVBsMBLD7KniHkoBwtoaKx8ffN58gzp/bsXlyc7G11rVdMdapyE9O5eM7Oo3u8jOWsuC/i2o6+PI6WspDF6yl/Sssn8OnZsbIYsX4dC2Lbnx8YT3H0DKtm2VELGiKBVFrRw3o2tJGXhb+DqO0lxOTOfZWdu5lJhBh/rezOnXHKsyrC7PJ7Ozufz22ySu/xG0WsNCwR49KiFiRVGKU9E9x5VKUN2TBoCfix1LB7fE1d6K309e4601R0zuIFi
QsLLCb/JkPF54AXJzufLue1ybNr1cx1IUpXKpxFEF0o8e47+2bQnv93yRr1+MS6uWt6vy1fZ2YuGAe7C10vDDvkim/HyqXMcRGg3er76C76RJoNUSO3cul15/A31W2cdPFEWpPCpxVAGNvR250THkFNEVb9SKA7SdupV/z1TvWc3Ngt2Y3bc5Wo1gzl9nmftX+du0uPXsQdDsWWjs7UnasMGw1uOGNTGKopiPShxVQJvX1Ckn/uYFc7W9HQH4+Wj1KD9Skofqe/Np96YATN58kuW7wst9LMd27Qj5Zhk6Ly/Sdu/mQp++ZEWq+ReKYgksInEIIT4RQpwUQhwWQqwVQrjmba8hhEjPaydrbClb3WhdXECrRZ+UhLzhtstjjX0B+PX41Wq1irw4z9wdyAddGgHw9rqjrD9Y/j/2tg0bUmPVSmzq1Cbr7Fku9OpF+tFjFRWqoijlZBGJA/gNaCylbAr8B7xV4LWzRbWUrU6ERoPW1RWAnPjCK8Xr+DhRx9uRxPRstp+NNUd4Fa7fvTV4o1N9pIQx3x3i11so5mjl70/I8uXYt2pFbkwM4f36kfznnxUXrKIoZWYRiUNK+auUMifv6U4g0JzxVAade34r2ZuTw2NN/ADYfORylcZUmV58sBYj8upajfz2wC2N4WidnQmePw+XLk8h09OJHPESccuXV2C0iqKUhUUkjhsMAjYXeB4qhDgghPhLCNHWXEHdKm1e4sgp0IM83xN5ieOXY1fQ62+f6afjHq3H8/eGkJWrZ+jSvewLL3tRxHzC2hq/jz/Gc8SLoNdz9YP/ceX995E5OaW/WVGUClVliUMIsUUIcbSIR5cC+0wAcoD8r5OXgWAp5d3AGOBbIYRzMccfJoTYK4TYGx1teWU8XLt3x/uNN7AOCTFuk1lZyNxc6vo4EuBqR0pmDuFxaWaMsmIJIZj4ZCO6NgsgLSuXgYt2c/xS+SvhCiHwGjUK/ykfI6ysiP92BReHDVMzrhSlilnMynEhxADgBaCDlLLIv55CiD+B16SUJS4Lrw4rx/UZGZx7orPhHv6ypZy5lkyAqz121lpzh1bhcnL1jPz2AD8fu4KnozWrXriXWl6Ot3TMtP0HiBw5kty4OKxDQwmaPQvrGjUqJmBFuUNVq5XjQohOGAopPlUwaQghvIQQ2ryfawJ1gHPmibJi5KakkPLvv6T+8w/ZUVGk7dmDzM6mtrfTbZk0wFAUcUbvMNrW8SQmJYu+83cRHpt6S8e0b3Y3od9/h03dumSdP8/5nr1I3bmrgiJWFKUkFpE4gJmAE/DbDdNu2wGHhRAHgR+A4VLKmwcJqpHIl0ZycfAQEjf+ZNyWU+DWml4vb8syGzY6LfP6taBlqDtXkjLoPW8nF2/xtpyhuu63OD70EPrERCKGDCF+1XcVFLGiKMWxiMQhpawtpQy6cdqtlHK1lLJR3rZmUsoN5o71VtnddRcA1kFB2NSrB0D21asAvP7DIcLe/5XzMbf2bdxS2VlrWTjgHlrklWPvNW8nkfG3ljy0jg4EzvzS0PskJ4cr773HlQ8/UoPmilKJLCJx3EnswsIASD94EKsgw6zjnLzud8kZOSRl5LDzbPUuP1ISRxsdiwbew93BrkQlpNN7/k6iytEIqiCh1eIzbhx+H34IVlbEL1tGxJChRc5gUxTl1qnEUcXswgxXHOkHD6LzMHQBzL9V1TT8EACbfz9onuCqiJOtFUsGteSuQBcuxqXTZ/5OLifeWvIAcH22KyGLF6H19CRt507Od+tG+jG10lxRKppKHFUsfyGgzM4m8/Rp6u7dg3u/58i+fJm7vpuNRurZkaghMT3bzJFWLmdbK5YObkWTABfCY9PoM38XV5Mybvm49s2bE7r6B2zvakrOpcuE9+lL4vr1FRCxoij5VOIwA9uGDQ3/bdQIraNhWmr09Bm4ZabQJOYsORotvx2/as4Qq4SLnRXLBrekkb8z52NS6T2vYq48rHx8CFm2DNfu3ZCZmVx6403
DuEf27Z2MFaWqqMRhBoGzZ+M97jW8XxsLQG5SEokbNoBOx7PdHgTgp8OXzBhh1XG1t+abwa1o6OfMuZhUeszdccuzrQA01tb4ffCBobdH/rjHwEFkF1HaXlGUslGJwwysfLzxGDwYfXIy4QMH8l/LVqDXY9ekCU+0bYBGwLbTMaRkFp4ZtPHwJWb+cdpMUVceNwdrVgxtbRzz6DF3R4XNLHPr2YOQpUsM5dn37uV812dJ3bmzQo6tKHcqlTjMSGNnR9oOwx8x586dcerQHnd7Kz5rH8Dqx/2wtyq8IHDktwf49Nf/OHMtxRzhVioXeyu+GdKKe2q4cTkxgx5zd3D6anKFHNv+7rsJXbMa+5YtyY2JIWLQYKJnzULmVt+ui4piTipxmJHGwQFN3hiHz4TxeAwZQuo//1D/5d64zP4MjUYY903Puv5H7sYrkdtF/myrNrU8iE7OpOe8nRy7VDF1qHReXgQvWmgokiglMV98ycWhw8iJvT1K2StKVVKJw8x0Pj4Axraytk2aAJBx7Fihb8TRyZnGnxPSbt8e3PbWOhYOuIcH63kRl5pF73k7OXQxofQ3mkBotXiNGkXQ/Plo3dxI3b6d8890Jc3C65opiqVRicPc8sqLZJw4AYDOzQ3h5MSi0IfoNvtfMrINySPYw54n7/IHIP42ThwAtlZa5vZrziMNfUjKyKHP/J38c7riFkU63n8foevWYte8OTnXrhHefwDRM79Sq80VxUQqcZhZ1jlDzcbLb15veqhzdmaPT332RSZzIOL6t213eysAEtNu/2mlNjotX/VtRpcwf1Kzchm4eDc/Hqq4mWZWPj6ELFmMx9ChoNcTM3Mm4X2fIysiosLOoSi3K5U4zMxv8mQAAr78wrhN4+JMk1hDQtl57vo9+Nc71efkB50YcF9o1QZpJlZaDdN6hDH4/lCycyWjVhxg4T/nK+z4QqfDe+wYghctROfrS/qhQ5x/+hkSVq+5LQtNKkpFUYnDzFye7kLdvXtx7tjRuE3r7EJYtGHa7V//GcqRTPzxGM/O3l4okdwJNBrBO50bMv7x+gC8v/E4H28+WaF/2B1at6bm+nU4PdYJfVoalydMIGr0K+TEl79joaLczlTiMDMhBFpHh0LbtM7O3BV9BisBhyITiE3J5L+ryZy8kowQopgj3d6GtavF5z3uQqcRzPnrLK99f5jsXH2FHV/r4kLA55/j9/FkNA4OJP/6K+e7PE3KX39V2DkU5XahEocF8h47hkbrfqBVqBtSGhYDXsqrIPvKygOM/e6QmSM0j67NAvm6fwvsrLSs3h/J0KV7K3RqshAC16efJnT9OuyaNSPn2jUuvjCcqNdfV1cfilKAShwWyDokBJs6dXiooR8Af5y8xqVEQwHA+LRsjkbduT22H6znzYphrXF3sObPU9F0m739lnt63Mg6MJCQZUvxfv11hI0NST9u4NwTnUnavFmNfSgKKnFYtA71venZIoh2db3Iyrl+W+Z2n45bmrAgV9a82IZaXg6cvJLM01/9y77wir0iEFotHoMGUvPH9djfcw+5cXFEvTqGyJdfVvWulDuexSQOIcQHQojDea1jfxVC+OdtF0KIL4QQZ/Jeb2buWCtb2v4DXHrjTZx/WceUbk2p62NYXV7TyzAWEp+Wdcd/863h6cCaEfcZ+5j3nr+TdQeiKvw81iEhBC9ZjO/E99A4OJCy5XfOdX6S+JWrVMkS5Y5lMYkD+ERK2VRKGQZsBN7N2/4YUCfvMQyYbab4qkzOlcskrl9P2u49AFxKSEcI8Hexw95aS3auvG3LjpSFi50VCwfcQ7/WIWTl6Hll1UGm/HySnAocNAcQGg1uvXpRc+MGHNq1RZ+UxJWJE7nQoyfphw9X6LkUpTqwmMQhpUwq8NQByP9K3QVYKg12Aq5CCL8qD7AKaZxdAMhNMoxl1PRypKGfM3cFueBmbw1AfOrtvwjQFFZaDR883ZhJTzVCqxHM/vMs/RftJjYls/Q3l/V
cfn4EzZ1LwPRp6Hx9yTh2jAs9e3H5nXfU4LlyR7GYxAEghPhQCHER6Mv1K8ntmhEAAB2nSURBVI4A4GKB3SLztt22tC7OAOgTDbm0tpcjE55owIgHa+PuYEgccXf4OMeN+repwbLBLfF0tObfM7F0/vIfDkRU/B9zIQTOnTpR66eNeAwdAjodCd//wNlHOxG7cBH6LPX/Rbn9VWniEEJsEUIcLeLRBUBKOUFKGQQsB0aW8djDhBB7hRB7o/N6eFdXWmdD4shNMiQOjUbQppYnDjY6OjX2pV/rEJxtdeYM0SK1qeXJxpfb0izY1ViafdnO8EoZD9I4OOA9diw116/DoU0b9ElJXJs6lXOPPU7ixp+Q+oq9XaYolkRY4iCrECIY2CSlbCyEmAv8KaVckffaKeBBKeXl4t7fokULubcaVzzNTUjgv9b3onF2pt7uXeYOp9rJytHz4U/HWbIjHIDHGvsyuWsTXPNu81U0KSWp27Zx7ZNPyDx9BjBUOfYe8yr2rVvfsYs2lepHCLFPStmitP0s5laVEKJOgaddgJN5P/8IPJ83u6o1kFhS0rgdaJycANAnJ6tvruVgrdMwqUtjZvQKw9FGx+ajV+g0fRvbz1Zchd2ChBA4tmtH6Nq1+H7wPjovLzKOHCFi4CDC+/UjddfuSjmvopiLxVxxCCFWA/UAPRAODJdSRgnD17WZQCcgDRgopSzxcqK6X3EAhPcfgNDpCJz5JRo7O+P2hLQswmPTcLLVUdPL0YwRVg8RsWmMXnWAAxEJCAHDH6jFqw/XxVpXed+Z9GlpxC1dSuyixej/396dx0dVng0f/12zJZM9IQv7kggIIioCRdxflwcoShcXWm219ummPi3YfvrqSyta31rbp5ttfaxttbW+rcW2uDwPCioW3BdW2YUkbAESIHsyk1nO/f5xTpJJIJhRMjMk1/fzmc85554z51y5ZzLXnPucc98N9gUOGdOnU3j7bWRMm6ZHICpl9faII2USx8nUHxJHT554ew/ff2Yzn5s+gh99ZnKywzklRKIWv3plF795ZSeWgYlDcvjJNZOZNCy3T/cbbWqi9oknqP3T41jO+ar0syYz6JYvk335ZYjb/SFbUCqxTrmmKtU7hc5VVUeb9eqd3vK4XdxxxTie+tp5jCjws/VgI/MeeoMHXtjeMVBWX3BnZ1N0662c9vJLFN5+O+7cXIIb36fqW9+ifM4c6p58EisQ6LP9K9VXNHGkKBONEq2vxwoGu5QPykoD4Egf3KfQ300dXcCKBRdxy/ljsIzht6vLmf3ga33eVb07J4ei22/jtH+9QsmiRXiHDSO8Zy+H7v0BOy++hOof/Yi2ipM3zohSfU0TR4qqWngHH8w4j+Z//atLeWGWfcRxRI84PpIMn4e7r5rIP78xk3ElWVQeaWH+795m4ZINHGoIfvgGPgZXRgYFX7iRshXLGfaLn5M+eTJWYyO1j/+Zijlz2HPzl2hcvlzvBVEpTxNHinLl2FdWRRsau5QXZttHHH1xZ/RAMmVkPv/zHxey4PKx+Dwunl5fxaU/XcWvV+7s0+YrsEcezJk9mzFPLWH0P/5B3rXXIH4/rW+/TdWChey88CIO3nMPrevXD/g+yVRq0sSRotzt3Y40dU0c2WkefG4XLaEogZB2svdx+DwuFlw+jpV3XMzsSYMJhKP87KUPuOxnq3lu4wEsq++/tP2TzmDIffcxdvUqShYtIm3CBKyGBur/toQ9n/s85bNmcfhXvya4Y4cmEZUyNHGkKHeukzjq6ruUi0hMc9WxRx2hiMXBBj3hGo8RBRk8fOO5/PUrn+D0wdlU1Qf45pPrmf3gayzffDAhCcSdk0PBF26k9OmljHn2GQpuuQVPURHhPXs58l//ReW8T1E+axY1P/0pgfff1/t7VFLp5bgpqnH5cqoWLCTz4osY+cgjXZ7bdrCRDJ+bYXl+PO6uuf9rT6xhxZZqli+4kNMH5yQy5H4hahmeWrOPX6/c2TF41sQ
hOSy8YhyXTyhO6D0YJhql5e23aVq+gqaVK4nW1nY85y4oIPP888m64Hwyzz8fT2FhwuJS/Zfex3GKJ462ykoqZs/BU1LC2NWrev260XcuA+Cm80Zx77xJfRRd/9cWibLkvX385pVd1DTZR3Zji7P48gVj+NQ5w0j3JvYeDBON0rp2LU0vvkTTKyuJHOjaeULaxAlknX8BGdOn4z/nbNxZenOoip8mjlM8cZholB1Tp2ECAca+9Sae/Pxeva49ccyeNJiHbzy3L0McEILhKH99Zy+PvFpOdaOdQAZl+rhxxihunDGKIudihUQyxhCqqKDl9ddpfv0NWt99F9MW02zpcpE2fjwZU6aQce4U/FOm4B08OOFxqlOPJo5TPHEANK54EU9xEf4zzkB8nR30vby1mmc2VHHZuEI+PXVkl9dc+JNX2FcbYMrIPJbeen6iQ+63QhGLZZsO8OjrlWyusi9Y8LiEyyeUcN204Vw0tuiYZsNEsYJBWtespeXNNwmsXUtgyxaIdB3oy1NURPrEiaRNnED6xIn4J07EM3Sodn+iutDE0Q8SR0/+8FoF/3fZNq7z1/GV1Y9RvPAOcq+aC0BNU5DpP1xJdrqH9xdfqV8MJ5kxhncqa3n09UpWbqum/bx5cXYanz13OPPOHsr4kuyk1rsVCBDYtInAunW0rl1HYP16rObmY9Zz5+aSNnYsvrIy0spK8ZXaU8/gwfq5GaB6mzh0UIdTUHvzyOHaZiIHDuLKzCC4bRu+sjKKs9N5+IYpjC3JTnKU/ZOIMKN0EDNKB1HdGOSf6/bz9zX7qTzSwsOrynl4VTmlRZnMmTSEOWcOYcKQxCcRl99P5vTpZE6fDoCxLML79hHcupXg1m3OdCvRujpa16yhtduPLFdGBt5Ro/ANH4Z3+Ai8w4fhGz4c7/DheIcNw5WentC/R6UePeJIYdH6eo4++hhWSwuD7/5+R/nrO49w46PvMLm2kh+/+hB586+nfslT5D72OHWjxjE4J72jaxLV94wxrNlTxz/X7mfFlkPUtXYO6zuyIINLxhdx8bgiZpQOIjMtNX6rGWOIVFfTtqucUEU5beUVtJXvIlReQfRDhsF1FxXiLS7BU1zsPIrwFBfjbV8uLMSdl4d4vQn6a9TJok1V/SFxNLfwwdSpiNfL+HVrO/4Rtx9qZNYvX2NEYzV//OBJ/GedReOyZWz8zgPcucvDJ88cwkM3TEly9ANTJGrxdkUtz28+yIrNhzja0tl9iNctTBtdwMyyQUwbXcBZI/ISfnVWb0Tq6gjv20do3z7C+6sI799PuGo/of1VhA8cOOb8SU9c2dm48/Nx5+fhyS9w5u1ld24u7uxsXFlZuLKycGdl4crOxpWVjSvDj7j0FrNk0KaqfsCdlYl3+HDC+/cT2ruXtLIyAAqdo4n69Cz8Z52Fd8RwAA4fbgAG0RgMs+jpTeRn+PjOv41PVvgDksft4oKxhVwwtpD75k1i4/56Vu84zKs7D7NhXz1vlh/lzXK7U0Wf28WZw3OZOjqfM4flMmloLiMLMnC5knt+wZOfjyc/H//kY7vtN9EokZoaIjU1hJ1ppOZwR1mkpprI0Vq7g86mJqymJsJ798YXgEjXhJKRgfj9uNLTEX86rnQ/Ln86kt5DmT8dSUtHfF5cPh94vYjziF12+Xz2jzGvV8/pxEkTR4rzlZUS3r+ftvLyjsSRn+HDhaHJl4lnwln4sjIAOFLfAjIIn9vFX97Zy6RhOZo4ksjtEqaMzGfKyHwWXjGOupYQb5Qf4b3KWt7dXcf2Q42s3VPH2j2dTUPZaR4mDM1h0tBczhiaw2nFWZQWZZKdnhrNPuJ24x0yBO+QIfhPsJ6xLKzGRiJ1dUTr6onW1xGtqyNSW2svN9RjNbdgNTURbW7Gam6251taMK2tHUmnd8c2J4HXi8tJKPi8uLw+8HoQtwdxu8DtsY+CPO1TN+JyIx43uNz22Cru7lMX4vZ0TLtsx+1GXALiApcLBLtcXOA
SZ166LdtlXV7nEjvpxawn6X5y536yT6tLE0eKSysto2X1q4QqKmh+9VXCBw6SP/96pjbtxd3chOuaK/FG7Tb1I80hyIaxJdms3F6jY3akmPxMH3MnD2Xu5KEANATCrNtTx7q9dWw50MjmqgZqmtp4t7KWdytru7y2ODuNsqIsyoozKS3MYni+n6F5fobn+8n1p94vZnG5cOfl4c7LgzHxvdZEIljNzUSbW7Cam7BaA5hgACsYxAoEMMEgViBolwWCWMEAJhDECnYrC4ftRyiMCYe6LXc+cB5WuPPc1KncC5y7sHBgJA4RuQ97nHELqAFuNsYcEJFLgGeB9sEKlhpjfpCcKJMjrawUgLYPPuDwLx8EIOvSS/jjDWcT2LSZgjMndnRFURuyz1eNK7HvGj7aEsIYk3JfKsqW6/dy6enFXHp6cUdZTVOQLQca2VLVwNaDjVQcbqHiSAs1TW3UNLXx1nHGDmnvfmZonp9h+X4G56RTmJVGYZaPwuw0irLSKMxKw+9LvfMpxyMeT2fSSQBjWZhIpDPBdCSWEFgWJhIFK9o5jUYhak9j5+2pBdEIJmphohFonx5vO8aAZcBYdt9j3ZctY5cZC2MZsCx7vn09y8KY7utZuDIz+7zOUiJxAP9pjPk+gIh8E7gb+Lrz3GvGmLlJiyzJfKV281Tj8y8A4B05Em9JCd6SEjJnzgRAiosRr5c6sZszRhRk4Pe6CYSjNLdFUqaZQ3244ux0isenc+n4zmQStQwH6gOUH26m/HALlUeaqaoLcKA+SFV9gOa2CDtrmtlZc+y9GrEyfG4Ks9LIy/CS6/eS4/eSk94+7yHX78yne8lM85CZ5ibD68Hvc5Phc+P3upN+/qUviMtl32Dr8wF9/6XbH6RE4jDGxPYdngn0v0u9PqK0slLSJk6gbes2ADKmT+t4rrYlxFvlR5lz5mBKFi2ieXsWBO0uMQZl+dhfF6C2JaSJ4xTndgkjCjIYUZDBJd1OWRljaAxEqKoP2I+6Vqqb2jjS1MaR5jaOtoSc+RCtoSh7a1vZW3v8/fSG3+skEZ+bTF9nUkn3uvG5Xfg89sPrdpHmzLeXe2OeT3O78HoEn9uN1y143ILb5cItgtvVvixdl9vnXS5cLvC4XPY6rvZye+oSwSX2PTftU3VypUTiABCRHwJfBBqAS2OeOk9ENgIHgO8YY7YkI75kcefmUrp0KXu/9jVaVr9K5vTpHPntIxiXizmHRnK4NcJLCy9i7Pzr+fPhZg43tTEs38+grDT21wU40hxi1CD9FdVfiQi5GV5yM7xMHNpzb8jGGBqDEY42t1EfCNMYCNMQCNMYjHTOO9OGQNgZ7yVCayjqPCIEwxaBcJRAOAotCfwjTwIROhMK0rEcOxXA5RJ7KvZJZ/t5ZxknGbnsbbSX0+V5Z3/OPrrGIM5zMesdZ33pfMGx659oW86zeRlefvfFD72i9mNJWOIQkZeB4/W0tsgY86wxZhGwSETuAm4HFgPrgFHGmGYRmQM8A4ztYftfBb4KMHLkyOOtcsoykQiBNWsBOLj4HkzAHm9j5oKHeHZ3hGWbDrKgJJvSoixKi+zzG2cPzyUrzf4VqJSIdDRFfVSWZQiE7UQSCEVpcRJLIBQlGI4SjlqEohZtEYtQ+yNqEXamoYjznDMfjplGLUPEMh1TK2Y52lFuYRmIWBbRqCFqOtdvX45EDZYxGLCnTtuFMdjr20snoUZTV2ECbv5NuRsARWQk8Lwx5pg+wUVkNzDVGHPkRNvoLzcAtgts3Mju6+eDx4Nv1ChC5eUA7P7rCr7x1CYAZpfl8G0qKB5RQs6sWckMV6mUYoxxzh/bU8v5zrO6lRsn0XRPPO1l3Zc71zEd57UBDF0TVntZ7HL35zrnO2NuX+58jen2upj5mPW9buHcUQUfqa5OqRsARWSsMWanszgP2O6UDwaqjTFGRKZjj1h47GUl/Vzr2nUAZEybitXY1FE+65wR3NkU5sGXd/JCeSMvUMgtm7d
wtyYOpTqICG6BmEYg9TGlROIAHhCR8diX4+6h84qqa4BviEgECADzTaodIiVAwU1fxDu4hMyLLqJx2fMcWryYrIsvRkT4+sVlzJ08hJseeYPy+hA7g/Yll8YYmtsiRKKG/Ezfh+xBKaV6L+Waqk6G/tZUFcsYQ8vrb+A/+yzc2Z094IaDbfxp7s2cfrSS899axbPbjrBwyUbmnT2UB+efk8SIlVKnilOqqUr1noiQdeEFx5R709P4X75GQoEGQpWV5GcMAuxLdpVS6mTSS276kY67zMsrOq6sOKLdjiilTjJNHP1I+13moYpyCpzzGrUtbSd6iVJKxU0TRz+SdloZnuJicLtjEkeI/ngeSymVPHqOox/JmTuX3Kuu6ljOSvPQ3BahMRj5WDd+KaVULD3i6Ee698kzKMs+6jja3NZxJ69SSn1cesTRD1ktLYQPHGDxVRMRhOKcdJ5ZX8W9/72Fe+dN4ppzhyc7RKXUKUyPOPqZmgcfZMfUaVReex1Ttr/FJeOLyErzEDWGllCU7/x9I99+aiNrdtdiWb0792GMoa2yUs+VKKUAvQGw37Ha2jh07w9oWLoUgOwrLifn6qvxn3kmS3YHWfzcVqJOwijJSWNmWSHzp43gE6Wd9300BcPkZ/rISfdiRaMcWnwPDf/4B3nXXceQH9ybtL9NKdW3ensDoCaOfsgYQ8PSpVT/8H6s1taO8syZMwk98Ev+vmY/z22o4mCjfanu3a3rudJUg8vNY95SHvPYl/Vm+twEw1FckQj5bU0MbjnKn+eMIPO8GeDxcNeqA0wcmsstF8Q5NqhSKiXpneMDmIiQ99nPkjljBvVPP0Pre+/RtnMnruxsTivO5q45E/j22bms+PQX+CBvBKfVbKcp0ACAp+xCSkrzacguoCVkd0IddXuoycjHZSwOLlrUsZ/l1/0cPd2u1MCjRxwDiAmF7CEyASsYJLBunT22ciRij4NsLHvsZMvgnzaVQM4g0n0uopbhcFMbtX/4A9kvL8OEQpholPIfPcLgUUM4Z2R+kv8ypdTJoE1VmjiUUiouvU0celWVUkqpuGjiUEopFRdNHEoppeKiiUMppVRcUi5xiMi3RcSISKGzLCLyKxHZJSLvi8iUZMeolFIDWUolDhEZAVwJ7I0png2MdR5fBR5OQmhKKaUcKZU4gF8A3wVirxGeB/zZ2N4G8kRkSFKiU0oplTqJQ0TmAVXGmI3dnhoG7ItZ3u+UKaWUSoKEdjkiIi8Dg4/z1CLg/2A3U33UbX8VuykLoFlEdnzUbQGFwJGP8fq+onHFR+OKj8YVn/4Y16jerJQSd46LyJnASqC9R77hwAFgOnAvsMoY86Sz7g7gEmPMwT6MZ01v7p5MNI0rPhpXfDSu+AzkuFKiqcoYs8kYU2yMGW2MGY3dHDXFGHMIeA74onN11QygoS+ThlJKqRM7FXrHfR6YA+zCPiL5UnLDUUqpgS0lE4dz1NE+b4DbEhzC7xK8v97SuOKjccVH44rPgI0rJc5xKKWUOnWkxDkOpZRSpw5NHDFEZJaI7HC6N7kziXGMEJF/ichWEdkiIt9yyu8RkSoR2eA85iQhtt0issnZ/xqnrEBEXhKRnc40oSM7icj4mDrZICKNIrIgGfUlIo+JSI2IbI4pO279JLI7nR7i+k8R2e7s+2kRyXPKR4tIIKbefpvguHp830TkLqe+dojIvyU4riUxMe0WkQ1OeSLrq6fvhsR+xowx+rCb69xAOVAK+ICNwMQkxTIE+6oygGzgA2AicA/wnSTX026gsFvZT4A7nfk7gR8n+X08hH09esLrC7gImAJs/rD6wb7o4wVAgBnAOwmO60rA48z/OCau0bHrJaG+jvu+Of8DG4E0YIzz/+pOVFzdnv8ZcHcS6qun74aEfsb0iKPTdGCXMabCGBMC/obd3UnCGWMOGmPWOfNNwDZS+275ecDjzvzjwKeSGMtlQLkxZk8ydm6MeRWo7VbcU/0krDud48VljHnRGBN
xFt/Gvn8qoXqor57MA/5mjGkzxlRiX2k5PdFxiYgA1wFP9sW+T+QE3w0J/Yxp4uiUkl2biMho4BzgHafodueQ87FENwk5DPCiiKwV+259gBLTeW/NIaAkCXG1m0/Xf+hk1xf0XD+p9Jm7BfuXabsxIrJeRFaLyIVJiOd471uq1NeFQLUxZmdMWcLrq9t3Q0I/Y5o4UpiIZAH/BBYYYxqxewYuA84GDmIfLifaBcaYKdi9Ft8mIhfFPmns4+OkXKonIj7gauDvTlEq1FcXyayfnojIIiAC/MUpOgiMNMacA9wB/FVEchIYUsq9b918jq4/ThJeX8f5buiQiM+YJo5OVcCImOXhTllSiIgX+4PxF2PMUgBjTLUxJmqMsYDf00eH6SdijKlypjXA004M1e2Hv860JtFxOWYD64wx1U6MSa8vR0/1k/TPnIjcDMwFbnC+cHCago4682uxzyWMS1RMJ3jfUqG+PMBngCXtZYmur+N9N5Dgz5gmjk7vAWNFZIzzy3U+dncnCee0oT4KbDPG/DymPLZt8tPA5u6v7eO4MkUku30e++TqZux6uslZ7Sbg2UTGFaPLL8Fk11eMnuonqd3piMgs7GEMrjbGtMaUF4mI25kvxR4LpyKBcfX0vj0HzBeRNBEZ48T1bqLiclwObDfG7G8vSGR99fTdQKI/Y4m4EuBUeWBfgfAB9i+GRUmM4wLsQ833gQ3OYw7wBLDJKX8OGJLguEqxr2rZCGxpryNgEHYnlTuBl4GCJNRZJnAUyI0pS3h9YSeug0AYuz35yz3VD/aVLg85n7dNwNQEx7ULu/27/TP2W2fdzzrv7wZgHXBVguPq8X3D7km7HNgBzE5kXE75n4Cvd1s3kfXV03dDQj9jeue4UkqpuGhTlVJKqbho4lBKKRUXTRxKKaXioolDKaVUXDRxKKWUiosmDjWgiMifROTlZMfRnYisEpE/JDsOpXpDL8dVA4qI5AIuY0yd80V9mjHmkgTu/3vAv5uYUS6d8gIgYrp1H6FUKkrJoWOV6ivGmIa+2K6I+Izdq/JHYozpbQ+xSiWdNlWpAaW9qUpE7sG+S/liETHO42ZnnSwReVDswYRanV5PPxOzjdHO+jeIyPMi0gLc53Tr8HsRKRd7YJ8KEblfRNKc190M3AeMitnnPc5zXZqqRMQrIg84MYTEHrjn893+FiMit4rIEyLSJCL7ReSubuvMc+JvFZF6EXlXRM7pg6pVA4gecaiB6qfYfQqNwe60DqDB6Qvov7G7argeOIDdP9HfRGS2MWZlzDZ+DPxv4DZnWbA7l/s8UA1MBh7B7rZiMXbHeKcDNwDTnNc09xDf/dhdnX8du4uXa4D/JyLV3WJYDHwPe/CjWcBvRORdY8xKERmM3VPw95xpOnY33BGU+hg0cagByRjTLCIBIGSMOdReLiKXAOdhj2/Q3qz1O6eDuP/A7g+o3SPGmL/Q1aKY+d0iUgbcCiw2xgREpBmIxu6zOxHJAL4JLDTGtHcRf7+ITHO2HxvDEmPM7535h0TkduxEtxJ7tDgv8JQxZrezzrae9qtUb2niUKqradhDB1fZBx8dfNgdyMU6pmdWEfkK8O/Yw4lmYv+PxdskfJqzv1e7la8G7upWtqHb8gE6B/F5H1gBbBaRl4BVwFJjzD6U+hg0cSjVlQtooLMpKVb3k98tsQsici12T6R3Yn/JNwLXAj88+WH2GJPBSVTGmKiIzMb+Wy7H7sX1ARG51hjzP30Yk+rnNHGogSwEuLuVrQHygHRjTLzjd1wErDddx1AZ3Yt9drcLaHO2FxvDxcQ5poixr7d/13ncLyLLgS8BmjjUR6aJQw1klcC1InIG9snsJuAV7PEMlorId7Gbe/KBmUAw5nzC8ewAviwi87C/4OfSeeI9dp+DReQ87KavVhMziBKAMaZVRH6FfaXWYTpPjs8DrujtHyciM4HLgBexx5YYi33C/tHebkOp49HLcdVA9ij2yI9vAoeBzzm/0K8GlgK
/ALYDy4BPYg+GcyKPYA9C9EdgPfAJ7KudYj2DfYXTMmef3+1hW4uwh039JXYSuhG4sdsVVR+mAftE/7PYSeox7HHF74tjG0odQ+8cV0opFRc94lBKKRUXTRxKKaXioolDKaVUXDRxKKWUiosmDqWUUnHRxKGUUioumjiUUkrFRROHUkqpuGjiUEopFZf/D1wBtVSUzb2uAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "#plt.rc('text',usetex=True)nn\n", - "#plt.xscale('log')\n", - "\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,linestyle='--',color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,linestyle='--',color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "plt.savefig(\"./error_fig/noise1/\"+figure_name+\".eps\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/convergence30S.ipynb b/convergence30S.ipynb deleted file mode 100644 index 59ba52f..0000000 --- a/convergence30S.ipynb +++ /dev/null @@ -1,1307 +0,0 @@ -{ - "cells": [ 
- { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/xiezhq/.wanghe_env/lib/python3.7/site-packages/torch_sparse/tensor.py:46: UserWarning: This overload of nonzero is deprecated:\n", - "\tnonzero()\n", - "Consider using one of the following signatures instead:\n", - "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:882.)\n", - " index = mat.nonzero()\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import networkx as nx\n", - "import copy\n", - "import pandas as pd\n", - "import xlwt\n", - "import torch\n", - "from torch import nn\n", - "import torch.optim as optim\n", - "from torch_geometric.utils import from_networkx\n", - "from torch.utils.data import Dataset, DataLoader\n", - "from torch_geometric.data import Data, Batch\n", - "from torch_geometric.nn.conv import MessagePassing\n", - "from torch_sparse import SparseTensor, matmul\n", - "import torch.nn.functional as F\n", - "import matplotlib.pyplot as plt\n", - "\n", - "num_nodes = 5\n", - "num_edges = 6\n", - "n = 100\n", - "m = 80\n", - "k = 16\n", - "train_num = 1000\n", - "test_num = 100\n", - "num_layers = 50\n", - "nnz = 8\n", - "\n", - "#less nnz =5; m = 50; k = 10\n", - "\n", - "def metropolis(adjacency_matrix):\n", - " num_of_nodes = adjacency_matrix.shape[0]\n", - " metropolis=np.zeros((num_of_nodes,num_of_nodes))\n", - " for i in range(num_of_nodes):\n", - " for j in range(num_of_nodes):\n", - " if adjacency_matrix[i,j]==1:\n", - " d_i = np.sum(adjacency_matrix[i,:])\n", - " d_j = np.sum(adjacency_matrix[j,:])\n", - " metropolis[i,j]=1/(1+max(d_i,d_j))\n", - " metropolis[i,i]=1-sum(metropolis[i,:])\n", - " return metropolis\n", - "\n", - "class SynDataset(Dataset):\n", - " def __init__(self, samples):\n", - " self.samples = samples\n", - " self.A = []; \n", - " self.y = []; \n", - " self.x_true = []\n", - " self.pyg_data=[]\n", - " 
self.process()\n", - " \n", - " \n", - " def gen_func(self, num_of_nodes, n, m, k):\n", - " A_all = np.random.randn(m, n)\n", - " x = np.random.randn(n)\n", - " x_norm = 0\n", - "\n", - " while(x_norm < 1e-2):\n", - " x_mask = np.random.rand(n)\n", - " x_mask[x_mask < 1 - nnz/100] = 0\n", - " x_mask[x_mask > 0] = 1\n", - " x_norm = np.linalg.norm(x * x_mask)\n", - "\n", - " x = x * x_mask\n", - " x = x/np.linalg.norm(x)\n", - " \n", - " SNR_db = 30\n", - " SNR = 10**(SNR_db/10)\n", - " \n", - " noise = np.random.randn(m) * np.sqrt(1/SNR)\n", - " y_all = A_all@x + noise\n", - "\n", - " A = np.zeros((num_of_nodes, k , n))\n", - " y = np.zeros((num_of_nodes, k))\n", - " for ii in range(num_of_nodes):\n", - " start = (k*ii) % m; end = (k*(ii+1) )%m\n", - " if(start > end):\n", - " A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0)\n", - " y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), \n", - " np.expand_dims(y_all[:end], axis = 0)), axis = 1)\n", - " else:\n", - " A[ii,:,:] = A_all[start:end,:]\n", - " y[ii,:] = np.expand_dims(y_all[start:end], axis = 0)\n", - " \n", - " x = np.expand_dims(x, axis = 0)\n", - " x = x.repeat(num_of_nodes, axis = 0)\n", - " \n", - " return A, y, x\n", - "\n", - " def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True):\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k = 0\n", - " while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False:\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k += 1\n", - " # print(\"Check if connected: \", nx.is_connected(G))\n", - " # nx.draw(G)\n", - " \n", - " edge_index = from_networkx(G).edge_index\n", - " adj = nx.to_numpy_matrix(G)\n", - " return G, adj,edge_index\n", - " \n", - " def process(self):\n", - " _, adj,edge_index = self.gen_graph(num_nodes, num_edges)\n", - " self.edge_index = edge_index\n", - " W = metropolis(adj)\n", - " 
self.W = [torch.tensor(W, dtype = torch.float)] * self.samples\n", - " \n", - " \n", - " for ii in range(self.samples):\n", - " A, y, x_true = self.gen_func(num_nodes, n, m, k)\n", - " self.A.append(torch.tensor(A, dtype = torch.float) ); \n", - " self.y.append(torch.tensor(y, dtype = torch.float) ); \n", - " self.x_true.append(torch.tensor(x_true, dtype = torch.float) )\n", - " \n", - " edge_weight=torch.tensor(W,dtype=torch.float)\n", - " self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) \n", - " \n", - " \n", - "\n", - " def __getitem__(self, idx):\n", - " return self.W[idx], self.A[idx], self.y[idx], self.x_true[idx], self.pyg_data[idx]\n", - "\n", - " def __len__(self):\n", - " \"\"\"Number of graphs in the dataset\"\"\"\n", - " return len(self.A)\n", - " \n", - " \n", - "def collate(samples):\n", - " # The input `samples` is a list of pairs\n", - " # (graph, label).\n", - " W, A, y, x_true, pyg_data = map(list, zip(*samples))\n", - " W = torch.stack(W)\n", - " A = torch.stack(A)\n", - " y = torch.stack(y)\n", - " x_true = torch.stack(x_true)\n", - " pyg_data = Batch.from_data_list(pyg_data)\n", - " return W, A, y, x_true, pyg_data\n", - "class MetropolisConv(MessagePassing):\n", - " def __init__(self):\n", - " super(MetropolisConv, self).__init__(aggr='add') # \"Add\" aggregation.\n", - "\n", - " def forward(self, x, pyg_data):\n", - " (B, N, D)=x.shape\n", - " out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1)\n", - " return out.view(B,N,D)\n", - "\n", - " def message_and_aggregate(self, adj_t, x):\n", - " return matmul(adj_t, x, reduce=self.aggr)\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - " di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 0, -1))\n", - " gamma_a = di ** power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - "\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " 
ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - "\n", - "\n", - "train_data = SynDataset(train_num)\n", - "\n", - "test_data = SynDataset(test_num)\n", - "train_loader = DataLoader(train_data, batch_size=32, shuffle=True, collate_fn=collate)\n", - "\n", - "test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-PGEXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0006986133785176207 tensor(0.0093, grad_fn=) tensor(0.0003, grad_fn=)\n", - "0.00012015691663691541 tensor(0.0041, grad_fn=) tensor(-0.0009, grad_fn=)\n", - "8.623260691820178e-05 tensor(0.0024, grad_fn=) tensor(-1.3205e-05, grad_fn=)\n", - "6.412451762116689e-05 tensor(0.0025, grad_fn=) tensor(0.0005, grad_fn=)\n", - "5.2622512839661795e-05 tensor(0.0026, grad_fn=) tensor(0.0002, grad_fn=)\n", - "4.851970754771173e-05 tensor(0.0026, grad_fn=) tensor(0.0003, grad_fn=)\n", - "4.4501045522338245e-05 tensor(0.0026, grad_fn=) tensor(0.0003, grad_fn=)\n", - "4.263982395968924e-05 tensor(0.0026, grad_fn=) tensor(0.0003, grad_fn=)\n", - "4.065322173119057e-05 tensor(0.0026, grad_fn=) tensor(0.0003, grad_fn=)\n", - "3.926794988728943e-05 tensor(0.0026, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.791956851273426e-05 tensor(0.0025, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.6790817716791935e-05 tensor(0.0024, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.5174559570805286e-05 tensor(0.0023, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.385303153891073e-05 tensor(0.0022, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.269620719947852e-05 tensor(0.0020, grad_fn=) tensor(0.0003, grad_fn=)\n", - "3.2758120028120175e-05 tensor(0.0017, grad_fn=) tensor(0.0001, grad_fn=)\n", - 
"3.170488179193853e-05 tensor(0.0016, grad_fn=) tensor(3.6092e-05, grad_fn=)\n", - "3.1306422158650093e-05 tensor(0.0016, grad_fn=) tensor(0.0001, grad_fn=)\n", - "3.0435381859206245e-05 tensor(0.0016, grad_fn=) tensor(0.0001, grad_fn=)\n", - "2.9783966169816267e-05 tensor(0.0016, grad_fn=) tensor(0.0002, grad_fn=)\n", - "2.9163896272166312e-05 tensor(0.0016, grad_fn=) tensor(0.0003, grad_fn=)\n", - "2.866159803716073e-05 tensor(0.0016, grad_fn=) tensor(0.0003, grad_fn=)\n", - "2.8137820720530726e-05 tensor(0.0016, grad_fn=) tensor(0.0003, grad_fn=)\n", - "2.7916246779113862e-05 tensor(0.0016, grad_fn=) tensor(0.0004, grad_fn=)\n", - "2.738468134566574e-05 tensor(0.0017, grad_fn=) tensor(0.0004, grad_fn=)\n", - "2.7339344853771763e-05 tensor(0.0018, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.6953913049965195e-05 tensor(0.0019, grad_fn=) tensor(0.0006, grad_fn=)\n", - "3.488612492219545e-05 tensor(0.0016, grad_fn=) tensor(2.8332e-05, grad_fn=)\n", - "3.235672357959629e-05 tensor(0.0016, grad_fn=) tensor(2.9870e-05, grad_fn=)\n", - "3.1560192837787326e-05 tensor(0.0016, grad_fn=) tensor(3.1639e-05, grad_fn=)\n", - "3.099829967823098e-05 tensor(0.0016, grad_fn=) tensor(3.3601e-05, grad_fn=)\n", - "3.069737016403451e-05 tensor(0.0016, grad_fn=) tensor(3.6181e-05, grad_fn=)\n", - "3.0231422272208874e-05 tensor(0.0016, grad_fn=) tensor(3.9083e-05, grad_fn=)\n", - "2.973949847273616e-05 tensor(0.0016, grad_fn=) tensor(4.1993e-05, grad_fn=)\n", - "2.9430349115955323e-05 tensor(0.0016, grad_fn=) tensor(4.5767e-05, grad_fn=)\n", - "2.9126389790690155e-05 tensor(0.0016, grad_fn=) tensor(4.9603e-05, grad_fn=)\n", - "2.9094566741605377e-05 tensor(0.0016, grad_fn=) tensor(5.2733e-05, grad_fn=)\n", - "2.8721103774387302e-05 tensor(0.0016, grad_fn=) tensor(5.6901e-05, grad_fn=)\n", - "2.8416127861419227e-05 tensor(0.0016, grad_fn=) tensor(6.0920e-05, grad_fn=)\n", - "2.8223288438766758e-05 tensor(0.0016, grad_fn=) tensor(6.4743e-05, grad_fn=)\n", - "2.814489499769479e-05 
tensor(0.0016, grad_fn=) tensor(6.9457e-05, grad_fn=)\n", - "2.7911388144730154e-05 tensor(0.0016, grad_fn=) tensor(7.3993e-05, grad_fn=)\n", - "2.759811161467951e-05 tensor(0.0016, grad_fn=) tensor(7.8793e-05, grad_fn=)\n", - "2.7677251125624025e-05 tensor(0.0016, grad_fn=) tensor(8.3144e-05, grad_fn=)\n", - "2.750415410446294e-05 tensor(0.0016, grad_fn=) tensor(8.7539e-05, grad_fn=)\n", - "2.7404297952671186e-05 tensor(0.0016, grad_fn=) tensor(9.3175e-05, grad_fn=)\n", - "2.73394338705657e-05 tensor(0.0017, grad_fn=) tensor(9.8559e-05, grad_fn=)\n", - "2.7145543526785332e-05 tensor(0.0017, grad_fn=) tensor(0.0001, grad_fn=)\n", - "2.7076275898707536e-05 tensor(0.0017, grad_fn=) tensor(0.0001, grad_fn=)\n", - "2.7235628579092008e-05 tensor(0.0017, grad_fn=) tensor(0.0001, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_PGEXTRA(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_PGEXTRA, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, 
num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - " \n", - "###main\n", - "model_PGEXTRA = Net_PGEXTRA(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_PGEXTRA.parameters(), lr=2e-5)\n", - "model_PGEXTRA.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.84,z, x_true)\n", - " #best 83\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_PGEXTRA.lam[1], model_PGEXTRA.step_size[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0007493804750993149 tensor(0.0093, grad_fn=) tensor(0.0016, grad_fn=)\n", - "0.00019100837835139828 tensor(0.0071, grad_fn=) tensor(0.0050, grad_fn=)\n", - "0.00015175550970525364 tensor(0.0096, grad_fn=) tensor(0.0059, grad_fn=)\n", - "0.00014586906490876572 
tensor(0.0103, grad_fn=) tensor(0.0058, grad_fn=)\n", - "0.0001485099460296624 tensor(0.0110, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.00014119377283350332 tensor(0.0110, grad_fn=) tensor(0.0057, grad_fn=)\n", - "0.0001406069532094989 tensor(0.0113, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.0001372662952690007 tensor(0.0113, grad_fn=) tensor(0.0058, grad_fn=)\n", - "0.00013631668252855889 tensor(0.0116, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.0001456634408896207 tensor(0.0123, grad_fn=) tensor(0.0051, grad_fn=)\n", - "0.00014277505692916748 tensor(0.0122, grad_fn=) tensor(0.0052, grad_fn=)\n", - "0.00014082989991948125 tensor(0.0122, grad_fn=) tensor(0.0052, grad_fn=)\n", - "0.00013989106741973956 tensor(0.0122, grad_fn=) tensor(0.0053, grad_fn=)\n", - "0.0001378425208713452 tensor(0.0122, grad_fn=) tensor(0.0053, grad_fn=)\n", - "0.000137180879164589 tensor(0.0121, grad_fn=) tensor(0.0054, grad_fn=)\n", - "0.00013501506759894255 tensor(0.0121, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.00013315437581695733 tensor(0.0121, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.0001342215166459937 tensor(0.0120, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.00013199667569097073 tensor(0.0121, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.00013163664652893203 tensor(0.0121, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.00012958295769749384 tensor(0.0122, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.000129243002675139 tensor(0.0123, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.00012820223810194875 tensor(0.0123, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.00012583143984556955 tensor(0.0124, grad_fn=) tensor(0.0054, grad_fn=)\n", - "0.00012156375146332721 tensor(0.0124, grad_fn=) tensor(0.0054, grad_fn=)\n", - "0.00011954254046031565 tensor(0.0124, grad_fn=) tensor(0.0051, grad_fn=)\n", - "0.00011653489264062955 tensor(0.0126, grad_fn=) tensor(0.0049, grad_fn=)\n", - "0.0001109400534460292 tensor(0.0126, grad_fn=) tensor(0.0047, grad_fn=)\n", - "0.00010294363391949446 
tensor(0.0124, grad_fn=) tensor(0.0049, grad_fn=)\n", - "9.636382651478925e-05 tensor(0.0125, grad_fn=) tensor(0.0049, grad_fn=)\n", - "9.15141322366253e-05 tensor(0.0125, grad_fn=) tensor(0.0049, grad_fn=)\n", - "8.622594282314822e-05 tensor(0.0125, grad_fn=) tensor(0.0048, grad_fn=)\n", - "8.106120276352158e-05 tensor(0.0125, grad_fn=) tensor(0.0047, grad_fn=)\n", - "7.694645091760322e-05 tensor(0.0126, grad_fn=) tensor(0.0045, grad_fn=)\n", - "7.202162339581264e-05 tensor(0.0125, grad_fn=) tensor(0.0043, grad_fn=)\n", - "6.829183996615029e-05 tensor(0.0124, grad_fn=) tensor(0.0042, grad_fn=)\n", - "6.482485616743361e-05 tensor(0.0124, grad_fn=) tensor(0.0039, grad_fn=)\n", - "6.163963905692071e-05 tensor(0.0123, grad_fn=) tensor(0.0034, grad_fn=)\n", - "6.722519697177631e-05 tensor(0.0123, grad_fn=) tensor(0.0030, grad_fn=)\n", - "5.9205009506513306e-05 tensor(0.0124, grad_fn=) tensor(0.0022, grad_fn=)\n", - "5.710730192731717e-05 tensor(0.0125, grad_fn=) tensor(0.0013, grad_fn=)\n", - "5.457678139464406e-05 tensor(0.0129, grad_fn=) tensor(0.0005, grad_fn=)\n", - "5.208276286339242e-05 tensor(0.0138, grad_fn=) tensor(0.0002, grad_fn=)\n", - "5.039135282913776e-05 tensor(0.0153, grad_fn=) tensor(0.0003, grad_fn=)\n", - "4.844232341838506e-05 tensor(0.0172, grad_fn=) tensor(0.0003, grad_fn=)\n", - "4.5950812364026206e-05 tensor(0.0195, grad_fn=) tensor(0.0002, grad_fn=)\n", - "4.474553759337141e-05 tensor(0.0221, grad_fn=) tensor(0.0002, grad_fn=)\n", - "4.2446028260201274e-05 tensor(0.0250, grad_fn=) tensor(8.6201e-05, grad_fn=)\n", - "4.1693557705002604e-05 tensor(0.0283, grad_fn=) tensor(0.0004, grad_fn=)\n", - "3.9606113659829134e-05 tensor(0.0306, grad_fn=) tensor(-4.5120e-05, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_DGD(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_DGD, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = 
nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " #x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " # self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1)\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - " #di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 
0, -1))\n", - " gamma_a = 1/ power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - "\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - " \n", - " \n", - "model_DGD = Net_DGD(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_DGD.parameters(), lr=2e-5)\n", - "model_DGD.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_DGD(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.907,z, x_true)\n", - " #best 905\n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_DGD.lam[1], model_DGD.step_size[1])" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\nclass Net_NIDS(torch.nn.Module):\\n def __init__(self, step_size, num_layers, num_nodes):\\n super(Net_NIDS, self).__init__()\\n self.step_size = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\\n self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\\n self.c = nn.Parameter(torch.ones(num_layers)*step_size)\\n self.num_layers = num_layers\\n self.conv=MetropolisConv()\\n \\n def tgrad_qp(self, A, b, x):\\n # A: nodes * k * n\\n # X: nodes * n\\n # Y: nodes * k\\n\\n x_ = torch.unsqueeze(x, axis = -1)\\n b_ = torch.unsqueeze(b, axis = -1)\\n\\n A_t = A.transpose(2,3)\\n grad_A = A_t @ (A @ x_ - b_)\\n grad_A = torch.squeeze(grad_A, axis = -1)\\n return grad_A\\n \\n def act(self, x, ii):\\n tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* self.step_size[ii]\\n return F.relu(x - tau) - F.relu( - x - tau)\\n \\n def 
forward(self, W, A, b,pyg_data, max_iter):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n ret_z = []\\n \\n k = 1\\n x_0 = init_x\\n x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\\n x_1 = self.act(x_12, 0)\\n \\n x_hist = [init_x,x_1]\\n \\n while (k < max_iter):\\n c = self.c[k]/(2*torch.max(self.step_size[k]))\\n #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n #print(W_hat)\\n temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\\n conv_result = self.conv(temp,pyg_data)\\n x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\\n #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\\n #x_32 =x_12 - x_1 + w@temp\\n x_2 = self.act(x_32, k)\\n \\n ret_z.append(x_2)\\n\\n x_0 = x_1\\n x_1 = x_2\\n x_12 = x_32\\n \\n\\n k = k + 1\\n x_hist.append(x_2)\\n \\n ret_z = torch.stack(ret_z)\\n return ret_z, x_2,x_hist\\nmodel_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\\noptimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\\nmodel_NIDS.train()\\nepoch_losses = []\\nfor epoch in range(500):\\n epoch_loss = 0\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\\n z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\\n loss = step_loss(0.83,z, x_true)\\n \\n optimizer.zero_grad()\\n loss.backward()\\n optimizer.step()\\n epoch_loss += loss.detach().item()\\n epoch_loss /= (iter + 1)\\n if(epoch % 10 == 0):\\n print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\\n'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "class Net_NIDS(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers, num_nodes):\n", - " super(Net_NIDS, self).__init__()\n", - " self.step_size = 
nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\n", - " self.c = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " \n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - "\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " \n", - " while (k < max_iter):\n", - " c = self.c[k]/(2*torch.max(self.step_size[k]))\n", - " #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " #print(W_hat)\n", - " temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " conv_result = self.conv(temp,pyg_data)\n", - " x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\n", - " #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\n", - " #x_32 =x_12 - x_1 + w@temp\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - "\n", - " k = k + 1\n", - " 
x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "model_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\n", - "optimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\n", - "model_NIDS.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.83,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\n", - "'''" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Origin Methods" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "def tgrad_qp(A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - " \n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " # print(A.shape, x.shape, b.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - "\n", - "def torch_soft(x, tau):\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - "\n", - "def opt_distance(x,opt):\n", - " error = 0\n", - " batch_size = x.shape[0]\n", - " num_of_nodes = x.shape[1]\n", - " error = np.linalg.norm(x-opt)**2\n", - " return error/num_of_nodes/batch_size\n", - "\n", - "def hist_nmse(x_hist,opt):\n", - " error = []\n", - " iteration = len(x_hist)\n", - " #print(iteration)\n", - " for k in range(iteration):\n", - " 
error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt)))\n", - " return error\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin PG-EXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.6121812090940075 \t 0.4883340015353042\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.6109607461113482 \t 0.48309061967754857\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.6095678030011478 \t 0.476906510771225\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.6035927633545652 \t 0.44243416698818505\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.6067563873250037 \t 0.42840827966261896\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.7973034541161106 \t 0.6869779006914469\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.5497892530757891 \t 0.43603226793956124\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.5470555619019506 \t 0.4275494741711755\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.5438668566773486 \t 0.41755433304936745\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.5268345618986132 \t 0.3627158631314542\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.522902399115992 \t 0.33923231854573715\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.7453494304011765 \t 0.6332978530419059\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.48819077434077185 \t 0.38729155274260124\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.483076687335968 \t 0.3740432388660338\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.477053257638363 \t 0.3585969371217725\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.4433072752229127 \t 0.27884910629515797\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.4295285306534807 \t 0.24852751830549097\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.6867067241000768 \t 0.5843936214451678\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.38736332571024107 \t 0.31118993067468\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.3743831398082257 \t 0.2828414242011495\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.35927135276622907 \t 0.2515928017287097\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.280540063211236 \t 0.12865522480315303\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.25005582813723776 \t 0.11077101216517303\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.5841959289071383 \t 0.5283634359483331\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 0.29168512966731397 \t 0.2391167252694213\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 0.25697513202019034 \t 0.17209783619524568\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 0.219897794468141 \t 0.11299406772177463\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 0.09149424355124619 \t 0.023685234617187686\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 0.08440953293198664 \t 0.0495090958652163\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.5208712276286751 \t 0.5144244942851656\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t 2.7772362438594522e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t 2.5884555069752845e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t 2.363942851178131e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t 1.0450044482654898e+23 \t inf\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t 2.806476899569432e+22 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t 0.5145184702836267 \t 0.5142058619987874\n" - ] - } - ], - "source": [ - "def torch_PGEXTRA(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 + x_12 - W_hat@x_0 - \\\n", - " step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_PGEXTRA(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " \n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - 
"outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.6282407981586876 \t 0.502789480182175\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.6272877297943996 \t 0.49794653698162433\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.6262156332784871 \t 0.4922667078634568\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.6221691908236899 \t 0.460967667287332\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.6267444088241974 \t 0.4494386707955145\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.8041094202755558 \t 0.6969039375269859\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.5711177319337439 \t 0.4538151008090226\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.5688216942610524 \t 0.4458950246293098\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.5661529576795946 \t 0.4366282253426489\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.5524852481475391 \t 0.3862438521709119\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.551204882723694 \t 0.36606067848042767\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.7564790800229021 \t 0.6470133915718834\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.5155653896161312 \t 0.40892600802291595\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.5111147628530235 \t 0.3964418256618974\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.5059080004416247 \t 
0.3820036568061987\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.4775475676337446 \t 0.3080030319370053\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.4681148203839548 \t 0.28141130459263697\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.7037066231439895 \t 0.6021947462488897\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.42774387155171284 \t 0.3410246005471872\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.41601259130775176 \t 0.3139733473578308\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.4024888103732447 \t 0.2843390251432738\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.33355759746940933 \t 0.16687443562132104\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.3099347048749787 \t 0.1497595417538996\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.6140665903422341 \t 0.5537692570758809\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 0.3506792358823077 \t 0.279646219951399\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 0.3182124388694065 \t 0.2148550928708737\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 0.28375789415877806 \t 0.1568216284365626\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 0.16290609056253924 \t 0.054220737654596174\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 0.1564879335265432 \t 0.0840162268982208\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.5651317725051486 \t 0.5550998774808322\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t 5.5192812765785205e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t 5.197617717212556e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t 4.811994516846711e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t 2.454035114205908e+26 \t 
inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t 8.458446330993355e+25 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t 0.5737927032052831 \t 0.5730847588472825\n" - ] - } - ], - "source": [ - "def torch_DGD(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 - step_size*tgrad_qp(A, b, x_1)\n", - " x_2 = torch_soft(x_32, tau * step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_DGD(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - 
"text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin NIDS" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\ndef torch_NIDS(W, A, b, max_iter, step_size,tau):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n c = 1/(2*step_size)\\n \\n (batch_size, num_of_nodes, dim) = init_x.shape\\n I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\\n I = I.repeat(batch_size, 1, 1)\\n \\n \\n #initialization\\n k = 1\\n x_0 = init_x\\n #print(alpha.unsqueeze(-1).shape)\\n x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\\n x_1 = torch_soft(x_12, tau*step_size)\\n \\n x_hist = [init_x,x_1] #add for plot\\n while (k < max_iter):\\n W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\\n x_2 = torch_soft(x_32, tau*step_size)\\n \\n x_0 = x_1\\n x_1 = x_2\\n x_12 = x_32\\n \\n k = k + 1\\n \\n x_hist.append(x_2)\\n \\n return x_2,x_hist\\nlams = [5e-4,1e-3, 5e-3,1e-2]\\ntaus = [1e-2, 5e-1, 1, 5]\\nbest_error = 100\\nbest_par = {}\\n#cs = [ 5e-1, 1,10,20,50,200]\\nfor lam in lams:\\n for tau in taus:\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\\n loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\\n loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\\n \\n print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\\n print(lam,\\'\\t\\', tau, \\'\\t\\',1/(2*lam),\\'\\t\\',loss1,\\'\\t\\',loss2)\\n if loss2 < best_error:\\n best_par[\\'lam\\'] = lam\\n best_par[\\'tau\\'] = tau\\n best_par[\\'c\\'] = 1/(2*lam)\\n best_error = loss2\\n'" - ] - }, - "execution_count": 
10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "def torch_NIDS(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " c = 1/(2*step_size)\n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " #print(alpha.unsqueeze(-1).shape)\n", - " x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,1e-3, 5e-3,1e-2]\n", - "taus = [1e-2, 5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "#cs = [ 5e-1, 1,10,20,50,200]\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\n", - " print(lam,'\\t', tau, '\\t',1/(2*lam),'\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_par['c'] = 1/(2*lam)\n", - " best_error = loss2\n", - "'''" - ] - }, - { - 
"cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd0lMXXwPHvbEnvCUlISKMjIB3poIiFIkXpoCCCCIivKBZQFBuCYkUQUGkqoCggCiL8REQBFZTeQ0lI773uzvvHJksq2YRUmM85e8ju02Yh7N2ZeeZeIaVEURRFUSylqekGKIqiKHWLChyKoihKuajAoSiKopSLChyKoihKuajAoSiKopSLChyKoihKuajAoSiKopSLChyKoihKuajAoSiKopSLrqYbUBU8PDxkYGBgTTdDURSlTjl8+HCslLJeWfvdlIEjMDCQQ4cO1XQzFEVR6hQhxBVL9lNDVYqiKEq5qMChKIqilIsKHIqiKEq5qMChKIqilMtNOTmuKIpJcnIy0dHR5OTk1HRTlFrC3t6eBg0aoNFUvN+gAoei3KSSk5OJiorC19cXW1tbhBA13SSlhhmNRsLCwoiNjcXT07PC51FDVYpyk4qOjsbX1xc7OzsVNBQANBoNXl5eJCUl3dh5Kqk9N5X4tWuJXry4ppuhKDckJycHW1vbmm6GUsvo9Xpyc3Nv6BwqcJQg5oMPiVv5GYYbjMqKUtNUT0MpqjJ+J1TgKELm5GBMTwfAmJFRw61RFEWpfVTgKMKQmmr+WQUORak6gYGB2Nra4uDggJeXFxMmTCC1wP+/qiKEwN7eHgcHB9zd3enbty8bN24stt+uXbu48847cXR0xN3dnbZt27Jw4UIyMzMBePXVV9Hr9Tg6OuLo6EjTpk2ZMWMGERERVf4eapoKHEUYk5PNP8u8XxBFUarGtm3bSE1N5d9//+XQoUO88cYbhbZLKTEajZV+3aNHj5KamsrZs2eZMGECM2bMYP78+ebt3377LQ899BBjxozhypUrxMXFsXHjRq5evUpoaKh5v5EjR5KSkkJ8fDybN28mMjKSDh063PTBQwWOIgzJKeafjRkqcChKdfD19eX+++/nxIkT9OnTh7lz59K9e3fs7Oy4ePEi4eHhPPDAA7i5udG4cWNWrlxpPrZ///4888wz5uejRo3i0Ucftei6Hh4ejB8/nmXLlrFgwQLi4uKQUjJr1izmzZvH5MmTcXNzA6BZs2Z8/PHHNGnSpNh59Ho9LVu2ZOPGjdSrV4/FN/nNNWodRxHGlII9DjVUpSjVITQ0lO3btzNs2DD27dvHunXr2LFjB82aNUNKSd++fWnVqhXh4eGcOXOGfv360ahRI+666y6++OILbr/9dgYMGEBERAR///03R48eLdf1Bw8eTG5uLn///TdBQUFcvXqVBx98sNzvQ6vVMnjwYHbu3FnuY+sSFTiKKNTjUENVyk3mdPMWpW7znj8f15EjAEjY+A2Rr7xS6r4tzpw2/3xp2INknjpV7HVLDBkyBJ1Oh7OzMwMGDGDOnDncf//9TJgwgZYtWwKmoPLnn3/y008/YWNjQ9u2bXnsscdYu3Ytd911F97e3ixbtoxHHnmEjIwMtmzZgqOjY7naodfr
8fDwID4+3nyst7e3efuoUaP4+eefyc7OZvny5YwfP77Uc/n4+BAfH1+u69c1aqiqCEOBHoeaHFeUqrVlyxYSExO5cuUKS5cuNa878fPzM+8THh6Om5tboWAQEBBAWFiY+fmgQYMwGAw0a9aMHj16mF9v2bIlDg4OODg4sG/fvlLbkZOTQ0xMDG5ubri7uwMUmqfYsGEDiYmJtG/fHoPBcN33FBYWZh7eulmpHkcRxgI9DjU5rtxsLO0RuI4cYe59lCXo++9upEklKrjWIP8bfEpKijl4hISE4Ovra95n7ty5tGjRgkuXLrF+/XpGjx4NwMmTJy263tatW9HpdHTu3BlXV1d8fX35/vvvC82dWMJoNLJt2zbuvvvuch1X16geRxGFexwqcChKTfPz86Nbt268+OKLZGZmcuzYMT7//HPGjRsHwO+//86qVatYu3Yta9as4cknnyzUG7me+Ph4vvrqK6ZPn87zzz+Pu7s7Go2GxYsXM3/+fFauXElCQgJSSs6fP09UVFSJ58nNzeX06dOMHj2ayMhIZs2aVWnvvzZSPY4iCvc41FCVotQG69evZ+rUqfj4+ODq6sr8+fO5++67SU5O5uGHH2bJkiX4+vri6+vLpEmTmDhxIjt37ix1lXSbNm0QQmBlZUWbNm14//33GTNmjHn7yJEjcXZ2ZsGCBTz99NNYW1vj7+/PlClTGD58uHm/jRs3smXLFqSU+Pj40K9fPw4fPoyPj0+V/53UJCGlrOk2VLqOHTvKitYcD3vuOZJ/2AaAx/Tp1HtyRmU2TVGqzenTp2nRovTJcOXWVdrvhhDisJSyY1nHq6GqIoyF7qpSPQ5FUZSiVOAowpBSYKhKzXEoiqIUowJHEQVTjqh1HIqiKMWpwFFEoR6HGqpSFEUpRgWOIgr1ONRQlaIoSjEqcBQgc3PNtThATY4riqKURAWOAgoOUwHIzKwaaomiKErtpQJHAcYigUNNjiuKohSnAkcB+T0OYWMDgFRJDhVFUYqpM4FDCHGfEOKsEOKCEOKFqrhGfo9D5+lpeq56HIpSZa5XOrassq1FXb58GSGEORNu/iO/JOyDDz7I5MmTCx0zdOhQZsyYwVtvvWXe38bGBq1Wa36en9q9YLlZX19fZs2aVWKW3AkTJqDT6W76CoBIKWv9A9ACwUBDwAo4CtxW2v4dOnSQFZG0c6c81ay5vDRmrDzVrLk82/mOCp1HUWqDU6dO1XQTrisgIEDu2rVLSinl1atXZcuWLeXzzz8vv/nmG+nk5CRXrFgh4+LipJRSnjlzRs6YMUOeO3euxHNdunRJAjInJ6fE7REREdLNzU3++uuvUkopN2zYIP39/WVKSkqh/VatWiW7d+9e7HhAnj9/Xkop5fnz56WPj49csWJFoX1SU1Olg4ODdHNzk4sWLSrH30T1K+13AzgkLfhMris9js7ABSnlRSllNrABGFzZF0lPSuGAd0siPf0B1eNQlOqSXzr2+PHj5S7baglvb28WL17M5MmTCQkJYebMmSxfvhwHB4dyn6tx48Z0796dI0eOFHr9u+++w8XFhXnz5rFmzZoKtbOuqCuBwxcILfD8at5rler9YCOvdZnIV1keAMisLKTRWNmXURSliPzSsXZ2dhUu21qWCRMm0KhRI9q3b899993HfffdV6HznDlzhn379tG4ceNCr69Zs4bRo0czatQozpw5w+HDhyuj2bXSTZNWXQgxBZgC4O/vX6FzNDMkAXb8ZtOA6TY2yMxMZGYmws6uEluqKDUn8IWfSt321tDWjLnD9H/n679CmLP5eKn7Xn57gPnngR/v40RYcrHXLVG0dOzo0aPZtGlThcu2enh4FHp+4MCBQllge/bsyS+//GKu5VEe+dX/0tPTGTVqFNOmTTNvCwkJYc+ePSxevBgvLy/69u3L2rVr6dChQ7mvUxfUlR5HGOBX4HmDvNfMpJQrpJQdpZQd69WrV6GLjH1uAhppJMXKnkv2pnOo4SpFqTpF
S8fmf/CXVba14AR4SEiIed/Y2FgSExPNj4JB4/z587z77rtMmzaNZ555hpycnHK19d9//yU1NZWNGzfy119/kZaWZt62bt06WrRoQdu2bQEYO3YsX3/9dbmvUVfUlR7HP0ATIUQQpoAxChhz/UPKz1qvo4WdkZMZGvZ6tiQwLlTdkqvcVCztEYy5w9/c+yjLj0/2vJEmFdKsWTOLyrbm332V7/Lly9c9r5SSxx57jP/7v/9j3rx5dO/enYULF/LSSy+Vq31CCEaMGMHWrVt57bXX+OCDDwBYu3YtISEh5p5Sbm4ucXFxbN++ncGDK306tsbViR6HlDIXmAHsBE4D30gpLSsmXE6T7mkFwEHv2wDV41CU6lSRsq2WWLZsGbGxscyZMweNRsPnn3/OokWLOHPmTIXO98ILL7By5UoiIyM5cOAAwcHB/P333xw5coQjR45w4sQJxowZw9q1ayvc5tqsrvQ4kFJuB7ZX9XXu7xDIc1tPc9nZhyuOXvgmJGFd1RdVFMXM0rKtJXFxcSn0/LXXXuOhhx5izpw5/Pzzz1hZWQFw22238cwzzzB58mR+//33UkvMlqZ169b06tWLd955h7S0NAYPHkzr1q0L7fPUU0/Rs2dP4uPjzXeH3SxU6dgSvPDtETYcDuOu0MPc2cCO8QufrcTWKUr1UKVjldKo0rFVYHrfpmikkd9829Kqa5uabo6iKEqtogJHCfzc7LgnNxyjRsuW8Nyabo6iKEqtogJHKfoSB8CpJANJ0XEcXLaWm3FYT1EUpbxU4ChFE2vT/ddnUyR3LvofM08Lwj5bVcOtUhRFqXkqcJTCy0aLQ3Y6yUYNrk72RNu58fGOk6Tu+6Omm6YoilKjVOAohdbOhqBk0+rV0Xe1QCD5rnEvDry8gKwLF2q4dYqiKDVHBY5SCBtbgpLCAcg1SEZ18idXo+PjRvdwZdJj5ISFlXEGRVGUm5MKHKXQ2F7rcZyJSOa5+5rjZqfnWL3G7NL7EDLpMQzJyTXcSkVRlOqnAkcpNHZ2BCXlBY7IFFztrXihv2nBzGdth6LtfSeaCuTyVxRFqetU4CiF3seHgJQohJRciE4lO9fIQ+0bMLSdL++Ou4OAF2YjNOqvT1FuxIYNG7jjjjuwt7fH09OTO+64g6VLlyKlZMKECQgh+Pvvv837X7hwoVB6kD59+mBjY0No6LVyPbt37yYwMLDUa7766qvo9XocHBxwcXGhW7duHDhwoEreX1ETJkzAysoKR0dHHB0dadWqFS+++CJJSUmF9ouIiGDy5Mn4+Pjg4OBAw4YNmTBhgjm3VtFSuV5eXgwcOJBdu3ZVy/tQn3yl0Pv5Y2PIxj8znlyj5L+QBDQawfsj29K3rb/5lzc3NpawZ57FkJhYwy1WlLpl8eLFPPXUU8yePZvIyEiioqL49NNP+fPPP8nOzgbAzc2tzAy29vb2vP766+W69siRI0lNTSUmJoYePXowbNiwEtdp5eZW/gLg5557jpSUFGJiYli1ahUHDx6ke/fu5jTtcXFxdOvWjfT0dPbt20dKSgr//vsvvXv3LhYYEhMTSU1N5ejRo/Tr14+hQ4eyevXqSm9zUSpwlELfwBc0GjqEm5Lw/no2utg+ZyKTuTrvFZJ/+okrEx8lNy6uupupKHVSUlIS8+bNY+nSpTz00EM4OjoihKBdu3Z89dVXWFubUos+8sgjHDt2jL1795Z6rpkzZ7J+/XqCg4PL3Q69Xs8jjzxCZGQkcXFxrF69mu7du/P000/j7u7Oq6++itFo5I033iAgIABPT08efvhhcw9h48aNBAUFkZw337ljxw68vb2JiYkp89o2NjZ06tSJH374gbi4OFatMq0Te//993FycmLdunU0atQIIQQuLi5MnDiRJ598ssRzeXt789RTT/Hqq6/y/PPPY6ziyqUqcJRCY2WF3tubThGnANhzpnDg+Oh/5+n/4T5+7D8Zq4AAsk6f5vKYMWSHhpZ0OkVRCjhw4ABZWVll
1qqws7Njzpw5zJ07t9R9fH19mTx5Mq+88kq525GVlcXq1avx8/MzF5H666+/aNiwIVFRUcydO5fVq1ezevVq9uzZw8WLF0lNTWXGjBmAqefSrVs3Zs6cSVxcHJMmTeKzzz6jPMXkHB0d6devH/v27QNMQ21Dhw5FU4Gh8GHDhhEdHc3Zs2fLfWx51Jm06jVB7+9Py7/+wV4H56JSuZqQTgNXUxnZ2xs4Y5Tw4YFweiz+FKeXZ5mCx+gx+K9cgY3KSqrUMqebV8/vZIszp8vcJzY2Fg8PD3S6ax9B3bp149SpU2RlZbFz507z648//jjvvvsuO3bsoEmTJiWe78UXX6Rx48acPGlZmZ5vvvmGH3/8ESsrK1q1asXmzZvN23x8fMzf7HU6HV999RWzZs2iYcOGACxYsIBWrVqxatUqdDodn3zyCbfffjt9+vRh0KBBDBw40KI2FOTj42OuUR4bG1uodO4PP/zAww8/jMFgoGvXrvzyyy/XPQ9AfHx8udtQHqrHcR1Wfn7opYEudqbx1oK9jj7NPJnQLZAcg2TWzst4frEKuy5dMMTGcmXceNIOHqypZitKrefu7k5sbGyhOYT9+/eTmJiIu7t7oaEWa2trXn75ZV5++eVSz1evXj1mzJjBvHnzCr3+1VdfmSeQ77//fvPrI0aMIDExkejoaH799ddCtcH9/PwKnSM8PJyAgADz84CAAHJzc82FpVxcXBg+fDgnTpwoVLXwrbfeMl976tSp1/37CAsLM9fscHd3L1Q694EHHiAxMZH333/fPPdzvfMAVV7/Q/U4rkPvb/oF6pITxf/w488LcYzvGmje/sL9zfnzQizno1NZuDeU+SuWE/7886Ts+Jn0w4ex79KlhlquKMVZ0hOoLl27dsXa2pqtW7fy4IMPlrn/xIkTWbhwId9//32p+8yePZuGDRvSuXNn82tjx45l7Nix5Wpb0aJOPj4+XLlyxfw8JCQEnU6Hl5cXAEeOHOGLL75g9OjRzJw5k59//hmAOXPmMGfOnDKvl5qayu7du83DcX379mXLli288sor5R6u2rx5M56enjRr1qxcx5WX6nFch5W/6VtGsyjTpNvxsMK3zNnotXwwqi16rWDdwSv8fDYO38WL8X3/PTymTav29ipKXeHi4sIrr7zCtGnT2LRpEykpKRiNRo4cOWK+u6ggnU7H/PnzWbhw4XXP+cwzz7Bo0aJKbevo0aN5//33uXTpEqmpqcyZM4eRI0ei0+nIzMxk3LhxvPXWW6xatYqwsDCWLl1q0XmzsrI4fPgwQ4YMwdXVlYkTJwIwa9YsEhISGD9+PMHBwUgpSUlJ4ciRI6WeKyoqiiVLljB//nwWLFhQofmR8lCB4zqs8noc3lfOYGelJSwxg7jUrEL7tPRx5sX7TWPH7/5yFoMEp/vvN39ryQkLI/K111TtckUp4rnnnuO9995j0aJFeHl54eXlxeOPP87ChQvp1q1bsf1Hjx5N/fr1r3vOp556Cq1WW6ntfPTRRxk/fjy9evUiKCgIGxsbPv74Y8A0t+Ln58cTTzyBtbU1X375JS+99BLnz58v9XyLFi3C0dERd3d3Hn74YTp06MD+/fuxt7cHwMPDg4MHD2JjY0OPHj1wdHSkbdu2pKSksGzZskLncnFxwd7entatW7N9+3a+/fZbHn300Up9/yVRpWOvw5CaxrmOHRFWVrw8bRn/XE5g9cRO9GnmWWg/KSVLfwtmeMcGeDraFHr9yrjxZBw+jM3tt9NgycfoPT2LXkZRqoQqHauURpWOrUJaB3u09TyQ2dncpjP1GI5dTSq2nxCC6Xc2LhY0hBB4z5uH3teXzGPHuDxiJBnHT1Rb+xVFUaqCChxlcB0+AoAGf5gmvEoKHAUZjZIFO07zyg8nkVJi06wpgd9sxLZ9e3IjI7kyZgwJ335b5e1WFEWpKipwlMH9sUnovLwIPG7KZXM87PqpRS7EpLLqj8usPXCFlfsuAqBzd8d/9Spcx4xG
5uQQ+fI8Il59taqbriiKUiVU4CiDxs4Oz1lP45MWh11uFlHJWUQmlT7R3dTLkcUj2gDw1vYzbDtqqumhsbLCe9486r+9AGFtjXXeYiJFUZS6RgUOCzgNHIhNUCC3xZp6EFuefJnELVtK3X9QGx9evL85AM98c5S/L11bxekyZAgNf9yG6/jx5tdUXQ9FUeoSFTgsILRaPJ6YSsdoU/6XgwYn4teuve4xU3o15OGuAWQbjExee4gL0SnmbVZ+fubbdbNDQgjudw8xHy9BGgxV9yYURVEqiQocFnLq35+uthkA/OvZjMzQsBLTMOcTQvDKoJbc3cKLpIwcXv+x5FW7aQcPYkhOJvaTTwiZMJGcvDQGiqIotVWtDxxCiFeFEGFCiCN5j/410g6dju6ffUwDRz3J1vac0zqTG3391MlajeDj0e0Ye4c/749sW+I+riNG4P/5Z2g9PEj/5x8uDRlK6nVSSCuKotS0Wh848rwvpWyb99heU43QubrSp6Vp5eohr+ZkXWd1aD5bKy1vDm2Nm70VYFrfkZVbeEjKvls3Gm7ZjH337hgSEgh9fCpRCxchy0hopiiKUhPqSuCoNXo1MeXZ/69eE7LOnyf98GFyExIsOtZolLzyw0keW3OoWPDQeXjgt3IF9Z6ZBVot8V9+SdalS5XefkWpTWpj6diyyraWJL8d+dlwHRwcGDRoEADbtm3D29u7UKrzrVu34uvry5UrVwodI4TA3t7e/Hzfvn3mcrMODg64ubnRr1+/Etvy22+/IYS4bj6vylJXAscMIcQxIcQXQgjXmmzIHUHuCCRnXf0J/+prrowdR5SFZSujU7LYfjyCfedjmbXxKAZj4TkSodHgMXkyAV+uo/78+dhUcYZLRalJtbF0bHnKtha1ZMkSUlNTzY9t27YBMGjQIO666y6efvppwFTu9YknnmDZsmUEBAQUOgbg6NGj5uc9e/YETHm9UlNTCQsLw9fXl0mTJhW7/po1a3Bzc2NtGTfuVIZaETiEELuFECdKeAwGlgGNgLZABLC4lHNMEUIcEkIcsqRsY0U52+lp7qInV6vjRKapvGWGhcVjvJ1tWD2xM47WOn46HsHsTcWDB4Bdu3a4DBtqfp68YwfhL7yIIe8XS1HqutpaOrYiZVst8dFHH7Fjxw527tzJ008/Te/evXnggQfKfR5bW1tGjBhRLFNuWloamzZt4pNPPuH8+fNURq6+66kVgUNKebeUslUJj61SyigppUFKaQRWAp1LOccKKWVHKWXH8pRtrIiuTUyJCo/VawRAztUwZE6ORce28nXm8wmdsNVr+f7fMJ7/7liJwSOfMTubqAVvk7RlC5cGDyE9r0qYotRltbV07I2Ubb0eDw8PPvzwQ8aOHcuPP/7IRx99VKHzpKWlsX79eho3blzo9e+//x4HBweGDx/Ovffey5o1ayqj2aWq9YWchBD1pZT55bCGAjWeJbBrCx+++CecY+6mwIHBQPbVq1gHBVl0fOcgN1ZN7MTEVf+w6fBVBLDwwdvRaESxfTVWVviv+oLw2c+ReeoUV8Y/jPuUydSbPh2h11fiu1JudoEv/FQt17n89oAy96mtpWNvpGzrzJkzefbZZ83Pn3zyyUJDaF26dCEpKYkRI0aUqyY5wLvvvsuSJUtITk4mICCArVu3Ftq+Zs0aRo4ciVarZcyYMcycOZP33nsPfRV9RtSKHkcZFgkhjgshjgF3Ak/XdIM6B7qZ5jncA6DV7QBkF6gQZokuDd35Iq/ncSYyhYyc0hf/WTdqROCG9bhPmQJSEvfpci6PHkPWRTV5rtRNtbV0rCVlW6dOnWo+51tvvWXe96OPPiIxMdH8KDrvMmXKFB5++GG2b99eaDLeEs8++yyJiYlcvnwZW1tbzp49a94WGhrKnj17zJUOBw8eTGZmJj/9VHVfFGp9j0NKOb7svaqXs52e23ycORmezIXb7qDxiWPklDNwAHRt5M66SZ0J8rDH3vr6/xTCygrPWU/j
0Ksn4c89T+aJE4TNmkXQ5u+LlbpUlJJY0hOoLrW1dKwlZVs//fRTPv30U4vPCfD5558TGhrKjz/+SMeOHXnsscf477//sLKyKtd5/P39+fDDD3nkkUcYOHAgtra2rFu3DqPRaL6LCyAzM5M1a9YwZMiQcp3fUnWhx1ErdQ4yFYM/4dQAKH+PI1/HQDfcHUwTgUaj5JM9F0jKKH2+xK5jR4K2bsF5yBDqz39VBQ2lTqqtpWMrUra1LOHh4cyePZuVK1dibW3N1KlTcXd3580336zQ+fr164ePjw8rVqwATMNUr7zyCkeOHDE/vvvuO7Zv305cXFyF2309KnBUUOdAU+A4YnQEIPvy5Rs+50e/nuednWcZufwA0SmlZ+DVOjri8/YCbNu0Mb8Wvfg9kq8z/qootU1tLB1bnrKtRc2YMaPQmoz84a9p06YxatQo8621QghWrlzJBx98YPGcTFGzZ89m0aJF7N27lytXrjB9+nS8vb3NjwceeIDGjRuzfv36Cp2/LKp0bAXFpGTR6c3d2OoE33w3GytHB5zuvx+HO/vg2KdPhc4ZlpjB+M//4mJMGv5udqye2ImG9RzKPC79n3+4Mv5hAJwGDcL7pblonZ0r1Abl5qFKxyqlUaVja0g9R2saetiTkSsJdvbFmJxM4saNxLz/QYXP6etiy7ePd+X2Bs6ExKczbNl+Dl2OL/M42w4d8Jo7F2FjQ/K2bVwcOEjlu1IUpcqowHEDOuUNV53wuFaUKeviRYvXdJTE3cGa9ZO70Le5J4npOYz57C9+OhZx3WOERoPb+HE03LIZ23btyI2JIfTxqYS/9JJaNKgoSqVTgeMGdMqbID/btjcaZ2c0zs6Qk3PDOabsrXUsH9+BcV38yc41subAZYzXWSSYzyowkIAv1+E5ezbCyoqkTd8Ru+STG2qLoihKUSpw3IA78gLHUZ0bjfbtw66jaWgw6+y5Gz63Tqvh9cGteHNoK5aP61Di4sCSCK0W90mPEvT9dzj07YvHtCduuC2KoigFqcBxA/zc7PBzsyU5M5fT0enYNGsKQNa5c5VSzU8Iwdg7AnDNS8meazDyyZ4LpGfnlnEkWDdujN8nS9A6OQFgzMwkdOoTKmWJoig3TAWOG9S1oTsABy7GYt3UFDhS9+3jfO8+RMwrf+6c63nnl7O8s/Mso1YcvO7tuiVJ+OorUn/7jSvjxhO1YAHGjIxKbZtSOxVcga0owHUrl1pKBY4b1K2RBwD7g+OwbmpKg5515gyG2FhS9vxaqdca0dEPPzdbjl1NYtjS/YXqmJfFbfx43Kc+DhoN8WvWcnHwENL++rvsA5U6y97enrCwMLKzsyvlw0Kp+/JTx9vY2NzQedQ6jhsUlZzJHW/9D3srLf+91JeLnTohs7LM25se+getQ9lrMSwVm5rFpDWHOBqaiJONjk/HdaBbYw+Lj884foKIuXPJOmeah3EZNRLPZ5+9crsrAAAgAElEQVSt1DYqtYPRaCQ2NpakpKRCOaGUW5uNjQ0NGjQoMQGipes4an2uqtrOy8mGhvXsuRiTxrHwFNwbNyazwGrQ7EuXsW3dqtKu5+FgzYbJXXhqw3/8ciqKh7/4m7eGtWZERz+Ljrdt3YqgTd8Su2IlscuXk7hhI/Z33IFTgQRwys1Bo9Hg6emJp6dnTTdFucmooapK0CPvG/++87G4T3oUh969zXdYVUYqkqJsrbQsG9eByT2DyDVKth0Nt+h23XzCyop6M6YT9N0m3CdPxvG++8zbbmQNiqIotwYVOCpBfh3y38/H4NS/P37LP8U2L09NdhXVDddqBHMH3MaHo9ryydj2Ft+uW5BN06Z4PjPLnCgx6/x5Ltx7r8p5pSjKdanAUQm6NHJHpxEcDU0kKd30jd0qMBAw9ThkFd7ZMritL042prHK7Fwjr/94ipiUrDKOKlnCho3khkcQNvMprj71f+TGxlZmUxVFuUmowFEJHKx1dAhwxSjhz2DTh61VYAAAGUePcqHv
3YQ//zxSSmKXLSOpigqsvLfrHJ//cYmhS//kfJTld1zl85o7B6+XX0LY2ZGycycXBwwk6Ycf1B05iqIUYlHgEEK4lbFdK4RoXzlNqpt6Nc0brjoXA2AuI5sTFkZuRATJu3aTfekSMR9+RNTbb1dJGx7tEUgbPxeuJmQwbOl+9p2PKdfxQqPBbexYGv7wA/bdumFISiL8uecJnTqVnMjIKmmzoih1j6U9jhghhPnWDCHEf0KIBgW2ewD/VGrL6pjeeYFjz9lojEaJ1sUFrYuLebtMTyd1zx4ADLFxyCq4PdLT0YYNk7vQv7U3KVm5TFj1D+v/Din3eawa+OL3+WfUf/NNNI6OpP/1d5W0V1GUusnSwFF05rUxULTm4S1diq6ljxP1nW2ISs7iWFgScG2eI1/yT9tNP0hJblzZ6dIrwtZKy5LR7XmiTyMMRsmL3x/nre2ny3XXFZjSnbg8OIyGP/2I73vvYdXA9D1BGo1kXbhQFU1XFKWOqMw5jlt6IFwIwT23eQHwy0nTsI7jPfegdXHBrlMnADJPnTLvnxtTvmGk8tBoBM/f15xFD96OTiO4GJNa4X8cvacnjnfdaX6euGkTFx8YTNTCRRjT0yunwYqi1ClqcrwS3dPSG4BfTkUB4P7oRJoc2I/TgAHF9s2NrbrAkW9EJz82TOnCh6Paoa3A7bolyY00vbf4VatUwShFuUVZGjgkhXsURZ8rQOcgN5xsdFyITiU4xlRASQiBdZPGxfatyh5HQR0D3bC3NiUIyMwxMP3rfzkdkVzh89Wb+SSBGzdi3aIFOeHhhD4+lbBZz6hbdxXlFlKeOY6DQohzQohzgD3wa4Hn+6ushXWIXqvh7ham4artBar2WTcuHjgMNfBBu3zvRX46FsFDy/az52x0hc9j27oVQd9+YyoYZWtL8vbtBPcfQOaZM5XYWkVRaitLc1XNr9JW3EQGtfHh+//C2HYsnCf7NgFA6+yMrl69Qr2M3JgYEtavR+viUm15oh7v3ZALMalsOxrOpNX/MP+BlozvGlihcwmdDvdJj+J47z1Ezn+N3OhorBs1qtwGK4pSK1kUOKSUKnBYqHtjD1zs9JyLSuVsZArNvB0BsG7SmNyYGKxbtCDr9Gkyjp8g4ev1aOztcbzvPnPaj6pko9fy0ai2BLnb8dGvF3h560kuxaYzd0CLCs+BWDVogN+K5RgSExF52TZzY2JI2PgN7pMeRWNrW5lvQVGUWuCGJseFED2FEA+WtUDwVmKl03B/K9Mk+baj4ebXnYcORd+gAW5jxwCQefw4AMa0NIxpadXWPiEEs+5pxuLhbdBrBV/8eYnH1x0iM6fiFQuFEOhcXc3Po955h9glS7g4cBApv+6pjGYrilKLWLpyfIYQ4qUir20F9gLfAueFEM2roH110qA2PgBs+CeU1CzTwjnnQYNovHuXOWtuQbnRFZ9vqKgHOzTgy0l34GKnx1qnxUpbeTfYuY4ajXWzZuSEhXF12jRCp00n+2pYpZ1fUZSaZemnxcOAeQmyEGIw0B8YD3QCzgNzKr11dVTXhu6093chNjWLpXsKL5bTetQrtn9NBA6AOxq6s3V6dxaPaGPOrlvehYIlsWvfjqDvNuE150U09vak/vorFwcOJPbT5Rizs2/4/Iqi1CxLA0cj4L8Cz/sDP0opv5JSHgbmAr0q2gghxHAhxEkhhFEI0bHItheFEBeEEGeFEPdW9BrVSQjBywNvA+CzPy5xJe7aUJTG3g5RZNy/pgIHQIC7PTZ6LQDp2bkMX36ArUduvHcgdDrcHn6Yhtu34zRgADIzk5gPPyTr3PkbPreiKDXL0sBhCxS8+b8L8HuB5+eBGykzdgIYVuScCCFuA0YBLYH7gKVCCO0NXKfatPN3ZVg7X7JzjTy14Qg5BlNqdSEEunqFex01GTgK2nY0nMNXEnhqwxEW/XymUnofei9PfBe/i/+qL6j31FPYtmpp3mZISrrh8yuKUv0sDRxXgdsBhBCumD7I
DxTYXo/CgaVcpJSnpZRnS9g0GNggpcySUl4CLgCdK3qd6vbKoJb4ONtwJDSRD3df+6at8yhcI7y6FgOWZURHP+Y/0BKtRrD0t2CmrDtsnqO5UfZdu+Ix9XHz89Q//uTCXX2JW7VaVR1UlDrG0sCxEfhICDENWAOEAn8X2N4RKOmD/0b55l0r39W81+oEZzs9749sC8AXf14iLe9DOL/HkZ89N6eW9DiEEDzSLZC1j3bG2VbP7tNRPLh0P6HxlZ+TKvX3vRjT0oheuJCLg4eQuu+PSr+GoihVw9LA8SamO6jexJQZd6yUsmBZu9HAdasTCSF2CyFOlPAYXKGWFz//FCHEISHEoZha8g0eTBPQHQNcSc82sOOEKflh/kpyp/6mhX+50bWnvWBai7Jlenca1bPnbFQKDyz5g7DEjEq9hvecOTT4dBlWAQFkX7xI6OTJhD4xjewrVyr1OoqiVD6LAoeUMlNKOUFK6SqlvE1Kub/I9j5SykVlnONuKWWrEh5br3NYGOBX4HmDvNdKOv8KKWVHKWXHevWK37lUkx7qYEpJ/t3hqwB4PD6FwO824TpuHFB75jgKCvKwZ/P07vRpVo87m3vi42xT6ddw7NOHoG0/4PnsM2js7Ejds8eUOPH338s+WFGUGlPbs+P+AIwSQlgLIYKAJhQeIqsT+t9eH2udhgMX4wiNT0dYWWHbsiU6T1Neq9yYmFpZntXJRs/nj3RiwbDW5pXtMSlZ5Boqr4a6xsoK98ceo+HPO3AeMgStiwu27W/pYpKKUutZlHJECLHCkv2klFMq0gghxFDgY0yT7D8JIY5IKe+VUp4UQnwDnAJygelSyoovca4hTjZ6+reuz+b/wpiz+TirJ3ZGqxFoHezR2NlhTE/HmJKC1smppptajFYj0GpMN7KlZuUy7rO/8HC04pMx7XGxK1rLq+L0np74vL0AQ3IyWgcHAIwZGYQ//wLukx/DtnXrSruWoig3xtIex2PAPZjmN5qU8iieAtZCUsrNUsoGUkprKaWXlPLeAtvelFI2klI2k1LuqOg1atrz9zXH3d6KfedjWfLrtUWBOk/TXcy1cbiqqKsJ6cSlZfPnhTiGfPInF6JTKv0aBYNn/Np1pPzyC5eHjyBs9nPkhIdf50hFUaqLpYFjK1AfMADLgH5SyjuLPO6qslbeBLydbfhwVDsAVvwebF7XUZcCR3NvJ36Y0Z2WPk5cjktn6Cf72XOm6trtOnYM7o9NQuj1JG/bRvB99xO9+D0MqalVdk1FUcpm6eT4UCAQ+A14GwgTQiwSQjSpuqbdfHo08aBhPXvSsg0cu5oIXAscWZcuVeic2VfDkLmVs9bCEj4utnw7tSsDWtcnJSuXR9f8w4rfg6tkjkbr4IDns8/ScMcO0+rz7GziVq4k+J57Sd6+vdKvpyiKZSyeHJdSRkgp38SUfuSRvD+PCyH2CCEq/5abm1SPxqbFf3+cjwNA38C0LCXq9TeIeOXVMo+XUpJx7BjGzEwyjh0j+O67iVp03RvaKp2dlY4lY9rx9N1NkRLe2n6GPy5UXWEqqwa++C5+l8CNG7Dt0AFDfDzC2rrKrqcoyvWV+64qafIz8ClwEOgJqMBhoW6NTIHjz2DTB637o4/iOn48wsqKxI0bSf/3v+sdTurevVweMZKYDz4k87Sp4l72heBi+xnT0qr0Ti0hBE/d3YRlY9vzaPcgc0CsSrZt2hDw5Tr8V6/C4a5rI6PxX35FxomTVX59RVFMyhU4hBD1hRBzhBAXgXWYAkczKWVilbTuJtS1oTsaAf+FJJCenYvWyQnvuXNwmzgRgPjVq697fMaRIwBkXbhgTlViSCk8SZ0TFcW57j2InF/19bfub12feYNuM9+ueyE6lb8uxlXZ9YQQ2HfpYr5eVnAwUQsWcPmhh7j69NMVHvJTFMVyltbjGCiE2AJcAvoAzwN+UsoXpJTFv+4qpXK209Pa15kcg+TvS/Hm113HjkHo9aTs3k3qvj+QpaQf
zw6+CJjWfuTGmgKHMblwmrCs8xeQmZlknjxVRe+iZCmZOUxZd4ixn/3Fmv2Xq2Vtis7DA7dHHkFYWZGy42cuDhxE+EsvkRMRUfbBiqJUiKU9jh+ANphSjqwG9MBwIcSYgo8qauNNp0sjdwAOXU4wv6b39MRp4EAwGgmdPJmLQ4dhSC6eNzLrYl7giI4mN9Y03FV0P2PeXUfGjMrPMXU9tnotd7fwItcoeeWHkzy36dgNVRa0hNbZGa/nZtPol524DB8OQNKm7wi+9z6i3l5YKxdWKkpdV56hqgBgPvBlKY91ld66m1Q7P1NywyOhhUf4vOa8iOv48ei8vckODiZqwduFtsucHHMuJ0NCArnhpm/VhpSUQh+Q+aVoZUZmlb2Hkui0Gub0b8GHo9pio9fw7eGrjFxxkIikys1zVRK9tzf1X3+Nhj9uw6l/f2R2NrnR0dVSy11RbjUWrRyXUtb21CR1Sjt/U33uo6GJGI3SXH1P6+iI99w5uI4exaWhw0javJmc8HD09euj92uAXcdOUODW26zzeanac3OR6ekIe3sAjGn5PY6q/8AuyeC2vjT2dGDK2sMcDU1k0Md/sGxcBzoFVn1peuugIHzfW4z75MfQOF5bTJj2999kHj+B65jRaIoU0lIUpXwsTTliUXU/KaXKTmcBLycb6jvbEJGUycXYVBp7Ohbabt2wIZ7PzSbq9TdI/+sv8+taV9dC+xWsY2FISUGTFzjyF8gZM6u3x1FQSx9ntj3Zgxlf/8v+4DguxaZVS+DIZ9OihflnKSXR7y4m89gx4r74wnQn2+hRaOzsqq09inIzsShwYFr4J4GS+v2ywJ+Wnu+W19bPhYikSP4NSSwWOADcxo7FoWdPsq9cIScsjMjX38CQkFDCmUwMycnovb0BMKbmD1VlIKWsseEaN3sr1j7amd2no7mvlbf59ZpoU70Z04n5eAmZx48T/c47xH3+Oe6PTsR19GhzwFUUxTKWDkH5Af55fxZ8BGFaSZ5JKenOlZK1LWWeoyArf38cevbEddQoHPv2Nb8ubIovmyl4Z1X+HAdSIrOyKqnFFaPTagoFjTORyQxZup/gmOpLGyKEwKFXLwK/2Yjf8k+xuf12DPHxRL+7mAt39yP98OFqa4ui3AwsTTkSVvQBtAO2A9OA14CmVdjOm07+PMeREMuWwLiOuXbTml2HDsW2G5KvreUwFsjlVFPzHKV5d+c5joYm8sDHf/DjsepNWiiEwKF3bwI3bsBv5Qps27RB5uZi3eRa5hxpqHPJlxWl2pV70lsI0V4I8SvwPfAr0ERKuVBKWbNfbeuY1r7O6LWCM5HJJKWXXXPb7o7O2LZrh9bdHfuePYptNyQnmX8uGDhkevXekluWD0a1ZeDt9UnLNjDj6/949YeTZOdWXn0PSwghcOjZk4AN6wna/L05I68xM5Pg/v2JWriInKioam2TotQlFgcOIYSfEOJL4B8gEWgppXxSSll1SYpuYrZWWtr5u2KUcMCCldZCCPxXr6Lx7l1YBQYW2559+TIXhw4jYcPGa0NV1OwEeUkcrHV8PLod8x9oiV4rWL3/MiNXHCC8kkvTWkIIgVWDBubnaX/+Sc6VEOJXreLC3f0If+klsi6qleiKUpSlK8ffBs5iSmzYS0o5TEp5vkpbdgvonpe3an+wZbFXY22NxtYWfV5GXQD0egBSftlF1unTJP24DUPBwJFeu4aqwPSB/Ui3QL55vCs+zjb8F5LI8E8PVHvPoyjHvn0J3LQJx/vug9xckjZ9x8UBA7j65Ewyjh2r0bYpSm1iaY/jOUx3TaUCrwghfinpUXXNvDn1aGJaQV7ezLK6AjXVrQL8AcjOy9FkiE8oPFSVWfsCR752/q78NLMnvZvWY/a9zbDS1fxyIdtWLWnwwfs02rEdlxEjEDodKbt2EfHSy2oVuqLksfT22bVcu+1WqSS3N3DBwVrHxZg0whMz8HGxbGGa1s0NtFowGLBu2MiUHTfvQ80QHw+a
ax/AtW1yvChXeytWT+xU6PbcnScjaeHthL97za2zsAoMpP5r8/GYMZ2Edeuwbtbc3Mbs0FBSdu3G5aEHa2W5X0WpapauHJ9Qxe24Jem1Gro0dGP36Wj+vBDL8I5+Fh0ntFp0Hh7kRkVh1TCo0DZDYqJ5+Apq51BVUQWDxrmoFGau/w8rrYY3h7XmgTY+NdgyUw4xz2eeKfRa/Lp1JKxdR8ySJbgMHYrb+HElzjspys2q5scGbnHdzPMc5UtF7jJyBHZdu2DXoWPxjQVWlBtr8VBVSbwcbbiruScpWbnMXP8fz206Snp29VU4tIRDz17Yde2CTE8n4auvCL6/P6GPTyXlt9/U7bzKLUEFjhrWo0leRcALseUaQ683bRoBq1ahc79+Gg9Zy4eqinK207N0bHveHNoKa52Gbw5dZdDHf3AqvHim4Jri0LMHAatWEbR1K84PPYjQ60ndu5erU58gupqrMSpKTVCBo4Y18XSgnqM1MSlZXIgu/2pqTRlj7MZqzpBbGYQQjL0jgB9m9KCJpwPBMWkMWfon245W74LBstg0a4rPG2/Q+Lc9eM5+Fr2/P04DBpi3p//7H2kHD6pJdeWmowJHDRNC0L1Rxe6uAlNG3eup7poclamZtyM/zOjB6M7+aATc5lM7J6J1bm64T5pEo593YNO6tfn1mA8/JGTCRC7e35+4zz4jJyq6BlupKJVHBY5aoFteve4/zpdvuApA4+gI10kYWNeGqoqytdKyYFhr/vdMHxrVcwBMSRIr8ndV1YRGY57ol1Ji17mTqbbK5cumvFh33knIlCkk79iBsYZziCnKjVCBoxbokRc4/ncmmnve/53TEZaP5wuNBo2D6QNVlFBnoi4OVZXEt8Ctyt8cCmXc538x7at/iU8rucRuTRNCUG/6dBrv3kWDZUtx7NcPtFrSft9H2NOzSPr++5puoqJUmAoctYCPiy1P9W2Cq52e89GpbPwntFzH568lsG3Zsti22r6OoyKsdVocrHXsOBHJvR/8zu5TtTevlNDpcLzzThp8/BFNft+L19y52LS5Haf77zfvE792LbHLlpmrOypKbacCRy3xdL+mvDeiLQBnI1PK2Luw/JXktp2u3Zor8ooU1eaV4xU1pJ0vO57qSedAN2JSsnhs7SFmrv+PuNTaPfyjc3XFbfw4gjZuROtiSqsvc3OJXbGSmA8/Ivje+7j00HDiVq0mJzKyhlurKKWrFYFDCDFcCHFSCGEUQnQs8HqgECJDCHEk7/FpTbazqjXzNk10n41KKdf4vff8V/FZtBD7Ll3Nr+k8TMNfdWEBYEX4udmxfkoXXh54G7Z6LT8cDadfOYf5agUh8HnzDZwHP4DGzo7MEyeIXriQC33u5PK4caQfOlTTLVSUYmpLxb4TwDBgeQnbgqWUbau5PTWivrMNjjY64tOyiUnNwtOxeMGmktg0a4ZNs2bXapBj6oXkhITUuuy4lUmrEUzqEUS/Fl688P0x4lKzzRPodYXQanHo3RuH3r0xzs8kde/vJG/fTupvv5Fx6HChGx8yz5xB4+BQKKOvotSEWhE4pJSngRorcVpbCCFo4e3E35fjORORYnHgyKd1u7YY0NzjqMO341rK392Orx67g7i0bHOixIS0bHaejGR4Rz+0mrrxe6WxscHp3ntwuvceDKmppO7di227dubt0YsWkbb/ANbNm+N499043t0X62bNbvn/N0r1qxVDVWUIEkL8J4TYK4ToWdONqWrm4apyznMApnHzvA+R/HkPeZMOVRUlhMDDwdr8/LUfT/HC98cZtvRPjl9Nus6RtZPWwQHnAQMQeQkrpZRoPTzQ2NuTdeYMsUuWcGnIUIL73UPkG2+SceJkDbdYuZVUW+AQQuwWQpwo4TH4OodFAP5SynbALOBrIUSJq8CEEFOEEIeEEIdiYmKq4i1Ui/zAcTqy/GP1Qqs1T7qaexw38VDV9dzdwgtvJxuOXk3igU/+YN7WEyRllF1psbYSQuC7aBFN
DuzHb8VyXIYPR+vuTs7VqyR8+SWZp64FjpywMLJDy3dnnqKUR7UNVUkp767AMVlAVt7Ph4UQwZhqmxebMZRSrgBWAHTs2LF2rQwrh+Y30OMA03CVISEBXb38oapbo8dR1IDb69O7WT0+3H2OL/68zNoDV9h+PII5/VswtJ1vnR3e0VhZ4dCrFw69euH96itkHD1G6r7fcejd27xP3Oo1JKxbh1VgIPa9emLfpSt2nTqWmWVAUSxVK+Y4SiOEqAfESykNQoiGQBPgYg03q0o1zQsc56NTycwxYKPXlut4q6BAsoODsW7UCKj7K8dvhIO1jrkDbuPBDg14ecsJ/rmcwDPfHqWVrzNNver+h6jQarFr3w679u2Kva5xdCT78mWyL18mYe060GqxadUS58GDcRszpoZarNwsasUchxBiqBDiKtAV+EkIsTNvUy/gmBDiCLAJmCqljK+pdlYHJxs9t9V3IjvXyL7z5c9dVf/11wncsB6bVq0AU4+jtqXmqG7NvZ345vGuLB7ehkndgwoFjdhavvajIrxeeJ6mB/YT8OU6PKY9gW379iAEmUePkVNgkWFOeDgxn3xC2sGDGNNv/psolMpTK3ocUsrNwOYSXv8O+K76W1SzBtxen1MRyWw/HkG/27wsOiYiKYNhS/czsXsgU3qZ7l4Wej0yJweZnY2wti7jDDc3IQQPdih8G+sf52OZtOYfHusZxBN9GuNgXSv+O1QKodNh17Ejdh07Um8mGFLTyDh8CF39+uZ9Uvf9QezHS0xPtFpsWrTArkN7bNu1x7Z9u8K17RWlgFrR41AKG9Da9J9716koMnMsKwz0+7kYIpIy+d/paxlY83NX3crDVdfz96U4snKNfLInmD7v7GHdwStk5xprullVQutgj0Pv3tg0bWp+zaZZU1wfHo9NXqqazBMniF+zlrD/+z+C770PmXutgFbWhQsYs2tnXjCl+t08X7FuIoEe9rTydeJEWDL7zsda1Os4kzeZnpp17T+7xtYWY3IyxowM891WyjWz7mlGn+aevPHjKf4NSeTlLSdYvjeYp/o2YWg7X3Tam/t7lW3btti2NfVOjWlpZBw7Rvq//5Jx+F+ErS1CZ/p4kDk5XHrwIaTRiE3Tpti0aoVt61bYtGqFdePG5v2UW4f6F6+lBrT24URYMpsOh1oUOM6WFDhsTAsIb5YMuVWhvb8r3z3RjR0nInlv1zkuRKcye9MxEtNzmNyrYU03r9po7O2x79oV+65di23LjY1F7+tL9qVLZJ48SebJkyRu3AiAsLbG98MPcOzTx7RvfDwaa2s09vbV2XylmqnAUUs92MGX93adZdepKMISMwqlFS/JuShT4EjJvBY48hMd3gqrx2+EEIL+retzb0tvfjgaxuo/LzOik595++XYNPzd7NDUkRXolU1fvz6Ntv+EITWVzFOnyDxxkswTx8k4cZKckJBCKVBil3xCwtdfo/f3x6ZZU6ybNsO6WVNsmjZF7+9vXtCo1G0qcNRSno423NeqPtuOhvP1X1eYfW/zUveNTc0iNtU0/pyaWXioCorPcaTu3UvsypX4Ll6M3suyyfdbgVYjGNquAUPbXfsgzMwxMHz5ARxtdEzt1Ygh7XzNaU1uNVoHB+w7d8a+c2fza4bExELli42ZmaDXkxMSQk5ICCm7dpu32Xfriv8XX5j2y84m5ZddWDdqiFVgoPl3VakbVOCoxR7uGsC2o+Fs+DuUmX2bYK0reU1HwcWC2Qajef1H/nBB9pUQ7DpeS7ke++lyMv77j+QdO3CfMKFK30NdFxqfjpVWw8WYNJ777hjv7TrHYz2DGNXZ/6a6C6uiis6d+bz1JvXnv0rWpUtknT1H1rmzZJ49S9aZs+j9/M37ZV++TPizz5qf6318sGpoCiJW/n449e9vzn6g1D7qN78W6xjgSov6TpzOuzW34Dfhgs4UWWWempWLjV6L4z39SNu3j9ilS3EaNBCNlRWG5GQyjh0DIDv4pl5LWSma
eDny2+w+/HQsgmW/BXM2KoU3fjrNx79eYHyXAJ7s27jUgH6rEnq9aRK9aVNgoPl1abh2h6AQAsd+d5N18RLZV66QEx5OTng4aX/8AYB99+7mwBH97rukHzmCVQM/9H4NsPL3R9+gAXpfX3QeHmr4qwaowFGLCSF4pGsAL3x/nDX7r5QaOM4WyWuVmpmLh4M1LsOGkbB2HVnnzxO3fAUeM6aT9tdfkPcfOOvSRTJOnCRu+XK8571sToyoFKbXahjSzpfBbX3YczaaZb8F88/lBHaejOSZe5qWfQIFMK1oz2fdpAkNPv4YMN21lX31KtmXLpkeV6+i9/U175tx9BgZhw6b0swX4dCnD36fLgPAkJJC3IoV6OrXR+9dH319b3Te3mhdXOpsipnaSgWOWm5wW/NNmWIAABw1SURBVF8W7DjDkdBEjoYm0sav+G2156JSAVNiXCmvTZALrRbP52YTOnkKsZ98QsaJ42hs7czHZV+8RNzyT0nZtRub21vjMXly9bypOkoIwV3NvbiruReHLseTkWMwfyCFxqczZd1hxnXxZ0hbX+zVMJbFhF6PdVAQ1kFBJW73WbSQ7CtXyA4JISf0KtmhoaY5lIgIdAXm6HKuXiVu5WfFz29ri65ePXzff89cXjnt4EFywsLQeXigq1cPrYcHOnf3QsFNKZ367a7lbK20jOzkx4rfL7Jy30WWjGlfbJ+rCabJb383O67EpZOSdS0LrEPPntR/802iFi4kbe/vhY4zxMeTtv8AADmhV6vwXdx8Oga6FXr+7aFQTkckM3fzCRZsP8PA2+vzUIcGdAhwVd92b5C+fn309etj36VLsW3SeG3BptbFBY+ZT5IbEUlOZCQ5EeHkRkRiTEsjJySk0AR80ubNJG39ofDJNBq0bm7Yd+2K7zuLTOc3GIhd9ilaVxd0rq5oXV3RuriY/nR1RXOLZmRQgaMOmNAtkNV/Xuan4xH8X3QKjT2v5VrKyjUQm5qFViNo6GFvChwF7qwCcHlwGPY9enB12jQyT55E6+yMzteHrFOnMaalAZAdGlKt7+lmM+OuJjTydGDtgSscvpLAhn9C2fBPKA097BnbJYBJPUr+Nq3cmILzG/r69ak3bVqxfQwpKeTGxGLV4Nrwl12nToAgNzaW3JgYcmNjMcTHY4iNxZCYeO3Y5GRilywp9fq+7y3GqX9/AJJ//pnknTvROjqhdXJE4+CIxsnR9NzVFYce3c3HGTMyEDY2dfZLhQocdYCPiy3DOzbgq79C+Oh/F/ho9LVsqJFJpsV93k42ONvqgcK35ObTe3kSsG4tscuWYdO6NSm7d5N16rR5e06Iqt9wI6x0Gga39WVwW1/OR6Ww6d+rfP9vGBdj0zgRdq2QVGaOgcwcAy52VjXY2luL1tGxWEp5l4cewuWhhwq9JnNyyI2PR+YUWAul0eD+xFQMiYkYEhIxJCSYH7mJiYXuKss8dZqUHT+X2AadpydNft9rfn7hnnswxCegdXREk9c+jb09mv9v787j26quBI7/jmRb8p44dhbHiZ3EWUhIKOAADVsDDBAIhC60LJ0JEIYyhQGmdCghzJROp3yaGTq0HTosHbZhaFnatEDpsCWk0JJAFrITE2f3kjiLd9mSJd354z3LsmMllhdJjs7389FH0tPz09GTrKN3733nZmSQ++VryLn8cgC8u3bTtPw9HBkZODIy7esMHJnWdbzO3NfEMUR8e24pr6zdzxubqrljbmlowqeqequZqnCYm2y3nTi8xyYOAEdGBiPvvRcA366uI6raa2qsYohp+oXWX5NHZbN43in846VT+XDHYUbmdDZnrNhey12//pTzJudz5cwxXDp9NLkZqXGMVnWQ1NRjzmty5uYy8u67e1zfGGN1Ktpyr5qPa+oUgk1NBBqbCDY1EmhqItjYhCMrq+sft/shELASUn094VOMZZw1O3S77bNtHPrJf0SMecrHq3Hm5gKw79a/xVtRweSV7/fyFfedJo4hYuyw
dK4/azz/s2ovS9/azjM3WR+u6nrriKNwWDpZbuvtbGo78Ux3aRPCymmIQDBIe00NacXFAx98kkpxOpg7rWuF2YraZoLGsLL8ECvLD/GAczPnleZz8SmjuGjaSApPUCFAJQ4RCU3VDNZIMdfkyb362ymrV2F8PgLNzZ2JxuMh6GnpMkjANWECI25dRKClBePxWOu02NceT5d+m2BTU5dENpg0cQwhd108md+uq2TF9lpW7zrCORNHUB064kgnuyNxRDjiCOeaaH04xeXCPWMGrevX49u3XxPHILvr4snccPZ43t56gD9urmHVziO8X36I98sPcVpRLq/deR5g/ZoNGutsdnVykrQ0UvLyIC8v4jru6dNxT5/eq+0V/+pFjDc288to4hhC8rNc3HbBJB5973N+vnwH50wcQU1DZ+Lo+LXRUx9Hd2mlpeQtXEhq8Xjatm2zEod2kMdEfpaLG88u5saziznc7GXFZ7Us336QsuLOL5Dyg03c8MuP+eKkEZw7KZ9zS0cwPi9jyHamqsEnTmeoPt1g08QxxNx8XglPfbCTj3Ye4bOaRqo6mqpy3TTaTVTdR1X1REQYtfh+AA4/+RRwbAd5e20tKQUF+mU1iPKzXHx99rguRRUBVu88wtEWH29uquHNTTWA1Vw5Z9IIzi3NZ/6sMSd92XeVuPSTN8TkuFO5tsz6knn2L7u7NlW5jt85HknaeGt7vsrOxFH/m99QccGFNLz22kCEraK0cE4JK+69kB9ecyrzTh3NsIxUqupbeXVdJQ+9sbVLE9ay9ZWs31eH19+7Sb+U6i894hiCFs4p4flVe/j9hurQjHWFw9JpaO044jhx53i41CIrcbSuW0/1A0so+Ps7OfqrXwHQ8uGfGXbNNV3WN8boUcggExEmFmQxsSCLvz6nmGDQsK2mkY92Hsbj6zxjvdUX4L7fbMIfNKQ5HcwsyuUL44YxozCHGYW5TCrI1CMTNeA0cQxBE/IzuWz6aN7aegCALFcKOe6Uzs7xXjRVhUsrKUZSUwnU1dGwbBnezz8PnePRVr69y7qtmzez/1u3M+qBB8idf+UAvBrVGw6HcOrYXE4dm9tleYvPz9fOLGLd3jp21Dazbm8d6/bWhR7/rxvP4Ap7KuLyA020+PxMGZWtlX1Vv+inZ4i699IpocThCwStaqN9bKpyZmcz/tlnaCsvp/aRn9C2ZUvoMd/uPQS93lBphcq77yZw9CjV3/2uJo4EkJ/l4sdfnQVAg6ed9fvr2FzZwLbqRrbWNDCjsHOujKf/vItX1lqlZcbkuikdmcWkgixKR2YxozCH08cPj8trUEOPJo4havKobGaOzWVzVQN59lnIWX084gDIKCsjo6wM/+HDHHn8CWthaiq0t+OtqAgVh/NX1wzMC1ADLjcjlblTRzJ36sgeHy8cls600dnsOtRCTUMbNQ1tfLjjMADnT87nhUVnA9DY1s7iZZsZn5fR5TIm163NXgrQxDGkPXPTbB56YyvX2SNyOpofmr3+PvdDjFi0iMbX30DcblylpTS9/Tbe7eWhxNEhpXBM/1+Aiql7LpnCPZdMIRA07D/qoaK2mYpDzVTUNnPKmM4jk31HPKGRXOGcDqFwmJtf3HAGs4qsUhvr99VxuMnLmNx0RuW6yM90Je0Uu8lEE8cQVpDt4hdh1XLTUhy4Uhx4/UHa2oOkp0VfItqZlcXEN/8ADgdHn33OShyflwN0Kf6WMjzySUsqsTkdQkl+JiX5mVzCsVMHFw5L56ff+AL7jnpCl/1HPRxobGP/0VZy3J0lUl5YtZfffVoVup/iEEbluBmV42J2SR6LrzgFgEDQ8IdN1eRnuRiRlUZeZhp5GWl6BDNEaeI4yWS7U/E2e2lqa+9T4gBwuN0AuKdNBaBtu5U42so/D60T9Hj6GalKVHmZaVxz+thjlre1B6iqb6VoeGeZixmFOdR7fBxo9HKgoZU6TztV9a1U1beGim4C1Hl83P3ShmO2OSwjlbzMNP51
wanMKbVm/PtwxyFW7zpCbnoqOe5U6zrdus5NT2VcXmxOclORaeI4yWS7Uzjc7KXJ66fnlu7ec021E8eWLTStXEn7vs4zy4PNzf3cuhpq3KlOJhV0LdZ36/kTufX8zrpnbe0BDja2caChrcsPl6AxXDlrDEeavRxp9nGkxUedx0e9p516TzvBsBJLf6k4whN/2tljDGNy3axafHHo/kWPrMTrD5LpcpKRlkKmy0lmWgqZrhSuPq0wVCtsz+EW/lxxGHeqE1eK45jrGYU5oaOfZq8fpwiuFIc2u0WgieMk09HP0dga3bkcPUkZNYq00kn4KnZSefvfdXksYM/joVQ4d6qT4hGZFI/I7LJ8ZLa7S7MqWM1XdR4fdS2+LsUdL5iST0aak8bWdhpa22lss69b/eRnd504qaq+Fa8/SE9mFOaEEseG/fU8+PstPa4HsOUHl5FlJ45Fz63h491HAUhzOnCldiaYy2eM5sH5Vu2oyjoP33l5IylOIdXpIDV07SDFKdwxtzSUaJd/dpD1++pCj6c6hRSHtd7wjDSuOq0wFMsfNlUDVrOfQ4QUp33tcDCxIDO0r+pafFTWteJ0iH2xpjnuvu8HgyaOk0xJfiabqxr4aOeRfg+vFBFKXnqZ+lde4dB//iemtTX0mPF4MH5/XOYCUCcHp0PIz3KRn9U1GcyZlM+cSfm92sZH91+ExxegxeenxRvAY1+3eP3MLOo852VcnlVd2usP4G0P0tYesPsCrWtXStiEUM7OvkJfwLp0jFSs83T+IGv2+vlkz9GIsd1w1ngosG5/uOMwz320p8f1Jo/M6pI4vvPyRnyBnpPhP8+fzi32pGArttdy76sbuzw+IjONdf/0VxFjGigJ8V8vIv8OXAX4gJ3AzcaYevuxxcAiIADcZYx5O26BDgFfOX0sb2ys5tW1+/n2lyb1+wxvZ1YmI265GfeMGexbuBBJTw8lkGBLS2guAKXiYUSWixG9WO/M4jzOLO7dgI7/vdUalmyMwesPWpf2AG3tQdxpnQlm3PAMXrrtHPwBQ3sgaF8M/mAQnz9ISX7nL/+500YyIjON9qC9rt9aP2AMBVnuLs8/b+Zo67GgCV389nX4kVlOeiozCnM61zOGYemxmdtFTIzqtx83CJFLgRXGGL+ILAUwxnxPRKYDvwbOAgqB94ApxpjjFuUpKysza9euHeywE5I/EOTcpSs42Ojl1du/yOySgRv95Nu7l2BbG/tv/zv8NTWULn+P1LHHdqIqpYYmEVlnjCk70XoJMRbOGPOOMabjrLXVQJF9ewHwkjHGa4zZDVRgJREVQYrTwVfOsHbfy2sGdjrYtOJi3FOn4rRnMws0az+HUskoIRJHN7cA/2ffHguEf/tV2suOISK3ichaEVl76NChQQ4xsX2jbBwi8NqGqlD13IHUMQ1msLlpwLetlEp8MUscIvKeiGzp4bIgbJ0lgB94MdrtG2OeMsaUGWPKCgoKBjL0IackP5P5swppDxiejDCssT86E4cOyVUqGcWsc9wYc8nxHheRm4D5wMWms+OlCgif4abIXqZO4M65pbyxsZpfr9nP9WePZ9ronBP/US85sqxOv4AmDqWSUkI0VYnI5cB9wNXGmPBTkl8HrhMRl4hMACYDn8QjxqFm6uhsrj6tEJ8/yNceX8WHOwau+c6ZlQ1AUPs4lEpKCZE4gMeAbOBdEdkgIk8AGGO2Aq8A24C3gDtONKJKdVr61VlcOWsMzV4/97y0gZYoy61Hon0cSiW3hDiPwxhTepzHfgT8KIbhnDTS05w8dv3pVNe38um+ep79y27uvGhyv7erTVVKJbdEOeJQg0RE+MdLrZpTT36wi3qPr9/bdIaOOLSpSqlkpIkjCcwpzefc0hE0tfn5wRvb+r09R0cfR5M2VSmVjDRxJIl/WXAq7lQHv/u0KlREra9CfRwt2lSlVDLSxJEkJhVkseRKq6rn936zia3VDX3elvZxKJXcNHEkkW+ePZ6rTiukxRfg
lufWcKChrU/bcWbrcFylkpkmjiQiIjxy7SzOmpDHwUYv3311I8Fg9EUuQ01V2sehVFLSxJFkXClOHrvhdIZnpPLnisM8+cEuoq2Q7Mi0ixxqH4dSSUkTRxIame3m4S/PBGDpW9v5+pOroiqG6LT7OLSpSqnkpIkjSc2bOYalX51JXmYaa/bUcfOza2hs6910s5KRAQ4HprUV097/KWqVUkOLJo4k9o3Z41n+nQuZVJBJ+cEmFj23hpqGEx95iEjYkFw96lAq2WjiSHLDM9N47uazKMh2sWZPHZc9+gG/XVd5wn6Pjilj/UfrYhGmUiqBaOJQjMvL4M27zuOiaSNpbPNz76sbWfT8WmqbIg/X7Zgytr2qMlZhKqUShCYOBVgd5k8vLOORa08jx53Ciu21XPboB7ywag9e/7EFiVOL7MRRqYlDqWSjiUOFiAhfO7OId/7hQs6fnE+dp51/em0r5y99n0ff/ZxDTd7QumlF1vxavv2aOJRKNpo41DFG57p5/uazePzGM5g2OpvaJi8/W76Dc5euYPGyTazbW0fK2CJAjziUSkYS7clfQ0FZWZlZu3ZtvMM4KRhjWL3rKM/8ZTfvbjsYWp7vdjBzxxqmu9q59Af3ctq4YXGMUik1EERknTGm7ITraeJQvVVR28Qrayt5fUM1Bxo7O87nnTqax795ZhwjU0oNhN4mjoSYAVANDaUjs3ngilNYPG8a2w808sdv3UdFegFzr/ybeIemlIohTRwqaiLCKWNycZkavBs/oCTv+niHpJSKIe0cV32WOs4aWdVeWRXnSJRSsaSJQ/VZ57kc++MciVIqljRxqD4LncuhQ3KVSiqaOFSfpRYV4cjMjHcYSqkY085x1WdZX7qQKWvXICLxDkUpFUOaOFSfiUMPWJVKRvqfr5RSKiqaOJRSSkUlIRKHiPy7iGwXkU0i8jsRGWYvLxGRVhHZYF+eiHesSimV7BIicQDvAqcaY2YBnwOLwx7baYz5gn25PT7hKaWU6pAQicMY844xxm/fXQ0UxTMepZRSkSVE4ujmFuD/wu5PEJFPReRPInJ+vIJSSillidlwXBF5Dxjdw0NLjDGv2essAfzAi/ZjNcB4Y8wRETkT+L2IzDDGNPaw/duA2wDGjx8/GC9BKaUUCTQfh4jcBHwLuNgY44mwzkrgu8aY4062ISKHgL39CCcfONyPvx8sGld0NK7oaFzRORnjKjbGFJxopYQ4AVBELgfuAy4MTxoiUgAcNcYERGQiMBnYdaLt9eaFnyCetb2ZzCTWNK7oaFzR0biik8xxJUTiAB4DXMC7dvmK1fYIqguAfxGRdiAI3G6MORq/MJVSSiVE4jDGlEZY/lvgtzEORyml1HEk4qiqRPBUvAOIQOOKjsYVHY0rOkkbV8J0jiullBoa9IhDKaVUVDRxhBGRy0WkXEQqROT+OMYxTkTeF5FtIrJVRO62lz8kIlVhtbuuiENse0Rks/38a+1leSLyrojssK+HxzimqWH7ZIOINIrIPfHYXyLyjIjUisiWsGU97h+x/Nz+vG0SkTNiHFfca8RFiCvi+yYii+39VS4il8U4rpfDYtojIhvs5bHcX5G+G2L7GTPG6MVqrnMCO4GJQBqwEZgep1jGAGfYt7Ox6ndNBx7COo8lnvtpD5Dfbdm/Affbt+8Hlsb5fTwAFMdjf2GNBDwD2HKi/QNcgVUlQYBzgI9jHNelQIp9e2lYXCXh68Vhf/X4vtn/AxuxRmBOsP9fnbGKq9vjPwH+OQ77K9J3Q0w/Y3rE0eksoMIYs8sY4wNeAhbEIxBjTI0xZr19uwn4DBgbj1h6aQHwvH37eeCaOMZyMVZhzP6cANpnxpgPgO5DxiPtnwXA/xjLamCYiIyJVVwmAWrERdhfkSwAXjLGeI0xu4EKrP/bmMYl1jkDXwd+PRjPfTzH+W6I6WdME0enscD+sPuVJMCXtYiUAKcDH9uL7rQPOZ+JdZOQzQDviMg6scq8AIwyxtTYtw8A
o+IQV4fr6PoPHe/9BZH3TyJ95hKtRlxP71ui7K/zgYPGmB1hy2K+v7p9N8T0M6aJI4GJSBbWeSz3GKs+1+PAJOALWHW8fhKHsM4zxpwBzAPuEJELwh801vFxXIbqiUgacDXwqr0oEfZXF/HcP5FI5BpxpwPfAX4lIjkxDCnh3rdurqfrj5OY768evhtCYvEZ08TRqQoYF3a/yF4WFyKSivXBeNEYswzAGHPQGBMwxgSBXzJIh+nHY4ypsq9rgd/ZMRzsOPy1r2tjHZdtHrDeGHPQjjHu+8sWaf/E/TMnVo24+cCN9hcOdlPQEfv2Oqy+hCmxiuk471si7K8U4CvAyx3LYr2/evpuIMafMU0cndYAk0Vkgv3L9Trg9XgEYrehPg18Zoz5j7Dl4W2TXwa2dP/bQY4rU0SyO25jda5uwdpPC+3VFgKvxTKuMF1+CcZ7f4WJtH9eB/7GHvlyDtAQ1tww6KSzRtzVpluNOBFx2rd7XSNuAOOK9L69DlwnIi4RmWDH9Ums4rJdAmw3xlR2LIjl/or03UCsP2OxGAkwVC5YIxA+x/rFsCSOcZyHdai5CdhgX64AXgA228tfB8bEOK6JWKNaNgJbO/YRMAJYDuwA3gPy4rDPMoEjQG7YspjvL6zEVQO0Y7UnL4q0f7BGuvzC/rxtBspiHFcFVvt3x2fsCXvdr9rv7wZgPXBVjOOK+L4BS+z9VQ7Mi2Vc9vLnsGrmha8by/0V6bshpp8xPXNcKaVUVLSpSimlVFQ0cSillIqKJg6llFJR0cShlFIqKpo4lFJKRUUTh0oqIvKciLwX7zi6E5GVIvLf8Y5Dqd7Q4bgqqYhILuAwxtTZX9SlxpgvxfD5HwRuNcaUdFueB/hNt/IRSiWihJhzXKlYMcY0DMZ2RSTNWFWV+8QY09sKsUrFnTZVqaTS0VQlIg9hnaV8oYgY+3KTvU6WiPxMrMmEPHbV06+EbaPEXv9GEfmjiLQAP7TLOvxSRHaKNbHPLhF5WERc9t/dBPwQKA57zofsx7o0VYlIqoj82I7BJ9bEPTd0ey1GRL4tIi+ISJOIVIrI4m7rLLDj94hIvYh8IiKnD8KuVUlEjzhUsnoEq6bQBKyidQANdi2gN7BKNXwDqMaqT/SSiMwzxiwP28ZS4HvAHfZ9wSoudwNwEJgFPIlVtuL7WIXxpgE3ArPtv2mOEN/DWKXOb8cq8fI14H9F5GC3GL4PPIg1+dHlwGMi8okxZrmIjMaqFPygfe3GKsPtR6l+0MShkpIxpllEWgGfMeZAx3IR+RLwRaz5DTqatZ6yC8T9PVY9oA5PGmNepKslYbf3iMgk4NvA940xrSLSDATCn7M7EckA7gL+wRjTUSL+YRGZbW8/PIaXjTG/tG//QkTuxEp0y7Fmi0sFXjHG7LHX+SzS8yrVW5o4lOpqNtbUwVXWwUdIGlYBuXDHVGYVkb8FbsWaTjQT638s2ibhUvv5Pui2/E/A4m7LNnS7X03nJD6bgLeBLSLyLrASWGaM2Y9S/aCJQ6muHEADnU1J4bp3freE3xGRa7Eqkd6P9SXfCFwL/Gjgw4wYk8FOVMaYgIjMw3otl2BVcf2xiFxrjPnDIMakTnKaOFQy8wHObsvWAsMAtzEm2vk7LgA+NV3nUCnpxXN2VwF47e2Fx3AhUc4pYqzx9p/Yl4dF5C3gZkATh+ozTRwqme0GrhWRGVid2U3ACqz5DJaJyH1YzT3DgTlAW1h/Qk/KgUUisgDrC34+nR3v4c85WkS+iNX05TFhkygBGGM8IvJzrJFah+jsHF8A/FVvX5yIzAEuBt7BmltiMlaH/dO93YZSPdHhuCqZPY018+NHwCHgevsX+tXAMuBRYDvwJnAl1mQ4x/Mk1iREzwKfAmdjjXYK93usEU5v2s95X4RtLcGaNvWnWEnom8A3u42oOpEGrI7+17CS1DNY84r/MIptKHUMPXNcKaVUVPSIQymlVFQ0cSillIqKJg6llFJR0cShlFIqKpo4
lFJKRUUTh1JKqaho4lBKKRUVTRxKKaWioolDKaVUVP4fX84W/eWKF0UAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "val_data = SynDataset(test_num)\n", - "val_loader = DataLoader(val_data, batch_size=100, shuffle=False, collate_fn=collate)\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 500,0.005,0.5)\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 500,0.005,0.5)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 200,0.005,0.01)\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n", - "\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", 
- "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# PLOT" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"\\nfor iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\\n _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\\n \\n original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, num_layers,0.002 \\t 2 )\\n original_DGD, original_DGD_hist = torch_DGD(W, A, y, num_layers,0.001,0.05)\\n original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\\n\\n\\norigin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\\norigin_DGD_error = hist_nmse(original_DGD_hist,x_true)\\norigin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\\npred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\\npred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\\n\\n#plt.rc('text',usetex=True)nn\\n\\nx = [i for i in range(num_layers+1)]\\nplt.plot(x,origin_DGD_error[:num_layers+1])\\nplt.plot(x,origin_PGEXTRA_error[:num_layers+1])\\nplt.plot(x,origin_NIDS_error[:num_layers+1])\\n\\nplt.plot(x,pred_DGD_error[:num_layers+1])\\nplt.plot(x,pred_PGEXTRA_error[:num_layers+1])\\n\\n\\nplt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \\nplt.xlabel('iterations',fontsize= 'x-large')\\nplt.ylabel('NMSE',fontsize= 'x-large')\\n\\nplt.show()\\n\"" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist 
= torch_PGEXTRA(W, A, y, num_layers,0.002 \t 2 )\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, num_layers,0.001,0.05)\n", - " original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "\n", - "#plt.rc('text',usetex=True)nn\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,origin_DGD_error[:num_layers+1])\n", - "plt.plot(x,origin_PGEXTRA_error[:num_layers+1])\n", - "plt.plot(x,origin_NIDS_error[:num_layers+1])\n", - "\n", - "plt.plot(x,pred_DGD_error[:num_layers+1])\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1])\n", - "\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "plt.show()\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [], - "source": [ - "test_data = SynDataset(test_num)\n", - "test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate)\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(test_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 500,0.005,0.5)\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 500,0.005,0.5)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 200,0.005,0.01)\n", 
- "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": {}, - "outputs": [], - "source": [ - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "writer_error=pd.ExcelWriter(\"./error_fig/noise1/\"+figure_name+\".xls\")\n", - "df_error= pd.DataFrame({'PG-EXTRA':origin_PGEXTRA_error,'DGD':origin_DGD_error})\n", - "df_error.to_excel(writer_error,sheet_name='Origin')\n", - " \n", - "df_feasibility= pd.DataFrame({'PG-EXTRA':pred_PGEXTRA_error,'DGD':pred_DGD_error})\n", - "df_feasibility.to_excel(writer_error,sheet_name='GNN')\n", - "writer_error.save() " - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd0FNXbwPHv3ZbeKykkgdCrgChNUERQmoB0aRZAQFT42UAp6iuCooIKKKJUAQWkF0UQUCkCAtIhCWmQTnrPzvvHkiUJ6aRs4v2cs8fMzsyduxF49tZHKIqCJEmSJJWWqrorIEmSJNUsMnBIkiRJZSIDhyRJklQmMnBIkiRJZSIDhyRJklQmMnBIkiRJZSIDhyRJklQmMnBIkiRJZSIDhyRJklQmmuquQGVwdnZWfH19q7sakiRJNcqpU6diFEVxKem6Whk4fH19OXnyZHVXQ5IkqUYRQgSX5jrZVSVJkiSViQwckiRJUpnIwCFJkiSViQwckiRJUpnUysFxSZIMEhMTiYqKIisrq7qrIpkIKysrvLy8UKnK326QgUOSaqnExEQiIyPx9PTEwsICIUR1V0mqZnq9nvDwcGJiYnB1dS13ObKrSpJqqaioKDw9PbG0tJRBQwJApVLh5uZGQkLC/ZVTQfWpVeJWryZq4afVXQ1Jui9ZWVlYWFhUdzUkE6PVasnOzr6vMmTgKET054uIXb6cnPuMypJU3WRLQyqoIv5MyMBRgJKVhT41FQB9Wlo110aSJMn0yMBRQE5ysvFnGTgkqfL4+vpiYWGBtbU1bm5ujB07luQ8f/8qixACKysrrK2tcXJyonv37mzcuPGe63799VceffRRbGxscHJyonXr1syfP5/09HQA5syZg1arxcbGBhsbGxo2bMiUKVO4detWpX+G6iYDRwH6PH9wlYyMaqyJJNV+O3bsIDk5mdOnT3Py5Ek++OCDfOcVRUGv11f4c8+ePUtycjJXrlxh7NixTJkyhblz5xrP//TTTzzzzDOMGDGC4OBgYmNj2bhxI2FhYYSGhhqvGzp0KElJScTFxfHzzz8TERFB27Zta33wkIGjgJzEROPPssUhSVXD09OTJ598kvPnz9OtWzdmzpxJp06dsLS0JDAwkJs3b9KvXz8cHR3x9/dn+fLlxnufeuoppk+fbjweNmwYzz33XKme6+zszKhRo1i6dCnz5s0jNjYWRVGYNm0as2bN4sUXX8TR0RGARo0a8cUXX9CgQYN7ytFqtTRr1oyNGzfi4uLCwoUL7/M3YtrkOo4C9EmyxSFJVS00NJTdu3czcOBAjhw5wpo1a9izZw+NGjVCURS6d+9O8+bNuXnzJpcvX6ZHjx7Ur1+fxx57jO+++46WLVvSu3dvbt26xYkTJzh79myZnt+/f3+ys7M5ceIEfn5+hIWFMWjQoDJ/DrVaTf/+/dm3b1+Z761JZOAoQJ+cdPdn2eKQaplLjZsUec597lwchg4B4PbGH4mYPbvIa5tcvmT8OWjgINIvXrzn/dJ4+umn0Wg02NnZ0bt3b2bMmMGTTz7J2LFjadasGWAIKn/++Se7du3C3Nyc1q1b88ILL7B69Woee+wx3N3dWbp0KWPGjCEtLY2tW7diY2NTpnpotVqcnZ2Ji4sz3uvu7m48P2zYMPbu3UtmZiZff/01o0aNKrIsDw8P4uLiyvT8mkZ2VRWQk7fFcWcQTJKkyrF161bi4+MJDg5myZIlxnUn3t7exmtu3ryJo6NjvmDg4+NDeHi48bhv377k5OTQqFEjOnfubHy/WbNmWFtbY21tzZEjR4qsR1ZWFtHR0Tg6OuLk5ASQb5xiw4YNxMfH06ZNG3Jycor9TOHh4cburdpKtjgK0CflGeNIl11VUu1S2haBw9AhxtZHSfy2bL6fKhUq71qD3G/wSUlJxuAREhKCp6en8ZqZM2fSpEkTgoKCWL9+PcOHDwfgwoULpXretm3b0Gg0tG/fHgc
HBzw9PdmyZUu+sZPS0Ov17Nixg8cff7xM99U0ssVRQE7S3a4qJV12VUlSdfP29qZjx468/fbbpKenc+7cOVasWMGzzz4LwOHDh/n+++9ZvXo1q1at4uWXX87XGilOXFwc69atY/Lkybz55ps4OTmhUqlYuHAhc+fOZfny5dy+fRtFUbh27RqRkZGFlpOdnc2lS5cYPnw4ERERTJs2rcI+vymSLY4C8g6OyxaHJJmG9evXM3HiRDw8PHBwcGDu3Lk8/vjjJCYmMnr0aL788ks8PT3x9PTk+eefZ9y4cezbt6/IVdKtWrVCCIFOp6NVq1Z89tlnjBgxwnh+6NCh2NnZMW/ePF577TXMzMyoW7cu48ePZ/DgwcbrNm7cyNatW1EUBQ8PD3r06MGpU6fw8PCo9N9JdRKKolR3HSpcu3btlPLmHL/5zjskbDI0vV1emYrzSy9VZNUkqcpcunSJJk2KHgyX/ruK+rMhhDilKEq7ku6XXVUF6BPzzqqSg+OSJEkFycBRQN7puEqGDBySJEkFycBRQN7puLLFIUmSdC8ZOArQJ8kWhyRJUnFk4Cgg73Rc2eKQJEm6lwwcBeRtcejlOg5JkqR7yMCRhz4zEyUz03isyHUckiRJ95CBI4+8rQ0AvdyrSpIk6R4ycOSRNxcHyE0OJUmSCiMDRx652f9UVlaGYxk4JKnSFJc6tqS0rQXduHEDIYRxJ9zcV25K2EGDBvHiiy/mu2fAgAFMmTKFDz/80Hi9ubk5arXaeJy7tXvedLOenp5Mmzat0F1yx44di0ajkRkATYUQopcQ4ooQ4roQ4q3KeEZuV5XGxQUARebjkKRKVVjq2NKmbS1MfHw8ycnJxtfQoUMB+Oqrr9iyZQsHDx4EDHtMnT59mo8++ogZM2YYr1+2bBkdOnQwHufdXTc33eyhQ4fYuHEj3333Xb5np6SksHnzZuzs7Fi7dm0F/6ZMS40IHEIINfAV8CTQFBguhGha0c/JXfyXGzj0MgOgJFWJ3NSx//77b5nTtpaGu7s7Cxcu5MUXXyQkJISpU6fy9ddfY21tXeay/P396dSpE2fOnMn3/ubNm7G3t2fWrFmsWrWqXPWsKWpE4ADaA9cVRQlUFCUT2AD0r+iHZCclccmhLnEuXoBscUhSVclNHWtpaVnutK0lGTt2LPXr16dNmzb06tWLXr16laucy5cvc+TIEfz9/fO9v2rVKoYPH86wYcO4fPkyp06dqohqm6Sasq26J5C3jRoGPFTRD/kkQM8PXacyJPkK44RAycpCyclBqNUV/ShJqha+b+0q8tyHA1ow4qG6APxwPIQZP/9b5LU3Pupt/LnPF0c4H554z/ulUTB17PDhw9m0aVO507Y6OzvnOz569Gi+XWC7dOnCL7/8YszlURa52f9SU1MZNmwYkyZNMp4LCQnh4MGDLFy4EDc3N7p3787q1atp27ZtmZ9TE9SUFkeJhBDjhRAnhRAno6Ojy1WGb2osAPs1dRDm5oCcWSVJlalg6tjcf/hLStuadwA8JCTEeG1MTAzx8fHGV96gce3aNT755BMmTZrE9OnTycrKKlNdT58+TXJyMhs3buT48eOkpKQYz61Zs4YmTZrQunVrAEaOHMkPP/xQ5mfUFDWlxREOeOc59rrznpGiKN8A34AhH0d5HvLsW88xb84vxJnbctPCgTppaejT042zrCSppitti2DEQ3WNrY+S7Hy5y/1UKZ9GjRqVKm1r7uyrXDdu3Ci2XEVReOGFF3j11VeZNWsWnTp1Yv78+bzzzjtlqp8QgiFDhrBt2zbee+89Pv/8cwBWr15NSEiIsaWUnZ1NbGwsu3fvpn//Cu9Vr3Y1pcXxN9BACOEnhNABw4DtFf0QSwsd9XTZABx2bgzIFockVaXypG0tjaVLlxITE8OMGTNQqVSsWLGCBQsWcPny5XKV99Zbb7F8+XIiIiI4evQoAQEBnDhxgjNnznDmzBnOnz/PiBEjWL16dbnrbMpqROBQFCU
bmALsAy4BPyqKUros9GX07COGAa/jboZJW3IthyRVraFDh/Ljjz+ydu1avL29cXZ2ZsiQIfekbS2Mvb19vm6sTz/9lJCQEGbMmMGKFSvQ6XQANG3alOnTp/Piiy9SniyoLVq04JFHHuHjjz9m1apV9O/fnxYtWuDu7m58vfLKK+zcuZO4uLhy/R5MmUwdW0B8aiYPvPcLQq+w+pcPaLN2BZbNm1dwDSWp8snUsVJRZOrYCmZvqeMxf0f0KhXb6nXm1y2/V3eVJEmSTIoMHIWY8oQhEu/y60Dj5n7VXBtJkiTTIgNHIR6o60Dr7FhStRbsj68pE88kSZKqhgwcReinGOaRn4zJJj05hcDd+6u5RpIkSaZBfp0uQkOtIaHTpcQcnnh3K9bJt9lgocb20UeruWaSJEnVS7Y4iuBjrqDJySYiW02SuQ0XnfxY/ekPZJaw0EiSJKm2k4GjCDoLc+omGxYcjezWEIDl/o9z6eVp9yR8kiRJ+i+RgaMIKgtz/BIM4xx17C1o521Hgpk131o0IvSlSejlzrmSJP1HycBRBGFmjm9iBABXIpJ4f2BL1AJ2+XXk3+sRhL82DaWWbmAmSZJUHBk4iqCyMMcv8SYAlyOSaFLHljEd/dALFUvaDEY4OoIQ1VxLSarZNmzYwEMPPYSVlRWurq489NBDLFmyBEVRGDt2LEIITpw4Ybz++vXriDx/77p164a5uXm+zID79+/H19e3yGfOmTMHrVaLtbU19vb2dOzYkaNHj1bK5yto7Nix6HQ6bGxssLGxoXnz5rz99tskJCTku+7WrVu8+OKLeHh4YG1tTb169Rg7dqxxb62CqXLd3Nzo06cPv/76a5V8Dhk4iqB2cjK2OC7fSkRRFF7t0YAmdWwZO6gjdd5/H6GRk9IkqbwWLlzIK6+8wuuvv05ERASRkZEsW7aMP//8k8xMw6xGR0fHEnewtbKy4v333y/Ts4cOHUpycjLR0dF07tyZgQMHFrpnVXZ2dpnKLY033niDpKQkoqOj+f777zl27BidOnUybtMeGxtLx44dSU1N5ciRIyQlJXH69Gm6du16T2DITZV79uxZevTowYABA1i5cmWF17kgGTiKYObnh2N6InbZaSSmZxMUk4KtuZbdUzsz9PEWqNWGX11OfDzRS5agFJK4XpKkwiUkJDBr1iyWLFnCM888g42NDUIIHnjgAdatW4eZmRkAY8aM4dy5cxw6dKjIsqZOncr69esJCAgocz20Wi1jxowhIiKC2NhYVq5cSadOnXjttddwcnJizpw56PV6PvjgA3x8fHB1dWX06NHGFsLGjRvx8/Mj8c6EmT179uDu7k5pcgKZm5vz4IMPsn37dmJjY/n+++8B+Oyzz7C1tWXNmjXUr18fIQT29vaMGzeOl19+udCycjdVnDNnDm+++SZ6vb7Mv4uykIGjCDofHwTQJtLQNNx3wTDDKm8z+XZKBmFTXiZm8RfcmjEDpRK+nUhSbXT06FEyMjJKzFVhaWnJjBkzmDlzZpHXeHp68uKLLzJ79uwy1yMjI4OVK1cad+EFOH78OPXq1SMyMpKZM2eycuVKVq5cycGDBwkMDCQ5OZkpU6YAhpZLx44dmTp1KrGxsTz//PN8++23uLi4lLoONjY29OjRgyNHjgCGrrYBAwagUpX9n+eBAwcSFRXFlStXynxvWci+liKorKzQuLrSOewMBz0fYM/5W7zUrb7x/FcHr/Plget8M3IiLhenkrBtOzlJyXh+uhDVneyBkmRKLjWump1ym1y+VOI1MTExODs7o8nT3duxY0cuXrxIRkYG+/btM74/YcIEPvnkE/bs2UODBg0KLe/tt9/G39+fCxdKl23hxx9/ZOfOneh0Opo3b87PP/9sPOfh4WH8Zq/RaFi3bh3Tpk2jXr16AMybN4/mzZvz/fffo9Fo+Oqrr2jZsiXdunWjb9++9OnTp1R1yMvDw8OYozwmJiZf6tzt27czevRocnJy6NChA7/88ku
x5QCVvpW7bHEUQ+fnR9vIK1io4VxYAmG3U43nUjKyScvKYea/mTgt/QaVnR3JBw4QMu45cuLjq7HWkmT6nJyciImJyTeG8NdffxEfH4+Tk1O+rhYzMzPeffdd3n333SLLc3FxYcqUKcyaNSvf++vWrTMOID/55JPG94cMGUJ8fDxRUVEcOHAgX25wb2/vfGXcvHkTHx8f47GPjw/Z2dnGxFL29vYMHjyY8+fP58ta+OGHHxqfPXHixGJ/H+Hh4Tg6Ohp/N3lT5/br14/4+Hg+++wz49hPceUAxrIqi2xxFEPn64vZ8eN0scrgl0Qz9vwbwYuPGL51vPp4Q36/Es3FW4ksDHNi7rq1hLzwImn//MONkc9Sd/k3aO9Ef0kyBaVpCVSVDh06YGZmxrZt2xg0aFCJ148bN4758+ezZcuWIq95/fXXqVevHu3btze+N3LkSEaOHFmmuokCsyU9PDwIDg42HoeEhKDRaHBzcwPgzJkzfPfddwwfPpypU6eyd+9eAGbMmMGMGTNKfF5ycjL79+83dsd1796drVu3Mnv27DJ3V/3888+4urrSqFGjMt1XVrLFUQydny8AndMNUfzPgJi75zQqPh/WGp1GxYa/QzmcaYPvhvWYNfAnMyCAhG3bqqHGklQz2NvbM3v2bCZNmsSmTZtISkpCr9dz5swZ4+yivDQaDXPnzmX+/PnFljl9+nQWLFhQoXUdPnw4n332GUFBQSQnJzNjxgyGDh2KRqMhPT2dZ599lg8//JDvv/+e8PBwlixZUqpyMzIyOHXqFE8//TQODg6MGzcOgGnTpnH79m1GjRpFQEAAiqKQlJTEmTNniiwrMjKSL7/8krlz5zJv3rxyjY+UhQwcxdDdmQve8KZhoOlcWEK+KXsN3Wx4s5chN/lbm89x29Ien7VrcZk2DacJE6q8vpJUk7zxxht8+umnLFiwADc3N9zc3JgwYQLz58+nY8eO91w/fPhw6tSpU2yZr7zyCmq1ukLr+dxzzzFq1CgeeeQR/Pz8MDc354svvgAMYyve3t689NJLmJmZsXbtWt555x2uXbtWZHkLFizAxsYGJycnRo8eTdu2bfnrr7+wsrICwNnZmWPHjmFubk7nzp2xsbGhdevWJCUlsXTp0nxl2dvbY2VlRYsWLdi9ezc//fQTzz33XIV+/sLI1LHFyLxxg4BeT6KuU4ehj77F7dQsjrzxKN6OlsZr9HqFUd8d58/rsfRo6sby0fmzLmZFRpL0634cRo64pwksSZVJpo6ViiJTx1YirZcXaDTk3LpFC3drAP4Nz7/CU6USfD70AR5v4srsvk3znVNycgh7eSqRH3zArbdnoC9hYEuSJKkmkIGjGEKjwaJ1KwD8Qy4CcDbs3hlTLjZmfDvmQbwcLPPfr1bj9NxzCHNzErZuJWT0GLKioiq/4pIkSZVIBo4SuL/zDmg0eP+xB4BzoQnFXq8oCl8fCuBsqCHA2Pbqie8P69DUqUPamTMEDRxESp69dyRJkmoaGThKYN64Mc4TJtDwtmETtX/DbnPjuedJ2l94Ktn1J0KZt+cyz686SWicYd2HedOm+P30I5bt25MTE0PI2HHErviuyj6DJElSRZKBoxScXnwBVzsLnFPjSc7Uc/F8EHGr1xR67TNtvejk70RMcgbjVv5NQqph63WNszN1v1uB0/jxoNcDtW9SgiRJ/w0ycJSCytwc55cm0ibaMC33mHtTMorYUE2nUbFkZFsaullzPSqZCWtPkpltWAUrNBpcp72G74b1OOaZMqfPyKj8DyFJklRBZOAoJftBg+icZdhi4KhHc3JiY8m+fbvQa+0stHw39kFcbMw4FhjHW5vP5Vv/YdG6tXFqblZ4OAFP9OT2hg2FbussSZJkamTgKCWh0zFg0RwsNIJr9t5Em9uRce0amWHh6NPT77ney8GS78c+iKVOzZZ/wlnxR1Ch5Sbu3Ut2ZCQRc+YS9vLLRQYjSZIkUyEDRxnYeNahS0NXAI7XaUb
C5i0E9OxJ5IfzCr2+uacdXwx/gBaedvRv7VnoNU7PP4/Hwk9QWVuTvP83gp4eQMpxOetKkiTTJQNHGfVoatjY7Lh7U8N+VDk5pP1zusjruzdxY+vkTrjYmBV5jV3v3vht/RmLVq3IjowkZOxYohYtkvk9pFrPFFPHlpS2tTC59cjdDdfa2pq+ffsCsGPHDtzd3fNtdb5t2zY8PT0JDg7Od48QAisrK+PxkSNHjOlmra2tcXR0pEePHoXW5ffff0cIUex+XhVFBo4y6tbI0OI451yfDJVhc+HM0LBixyfUKsMfdL1eYd7uS/x+5d5FgDovL3zWrsHpJcP2y7HLvyXj+vWKrr4kmQxTTB1blrStBX355ZckJycbXzt27ACgb9++PPbYY7z22muAId3rSy+9xNKlS/Hx8cl3D8DZs2eNx126dAEM+3olJycTHh6Op6cnzz///D3PX7VqFY6OjqxevbpMv4vyMPnAIYSYI4QIF0KcufN6qjrr42JjRjMXCzLVWs47GbZYV9LTyS5Fqshd/97i68OBvLT2NKeC7x3LEFotrq+8Qt1VK3F/ZybmjRsbz8mBc6k2MdXUseVJ21oaixcvZs+ePezbt4/XXnuNrl270q9fvzKXY2FhwZAhQ+7ZKTclJYVNmzbx1Vdfce3aNSpir77imHzguOMzRVFa33ntru7KdG1myLNxyu3unvdZYWEl3tenZR0Gt/UiLSuH51b+zaVbiYVeZ9W+PQ7DhhmPE/f9QujEiXK7EqnWMNXUsfeTtrU4zs7OLFq0iJEjR7Jz504WL15crnJSUlJYv349/v7++d7fsmUL1tbWDB48mJ49e7Jq1aqKqHaRZCKncuja0IUlvwdwyq0xZtnXyLh8mcyQECzbtCn2PiEE8wa2ID4ti18vRjLy2+P88OJDNHa3LfIeJSeH6M8+I/PGDYL69sN9zmxs82Qyk6TS8n1rV5U858ZHvUu8xlRTx95P2tapU6fyv//9z3j88ssv5+tCe/jhh0lISGDIkCFlykkO8Mknn/Dll1+SmJiIj48P2wrk+1m1ahVDhw5FrVYzYsQIpk6dyqeffopWqy3Tc0qrprQ4pgghzgkhvhNCOFR3Zdr4OGCtUxNi40ZS5+4AZIWElnCXgUat4ovhD9CtkQtxKZmMWH6cyxGFtzzAsFFi3VUrsercmZyEBMJfm0b49P/J9LRSjWaqqWNLk7Z14sSJxjI//PBD47WLFy8mPj7e+Co47jJ+/HhGjx7N7t278w3Gl8b//vc/4uPjuXHjBhYWFly5csV4LjQ0lIMHDxozHfbv35/09HR27aq8Lwom0eIQQuwH3As5NRNYCryPYY+O94GFwD2ZSoQQ44HxAHXr1q20ugJo1So6+Dvz68VI/rH24iEgM6x0gQPAXKtm2bNtmbDmFIeuRvPBzkusfeGhop/n5ob38m+I37iRyPkLSNy1i9S//6bO/32A9Z3BM0kqSWlaAlXFVFPHliZt67Jly1i2bFmpywRYsWIFoaGh7Ny5k3bt2vHCCy/wzz//oNPpylRO3bp1WbRoEWPGjKFPnz5YWFiwZs0a9Hq9cRYXQHp6OqtWreLpp58uU/mlZRItDkVRHlcUpXkhr22KokQqipKjKIoeWA60L6KMbxRFaacoSruyNgPLo1N9JwD+zjHk6cgMDiZ+0yYyisn8lZe5Vs3Xo9ryXCc/vhj+QInXCyFwGDaMelt/xuKBB8iOiiLigw9QsrLK/yEkqZqYaurY8qRtLcnNmzd5/fXXWb58OWZmZkycOBEnJyf+7//+r1zl9ejRAw8PD7755hvA0E01e/Zszpw5Y3xt3ryZ3bt3ExsbW+56F8ckAkdxhBB5c0UOAM5XV13y6uTvDMCJ2BwUIP3sOW698y635swtdRnmWjWz+jbFwcrwrUOvVzgTWnwXlM7HB5+1a3CZPo06772PuNOHqeTklO+DSFI1McXUsWVJ21rQlClT8q3JyO3+mjRpEsOGDTNOrRVCsHz
5cj7//PNSj8kU9Prrr7NgwQIOHTpEcHAwkydPxt3d3fjq168f/v7+rF+/vlzll8TkU8cKIdYArTF0Vd0AJiiKcqu4eyoqdWxxFEXhoQ9/Iyopg2W/fYxPkmEfK2FpSaOTfyPKOCtDURRmb7/AuuMhfDSwBYPbeZfp/ltz5qBPTMLtnZloHB3LdK9UO8nUsVJRan3qWEVRRimK0kJRlJaKovQrKWhUFSEEHe90V51xuTvTQ0lNJSs8vFxl2lloydErvL7pHMsOBZR67UZ2dDQJ23eQuHs3gb37kLBrl1z3IUlSpTH5wGHKOt7prrrQvgdaLy90/vUByLh6tcxlCSGY/kQjY97yj/ZcZubW82Tn6Eu4EzQuLtTbthXLhx8m5/Ztbk7/H2GTp5AVKdd9SJJU8WTguA9dGhgCxym9LXX37sO6s6EPM+PqVRJ//bXInB3FGXdnsFynUfHD8RCeW3WSpPSSB8B13t7U/f473N+ba9gw8cABAvv0IX5z0bNQJEmSykMGjvtQx86CRm42pGbmcPJGHGYNGwJwe8NGwl+eys0ZM8pVbt9WHqx/8SEcrXQcvhrN/L1Fb66WlxAChyFDqLdzB9Zdu6JPSiJV5jeXJKmCycBxn7o1Mkz9/f1qtDFwZEcaBsozrl5D0Zfc1VSYtj6ObJ3UiZ7N3Hi9Z+OSb8hD6+6O17KleHzyCa5vvWl8PzMsDP2dRUySJEnlJQPHfep6J3AcuhKNmX99yDObSklLI/s+9peq62TJ16PaYWdhmHKblaPnaEDp5mULIbDr0xuNg2GhvZKZSejEiQQ9PYDUv/8ud52kmkVfzi8uUu1VERNnZOC4T+18HLHSqbkSmUREuoKuwKr1zBs3KuQ5iqLw7tbzjPj2GN8cLv2Mq1xZkZGQnUNmYCDBo0Zz85135LYltZyVlRXh4eFkZmbKWXYSgHHreHNz8/sqxyS2HKnJdBoVnfyd+eViJL9djuKJ7o8Rv34DOn9/0s+dI/NGMFYPP1whz/JxskJR4MPdlwmMTuG9/s3RaUoX+3Xe3vht30bsN8uJ/fprEjZtJvnAQdzefgvbPn3yJceRagcvLy9iYmIIDg7OtyeU9N9mbm6Ol5fXfZVh8gtVfSXDAAAgAElEQVQAy6MqFgDm9ePJUN7YdI6uDV1Y9Vx79JmZxK1cRfSnn+I4dixuecYZcn/f5f2Hete5W0z78QwZ2Xra+zqy9Nk2OFkXnV2wMBmBgUTMnmPssrLt3RvPhZ+Uqz6SJNUetWYBYE3QvbErQsDRgFiSM7JR6XTofHwAQ1dV8uHDZFy/jj4jg8CnenNz+v9KKLFovVvW4ccJHXCzNePEjTj6f/VnsbvrFsasXj3qrl5FnQ8/RG1nh1XHDuWujyRJ/z0ycFQAJ2sz2tZ1IDNHz6ErhkyAujs5j1OOHSN0/ATCX3vNkLcjKIjkYrKZlUYrb3u2T+lMKy87wm6nseRg2deLCCGwHziAenv3YDdggPH92+vXk7R/v+wTlySpSDJwVJAeTd0A+PViBAA6H8MguZKeDkDG9QDSzv0LgD4lBX1q6n09z83WnI0TOjCpW30+HNii3OVoHByM+2pl3bxJ5EfzCZvyMqETJpAZHHxfdZQkqXaSgaOC9GxmSCfy68VI0jJzUJmbo/HIs5OnopC4b6/xMDsm5r6faa5V80avxlibGeY4pGflsPi3a6RnlW+nXI2rK66vv47KxoaUw0cI7NOX6MWL0ael3XddJUmqPUoVOIQQxW63KoRQCyGKz5tay/k6W9HK256UzBz2XzIsADS7013FnYHwtJOnjNdnR0dXeB3e23mRT3+9yrBvjhGVmF7m+4VGg+OzI6m/Zzd2AwagZGURs2QpgX36knTgQIXXV5Kkmqm0LY5oIYRr7oEQ4h8hRN75XM7Af35V2dOtPQDYdsawO67juHHYPPEEjqNH33NtdvT9tzgKevYhHzztLTgTGk+/L//k37CEcpWjcXbGY96H+PywDrPGjck
KDydu1Wo57iFJElD6wFFw7qg/UDDn4X9+IUCflh6oVYLfr0QTl5KJdZcueC1eVOispcpocTT1sGXblE6083EgIjGdwV//xa5z5d+F3rJNG/w2/YTbzJm4v/uOcQpxVkQEOcn3ZmmTJOm/oSLHOP7zX0ddbMzo0sCZbL3C9jN3c3KYNWp0z7XZMTHkJKegTy97l1JxnK3NWPfiQwxu60V6lp7JP5zm01+voteX73+P0GhwHPUsZv7+gGEdyq0ZMwl88knif95a7r24JEmqueTgeAUb3NaQuW/D36HGrh2NmxsqW1vDBbkzmEJDCezXl5AxYyu8DmYaNQueack7vZugEhAQnUxFLQzXJyWRk5xMdnQ0t95+mxtDh5H6zz8VU7gkSTVCaQOHQv4WRcFj6Y7Hm7riYKnlckQS/4YbxhiEEJjf2TnXvHlzAFKOHiX75i3Szp5FySo530ZZCSF4oUs91r3wMJ8808rYzXS/4xRqW1t8N6zHY/5HaFxcSP/3X4KHjyD89TfIioioiKpLkmTiyjLGcUwIcVUIcRWwAg7kOf6r0mpYw5hp1Ax4wDBvYMPfocb3bZ54ArRa7AcNAiDn9m3jucoY78jVob4TFjo1AGmZOYz89jiHrt7f84RKhV3//tTfuweniRMQOh2JO3YQ9PQAOXVXkv4DSrvJ4dxKrUUtM6y9N9/9GcS2f8KZ8VQTrM00OI4ehePoUWTdunewOisyEq2HR6XX64cTIfwVEMuxwFhe79mYiV3r3dfmhiorK1xffRX7Z54hasHHmPnXR2VhAWAY+xBCbp4oSbVQqQKHoigycJRBQzcb2vs6cuJGHD//E86oh32M5zROTvdcn11FucHHdfQlKT2Lz/dfY/7ey5y/mcDHz7TEUnd/myTrvLzwWrwo30B5wtZtxG/ahNsbr2PRuvX9Vl2SJBNyX4PjQoguQohBJS0Q/C96toMhWKw7FpxvXEHodKjt7fNdmx0VWSV1UqkErz7ekG9GtcXaTMOuc7cYuOQvQmLvb/uTXLlblyiKQtyqVaSdPs2NYcMJe/U1MkNCKuQZkiRVv9KuHJ8ihHinwHvbgEPAT8A1IUTZ8pvWcr2aueNsreNyRBJ/Fcjap3FxyXecFVk1gSPXE83c2Tq5I/WcrbgckUTfL/8g7HbFBA8wDMz7rF2D0/jxCDMzkvbuJaB3HyLnzSM7z9iOJEk1U2lbHKMB41dGIUR/4ClgFPAgcA2YUeG1q8F0GhVjO/oC8N6Oi2Tl3O3G0bg4G3640/9fVV1Vefm72rB1Sicea+xK9yaueNpbVGj5ahsbXKe9Rv29e7B7+mnIziZu1WoCnuhJ+sWLFfosSZKqVmkDR30g72T9p4CdiqKsUxTlFDATeKSiK1fTvdClHt6OFlyJTGL10bs7zea2OCxatQIgu4pbHLlszbV8O7od8wa2MA5ih8alkpRecdODtXXq4PHRPPy2bMaqYwc0Tk6YNWhQYeVLklT1Shs4LIC82YIeBg7nOb4GuCLlY65VM7tPMwC+OnjduGutTc9e6OrXx3GMYQ+r7Kiqb3HkUqkEZhrDdN3UzGyeX/U3/b78kysRSRX6HPMmTfBesQKf9T8gtFrAsHo+ePQYUk6cqNBnSZJUuUobOMKAlgBCCAegGXA0z3kX8gcW6Y7uTVxp5mFLXEomu/81TMW1eexR6u/aiVWXLgBkRUWZxAaC8alZqIQgKCaFp7/6k63/hJd8UxkIIdA4OBiPY7//ntQTJwgZPYaQ8eNlF5Yk1RClDRwbgcVCiEnAKiAUyPs1sR1wpYLrVisIIRh9Z4bVqqP5EyOpra1RWVqipKWhT6rYb/jl4WFvwc+TOjGwjSdpWTm8uvEM7249T0Z2+fJ7lMRl8mScp76MysqKlMNHCBo4iPBp08gICqqU50mSVDFKGzj+D8MMqv/DsDPuSEVR8u5uNxzYVcF1qzX6tfLEzkLL2dB4Tofkn1WkcTNkDqyucY6CLHRqFg5uxYcDWqB
Tq1hzLJihXx/jZnzFrwhXWVriMmkS9ff/iuPYsYYV6Lv3ENinL3GrV1f48yRJqhilChyKoqQrijJWURQHRVGaKoryV4Hz3RRFWVA5Vaz5LHRqRjxkSCU7d/sFcvLsVKtxNQwNRS/+wmSSJQkhGPFQXX6a2MGY3+OPaxWfPySXxsEBt7fepP4v+7AfPBiEME4ckCTJ9JjE7rhCiMFCiAtCCL0Qol2Bc28LIa4LIa4IIXpWVx3v1+RH/XG3NedsWAJrj+WZYeVmCBxJv/5K2NRXyImPr64q3qOVtz07X+7MrD5NGdzOq+Qb7pPW3Z0677+H/2+/5Qsc4dP/R/TixeSYQHeeJEmlXwD4TWle91GP88BA8s/UQgjRFBiGYTC+F7BECKG+j+dUG2szDXP6GWZYfXHgmjE/hpl/nqmp2dkkHzpUYlm5g+lZN28S8FRv4jdvqZQ6AzhY6Xius59xuu71qCQmrztNfGpmpT1T63Z3gl7G9esk7tpFzJKlBDzeg9gVK9CnVtxiRUmSyq60LY4XgCcwjG80KOLlX95KKIpySVGUwgbX+wMbFEXJUBQlCLgOtC/vc6pbz2ZueNpbEJOcadxy3WnsGHzWrcX1rTcBSNr/W7FlpBw/wfVHuhLzxZekHD1GZmAgib/sq/S653p7y7/s+vcWfb74o9ypacvCzN8fn3VrsWjXlpyEBKI+/oTrj/cgdsV3MoBIUjUpbeDYBtQBcoClQA9FUR4t8HqsEurniWEGV66wO+/dQwgxXghxUghxMroStym/H0IIujUyLP47eMWwdkPodFi2bYttT0MvXPKffxabFTD1+DEA0s7/S3a0oQx9/L3/gCcfPlwp+TE+G9qall52hN1OY9DSv1hTYC+uymDZti0+a9bgvfwbzFu2JCcujqiPPyagTx+UzMpr+UiSVLjSDo4PAHyB34GPgHAhxAIhRKmXAAsh9gshzhfy6l+eihdSx28URWmnKEo7lwJ7QZmSRxsZumF+v5I/uGnr1MG8aVOU1FSiF39R6PbrABnXrgGQHRFpXDiYk5h/CU36lSuEjp9AxOw5FVx78HKw5KeJHRj5UF0yc/S8u/U8U374h8QKXG1eGCEE1l264LtxA97ffI15y5bYPP44QqcDQMnJkS0QSaoipR4cVxTllqIo/4dh+5Exd/77rxDioBDCvBT3P64oSvNCXtuKuS0c8M5z7HXnvRqro78TOrWKs2HxxKXk/7Zs06sXAHHffUdg335kBN67niH96lXAMH03KzdwJORvcWTdvGn4b3TlrEg306j5vwEtWDz8Aax0anb9e4shy47mmy1WWYQQWD/yCL4bN+A6fbrx/cTdu2UXliRVkTLPqlIM9gLLgGNAF6DEwFFO24FhQggzIYQfhrGUGr0/haVOw0P1HFEUOHItf6vDadxYPD75BMt27dAnJxP+ylQyQ0KMeS70qalkhRh67nISEsgKDjH+nLe7SJ+cbPhvSkqlfpZ+rTzYObULTerY8lwnP9SqqkvaJIRAZWZmPE4+dNjYhXX98R7ELFt2T0tMkqSKUabAIYSoI4SYIYQIBNZgCByNFEW5rzmkQogBQogwoAOwSwixD0BRlAvAj8BFYC8wWVGUylnGXIU6+xt2xz1aYLt1odVi16c3XsuWoatXj4xr1wl4oidX2z9E8LhxJO7dB3kCREZAgOGHnJx8QSJ32mpVfPP2c7Zi2+RO+abr/hUQU+ldVwV5fLzA2IWVExdH9OeLuP7oY0QtXEh2TOWtQZGk/6LSTsftI4TYCgQB3YA3AW9FUd5SFCXgfiuhKMrPiqJ4KYpipiiKm6IoPfOc+z9FUeoritJIUZQ99/ssU/BwPUMWwONBcYWeV1tb4b10CdaPPYbaxRl9cjKpR49x691381+YJ+NeTp4Bcn2SocWhpFRNl41OozJO170WmcTzK0/SZ3HVzLrKlbcLq+53K7B8+GH0KSnELv+WpP37q6wekvRfUNqcoduBYAxbjgQAWmB
wwXzSiqL8UKG1q6WaedhibaYhKCaFiIR03O3u7enT+fjgveQrwJDoKWjAQHLiCg80APrEBHInnOmTDF00+tRUFL3emJmvKug0Kuq5WHHhZiKDlv7FzN5NGN3Bp8pyjwshsOrYEauOHUk7d47b6zdgN2CA8Xzi3r2Y+ftj5l/u2eOS9J9Xln9RfIC5wNoiXmsqvHa1lEatop2vYZfY40GxJVwNWjc3HEePMh7rfH3vuSbvAHnOnRYHgJJW8XtMFcfHyYrNL3Vk1MM+ZObomb39Ai+tPU1CWtV2XQFYtGyJx7wPjWMhOYmJ3Jr5DoF9+hI6eQqpp06ZxK7EklTTlHY6rqoUrxq5oru65HZXHQssOXAAOIwYgcrKCoTAumvXe87nDRx5d9qtjhlG5lo17z/dnK9GtMHaTMPeCxE8tegIZ0OrdzsVJTsbu/79ETodyb/9RvDIZ7kxdBiJu3ejZGdXa90kqSYpVVeVEKJU2f0URTlc8lUSwEN+jsC9A+RFUdvaUnfFt2RFR0OOHlatync+7xhHTnKewJGSAtW0rqV3yzo097Rlyg//cDUyCUtd9X630Dg64j7rXZwnvUTcunXEr99A+rlzhE+bjsajDn6bNqFxdKzWOkpSTVDaMY7fAQUorKNayfPf0pb3n9fC0w4bcw03YlMJjk3Bx8mqxHssWrfGAkj95597zmVHRXLr3VnYPvUk+sTqbXHkldt19W94Ag3cbABQFIXo5AxcbSprFnfxNM7OuL7yCs7jx5OwbRtxK1ehdnDIFzSyY2LQODtXS/0kydSVdozDG6h75795X34YVpKnU8MX5lU1jVrFIw0NLYEDl8u2UE/r7n73Zy/DNNiE7TuI/+knYr7+Jn+LwwQWw+k0Ktr63M38t/l0OI9+/DubT4VV6xiDysICh2HDqLd7F15fLDa+n37lCte6diPstdfkOIgkFaK0YxzhBV/AA8BuYBLwHtCwEutZKz12Z/uRsgYOjbMz3JkpZdbAsOtLVlgYYMhfrs8zOG4KgaOg44GxpGTmMP2ns0zdcKZaBs7zEipVvtZF2pmzIARJe/YSPPJZggYM5PaPP5rk71KSqkOZ52kKIdoIIQ4AW4ADQANFUeYripJR4bWr5bo1ckEIOB4YR0pG6QdnhVaLxskwuJ4bOHJlx8TkHxyv5NXj5bHgmZYseKYlljo1O87e5KlFRzhRxJqW6uAwdAj++3/FacIE1I6OZFy+TMSs2Vzr2o2oRYuqu3qSVO1KHTiEEN5CiLXA30A80ExRlJcVRZHLcsvJydqM1t72ZObo+eN62X6N9sOGYvnww1g++GC+9/WJifm+GZvit2QhBEPaebNrahdaetkRHp/GsG+O8sm+K2Rm60suoApo3d1xfe1V/H8/iMeC+Vi0aoU+KSlfoi0lOxslp8ZvZCBJZVbaleMfAVcwbGz4iKIoAxVFuVapNfuP6N74TnfVpbJ1V7lMnozPyu/RODsVe52+ilaPl4efs2HgfFK3+ijALxcj0JvYeIJKp8OuXz98N27Ad/MmnJ5/3ngucdcuAp7oScyyZWSZSM54SaoKpZ0F9QaQBiQDs4taBawoyhMVVK//jMcau/HJL1f57XIUer2CqowbBart7Io9r081va6qvLRqFW/0aky3Rq5Y6tSYaw1TdlMzs9GpVWjUJpHdGACLZs3yHSf9doCs8HCiP19E9OIvsO7SBfvBz2DdtStCq62mWkpS5Stt4FjN3Wm3UgVqUscGDztzbiakcy48gdbe9mW6v8TAYcItjrza++VfP/HOz+cJik1h4eBW1HOxrqZaFc/z889I+fNP4jdtJunAAZIPHSL50CHUzs64TJ6Ew/Dh1V1FSaoUpQociqKMreR6/GcJIejexI01x4I5cCmyzIFDWFqCVgtZhc9MMsUxjpLcTsnkaGAstxLSeWrxEd7s1ZgxHXzL3BqrbEKlwrpLF6y7dCE7Lo6EbduJ37SJzIAAEHdbSjnx8QgzM1QWFtVYW0m
qOKbTD/Af1r2JYZxj34VI9GVMhiSEMLY6VIW0Pmpi4HCw0rH31UcY2MaT9Cw9c3dcZMS3xwiNM93PonF0xGncWOrt3IHP+h+w7dPbeC5m6TKuderMzTffIvnPP+WAulTjycBhAjrUd8LRSseVyCQW/Vb2OQdqW1sArNrfnWGVm1LVFKfjloadhZZPh7Tm61FtcbbWcSwwjic+O8x3f9ybFdGUCCGwfOAB1NZ3u9cyQ0LQp6aSsG0boc+/wPVujxL50XzSL16UiwulGkkGDhNgplHz2dDWqAQs+u3aPZkBS2LXry/mzZtj88TduQkaNzegZrY48urZzJ19rz5Cv1YepGXlEBCdXPJNJsZ76RLq79uL85QpaH3qkh0dTdzKlQQNHETMF19Wd/Ukqcxk4DARXRu6MKmbIUfE9jM3y3Sv88SJ+G36CV3dusb3tLUkcIBhvcvi4Q/w/dgHeevJxsb3b8SkkJFdM7p9dD4+uEyZTP29e/HduAGHkSNROzhg1bmz8ZqkgweJW72arIiIaqypJJVMbkpoQno0dePLg9c5FXK7XPdr8uyCa2xx1NCuqsI8emfNC0BaZg5jvj+BTq1i/jMtaVPXoZg7TYcQAotWrbBo1Qq3t94Ezd2/grfXrCXlr7+I/HAeFq1bY9OrJ7ZPPIHWw6MaayxJ95ItDhPS1MMWc62KwOgU4lIyy3y/Os9+S7Wlq6ootxLSUAnBtahkBi39i7k7LpBchm1bTIHQavNlRrQfMhibHj0QZmaknTlD1Efzuf5Yd4KGDjXkm5ckEyEDhwnRqlW09DJMx/2nHK0OlU5nnGGlda/dgaOeizV7XunCxK71UQnB93/eoPvC39l57maNHXC27dULry8W0/CvP/H87FNsevVCWFiQfvYcOfF3/zxkhoaSduFCjf2cUs0nA4eJye1yORVcvu4qtYuh1aFxM2y9Xpu6qgoy16p568nGbJvciVbe9kQmZjDlh3+Y/tPZ6q7afVFZWWH75JN4ff4ZDf/8A89Fi/JNfLi9dh03Bj3D9W6Pcmv2HJIPHUKfIfcYlaqODBwmJjdvRXkDh0WrVqDVYt6sGQiBkp5e69cNNPe04+eXOjJvYAvsLLQ8lmcspKZTWVpi2/OJfEmmVLY2aFxdyY6MJH7jRkInTOTqwx0InTyFxH2/VGNtpf8KGThMTJu6hq6qs2HxpGeV/R/8Ou+/T8M/jqDz8jSuVNanpVVoHU2RSiUY3r4uh19/lN4t6hjfX/FHEL9dql0bELpMnoz/od/x3bQJ58mTMW/aFCUtjeTffiP1xAnjddmxsaT89ZdsjUgVTs6qMjFO1mY087Dlws1EDl2Npmcz95JvykOoVMZxDmFlCamp6FNS8y1Iq83sLO9uLhgQncy83ZfI1it0b+zKzN5NTHbfq7ISQmDRvBkWzZvh8vIUsiIjST74O+bNmxuvSTpwgIh3ZyHMzLB88EGsOnXCqlNHzBo0oKiNSiWpNGSLwwT1bWWYfrn9bNnWcxSksrQEavc4R3F8HC15+6kmWJtp+O1yFE98dpj3d16s9oyDlUHr5obDsKFYNL+7g6/K3AKzJk1QMjJI+eMPoubPJ6hff6537catuXOrsbZSTScDhwnq09LQ1fLbpcgyZQYsSGVlBdTemVUl0ahVPN/ZjwP/68rQdt7kKAor/gii28cHWXP0Rq2flWTXtw/1ft5Cgz+O4PHxAuz690Pt7Ex2VBSZN24Yr1Oysrg5Y6Zhg8bg4Fr/e5Hun+yqMkFeDpa09XHgVPBt9l+KpH9rz3KVY2xxmHhOjsrmamPO/GdaMqqDD+/vvMjxoDgOX4thVAff6q5aldA4O2PXty92ffuiKAoZV66g5Bn3SL9wgYQtW0jYssVwvasrlu3bY/ngg1g80Bozf3+ESn7HlO6SfxpM1NOtDd1V646HlLsMjZNham7GpUv53s++fZuEHTtQ9KaRprWqNPe0Y8P4h1n2bBtmPtXE+P6FmwmcCY0v5s7aQwi
BeePGhtl3d2g8PHCbORObHj1QOziQHRVF4s6dRMyeTVC//mTeCDZemxEQQE5CQnVUXTIhssVhoga08WLB3iucCIrjfHgCzT2LT9hUGNveT5G0bx+3f/oJh9GjjQOike9/QOLu3YDArm+fCq65aRNC0Kv53VlXiqIwe9sFTgbf5snm7kx/ohH+rrVjAL20tK6uOI56FsdRz6Lo9WQGBJBy4gRpp06TERSEzs/XeO3NN94k/cIFdPXqYdG6NRatW2HRvLmhVXJnR2ap9jOJFocQYrAQ4oIQQi+EaJfnfV8hRJoQ4syd17LqrGdVsjbTMORBbwC++7N8W4nbPPooamdnMq8HkPbPGQD0mZkk//47AOnnz1dIXWuybL1Cez9HzLUq9pyPoOfnh3lr8zluJdT+KcyFESoVZg0a4DhyJJ6fLqTez1uMXzgUvR6VlRVCpyMzMJCELVuImDWboIGDuNK2HbErVxrL0aeno2SWfdscqWYwicABnAcGAocLORegKErrO6+JVVyvajW2oy8qATvO3iQ6qexz8YVWi/2AAQBEffIJ6ZcukXr8uHGwPOP6dfQpKSTt31/rFwkWJTfn+aHXH2V4e8Puwhv+DqXrgt+Zte18ufYMq62ESoXP6lU0Ovk3vhs34Pb2W9g+9RQ6Hx+UrCw0znc32UzYvp0rbdsRNOgZbs2aTdy6daT+/bfs5qolhCnNoBBC/A78T1GUk3eOfYGdiqI0L+a2e7Rr1045efJkhdevOry4+iS/Xozk9Z6NmPyof7HX/noxkuaettSxu5uiNDMsnKCnn0afnAxCoKtfj8zrAYBhENSmZ09ur1lDnY/mYf/005X6WWqCgOhkPv3lKrv+vYW1mYY/3nwUe0vZBVOSnKQkhFptnJARtWgRsUsL7yAwa9CAeju2G48zAgPRenmhkl1d1U4IcUpRlHYlXlcDAscF4CqQCLyjKMqRksqpTYHj8NVoRn93Ak97Cw6/8SjqIvJubzoVxv9+Okt9Fyt+m94t37ms8HBiV6zg9g/r77lP7eJMTnQMDqNG4T5zRmV8hBrpSkQS16KS6NPSMEkhIzuHz/dfY9TDPnjYy9zhpZGTnEzGpUukXbhAxtVrZFy9Ssb165g3a4bvurWAYSrw5TZtQa9H5+uLWYMGmNXzQ+fnh87XDzP/+jJXexUqbeCossFxIcR+oLBl0DMVRdlWxG23gLqKosQKIdoCW4UQzRRFSSyk/PHAeIC6eRIa1XSd/Z3xdbLkRmwqBy5H0aOpW6HXbToVCkBA9L1Tb7WenrjPmoXawZGYr75C41EHjYMj6RcukBMdA0BmkGmnZK1qjdxtaORuYzz+6WQYS38PYPnhQPq39mRC13o0dLMppgRJbW2N5YMPYvng3ZTGSk4OOYl3//pmx8Wh8/QkMySEzIAAMgMCSMpThuenC7F96ikAUo6fIOPyJUNQ8fND6+GBUKur6uNIeVRZ4FAU5fFy3JMBZNz5+ZQQIgBoCNzTnFAU5RvgGzC0OO6vtqZDpRI8+7APH+y6xIo/AosMHNejSl6r4TxlMmYNGqDzqUvc6jWkX7hgPCcDR/EerudI31Ye7Dp3k82nw9h8OozujV2Z0LU+D/o6yC08Skmo1Wgc7ibd0rq5UX/vHvTp6WQEBJB5/ToZQUFkBt0gMzAQXf273bNJ+/Zx+4cf7ham1aL1qIPO0wvzFi1wfe1V46mc5OT/zDY71cGkp+MKIVyAOEVRcoQQ9YAGQGA1V6vKDX3Qm0W/XeNYYByngm8bd9DNlZmtJya55MFzIQS2vXoChn7mvLJu3kSfkYHKzKziKl6L+Lva8MXwB3j9iUZ8+0cgG/8O5bfLUfx2OYpBbbxYOKRVyYVIRVKZm2PRrBkWzZoVeY1lh4dRFD2ZgUFkBgWRHRVFVnAIWcEh6DPv/vnXZ2Zy9cH2qGxt0Xl5ofX2RlunDto67mjc3LFs80C+bJlS2ZlE4BBCDAC+AFyAXUKIM4qi9AQeAd4TQmQ
BemCioihx1VjVamFjrmV0Bx++OhjA0t+v8+2YB/Odv3jrbtNfJUCvV1AVMRdFoPgAABtOSURBVBaSK2/gEObmKOnpZAYHY96wYcVWvpap62TJe/2b80r3Bqw6GszqozfoWN/JeD46KQO1SuBoJQd6K5ptjx7Y9uhhPNanp5MVHk5maChCe3dzy+yoaIS5OfqEBNITEvK1rAG8vvwCm8cNHSC3N/5I4o4daOrUQevujqaOO1r3OmjcXNG4uKB1rT1b9FckkwgciqL8DPxcyPubgc1VXyPTM66TH98eCWL/pSgu3EygmcfdBYEnb9yNpXoFkjKysbPQFlaMkXmTxgitFo2LC7oG/qQcOkzmjRsycJSSk7UZ03o0ZGLXemjVd2e1f3ngGuv/DqV/Kw/GdPQt18JNqXRU5uaY1a+PWf36+d7XeXnS6PQpcmJjyQwNJSssjKxbEWRH3CLrVgQ6X1/jtRlXLpNaxEQarU9d/PfdTdkbNvUVVFZWaJyd0bi4oHG5819nZzTu7qjMzSvlc5oikwgcUsmcrc0Y+ZAP3/0ZxOLfrvH1qLsTH04XSDObmJZVYuDQODvjs3YNajs7bm/YaAgcQTcqo+q1mqUu/1+h2JRMMrP1/HQqjJ9OhdGmrj3D2teld4s6WJnJv25VRQhh+Afd2RkeeKDI65zGj8emRw+ybkWQFXGL7FsRZEVEkB0djdbz7h5x+owMkn4pOkmW+5w5OAwbCkDir79y+4cf0Dg4oHZwRO3ggNrRwXhs+VD7Gj8mJv8k1yATu9Zj3fFg9l2IzNfqOBdmWFSlVQuychQS0rLwLkV5ufsV6fz8AEg7fZrEfb9g0/0xsiIiiVm6BJdJk/L9BZKK9+WINkx/IoU1R4P56WQop0PiOR0Sz9ztF/i/AS14+gH5uzQlWnd3tO4l57wRQuC1bCnZ0dHkxMSQHR1NdnTuf6PRuN4dM8kMDCL16LHCy7GwoPE/p43HQYOHkB0bg9rWDrWNDSo7W+PPVp07Yd2lCwA5CQlkBASitrNFbWuLys6uWte9yMBRg7jamhtbHcsOBfLF8AdISs8i7HYaOo2K1l72nLgRR3xq2fJN5Dbdkw8dIvnQIZwnTSIzOJjEXbtQ/X979x4fV1UtcPy35p1k8mjSNGnTlCRtaWlLKbW0BQqCRSiiVFCwFB8IWB+gcn0gWBXUC1evXhU+ICqiaC1SuRYKAgpUQa61lLb0/X6kpE1feTTvZDIz+/5xTiaTNGkzaTKTZNb385lPZs6cOWdlz5lZc/beZ++0NPK/qdd3xKJ4eBrf+dAkvnrl2by4+TDL3i5j3YFqxkZNIrXraB1ZqW5GpCdP9cZgJh4P6Zdd1qN1M+dfi2/yZELV1YSqqwhWVRGqPkGoqgo6jTLcWl5OqLKSYPnhk/eZmhJJHE0bN1K26LMnxeRIT8fhT+Os3y/BnRe/9hhNHIPMrXOK+O2q/fxt6xFONAbYe7wegPEj/JEG2VgnKvKWFHd4XL1smXWlOdC8bVsfRJ2c0rwubpxRyI0zCtlf0UBRTmrkucXPbmbdgWouHjeca88bxbwp+aT7Tl29qAaHnp7FAIx9+SVCtbWEa2sJ1dYSqqklXGf9TZnW3lNPPF58500lXGOvV1uLCQQIVVYSqqzE4Y3v2YcmjkFm9LBU5owbzpu7K1ixoTxyJfmE/HQ8diPtiabYxldy5eaS9817weWieulTBPbujTzXsm07JhzW+RjOUPHwtMj9lmCIrFQPTofw5u4K3txdweLntnDFOSO49rwCLpuQi8+tF7YlA2eGVfV0OmmzZ1G8bFnksTEG09JCuK6OUH09jh5soy/pt8EgdOMMqwVj2dtl7DxiXWd7Tn5GpEG8N1OjZn/yk2QvXMiwj32sw/JwY2OH+Riad+xgz/vmUvvqq70NP+l5XU4e/+QM3l58Bf91/bnMLsmmNRTmpc1H+Nwf1vHipvZqi4E0JJAaOEQEh8+HKzcXb3Fx3H/Y6RnHIHTl5Dy
yUt1sO1zL0dpmwDrjaC23JmaqibGNI1rmh+dz/JFHELcb34SzaVj1b5q3bYtUZ717622Eqqo49KUvk7Fdq7HORFaqh5tmjuGmmWMoP9HEXzaV89LmI1xxTvvoAN99YRullQ3Mm5zP5RNHkJehbSIq8TRxDEJel5ObZ43h0X/spdIe9nviyHTKqq3h0ntzxtHGmZFBybPLweHgxPLlVuLYupXMD14DYDXwQdxPjYe6UVkpLLp0LIsubb8mwRjDy1sOc7S2hdd3HgdgYn46l00YwWUTcnnPWcM6XEOiVLzoUTdIffriYnxu6+3LTvOQ6/dGqqpi7VXVmbugAPfIkfgmTQKgcf066t/8P4KVlZF1vCUlZ7QPdXoiwotfuoQfXH8ucyeOIMXtZMeROn7xxl4W/Go1j/x9T2TdUFirtFT86BnHIDXc72XBBWN4clUpE/LSERGyUnrXq6o7vknWuEHNGzdR9pnP4Im6QjdZJ36Kt+F+LwtmjmHBzDE0t4Z4u7SKN3Ye5/Vdx7n07OGR9R5/cx9L3zrA7OIcZpXkMLskm9HDUk+xZaV6TxPHIPbF942jujEQaSyPnHH0UeJwjcglbc4cmjZvJtzQ0KG3Vbiu7hSvVP3B53ZyyfhcLhmfy7fo2HC+4d0TlFU1UVZlXbEOUJCVwuySHN43cQTXTB3ZzVaVip0mjkEsx+/loQXtwylkpVqJo7aPEoeIMObXjwNw9Ec/ouqJ30Sea7vOQyVO9LAVj948ne2Ha1m9r5LV+6pYs7+SQyea+PP6gzS1BiOJo6axlSf+tZ/zC7OYVpjFMB2MUfWCJo4hJCPSxtH382QP//znqX3pZYIVFdDaSqjh9PN/qPhxOoQpBZlMKcjk9ktKCIUNO47UsnpfFSW57deQvFNWzcMrd0ceF+Wkcl5hFpNGZjBpVAYzi7PxuvQaEnVqmjiGkHSvC4dAQyBEayjcpz1unH4/xcv/jAkE2HPZ5ZjGRkwwiLj0EBqInA5h8qjMDqMog9V76zOXFLOh7ASbD9VQWtlIaWUjKzaUA7DxO1dGEsez7xzE5XAwIT+dopw0PC7tS6Ms+qkfQhwOISPFzYnGVmqbWsnx9+2kTG0ztzn8fsJ1dYQbGnBm6rDhg8nZeeksvsbqLdcaCrPzSB2bDtaw/XAtx+qayUxtH/bkJ6/uoqyqCbAS0ZjsVMbmpjF2hJ+5E/OYWZydkP9BJZ4mjiEm004cJ/ohcbSJJI76ek0cg5jb6YhUb3VmjOG6aQVsLa9l97F6yqob2V/RwP6KBl7bfozMFHckcazaU8HPXtvN6OwUxmSnUjgslTE51t8R6d7TTiqmBh9NHENMVoqbA0BFXUuH0Vj7ktPvJwiE6hvQYfmGJhHhK1dOiDxubg1RWtnA3mMN7DlWz5xx7V2Bt5TXsKa0ijWlJ2/H53aw6b6rItVcz6wtQ0QYmekjL8NHfqYPv85TMujoOzbETB2dxcaDNby2/SizSnJO/4JecPithBSu1y65ycLndjIxP4OJ+SePGHD99NFMHpVJWVUj71Y1UlbdZP2tasTjdHRoG/npq7sor2nu8Pp0r4u8TB+fuvAsPnFhEQBHappZe6CKnDQvw/0ecvxeslLcevYyQGjiGGKum17AktUHeG5DOd+YNxFXPwxJ0Z44tEuusi5SHD6u62rR5taOF4peN72Ag9VNHKlp5mhtM4drmqlrCVJ3rJ6mqHXXv1vNnU+90+G1bXO556R5eHrRbLJSra7EKzYcorohQFaqh8wUN5mpbjJT3GSluMlIceuwLP1AE8cQc35hFiXD09hX0cCbeyq4fELfT+7i8FvdO0OaONRpdB4e/utXTezw2BhrxsrDNc3kRF1Tkp3mYd7kfCobWqisD1BR30Jtc5DjdS0cr2vpMA3vk6tKeefdE13u/8PTRvEz+1qnsqpG7v7fTaR5XaT7XKR5nfi9bvxeJ2leF9dMHRmZWOvQiSYaW4KkeV34fS7SPK7IFAZKE8eQIyJ
cP72AH7+yi2fWlvVL4nD60wEI12niUGdGRMhK9UTOHtrMLslhdqeq1kAwTFVDgKqGQIeziA9NHcWUUZmcaGqlpqmVmsYANU1WB5HoCxyP1bXw732VdOeCouxI4njk77v545qyDs97XA5S3E7OK8zi97fOBCAcNtz867dI8ThJcTvxuq11UtxOUjxOrpqcH+l8sO94PVvLa/G4rOo7r12N53E58LqcTMhPj+yrtrkVpwgelwOXQwbcHOWaOIagj7xnNA+t3M1ftxxhz7F6xo3o20bySFVVgyYOFT8el4P8TKtBPdqtc4q7eUVH4/P8LL19FnXNQRpagtRH3RpagozIaK9uy07zMDY3zX4uREMgSCAYJhAM0xQIRtZrCYZPmYyKctIiieP1ncf53l+6norA43Sw64GrI48/+tgqdh21Pl8iRNqKvC4HN80cw1ftjgvbymu5Z/kmXA7B7XSQnebhsY+/p0flcSY0cQxBIzNTuGFGIU+99S4Pr9zNwzedf/oXxUCrqtRglOFzc3FUb7BT+fpVEztUqxljaAmGaW4NET0QsdspLL19Fk2BEE2tIZrtW1NriKZAmMkF7Z0JzspJ5ZpzR9ISDBMIhQkEQ7SGDIFg+KRqMK/LOmsJhMKEwta+W4Jh6oCmQHtbUE1TK5sO1kQe56b3Txf8zjRxDFF3XD6OZ9aW8cKmchZdWtJlX/3ecqbbVVX1OuyISg4igs/tPKnNxuV09DgZzT0nj7lRk3SdygtfnBO5HwqbyNlOSyjUYUiYKQUZPHfHxQRDYVpDhng1w2h3gyGqICuFT8wuwhi486n11DX3zcCHAI40u6pKR8hVqt85HUKKx0lmqpsR6b7IKNgA6T430wqzmFGUzYVjc/qtC35nmjiGsLvnTeCckRmUVjZy3/Nb+2y72sahVHLTxDGE+dxOHl14Ph6ng2ffOcSeY31zhqBtHEolN00cQ1xJrp+PzhiNMfDzf+w9/Qt6INLGod1xlUpKmjiSwOffOxanQ1ixsZzSijNv0NYrx5VKbpo4kkBhdiofmV5AKGz4zxe77kcei7bG8ZC2cSiVlAZE4hCRH4nIDhHZJCLPikhW1HP3isgeEdkpIlclMs7B7GtXTsDvdfHa9mP8fcfRM9qWM73tjEO74yqVjAZE4gBeBaYYY6YCu4B7AURkErAAmAzMA34uIjqvZS+MyPBx1xXjAfj2c1vPqHuupKSAw4FpasK09l03X6XU4DAgEocx5hVjTNt1/KuB0fb9+cDTxpgWY8x+YA8wMxExDgWfuqiIKQUZHDrRxPde6H2VlYhEdcnVsw6lks2ASByd3Aq8bN8vAKJHGjtoL1O94HY6+OmN0/C6HDyz7uAZVVm5sq3Z31oPH+6r8JRSg0TcEoeIvCYiW7q4zY9aZzEQBJb2YvuLRGStiKw9fvx4X4Y+pIzPS+frV1kDpN3//LaT5kvoKd8ka97q5q19d2GhUmpwiFviMMZcYYyZ0sVtBYCI3AJ8ELjZGNM2jNghoDBqM6PtZV1t/1fGmBnGmBm5ubn9+J8MfrdcVMSEvHTerWrksdd7d22Hb8oUAJq2bOnL0JRSg8CAqKoSkXnA3cC1xpjGqKeeBxaIiFdEioHxwJpExDiUuJwOvjt/MgAPrdzNkn+XxrwN32Tr9c1b9IxDqWQzIBIH8AiQDrwqIhtE5BcAxpitwJ+AbcBfgTuMMb2rW1EdzC7J4ZsfsIaN/vaKrSxffzCm1/smW1VVLTt3YgKBPo9PKTVwDYhh1Y0x407x3APAA3EMJ2ksunQsbqeD776wje+s2MoFRdkUZqf26LVOvx9PcTGB/ftp3r2bFPsMRCk19A2UMw6VILdcVMRVk/Oobwly17INMTWWt7VzaHWVUslFE0eSExEevO5c8jK8rDtQzWeXrKMl2LPkkTKlrZ1DG8iVSiaaOBQ5fi9/uG0W2Wke3th1nDuWru9R8vBNnYrv3HNxjyk87bpKqaFD2nu+Dh0zZswwa9euTXQ
Yg8628loW/no1JxpbmTtxBI99/D14XPrbQqlkISLrjDEzTreefiuoiEmjMlh6+yyyUt2s3HGMO59aT2sonOiwlFIDjCYO1cHkUZn84bZZZPhcvLLtKLc++TY1TTqQoVKqnSYOdZIpBZksvX02w/0e3txdwXWP/out5TWJDkspNUBo4lBdOnd0Js/dcTET89PZV9HAdY+uYsnqAwzFNjGlVGw0cahujR6WyrNfuJiFs8YQCIX59nNbuGvZBq26UirJaeJQp5TicfLgdefy0IJppLidrNhQzryf/ZNXth7Rsw+lkpQmDtUj86cV8JcvzeG8wiwO1zSzaMk6Fj7+Fqv2VmgCUSrJaOJQPTY218+fP3ch931oEpkpbv69r5KFj7/FoiXrEh2aUiqONHGomLicDj59cTH//PrlfPX9Z5Od5mH6mGGJDkspFUcDYnRcNfhkprr54tzx3HZJMYIkOhylVBxp4lBnJNWjh5BSyUarqpRSSsVEE4dSSqmYaOJQSikVE00cSimlYqKJQymlVEw0cSillIqJJg6llFIxGZJTx4rIceDAGWxiOFDRR+H0JY0rNhpXbDSu2AzFuM4yxuSebqUhmTjOlIis7cm8u/GmccVG44qNxhWbZI5Lq6qUUkrFRBOHUkqpmGji6NqvEh1ANzSu2GhcsdG4YpO0cWkbh1JKqZjoGYdSSqmYaOKIIiLzRGSniOwRkXsSGEehiPxDRLaJyFYR+bK9/H4ROSQiG+zbBxIQW6mIbLb3v9Zeli0ir4rIbvtvXGd2EpEJUWWyQURqReSuRJSXiPxGRI6JyJaoZV2Wj1geto+3TSIyPc5x/UhEdtj7flZEsuzlRSLSFFVuv4hzXN2+byJyr11eO0XkqjjHtSwqplIR2WAvj2d5dffdEN9jzBijN6u6zgnsBUoAD7ARmJSgWEYC0+376cAuYBJwP/C1BJdTKTC807L/Bu6x798D/DDB7+MR4KxElBdwKTAd2HK68gE+ALwMCDAbeCvOcV0JuOz7P4yKqyh6vQSUV5fvm/0Z2Ah4gWL78+qMV1ydnv8f4DsJKK/uvhvieozpGUe7mcAeY8w+Y0wAeBqYn4hAjDGHjTHr7ft1wHagIBGx9NB84Hf2/d8BH05gLHOBvcaYM7kAtNeMMf8Eqjot7q585gO/N5bVQJaIjIxXXMaYV4wxQfvhamB0f+w71rhOYT7wtDGmxRizH9iD9bmNa1wiIsCNwB/7Y9+ncorvhrgeY5o42hUAZVGPDzIAvqxFpAg4H3jLXnSnfcr5m3hXCdkM8IqIrBORRfayPGPMYfv+ESAvAXG1WUDHD3Siywu6L5+BdMzdivXLtE2xiLwjIm+IyCUJiKer922glNclwFFjzO6oZXEvr07fDXE9xjRxDGAi4gf+DNxljKkFHgPGAtOAw1iny/E2xxgzHbgauENELo1+0ljnxwnpqiciHuBa4Bl70UAorw4SWT7dEZHFQBBYai86DIwxxpwPfAV4SkQy4hjSgHvfOrmJjj9O4l5eXXw3RMTjGNPE0e4QUBj1eLS9LCFExI11YCw1xiwHMMYcNcaEjDFh4HH66TT9VIwxh+y/x4Bn7RiOtp3+2n+PxTsu29XAemPMUTvGhJeXrbvySfgxJyK3AB8Ebra/cLCrgirt++uw2hLOjldMp3jfBkJ5uYDrgWVty+JdXl19NxDnY0wTR7u3gfEiUmz/cl0APJ+IQOw61CeA7caYn0Qtj66bvA7Y0vm1/RxXmoikt93HalzdglVOn7JX+xSwIp5xRenwSzDR5RWlu/J5Hvik3fNlNlATVd3Q70RkHnA3cK0xpjFqea6IOO37JcB4YF8c4+rufXseWCAiXhEptuNaE6+4bFcAO4wxB9sWxLO8uvtuIN7HWDx6AgyWG1YPhF1YvxgWJzCOOVinmpuADfbtA8ASYLO9/HlgZJzjKsHq1bIR2NpWRkAOsBLYDbwGZCegzNKASiAzalncywsrcR0GWrHqk2/rrny
wero8ah9vm4EZcY5rD1b9d9sx9gt73Y/Y7+8GYD3woTjH1e37Biy2y2sncHU847KXPwl8rtO68Syv7r4b4nqM6ZXjSimlYqJVVUoppWKiiUMppVRMNHEopZSKiSYOpZRSMdHEoZRSKiaaOFRSEZEnReS1RMfRmYi8LiK/TnQcSvWEdsdVSUVEMgGHMaba/qIeZ4y5LI77/xZwuzGmqNPybCBoOg0fodRA5Ep0AErFkzGmpj+2KyIeY42q3CvGmJ6OEKtUwmlVlUoqbVVVInI/1lXK7xURY99usdfxi8hDYk0m1GiPenp91DaK7PVvFpGXRKQB+L49rMPjIrJXrIl99onIgyLitV93C/B94Kyofd5vP9ehqkpE3CLyAzuGgFgT9yzs9L8YEfmCiCwRkToROSgi93ZaZ74df6OInBCRNSJyfj8UrUoiesahktWPscYUKsYatA6gxh4L6AWsoRo+BpRjjU/0tIhcbYxZGbWNHwLfAO6wHwvW4HILgaPAVOCXWMNW3Ic1MN5E4GbgAvs19d3E9yDWUOefwxri5aPAH0TkaKcY7gO+hTX50TzgERFZY4xZKSL5WCMFf8v+68MahjuIUmdAE4dKSsaYehFpAgLGmCNty0XkMuBCrPkN2qq1fmUPEPdFrPGA2vzSGLOUjhZH3S8VkbHAF4D7jDFNIlIPhKL32ZmIpAJfAv7DGNM2RPyDInKBvf3oGJYZYx637z8qIndiJbqVWLPFuYE/GWNK7XW2d7dfpXpKE4dSHV2ANXXwIevkI8KDNYBctJNGZhWRzwC3Y00nmob1GYu1Snicvb9/dlr+BnBvp2UbOj0up30Sn03A34AtIvIq8Dqw3BhThlJnQBOHUh05gBraq5KidW78boh+ICI3YI1Eeg/Wl3wtcAPwQN+H2W1MBjtRGWNCInI11v9yBdYorj8QkRuMMX/px5jUEKeJQyWzAODstGwtkAX4jDGxzt9xKfCO6TiHSlEP9tnZHqDF3l50DO8lxjlFjNXffo19e1BE/gp8GtDEoXpNE4dKZvuBG0RkMlZjdh3wd6z5DJaLyN1Y1T3DgIuA5qj2hK7sBG4TkflYX/AfpL3hPXqf+SJyIVbVV6OJmkQJwBjTKCIPY/XUOk574/h84P09/edE5CJgLvAK1twS47Ea7J/o6TaU6op2x1XJ7AmsmR9XAceBm+xf6NcCy4GfAjuAF4FrsCbDOZVfYk1C9FvgHWAWVm+naM9h9XB60d7n3d1sazHWtKk/w0pCHwc+3qlH1enUYDX0r8BKUr/Bmlf8+zFsQ6mT6JXjSimlYqJnHEoppWKiiUMppVRMNHEopZSKiSYOpZRSMdHEoZRSKiaaOJRSSsVEE4dSSqmYaOJQSikVE00cSimlYvL/fByNmNgxmY8AAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "#plt.rc('text',usetex=True)nn\n", - "#plt.xscale('log')\n", - "#mpl.rcParams['font.sans-serif']=['SimHei']\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "plt.savefig(\"./error_fig/noise1/\"+figure_name+\".eps\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/convergence50L.ipynb b/convergence50L.ipynb deleted file mode 100644 index 2081a04..0000000 --- a/convergence50L.ipynb +++ 
/dev/null @@ -1,986 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/xiezhq/.wanghe_env/lib/python3.7/site-packages/torch_sparse/tensor.py:46: UserWarning: This overload of nonzero is deprecated:\n", - "\tnonzero()\n", - "Consider using one of the following signatures instead:\n", - "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:882.)\n", - " index = mat.nonzero()\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import networkx as nx\n", - "import copy\n", - "import pandas as pd\n", - "import xlwt\n", - "import torch\n", - "from torch import nn\n", - "import torch.optim as optim\n", - "from torch_geometric.utils import from_networkx\n", - "from torch.utils.data import Dataset, DataLoader\n", - "from torch_geometric.data import Data, Batch\n", - "from torch_geometric.nn.conv import MessagePassing\n", - "from torch_sparse import SparseTensor, matmul\n", - "import torch.nn.functional as F\n", - "import matplotlib.pyplot as plt\n", - "\n", - "num_nodes = 5\n", - "num_edges = 6\n", - "n = 100\n", - "m = 300\n", - "k = 60\n", - "train_num = 1000\n", - "test_num = 100\n", - "num_layers = 50\n", - "nnz = 30\n", - "\n", - "#less nnz =5; m = 50; k = 10\n", - "\n", - "def metropolis(adjacency_matrix):\n", - " num_of_nodes = adjacency_matrix.shape[0]\n", - " metropolis=np.zeros((num_of_nodes,num_of_nodes))\n", - " for i in range(num_of_nodes):\n", - " for j in range(num_of_nodes):\n", - " if adjacency_matrix[i,j]==1:\n", - " d_i = np.sum(adjacency_matrix[i,:])\n", - " d_j = np.sum(adjacency_matrix[j,:])\n", - " metropolis[i,j]=1/(1+max(d_i,d_j))\n", - " metropolis[i,i]=1-sum(metropolis[i,:])\n", - " return metropolis\n", - "\n", - "class SynDataset(Dataset):\n", - " def __init__(self, samples):\n", - " self.samples = samples\n", - " self.A = []; \n", - " self.y = []; \n", - " 
self.x_true = []\n", - " self.pyg_data=[]\n", - " self.process()\n", - " \n", - " \n", - " def gen_func(self, num_of_nodes, n, m, k):\n", - " A_all = np.random.randn(m, n)\n", - " x = np.random.randn(n)\n", - " x_norm = 0\n", - "\n", - " while(x_norm < 1e-2):\n", - " x_mask = np.random.rand(n)\n", - " x_mask[x_mask < 1 - nnz/100] = 0\n", - " x_mask[x_mask > 0] = 1\n", - " x_norm = np.linalg.norm(x * x_mask)\n", - "\n", - " x = x * x_mask\n", - " x = x/np.linalg.norm(x)\n", - " \n", - " SNR_db = 50\n", - " SNR = 10**(SNR_db/10)\n", - " \n", - " noise = np.random.randn(m) * np.sqrt(1/SNR)\n", - " y_all = A_all@x + noise\n", - "\n", - " A = np.zeros((num_of_nodes, k , n))\n", - " y = np.zeros((num_of_nodes, k))\n", - " for ii in range(num_of_nodes):\n", - " start = (k*ii) % m; end = (k*(ii+1) )%m\n", - " if(start > end):\n", - " A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0)\n", - " y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), \n", - " np.expand_dims(y_all[:end], axis = 0)), axis = 1)\n", - " else:\n", - " A[ii,:,:] = A_all[start:end,:]\n", - " y[ii,:] = np.expand_dims(y_all[start:end], axis = 0)\n", - " \n", - " x = np.expand_dims(x, axis = 0)\n", - " x = x.repeat(num_of_nodes, axis = 0)\n", - " \n", - " return A, y, x\n", - "\n", - " def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True):\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k = 0\n", - " while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False:\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k += 1\n", - " # print(\"Check if connected: \", nx.is_connected(G))\n", - " # nx.draw(G)\n", - " \n", - " edge_index = from_networkx(G).edge_index\n", - " adj = nx.to_numpy_matrix(G)\n", - " return G, adj,edge_index\n", - " \n", - " def process(self):\n", - " _, adj,edge_index = self.gen_graph(num_nodes, num_edges)\n", - " self.edge_index = 
edge_index\n", - " W = metropolis(adj)\n", - " self.W = [torch.tensor(W, dtype = torch.float)] * self.samples\n", - " \n", - " \n", - " for ii in range(self.samples):\n", - " A, y, x_true = self.gen_func(num_nodes, n, m, k)\n", - " self.A.append(torch.tensor(A, dtype = torch.float) ); \n", - " self.y.append(torch.tensor(y, dtype = torch.float) ); \n", - " self.x_true.append(torch.tensor(x_true, dtype = torch.float) )\n", - " \n", - " edge_weight=torch.tensor(W,dtype=torch.float)\n", - " self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) \n", - " \n", - " \n", - "\n", - " def __getitem__(self, idx):\n", - " return self.W[idx], self.A[idx], self.y[idx], self.x_true[idx], self.pyg_data[idx]\n", - "\n", - " def __len__(self):\n", - " \"\"\"Number of graphs in the dataset\"\"\"\n", - " return len(self.A)\n", - " \n", - " \n", - "def collate(samples):\n", - " # The input `samples` is a list of pairs\n", - " # (graph, label).\n", - " W, A, y, x_true, pyg_data = map(list, zip(*samples))\n", - " W = torch.stack(W)\n", - " A = torch.stack(A)\n", - " y = torch.stack(y)\n", - " x_true = torch.stack(x_true)\n", - " pyg_data = Batch.from_data_list(pyg_data)\n", - " return W, A, y, x_true, pyg_data\n", - "class MetropolisConv(MessagePassing):\n", - " def __init__(self):\n", - " super(MetropolisConv, self).__init__(aggr='add') # \"Add\" aggregation.\n", - "\n", - " def forward(self, x, pyg_data):\n", - " (B, N, D)=x.shape\n", - " out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1)\n", - " return out.view(B,N,D)\n", - "\n", - " def message_and_aggregate(self, adj_t, x):\n", - " return matmul(adj_t, x, reduce=self.aggr)\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - " di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 0, -1))\n", - " gamma_a = di ** power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - 
"\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - "\n", - "\n", - "train_data = SynDataset(train_num)\n", - "val_data = SynDataset(test_num)\n", - "test_data = SynDataset(test_num)\n", - "train_loader = DataLoader(train_data, batch_size=32, shuffle=True, collate_fn=collate)\n", - "val_loader = DataLoader(val_data, batch_size=100, shuffle=False, collate_fn=collate)\n", - "test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-PGEXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.00013569310976890847 tensor(0.0082, grad_fn=) tensor(0.0007, grad_fn=)\n", - "1.0673586157139425e-06 tensor(0.0051, grad_fn=) tensor(0.0026, grad_fn=)\n", - "4.2311402026484757e-07 tensor(0.0045, grad_fn=) tensor(0.0029, grad_fn=)\n", - "2.95198097521876e-07 tensor(0.0040, grad_fn=) tensor(0.0032, grad_fn=)\n", - "2.3132930948577268e-07 tensor(0.0036, grad_fn=) tensor(0.0034, grad_fn=)\n", - "1.8706502791232538e-07 tensor(0.0032, grad_fn=) tensor(0.0035, grad_fn=)\n", - "1.5605867087487013e-07 tensor(0.0029, grad_fn=) tensor(0.0037, grad_fn=)\n", - "1.340106055014445e-07 tensor(0.0027, grad_fn=) tensor(0.0038, grad_fn=)\n", - "1.1711095559974183e-07 tensor(0.0024, grad_fn=) tensor(0.0040, grad_fn=)\n", - "1.060343715053591e-07 tensor(0.0023, grad_fn=) tensor(0.0041, grad_fn=)\n", - "1.1217327799961652e-07 tensor(0.0034, grad_fn=) tensor(0.0040, grad_fn=)\n", - "1.0590313692659947e-07 tensor(0.0037, grad_fn=) tensor(0.0041, grad_fn=)\n", - "1.1929938703580945e-07 tensor(0.0046, grad_fn=) tensor(0.0041, grad_fn=)\n", - "9.874083750638363e-08 tensor(0.0045, grad_fn=) tensor(0.0042, grad_fn=)\n", - 
"1.0848608100744173e-07 tensor(0.0057, grad_fn=) tensor(0.0041, grad_fn=)\n", - "1.0304272435313067e-07 tensor(0.0061, grad_fn=) tensor(0.0042, grad_fn=)\n", - "1.0028954200436146e-07 tensor(0.0066, grad_fn=) tensor(0.0042, grad_fn=)\n", - "9.43396112340622e-08 tensor(0.0065, grad_fn=) tensor(0.0043, grad_fn=)\n", - "2.3531141324362181e-07 tensor(0.0085, grad_fn=) tensor(0.0039, grad_fn=)\n", - "9.269548972845598e-08 tensor(0.0074, grad_fn=) tensor(0.0044, grad_fn=)\n", - "1.2181697917057477e-07 tensor(0.0081, grad_fn=) tensor(0.0043, grad_fn=)\n", - "8.325274514220382e-08 tensor(0.0072, grad_fn=) tensor(0.0046, grad_fn=)\n", - "1.6335093233621478e-07 tensor(0.0095, grad_fn=) tensor(0.0038, grad_fn=)\n", - "1.0907373870772119e-07 tensor(0.0091, grad_fn=) tensor(0.0043, grad_fn=)\n", - "8.66791121101329e-08 tensor(0.0087, grad_fn=) tensor(0.0046, grad_fn=)\n", - "2.4522418096140086e-07 tensor(0.0112, grad_fn=) tensor(0.0035, grad_fn=)\n", - "1.1871409366648322e-07 tensor(0.0110, grad_fn=) tensor(0.0042, grad_fn=)\n", - "9.225522479283654e-08 tensor(0.0105, grad_fn=) tensor(0.0046, grad_fn=)\n", - "1.1317539105881735e-07 tensor(0.0112, grad_fn=) tensor(0.0043, grad_fn=)\n", - "8.304715182205769e-08 tensor(0.0106, grad_fn=) tensor(0.0048, grad_fn=)\n", - "9.202755180126587e-08 tensor(0.0110, grad_fn=) tensor(0.0046, grad_fn=)\n", - "7.932599821458552e-08 tensor(0.0106, grad_fn=) tensor(0.0048, grad_fn=)\n", - "8.904916071195146e-08 tensor(0.0113, grad_fn=) tensor(0.0047, grad_fn=)\n", - "7.511349786604171e-08 tensor(0.0108, grad_fn=) tensor(0.0049, grad_fn=)\n", - "1.0152031437726805e-07 tensor(0.0120, grad_fn=) tensor(0.0045, grad_fn=)\n", - "8.165739728838162e-08 tensor(0.0110, grad_fn=) tensor(0.0049, grad_fn=)\n", - "7.868931950483216e-08 tensor(0.0109, grad_fn=) tensor(0.0049, grad_fn=)\n", - "8.211792579260191e-08 tensor(0.0110, grad_fn=) tensor(0.0049, grad_fn=)\n", - "7.9589018042725e-08 tensor(0.0104, grad_fn=) tensor(0.0049, grad_fn=)\n", - 
"8.341917290266565e-08 tensor(0.0107, grad_fn=) tensor(0.0048, grad_fn=)\n", - "7.779531614460211e-07 tensor(0.0131, grad_fn=) tensor(0.0029, grad_fn=)\n", - "3.891525501842352e-07 tensor(0.0130, grad_fn=) tensor(0.0032, grad_fn=)\n", - "2.7344022557329595e-07 tensor(0.0130, grad_fn=) tensor(0.0035, grad_fn=)\n", - "2.1665335836473787e-07 tensor(0.0129, grad_fn=) tensor(0.0036, grad_fn=)\n", - "1.8238164134487533e-07 tensor(0.0128, grad_fn=) tensor(0.0038, grad_fn=)\n", - "1.5889751070474745e-07 tensor(0.0127, grad_fn=) tensor(0.0040, grad_fn=)\n", - "1.410010659341765e-07 tensor(0.0126, grad_fn=) tensor(0.0041, grad_fn=)\n", - "1.2594992959691353e-07 tensor(0.0124, grad_fn=) tensor(0.0043, grad_fn=)\n", - "1.1545787925726358e-07 tensor(0.0123, grad_fn=) tensor(0.0044, grad_fn=)\n", - "1.0629801550088303e-07 tensor(0.0122, grad_fn=) tensor(0.0045, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_PGEXTRA(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_PGEXTRA, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " 
def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - " \n", - "###main\n", - "model_PGEXTRA = Net_PGEXTRA(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_PGEXTRA.parameters(), lr=1e-4)\n", - "model_PGEXTRA.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.81,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_PGEXTRA.lam[1], model_PGEXTRA.step_size[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.00021758794036941254 tensor(0.0079, grad_fn=) tensor(0.0029, grad_fn=)\n", - "2.240302615064138e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.8076279616252577e-05 tensor(0.0076, grad_fn=) 
tensor(0.0029, grad_fn=)\n", - "1.578619364295264e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.4204976054088547e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.2905917685657187e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.1948518391591278e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.102023611565528e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "1.0261077164841481e-05 tensor(0.0076, grad_fn=) tensor(0.0029, grad_fn=)\n", - "9.538237748074607e-06 tensor(0.0075, grad_fn=) tensor(0.0030, grad_fn=)\n", - "8.874980593986947e-06 tensor(0.0075, grad_fn=) tensor(0.0030, grad_fn=)\n", - "8.314537552678303e-06 tensor(0.0075, grad_fn=) tensor(0.0030, grad_fn=)\n", - "7.804651872334034e-06 tensor(0.0075, grad_fn=) tensor(0.0030, grad_fn=)\n", - "7.310984557307165e-06 tensor(0.0075, grad_fn=) tensor(0.0031, grad_fn=)\n", - "6.911499113471109e-06 tensor(0.0075, grad_fn=) tensor(0.0031, grad_fn=)\n", - "6.423866182103666e-06 tensor(0.0075, grad_fn=) tensor(0.0031, grad_fn=)\n", - "6.026670632763853e-06 tensor(0.0074, grad_fn=) tensor(0.0032, grad_fn=)\n", - "5.848887738579833e-06 tensor(0.0074, grad_fn=) tensor(0.0032, grad_fn=)\n", - "5.355395131800833e-06 tensor(0.0074, grad_fn=) tensor(0.0033, grad_fn=)\n", - "5.686014475259071e-06 tensor(0.0073, grad_fn=) tensor(0.0033, grad_fn=)\n", - "4.121997605466277e-06 tensor(0.0073, grad_fn=) tensor(0.0034, grad_fn=)\n", - "3.946714201674695e-06 tensor(0.0073, grad_fn=) tensor(0.0034, grad_fn=)\n", - "3.784322586852795e-06 tensor(0.0072, grad_fn=) tensor(0.0035, grad_fn=)\n", - "3.6125931757169383e-06 tensor(0.0072, grad_fn=) tensor(0.0036, grad_fn=)\n", - "3.5275268928103287e-06 tensor(0.0071, grad_fn=) tensor(0.0037, grad_fn=)\n", - "3.408440427676851e-06 tensor(0.0071, grad_fn=) tensor(0.0038, grad_fn=)\n", - "3.2827203071406075e-06 tensor(0.0071, grad_fn=) tensor(0.0039, grad_fn=)\n", - "3.210838372069702e-06 tensor(0.0070, grad_fn=) tensor(0.0040, 
grad_fn=)\n", - "3.178013557203485e-06 tensor(0.0070, grad_fn=) tensor(0.0041, grad_fn=)\n", - "3.1164435583264094e-06 tensor(0.0069, grad_fn=) tensor(0.0043, grad_fn=)\n", - "3.1581502355493285e-06 tensor(0.0068, grad_fn=) tensor(0.0045, grad_fn=)\n", - "2.9940633936575978e-06 tensor(0.0068, grad_fn=) tensor(0.0047, grad_fn=)\n", - "2.6514257598364566e-06 tensor(0.0068, grad_fn=) tensor(0.0050, grad_fn=)\n", - "2.4249003018894655e-06 tensor(0.0068, grad_fn=) tensor(0.0053, grad_fn=)\n", - "2.3208290897969164e-06 tensor(0.0068, grad_fn=) tensor(0.0056, grad_fn=)\n", - "2.2447686092164076e-06 tensor(0.0069, grad_fn=) tensor(0.0060, grad_fn=)\n", - "2.1402125618408263e-06 tensor(0.0068, grad_fn=) tensor(0.0064, grad_fn=)\n", - "1.0770074453603229e-05 tensor(0.0102, grad_fn=) tensor(0.0044, grad_fn=)\n", - "5.917972714541975e-06 tensor(0.0100, grad_fn=) tensor(0.0051, grad_fn=)\n", - "4.424496722776894e-06 tensor(0.0098, grad_fn=) tensor(0.0056, grad_fn=)\n", - "3.5720470776823277e-06 tensor(0.0097, grad_fn=) tensor(0.0061, grad_fn=)\n", - "3.003410682822505e-06 tensor(0.0096, grad_fn=) tensor(0.0065, grad_fn=)\n", - "2.5705553170496387e-06 tensor(0.0094, grad_fn=) tensor(0.0070, grad_fn=)\n", - "2.253966165710608e-06 tensor(0.0093, grad_fn=) tensor(0.0074, grad_fn=)\n", - "1.9902491175116666e-06 tensor(0.0092, grad_fn=) tensor(0.0079, grad_fn=)\n", - "1.7467478663490965e-06 tensor(0.0090, grad_fn=) tensor(0.0084, grad_fn=)\n", - "1.5800587682690548e-06 tensor(0.0089, grad_fn=) tensor(0.0090, grad_fn=)\n", - "1.435345627243123e-06 tensor(0.0088, grad_fn=) tensor(0.0096, grad_fn=)\n", - "1.306352896079943e-06 tensor(0.0087, grad_fn=) tensor(0.0102, grad_fn=)\n", - "1.1981159246943207e-06 tensor(0.0086, grad_fn=) tensor(0.0108, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_DGD(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_DGD, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " 
self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " #x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " # self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1)\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "\n", - "\n", - "model_DGD = Net_DGD(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_DGD.parameters(), lr=1e-4)\n", - "model_DGD.train()\n", - "epoch_losses = []\n", - "for 
epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_DGD(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.85,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_DGD.lam[1], model_DGD.step_size[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Origin Methods" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def tgrad_qp(A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - " \n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " # print(A.shape, x.shape, b.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - "\n", - "def torch_soft(x, tau):\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - "\n", - "def opt_distance(x,opt):\n", - " error = 0\n", - " batch_size = x.shape[0]\n", - " num_of_nodes = x.shape[1]\n", - " error = np.linalg.norm(x-opt)**2\n", - " return error/num_of_nodes/batch_size\n", - "\n", - "def hist_nmse(x_hist,opt):\n", - " error = []\n", - " iteration = len(x_hist)\n", - " #print(iteration)\n", - " for k in range(iteration):\n", - " error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt)))\n", - " return error\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin PG-EXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - 
"text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.12945430286414922 \t 0.03877498257876323\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.12919341212205654 \t 0.03792135231036855\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.12892956855315607 \t 0.036975312114520424\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.12932454546207917 \t 0.03428692882305041\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.13535670942114847 \t 0.03977651463635266\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.28984356282424595 \t 0.2055009911531106\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.07603007731333537 \t 0.01747969873558759\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.0754209575523605 \t 0.016576888784421043\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.07474511489435827 \t 0.015627076364378923\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.07282081903735571 \t 0.01413440738264501\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.07776996984987636 \t 0.020874500112481202\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.2379083047128006 \t 0.18987220684992098\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.03819573177033635 \t 0.00600200690444197\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.03734266887396416 \t 0.005260784880437115\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.036400388951013156 \t 0.004593354453934381\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.03370471245225462 \t 0.005417860678436342\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.039106176317045535 \t 0.012957194063392422\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.20462755972516788 \t 0.18358430004518594\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.0057010769521594964 \t 0.00023510867493307507\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.004966532868255058 \t 0.0001221531825622617\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.004312551871828674 \t 0.0001696870020133474\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.0051788497819798066 \t 0.0027576684842756547\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.012682275035356839 \t 0.010422157550945485\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.18336186363271556 \t 0.18174261362729158\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 296609074.4590332 \t 2.671181670624344e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 268811879.1257988 \t 2.411480024764376e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 236052303.18211132 \t 2.1053545944708307e+22\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 61035495.946113765 \t 4.950616375254757e+21\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 2256195.179522583 \t 8.861256753373389e+18\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.18172239572107354 \t 0.18172126849681264\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t inf \t nan\n" - ] - } - ], - "source": [ - "def torch_PGEXTRA(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " 
(batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 + x_12 - W_hat@x_0 - \\\n", - " step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_PGEXTRA(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " \n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.002, 'tau': 0.05}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.14081849129684268 \t 0.04393912575563445\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.14063465843472112 \t 0.04308331741417533\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.14046724949500639 \t 0.042143241898804265\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.14153454898979542 \t 0.03951941047679111\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.14816127124690864 \t 0.04519852388196159\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.3028532438042912 \t 0.21336901522718837\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.0873854390768106 \t 0.02160874662677088\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.08683453568031109 \t 0.020643760508042872\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.08623619616968835 \t 0.01962735824114145\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.08487679048253813 \t 0.017849331852553517\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.09038458539462273 \t 0.024871025446465068\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.2521430211850675 \t 0.19841266223439744\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.048473075912298096 \t 0.008747484318976716\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.04761430168943479 \t 0.007858042963670441\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.04667457598508236 \t 0.00702662037549888\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.04408737857408096 \t 0.007438668178439484\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.04990827955909663 \t 0.015640941483323446\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.21960093778259943 \t 0.19375091610479284\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.01145909606057387 \t 0.0008133959189533542\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.010437648499187276 \t 0.00048301084312158337\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.009463048038069702 \t 0.0004276056721507064\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.009482813228032795 \t 0.0035091731711510196\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.018201654362837983 \t 0.012714877192593576\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.20199511665989303 \t 0.19900221651194624\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 182054662156339.2 \t 7.73563955543455e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 170495559175012.38 \t 7.156468107913534e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 156583601114316.8 \t 6.460200480061337e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 72228189631039.48 \t 2.417844385679781e+33\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 21094823194300.926 \t 3.6234833154091314e+32\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.21715069648031385 \t 0.21713340602996323\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t inf \t nan\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t inf \t nan\n" - ] - } - ], - "source": [ - "def torch_DGD(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " 
(batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 - step_size*tgrad_qp(A, b, x_1)\n", - " x_2 = torch_soft(x_32, tau * step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_DGD(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.002, 'tau': 0.1}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# PLOT" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "for iter, (W, A, y, x_true,pyg_data) in enumerate(test_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, 
pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 200,0.002,0.05 )\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 200,0.002,0.1)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 200,0.005,0.01)\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "writer_error=pd.ExcelWriter(\"./error_fig/noise3/\"+figure_name+\".xls\")\n", - "df_error= pd.DataFrame({'PG-EXTRA':origin_PGEXTRA_error,'DGD':origin_DGD_error})\n", - "df_error.to_excel(writer_error,sheet_name='Origin')\n", - " \n", - "df_feasibility= pd.DataFrame({'PG-EXTRA':pred_PGEXTRA_error,'DGD':pred_DGD_error})\n", - "df_feasibility.to_excel(writer_error,sheet_name='GNN')\n", - "writer_error.save() " - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd4FNXXwPHvzW56QkI6KZCE0BFQihQRFFFQAWnSIdJBBAErqIANRRErINLLjyoICIiiiLwCQkC6dEhIAqSR3rPz/rHJbELaJmwq9/M8+7B7d3bmJsaczC3nCEVRkCRJkiRjmVV0ByRJkqSqRQYOSZIkqURk4JAkSZJKRAYOSZIkqURk4JAkSZJKRAYOSZIkqURk4JAkSZJKRAYOSZIkqURk4JAkSZJKRFvRHSgLLi4uiq+vb0V3Q5IkqUo5fvx4lKIorsUdVy0Dh6+vL0FBQRXdDUmSpCpFCBFszHFyqEqSJEkqERk4JEmSpBKRgUOSJEkqERk4JEmSpBKplpPjkiSBTqcjNDSUpKSkiu6KVInY2tri7e2NmVnp7xuqTOAQQnQDvgI0wFJFUT6p4C5JUqUWFRWFEIIGDRrc1y8JqfrQ6XSEhYURFRWFm5tbqc9TJX6ahBAa4DugO9AYGCSEaGzq68RG3mXFom3s3brf1KeWpHIXGxuLu7u7DBqSyszMDHd3d+Li4u7vPCbqT1lrA1xRFOWaoijpwAaglykvsGPDb7SZd4D5lzO5uXgJWYmJpjy9JJW7rKwszM3NK7obUiVjbm5OZmbmfZ2jqgQOL+Bmrteh2W0qIcRYIUSQECIoMjKyxBd4uFVDFMA5NZ55zQdwZvBwdCkp99VpSapoQoiK7oJUyZjiZ6KqBI5iKYqyRFGUVoqitHJ1LXbHfD4+AT7YZaURXMODdI05Z2OziN28uQx6KkmSVLVVlcARBvjkeu2d3WZSDd1s1eenXeqS8Iec65CksuLr64u1tTV2dna4u7sTGBhIYjkMEQshsLW1xc7ODmdnZ7p06cLGjRvzHffbb7/xxBNPYG9vj7OzMy1atODTTz8lNTUVgNmzZ2Nubo69vT329vbUr1+fSZMmcevWrTL/GipaVQkcx4B6Qgg/IYQFMBDYYeqLDHiyqfr8tEtdko4dIytRLmWUpLKyc+dOEhMTOXHiBEFBQXz44Yd53lcUBZ1OZ/Lrnjp1isTERC5evEhgYCCTJk1izpw56vubN2+mX79+DB48mODgYKKjo9m4cSOhoaHcvGkYNR8wYAAJCQnExMSwbds2bt++TcuWLat98KgSgUNRlExgErAX+A/YpCjKOVNfp1MTT/X5JUcfUtGQdPiQqS8jSdI9vLy86N69O2fPnqVz587MnDmTDh06YGNjw7Vr1wgPD6dnz544OTkREBDADz/8oH722WefZfr06errgQMHMnLkSKOu6+LiwrBhw1i0aBFz584lOjoaRVGYNm0a7733HmPGjMHJyQmABg0a8M0331CvXr185zE3N6dJkyZs3LgRV1dX5s+ff5/fkcqtyuzjUBRlN7C7LK9R09aCOvZaghMy0ZlpOO/si932XdTo2rUsLytJD7ybN2+ye/du+vTpw8GDB1mzZg179uyhQYMGKIpCly5daNq0KeHh4Vy4cIGuXbtSt25dnnzySZYvX06zZs147rnnuHXrFkePHuXUqVMlun6vXr3IzMzk6NGj+Pn5ERoaSt++fUv8dWg0Gnr16sXevXtL/NmqpMoEjvLyxEPerDx0A4B/XevT8O8DFdshSTKR/xo2KrdrNbrwn1HHvfDCC2i1WhwcHHjuueeYMWMG3bt3JzAwkCZNmgD6oPL333+za9curKysaNGiBaNHj2b16tU8+eSTeHh4sGjRIkaMGEFKSgo//fQT9vb2Jeqvubk5Li4uxMTEqJ/18PBQ3x84cCC//PIL6enpfP/99wwbNqzQc3l6ehITE1Oi61c1VWKoqjw9Xt9
FfR7k3hDblASigkMrsEeSVH399NNPxMbGEhwczMKFC7G2tgbAx8ewFiY8PBwnJ6c8waBOnTqEhRnWx/To0YOsrCwaNGjAY489prY3adIEOzs77OzsOHjwYKH9yMjIIDIyEicnJ5ydnQHyzFNs2LCB2NhYHnnkEbKysor8msLCwtThrepKBo57tPV3xkKr/7YE1/Ag0tqB9d9squBeSdKDJfdeg5y/4BMSEtS2kJAQvLwMW7lmzpxJo0aNuHXrFuvXr1fbz507R2JiIomJiXTs2LHQ623fvh2tVkubNm1o0KABXl5ebN26tcT91ul07Ny5s8hrVQdyqOoeNhZaHvVz4uDlKACOuTXkWGQmL4Xdws6rVgX3TpJKz9jho8rGx8eH9u3b8/bbb/P5559z6dIlli1bxrp16wD466+/WLFiBadOneLatWv07t2bxx9/PE9gKUxMTAx79uxh2rRpvPnmm+rdxvz58xkzZgw1atSgX79+ODo6cuXKFe7cuVPgeTIzM7l8+TKzZ8/m9u3bTJs2zXTfgEpI3nEUoFN9/QZCm4xUfqvdmos1fVi76McK7pUkPbjWr1/PjRs38PT0pHfv3syZM4ennnqK+Ph4hg8fzrfffouXlxcdO3Zk1KhRvPTSSyiKUuj5mjdvjp2dHQEBASxdupQFCxbw/vvvq+8PGDCATZs2sXbtWnx8fHBxceHFF19k7Nix9O/fXz1u48aN2NnZ4eDgQM+ePXF2dub48eN4enoWdNlqQxT1za2qWrVqpdxPzfHIhDQiElL5/ddjfHEhDYBaSVH8Pr0zNr51TNVNSSpT//33H40ald+EuFR1FPazIYQ4rihKq+I+L+84CuBqb0kTTwdeGtgJe50+cNyydWHjQpmCRJIkSQaOIthbmTOsqbP6emViTZL+/bcCeyRJklTxZOAoxth+7bBW9CmIg2t4sPWL1SjFLMeTJEmqzmTgKERmdDRXXp7Mqknv45NkSNO+xqY+dzfK5bmSJD24ZOAohLC0IuWP31nm8BCX7AzLcC/VrM0vq3eQWc13hkqSJBVGBo5CaOxssbC2olPYyXzvrfduR0Q1T2ImSZJUGBk4iqB1d+fJmyfytZ9xqcuh/SdIOnKkAnolSZJUsWTgKILW3Z0Gd0OolRiV770N9btwa8ZMWa9DkqQHjgwcRdC6uSKAzqH5l+AGeTTidIqWiM8/K/+OSZIkVSAZOIpg7q5Pq5w7cJjlqvO+ulE3YjdsJOnw4fLumiRVeUWVji2ubOu9bty4gRBCzYSb88gpCdu3b1/GjBmT5zO9e/dm0qRJfPzxx+rxVlZWaDQa9XVOavfc5Wa9vLyYNm1agVlyAwMD0Wq1sgLgg8y65SMA1E6MICA5AgCdAjmJO0+61eekSwDhM2eSFR9fUd2UpCqroNKxxpZtLUhsbKyaDTcxMZEBAwYA8N1337F161b2798P6HNMnThxgk8++YQZM2aoxy9evJh27dqpr8+dMxQazSk3e+DAATZu3Mjy5cvzXDspKYkff/wRBwcH1q5da+LvVOUiA0cRbNu1Q9jYANDp2lG13cPeSn2+qnE3MsJvcWvWrCKTqkmSVLic0rFnzpwpcdlWY3h4eKgZb0NCQpg8eTLff/89dnZ2JT5XQEAAHTp04OTJvCsuf/zxRxwdHXnvvfdYtWpVqfpZVcjAUQQzS0vssvPqdww3lKKMTEzDPHvM6oKTL0fdG5Gw5xfifpQZdCWpNHJKx9rY2JS6bGtxAgMDqVu3Lo888gjdunWjW7dupTrPhQsXOHjwIAEBAXnaV61axaBBgxg4cCAXLlzg+PHjpuh2pSTrcRTD/qkuJOzdi3taPI3Mkrht5cgzTTzI0ilsPq6vDLiqcXda3bnA7Y8+xvrhh7GsW7eCey1J+fm+tavcrnXjk+eMOu7e0rGDBg1iy5YtpS7b6uLikuf14cOH82SB7dixI7/++itDhw4t4VeEWv0vOTmZgQMHMnHiRPW9kJAQ9u/fz/z
583F3d6dLly6sXr2ali1blvg6VYG84yiGXadOeM77lPp//x9L3+jBsZlP8UnfZrzerQHW5hoArjt4stf3UZSUFMKmTUeXklLBvZakquHe0rE5v/iLK9uaewI8JCREPTYqKorY2Fj1kTtoXL58mc8//5yJEycyffp0MjIyStTXEydOkJiYyMaNG/nnn39ISjIsxV+zZg2NGjWiRYsWAAwZMoT//e9/Jb5GVSEDRzE0NWrg0LMnGkdHvByt0Wr03zI3eyvGdfJXj1vZ+FnizW1Iu3iR27PnyPkOSSoFY8u25p4Ar127drHnVRSF0aNH8+qrr/LNN99ga2vLp59+WuL+CSF48cUXadeuXZ7CT6tXr+batWt4eHjg4eHBtGnTiIqKYvfu3SW+RlUgh6ruw/hOdfnxRCg3Y1JIsLBhZePuTD71I3Hbt2PVvBlOgwdXdBclSWXs8FFFMjMzK3HZVmMsWrSIqKgoZsyYgZmZGcuWLaNt27b069ePhg0blvh8b731Fm3btuWtt97i+vXrXL16lX///RdXV1f1mOnTp7N69Wp69epV6n5XVvKOo4QURSHpn6Mk7NuHucaM955vor73i19bLjl6A3Bn7icky9odklRixpZtLYijo2OeYawvvviCkJAQZsyYwbJly7CwsACgcePGTJ8+nTFjxpRqdOChhx7i8ccf57PPPmPVqlX06tWLhx56SL3j8PDwYMqUKfz888/EVMOEqLJ0bAmk37zJzanT2BtrzmGfFvzn24z/e/spJqw7wf6L+tTrDVMi+XzvPDQoaN3c8N28CXN3d5P3RZKKI0vHSoWRpWPLkdbNjcw7t1nX8GkOujchKiWLPw6cYlaPJlhkz31csHbl5yZPAZAZEUHohInokpMrstuSJEkmJQNHCZhZWuLz5Zd0iLqotv28/f+o42zDxCcMS3BXNnyGcHv9WGfq+fOEvfEGik5X7v2VJEkqCzJwlJBNy5a8MN4wzvqPxoWEP/5gYucAGnrYA5CaBQt7vYYO/SbBxH2/y/odkiRVGzJwlEKbji2wR1+HPNrakaPfrcJcyeLz/s3RZO8oP56o4c9BU9XPxCxbzt3Nmyukv5IkSaYkA0cpaDVmPNbAMOF9ON2W6BUraerlwPhcezu+y/IhoYthCeTtOe/LTLqSJFV5MnCU0hNNvdTnQe4NiPruO9JDQpjcpR4BbvrEacnpWXzVrA8WOasXMjMJnfQKKWfPFXRKSZKkKkEGjlLq1MCw0eecsx/JmQp3PvkUS62Gz/o1U+t2/H39Lkdefh9t9pJcXVISN8eOJe369YrotiRJ0n2TgaOU3GtY0cBdPxmeaablQs3apJ49S+bduzxcuyajOxqGrD45GIbFVwsxc3AAICsmhpBRo8i4j52wkiRJFUUGjvvQ2q+m+vz6swOp+8setDX1bdO61sfPxRaAhLRMZv2biPeihQhrawAyw28RMmoUWbGx5d9xSZKk+1ApAocQor8Q4pwQQieEaHXPe28LIa4IIS4KIZ6pqD4WpLWvvsiMxkyQ2rg5ZtlFnwCszDXM69dMrRb458VI9ihueH/9FWj1KcLSr1zl5rjx6HJl2ZSkB8mGDRt49NFHsbW1xc3NjUcffZSFCxeiKAqBgYEIITh61FBE7cqVKwhhqN/cuXNnrKys8lQG3LdvH76+voVec/bs2Zibm2NnZ4ejoyPt27fncDktWgkMDMTCwgJ7e3vs7e1p2rQpb7/9NnFxcXmOu3XrFmPGjMHT0xM7Ozv8/f0JDAzkwoULQP5Sue7u7jz//PP89ttv5fJ1VIrAAZwF+gB/5W4UQjQGBgJNgG7AQiGEpvy7V7DH67mybvSjnJ71NB+80DTf+619nRjRzld9/f7OcyQ1b43n3LlqW8qpU9wcP0HuLpceOPPnz2fKlCm8/vrr3L59mzt37rB48WL+/vtv0tPTAXBycuKdd94p8jy2trZ88MEHJbr2gAEDSExMJDIykscee4w+ffoUmLMqMzO
zROc1xhtvvEFCQgKRkZGsWLGCI0eO0KFDBzVNe3R0NO3btyc5OZmDBw+SkJDAiRMn6NSpU77AkFMq99SpU3Tt2pXevXuzcuVKk/f5XpUicCiK8p+iKBcLeKsXsEFRlDRFUa4DV4A25du7wtW0taBDgAu2lvmTDCvZefjf6NaA2k76O5H41ExmbD1Djeefwz3X/wzJx45xc+LL6FJTy6fjklTB4uLieO+991i4cCH9+vXD3t4eIQQPP/ww69atw9LSEoARI0Zw+vRpDhw4UOi5Jk+ezPr167l69WqJ+2Fubs6IESO4ffs20dHRrFy5kg4dOjB16lScnZ2ZPXs2Op2ODz/8kDp16uDm5sbw4cPVO4SNGzfi5+dHfHw8AHv27MHDw4PIyMhir21lZUXr1q3ZsWMH0dHRrFixAoAFCxZQo0YN1qxZQ926dRFC4OjoyEsvvcQrr7xS4LlykirOnj2bN998E10ZZ6qoFIGjCF5A7ur0odlt+QghxgohgoQQQcb8Rysrqf/9R+irU7nerz9Kejo2Flo+6fuQ+v7vFyLYHBSK09AhuL3+utqefOQIoS9PQpeWVhHdlqRydfjwYdLS0opNOW5jY8OMGTOYOXNmocd4eXkxZswYZs2aVeJ+pKWlsXLlSjULL8A///yDv78/d+7cYebMmaxcuZKVK1eyf/9+rl27RmJiIpMmTQL0dy7t27dn8uTJREdHM2rUKJYuXZonvXpx7O3t6dq1KwcPHgT0Q229e/fGzKzkv5779OlDREQEFy8W9He46ZRb4BBC7BNCnC3gYZJk9YqiLFEUpZWiKK1K8h/NlDLv3uXGkKEk/PILaRcvEvX9EgDa13UhsL2vetycnee4GZOM86iRuE417C5P+vtvQidPRpd9my5Jphb5zbf817CRUY9b776X7/O33n2vyM9EfvOtUf2IiorCxcUFrdZwt96+fXscHR2xtrbmr78Mo9bjxo0jJCSEPXv2FHq+t99+m507d3LunHF7pDZt2oSjoyM+Pj4cP36cbdu2qe95enryyiuvoNVqsba2Zt26dUybNg1/f3/s7OyYO3cuGzZsUIexvvvuO/744w86d+5Mjx49eP75543qQ26enp5q+vWoqKg8pXN37NiBo6Mj9vb2PP3008WeByjzVO7lFjgURXlKUZSmBTy2F/GxMMAn12vv7LZKJTk9kz8vRvD5kTs4TZmitkctWULG7dsAvNmtIf7Zq6yS0rOYvukUWToFl3FjcXllkvqZpAN/ETZ5irzzkKo1Z2dnoqKi8swhHDp0iNjYWJydnfMMtVhaWvLuu+/y7rvvFno+V1dXJk2axHvv5Q1269atUyeQu3fvrra/+OKLxMbGEhERwR9//JGnNriPj0+ec4SHh1OnTh31dZ06dcjMzFQLSzk6OtK/f3/Onj3L9OnT1eM+/vhj9drjx48v8vsRFhaGk5OT+r3JXTq3Z8+exMbGsmDBAnXup6jzAOq5ykplH6raAQwUQlgKIfyAesDRYj5T7p796iCBK46x+MBVbj7+LFYPZQ9NZWQQt30HANYWGr4Y0ELNZXX0RgxLD14DwGXiRJzHj1PPl/jnn4ROkBPmUvXVrl07LC0t2b69qL8bDV566SViY2OLLCn7+uuvs3//fo4fP662DRkyRC0xW9QdS265V22B/q/44OBg9XVISAharRb37E29J0+eZPny5QwaNIjJkyerx82YMUO99uLFiwu9XmJiIvv27aNjx44AdOnShZ9++qlU8xTbtm3Dzc2NBg0alPizJVEpAocQorcQIhRoB+wSQuwFUBTlHLAJOA/8ArysKEpWxfW0YDnLcgEOXbuL07Ch6uu4rVvV1RotfBx5+YkA9b3P9l4k6EYMQghcp0zBeZwheCQdOkzImLFkJSaWw1cgPShcX5lEowv/GfWo9cH7+T5f64P3i/yMa66756I4Ojoya9YsJk6cyJYtW0hISECn03Hy5El1dVFuWq2WOXPmFFkn3NHRken
TpzNv3jzjvyFGGDRoEAsWLOD69eskJiYyY8YMBgwYgFarJTU1laFDh/Lxxx+zYsUKwsLCWLhwoVHnTUtL4/jx47zwwgvUrFmTl156CYBp06Zx9+5dhg0bxtWrV1EUhYSEBE6ePFnoue7cucO3337LnDlzmDt3bqnmR0qiUgQORVG2KYrirSiKpaIo7oqiPJPrvY8URamrKEoDRVGM+5OhnHUIcFGf/9+VKOy7dsXMVj8slR4cTMq/hv/grzwZQAsfRwAydQqT/vcv0YlpCCFwm/oqrq8ahrpSjh8n5KWRcpOgVC298cYbfPHFF8ybNw93d3fc3d0ZN24cn376Ke3bt893/KBBg6hVq1aR55wyZQoajWlX7I8cOZJhw4bx+OOP4+fnh5WVFd988w2gn1vx8fFhwoQJWFpasnbtWt555x0uX75c6PnmzZuHvb09zs7ODB8+nJYtW3Lo0CFss39nuLi4cOTIEaysrHjsscewt7enRYsWJCQksGjRojzncnR0xNbWloceeojdu3ezefNmRo4cadKvvyCydKwJRCSk0uaj3wEw1whOzXqauA/mELt5CwDWDz+Mzw8/oLHT/2CExabw3NcHiU3WL9ntWM+FlS+1UYexoleuJOITw19Wlg0aUHv5MrTOzuX2NUlVnywdKxVGlo6tBNzsrdQiThlZCvsvROLY31DsKeXff7k5bhxK9kSgl6M1Cwa0UN8/eDmKL/ddUl87BwbiMduwtDDt4kWChw0n405EWX8pkiRJxZKBw0SeaWJYPrf7zC2smzXD7c031baU48dJzF6nDfBEAzcm5Zrv+OaPK2z7N1R9XXPgQGp9MheyxyrTr10jeMgQ0m/cKMOvQpIkqXgycJjIc80MY69/XIggOT0T55cCqTk0e6JcCNIuXsrzmald69OxnmF+5M0tZzh63bD+2vGFF/D6Yr6a2yojNJQbg4fIeh6SJFUoGThMpL67vVrAKSUji/0X9LvXaw4cgMfsWdT76wAuuZbcgj454ndDHqG+u/5z6Vk6xq4J4nqUYVVJjW7d8P72G4SVFZCdkn34cJIOHSqPL0uSJCkfGThM6LmHDHcdu86EA2AZEEDNgQPRFrKbvYaVOctGtMbFzgKA2OQMAlccJTLBsAHQvnNnaq9Yrtbz0CUnEzJuPPG7d5fVlyJJklQoGThM6Plcw1UHLkaSmmHclhMfJxt+GN4KS63+P0dwdDKBK46SkJqhHmPz8MP4rluLNicVQUYGYdNfI2bNWtN9AZIkSUaQgcOE6rnb07+lNx/0asJfbzyBlXn+9eS69HTidu3K1/5w7Zp8M+hhteTsufB4xq89TlqmIfhYBgTgu/5/WNStq29QFO589BERXywoMCW0JElSWZCBw8Q+69+cYe18cbazzPdeVkICN0ePIXz6a6ScPp3v/aebePBxb0Mm3b+vRDNt0yl0OkNQMK9VC991a7FuYVjOG71kCeGvvyGTI0qSVC5k4ChHEfPnk5xdzSz2x4Jz7gxsU5vXnq6vvt51+hZzdp7Lc0ehcXSk9orl2HXqpLbF//wzISNHknn3bhn1XpIkSU8GjnLk8Nxz6vP4PXsKzYD78hMBjGhnyMa56nAwC//MW6TGzNoa7+++xXHgALUtJeg4wQMHkZ4rIZskVWaVsXRscWVbC5LTj5xsuHZ2dvTo0QOAnTt34uHhkSfV+fbt2/Hy8iI4ODjPZ4QQ2Nraqq8PHjyolpu1s7PDycmJrl27FtiXP//8EyFEkfm8TEUGjjKiKApnQuNYc/iG2mbdsiXm3t4A6OLjSdy/v8DPCiF4r0eTPHtDPtt7kQ1HQ/Iep9XiMWtWnoJQ6cHB3Bg4iOQT/5rui5GkMlAZS8eWpGzrvb799ls1G25iYiI7d+4EoEePHjz55JNMza69Exsby4QJE1i0aBF16tTJ8xmAU6dOqa9zMua+8cYbJCYmEhYWhpeXF6NGjcp3/VWrVuHk5MTq1atL9L0oDRk4ykBqRhZd5h+
gx7f/x3s7zhERry8JK8zMcMhV8Sx6+Qq1xOy9NGaCL15sTocAQ36qGdvO8Nv5O3mOE0LgPGokXl9+icgut5l19y4hgYHE//KLqb80STKJylo6tjRlW43x9ddfs2fPHvbu3cvUqVPp1KkTPXv2LPF5rK2tefHFF/Nlyk1KSmLLli189913XL58mbLO1ScDRxmwMtfgVkP/g68o+hQkORx691Z3gqeePk3Egi8LPY+lVsPioS1p6lUDAJ0Ck/53giPXovMdW6PbM9RZtRJNdgEXJT2dsFenEr10qVxxJVU6lbV07P2UbS2Ki4sLX331FUOGDOHnn3/m66+/LtV5kpKSWL9+PQEBAXnat27dip2dHf379+eZZ55h1apVpuh2oWTgKCPPN/NUn/94wlC00MLbC7dcqdNjli8vcIVVDnsrc1YEtqGOsw0AaZk6Rq08xomQ/JPg1i1a4LtxAxZ+fmpbxOfzufXOOyhyxZUELPjtEr5v7TLq8fbW/D+Xb289XeRnFvx2qYCr5ldZS8feT9nWyZMn4+joqD7urVjYtm1b4uLiePrpp0tUkxzg888/V/vxf//3f6xZsybP+6tWrWLAgAFoNBoGDx7Mhg0byChkNMMUZOAoI883q4VF9oa+M2FxnA2LU99zGjkS28c7qq9jVhU9Julqb8makY/iZq+/i0lKzyJw+VHOhcflO9bCxwff9f/DppUhM3Lcj1sJHjmSzDKuQyxJxqqspWONKds6fvx49Zwff/yxeuzXX39NbGys+rh33mXs2LEMHz6c3bt355mMN8Zrr71GbGwsN27cwNramosXL6rv3bx5k/379zNkyBAAevXqRWpqKrsK2C9mKjJwlBFHGwuebWr4y2V9roltYWaG26uvqq/j9+4lI6LolOm1nW1YN/pRnGz1qUniUzMZtuwol+8k5DtW4+iIz/JleeZTUoKOc6Nff1Jz/cBJUkWprKVjjSnbunjxYvWcM2bMMKr/y5Yt4+bNmyxcuJCPP/6Y0aNHF1s/vCC1a9fmq6++YsqUKaSkpACwZs0adDodPXr0wMPDA39/f1JTU8t0uEpb/CFSaQ1qU5ufTupzVm0/Gc6MZxtha6n/lls1box1y5akHD+7mlaXAAAgAElEQVQOWVkkHzmCQzGTZfXc7Vkzqg2DlhwhPjWTmKR0hiz9h03j2uHrYpvnWDMLC2p9MhfL+vWI+Hw+KAoZ4eHcGDQYr8/mYd+lS9l80VKlNrVrfaZ2rV/8gYWY26cZc/s0u+9+5C4dqygKzzzzDLa2tpw+fbrI0rG5a3oXdM6c0rH29val6te0adNYu3Ytw4YN4/3338ff35/ExMQiy7YWJzw8nNdff53t27djaWnJ+PHjWb9+PR999BFz5swp8fm6du2Kp6cnS5YsYcqUKaxatYpZs2Yxfvx49ZijR4/Sv39/oqOjcS6DAnDyjqMMtfFzwt9V/ws9MS2T/Rfz3lU4jxqJ04gR1N37S7FBI0cTTwdWjWyDrYU+nUlEQhqDfzhCSHRyvmP1K65G4b3wO8xs9HMkSnIyoZNeIer7JXLSXKpQlbF0bEnKtt5r0qRJefZk5Ax/TZw4kYEDB6pLa4UQ/PDDD3z55ZdGz8nc6/XXX2fevHkcOHCA4OBgXn75ZTw8PNRHz549CQgIYP369aU6f3Fk6dgytuC3S3z1u77+cI/mnnwz6GGTnPfItWgCVxwlNUN/S+3laM36MW2pnT2Jfq+0y5e5OWEiGaGGYlE1evSg1ocfYGaZPz2KVPXJ0rFSYWTp2Eoud2XA/Rci8iQtvB9t/Z3zZNQNi01hUCF3HgCW9erhu3kTNq1bq23xO3fqS9IWM78iSZKUmwwcZaxRLXt8nKwB/XBV0I2ic0llhIWhS0016twd67mWKHhoa9ak9rKlOL74otqWevo0N/q/SMqZs0ZdU5IkSQaOMiaEYFQHP6Z1rc+vUx+nfd2CJ6oy797lziefcrVbd+6uW2f0+R+vX7LgISws8Jg
zG/d33oHsseDMO3cIHjKE2J9+KuFXJ0nSg0gGjnIQ2MGPyV3qUd/dPk+CttwSfvmFmJUrUTIyiFq4iIw7+tQi6cHBZISHF3n+EgcPIXAaOgSfJd9jVkO/K11JT+fWW29z+8OPCk2DIkmSBDJwVBqO/fph4e8PgC4piTsffkjcz7u42q07V55+hrhi1rsXFDwGLjlcaPAAsOvQAb/Nm7CsZ0hfcHftWkJeGklmdP60JpIkSSADR6UhzM2pNWe2+jrht32Ev/aaPtlVZia3Zs0mMzKyyHPcGzzC41KLDR4WdepQZ/0G7HOlU0gOCuJ6335y3qMaqI6rJqX7Y4qfCRk4ytmtuBT+ulRwALBp3RrH/v0KfM/zs3lojchv83h9V5aOKFnw0NjZ4vXVl7hOnQrZQ2mZt2/r5z22biv2mlLlZGVlRXR0tAwekiondbyVldV9nUfu4ygn8akZ9F14iMsRiVibazg5qyuW2gJqkicnc3P8BLVSIIDzuHG4TX0137FFOXg5ktGrgkjL1O/z8HSwYsPYdoXu88iR+NdfhL32Orr4eLWt5pAhuL/1JsLcvER9kCpWRkYGoaGhpBq5Sk96MFhZWeHt7Y15Af8/G7uPQwaOctT5s/3cyP7Lf+VLrencwK3A43QpKYS/8SYJv/+Oy/hxuBaRZqEopQ0e6cHBhE56hbTLl9U261Yt8f7yS7QuLqXqiyRJlZ/cAFgJPdXIXX2+NVeq9XuZWVvj/c3XNDxzutRBA/T7PEo6bAX6eQ/fDeuxf+YZtS0l6DjX+/Un5cyZUvdHkqTqQQaOctS3pbf6fO+528SnFr3sVdxHzp0cBQWPAUsOcy0yscjPmdna4vXlAlynTcs77zF4CHc3b77vfkmSVHXJwFGOGtWqQeNa+n0TaZk6dp2+Vcwn9CK+WMCNAQO5+uxzJB87VuLr3hs8bsWl8uL3hzkfHl/k54QQuIwdk3e/R0YGt999j/CZM43e4S5JUvUiA0c565frrmPbv4UPV+WWfuMGKadOkX7tWqn3V3Ss58rywNZYm+vvYqIS0xm45HCBlQTvZdexI35bNmPZoIHaFvfjVm4MHkx6rqSJkiQ9GIwKHEIIp2Le1wghHiltJ4QQnwkhLgghTgshtgkhHHO997YQ4ooQ4qIQ4pmizlMV9GjumTPyw/Hgu8QmF1/MxayGobZAVlzRdwlF6RDgwtrRbbC30tcEiU/NZOjSfzh0JarYz1rUro3vhvU49DKkf087/x/X+/Yj8cCBUvdJkqSqx9g7jkghhLoESAjxrxDCO9f7LkDJx1AMfgOaKorSDLgEvJ19ncbAQKAJ0A1YKIS4/4H/CuRqb0kLH31czNIpHChkT0duGvsa6nNdQukDB0DLOk6sH9NWrSSYnJ5F4Mpj7Dt/p9jPmllbU+uTT/CY9R5kL+XTxcVxc/wEIr/+BiXLNJl/JUmq3IwNHPcmWAoALIo5xmiKovyqKEpO8eEjQE5Q6gVsUBQlTVGU68AVoE1pr1NZdGloWIb7+3/FpzTXOBgCR1Z8/lKxJdXUy4FN49rhUUO/CSg9U8e4tcfZfrL4oTMhBDUHDcJ37Rq0Htkp4xWFqIULuTl+Apl3ix/6kiSpajPlHIepNoSMBHKKA3sBN3O9F5rdVqU92dCwLPfPixFkZhVe3xjALFcZzKz4OJP0IcDNjs3j21Ene09Hlk7h1Y0nWXP4hlGft27eHL+tP2LTrq3alnTwIDf69iPlbOmqmkmSVDWU2+S4EGKfEOJsAY9euY6ZCWQCxucVN3x2rBAiSAgRFFlMTqeK1qiWPW18nQhs78t3Qx4pNGNuDk2NXENVJrjjyOHjZMPmce2o724H6NNivbv9HF/uu2RUmgqtkxO1ly7FeexYtS0jPJzgQYPkkl1Jqsa0Rh6nkPeO4t7XxZ9AUZ4q6n0hRCDwPNBFMfzWCgN8ch3mnd1W0PmXAEtAv3O8JH0rb0IINo1vZ/Txee44EkwXOADcalixcWw7Xlp
5jJM3YwH4ct9lYpLSmd2jCWZmRQc1odHgNm0q1s2bEf7mW+gSE9UluyknT+Lx7ruY3WdeHEmSKpeSzHEcEUJcEkJcAmyBP3K9PnQ/nRBCdAPeAHoqipJ7W/MOYKAQwlII4QfUA44WdI7qTFPDQX1uqqGq3GraWrBu9KN0rGdIJ7L6cDCTN/xLembRw2g57Lt0we/HLVjWr6+2ySW7klQ9GXvHMadMewHfApbAb9nDNkcURRmvKMo5IcQm4Dz6IayXFUV54JbuaHItx80Zqsq5KStumMtYtpZalo1ozfTNp9h5Sl846ufTt4hLyWDx0JbYWhb/o2JRpw6+Gzdwa9Ys4nfsBAxLdr3mfYpdp04m6askSRVLJjmsBFIzskjL0OFgU3D22Yw7EVzJ9UtX6+5OVkwMfj9tw7JuXZP2RadTmLPzHKsOB6ttzX0cWRHYWl3CWxxFUbi7fj135n4COdUEhcBlwgRcXp5oklQqkiSZXrkkORRCdBRC9C1ug6BUsD8vRjBwyWGazfmVxX9dLfQ4bU1HPD//HO/Fi0CrJfPOHZSMjDKp0mdmJpjdswnTuhqGnE7djKX/4kOExaYYdQ4hBE6DB+O7ZnX+Jbtjx5EZE2PyfkuSVH6M3Tk+SQjxzj1t24EDwGbgshCiYRn0r1pLzdBx5FoM6Zk69p69XeiyXGFhgcPzz5ESFASZmWp7VkzZ7JkQQjC5Sz0+eKGpusv9amQS/RYd4kqE8ZPz1i1a6Jfsts21ZPfvv7neuw/JJ/41dbclSSonxt5xDAdCcl5kL6F9FhgGtAYuAzNM3rtqrn2AM7YW+mGba1FJrPsnpMjj04Pzvp8ZU7Z1wYe1rcM3gx7GXKOPHrfiUum3+DD/GpHfKod+ye4PeZbsZt65Q/Dw4USvWCmr00lSFWRs4KgL5P4T8VngZ0VR1imKchyYCTxu6s5VdzWszJn0ZD319Re/XSoyd1V6SN7AUVZ3HLk938yTFYFtsMkOcLHJGQxZ+k+h5W8LIrRa3KZNxXvxIswcsleIZWYS8emnhE2eTFb8/aVRkSSpfBkbOKyB3P93twX+yvX6MlBwOTupSCMf81V3b8elZPBTIRlzFUUh/ebNPG1ZZXzHkeOxei758luNWnVMXX1lLPvOnfHf+iNWzZqpbQm/7eN6v/6k/vefSfssSVLZMTZwhALNAIQQNdEnHTyc631X8gYWyUiWWg1jOvqrr/ecvV3gcaHjJ6Ak563clxldfpPMzX0c2TSuHV6O1gBkZClM3vAvqw/fKNF5zL288F27hppDh6ptGSEh3BgwkLubNsmhK0mqAowNHBuBr4UQE4FV6PNH5d6I1wq4aOK+PTCebuKuTkIfuxFDVGJavmNSL1zI15ZVzquTAtzs2DKhHfXcDClK3tt+jgW/GZeiJIewsMDjnZl4LfgCMxv93ZaSns7t92Zx66230CUXXdpWkqSKZWzg+Aj9CqqP0GfGHaIoSu4lQIOAXSbu2wPDzd6KVnVqAqBT4LcCUpxnxcbma6uIZa21HKzZNK4dD9dWS6bw1e+XeXf7WbJ0JbtbqNG9O75btmBZzzDPE7d9BzcGDCDt2jWT9VmSJNMyKnAoipKqKEqgoig1FUVprCjKoXve76woyryy6eKDoVvTWurzgoarbNrkzyafVQb7OIyRk6KkU31XtW3tkRBeWX+C1IySbey39PfDd9NGHHr3VtvSLl/hRr/+xO2Sf4tIUmUkS8dWEt2aeqjPD12J4m5S3tVV7m+9icbJSa39DZAVF4eSa19HebKx0PLD8Fb0bO6ptu0+c5vAFUeJT80o0bnMrK3xnPsxtT76EGFpCYAuOZnw6a9x+/330aUXXyVRkqTyY1TKESHEEmNOpijK2OKPKntVLeVIjtGrgvBwsOT5Zp609nVCc09mWl1aGkKjIeXkSTSOjmicnNDUrGmyfFWlodMpfLDrPCv+vqG2NfSwZ9XINrjXKHlW3NQLFwidMoWMXHtWrJo2xevLL7H
wrvKlWCSpUjM25YixgUOHfgPgNQqv9KcoivJkiXpZRqpq4KiqFEXh+7+u8ckewwS+l6M1a0a1wd/VrsTny0pM5NbMd0jYu1dtM3NwwPOTudg/8YRJ+ixJUn6mzlW1HagFZAGLgK6Kojxxz6NSBA2p/AkhGN+pLvP7N1fvksJiU+i3+LBa46MkNHZ2eH25APcZM0Crz8qri4sjdMJEIubPr7DhOUmS9IydHO8N+AJ/Ap8AYUKIeUKIekV9Tip7SmYmEfPnc2v2bDJuF7wHpLz0benN0hGtsDbX7zKPSUpn0JIj7L9YfF31ewkhcBo+TF/bvJZh4UD0D0sJCXyJjIiSn1OSJNMwenJcUZRbiqJ8hD79yIjsf88IIfYLIWSJNxNLSM3gQBFpPZT0dDLuRHB3/Qaif1hK7IaNXO/fn5QzZ8uxl/k90cCN/415lJrZKeJTMrIYsyqIH4+XrphTTqJE244d1bbkoCCu9+5D0pEjJumzJEklU+p6HEKIruhzVD0GuCiKUvIxiTJSlec4snQKE9cdZ//FSDKzdByZ0QU3+7xxOWrx90R++WWBnzf38aHu7l0I84Jre5SXq5GJDF92NE8q9re6N2Tc4/6lmsxXdDqilywh8utvQJe9hcjMDJeXJ+Iyfrys8SFJJlAm9TiEELWEEDOEENeANcARoEFlChpVncZMEJucQXqmDp0CvxSwp0Pr6lrAJ/Uybt4kI7xkOaTKQl1XO7ZObE9DD0P1wk/2XOCDn/9DV8KNggDCzAyX8eOpvXwZGmdnfaNOR9Q33xIyajSZkcYnXZQk6f4YW4/jeSHET8B1oDPwJuCjKMpbiqIUXoFIKpXnmxnG9H8+dSvf+5YBRVf9K4sCT6XhXsOKTePb0dbfUOdr+d/XeXXjSaNrmd/Ltm1b/LZtxaZ1a7Ut+cgRrvXuQ9KhQ0V8UpIkUzH2jmMH0Bx9ypGVgDnQXwgxOPejjPr4wOnWtJaauyooOCbfZkCLYsrFVpbAAfrU8StfasOzDxk2OO44Fc7IlcdITCvd6ihzNzdqr1iOy8SJ5HyjsqKiCBk1moivvpKrriSpjJVkqKoOMAdYW8hjjcl794BytbekhY8+F5ROId8kucbODq27u/raqkmTPO9XVCqSwliZa/hm0CMMa1tHbfu/K1EMXHKYyIT8CR2NIbRaXCe/oh+6cnHRNyoK0YsW61dd3ZGrriSprBi7HNfMiIecnTShLg0N5U3+uJD/l6BlrrsOlwnj8duxnTprVuO/ezcOPXuWSx9LQmMmeL9XE1572lDL/GxYPH0XHeJGVFKpz2vbrh3+27Zi085QnjY5KIjrL7xA4sGD99VnSZIKZuwcx+PGPMq6sw+SJ3IFjgOXIvPVI7fINc+RduUqVvXrY9O6NZb+fmqq8spGCMGkJ+vxad+HyMmmEhKTTL/FhzgTGlfq82pdXam9dCkuk18BM/2PdNbdu9wcM5aI+V/IoStJMjFjh6r+BPZn/3vvY3/24w/Tdu3B1rhWDTyycz3FpWRw9EbeFOqW/rkCx9WqtT5hQOvaLBnWCkut/scvKjGdgUsOc/By6VdGCY0G14kTqb1yRZ5VZ9E//EDw8BFk3Mq/yECSpNIxNnD4ALWz/8398EO/kzwVKLjmqVQqQog8dx3v/HSWpFyTyblXVqVXscAB8FRjd/435lEcrPX7TZLSsxi58hjbT97fj5Ftmzb4/bQN2w4d1LaUEye4/kJvEv78877OLUmSnrFzHGH3PoCHgd3AROB9oH6RJ5FKbHRHPzV9R9jdFE6FGrbL5F5ZlXr+PFmJiYA+g25VmRhuWceJLePbUctBf2eVkaUwZcNJlh68vyJOWmdnfH5Yguu0aZC9MTArLo7Q8RO48+k8lIySpX2XJCmvEtfjEEI8IoT4A9iKfniqnqIonyqKUrrlMVKh6rra8VHvpvi52LJtYgfa13VR39PWrKkGD42zM0pqKhdbteZi8xZc69GjorpcYvXc7dk6sT313Q1
ZdD/c9R9zd5duo2AOYWaGy9gx1Fm9Cq2HYSlwzIoV3Bg6lPSbN++r35L0IDM6cAghfIQQa4FjQCzQRFGUVxRFiSqz3kn0ecSbPVM60tizRr73vL74AqfAQLy//QaNkxO6FH16D118fJUqflTLwZrN49rT2rem2vb9X9d4bfMpMrJKt1Ewh03Llvht24pdp05qW+qp01x/oTdxO3bc17kl6UFl7KqqT4CL6BMbPq4oSh9FUS6Xac8klZV5wSudrRrUx/2tN7F5+GGEmRkaJ8Mv3qwKqEd+PxxszFkz6lG6NjbsT9n6bxijVgXlmdspDW3NmngvWojb668b0rQnJRH+xpuEvfGGOswnSZJxjL3jeANQgERglhDi14IeZddNyRhaJ2f1eWZU5doEaAwrcw2LhjzCoDa11ba/LkUy+IcjRCfe30ioMDPDedRIfNevx7yO4fzxO3Zy/YXepJw8eV/nl6QHibGBYzWwCQhFv3qqsIdUhiISUtlwNITToQXnlNQ6GwJHVkzVCxwAWo0ZH/duypQuhlIvp0Lj6LvoECHRyfd9fuuHmuK/dSsOffqobRmhodwYMpSoxYtRsrLu+xqSVN1pjTlIUZTAMu6HVIylB6/x4a7/ABjatjbNvB3zHaPJFThSTp/B7vGquSdTCMHUrvVxtbfkve1n0SlwIzqZPov+ZkVgGx7ydriv85vZ2uL58UfYdmjP7dlz0CUkQFYWkV9+RdLfh/Cc9ynmuYpHSZKUV4lXVUkVo4mn4ZflvvMRFFRHJfcdR9S33xL3865y6VtZGdq2DouGtsy3UfCvIgpclYTDc8/ht20b1g8/rLYlHzvGtRd6E/+rHHmVpMLIwFFFtPatiWN2Vb3b8amcDYvPf9A9wcSmTev8x1QxzzTxYN3o/BsFt/1buoqC97Lw9qLOmtW4vPyymq5EFxdH2OQp3HpvFrrk+x8ek6TqRgaOKkKrMePJBoad5L+dz1/gybpVyzyvb44aTejkKQDc3bCR2x98WGU2B+bWyle/UdAze6Ngpk5h6sZTfH/gaoF3XiUltFpcX5lEnTWr0XoahqhiN23ieu8+cuJcku5RKQKHEOIDIcRpIcTJ7BVantntQgjxtRDiSvb7j1R0XyvSU7mWqv56/k6+9+07d6bm0KFY+PsDkHb5MqkXL5By8iS3Z8/m7rp1RH33Xbn115T0GwU75KkoOHfPBd7/+fx9bRTMzaZlS/x/+gn77t3UtvTgYG4MHqKv81GF9sZIUlmqFIED+ExRlGaKorQAfgbey27vDtTLfowFFlVQ/yqFx+u7YqHR/ye7cDshTz1vAGFujsc7M/HbslltywgNI3r5CvV17KZN5dPZMuDhYMXGce141M9QUXDF3zd4ZcO/pGWaZjWUpkYNvL74gloff4yZra2+UacjetFirg8cSOqlSya5jiRVZZUicCiKknvA3hb9nhGAXsBqRe8I4CiEeGCXu9hZank0VxnWgup0AJjZ2KB1yx7Wysoi7eLF8uheuXCwNmfVyLwVBXedvsWI5UeJTzVNDiohBI59euO3fXueErVp5//jRt9+RC9bLpftSg+0ShE4AIQQHwkhbgJDMNxxeAG5kwqFZrc9sJ7MlTF3fyGBA8CijqHaXnpwcJn2qbzlVBQMbO+rth25FsOLiw9zOy7VZNex8Pai9qqVuL31JsLCAgAlI4OIzz4jZEQg6aGmmaCXpKqm3AKHEGKfEOJsAY9eAIqizFQUxQdYB0wqxfnHCiGChBBBkZGmWa5ZGT15T2XAPWdusebwDcLvGbay8K1DQZzHjTPJhHJF05gJZvVozJvdGqptF24n0HfRIa5EJJjsOsLMDOfAQPy2/ohV48Zqe3JQENd79iJm3ToU3f3l05KkqqbcAoeiKE8pitK0gMf2ew5dB/TNfh6Gvu5HDm8K2aGuKMoSRVFaKYrSyjVXIZ/qpo6zLXVdbdXXE9ad4N3t5/LtJs99x5Gb85gxCCHKtI/lRQjBhM51+eL
F5mizSwqGxabQd9FhjgebNleXZUAAvhs34DJxopqqXZeczJ0PPiR4+HDSrl836fUkqTKrFENVQoh6uV72Ai5kP98BDM9eXdUWiFMU5YEv5davpQ+D2viodSwAQu/ec8eRq15HDo2LCxo723ztVV2fR7xZFtgaGwv9L/S4lAwG//APv57Lv2T5fghzc1wnv4Lv+v+pK9cAUoKOc/2F3kQvXSrL1EoPhEoROIBPsoetTgNPA1Oy23cD14ArwA/oi0Y98CZ0rsvcPs14qYOv2nZv4LBp3QbMzfO0WdSuTXXVqb4rG8a2xcVOPxeRlqlj/NrjrPvH9PM71s2a4bdtK87jx6l3H0paGhGfz+fGgIGkVqPFCJJUkEoROBRF6Zs9bNVMUZQe2RUGyV5N9bKiKHUVRXlIUZSgiu5rZeJd00Z9Hno37w5njZ0tNo8Ytr04vPACblNfLbe+VYRm3o78OKE9dZz13xedAjO3neWLXy+afF7HzNISt1dfxW/LZiwbN1LbU8+d43rffkR+/XWVqokiSSVRKQKHVDreNa3V5/fecQCGJIcaDUn//EPEgi+5+kw3YtatK68ulrs6zrb8OKE9zXIlQvz6jyu89eMZMu+zKFRBrBo1wm/jRlynTVNXXpGZSdTCRVx/oTdJR46Y/JqSVNFk4KjC8t5xpOT7q7pG9254ffUV9Y8cpubAgaScOEF6cDAZ4eHl3dVy5WJnyfoxbelU37BIYmPQTcauOU5yuunnIIS5OS5jx+D30zasc93lpV+7RkjgS4S9/gaZ1Xiln/TgkYGjCqtpY65OCCemZRKXkncDnLmnJzWeeRqNvT1aZ8PGwazoqlUdsDRsLbUsHdGKvo94q21/XIhg8A//EJNUNkNIlv7+1Fm7Bvd33jHsOgfid+7kavdniVm7Tm4clKoFGTiqMCFEscNVOTS5qwNW0SJPJWWuMePz/s14+QnDCrOTN2Ppt+gQN2PKJuutMDPDaegQ/Hfvpsazz6rtusRE7nz4Idf795dJE6UqTwaOKs6niAny3B60O44cQghef6Yh7/dqQs72lWtRSfRZdIizYXFldl1zdze8vphP7eXLsPD1VdvTzv/HjYGDCHvt9Wo/ZChVXzJwVHFG33E4577jeHACR47h7XxZOPgRLLKLQkUmpDFwyRH+73JUmV7Xtn17/HZsx/XVKQhLS7U9/uefudr9WSK++gpdUlKZ9kGSTE0Gjiquvoc9LXwc6dXCE3/Xwjf3aZ1y33FEV4u0IyXV/aFarBnZBnsrfcXkxLRMXlp5lO0nC0xGYDJmFha4jB+P/66fsX/6abVdSUsjetFirnbrTuyPW2XqEqnKENXxF0irVq2UoCC55eNeFx5+BCVFf1dSP+gYGju7Cu5Rxbh4O4ERy49yO96QEHHms40Y87h/EZ8yneRjx7gz9xNSz5/P027ZoAGuU6Zg90TnapMWRqpahBDHFUVpVdxx8o7jAXLvXceDqoGHPVsntqeemyFwfrT7Pz4wYVGooti0bo3vls3UmjvXkP4eSLt4kdCJE7kxcCBJhw49kHeFUtUgA8cDJM88xwM0QV4QT0drtoxvTxtfQzBd9n/XmbLxpMmKQhVFmJnh2PsF6v6yB5eJExHWhrmq1FOnCRk5ipDhI0g+caLM+yJJJSUDxwNE42DYTZ0VF1vEkQ8GBxtzVo9qQ7cmhqJQO0+FE7j8mMmKQhXHzMYG18mvEPDbr9QcPgyRK79Y8rFjBA8eQsjoMSQdPSrvQKRKQ85xVAP7L0RwPPguwTHJjOnoRzNvxwKPSz5xAl1iIpoaNbCoWxeNvX2Bxz1osnQKs3ecY80RQ0LEhh72rBrZBvcaVkV80vQybt0iatFiYrduhXsy7Vo3b47zmNHYPfkkwkz+zSeZnrFzHDJwVANTN55k27/6lUFz+zzEoDbVNwtuWVEUhYV/XuWzvYbMtl6O1qwe1Ya6ruW/iCA9JISo774jbsdOuOf/UQt/f5xHj8bh+ecM+bE
kyQTk5PgDpLaTYRNgcHTZ7Iiu7oQQvPxEAJ/1a4YmT1GoQxwPvlvu/bGoXfaBR9UAABwYSURBVBvPTz/Ff/cuHPv3zzOElX7tGrdmzODKU12J/O47mQdLKncycFQDOWnEAUJi5Gay+9G/lQ9LR7TC2lyfAyw2OYMhS4+w7/ydCumPpZ8ftT54n7q/78N59Kg8ObAyIyKI+uZbLj/ZhbDpr5F84oScB5HKhQwc1UDuwGHMHYeiKCiyVkShnmjgxoaxbXG21Q8DpWboGLsmiA1HQyqsT+Zubri99hoB+//Addo0NK4uhjczMojftYvgwUO43rMn0StWkvkAL7eWyp4MHNVAbSfDX6Eh0cmF/tWZdPQolzp25GKz5tycNKm8ulclNffRF4XKGQbUKfDW1jN8ue9Shf5Vr6lRA5exY6j3++94zv88Txp3gLTLV4j49FMud+rMzUmTSPj9d1lQSjI5GTiqARc7CzW9ekJaZqE5q4TWnKzIKJSMDLLiyi7BX3Xh66IvCtXUq4ba9uW+y8zYVjZFoUpCWFjg8Nxz+P5vHX7btuLQry/CxnDnSWYmift+J/TlSVxu34HwN98i4c8/5Z2mZBIycFQDQgha5drIlrPC6l4aR8M+Dl2sDBzGcLW3ZMPYdnSsZxgaWn9UXxQqMc30RaFKw6pRIzw//JB6f/1FrY8+zHcXoktMJG77dkLHT+BSh8cIf+ttEvbvR5dSeFJMSSqKDBzVRN9HvNTnW46HFjickncDoAwcxrKz1LJsRGt6P2z4Hv9xIYJ+iw4RHlt5fvlq7Gxx7NsX3/+tw3/3bpzHjsXcxyfPMbqEBOJ++onQCRO51LYdIWPGErNmLenBwYWcVZLyk/s4qonUjCxaf7SPhFT9X8Ebx7blUX/nPMcoGRlceKiZ/oWZGQ3PnpEbyUpAp1P47NeLLPrzqtrmam/J0uGtaO5T8KbLiqYoCqnnzpOw9xfi9/xCRmhoocea16mN3WMdsWnTBptWLdE6Oxd6rFQ9yQ2AD1jgAJi57Qzr/tGv/HmjWwMmdg7Id8zFR1qiS9avvKp/7KjcPV4Km4JuMmPrGTKzEyJamZux4MUWdH+oVgX3rGiKopB69hzxv+wh8cAB0q9cLfJ4C39/bFq2xKZ1K2xatkTr6Smz9lZzMnA8gIHjfHg8By5F0rOFJ16O1gUec/nJJ8kMvwVA3X2/YeHtXeBxUtEOX41m/Nrjeeq8v/5MAyZ2rltlfrlmhIWRePAgiX8dJOnwYTXlfmE0Tk5YNWmCVZPGWDdtilWTJmg9PKrM1ysVTwaOBzBwGONa7z6k/fcfAL5btmDdtEkF96jquhaZyKhVQVyPMmy67NfSm497P6RWGqwqdOnppAQFkfTPUZKDgkg9fRolo/hEjxonJyzr1cMyIADLgLpYBgRgERCAtmbNcui1ZGrGBg5teXRGqjxkhlzT8Xe1Y9vE9oxbc5x/ruvT1G85HsrNmGQWDW2Jk23VySNlZmGBbfv22LZvD4AuLY3U06dJDgoi+dgxUk6fQZeYmO9zWTExJP/zD8n//JOnXePkhEXt2pj7+GDh4425d86/3mjd3eXcWhUn7zgeMKFTXiVh714AvL6YT41nn63gHlV96Zk6Zm47w+bjholnL0drlgxvSRNPhyI+WXUoOh3pwcGknjtP6tmzpJ47R+r586Wqly7MzdHWqoW5mxtaNze07u76f91cMc9+rnFyxszWRg6DlTN5x/GAuxKRyPaTYTSqVYNnc03aamoYNrNlxcdXRNeqHQutGfP6NcPf1Y5Pf7kAGBIkzuvXnJ7NPSu4h/dPmJlh6eeHpZ8fDs8/B+iDSUZoKGlXrpJ25QrpV6/on1+7VuR8iZKRQUZICBkhxaRwMTdH6+iIpmZNNDn/1nRE4+iItmZNzOxrYGZni8bODrOch60dGjtbhI0MOmVJ3nFUQ9v+v707D4+qvBc4/v3NTJIhiwRIQpBFwqoIKgiIG3BblKValLohrUtRbx+
Xutai+KhXb11uN6tVK4i1WluwV3Gr9VpxAVFAFAKIsgUkgZCwJCH7bO/945xMZgZCMkBmhszv8zzznHPeOTPzyzsn85vznnfed1UJty8oDG6P6JPNuEF53DphIN4dOwg0NuLs3Bln586IS787HE0frC/jtgWrw34c+J/j+nH3xBODo+52dCYQwFdaiqdkB96SYjzFxXiLS/CUWEv/vhjMPikSkkzScaS5Ebcbh9tepqXZ22lImhtxp+FwdwrfTksDlwtJSUFcKdYypWm7eUkL9+F0HnPJSy+OJ3HiKK9uYMwji4icPnvRnePiMrdEstlcXsMNL62kKOSi+bkDc3hq+nCy04+d6x7txV9Ti69sF76yMrzl5fjKd+MrL8dXVoavvBzv7nL8FZWt9vI6Jrhc1vUch8NaOp3B7eB6U4IJ2cYhiMPZ+j4iIA4QAYeACCIOcm66iU7DhkYdrjZVJbG8LDdnD8hhyaY9YeWbyqo1ccTAgLxM3rj5bG6bv5oPvy0HYMmmPUx9einPzjidIccf18ozdGzOzAycmf1J69//kPsFGhrwV1bir6iwbpWV+Coq8FdU4q+sJFBdjb+2hkBNLYGaGgI1NcHthEk6Ph9N399i+RW9y5XT2/X5NXF0UBcP73lA4timkzzFzHHuFJ6/aiS//2AjT324GbCGvL/omaU8eOHJTB/d+5hrxog1h9uNIz+flPz81neOYHw+ArW1djKpxTQ2Eqivt5YNDZiGRkxjA4GGRkxDvbUMbjdY+3g8GJ8P4/VYCcDjtbZ9Poy3ad1rdVv2RpR7veD3t0OttJG0b681TRwd1EWn9eSb0v3MXbI1WPbdXqvpxBiDt7iYhvXfcNykifEKscNzOIQ7zx/Myccfxx2vFlLn8ePxBbh34VqWb93LIxcPIyNN/wXbg7hcwet4Ka3v3i5MIAB+v7UMBDD+AAT8GL8fmspC9wnb1x+yNPbjAmFLjMEEjDW1sAnY2wEw4B5yUrv+bXrUdlAOhzD7B0M4q38O1774BQDb9tRhfD62TJocHLMo/fPP9Mda7WzS0B4M7J7FTa98xbe7qgF4c/VO1u6o4pkZIzgxP7mbrjqq4LWNeAfSDvRXOB3cgLxMJpzUnevOKeDyUb0Rlyts8Lq6JO5EEEv9czNZeOPZXD6yebTaot21TP3jUhZ8sV2nfFXHlIRKHCJyp4gYEcmxt0VEnhSRzSKyRkRGtPYcKlzvruk8f/VI7rtgCBfZw4Knjx4dvL/uiy/iFVrS6ZTq5PFLTuF3l50anNO80Rfgl6+t5Za/r6KqrvUhPpRKBAmTOESkN3A+EPqroMnAQPt2A/BsHELrcNJHjwqu163QxBFr00b04u1bzmZQ9+Yebu+sKWXiE4tZunnPIR6pVGJImMQB/B64m/Bea1OBl4xlGZAtIok9dvUxoNPwEVY/cKBxwwZ8FRVxjij5DMjL4o2bzuaKUc1NV7v2NzDj+eU89PZ6Grxx7JGjVCsSInGIyFRghzGmMOKunkBxyHaJXaYOQ6PPj9cfwJmZgbtpVFxj2POMnsjFQ3qqi8d+dArP/SR8QMQXlm7lwqc+5avtmtBVYopZryoR+QA4WIfs2cC9WM1UR/L8N2A1Z9GnT58jeaoOZ3VxJfNXbOfVlcUEDEw6OZ9Hp1/JrsI1AFS8/DIOdxpZEybQ6dRT4xxt8pl4cj7D+2Qz67W1wR8Mbiqv4UfPfsY1Z/XlrvMHa7ddlVBidsZhjJlgjBkaeQOKgAKgUES2Ab2Ar0QkH9gBhE6a3MsuO9jzzzHGjDTGjMzNzW3fP+YYU1HrYf4XxcEhSN77ehfrh55FxrixwX32zn2e0gf/K04RqrwsN/OuHsmvLh5KeqrVjGgM/HnpNiY+sZglm3bHOUKlmsW9qcoYs9YYk2eM6WuM6YvVHDXCGLMLeAu4yu5dNQaoMsaUxjPeY9HYQblMGRZ+sldYUkWPhx7CFfKrXP0lc3yJCDPOOIH3bx/L2EHNX35KKur
5ybwV3Dp/FbuqGuIYoVKWuCeOVryLdUayGZgL3BjfcI5NTofwzIzTeWzasGDZmuIqUrp3p98bC8l/4H6yzpuAe9iwQzyLipVeXdL5y7Wj+N1lp5Kd3vy75zdX7+R7v/2YZz/eQqNPL56r+NHRcZPI+p37mfLkEgCO7+zms3u+H+eIVGt2Vzfy0DvrebtwZ1h5QU4G918whPGDc/VMUR01bR0dN9HPONRRNKh7Ju4U6y3fWdXA7urGOEekWpOblcZT04fzt+vPCPvdx9Y9tVz74hdMn7tMe1+pmNPEkURcTkfYVKZrSsLnHG/csoW98+axc/ZsKha8Guvw1CGc1T+Hf/78XO6/YAhZ7uYeVsuK9jHtmc+44aWVbCyrjmOEKplo4kgyp/RqThyFJVVh9zV8/TXlv/4NVa+9Tu2nn8Y6NNWKFKeDn55TwEd3jefKM/qEzSj4/voyJj2xmJ//fRXrd+qUwKp9aeJIMqGJY+W28Ck8Uwv6Bdcbi4piFpOKTk5mGo9cPIwP7hjHhSHzmQcMvFW4kylPLuGaP69gedFeHTxRtQtNHElmTL9uuBzC2EG5XD6qd9h9qQUFwXXP9u3WZDQqYRXkZPDU9OG8c8s5jB8c/tuljzfs5vI5y5j27GcsXFWiQ5ioo0p7VSWhqjovndMPPr3NpnHj8ZWVAdDvX++SFpJMVGJbW1LFnz7ZwrvrSon8t+6SnsKlI3tz5eg+9M3JiE+AKuFpryrVopaSBkBa/5Dmqg0bYhGOOkqG9erM0zNG8OGd45k+ujepzuZ/74o6L3MWFzH+Nx9zxZzPmb9iuw7jrg6bJg4Vxj3slOB61TvvxDESdbgKcjJ4dNopLJ31PX4xcTA9szuF3b+saB+zXl/LqF99wA0vreSdNTupbtAkotpOm6qSXIPXz1ffVXDWgBwAGou2UjRlinWny8XAjz/ClZMTxwjVkfIHDJ9sLOevy7bz0YbyA5qxAFKcwph+3fjeiXlMOKk7vbumxz5QFXdtbarSxJGkAgHDnCVFzPt0KxW1Hj65+z+C30y3zfgx9V9+CUDeL+6i28yZ8QxVHUXl+xt4e00pb67ewZqI7tih+uVkMKZ/N8b068aZ/bqRm5UWwyhVvGji0MTRqsuf+5zlW60uudNH9+bRaVYzVeXrCym9914AnDk59HtjoZ51dEBFu2t4q3An739dxvrSQ//2o39uBsP7dOHU3tmc1iubwflZpLq0pbuj0cShiaNVizfu5qoXVgS3n79qJBOGdCdQV8fmCefh37ePzPHjOf6xR3FmZ8cxUtXedlbW8+G35Sz6poylW/bi8QUOuX+qy8GJ+VkMzMticH4mA7tnMbh7Fj06u3XsrGOYJg5NHK0yxnDDy1/y7/VW99su6Sm8fcs59OqSTs2ST2lYv55u11+HOPSbZTJp8PpZtb2Sz4v2sqxoL6u3V+LxHzqRNMlKc9EvN4NeXdPp3SWd3l070cde75HtJs3lbOfo1ZHQxKGJo00qaj1M/sMSdu235nk4MT+Lf/zsTLLcB++y6/nuOzCG1L59Yxiliqd6j591O6soLK5kdXElhSWVFO+rP6znyk5PIS8rjbwsN3lZaeQeZ63nZqXRJT2F7E6pdO6UQudOKWS5XTgcevYSS5o4NHG02cpt+5g+dxlev3UsTDgpjzk/GXnAP62/poZtl12Ob88eevz3w2Sdd542SySpfbUeNuyqZmOZddtUVsOGsmqq6o9et14R6wymc7qVSLI7pZKR5iQ91UWnVCfpKU7SU510SnXZSycZ9ro7xUmqy0Gay0GK00Gqy0GKU0h1OUgNbjtwOUSP4RCaODRxROV/vyzhrn8UBrdvnzCIWycMDNun5LbbqX7vveB2+hlnkHfH7TpPuQKsps/d1Y18t6+O4n11bN9XR/G+eor31VFcUUfZ/obg9MWJQsQaPDLN6SDFZSUSl0NwOASnQ3CKvXQIjpD1pvscDuxtB04huJ/LaS0dIoiAAA57RQgvE7H
iwC53ROwjwecIeZxDsB8SLA993MXDe9IvN7PlP7zF+mhb4nC1toNKDpec3ouNZdXMWWwNbvjEoo30zUln6mk9g/t0mzmT+lWrgkOS1C1fzvafzmTAJ5/gzNRhLJKdiJB3nJu849yM6tv1gPv9AcPe2kbK9zeyu7qR8uoGyvc3Ul5tbVfVe6ms97K/3ktVvZeaRl+7x2wMeHwBqzNAB5qe5vQTuhxW4mgrTRwq6O6Jg1m3o4rPtuzFGNgf0ezQadhQCt5YyJ6n/kjFggXg99P1mms0aag2cTrEvrbhbtP+Pn+A/Q0+quxEUlXvpa7RR63HT73HR53HT53HT73XT529Xe/xU+vx0+Dx4/FbCcHrD+DxB/D6AsEyjz+A12/wJ9op0FHS3s1v2lSlwlTUerjy+eXMPKeAS07v1eJ+jVu3snfePLrPmoUzs/2+2SjVnvwBg9cfoNFOMF5/AH/AEAiA31iJJWAMPr+19AdMsNzaz9r2Na037W+vGwMGe2kgYAwGIKQ8ELoPgDFWmb1v0+Psuw76OLB+1Nu0/0XDj+eEbtF/odOmKnVYumSk8vbNZ+NyhnfBbfT5uf6lL+mZ7QaERp+fmkEXMW1bNZOGauJQxybr+oR1MV21nSYOdYDIpAHwbWk1izfuPqD85OM7M2loLKJSSiUK/WWXapPCiPnJmxzN7pdKqWODnnGoNpk0NJ+czDQq6jwEDLhdDjLSXAzM02YqpZKNJg7VJnlZbqYM6xHvMJRSCUCbqpRSSkVFE4dSSqmoaOJQSikVFU0cSimloqKJQymlVFQ0cSillIqKJg6llFJR6ZCDHIrIbuC7w3x4DrDnKIZztCRqXJC4sWlc0dG4otMR4zrBGJPb2k4dMnEcCRFZ2ZbRIWMtUeOCxI1N44qOxhWdZI5Lm6qUUkpFRROHUkqpqGjiONCceAfQgkSNCxI3No0rOhpXdJI2Lr3GoZRSKip6xqGUUioqmjhCiMgkEdkgIptFZFYc4+gtIh+JyHoR+VpEbrXLHxSRHSKy2r5NiUNs20Rkrf36K+2yriLybxHZZC+7xDimwSF1slpE9ovIbfGoLxF5QUTKRWRdSNlB60csT9rH2xoRGRHjuH4tIt/ar71QRLLt8r4iUh9Sb3+KcVwtvm8ico9dXxtEZGKM41oQEtM2EVltl8eyvlr6bIjtMWaM0ZvVXOcEtgD9gFSgEBgSp1h6ACPs9SxgIzAEeBC4K871tA3IiSj7H2CWvT4LeDzO7+Mu4IR41BcwFhgBrGutfoApwL8AAcYAy2Mc1/mAy15/PCSuvqH7xaG+Dvq+2f8DhUAaUGD/vzpjFVfE/b8F7o9DfbX02RDTY0zPOJqNBjYbY4qMMR5gPjA1HoEYY0qNMV/Z69XAN0DPeMTSRlOBv9jrfwEuimMs3we2GGMO9wegR8QYsxjYF1HcUv1MBV4ylmVAtoi0y2xZB4vLGPO+McZnby4DerXHa0cb1yFMBeYbYxqNMVuBzVj/tzGNS0QEuAz4e3u89qEc4rMhpseYJo5mPYHikO0SEuDDWkT6AsOB5XbRzfYp5wuxbhKyGeB9EflSRG6wy7obY0rt9V1A9zjE1eQKwv+h411f0HL9JNIx91Osb6ZNCkRklYh8IiLnxiGeg71viVJf5wJlxphNIWUxr6+Iz4aYHmOaOBKYiGQCrwG3GWP2A88C/YHTgFKs0+VYO8cYMwKYDNwkImND7zTW+XFcuuqJSCrwQ+AfdlEi1FeYeNZPS0RkNuADXrGLSoE+xpjhwB3A30TkuBiGlHDvW4TphH85iXl9HeSzISgWx5gmjmY7gN4h273ssrgQkRSsA+MVY8zrAMaYMmOM3xgTAObSTqfph2KM2WEvy4GFdgxlTae/9rI81nHZJgNfGWPK7BjjXl+2luon7seciFwDXADMsD9wsJuC9trrX2JdSxgUq5gO8b4lQn25gGnAgqayWNfXwT4biPExpomj2RfAQBEpsL+5XgG8FY9A7DbUecA
3xpjfhZSHtk1eDKyLfGw7x5UhIllN61gXV9dh1dPV9m5XA2/GMq4QYd8E411fIVqqn7eAq+yeL2OAqpDmhnYnIpOAu4EfGmPqQspzRcRpr/cDBgJFMYyrpfftLeAKEUkTkQI7rhWxiss2AfjWGFPSVBDL+mrps4FYH2Ox6AlwrNyweiBsxPrGMDuOcZyDdaq5Blht36YALwNr7fK3gB4xjqsfVq+WQuDrpjoCugGLgE3AB0DXONRZBrAX6BxSFvP6wkpcpYAXqz15Zkv1g9XT5Wn7eFsLjIxxXJux2r+bjrE/2fv+yH5/VwNfARfGOK4W3zdgtl1fG4DJsYzLLn8R+FnEvrGsr5Y+G2J6jOkvx5VSSkVFm6qUUkpFRROHUkqpqGjiUEopFRVNHEoppaKiiUMppVRUNHGopCIiL4rIB/GOI5KIfCwiz8c7DqXaQrvjqqQiIp0BhzGmwv6gHmCMGR/D178PuM4Y0zeivCvgMxHDRyiViFzxDkCpWDLGVLXH84pIqrFGVT4sxpi2jhCrVNxpU5VKKk1NVSLyINavlMeJiLFv19j7ZIrIH8SaTKjOHvV0Wshz9LX3nyEi74pILfCwPazDXBHZItbEPkUi8oiIpNmPuwZ4GDgh5DUftO8La6oSkRQRecyOwSPWxD1XRvwtRkRuFJGXRaRaREpE5J6Ifaba8deJSKWIrBCR4e1QtSqJ6BmHSla/wRpTqABr0DqAKnssoLexhmq4HNiJNT7RfBGZbIxZFPIcjwO/BG6ytwVrcLkrgTLgFOA5rGErHsAaGO9EYAYwyn5MTQvxPYI11PnPsIZ4uQT4q4iURcTwAHAf1uRHk4A/isgKY8wiEcnHGin4PnvpxhqG24dSR0ATh0pKxpgaEakHPMaYXU3lIjIeOBNrfoOmZq059gBxt2CNB9TkOWPMK4SbHbK+TUT6AzcCDxhj6kWkBvCHvmYkEUkHfg7cboxpGiL+EREZZT9/aAwLjDFz7fWnReRmrES3CGu2uBTgVWPMNnufb1p6XaXaShOHUuFGYU0dvMM6+QhKxRpALtQBI7OKyPXAdVjTiWZg/Y9F2yQ8wH69xRHlnwD3RJStjtjeSfMkPmuA/wPWici/gY+B140xxSh1BDRxKBXOAVTR3JQUKvLid23ohohcijUS6SysD/n9wKXAr45+mC3GZLATlTHGLyKTsf6WCVijuD4mIpcaY95px5hUB6eJQyUzD+CMKFsJZANuY0y083eMBVaZ8DlU+rbhNSNtBhrt5wuNYRxRzilirP72K+zbIyLyHnAtoIlDHTZNHCqZbQUuFZGTsS5mVwMfYs1n8LqI3I3V3NMFOAtoCLmecDAbgJkiMhXrA/4Cmi+8h75mvoicidX0VWdCJlECMMbUiciTWD21dtN8cXwqcF5b/zgROQv4PvA+1twSA7Eu2M9r63ModTDaHVcls3lYMz9+BuwGptvf0H8IvA78HvgW+CfwA6zJcA7lOaxJiP4MrALOwOrtFOoNrB5O/7Rf8+4Wnms21rSpT2AloR8DP47oUdWaKqwL/W9iJakXsOYVfziK51DqAPrLcaWUUlHRMw6llFJR0cShlFIqKpo4lFJKRUUTh1JKqaho4lBKKRUVTRxKKaWioolDKaVUVDRxKKWUioomDqWUUlH5fyLWYszADMK0AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "#plt.rc('text',usetex=True)nn\n", - "#plt.xscale('log')\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=3,color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=3,color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=3,linestyle='--',color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=3,linestyle='--',color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "plt.savefig(\"./error_fig/noise3/\"+figure_name+\".eps\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.10" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/convergence50S.ipynb b/convergence50S.ipynb deleted file mode 100644 index d827a95..0000000 --- a/convergence50S.ipynb +++ /dev/null @@ -1,1416 +0,0 @@ -{ - "cells": [ - { - 
"cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/xiezhq/.wanghe_env/lib/python3.7/site-packages/torch_sparse/tensor.py:46: UserWarning: This overload of nonzero is deprecated:\n", - "\tnonzero()\n", - "Consider using one of the following signatures instead:\n", - "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:882.)\n", - " index = mat.nonzero()\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "import networkx as nx\n", - "import copy\n", - "import pandas as pd\n", - "import xlwt\n", - "import torch\n", - "from torch import nn\n", - "import torch.optim as optim\n", - "from torch_geometric.utils import from_networkx\n", - "from torch.utils.data import Dataset, DataLoader\n", - "from torch_geometric.data import Data, Batch\n", - "from torch_geometric.nn.conv import MessagePassing\n", - "from torch_sparse import SparseTensor, matmul\n", - "import torch.nn.functional as F\n", - "import matplotlib.pyplot as plt\n", - "\n", - "num_nodes = 5\n", - "num_edges = 6\n", - "n = 100\n", - "m = 80\n", - "k = 16\n", - "train_num = 1000\n", - "test_num = 100\n", - "num_layers = 50\n", - "nnz = 8\n", - "\n", - "#less nnz =5; m = 50; k = 10\n", - "\n", - "def metropolis(adjacency_matrix):\n", - " num_of_nodes = adjacency_matrix.shape[0]\n", - " metropolis=np.zeros((num_of_nodes,num_of_nodes))\n", - " for i in range(num_of_nodes):\n", - " for j in range(num_of_nodes):\n", - " if adjacency_matrix[i,j]==1:\n", - " d_i = np.sum(adjacency_matrix[i,:])\n", - " d_j = np.sum(adjacency_matrix[j,:])\n", - " metropolis[i,j]=1/(1+max(d_i,d_j))\n", - " metropolis[i,i]=1-sum(metropolis[i,:])\n", - " return metropolis\n", - "\n", - "class SynDataset(Dataset):\n", - " def __init__(self, samples):\n", - " self.samples = samples\n", - " self.A = []; \n", - " self.y = []; \n", - " self.x_true = []\n", - " self.pyg_data=[]\n", - " 
self.process()\n", - " \n", - " \n", - " def gen_func(self, num_of_nodes, n, m, k):\n", - " A_all = np.random.randn(m, n)\n", - " x = np.random.randn(n)\n", - " x_norm = 0\n", - "\n", - " while(x_norm < 1e-2):\n", - " x_mask = np.random.rand(n)\n", - " x_mask[x_mask < 1 - nnz/100] = 0\n", - " x_mask[x_mask > 0] = 1\n", - " x_norm = np.linalg.norm(x * x_mask)\n", - "\n", - " x = x * x_mask\n", - " x = x/np.linalg.norm(x)\n", - " \n", - " SNR_db = 50\n", - " SNR = 10**(SNR_db/10)\n", - " \n", - " noise = np.random.randn(m) * np.sqrt(1/SNR)\n", - " y_all = A_all@x + noise\n", - "\n", - " A = np.zeros((num_of_nodes, k , n))\n", - " y = np.zeros((num_of_nodes, k))\n", - " for ii in range(num_of_nodes):\n", - " start = (k*ii) % m; end = (k*(ii+1) )%m\n", - " if(start > end):\n", - " A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0)\n", - " y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), \n", - " np.expand_dims(y_all[:end], axis = 0)), axis = 1)\n", - " else:\n", - " A[ii,:,:] = A_all[start:end,:]\n", - " y[ii,:] = np.expand_dims(y_all[start:end], axis = 0)\n", - " \n", - " x = np.expand_dims(x, axis = 0)\n", - " x = x.repeat(num_of_nodes, axis = 0)\n", - " \n", - " return A, y, x\n", - "\n", - " def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True):\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k = 0\n", - " while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False:\n", - " G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed)\n", - " k += 1\n", - " # print(\"Check if connected: \", nx.is_connected(G))\n", - " # nx.draw(G)\n", - " \n", - " edge_index = from_networkx(G).edge_index\n", - " adj = nx.to_numpy_matrix(G)\n", - " return G, adj,edge_index\n", - " \n", - " def process(self):\n", - " _, adj,edge_index = self.gen_graph(num_nodes, num_edges)\n", - " self.edge_index = edge_index\n", - " W = metropolis(adj)\n", - " 
self.W = [torch.tensor(W, dtype = torch.float)] * self.samples\n", - " \n", - " \n", - " for ii in range(self.samples):\n", - " A, y, x_true = self.gen_func(num_nodes, n, m, k)\n", - " self.A.append(torch.tensor(A, dtype = torch.float) ); \n", - " self.y.append(torch.tensor(y, dtype = torch.float) ); \n", - " self.x_true.append(torch.tensor(x_true, dtype = torch.float) )\n", - " \n", - " edge_weight=torch.tensor(W,dtype=torch.float)\n", - " self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) \n", - " \n", - " \n", - "\n", - " def __getitem__(self, idx):\n", - " return self.W[idx], self.A[idx], self.y[idx], self.x_true[idx], self.pyg_data[idx]\n", - "\n", - " def __len__(self):\n", - " \"\"\"Number of graphs in the dataset\"\"\"\n", - " return len(self.A)\n", - " \n", - " \n", - "def collate(samples):\n", - " # The input `samples` is a list of pairs\n", - " # (graph, label).\n", - " W, A, y, x_true, pyg_data = map(list, zip(*samples))\n", - " W = torch.stack(W)\n", - " A = torch.stack(A)\n", - " y = torch.stack(y)\n", - " x_true = torch.stack(x_true)\n", - " pyg_data = Batch.from_data_list(pyg_data)\n", - " return W, A, y, x_true, pyg_data\n", - "class MetropolisConv(MessagePassing):\n", - " def __init__(self):\n", - " super(MetropolisConv, self).__init__(aggr='add') # \"Add\" aggregation.\n", - "\n", - " def forward(self, x, pyg_data):\n", - " (B, N, D)=x.shape\n", - " out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1)\n", - " return out.view(B,N,D)\n", - "\n", - " def message_and_aggregate(self, adj_t, x):\n", - " return matmul(adj_t, x, reduce=self.aggr)\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - " di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 0, -1))\n", - " gamma_a = di ** power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - "\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " 
ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - "\n", - "\n", - "train_data = SynDataset(train_num)\n", - "val_data = SynDataset(test_num)\n", - "\n", - "train_loader = DataLoader(train_data, batch_size=32, shuffle=True, collate_fn=collate)\n", - "val_loader = DataLoader(val_data, batch_size=100, shuffle=False, collate_fn=collate)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "50" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "num_layers" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-PGEXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.0007084578264766606 tensor(0.0093, grad_fn=) tensor(0.0003, grad_fn=)\n", - "0.00012178569477327983 tensor(0.0041, grad_fn=) tensor(-0.0008, grad_fn=)\n", - "8.811488123683375e-05 tensor(0.0021, grad_fn=) tensor(0.0001, grad_fn=)\n", - "6.656960260897904e-05 tensor(0.0006, grad_fn=) tensor(0.0005, grad_fn=)\n", - "5.601191321602528e-05 tensor(0.0014, grad_fn=) tensor(0.0012, grad_fn=)\n", - "4.8075003405756433e-05 tensor(0.0011, grad_fn=) tensor(0.0013, grad_fn=)\n", - "4.424967221439147e-05 tensor(0.0008, grad_fn=) tensor(0.0014, grad_fn=)\n", - "4.736732535093324e-05 tensor(0.0011, grad_fn=) tensor(0.0018, grad_fn=)\n", - "4.387440219488781e-05 tensor(0.0010, grad_fn=) tensor(0.0017, grad_fn=)\n", - "4.206252572203084e-05 tensor(0.0009, grad_fn=) tensor(0.0017, grad_fn=)\n", - "4.1159131001222704e-05 tensor(0.0008, grad_fn=) tensor(0.0016, grad_fn=)\n", - "4.010802911125211e-05 tensor(0.0008, grad_fn=) tensor(0.0016, grad_fn=)\n", - "3.940199962926272e-05 tensor(0.0007, grad_fn=) tensor(0.0016, grad_fn=)\n", - 
"3.887473008035158e-05 tensor(0.0006, grad_fn=) tensor(0.0015, grad_fn=)\n", - "3.829265483545896e-05 tensor(0.0005, grad_fn=) tensor(0.0015, grad_fn=)\n", - "3.754758029117511e-05 tensor(0.0004, grad_fn=) tensor(0.0014, grad_fn=)\n", - "3.677344636798807e-05 tensor(0.0003, grad_fn=) tensor(0.0014, grad_fn=)\n", - "3.608696351875551e-05 tensor(0.0002, grad_fn=) tensor(0.0013, grad_fn=)\n", - "3.487628316634073e-05 tensor(9.9192e-05, grad_fn=) tensor(0.0012, grad_fn=)\n", - "3.387783237940312e-05 tensor(3.6122e-05, grad_fn=) tensor(0.0011, grad_fn=)\n", - "3.409189872627394e-05 tensor(0.0003, grad_fn=) tensor(0.0014, grad_fn=)\n", - "3.294807595466409e-05 tensor(1.5598e-05, grad_fn=) tensor(0.0011, grad_fn=)\n", - "3.2119203865477175e-05 tensor(2.6649e-05, grad_fn=) tensor(0.0010, grad_fn=)\n", - "3.149933712620623e-05 tensor(0.0001, grad_fn=) tensor(0.0009, grad_fn=)\n", - "3.087065186946347e-05 tensor(0.0003, grad_fn=) tensor(0.0008, grad_fn=)\n", - "3.0461447522611707e-05 tensor(0.0006, grad_fn=) tensor(0.0007, grad_fn=)\n", - "3.5724281133298064e-05 tensor(0.0015, grad_fn=) tensor(0.0013, grad_fn=)\n", - "3.354849224024292e-05 tensor(0.0015, grad_fn=) tensor(0.0012, grad_fn=)\n", - "3.2625669746266794e-05 tensor(0.0015, grad_fn=) tensor(0.0012, grad_fn=)\n", - "3.194850603449595e-05 tensor(0.0015, grad_fn=) tensor(0.0011, grad_fn=)\n", - "3.1461769992802147e-05 tensor(0.0015, grad_fn=) tensor(0.0010, grad_fn=)\n", - "3.1103262074339e-05 tensor(0.0015, grad_fn=) tensor(0.0010, grad_fn=)\n", - "3.072357861810815e-05 tensor(0.0015, grad_fn=) tensor(0.0009, grad_fn=)\n", - "3.0359460652107373e-05 tensor(0.0015, grad_fn=) tensor(0.0009, grad_fn=)\n", - "3.0356680269960634e-05 tensor(0.0016, grad_fn=) tensor(0.0008, grad_fn=)\n", - "3.012709214544884e-05 tensor(0.0016, grad_fn=) tensor(0.0008, grad_fn=)\n", - "2.983849208249012e-05 tensor(0.0017, grad_fn=) tensor(0.0007, grad_fn=)\n", - "2.967410603105236e-05 tensor(0.0018, grad_fn=) tensor(0.0007, grad_fn=)\n", - 
"2.9351169416713674e-05 tensor(0.0020, grad_fn=) tensor(0.0006, grad_fn=)\n", - "2.9322796933684003e-05 tensor(0.0021, grad_fn=) tensor(0.0006, grad_fn=)\n", - "2.9036831904249993e-05 tensor(0.0023, grad_fn=) tensor(0.0006, grad_fn=)\n", - "2.9080152330607234e-05 tensor(0.0025, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.8752309788160346e-05 tensor(0.0027, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.8653734943873133e-05 tensor(0.0030, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.856089128044914e-05 tensor(0.0032, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.8258666191049997e-05 tensor(0.0035, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.8040394511208433e-05 tensor(0.0038, grad_fn=) tensor(0.0004, grad_fn=)\n", - "2.979605426389753e-05 tensor(0.0047, grad_fn=) tensor(0.0009, grad_fn=)\n", - "2.826399867217333e-05 tensor(0.0046, grad_fn=) tensor(0.0005, grad_fn=)\n", - "2.792364171000372e-05 tensor(0.0049, grad_fn=) tensor(0.0004, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_PGEXTRA(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_PGEXTRA, self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* 
self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - " \n", - "###main\n", - "model_PGEXTRA = Net_PGEXTRA(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_PGEXTRA.parameters(), lr=2e-5)\n", - "model_PGEXTRA.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.86,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_PGEXTRA.lam[1], model_PGEXTRA.step_size[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# GNN-DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.00047994527812988963 tensor(0.0071, grad_fn=) tensor(0.0038, grad_fn=)\n", - "0.00017056674641935388 tensor(0.0099, grad_fn=) 
tensor(0.0056, grad_fn=)\n", - "0.00015947482233968913 tensor(0.0099, grad_fn=) tensor(0.0058, grad_fn=)\n", - "0.0001554991899865854 tensor(0.0105, grad_fn=) tensor(0.0056, grad_fn=)\n", - "0.00014631832073064288 tensor(0.0105, grad_fn=) tensor(0.0059, grad_fn=)\n", - "0.0001648697175369307 tensor(0.0103, grad_fn=) tensor(0.0063, grad_fn=)\n", - "0.0001592461680957058 tensor(0.0118, grad_fn=) tensor(0.0055, grad_fn=)\n", - "0.00015754647120047593 tensor(0.0124, grad_fn=) tensor(0.0050, grad_fn=)\n", - "0.00013429127989184053 tensor(0.0119, grad_fn=) tensor(0.0059, grad_fn=)\n", - "0.00016583654905844014 tensor(0.0132, grad_fn=) tensor(0.0053, grad_fn=)\n", - "0.00013677822630597802 tensor(0.0125, grad_fn=) tensor(0.0065, grad_fn=)\n", - "0.00012544124365376774 tensor(0.0121, grad_fn=) tensor(0.0077, grad_fn=)\n", - "0.00012179180635030207 tensor(0.0117, grad_fn=) tensor(0.0087, grad_fn=)\n", - "0.00012389722201078257 tensor(0.0131, grad_fn=) tensor(0.0084, grad_fn=)\n", - "0.00012063625717928517 tensor(0.0123, grad_fn=) tensor(0.0102, grad_fn=)\n", - "0.00010900885354203638 tensor(0.0120, grad_fn=) tensor(0.0118, grad_fn=)\n", - "0.00010504787383069925 tensor(0.0117, grad_fn=) tensor(0.0137, grad_fn=)\n", - "9.558323199598817e-05 tensor(0.0117, grad_fn=) tensor(0.0155, grad_fn=)\n", - "8.307600751322752e-05 tensor(0.0119, grad_fn=) tensor(0.0173, grad_fn=)\n", - "7.437278384259116e-05 tensor(0.0124, grad_fn=) tensor(0.0192, grad_fn=)\n", - "6.91656016442721e-05 tensor(0.0130, grad_fn=) tensor(0.0210, grad_fn=)\n", - "6.410528396827431e-05 tensor(0.0140, grad_fn=) tensor(0.0229, grad_fn=)\n", - "7.342621506722935e-05 tensor(0.0164, grad_fn=) tensor(0.0246, grad_fn=)\n", - "6.336972955978126e-05 tensor(0.0155, grad_fn=) tensor(0.0268, grad_fn=)\n", - "5.720919443774619e-05 tensor(0.0169, grad_fn=) tensor(0.0285, grad_fn=)\n", - "7.058821347527555e-05 tensor(0.0155, grad_fn=) tensor(0.0323, grad_fn=)\n", - "6.20683592842397e-05 tensor(0.0165, grad_fn=) tensor(0.0317, 
grad_fn=)\n", - "5.7903024639927025e-05 tensor(0.0170, grad_fn=) tensor(0.0319, grad_fn=)\n", - "5.5245686439775454e-05 tensor(0.0173, grad_fn=) tensor(0.0326, grad_fn=)\n", - "5.3081305168234394e-05 tensor(0.0178, grad_fn=) tensor(0.0336, grad_fn=)\n", - "5.103950536522461e-05 tensor(0.0185, grad_fn=) tensor(0.0349, grad_fn=)\n", - "4.96922757520224e-05 tensor(0.0196, grad_fn=) tensor(0.0362, grad_fn=)\n", - "4.812106010376738e-05 tensor(0.0212, grad_fn=) tensor(0.0375, grad_fn=)\n", - "4.724225721020048e-05 tensor(0.0232, grad_fn=) tensor(0.0388, grad_fn=)\n", - "4.61188341205343e-05 tensor(0.0258, grad_fn=) tensor(0.0401, grad_fn=)\n", - "4.558678813282313e-05 tensor(0.0288, grad_fn=) tensor(0.0414, grad_fn=)\n", - "4.419260767463129e-05 tensor(0.0325, grad_fn=) tensor(0.0427, grad_fn=)\n", - "4.6391918772314966e-05 tensor(0.0388, grad_fn=) tensor(0.0416, grad_fn=)\n", - "7.726550859388226e-05 tensor(0.0456, grad_fn=) tensor(0.0347, grad_fn=)\n", - "7.344460732383595e-05 tensor(0.0457, grad_fn=) tensor(0.0346, grad_fn=)\n", - "7.102695440153184e-05 tensor(0.0457, grad_fn=) tensor(0.0345, grad_fn=)\n", - "7.002898962582549e-05 tensor(0.0458, grad_fn=) tensor(0.0345, grad_fn=)\n", - "6.93225110808271e-05 tensor(0.0459, grad_fn=) tensor(0.0344, grad_fn=)\n", - "6.746892393039161e-05 tensor(0.0460, grad_fn=) tensor(0.0343, grad_fn=)\n", - "6.701109805362648e-05 tensor(0.0462, grad_fn=) tensor(0.0342, grad_fn=)\n", - "6.566255160578294e-05 tensor(0.0463, grad_fn=) tensor(0.0341, grad_fn=)\n", - "6.490408395620761e-05 tensor(0.0464, grad_fn=) tensor(0.0340, grad_fn=)\n", - "6.386023142113118e-05 tensor(0.0466, grad_fn=) tensor(0.0339, grad_fn=)\n", - "6.251969011827896e-05 tensor(0.0468, grad_fn=) tensor(0.0338, grad_fn=)\n", - "6.132977603101608e-05 tensor(0.0470, grad_fn=) tensor(0.0337, grad_fn=)\n" - ] - } - ], - "source": [ - "class Net_DGD(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers):\n", - " super(Net_DGD, self).__init__()\n", - " 
self.step_size = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " #print(A.shape, x.shape, b.shape)\n", - " #print(grad_A.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " #print(grad_A.shape)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = self.lam[ii] #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " while (k < max_iter):\n", - " #x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \\\n", - " # self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1)\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - "\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "def step_loss(gamma,x, y):\n", - " #gamma = 0.75\n", - " n_steps = x.shape[0]\n", - " #print(n_steps)\n", - 
" #di = torch.ones((n_steps)) * gamma\n", - " power = torch.tensor(range(n_steps, 0, -1))\n", - " gamma_a = 1/ power\n", - " gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)\n", - "\n", - " y = torch.unsqueeze(y, axis = 0)\n", - " ele_loss = gamma_a * (x - y) **2\n", - " #print(ele_loss.shape)\n", - " #print(torch.mean(ele_loss, (1,2,3) ))\n", - " loss = torch.mean(ele_loss)\n", - " return loss\n", - " \n", - " \n", - "model_DGD = Net_DGD(1e-3, num_layers)\n", - "optimizer = optim.Adam(model_DGD.parameters(), lr=1e-4)\n", - "model_DGD.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_DGD(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.8965,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_DGD.lam[1], model_DGD.step_size[1])" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\nclass Net_NIDS(torch.nn.Module):\\n def __init__(self, step_size, num_layers, num_nodes):\\n super(Net_NIDS, self).__init__()\\n self.step_size = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\\n self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\\n self.c = nn.Parameter(torch.ones(num_layers)*step_size)\\n self.num_layers = num_layers\\n self.conv=MetropolisConv()\\n \\n def tgrad_qp(self, A, b, x):\\n # A: nodes * k * n\\n # X: nodes * n\\n # Y: nodes * k\\n\\n x_ = torch.unsqueeze(x, axis = -1)\\n b_ = torch.unsqueeze(b, axis = -1)\\n\\n A_t = A.transpose(2,3)\\n grad_A = A_t @ (A @ x_ - b_)\\n grad_A = torch.squeeze(grad_A, axis = -1)\\n return grad_A\\n \\n def act(self, x, ii):\\n tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* 
self.step_size[ii]\\n return F.relu(x - tau) - F.relu( - x - tau)\\n \\n def forward(self, W, A, b,pyg_data, max_iter):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n ret_z = []\\n \\n k = 1\\n x_0 = init_x\\n x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\\n x_1 = self.act(x_12, 0)\\n \\n x_hist = [init_x,x_1]\\n \\n while (k < max_iter):\\n c = self.c[k]/(2*torch.max(self.step_size[k]))\\n #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n #print(W_hat)\\n temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\\n conv_result = self.conv(temp,pyg_data)\\n x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\\n #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\\n #x_32 =x_12 - x_1 + w@temp\\n x_2 = self.act(x_32, k)\\n \\n ret_z.append(x_2)\\n\\n x_0 = x_1\\n x_1 = x_2\\n x_12 = x_32\\n \\n\\n k = k + 1\\n x_hist.append(x_2)\\n \\n ret_z = torch.stack(ret_z)\\n return ret_z, x_2,x_hist\\nmodel_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\\noptimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\\nmodel_NIDS.train()\\nepoch_losses = []\\nfor epoch in range(500):\\n epoch_loss = 0\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\\n z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\\n loss = step_loss(0.83,z, x_true)\\n \\n optimizer.zero_grad()\\n loss.backward()\\n optimizer.step()\\n epoch_loss += loss.detach().item()\\n epoch_loss /= (iter + 1)\\n if(epoch % 10 == 0):\\n print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\\n'" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "class Net_NIDS(torch.nn.Module):\n", - " def __init__(self, step_size, num_layers, num_nodes):\n", - " super(Net_NIDS, 
self).__init__()\n", - " self.step_size = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size)\n", - " self.lam = nn.Parameter(torch.ones(num_layers,num_nodes)*step_size*10)\n", - " self.c = nn.Parameter(torch.ones(num_layers)*step_size)\n", - " self.num_layers = num_layers\n", - " self.conv=MetropolisConv()\n", - " \n", - " def tgrad_qp(self, A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - "\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - "\n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - " \n", - " def act(self, x, ii):\n", - " tau = (self.lam[ii]).unsqueeze(0).unsqueeze(-1) #* self.step_size[ii]\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - " \n", - " def forward(self, W, A, b,pyg_data, max_iter):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " ret_z = []\n", - " \n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = x_0 - torch.diag(self.step_size[0]).unsqueeze(0)@ self.tgrad_qp(A, b, x_0)\n", - " x_1 = self.act(x_12, 0)\n", - " \n", - " x_hist = [init_x,x_1]\n", - " \n", - " while (k < max_iter):\n", - " c = self.c[k]/(2*torch.max(self.step_size[k]))\n", - " #W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*torch.diag(self.step_size[k]).unsqueeze(0)@(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " #print(W_hat)\n", - " temp = 2*x_1-x_0 - torch.diag(self.step_size[k])@(self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0))\n", - " conv_result = self.conv(temp,pyg_data)\n", - " x_32 = x_12 - x_1 + temp - c*torch.diag(self.step_size[k]).unsqueeze(0)@ (temp - conv_result)\n", - " #x_32 = x_12-x_1 + self.conv(temp,pyg_data)\n", - " #x_32 =x_12 - x_1 + w@temp\n", - " x_2 = self.act(x_32, k)\n", - " \n", - " ret_z.append(x_2)\n", - "\n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - 
"\n", - " k = k + 1\n", - " x_hist.append(x_2)\n", - " \n", - " ret_z = torch.stack(ret_z)\n", - " return ret_z, x_2,x_hist\n", - "model_NIDS = Net_NIDS(1e-3, num_layers,num_nodes)\n", - "optimizer = optim.Adam(model_NIDS.parameters(), lr=1e-4)\n", - "model_NIDS.train()\n", - "epoch_losses = []\n", - "for epoch in range(500):\n", - " epoch_loss = 0\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader):\n", - " z, _,_ = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " loss = step_loss(0.83,z, x_true)\n", - " \n", - " optimizer.zero_grad()\n", - " loss.backward()\n", - " optimizer.step()\n", - " epoch_loss += loss.detach().item()\n", - " epoch_loss /= (iter + 1)\n", - " if(epoch % 10 == 0):\n", - " print(epoch_loss, model_NIDS.lam[1], model_NIDS.step_size[1])\n", - "'''" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Origin Methods" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "def tgrad_qp(A, b, x):\n", - " # A: nodes * k * n\n", - " # X: nodes * n\n", - " # Y: nodes * k\n", - " '''grad_A = np.zeros(x.shape)\n", - " for i in range(x.shape[0]):\n", - " grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i])\n", - " return grad_A'''\n", - " x_ = torch.unsqueeze(x, axis = -1)\n", - " b_ = torch.unsqueeze(b, axis = -1)\n", - " \n", - " A_t = A.transpose(2,3)\n", - " grad_A = A_t @ (A @ x_ - b_)\n", - " # print(A.shape, x.shape, b.shape)\n", - " grad_A = torch.squeeze(grad_A, axis = -1)\n", - " return grad_A\n", - "\n", - "def torch_soft(x, tau):\n", - " return F.relu(x - tau) - F.relu( - x - tau)\n", - "\n", - "def opt_distance(x,opt):\n", - " error = 0\n", - " batch_size = x.shape[0]\n", - " num_of_nodes = x.shape[1]\n", - " error = np.linalg.norm(x-opt)**2\n", - " return error/num_of_nodes/batch_size\n", - "\n", - "def hist_nmse(x_hist,opt):\n", - " error = []\n", - " iteration = len(x_hist)\n", - " #print(iteration)\n", - " for k in range(iteration):\n", - " 
error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt)))\n", - " return error\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin PG-EXTRA" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.5940539376917586 \t 0.4680586206153548\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.5926524396339736 \t 0.46249895539447794\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.5910356479359616 \t 0.4559274614364422\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.3 \t 0.5860771867587463 \t 0.43408721601314215\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.5833889446061293 \t 0.41894614409603675\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.7 \t 0.5826519235668893 \t 0.409104285640251\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.5847374461508589 \t 0.4023254521409562\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.7703447354999371 \t 0.6478505945503712\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.5301225537094361 \t 0.415722756291223\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.5271336443722248 \t 0.40687496252940764\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.5236207424564127 \t 0.3964408589277609\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.3 \t 0.5121031546092781 \t 0.362220444584389\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.5043861842132464 \t 0.3389734285821923\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.7 \t 0.4998694674018843 \t 0.32396787785369086\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.4981388679557294 \t 0.3129880424962921\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.7123190027009987 \t 0.5892479285714289\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.467728493232471 \t 0.36742037890078977\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.46227280331077053 \t 0.35378411973245233\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.4558335846525024 \t 0.33788173565810026\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.3 \t 0.434376467559503 \t 0.28774974921022656\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.4194138549759536 \t 0.25594673657615385\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.7 \t 0.409673255263624 \t 0.23674523565794697\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.402953309749153 \t 0.22396583507565265\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.6474087957968295 \t 0.5366686070618453\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.3672762931329199 \t 0.2923963505486318\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.353846933402383 \t 0.26362702612325667\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.3382127295366972 \t 0.23203015178851094\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.3 \t 0.2887298166279379 \t 0.14891818573349336\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.25700397423775756 \t 0.110967441131525\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.7 \t 0.23772714761985297 \t 0.09564610865531904\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.22477849013358356 \t 0.09352968252077015\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 0.5361935517881357 \t 0.47802591522297194\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 0.27282552199581006 \t 0.22073285715960084\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 
0.2374765949004377 \t 0.15390678756125273\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 0.19999572756187992 \t 0.09615506519395467\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.3 \t 0.10989206281882115 \t 0.021763587058577397\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 0.07582068177598968 \t 0.01747684283052979\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.7 \t 0.06552564392336535 \t 0.023949552897089346\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 0.06924297520704567 \t 0.04051940864338803\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.4703033346608281 \t 0.4641906695382659\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.01 \t 14208.244755363941 \t 37738906243424.77\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.05 \t 11421.726110506057 \t 30324862304317.95\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.1 \t 8445.307487022877 \t 22408387417636.992\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.3 \t 1359.277442410655 \t 3575826630616.352\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.5 \t 0.6139738164287992 \t 524392965.253125\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.7 \t 0.03626997108796968 \t 0.020719137638574924\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 1 \t 0.048247790596779395 \t 0.0385439632045568\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 5 \t 0.4654260416605466 \t 0.4640057638791204\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t 6.256789565471774e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t 5.7769127150182714e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t 5.207994164633043e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.3 \t 3.2544923208190645e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t 
1.8616474045125856e+23 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.7 \t 9.28688575396868e+22 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t 1.9539885469502116e+22 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t 0.46416835338743295 \t 0.46398926246183875\n" - ] - } - ], - "source": [ - "def torch_PGEXTRA(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 + x_12 - W_hat@x_0 - \\\n", - " step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,7e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,3e-1,5e-1, 7e-1,1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_PGEXTRA(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " \n", - " if loss2 < best_error:\n", - " 
best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin DGD" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.01 \t 0.605324313603116 \t 0.4777560819402879\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.05 \t 0.6041230399185151 \t 0.47247883532072776\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.1 \t 0.6027498381328769 \t 0.46627429917640983\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.3 \t 0.5986390795216721 \t 0.44572820769326243\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.5 \t 0.5966072908644564 \t 0.43150386412894476\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 0.7 \t 0.596412417690044 \t 0.42242122252012637\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 1 \t 0.5991846072476182 \t 0.4166333064153605\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0005 \t 5 \t 0.7770043687039943 \t 0.6565058805164881\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.01 \t 0.5449529025194424 \t 0.4275345266469249\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.05 \t 0.5422909490196325 \t 0.4190731938183035\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.1 \t 0.5391815688926727 \t 0.40915404446773934\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.3 \t 0.5290939575211379 \t 0.37675980513660945\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.5 \t 0.522510301244918 \t 0.3547385442897958\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 0.7 \t 0.5189351923247159 \t 0.34069950236557817\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 1 \t 0.518407741313924 \t 0.3309626039515733\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.0007 \t 5 \t 0.7228559435361603 \t 0.6009312514247794\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.01 \t 0.4865721544785647 \t 0.3816777560041828\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.05 \t 0.4816078651778589 \t 0.3685525916798797\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.1 \t 0.47578300551690517 \t 0.3533543154448562\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.3 \t 0.4565946680083307 \t 0.3056183052720899\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.5 \t 0.4433946277025389 \t 0.2752672229898162\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 0.7 \t 0.4351414300562173 \t 0.2571117036353753\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 1 \t 0.4302776880126403 \t 0.24558072956048999\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.001 \t 5 \t 0.6631267220888113 \t 0.5515168847821478\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.01 \t 0.3947319084811916 \t 0.31191243160062004\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.05 \t 0.38217612475382523 \t 0.2840330423829146\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.1 \t 0.3676902214882303 \t 0.2535609069976799\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.3 \t 0.3223552369956906 \t 0.1732036889072333\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.5 \t 0.29358641629327215 \t 0.13569167154634487\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 0.7 \t 0.27655102460332454 \t 0.12016410144421069\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 1 \t 0.26609735030499176 \t 0.11802508074586694\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.002 \t 5 \t 
0.5628771209345431 \t 0.4987725284595236\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.01 \t 0.31318057835273977 \t 0.2501581664487021\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.05 \t 0.2794851188737739 \t 0.18322170445469235\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.1 \t 0.2439440283472941 \t 0.12510365435410903\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.3 \t 0.15809911026171902 \t 0.042975259893690235\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.5 \t 0.12358578601858607 \t 0.03417662114119639\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 0.7 \t 0.11223631116009347 \t 0.04056274076405361\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 1 \t 0.11575338206776405 \t 0.059824248059213006\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.005 \t 5 \t 0.5098841484872465 \t 0.4999362906619281\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.01 \t 68052561144.05 \t 9.76931508131167e+26\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.05 \t 59891850020.738 \t 8.572871616527305e+26\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.1 \t 50558933362.322 \t 7.206389563987081e+26\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.3 \t 22287054995.920128 \t 3.087647371844305e+26\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.5 \t 7079786285.390281 \t 9.099169397754408e+25\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 0.7 \t 1234012448.6381328 \t 1.2053789406689961e+25\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 1 \t 31545888.509980593 \t 9.147792721736306e+21\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.007 \t 5 \t 0.511862741134044 \t 0.5086370710864303\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.01 \t 9.796332749813211e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.05 \t 9.15170750349963e+26 \t inf\n", - 
"lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.1 \t 8.37999855174287e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.3 \t 5.6723953445580546e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.5 \t 3.62664438639385e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 0.7 \t 2.118144026984666e+26 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 1 \t 7.187142605995812e+25 \t inf\n", - "lamb\ttau\tlayer_loss\t\tfinal_loss\n", - "0.01 \t 5 \t 0.5205962064808773 \t 0.51992274850377\n" - ] - } - ], - "source": [ - "def torch_DGD(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " \n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " W_hat = (W + I)/2\n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " x_12 = W @ x_0 - step_size * tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " \n", - " x_32 = W@x_1 - step_size*tgrad_qp(A, b, x_1)\n", - " x_2 = torch_soft(x_32, tau * step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,7e-3,1e-2]\n", - "taus = [1e-2, 5e-2,1e-1,3e-1,5e-1, 7e-1,1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_DGD(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " 
\n", - " print(\"lamb\\ttau\\tlayer_loss\\t\\tfinal_loss\")\n", - " print(lam,'\\t', tau, '\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_error = loss2" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Origin NIDS" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\ndef torch_NIDS(W, A, b, max_iter, step_size,tau):\\n (batch_size, num_of_nodes, _, dim) = A.shape\\n init_x = torch.zeros((batch_size, num_of_nodes, dim))\\n c = 1/(2*step_size)\\n \\n (batch_size, num_of_nodes, dim) = init_x.shape\\n I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\\n I = I.repeat(batch_size, 1, 1)\\n \\n \\n #initialization\\n k = 1\\n x_0 = init_x\\n #print(alpha.unsqueeze(-1).shape)\\n x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\\n x_1 = torch_soft(x_12, tau*step_size)\\n \\n x_hist = [init_x,x_1] #add for plot\\n while (k < max_iter):\\n W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\\n x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\\n x_2 = torch_soft(x_32, tau*step_size)\\n \\n x_0 = x_1\\n x_1 = x_2\\n x_12 = x_32\\n \\n k = k + 1\\n \\n x_hist.append(x_2)\\n \\n return x_2,x_hist\\nlams = [5e-4,1e-3, 5e-3,1e-2]\\ntaus = [1e-2, 5e-1, 1, 5]\\nbest_error = 100\\nbest_par = {}\\n#cs = [ 5e-1, 1,10,20,50,200]\\nfor lam in lams:\\n for tau in taus:\\n for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\\n loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\\n loss1 = 
opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\\n \\n print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\\n print(lam,\\'\\t\\', tau, \\'\\t\\',1/(2*lam),\\'\\t\\',loss1,\\'\\t\\',loss2)\\n if loss2 < best_error:\\n best_par[\\'lam\\'] = lam\\n best_par[\\'tau\\'] = tau\\n best_par[\\'c\\'] = 1/(2*lam)\\n best_error = loss2\\n'" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "def torch_NIDS(W, A, b, max_iter, step_size,tau):\n", - " (batch_size, num_of_nodes, _, dim) = A.shape\n", - " init_x = torch.zeros((batch_size, num_of_nodes, dim))\n", - " c = 1/(2*step_size)\n", - " \n", - " (batch_size, num_of_nodes, dim) = init_x.shape\n", - " I = torch.unsqueeze(torch.eye(num_of_nodes), axis = 0)\n", - " I = I.repeat(batch_size, 1, 1)\n", - " \n", - " \n", - " #initialization\n", - " k = 1\n", - " x_0 = init_x\n", - " #print(alpha.unsqueeze(-1).shape)\n", - " x_12 = x_0 -step_size* tgrad_qp(A, b, x_0)\n", - " x_1 = torch_soft(x_12, tau*step_size)\n", - " \n", - " x_hist = [init_x,x_1] #add for plot\n", - " while (k < max_iter):\n", - " W_hat = torch.eye(num_of_nodes).unsqueeze(0)- c*step_size*(torch.eye(num_of_nodes).unsqueeze(0)- W)\n", - " x_32 = x_12-x_1 + W_hat@(2*x_1-x_0 - step_size*(tgrad_qp(A, b, x_1)-tgrad_qp(A, b, x_0)))\n", - " x_2 = torch_soft(x_32, tau*step_size)\n", - " \n", - " x_0 = x_1\n", - " x_1 = x_2\n", - " x_12 = x_32\n", - " \n", - " k = k + 1\n", - " \n", - " x_hist.append(x_2)\n", - " \n", - " return x_2,x_hist\n", - "lams = [5e-4,1e-3, 5e-3,1e-2]\n", - "taus = [1e-2, 5e-1, 1, 5]\n", - "best_error = 100\n", - "best_par = {}\n", - "#cs = [ 5e-1, 1,10,20,50,200]\n", - "for lam in lams:\n", - " for tau in taus:\n", - " for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " original,origin_hist = torch_NIDS(W, A, y, 100, lam, tau)\n", - " loss2 = opt_distance(original.detach().numpy(), x_true.numpy())\n", - " loss1 = 
opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy())\n", - " \n", - " print(\"lamb\\t tau\\t c\\t layer_loss\\t\\t final_loss\")\n", - " print(lam,'\\t', tau, '\\t',1/(2*lam),'\\t',loss1,'\\t',loss2)\n", - " if loss2 < best_error:\n", - " best_par['lam'] = lam\n", - " best_par['tau'] = tau\n", - " best_par['c'] = 1/(2*lam)\n", - " best_error = loss2\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'lam': 0.005, 'tau': 0.5}\n" - ] - } - ], - "source": [ - "print(best_par)" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd4k1X7wPHvyejem5bSlo2AICLIUhR4RWWIyBJlqDgQUVD0FZThz4WC+CqCiogsAUGGILgREdlL9u7ee9GZ8/sjbdpCR9qmC87nunLZJE+e56SW3DnrvoWUEkVRFEUxl6auG6AoiqI0LCpwKIqiKJWiAoeiKIpSKSpwKIqiKJWiAoeiKIpSKSpwKIqiKJWiAoeiKIpSKSpwKIqiKJWiAoeiKIpSKbq6bkBN8PDwkIGBgXXdDEVRlAbl8OHD8VJKz4qOuyEDR2BgIIcOHarrZiiKojQoQogQc45TQ1WKoihKpajAoSiKolSKChyKoihKpajAoSiKolTKDTk5riiKUWpqKrGxseTm5tZ1U5R6wt7ensaNG6PRVL3f0GAChxCiP/A/QAt8JaV8v46bpCj1WmpqKjExMfj5+WFra4sQoq6bpNQxg8FAREQE8fHxeHl5Vfk8DWKoSgihBT4D7gduAUYJIW6piWtJKZEGQ02cWlFqVWxsLH5+ftjZ2amgoQCg0Wjw9vYmJSWleuexUHtqWhfgopTyspQyB1gLDLb0RdYs38EDLy5j1YqfLH1qRal1ubm52Nra1nUzlHpGr9eTl5dXrXM0lMDhB4QVux9e8JiJEOJpIcQhIcShuLi4Kl0k+GI4Z+y82fLX2aq3VFHqEdXTUK5lib+JhhI4KiSl/FJK2VlK2dnTs8Id86W654HuAJx3bkzU7r2WbJ6iKMoNo6EEjgjAv9j9xgWPWVTXO2/BOTudVGt7dixaY+nTK4pSTGBgILa2tjg4OODt7c24ceNIT0+v8esKIbC3t8fBwQF3d3f69OnDunXrrjvu119/5Z577sHR0RF3d3c6duzI3LlzycrKAmD27Nno9XocHR1xdHSkZcuWTJo0iaioqBp/D3WtoQSOg0ALIUSQEMIKGAn8YOmLCCFo5Wj8lVzJkGTsVb0ORalJW7duJT09nSNHjnDo0CHefvvtEs9LKTHUwGKV48ePk56ezrlz5xg3bhyTJk1izpw5pufXr1/PI488wqOPPkpISAgJCQmsW7eO8PBwwsKKRs1H
jBhBWloaiYmJbNq0iejoaG6//fYbPng0iMAhpcwDJgE/A2eA76SUp2riWsP73wbAUa8WhDz9DFePH6+JyyiKUoyfnx/3338/J0+epHfv3syYMYMePXpgZ2fH5cuXiYyMZNCgQbi5udG8eXOWLFlieu0DDzzAyy+/bLo/cuRInnjiCbOu6+HhweOPP87ixYt57733SEhIQErJ1KlTmTlzJhMmTMDNzQ2AVq1a8emnn9KiRYvrzqPX62nbti3r1q3D09OT+fPnV/M3Ur81mH0cUsrtwPaavk6/jgHww1lOuwWRgZak9eux7dChpi+rKDe1sLAwtm/fzsMPP8zu3btZuXIlO3bsoFWrVkgp6dOnD+3atSMyMpKzZ8/Sr18/mjVrxr333svXX3/NrbfeyoMPPkhUVBQHDhzgeCW/8A0ePJi8vDwOHDhAUFAQ4eHhDB06tNLvQ6vVMnjwYH7++edKv7YhaTCBo7Y42+m53c+JwxFp7PG9lQGh4XXdJEWxmDOt25T5nM+cObiOGA5A0rrviJ41q8xj25w9Y/r5ysNDyTp9+rrHzfHQQw+h0+lwdnbmwQcfZPr06dx///2MGzeOtm3bAsagsmfPHn788UdsbGzo2LEjTz31FCtWrODee+/Fx8eHxYsXM3bsWK5evcrmzZtxdHSsVDv0ej0eHh4kJiaaXuvj42N6fuTIkfz000/k5OTwxRdf8Pjjj5d5Ll9fXxITEyt1/YamQQxV1bYRdwYBsLNxJzTpqXXcGkW5cW3evJnk5GRCQkJYtGiRad+Jv3/RWpjIyEjc3NxKBIOAgAAiIorWxwwcOJD8/HxatWpFz549TY+3bdsWBwcHHBwc2L17d5ntyM3NJS4uDjc3N9zd3QFKzFOsXbuW5ORkOnXqRH5+frnvKSIiwjS8daNSPY5S9G/vwxubT/CvR1Nign+maV03SFEsxNwegeuI4abeR0WCNn5fnSaVqvheg8Jv8GlpaabgERoaip9f0VauGTNm0KZNG65cucKaNWsYNWoUAKdOmTcVumXLFnQ6HV26dMHV1RU/Pz82btxYYu7EHAaDga1bt9K3b99Kva6hUT2OUjjZ6Lm3lSdSaPjdLoD1H68iPy2trpulKDclf39/unfvzuuvv05WVhb//vsvS5cu5bHHHgPgr7/+YtmyZaxYsYLly5fzwgsvlOiNlCcxMZHVq1fz/PPP89prr+Hu7o5Go2H+/PnMmTOHJUuWkJSUhJSSCxcuEBMTU+p58vLyOHPmDKNGjSI6OpqpU6da7P3XRypwlKFv20aAcTPgO8E6dr7/WR23SFFuXmvWrCE4OBhfX1+GDBnCnDlz6Nu3L6mpqYwZM4aFCxfi5+dHr169ePLJJxk/fjxSyjLP16FDBxwcHGjevDlfffUVCxYs4K233jI9P2LECL777jtWrVqFv78/Hh4eDB8+nKeffpphw4aZjlu3bh0ODg44OzszaNAg3N3dOXz4ML6+vjX6+6hrorxfbkPVuXNnWd2a44dDkhi6+B+aJYcjEbhnpbLsqW449OxhoVYqSs06c+YMbdqUPRmu3LzK+tsQQhyWUnau6PWqx1GGIA97ACLtPfDOSeWgTxt2vv8Z+bWws1VRFKU+U4GjDK52ehxkHlf1NtzT2A6AFR6diP3gwzpumaIoSt1SgaMMQgga64xV0/ycrLDTCQ76tGH/L3tJ+/33Om6doihK3VGBoxwBdsYlgdFpOYzpYVyU+23rfqTt3FmXzVIURalTah9HOZq42EAahKbnM7lXEP9cjGdo+w74jLqx12griqKURwWOcgR6OkJYGmE5WtwdrPnhhZ4lnpf5+Qit1uzzSSlVYR1FURo8NVRVjqaNjakHwrG57rnskBCChw0nZetWs86VvGEDF7r3IOvceYu2UVEUpbapwFGOpk2NSc4irJxMNQES0rOZteUkz357lKzTp4ma8YZZqdfT9+whPymJrBP/1mibFUVRapoKHOXw9HLFLi+LDL0t+7v0
Imnddwgh2Hgkgp0pes4Pm4DMySHs+UnkVlC4xZCRYfxvdnZtNF1RFKXGqMBRDo1GQ0uDMUfVGYdGpP60Azd7Kybe0xyAxW63Y921K/nx8YQ9/7wpOJTGkJkJgMzOqfmGK0oDUF7p2IrKtl4rODgYIYQpE27hrbAk7NChQ5kwYUKJ1wwZMoRJkybx7rvvmo63sbFBq9Wa7hemdi9ebtbPz4+pU6eWmiV33Lhx6HQ6VQHwZtfzP10BOOkeRHbB/MT4HoH4udhyNiadg0++jj6gCdmnzxD+4kvInNIDgyGjMHCU/oevKDej0krHmlu2tTTJycmkp6ebbiNGjADgs88+Y+PGjewsWEq/bt06jhw5wvvvv8/06dNNx3/++ed069bNdL94dt3CcrO7du1i3bp1fP311yWunZGRwffff4+zszOrVq2y8G+qflGBowJdmnoAcMqrOfmJieTFx2Oj1zLtvlYAfPR3GJ4LF6N1cyPj779JLaPylyFTDVUpSlkKS8eeOHGi0mVbzeHj48P8+fOZMGECoaGhTJ48mS+++AIHB4dKn6t58+b06NGDY8eOlXj8+++/x8XFhZkzZ7J8+fIqtbOhUIGjAp0CXNEIuOjkS5bWiuzzxl7HoA6+tPdzJiY1m29C8vFf8iVer76K04ABpZ6nqMehhqoU5VqFpWPt7OyqXLa1IuPGjaNZs2Z06tSJ/v37079//yqd5+zZs+zevZvmzZuXeHz58uWMGjWKkSNHcvbsWQ4fPmyJZtdLah9HBRysdbT1deZERApnXJvQ5Nx57Lt3R6MRzHiwDSO/3EdEcia2fTtgWzAeCmDIykJjU7SMt2iOQ/U4lLoT+N8fy3zu3SHtebRrEwC+3R/K9E0nyjw2+P0HTT8P+HQ3JyNSr3vcHNeWjh01ahQbNmyoctlWDw+PEvf37t1bIgtsr169+OWXX0y1PCqjsPpfZmYmI0eOZOLEiabnQkND2blzJ/Pnz8fb25s+ffqwYsUKbr/99kpfpyFQPQ4z3BFo7C6f8mhq6nEA3NnUnZ9e6sUHj3QocXxOeDiXBw8mae1aAKTBgCwIHIYcFTgUpdC1pWMLP/grKttafAI8NDTUdGx8fDzJycmmW/GgceHCBebNm8fEiRN5+eWXyc3NrVRbjxw5Qnp6OuvWrWP//v1kFFsMs3LlStq0aUPHjh0BGD16NN9++22lr9FQqB6HGToHuvL1niucd/En+/y+Es+19nG67vjMAwfJDQklevYchF6P4333mZ5TQ1VKXTK3R/Bo1yam3kdFtr3QqzpNKqFVq1ZmlW1Nv6a8QXBwcLnnlVLy1FNP8dJLLzFz5kx69OjB3LlzeeONNyrVPiEEw4cPZ8uWLbz11lt8/PHHAKxYsYLQ0FBTTykvL4+EhAS2b9/O4MGDK3WNhkD1OMzQppExOFxxakT2xYvIUpbhHQlNYtyyA6Rl5eLy8BC8Xn0VgKg33iR5/QbTcbKM5YSKolClsq3mWLx4MfHx8UyfPh2NRsPSpUv54IMPOHv2bJXO99///pclS5YQHR3N3r17uXTpEgcOHODYsWMcO3aMkydP8uijj7JixYoqt7k+U4HDDE3c7LDVa4m3cyHVoCH1x5LjxFJK3t52mj/PxTHv53MAuD8xHq9pr4CUxM6dazpWDVUpSvnMLdtaGhcXlxLDWB999BGhoaFMnz6dpUuXYmVlBcAtt9zCyy+/zIQJE8otMVuW9u3bc9ddd/Hhhx+yfPlyBg8eTPv27fHx8THdXnzxRbZt20ZiYmKVfg/1mSoda6bBn+3heFgyc3cvokNmFIHr1mLTsqXp+VORKQxeuIc8g2Tlk13o1cKTrDNniHn/fTL3HzAdZ3fnnQR8s8yibVOU0qjSsUpZVOnYWtLGxxGAqB79kFevEvfJJyWeb+vrzIt9jGvMX1l/nOTMHOL+9wmZ+w9gUzBhBmpVlaIoDZ8KHGZqXRA4ItvfCUDmvv3IvLwSxzzXuxmdmrgQk5rNjM0n
yb5wAQBtsU1GKnAoitLQqcBhptYFE+TnUvPRBzTBkJ5O1unTJY7RaTUsGNEReystvx0JJjciAoDcYmkSDDnZ5ISFkbD06yqNrSqKotQ1FTjMVNjjOB+dhk0XY68jY+++644LcLdn1sC2tMwumhDLjYw0/SyvZhE24WliP/yQmPfeQxaka1cURWkoVOAwk4udFT5ONlzNzSepozHxYeb+koEjPzkZmZvLsM6NWdjD1fS4LLYJSObk4Dl1CkKvJ2nFSqJmvHHdkJeiKEp9pgJHJbQq6HWE+honwTMPHzElLcw6f54Ld/cmfMoUhBDYRpWexdOQk4PTf/5D488XI2xtSdm0iYgpUzGUkVVXURSlvlGBoxIKh6suZEisW7dGZmeT/scfACStXGm8/9vvpO/+m+xLF0s9R+HkuEOPHjRZuhSNkxNpv/5K2FMTyE9NrZ03oiiKUg31PnAIIWYLISKEEMcKbg/UVVsKexznY9JwHTEcgISvlpKfkkLK1m2m42I//JDs8xdKPYfMzjZNitt1uo2AFcvReXqSeeAA6X/truF3oCiKUn0NJVfVAinlvLpuREtvY+A4G52G8/NDiPt0IVmnThExbRoyKwu7zp3JjYwskQjxOlJCbi4U7GC1ad2awLVrSPv9D5wHVC6zqKIoSl2o9z2O+qS5lwNajSA4PoMcrR7Xx0YDkFHQU3AbNxbf+fNAqwVA16gRGqfrkyCGxySVuK/388NtTFGq6OyLF8nYt7+m3oai1Btr166la9eu2Nvb4+XlRdeuXVm0aBFSSsaNG4cQggMHijIvXLx4ESGE6X7v3r2xsbEpURnwt99+IzAwsMxrzp49G71ej4ODAy4uLnTv3p29e/fWyPu71rhx47CyssLR0RFHR0fatWvH66+/TkpKSonjoqKimDBhAr6+vjg4ONC0aVPGjRtnyq11balcb29vBgwYwK+//lor76OhBI5JQoh/hRBfCyFcKz68ZtjotQS622GQcDE2HbcxY3B+6CGcHxmKz1tzcOjTB7vbbqP5H7/jPHgQXlOnonO9vrkvrThAalbp6ZbzEhMJffppQidMIGVb2bUTFKWhmz9/Pi+++CLTpk0jOjqamJgYPv/8c/bs2UNOwWIRNze3CjPY2tvb83//93+VuvaIESNIT08nLi6Onj178vDDD5e6ryqvBlY8vvrqq6SlpREXF8eyZcvYt28fPXr0MKVpT0hIoHv37mRmZrJ7927S0tI4cuQId99993WBobBU7vHjx+nXrx9Dhgzhm2++sXibr1UvAocQ4jchxMlSboOBxUAzoCMQBcwv4xxPCyEOCSEOxcXF1VhbC9Oon4tOQ+vggO/77+H79tu4Dh9u+iak9/bGd+5cnAcOQFtQ+rK4pOR0QuIzSz2/1tkZx759ITeXyFdeIf6LL9VGQeWGk5KSwsyZM1m0aBGPPPIIjo6OCCG47bbbWL16NdbW1gCMHTuWf//9l127dpV5rsmTJ7NmzRouXbpU6Xbo9XrGjh1LdHQ0CQkJfPPNN/To0YMpU6bg7u7O7NmzMRgMvP322wQEBODl5cWYMWNMPYR169YRFBREasHClh07duDj44M5n0E2Njbccccd/PDDDyQkJLBsmTGH3YIFC3BycmLlypU0a9YMIQQuLi6MHz+eF154odRzFSZVnD17Nq+99hqGGt4fVi8Ch5Syr5SyXSm3LVLKGCllvpTSACwBupRxji+llJ2llJ09PT1rrK2F8xznYtLMOr60wPHFiPa0b+xc6vFCq8Vn+nS8XnsNhCBuwQKips9AquW6yg1k7969ZGdnV1irws7OjunTpzNjxowyj/Hz82PChAnMmjWr0u3Izs7mm2++MWXhBdi/fz9NmzYlJiaGGTNm8M033/DNN9+wc+dOLl++THp6OpMmTQKMPZfu3bszefJkEhISePLJJ/nqq6+ozGeQo6Mj/fr1Y/du45D3b7/9xpAhQ9BoKv/x/PDDDxMbG8u5c+cq/drKqPeT
40KIRlLKwnJgQ4CTddme1o2MgeN0pHlLZ3VuRUNVGkdHDGlp+DsU/dpPR6bSppFjiXFbAPfx49A39iPy1ddI2bSJ3LAw/D79pNShL0Uxx5nWtZMpt83ZMxUeEx8fj4eHBzpd0b+F7t27c/r0abKzs/n5559Njz/zzDPMmzePHTt20KJFi1LP9/rrr9O8eXNOnTplVhu/++47tm3bhpWVFe3atWPTpk2m53x9fU3f7HU6HatXr2bq1Kk0bdoUgPfee4927dqxbNkydDodn332Gbfeeiu9e/dm4MCBDBgwwKw2FOfr62uqUR4fH1+idO4PP/zAmDFjyM/Pp1u3bvzyyy/lngeo8VTu9aLHUYEPhBAnhBD/AvcAU+qyMbcW9BT+DU82awhJ61rU49AWBJHCvRxrDoTy4Ke7WfRn6V1sp379CFi1Ep2XF5mHDpF54GB1m68o9YK7uzvx8fEl5hD++ecfkpOTcXd3LzHUYm1tzZtvvsmbb75Z5vk8PT2ZNGkSM2fOLPH46tWrTRPI999/v+nx4cOHk5ycTGxsLH/88UeJ2uD+/v4lzhEZGUlAQIDpfkBAAHl5eabCUi4uLgwbNoyTJ0+WqFr47rvvmq797LPPlvv7iIiIwK1gdMLd3b1E6dxBgwaRnJzMggULTHM/5Z0HMJ2rptT7HoeUsuzK9HXAx8kGL0drYtOyCU7IJMjDvtzjtcV6HDpXN3JDQjFkFWwCtDb++j/8+Rxu9laM6nJ9qU7btm0JXP8d6X/uwum+/1jwnSg3G3N6ArWlW7duWFtbs2XLFoYOHVrh8ePHj2fu3Lls3LixzGOmTZtG06ZN6dKlaDR79OjRjB49ulJtu7b37+vrS0hIiOl+aGgoOp0Ob29vAI4dO8bXX3/NqFGjmDx5Mj/99BMA06dPZ/r06RVeLz09nd9++800HNenTx82b97MrFmzKj1ctWnTJry8vGjVqlWlXldZDaHHUa8IIbi1sQsAx8OSKzy+cGhJ6PVoCtKry4IqgAM7+PLW4HYAzNh0gh0noko9h97b27ThEODqyVMkrV9f9TehKHXMxcWFWbNmMXHiRDZs2EBaWhoGg4Fjx46ZVhcVp9PpmDNnDnOLVdMs7Zwvv/wyH3zwgUXbOmrUKBYsWMCVK1dIT09n+vTpjBgxAp1OR1ZWFo899hjvvvsuy5YtIyIigkWLFpl13uzsbA4fPsxDDz2Eq6sr48ePB2Dq1KkkJSXx+OOPc+nSJaSUpKWlcezYsTLPFRMTw8KFC5kzZw7vvfdeleZHKkMFjiro6G8crjoeXnHgKJwc19jbIwpWihSvyfH4nQFM6dsSg4TJa4+y81xsuefLT08nfOJEot+cSczcD0qtf64oDcGrr77KRx99xAcffIC3tzfe3t4888wzzJ07l+7du193/KhRo2jUqFG553zxxRfRFuyjspQnnniCxx9/nLvuuougoCBsbGz49NNPAePcir+/P8899xzW1tasWrWKN954gwsXSs8cAfDBBx/g6OiIu7s7Y8aM4fbbb+eff/7B3t44euHh4cG+ffuwsbGhZ8+eODo60rFjR9LS0li8eHGJc7m4uGBvb0/79u3Zvn0769ev54knnrDo+y+NKh1bBX+dj2PM1wfo1MSFjRN7lHvs1ZOnCH7kEfR+fth2uJXU7TvwnT8P5weLdolLKfm/bWf4es8VrHUalo27g+7NPco8Z/KGDUTNngN5edjf1Qu/efPQlrLRULm5qdKxSllU6dg6UDhBfioyldz88tdLW7dojnWbNjj264ewKuxxlJzgEkLw5oA2PNq1CfbWOpzt9OWe0+WRR2iydClaFxcy/tpN8PARZF++XI13pCiKYj4VOKrAxc6KQHc7svMMnIsufz+Hxtqapps24v3f14qGqnKuLx8rhODtwe3Y+kJP2vqWvsejOPuuXQjcsAHrVq3ICQ4mePgI0svZJKUoimIpKnBUUQd/4wT5v+EpFRxZRFgbExsasrJKfV6jEfi5
2JrubzwSzsmIss9v1diPwDXf4njffRjS01VBKEVRaoUKHFVUuLLqXzMmyAtprG2A64eqSrPzXCxTvzvO40v3l9ur0djZ4ffxAgJWrcSxTx/T4zfi3JWiKPWDChxV1KFx4cqqyvQ4rl9VVZYezTy4p5UnSZm5jP5qP5fj0ss+rxDYdS6az8o8coSQkaNK1DpXFEWxFBU4qqitrzNajeB8TBpXc8xbEls4VFXaHMe1rHQaFj92Oz2bexCfns2jS/YTmlB6YsTipJTEzpvP1ePHuTJsOJkFaQwURVEsRQWOKrK10tLS25F8g+RUpHm9Dk1Bj8NgxlAVGNO4fznmdroEuhGdmsXIL/dWGDyEEPgv+gz77t3JT0ggZNx4Er/9Vg1dKYpiMSpwVENlh6uKluNW3OMoZGel4+vxd3B7gCuRKVk8/+2RCoOA1sUF/y+/wG3sWMjNJeat/yPqv6+XOSmvKIpSGSpwVENlUo9A8TmO6z/A4z//goRl35T6OgdrHcuf6MJ/bvFm3rAO1+XSKfVaOh3er/8X33nzELa2pGzZQtizz6meh6Io1aYCRzV08C/KlGsOjU3pQ1X5aWnEffwxsfPmIcsowOJgrePLMZ1p5eNoeiwzp+Llt84DHiRw7VqsAgJwf2K8WUFHUWpLfSwdW1HZ1tIUtqMwG66DgwMDBw4EYOvWrfj4+JRIdb5lyxb8/PwICQkp8RohBPb29qb7u3fvNpWbdXBwwM3NjX79+pXalj///BMhRLn5vCxFBY5qaOntiLVOQ3BCJsmZFc9blLWqKrcgFTL5+RjSy149Vdy3+0PpO38XwfHXJ4S7lk2rljTdthWHu+4yPXb133/LDFKKUhvqY+nYypRtvdbChQtJT0833bZu3QrAwIEDuffee5kyxVgRIjk5meeee47FixcTEBBQ4jUAx48fN93v1asXYMzrlZ6eTkREBH5+fjz55JPXXX/58uW4ubmxYsWKSv0uqkIFjmrQazW08yvsdVQ8z1HWHIcpcAD5qRUXiMo3SDYfjSAyJYuRX+4zK3gIfVEak4wDBwge9SjhL0wmP828SoaKYkn1tXRsVcq2muOTTz5hx44d/Pzzz0yZMoW7776bQYMGVfo8tra2DB8+/LpMuRkZGWzYsIHPPvuMCxcuUJO5+kAFjmorXtipIprCneM51waOov0W+SkVByCtRvD1+Du4I9C1YLWVecGjkMzJRWNvT/rvvxP8yDCyzp83+7WKYgn1tXRsdcq2lsfDw4P//e9/jB49mm3btvHJJ59U6TwZGRmsWbOG5s2bl3h848aNODg4MGzYMO677z6WL19uiWaXqd4XcqrvOhRMkB8LM6PHYV16ksPiPQ6DGT0OMM55fDO+C+OWHeBgcBIjv9zHmqfvrLCwFIBDzx4EbVhP+AuTyT53juCRo/B9522cilVIU248gf/9sVauE/z+gxUeU19Lx1anbOvkyZN55ZVXTPdfeOGFEkNod955JykpKQwfPrxSNckB5s2bx8KFC0lNTSUgIIAtW7aUeH758uWMGDECrVbLo48+yuTJk/noo4/Q68tPmFpVqsdRTYU5q46bUUrWFDiuWRabG1lsqCrFvMABYF8QPAr3eYz6ch9XzOx5WDVpQuDaNTgNHIjMzCRiylRi3p+LzM01+/qKUlX1tXSsOWVbn332WdM53333XdOxn3zyCcnJyabbtfMuTz/9NGPGjGH79u0lJuPN8corr5CcnExwcDC2tracO3fO9FxYWBg7d+40VTocPHgwWVlZ/PhjzX1RUD2Oagp0t8PJRkdcWjbRqVk0crYt81jTBsBrhqpySsxxmJ/CBIzBY9n4Oxi/7CCX49MrTPNeoj22tvh+MBfBRAdfAAAgAElEQVTbW28lZu5cUjZvxm38OPQFJTGVG4s5PYHaUl9Lx5pTtvXzzz/n888/N/ucAEuXLiUsLIxt27bRuXNnnnrqKY4ePYqVlVWlztOkSRP+97//MXbsWAYMGICtrS0r
V67EYDCYVnEBZGVlsXz5ch566KFKnd9cqsdRTUIIU6/jcEhS+cfa2gGQHxdPXkKC6fHicxzXDlVFzZpN6DPPlLsCqjB4fPdMN1p6O5Z5XFntd3v8MQJWLMfvo/kqaCi1or6Wjq1K2daKREZGMm3aNJYsWYK1tTXPPvss7u7uvPPOO1U6X79+/fD19eXLL78EjMNUs2bN4tixY6bb999/z/bt20ko9jljSSpwWEDXIGN52P2XE8s9Tuflid2dd2LIzCTqjTeRUpKfloah2IR48aEqKSXJGzeSsesv8uLiyz23vbWOpp4OpvvfHQzjQoz5K6bsOnXCvli5zoSvviLu04WqNK1SY+pj6djKlG291qRJk0rsySgc/po4cSIjR440La0VQrBkyRI+/vhjs+dkrjVt2jQ++OADdu3aRUhICM8//zw+Pj6m26BBg2jevDlr1qyp0vkrokrHWsDB4ESGfb6Xlt4O/DLl7nKPzY2K4vKgwRjS0vCd+z7WrVtzZXBRd9JlxAgazZkNGJfmnu/SFYCgzZuwad3arPb8djqGp1Ycwt3eilVPdaVNo8qVlc2NiuLif+6D3Fzsu3fD98MP0bm7V+ocSt1TpWOVsqjSsfXArY2dsdZpOB+TTmJG+RsB9Y0a4f366wDEzv+I7PMli9oXn+MoPpyVn1T+MFhxPVt4cHdLTxIychi1ZF+5xaDKaqP/54vRurmR8c9ergx5WGXZVRTFRAUOC7DWaenUxBWAA1cqHlN0fmgwNu3akRcbS+xHHwGgb9wYAEOxoar8YikK8hLLHwYrrjCrbt82XiRn5jJqyT6OhpofeAAcevQgaNNGbDt1Ii82lpAxY0lY+rXKdaUoigocltK1acE8x5WKP+CFRoP3dGOvI69g6Z9tx45AyZ3jefHFexzmVxoEYzBbNPp27m/nQ1pWHo8vPcDBYPODD4De25uA5d/g9sQTkJ9P7IcfEv/pp5U6h6IoNx4VOCyka5BxDmDvJfNWMdh16oTfxx/j8fzzeE9/HfcnxgMlA0d+YrHAUYkeRyErnYZPR93GwA6+pGfnMW39cfIqsVwXjKlKvF+dRuPPFmIVEIDL8OGVboeiKDcWtY/DQm5r4oK1TsPZ6DTi07PxcLCu8DVO/e+D/vcBRUNRxVdY5SUUBYv85MoNNRXSaTV8PKIjng7WjOrij05bte8Kjn364HD33YiCnb7SYCDtl19wvO8+lXFXUW4yqsdhITZ6LXcEGoerzO11FKd1NO6/yE9LM+3ZKN7jyEusWuAAY26rmQNvoUWxPR7hSRWXob2WKJYeIuGrpUS8NIXw5yaSV4mJe6V2GVQGZOUalpinVIHDgro1Mw5X/XOp/D0XpRF6PRo7OzAYMBRsgCrR47Dgh/PKvcHcO28XP52MrvI5rIIC0Tg5kf7nn1x5aAiZBw9arH2KZdjb2xMREUFOTo5a1KAAmFLH29jYVOs8aqjKgno09+DDn8+x52LVdmtqnJ0xZGaSn5KK1tGR/ITqzXGUJTQxk5x8A89/e4SPR3RkYAffSp/DqV8/bG+5hYiXX+HqsWOEjB2Hx/MT8Xj2WUQ1NmApltO4cWPi4+MJCQkpkRNKubnZ2NjQuGAVZ1WpwGFB7f2ccbTREZqYSVhiJv5udpV6vdbJibyoKAypKYBfiSW4eVWc4yjN9AfaoNdqWPTnJV5ce5TcfAMPd6r8H5Lez4+AlSuI+3QhCUuWEP/pQjL3H6DxJ/9D6+JisfYqVaPRaPDy8sLLy6uum6LcYNRQlQVpNYJuTY3DVXsuVn64Sutk3OFduLKqRI8jqeLsu+YSQjDtvla81LcFBgkvrz/OmgOhVTuXXo/X1Cn4f7UErbs7Mj8fjYNDxS9UFKXBUoHDwnq1NObZ/+tCXKVfq3EuCBwpqcjcXGNRJ40GYWcHeXkYLFitTwjBS31b8mr/VkgJr288UeXgAcYNg003b8Lvo/mmSfT8lBRkTsUldRVFaVjqReAQQgwTQpwSQhiE
EJ2vee51IcRFIcQ5IcR9ddVGc93VwgOAvy/Ek2+oXA9B62SsJpifmmJaRaV1dUXnZlytZcl5jkITezdn9sBb8HS0NiVrrCqdp6cpu640GIiY+jLBox8jJyTEEk1VFKWeqBeBAzgJPAz8VfxBIcQtwEigLdAfWCSEqNczrwHu9gS425GalcdxM8rJFmcaqkpKNi3F1bm5oS0IHDW17HVcjyB+f/nuEtl1qzsslhcTQ/aVy2SdOMHlIQ+T/P1GtbJHUW4QZgUOIUS5X0WFEFohRKeqNkJKeUZKea6UpwYDa6WU2VLKK8BFoEspx9Urd7UwDlftPl+5eQ6roCAAklauJKugwpfW3R2tq3GiubJpRyrDyaaoxOTSv68wfdPJSveYitM3akTTTZtweuB+ZGYmUTNmEPHSFPKTa+49KIpSO8ztccQJIUxLM4QQR4UQxZfheAA1sZDfDwgrdj+84LHrCCGeFkIcEkIciour/PyCJfUqGK7aeS62Uq9zGfowdp07kxcXR9R/jbmsdG5u6FwLhqqSLD9Uda3olCw+/Pksaw6EMvW7Y5WqKHgtrbMzvvPn4zv3fTT29qT9/DOXBz9Exr59Fmyxoii1zdzAcW1OiebAtTUPy807IYT4TQhxspTbYHMbWx4p5ZdSys5Sys6VLQRvaT1beGBnpeVYWDKhCebv0BY6HX4LPkJXrFiNztMTrasx864lNwGWxcfZhmXjumBvpWXLsUgmrj5Cdl7VizkJIXAePJigzZuw7diRvJgYMvbvt2CLFUWpbZac4yh3XENK2VdK2a6U25ZyXhYB+Be737jgsXrNzkrHfW19ANh8rHLN1Xl60nTLZnz+7y1cRo7A9bHRaN2MgaM6aUcqo1szd1Y91RVnWz2/no7hqeWHyMyp3gYyK39/AlatxGf2bDwnTjQ9rlZdKUrDU18mx8vyAzBSCGEthAgCWgAH6rhNZnnoNuOI2uajEZWeFNY6OeE6bBiNZs/Gyt8fnZtxb0h+QuX3hlTVbU1cWfv0nXg4WLH7Qjxjlh4gNSu3WucUOh2uI0cg9Mb5lLyEBC498CCJ336rJs4VpQExN3BISvYorr1fLUKIIUKIcKAb8KMQ4mcAKeUp4DvgNPAT8LyUskEUwe7RzB0PB2sux2dwPLxyFfiupfM0zpkUr89RG9o0cuK7Z7rh62xDWFImKZnVCxzXSt3xE7nh4cS89X+EPf0MuTExFj2/oig1ozJzHPuEEOeFEOcBe+CPYvf/qU4jpJSbpJSNpZTWUkpvKeV9xZ57R0rZTErZSkq5ozrXqU06rYbBHY05oDYcDqvg6ArO5VEQOOpg0r+ppwPfPduN1U/dWekUKhVxe2w0fgs+QuPsTMbu3VweOIiUrVtV70NR6jlzc1XNqdFW3KCGd/Zn6d9X2HIskjcevAUbfdW2oGgLA0d87Q1VFdfYtWTAWLE3mB7NPWjmWf3UIk73349tp9uJmvkmGbv+InLaq6T9+hs+s2eZNj4qilK/mBU4pJQqcFRBKx9HOjR25nh4Cj+fimZwx1JXEldI5+YGQpCflITMzTXNEdSFHSeimLnlFG72Viwf34X2jZ2rfU69txf+n39O8oYNxL73Pmm//ILrY6PRdan3W3YU5aZUrclxIUQvIcTQijYI3syGdTYuClu9L7TKQzBCp0Pr7g5S1trKqrLc3cqTu1t6kpiRw8gv9/JPFZI5lkYIgeuwYQT98AM+s2djXyxoGNTKK0WpV8zdOT5JCPHGNY9tAXYB64ELQojWNdC+Bm9QR1+cbHQcCE7krwtV/5A1zXPE1+3mRjsrHUvGdGZQB18ycvIZt+wgO05EWez8Vo39cB05wnQ/Y+9eLve/n4x/qjWNpiiKBZnb4xgDmFKnFmzaewB4HLgDuABMt3jrbgBONnom3dscgPe2n7kujcfB4EQGf7aHc9FFmW9TruZet/S1LifIr2WlM9YxH9c90FQQqjqZdcuTtHYduZGRhD7xJFGzZpOfnl4j
11EUxXzmBo5mwNFi9x8AtkkpV0spDwMzgLss3bgbxZhugfi52HI2Oo3vj4SXeO6r3Zc5HpbMN/8EA5BvkNzxzm/0+2hXiSBTGDjy62iC/FoajWDWwFuY2q8lBgmf7bxY7U2CpfGbPw/PFyeDXk/yunVcHjCQtD//tPh1FEUxn7mBwxZILXb/Tkpmsr0AqDJjZbDRa5l2XysAFvx6nqxc41aUfINk7yXj3oy/zschpeRkRAo5eQZiUrNJzy76IC7ay1E/AgcY5yUm92nB3KHtWflkV+ysLF9QUuh0eDz3HEHfb8Dm1lvJi44m/NnniJj2qkqYqCh1xNzAEQ7cCiCEcMWY5nxvsec9KRlYlGsM6uDLLY2ciErJ4us9VwA4FZlCapYxOEQkXyU4IZM9l4oCQ8nAYcy/lRdXfwJHoRF3NCHIw950f8eJqGolRyyNTcuWBK75Fq/XXkPY2JCxby+IctOjKYpSQ8z9irgO+EQI4Y+xLkYYJVN/dAZKS4uuFNBoBP+9vzVjvj7Agl/PE+RuT0hiyQSIuy/E8c/Fot3h6VnFAkc9muMozzd7rjB762n6tPbis9Gdqrx3pTRCq8V9/Dgc+9xLbnQ0WmfjUmBDdjb5ycmmIlKKotQsc3sc72BcQfUOxsy4o6WUxb9SjgJ+tHDbbjh3tfTk6buakpsvef7bIywr6Hn0bG4MCr+ejuFgcFHq9PTsognyut4EaK4O/i642On5/Wwso7/aT1KG5ZfSWjVpUmK5bvzixVx+cABJ332ndp0rSi0wK3BIKbOklOOklK5SyluklP9c83xvKeUHNdPEG8vr97fmpb4tMEiISc0G4LX+xpXMuy/Ek51XFI/TSvQ4CoaqyggcUkpStm4lJzy81Odry21NXFlfkN/qcEgSQz//h7BE81PLV5aUkpxLlzGkpxM9cxahY8aSfelSjV1PUZT6nx33hiOE4KW+LVn/bDc6B7gyqksT2jd25uV+LdFpSo7ZV2ZyPH3XLiKnvUrM2+/UXOPN1MLbkY0Te9Dax5HLcRkMWfQPJ6qZ6LEsQgj8PvkfvvPnoXVzI/PgQS4/NITYjz/GkJVVI9dUlJudMKdrL4T40pyTSSmfrnaLLKBz587y0KFDdd2MSrsYm86f52I5HJLEjpPRzB3anhF3NAGM36zP3dYJmZVFi793m+Y8CsW89z6Jy5eja9SIFjv/qIvmXyctK5fnVh3h74vxtPdz5odJPRA1OKGdn5xM7EcLSP7uOwD0/v4Erl2Dzt29xq6pKDcSIcRhKWXnio4zt8fxFPAfjPMbLcq4Na9aU5VCzb0ceKpXU3ycbYCSQ1VCCGzatQUg9IknyY0pWZY28/BhAPKiojBk1tzQUGU42uj5etwdjO8RyKLRnWo0aABoXVxo9NYcAr79FuuWLbEKCkSrEiUqisWZGzi2AI2AfGAx0E9Kec81t3trrJU3GUdr42K34kNVAH7z5mHVtCnZ589z5aGHSPvD2LPIT88g6/Rp03E5ISG119gKWOk0zBrY1pSSXUrJH2djanQS267TbQR9vwHfuXNNwSr7wgUSV6xA5ll+k6Ki3GzMnRwfAgQCfwLvAxFCiA+EEC1qrmk3LwebgsCRVfJDTu/jQ8CqldjdeSf5SUmET3yepHXfcfXYMTAUTarnXLlSq+2tjC/+uswT3xzi1Q3/WnyvR3FCr0dXUKtdSknUnDnEvPseV4Y8TMa+fTV2XUW5GZg9OS6ljJJSvoMx/cjYgv+eEELsFELY1FQDb0YO1sa06df2OMCYYr3J10vxfOlFAKJnzSL2ww+NT2qNeyay63HgaO7pgI1ew/rD4Ty5/FCp79HShBC4P/EE+saNyb5wgdBx4wmf/CI54fW+fL2i1EuVXlUljX4CPgf2Ab0AFTgsyN7aGADSyvhQFRoNHs8+i9d/XwMg+5xx76VD794A5Fyuv4Gj7y3erH26G+72Vvx1Po7hn+8lKuVqjV/X8d57afrjNjxfeglha0vaL79w
+cEHifvkUwxXa/76inIjqVTgEEI0EkJMF0JcBlZiDBytpJQqaZAFORYMVWVU8G3cfdw4Atevx/mhh3D8z39we2w0UL+HqgA6+rvw/XPdCfKw53RUKoMX7uF4WM3/CWmsrfF49hma7diO04AByOxsEpYtUzmvFKWSzEo5IoQYgHFlVX+MyQ1fAzZLKXPLfaFSJaahqqyKh3Fs27fD9v33AMhLMhZ5ygkORkpZ46uYqiPQw56Nz3XnudWH2Xc5kbe2nWbDs91qpc16Hx/85n2I66iR5ISEom/UCACZl0fWuXPYtm1b421QlIbM3FxVPwAhGFOOXAL0wLBr/5FLKb+1aOtuUg5lrKqqiM7VFa2rK/lJSeTFxtb73E2u9laseKIr8345x/gegbUe6Oxuvx2722833U/euJHombNweuB+PKdMwcrfv1bboygNRWWGqgKAOcCqMm4rLd66m1ThUFWaGT2Oa1m3MC50S1qzxqJtqilWOg3TH2hDI2dbwLgCatW+EFPq+dpkSEtDWFuTun0Hlx54kJj33jP14hRFKWLuclyNGTfLpUG9yVW1xwHgMel50GhI+OJL0v/eY+mm1bhFf17ijc0nGf3VfuLTs2v12u5PPkmzHdtxfughyMsjcfkKLv3nPuKXLFHpSxSlGHNrjt9lzq2mG3uzsC8WOCq7Uc6+Sxdj8JCSyGnTSPtjJ5cHDSb2449roqkWd08rL1OCxIc+28PZ6Not86L39cX3/fcI2vg99j16YEhLI27+R6T+uL1W26Eo9Zm5uaoMgARKG4QuPIGUUlq+BFwVNNRcVcW1emMH2XkGzrzVH1urynXmZH4+Yc8+R8bu3abHNHZ2tNy3F2FlRW5sLOk7/8Tl4SEIvd7STa+22NQsJqw4xPHwFGz1WuYP78AD7RvVSVvS/95D8oYN+M37EKEz/nlfPXUKm9atEVrVyVZuLJbOVeUPNCn4b/FbEMad5FmA2k1lQdUZrhJaLX7zPkRfOLkrBIbMTDKPHQMgevYcomfNImHpUou115K8nGxY90w3htzmx9XcfCauPsIHP50tUYO9tjj07EHjjxeYgkZeXBwhox/j8oCBpPz4I9JQc7vfFaW+MneOI+LaG3AbsB2YCLwFtKzBdt50TGlHqrizWuvsTOCab2m8eBGujz8GQMbuv8mLjyd91y4AElesrLeb32z0Wj4a3oE3B9yCViPYezmBvHrwIZ0bFYXO05OcK1eIfPkVrgweTOrPv6gAotxUKr1zXAjRSQjxB7AR+ANoIaWcK6Ws3ZnMG5ypx1GFlVWFdB4eON5zDw69jNNP6Xv+JmXbNsg3rljKT0wk+fuN1W9sDRFC8GTPIFY+2YXPH7sda13dDw3Z3norzbb/iM//vYXOtxHZFy4S8eKLXHl4KKnbt6sKhMpNwezAIYTwF0KsAg4CyUBbKeULUsr6Xcu0gSoMHGnZ1d9jaXdHZ4S1Ndmnz5C0ajUATgMHAg1j2W73Zh54Oxmz2hgMkhfXHmX7iag6a4/Q63EdNoxmP/2E98w30Xl5kX32LMkbN9XrTZeKYinmrqp6HziHMbHhXVLKh6WUF2q0ZTc5xzIy5FaFxsYGu67GGt254eFonZ3xeWOG8X5YWIP6lrzjZDRbjkUycfUR5mw9RU5e3Q0RaayscHv0UZr9+gs+s2fjOel503NXT5wk4etl5Kdn1Fn7FKWmmLsK6lXgKpAOzCrrW5WU8j8WatdNrzqT46XxeeMNkr/fiCEjA8d770Hr7Iyws0NmZmJIT0fr6Gg6Njc2FqHRXFdlsD54oL0Pswbewrvbz7BsTzDHwpJZ+Ggn/Fxs66xNGmtrXEeOKPFY/OLFpP/xB/FffIHb6EdxfewxdKqolHKDMDdwrKBo2a1SCxzMTHRoLqsmTfCa8lKJx3Tu7uRmZpIXH28KHDI/nytDHkZYW9H899/r3dCLEILxPYLo4O/CpNVHOBqazIOf7GbBiI7c08qrrptn4jpyBPkpKVw9fJj4
RYtJ+GopTgMG4DbmcWxat67r5ilKtZi7qmqclHJ8RbeqNkIIMUwIcUoIYRBCdC72eKAQ4qoQ4ljB7fOqXqOhKUx0WFZqdUsorMWdn5BgesxwNYv8hATyIqMwZNTfYZZOTVz5cXIverfyJDkzl/HLDvLnudiKX1hLHO66i8DVqwhYvQqH3r2RubmkbNzIlYeGkLR2XV03T1GqpV5s2ANOAg8DX5Ty3CUpZcdabk+dcyioyZF6teYCh9bDGDjy4osCh8wqWp6bFxeH1sGhxq5fXa72Vnw99g4W/XmR3Rfi6dG8/g2tFSZSzAkJIXHValJ++AGH3nebns86fRp948ZonZzqsJWKUjmVXo5bE6SUZ6SU5+q6HfVJY1djje5Lcek1dg2du/GDNi+xWI+jWE6mvLi4Gru2pWg0gkn3tuDbCXei1xr/nOPTs/npZN2tuiqNVUAAPjOm02L3X+h9fACQBgPhU6Zw4e7eRM6YwdVjxxrUQgXl5lUvAkcFgoQQR4UQu4QQveq6MbWlnZ8zACcjUmrsGqahqhI9jqLAkR/fcFZaazXGuRgpJdPWH+fZVUd4dcNxi80RWYrGysr0c35yMnpfX+TVq6R8v5HgkaO4MmgwiStWquJSSr1Wa4FDCPGbEOJkKbfB5bwsCmgipbwNmAp8K4QotU8vhHhaCHFICHEorgF8U65IUw977K20RKVkEZdWM3srTUNV18xxFMprQIGjuHtbe2Gt0/DdoXAGfPo3/4bXzw9hnZsbAcuW0XTHdtyefAKtmxvZFy4Q8+67XLjrbq6ePFXXTVSUUtVa4JBS9pVStivltqWc12RLKRMKfj6MsYhUqalNpJRfSik7Syk7e3p61sybqEUajaBtDfc6TENVCUUBQmYXH6pqeIFDCMHj3QL5YVJPWnk7ciU+g4cX/cNnOy+Sl18/04JYBwXhPW0aLf7cid/HH2PfowdaV1dsWrcyHZOy7Uey63lJYOXmUa+HqoQQnkIIbcHPTYEWwOW6bVXtaV8QOE7UVODwuH6o6kbocQC08nFky6QejOseSJ5B8uHP5xj2xV5y62nwABBWVjj1v48mS7+i2fYfTYkV85OTiXr9dS7f/wBXhg0nccXKBv3/Rmn46kXgEEIMEUKEA92AH4UQPxc8dRfwrxDiGLABeFZKmVhX7axtNR443EsZqiq+qqqBfzjZ6LXMHtSWFU90wdfZhq5B7qYJ9PpOY29v+tlw9SpODz6Ixs6OrBMnjENZd/cmdMLTpPzwQ71NVKncuOrFclwp5SZgUymPfw98X/stqh9qeoJc61E4VFV8crxoPqWhB45Cd7X05Kcpd2FVLGgcuJKIq52eFt6O5byyftA3aoTv++9hmDWT9J07Sdm6jfTdu8kouDXfdScaW+POeUNOTokJeEWpCfUicCilKz5BHp+ejYeDtUXPr7G3R1hbG9OOZGaisbO7psfR8BcZFHKyKSpYlZKZywtrjpCUmcuLfVowoVdTrHT1vyeisbXF6YEHcHrgAfKSkkj76SeyL1xE723cMS+l5PLAgeh9GuHYrx+O/fqi9/au41YrN6L6/6/lJqbRCNr61lyvQwhx3XBV8R5HfkIisiAF+41EozGWqM3JM/Dhz+d44JPd7L+cUPEL6xGdqyuuo0bhM/NN02O5YWHkRkSSuX8/MW+/zcW7e3Nl+AjiFn7G1RMnVM0QxWJU4Kjn2voZVx+fiqyZ2tvawsBRMCxVvMeBwUB+UlKNXLcuOdroeX/orax6sitBHvZcjE1nxJf7eGX9cRIzcuq6eVVm1aQJLf/Zg+8Hc3Ho2wdhbU3Wv/8Sv3AhwcOGc/XwYdOxN+IXAqX2qKGqeq5dDfY44Pp8VbLYqiowBpT6mCXXEnq28GDHi734fNclFu28xIbD4VyISWPz8z3qXXJHc2mdnHAeNAjnQYMwZGaSsW8/6X/t4urRY9h2LMrcEzZxIoaUVOx79MCuaxdsO3ZUcyOK2VTgqOfaNy4IHJE1FDg8
CybIC5bkGrKvCRxxcXADZ3O10Wt5qW9LBnf0Y+aWkzzZM8gUNAwGiUbTMAMIgMbODsd778Hx3ntKPC5zcrh6+AiG9HSuHjsGn32GsLbGttNt2He9E8f/9MO6adM6arXSEKjAUc819bDHRq8hLPEqKZm5ONvpK35RJZiGqgo2AV7X42iAmwCrIsjDnhVPdCnR05j5w0lSr+bxav9WptxhNwJhZUXzP/8kc/8+MvbtJ3P/frLPnydz7z4y9+5D6+pqChzZFy+SFx+Pbfv2JZYIKzc3FTjqOZ1WQ5tGThwNTeZUZArdLZwBVudh3GVvmuMo6HFonZ3JT0khN7p+JQusScWDRlJGDhuPRJCZk89Pp6J5qmcQE+9pbiqw1dBpHexx7NMHxz59AMhLTCTzwAEy9u3Dvkd303HJ6zeQuHw5aDRYt26F3W2dsL3tNuxu64jO17fBDukp1aMmxxuAwnmOmtgIWDh/UZjQsLDHYdflDgDSfvn1pszY6mpvxS9T7mJwR19y8gws+vMSvT/cyap9IXVarram6NzccOrfn0azZ2PVuLHpcb1vI2zatgUhjDXrV68m8pVXuNinL2ETnjYdJw0GcqOjb8q/lZvRjfH16QZXOM9xNNTyyfqum+MoyI7rdP/9ZB48RPbZs2SdOo1tu7YWv3Z919jVjv+NvI2x3QN5e9tpjoQm88bmk3z512V2vNgL+xuk91Eet7FjcRs7FkNmJldPnuTq0WNcPXqUq0ePom/sZzouJziEyw88gNbDA5u2t2Dbti3WLVth3bIlVk38TelTlBuD+h65Qg8AAB3eSURBVL/ZAHQJNNaqPhiciJTSosMDhT2OwqGqwrTqGkcnnAcPInH5CpI3rL8pA0ehTk1c+f657mw/Ec2C387T2sfRFDSklBhkUVr3G5XGzg77Ll2w79IFML7v4in482Ki0Tg5kR8fT8auv8jY9ZfpOWFlRdCWzVgHBQGQfekSGnt7dN7eaqirgVKBowEIcLfDy9Ga2LRsLsWl09zLcmkytO5FgUNKaepxaGxtcB46lMTlK0jd9iM+b76J0Gotdt2GRgjBg7c2on87H9KL1fjYczGBmT+c5MU+LXiwfSN0DSQXVnUJIRAFaU4A7Lt1o+X+feSGh5N16hRZp06Rff4CWRfOkxcbh96vqHcSPXsOmQcPonF0xKppENaBgVgFBWEVGIhNmzZYBQTUxVtSKkEFjgZACEHXpu5sPR7JvsuJFg0cGns7hK0t8upVDBmZpm+RwsYWm5Yt0Tg7Y0hJIT81FZ2rq8Wu21BpNQJn26KVbWsOhnI5LoMX1x7jw5/P8VTPIIbf4Y+d1c33T0sIgZW/P1b+/jj172963JCRUWKPiNbFBa2LC/nJyWQd/5es4/+annN9dBQ+M2cCkBMSQsKyZVj5N0Hv52e8NfZD6+Kieip17Ob7626gugS5sfV4JAeuJPLYnZb7RiaEQOfhQW5YGPnxcUU9DhtjXixtQeAwpKSAChzX+XhER3o29+DLvy5zJT6D2VtP8/HvF3j8zgDGdAvE09Gy+cUaomuX8Tb+9BOklOQnJJBz5QrZwcHkBAeTcyW4xCbFrDNnSF677rrzCTs7rPx8abJsmWmo9eq//4LQoPfxRuvmdlP3jmuDChwNxJ1BxnmO/VcSamSeIzcsjLz4eFPKEWFjHIbQOjmRC+Sn1FwJ24ZMr9UwqksThnf259fTMXz51yWOhCbz6R8X0Ws1TO7Toq6bWC8VfmHReXhgd8cdpR5j06YN3tNfJyc8nNyISHIjIsgND8eQnk72pctonZ1Nx8a89z5Xjx413tFq0Xl6ovPyQu/thUPv3rgMHQoYF3/kRkSg8/JC4+Cgei5VpAJHA9HcywFPR2tiUrOZsfkk/ze4ncUmZIsmyBNMSQ41tjYApn+c+ak1kyvrRqHVCPq386F/Ox8OBSeybE8wjxfrGW44HA7AgFsbYaNX34bNYRUQgNuYMdc9np+aSm50NEJfNGRo1TQI
Q2YmebGx5CclkRcdTV50NFmArlEj03FZZ84QMupR4x29Hp2bG1p3N3Suxv96TZ2K3sfHeOy588jsLLRubuhcXRF2dirQFFCBo4EQQvD+w+15bvURvt0fSiMnG16w0LdZU+3x+HjTUJWwviZwJKseh7k6B7rRuWAlHEBevoH5v5wjKiWL/9t2mqGdGvNo1yY093Kow1Y2XFonJ7ROTiUe833nHdPPhpwc8mLjyIuNIS82Fn1jf9NzMjcXfUAT8uLikZmZ5MXEkBcTQ2FOaK+X/r+9e4+PqjwXPf575p57AgkJSYAEuSihCILgXVttASuiW2t1q6fuyun22FZrd+tlW7ee46mtp6fdu9vaauulPda91XorrbUIVstWCzSK3EG5WQgkhARyz2Qu7/ljrSQzIRMyIcxMMs/385nPrFlrzZonay5P1rve9bzf6Fn38KOP0vLGG70v4nb3vHbWOedQct93rNfr6KDhF0/gzMvFkZuHMy8PZ14uzrw8HDk5VtIZZXXANHGMIBefVswPrp7F7c99yDs7Dw9b4ug54qiv7+2Oa5/jcORZX1Btqho6A9zx2Wk8u+YTNuxv4ql39/DUu3s4fUI+V84uZensMgqyRtcPSzI5PB485WV4Iq4z6ZY1fz5TVlgDjIY7Owk1NhJsaCR0xLp3RhT0dE8ox1dVRbCxkVBjI8bvJ9TQQKihAe+UKT3rhRobOfzTn8aMZ8Ljj5F94YUANDz1NEdffBFHVhaO7CwcWVk4s7J7uicX/mPvRZWtq1eDOHBkZiA+H46MDBwZ1rQzKyupyUgTxwgz3z7X8VFdC0fbu7jrpY1cN38iF00fN+RtdpcdCRw8AOEwuN09F2z1HHE0Df/Fh+nC7XRwzbwJXDNvApv2N/Hs2k/43YYDbNh3lA37jjKtOGfYS8mo43P4fDhKS3GXlva7vPjb3456HPb7CTU1EW5ujmomk8xMCm+9lVBzM6HmJmudpmbrvrUVR8TRUaD2IF27d/f7ep5Jk6ISR80d3yTc1tbvuuO+9U+MXbYMgJY//Ym6h76HI8OHs7CQSU8/PbgdcAI0cYwwJbk+cnwujrQH+Pnq3azYUoc/GD6xxGFfPR7YZ7XDO3y+nmXOvHwAwnqOY1h8qjyP75fP4v4lVazcVsdb2w+xYPLYnuW3/ed6wsawsKqET586btTUxhoNHF4vjnHjYFz0d81VUEDRbV8f1DaKbr2Vgi9+kXBrK6HWVsJtbYRb26wuyxm93ztjDFnnnE24rZ1wZyfhjg6ry3xHB+HOThzZvc2coSNHCey3vruupsR8T/VTOcKICNOLc6j+5AgvVFsflkPN/uM8a2DdTVXdH77oxKHnOE6GDI+Ty08v5fLTe//bbWoP8IdNBwmGDb/feBCP08G5U8aysKqES2YUD/vQwSrxuq9hOR4RofyRRwa1zdzFi8icfybh9g4IJ2aALk0cI9C0EitxHG61EsahluFJHMF6a4xxiUoceo4jUfIy3bz1rYtYsaWWFVtqqf7kCG/tqOetHfXIK5t47Ia5LKwqSXaYKsU4MjPxZCa27L8mjhFoenH0leMNbX6CofCQy124CgutgbjtMan7PeLQxJEQE8Zksuz8ySw7fzL1LX5WbatjxZZa3tvZwKzy3usWfrBiO7sOtXHBtCLOmjyGysIs7SqqEkYTxwg0rU/iMAYa2roozvXFeMbAxOPBXV5O4G9/sx5H1CDS6ziSpyjHy3XzJ3Ld/Il0BkJR13+8tvEgexva+eOWWgAKs70sqBzD/MoxnD+1kMlF2tVXnTzpUZFtlJlWfOyPQv0JNld5KnovVnN4e9vSHXrEkRL6XjT4zM0L+O6VM1k8s4TCbA+HW/28tukg9y/fwvPV+3rWqznawRtbaqlr7uy7SaWGTI84RqCx2V4Ks70cbvWT43PR0hnkUEsnkHfc58biqaigbfV/ASAZ/TdVDXepEzV0E8Zkcv2CSVy/YBLGGHYfbmPdnkbW7WnkwmlFPeut
2lrH/cu3AFCc6+VTZfmcXp7HzPI8Ti3JoSTXp++pipsmjhHqvstOY+uBZupb/Ly8vuaEe1Z5Kip6ph1eX8S0F/H5MJ2dmPZ2RMedTjkiwilF2ZxSlM118ydGLSvM9nLulLFs3N9EXbOfuuY6Vm2rA2Bslof37/tsz7ovf7Cf0vwMphfn6AWJakCaOEaopbPLWDq7jIf/uB048Z5V3ojEEXnEAdZRR7Czk1Bz8zGVTlVq+/ys8Xx+1njCYcPehjY27m9i4/4mNh9oYkxmb3LoDIT49osbCYWtoV/HZHmYNDaTyrFZVBRmsXhmCVOLh6+cvxrZNHGMcOPsst0nfo6jomfa4cuIWubMyyNYV0eoqQl3RME4NXI4HMLkomwmF2VzxZxjS3F0dIW4+oxydtS18FFdC41tXTS2dfUMVzx1XHZP4njmL3t5oXo/pfk+SvMzKMvPoNS+leVnaCn5NKCJY4Qbl2MdHVjnOIbOVdJ7fYAJBqOWdReU04sAR6+CLA8PXz0LsK5armv2s7ehjb2H29jT0MbMst7zZx8famVTTRObao79PJQXZPDOXZ/pefxPL2wg2+ukKMfbe8v2UZjjoTDbiztNRkwcbTRxjHDd/92daFOVOHq/wMHag1HLHPnasyqdiAgleT5K8nycFVEOpdvtF0/lijllHDjaYd86qbGnS/N7j1b9wRAvfbA/5us8uLSKG8+uAGDl1jp+veYTCjLd5Gd6KMj0UJBlTednuDl/amHPSfyuYBi3U/SkfhJp4hjhupuqTvTkeKTAwdqox73XcmjiUFavvrHZXs6YOPCIkILwyHVzqG/xU9/qt+67b61+xkVcd7SrvpU/f1Tf73Z8bgfbH1zc83jRj1ezr7GdbK+LHJ/bvrdui2eO56q55QDsP9LO65tq8XmcZLqdZHjsm9u6nTo+B6/L6ubcFQzjcgiOYRrjZrTTxDHCjcvtPcdxot1lC268kSPPPEPBDddHze8pdKhHHCoOHpeDJaf3X3m2r8tmjWdacTZH2gIcae+iqcO6P9IeoO8n2h8IEwgZjrQHONIeiFp2aklvJdqdh1r57h+2xXzN/7rz00wYY5XquP259by+uRaf20Gmx4XX5cDjcuBxOjhr8lgevGImAC2dAe54fkPP8sj1PC4HV84p6zkXtLmmic01TbicDtxOwekQXA5rOsPtjKqIvPVAMwaDy+HA5RRcDrGe5xCyvC6y7GKX4bDBwLAN4jZUmjhGuEyPi2yvi1Z/kOaOIHmZ7uM/KYbiu+4k/+qr8U6LHuej9xyHllZXJ0d5QSblBYOrt/Tu3Z/BHwzR2hmkpTNIq9+6b+kMUFHY2+uvND+Dm8+rpL0rRGcgREdXiPZAiM6uEO2BYFTl4UDI6k3WGQjTGeiKer2JY3rjau8K9XRn7s8ZEwt6EsfKrXX8+M2P+12vKMfLX++9pOfxTU+vi9ncfNvFU/nmZ6cB8NaOQ9z8q2pEsJKLw4HTIT23P37j/J7znidTSiQOEfkBsAToAnYB/2CMOWovuwe4GQgBtxljViQt0BQ1LsdLqz/I/qPt5GUO/SJAcbnwTZ92zHxXkXVBWXcRRKWSzety4s12MnaAisHTinO477IZg9reE1+aRzhs6AyGaO8K4Q+G6bJvPnfv+b+8DDeP3zg3anlXMERXyJqOHNVxRmku18wrJxgyBMKGUNg6UgqFDbm+6J/eacU5jM32EgyFCYYNwXCYYMgQDBtyIhJcKGwQscoMBUKGQCi6Gq4cc3x2cqRE4gBWAvcYY4Ii8jBwD3CXiMwArgWqgFJglYhMM8YkpnbwCDF7Yj67D7fx6voaqkqHnjhicZUUAxCojf2fllIjncMhZHpcZHpi/yz63M5BVyheWFUy6HV/vWzBoNb7XFUJe773ecJhQ8BOLiFjCIetJBN5bc7JlBJ94YwxbxhjuvuArgHK7emlwHPGGL8xZg+wE5ifjBhT2U3nVADw3F/30eYPDrzyELjtrroBu7dVoK4O
Y8ywv45SanAcDsHrcpLldZHrs3qfFWZ7E3ZyPyUSRx9fBl63p8uAfRHL9tvzjiEiXxGRahGprk+zJpVZ5fnMnVRAS2eQK3/6Ljc8sZaWzsDxnzhIrmIrcQRr62h5+212XngRjU8+OWzbV0qNLAlLHCKySkQ293NbGrHOvUAQeDbe7Rtjfm6MmWeMmVdUVHT8J4wyXz63EoCP6lp5Z+fhAU/gxcuZnYUjJwfj99OyahUAnTs+GrbtK6VGloSd4zDGXDLQchG5CbgMuNj0toPUABMiViu356k+Lv1UCY/fOJc3t9XxQvV+1uxq5Mo55cd/4iC5S4rxt7TQ9t57gI5BrlQ6S4mmKhFZBNwJXG6MaY9YtBy4VkS8IlIJTAXWJSPGVCciLKwq4foF1rgaa/c0DOv2e5qrDljnOXRgJ6XSV6r0qvoJ4AVW2hewrTHG3GKM2SIiLwBbsZqwvqo9qgZWVZpLttfF3oZ2DjZ1MD4v4/hPGgT3+OjeIaEWTRxKpauUOOIwxkwxxkwwxsy2b7dELPuuMeYUY8x0Y8zrA21Hgcvp4MwKqxTE2t2Nw7fd4ujEEW7SxKFUukqJxKGGV3dhuvd2HR62bbrtazm6aVOVUulLE8codIE9dOjLH9Tw/ifWcKInOl6HqyR6HA7j9xP2D19hRaXUyKGJYxQ6bXwuN59XSTBsuOpnf+Gax//Ckkfe4cDRjiFvs+8RB2jPKqXSlSaOUequRacyZ6JV1dbrclDb3MmNT67lP9b+bUhHH5EDPXXT5iql0pMmjlHK43Lw7LIF/O5r57H2ny9m6rhsdtW38c+vbOLGJ9fGXTLEmZ2Ns6AAHA7cE6xLazRxKJWeNHGMYpkeF58qzyM/08OLt5zDA0tmkJ/pZnttC1sOxP+jX/ajH1L2ox/hmWRdK6JNVUqlJ00caSIv081N51ZyxWyr1Ncr6+O/AD/r7LPJXbQQZ6413kCouWVYY1RKjQyaONLMFXOsxLF8wwGCofCQtuHoHthJh5JVKi1p4kgzp5fnUVmYRX2Ln6WPvstrGw/GvQ1nrjXmhzZVKZWeNHGkGRHhq5+egsflYMuBZm5/bj17DrfFtQ1tqlIqvWniSENXzy1nw798jitmlxIMG77/+ra4nq9NVUqlN00caSrD4+SeS08jw+1kxZY67npxI5trBpcItKlKqfSmiSONFef6+PbC6QA8X72PJT95hweWb6G9a+DhZ7WpSqn0pokjzX35vEpW3nEBN51TgUOEX763l6t+9hf2NbbHfI7DPuLQCwCVSk+aOBRTi3N44PIqln/tXCoLs9h2sJnP/etq/vfvt9LUcezY5c486xyHNlUplZ40cageVaV5vHrruSyqKqEjEOKJd/ZwxaPvsqM2uknKmdPdVKWJQ6l0pIlDRcnLdPPYjXP5/dfP47Txuew53MbCf1vNkkfe4Zk1n9DRFcJhJ45wSwsmpAMyKpVuNHGofs0sy+Ol/3E21545AZ/bwaaaJu57dTNnfe9N/udr29k1fioGCLe2JjtUpVSCSbxVUkeCefPmmerq6mSHMWp0BkKs3FrHE+/sYcO+oz3zy1rrueTMU1h8/oyeUQeVUiOXiLxvjJl3vPVciQhGjWw+t5Mlp5ey5PRSNtc08Zvqffz2vY+pyS7iV9uaOSh7NHEolUY0cai4zCzLY2ZZHrfWr2P1U79h02eu4ty5c5MdllIqgTRxqCHJnjmDmY17mffxSiqrliU7HKVUAunJcTUkvhkzAPBv34EJDnyluVJqdNHEoYbEmZuLe8IEjN+Pf9fuZIejlEogTRxqyHxVVQB0bt2a5EiUUomkiUMNWXdzVeeWLUmORCmVSJo41JD5ZsxAMjMxIT3HoVQ60V5VasiyzlrA9L+uQ5zOZIeilEogTRxqyMSlHx+l0pE2VSmllIqLJg6llFJx0cShlFIqLimROETkByKyXUQ2isgrIpJvz68QkQ4R+dC+PZbsWJVSKt2lROIAVgIzjTGzgI+AeyKW
7TLGzLZvtyQnPKWUUt1SInEYY94wxnRfDLAGKE9mPEoppWJLicTRx5eB1yMeV4rIehH5s4icn6yglFJKWRLWEV9EVgEl/Sy61xjzW3ude4Eg8Ky97CAw0RjTICJzgVdFpMoY09zP9r8CfMV+2CoiO04g3ELg8Ak8/2TRuOKjccVH44rPaIxr0mBWSpmhY0XkJuAfgYuNMe0x1nkb+JYx5qSOCysi1YMZPjHRNK74aFzx0bjik85xpURTlYgsAu4ELo9MGiJSJCJOe3oyMBXQGt5KKZVEqVIz4ieAF1gpIgBr7B5UFwD/S0QCQBi4xRjTmLwwlVJKpUTiMMZMiTH/JeClBIcD8PMkvOZgaFzx0bjio3HFJ23jSplzHEoppUaGlDjHoZRSauTQxBFBRBaJyA4R2Skidycxjgki8paIbBWRLSJyuz3/ARGpiSjBcmkSYtsrIpvs16+2540RkZUi8rF9X5DgmKZH7JMPRaRZRL6RjP0lIk+JyCER2Rwxr9/9I5Z/tz9vG0XkjATHlfRSPzHiivm+icg99v7aISILExzX8xEx7RWRD+35idxfsX4bEvsZM8bozWqucwK7gMmAB9gAzEhSLOOBM+zpHKwyLDOAB7C6IydzP+0FCvvM+z/A3fb03cDDSX4fa7H6oyd8f2F16DgD2Hy8/QNcinWxqwBnAWsTHNfnAJc9/XBEXBWR6yVhf/X7vtnfgQ1YHWkq7e+rM1Fx9Vn+Q+BfkrC/Yv02JPQzpkccveYDO40xu40xXcBzwNJkBGKMOWiM+cCebgG2AWXJiGWQlgK/sqd/BVyRxFguxqpv9kkyXtwYsxro2/Mv1v5ZCvw/Y1kD5IvI+ETFZVKg1E+M/RXLUuA5Y4zfGLMH2In1vU1oXGJ1/bwG+M+T8doDGeC3IaGfMU0cvcqAfRGP95MCP9YiUgHMAdbas75mH3I+legmIZsB3hCR98W6Wh+g2Bhz0J6uBYqTEFe3a4n+Qid7f0Hs/ZNKn7lUK/XT3/uWKvvrfKDOGPNxxLyE768+vw0J/Yxp4khhIpKN1R35G8Yqs/Iz4BRgNlY5lh8mIazzjDFnAIuBr4rIBZELjXV8nJSueiLiAS4HfmPPSoX9FSWZ+ycWiV3qZw7wTeA/RCQ3gSGl3PvWx3VE/3OS8P3Vz29Dj0R8xjRx9KoBJkQ8LrfnJYWIuLE+GM8aY14GMMbUGWNCxpgw8AtO0mH6QIwxNfb9IeAVO4a67sNf+/5QouOyLQY+MMbU2TEmfX/ZYu2fpH/mxCr1cxlwvf2Dg90U1GBPv491LmFaomIa4H1Lhf3lAv4OeL57XqL3V3+/DST4M6aJo9dfgakiUmn/53otsDwZgdhtqE8C24wxP4qYH9k2eSWwue9zT3JcWSKS0z2NdXJ1M9Z++pK92peA3yYyrghR/wkme39FiLV/lgP/ze75chbQFNHccNJJipb6GeB9Ww5cKyJeEam041qXqLhslwDbjTH7u2ckcn/F+m0g0Z+xRPQEGCk3rB4IH2H9x3BvEuM4D+tQcyPwoX27FHgG2GTPXw6MT3Bck7F6tWwAtnTvI2As8CbwMbAKGJOEfZYFNAB5EfMSvr+wEtdBIIDVnnxzrP2D1dPlUfvztgmYl+C4dmK1f3d/xh6z173Kfn8/BD4AliQ4rpjvG3Cvvb92AIsTGZc9/5dYpY8i103k/or125DQz5heOa6UUiou2lSllFIqLpo4lFJKxUUTh1JKqbho4lBKKRUXTRxKKaXioolDpRUR+aWIrEp2HH2JyNsi8kSy41BqMLQ7rkorIpIHOIwxR+wf6inGmIsS+PrfAZYZYyr6zB8DBE2f8hFKpaKUGDpWqUQxxjSdjO2KiMdYVZWHxBgz2AqxSiWdNlWptNLdVCUiD2BdpXyhiBj7dpO9TraI/FiswYTa7aqnfxexjQp7/etF5A8i0gY8aJd1+IWI7BJrYJ/dIvKQiHjt590EPAhMinjNB+xlUU1VIuIWke/bMXSJ
NXDP3/f5W4yI3Coiz4hIi4jsF5F7+qyz1I6/XUSOisg6EZlzEnatSiN6xKHS1f/FqilUiVW0DqDJrgX0O6xSDV8EDmDVJ3pORBYbY96M2MbDwF3AV+3HglVc7u+BOmAW8DhW2Yr7sQrjnQpcD5xpP6c1RnwPYZU6vwWrxMvVwK9FpK5PDPcD38Ea/GgR8BMRWWeMeVNESrAqBX/HvvdhleEOotQJ0MSh0pIxplVEOoAuY0xt93wRuQg4G2t8g+5mrZ/bBeK+jlUPqNvjxphniXZvxPReETkFuBW43xjTISKtQCjyNfsSkUzgNuAOY0x3ifiHRORMe/uRMTxvjPmFPf2oiHwNK9G9iTVanBt4wRiz115nW6zXVWqwNHEoFe1MrKGDa6yDjx4erAJykY6pzCoi/x1YhjWcaBbWdyzeJuEp9uut7jP/z8A9feZ92OfxAXoH8dkIrAA2i8hK4G3gZWPMPpQ6AZo4lIrmAJrobUqK1Pfkd1vkAxH5AlYl0ruxfuSbgS8A3x3+MGPGZLATlTEmJCKLsf6WS7CquH5fRL5gjPn9SYxJjXKaOFQ66wKcfeZVA/mAzxgT7/gdFwDrTfQYKhWDeM2+dgJ+e3uRMVxInGOKGKu//Tr79pCI/BH4B0AThxoyTRwqne0BviAiVVgns1uAP2GNZ/CyiNyJ1dxTAJwDdEacT+jPDuBmEVmK9QN/Gb0n3iNfs0REzsZq+mo3EYMoARhj2kXk37F6atXTe3J8KfDZwf5xInIOcDHwBtbYElOxTtg/OdhtKNUf7Y6r0tmTWCM/vgfUA9fZ/6FfDrwM/CuwHXgN+DzWYDgDeRxrEKKngfXAAqzeTpFexerh9Jr9mnfG2Na9WMOm/htWEroBuKFPj6rjacI60f9brCT1FNa44g/GsQ2ljqFXjiullIqLHnEopZSKiyYOpZRScdHEoZRSKi6aOJRSSsVFE4dSSqm4aOJQSikVF00cSiml4qKJQymlVFw0cSillIrL/wf77TbmyaCedwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 500,0.005,0.5)\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 500,0.005,0.5)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 200,0.005,0.01)\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n", - "\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": 
[ - "# PLOT" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\"\\nfor iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\\n _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\\n _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\\n \\n original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, num_layers,0.002 \\t 2 )\\n original_DGD, original_DGD_hist = torch_DGD(W, A, y, num_layers,0.001,0.05)\\n original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\\n\\n\\norigin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\\norigin_DGD_error = hist_nmse(original_DGD_hist,x_true)\\norigin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\\npred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\\npred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\\n\\n#plt.rc('text',usetex=True)nn\\n\\nx = [i for i in range(num_layers+1)]\\nplt.plot(x,origin_DGD_error[:num_layers+1])\\nplt.plot(x,origin_PGEXTRA_error[:num_layers+1])\\nplt.plot(x,origin_NIDS_error[:num_layers+1])\\n\\nplt.plot(x,pred_DGD_error[:num_layers+1])\\nplt.plot(x,pred_PGEXTRA_error[:num_layers+1])\\n\\n\\nplt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \\nplt.xlabel('iterations',fontsize= 'x-large')\\nplt.ylabel('NMSE',fontsize= 'x-large')\\n\\nplt.show()\\n\"" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "'''\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, num_layers,0.002 \t 2 )\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 
num_layers,0.001,0.05)\n", - " original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, num_layers,0.005,0.5 ,7 )\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = hist_nmse(original_DGD_hist,x_true)\n", - "origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "\n", - "#plt.rc('text',usetex=True)nn\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,origin_DGD_error[:num_layers+1])\n", - "plt.plot(x,origin_PGEXTRA_error[:num_layers+1])\n", - "plt.plot(x,origin_NIDS_error[:num_layers+1])\n", - "\n", - "plt.plot(x,pred_DGD_error[:num_layers+1])\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1])\n", - "\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','NIDS','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='x-large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "plt.show()\n", - "'''" - ] - }, - { - "cell_type": "code", - "execution_count": 62, - "metadata": {}, - "outputs": [], - "source": [ - "test_data = SynDataset(test_num)\n", - "test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate)\n", - "for iter, (W, A, y, x_true,pyg_data) in enumerate(test_loader):\n", - " _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers)\n", - " _,pred_DGD,pred_DGD_hist = model_DGD(W, A, y, pyg_data,num_layers)\n", - " #_,pred_NIDS,pred_NIDS_hist = model_NIDS(W, A, y, pyg_data,num_layers)\n", - " \n", - " original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 500,0.005,0.5)\n", - " original_DGD, original_DGD_hist = torch_DGD(W, A, y, 500,0.005,0.5)\n", - " #original_NIDS, original_NIDS_hist = torch_NIDS(W, A, y, 200,0.005,0.01)\n", - "\n", - "\n", - "origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true)\n", - "origin_DGD_error = 
hist_nmse(original_DGD_hist,x_true)\n", - "#origin_NIDS_error = hist_nmse(original_NIDS_hist,x_true)\n", - "pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true)\n", - "pred_DGD_error = hist_nmse(pred_DGD_hist,x_true)\n", - "#pred_NIDS_error = hist_nmse(pred_NIDS_hist,x_true)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 63, - "metadata": {}, - "outputs": [], - "source": [ - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "writer_error=pd.ExcelWriter(\"./error_fig/noise3/\"+figure_name+\".xls\")\n", - "df_error= pd.DataFrame({'PG-EXTRA':origin_PGEXTRA_error,'DGD':origin_DGD_error})\n", - "df_error.to_excel(writer_error,sheet_name='Origin')\n", - " \n", - "df_feasibility= pd.DataFrame({'PG-EXTRA':pred_PGEXTRA_error,'DGD':pred_DGD_error})\n", - "df_feasibility.to_excel(writer_error,sheet_name='GNN')\n", - "writer_error.save() " - ] - }, - { - "cell_type": "code", - "execution_count": 64, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEOCAYAAACetPCkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd0k9UbwPHvzepM94CWDsoS2UMQGSrDxQaZKkNAEQEnDlCQnwNBERcooLJFhgxRHKioyN7IlNWW7r138v7+SBtapG1SUjq4n3NyJMn73txypE/ueh6hKAqSJEmSZClVVXdAkiRJqllk4JAkSZKsIgOHJEmSZBUZOCRJkiSryMAhSZIkWUUGDkmSJMkqMnBIkiRJVpGBQ5IkSbKKDBySJEmSVTRV3YHK4OXlpQQHB1d1NyRJkmqUw4cPJyiK4l3edbUycAQHB3Po0KGq7oYkSVKNIoQIs+Q6OVUlSZIkWUUGDkmSJMkqMnBIkiRJVpGBQ5IkSbJKrVwclyTJJC0tjbi4OPLz86u6K1I14eTkRL169VCpKj5uqDGBQwjxAPARoAa+UBTl3SrukiRVa2lpacTGxuLv74+DgwNCiKruklTFjEYjkZGRJCQk4OPjU+F2asRUlRBCDSwEHgRuB0YIIW6vjM9SFAXFaKyMpiXppoqLi8Pf3x9HR0cZNCQAVCoVvr6+pKam3lg7NupPZesAXFAU5ZKiKHnAN0B/W3/I2hU/8uAzy1i18mdbNy1JN11+fj4ODg5V3Q2pmtFqtRQUFNxQGzUlcPgDV4o9jyh8zUwI8YQQ4pAQ4lB8fHyFPiT0QgRnHX3Z9teZivdUkqoROdKQrmWL/ydqSuAol6IoSxRFaa8oSntv73JPzF9X996dATjn6k/UX7tt2T1JkqRao6YEjkggoNjzeoWv2VSHjk1xzcsgzc6ZHQu/tnXzkiQVExwcjIODA87Ozvj6+jJmzBgyMjIq/XOFEDg5
OeHs7Iynpyc9evRg3bp1/7lux44d3Hvvvej1ejw9PWndujVz584lJycHgDfeeAOtVoter0ev19O4cWMmT55MdHR0pf8MVa2mBI6DQCMhRH0hhA4YDnxn6w8RQtBEb/oruZSlEDf/A5S8PFt/jCRJhbZt20ZGRgZHjhzh0KFDvPXWWyXeVxQFYyVsVjl+/DgZGRmcO3eOMWPGMHnyZGbPnm1+f8OGDTz88MOMHDmSsLAwEhMTWbduHREREVy5cnXWfNiwYaSnp5OUlMTmzZuJiYmhXbt2tT541IjAoShKATAZ+Bk4A6xXFOVUZXzWkPvbAHDEpwmJS5eSuGJFZXyMJEnF+Pv78+CDD3Ly5EnuueceZsyYQefOnXF0dOTSpUtERUXRr18/PDw8aNiwIUuXLjXf+9BDD/HCCy+Ynw8fPpzHH3/cos/18vLiscce47PPPmPOnDkkJiaiKArPP/88M2fOZMKECXh4eADQpEkTPvnkExo1avSfdrRaLc2aNWPdunV4e3szf/78G/wbqd5qzDkORVG2A9sr+3N6tQ5EfHeWk14hpOic0P9zsrI/UpJueVeuXGH79u0MGjSIXbt2sWrVKn788UeaNGmCoij06NGD5s2bExUVxdmzZ+nVqxcNGjSge/fufPXVV7Rs2ZLevXsTHR3NgQMHOH78uFWf379/fwoKCjhw4AD169cnIiKCwYMHW/1zqNVq+vfvz88/1+6dmTUmcNwsbo46Oge783doChsa3cuTYRerukuSZDNnbmta6nt1Zs/GfdhQAJLXrSdm1qxSr2169urOw8uDBpNz+vR/XrfEgAED0Gg0uLq60rt3b6ZPn86DDz7ImDFjaNasGWAKKrt37+aHH37A3t6e1q1bM378eFauXEn37t2pU6cOn332GaNHjyY7O5stW7ag1+ut6odWq8XLy4ukpCTzvXXq1DG/P3z4cH766Sfy8vJYvHgxjz32WKlt+fn5kZSUZNXn1zQ1YqrqZnu5T3MAvg/pTHKOTNUgSZVly5YtpKSkEBYWxqJFi8znTgICru6FiYqKwsPDo0QwCAoKIjLy6v6Yvn37YjAYaNKkCV26dDG/3qxZM5ydnXF2dmbXrl2l9iM/P5/4+Hg8PDzw9PQEKLFO8c0335CSkkLbtm0xGAxl/kyRkZHm6a3aSo44rqNFPVfuu82bX87Gs8W5MR2NRsQN5HWRpOrC0hGB+7Ch5tFHeepv+vZGunRdxc8aFH2DT09PNweP8PBw/P2vHuWaMWMGTZs25fLly6xdu5YRI0YAcOqUZUuhW7duRaPR0KFDB9zd3fH392fTpk0l1k4sYTQa2bZtGz179rTqvppG/jYsxaD2pm88l/R1WDJzIcn7DlRxjyTp1hQQEMBdd93Fq6++Sk5ODidOnODLL7/k0UcfBeCvv/5i2bJlrFy5khUrVjBlypQSo5GyJCUlsWbNGp5++mlefvllPD09UalUzJ8/n9mzZ7N06VKSk5NRFIXz588TGxt73XYKCgo4c+YMI0aMICYmhueff95mP391JEccpQjxdgYgwtmb9Ql5JHy0gZebNELj7l7FPZOkW8/atWuZOHEifn5+uLu7M3v2bHr27ElaWhqjRo3i008/xd/fH39/f8aNG8fYsWP5+eefSz0l3apVK4QQ6HQ6WrVqxYIFCxg5cqT5/WHDhuHq6sqcOXN47rnnsLOzIzAwkCeeeIIhQ4aYr1u3bh1btmxBURT8/Pzo1asXhw8fxs/Pr9L/TqqSUBSlqvtgc+3bt1dutOZ4Tr6Bpq/9iFCMtIk7x1nP+qzL+otmiz6SaRykGuHMmTM0bVr6Yrh06yrt/w0hxGFFUdqXd7+cqiqFvVZNHZGLUaWmiRNkah34Mt6e5FWrqrprkiRJVUoGjjIE2Zl2TzT0MS3IbavfmROffkHWkSNV2S1JkqQqJQNHGYL1WgCSco0MaO1HgVrDikY9iZg8hXwLF98kSZJqGxk4ylDfywmAsCyFF+5rgk4t
2BnQjqiWd6Jycani3kmSJFUNuauqDA38PeBsLOEGHV7h//LyPcF4uzrQrc19qLRaq9pSFAVDSorclSVJUo0nRxxlaBRiSjlwReVE6PAR9N6/mX4d6puDhjEvj5SNG7FkZ1ri559z/q7Ocn1EkqQaTwaOMtQLqovOkE+SgyuZGnsy//7b/N6VpEzCpz5D9Guvk/DJJ+W2lXPmLCgKuRcuVGaXJUmSKp0MHGVQa9Q0yk0E4Jh3Q/JCQylISmLhzgt0n/8nu7sMArWahEWfkbRqdZltGTPSAVAKi8BIkiTVVDJwlOPBnm0BONTibgCyjx6ljos9+QaFhVdUuM9+E4DYt98m5dvSc/YY0k2VzYw5uZXcY0mSpMolA0c5HrizIQB7nQIxIMg6coQBbfxpWteFqNQc1ro2w+fllwGIfu31UoOHsbAkppKTfXM6LknVXFmlY8sr23qt0NBQhBDmTLhFj6KSsIMHD2bChAkl7hk4cCCTJ0/mnXfeMV9vb2+PWq02Py9K7V683Ky/vz/PP//8dbPkjhkzBo1GIysA3uoaeDsT7OlIqqLmjEcQ2UeOolYJZvcz/Q/12Z8Xyeg3BJ8XXwBFIfq118nY9fd/2jEUTlXJEYckXXW90rGWlm29npSUFDIyMsyPYcOGAbBw4UI2bdrEzp07AVOOqSNHjvDuu+8yffp08/Wff/45nTp1Mj8vnl23qNzsn3/+ybp16/jqq69KfHZmZibffvstrq6urF5d9tR1TScDRzmEEPRs6gvA/rrNyTl5EkNaGh3qezCojT95BUZmfXcKj3Hj8HnxBZzvvhvHjh3+044xIxOQaxySdD1FpWP/+ecfq8u2WqJOnTrMnz+fCRMmEB4eztSpU1m8eDHOzs5Wt9WwYUM6d+7MsWPHSrz+7bff4ubmxsyZM1lRy0tOy8BhgS6NvAD4N6ApSn4+YY+NIj82jlcfaoreXsMf5+L55XQsnuPHU2/hp6h0OgCUwqGskp+Pkm2aojLKwCFJ/1FUOtbR0bHCZVvLM2bMGBo0aEDbtm154IEHeOCBByrUztmzZ9m1axcNGzYs8fqKFSsYMWIEw4cP5+zZsxw+fNgW3a6W5AFACzT3dwXgor4umuBgcs+dI+699/B//z1evK8Ju87Hc3td00lyoVYDYMzN5crEiTh3uxvXAf3NbckRh1SVgl/5odT33hnYgpEdAwH4en840zf/U+q1oe/2Nv+5zye7OBmZ9p/XLXFt6dgRI0awcePGCpdt9fLyKvF87969JbLAdu3alV9++cVcy8MaRdX/srKyGD58OJMmTTK/Fx4ezs6dO5k/fz6+vr706NGDlStX0q5dO6s/pyaQIw4LeDnbUdfVnsx8IwVvvgdA1sGDAIzqFMQXo+8gwMOxxD2Ze/aQtXcfcXPnEv/xx+bXjblyjUOSilxbOrboF395ZVuLL4CHh4ebr01ISCAlJcX8KB40zp8/z/vvv8+kSZN44YUXyM+3riz0kSNHyMjIYN26dezfv5/MzEzze6tWraJp06a0bt0agEceeYSvv/7a6s+oKeSIw0LN/FyJTs3hvMaNJno9BbGx5MfEoC32zchgVIhOzaaeuyP6e+/FvlUrco4fJ2XtN+ZriqasJKkqWDoiGNkx0Dz6KM/3U7reSJdKaNKkiUVlW4t2XxUJDQ0ts11FURg/fjzPPvssM2fOpHPnzsydO5fXXnvNqv4JIRg6dChbt27lf//7Hx9++CEAK1euJDw83DxSKigoIDExke3bt9O/f/+ymqyR5IjDQs39TVNRp6LTcWjZEoDsY8fN78en5zJs8V6GL9lHZm4BAIakJNObxeqVG2XgkKRSVaRsqyU+++wzEhISmD59OiqVii+//JJ58+Zx9uzZCrX3yiuvsHTpUmJiYti7dy8XL17kwIEDHDt2jGPHjnHy5ElGjhzJypUrK9zn6kwGDgs19zOtc5yMSsWhVSsAso9fDRyuDlqy8gxEJGfz7o9nMWRkkF+4dVAb
EGC+LufcOYzFhriSJJU0bNgw1q9fz+rVqwkICMDLy4uhQ4f+p2zr9bi5uZWYxvrggw8IDw9n+vTpfPnll+gKN67cfvvtvPDCC0yYMMGiXHPXatGiBd26deO9995jxYoV9O/fnxYtWlCnTh3z45lnnuH7778nqegLZC0iS8daKCY1hzvn/Iarg5a/77Ej4smJOLRtS/DXa8zXnN73D8M3nSdNZceaO+3xeGUyACpHR4xZWaY/OznR+OABhErGbKlyydKxUmlk6dibxNfFDi9nHanZ+SQGNQEg59QplLw8ALKOHEU8PpzFp9aAovDdt3+a7y0KGgBqV1dz0KiNQVuSpNpPBg4LCSFoVjhddTZDQRcSgpKbS8ZffwGQuGQJGI24XTzNk/nn8Ym//gnXol1VxuxswseMJf23327ODyBJkmQjMnBYoWiB/GRkGu7DhwMQN/8Dcs6eJeOPP8zXDTq0hVbJl6/bRtE5jtStW8nav5+IyVNIqqULaJIk1U4ycFih+AK5+/Bh6IKCyLt8mbBHTQeS3IYPw/722zHGxeGfev0dIEUjDrdhw/B+ZiooCrHvzCF69myUWrrnW5Kk2kUGDisUTVWdjEwFrRafl18CTJlv1W5ueI6fgP9HHyIcCw8DqlSIwl0cZgUFKPn5CCHweuop/N6bh9BqSVn7DeHjxlOQnHwzfyRJkiSrycBhhQAPB/T2GhIy8ohLz0XfvTvB324kZNt3NNr1F7p6/ugCAmjw/TacOnfG99VXUXt5/qed4qfHXfv2JWjVStTeXmQdOEDow0MoSEy8mT+WJEmSVWTgsIIQ4up0VWQqAA7NmmHXqBGisA45gNbPj8Avv8DjsUfReHr9p51rT487tG5N/Y0bsW/eHIe2bVEXZgSVJEmqjqp94BBCvCGEiBRCHCt8PFSV/Sm+QG4JjWfZI44iWl9fglavou6b/0MIAYAhJUVu2ZUkqdqp9oGj0AJFUVoXPrZXZUeKMuWeiEix6PriU1X5KlPm3FOX4657rcreHpW9PQDGzEzCRo8hcupUDNfk5ZEkSapKNSVwVBttA90BOBKebNFooPhUVZ6L6d45m48RnVp2zqrcixfJj4oifcevhD48hNwLF26g15JUPX3zzTd07NgRJycnfHx86NixI4sWLUJRFMaMGYMQggMHDpivv3DhgnlEDnDPPfdgb29fojLgr7/+SnBwcKmf+cYbb6DVanF2dsbNzY277rqLvXv3VsrPd60xY8ag0+nQ6/Xo9XqaN2/Oq6++SmpqaonroqOjmTBhAn5+fjg7OxMSEsKYMWPMubWuLZXr6+tLnz592LFjx035OWpK4JgshDghhPhKCOFelR2p5+6At96O5Kx8LiWUn3Oq+FSVRz1T5szMtEzGrzhETv5/axYXcWjZkvob1mPXuDF5oaFcHjqMtB9/vPEfQJKqifnz5/PMM88wbdo0YmJiiI2N5fPPP2f37t3kFWZk8PDwKDeDrZOTE2+++aZVnz1s2DAyMjKIj4+nS5cuDBo06LpfBAsKCqxq1xIvvfQS6enpxMfHs2zZMvbt20fnzp3NadoTExO56667yMrKYteuXaSnp3PkyBHuvvvu/wSGolK5x48fp1evXgwcOJDly5fbvM/XqhaBQwjxqxDi5HUe/YHPgAZAayAamF9KG08IIQ4JIQ7Fx8dXZl9pVzjqOBxW/tZZTeFUlbCzQ+OiByDIWU3/1n7Yacr+69cFBxP8zVpc+vRBycoi8rnniZ3zrjzvIdV4qampzJw5k0WLFvHwww+j1+sRQtCmTRvWrFmDnZ0dAKNHj+bEiRP8+eefpbY1depU1q5dy8WLF63uh1arZfTo0cTExJCYmMjy5cvp3Lkzzz33HJ6enrzxxhsYjUbeeustgoKC8PHxYdSoUeYRwrp166hfvz5paaY1zx9//JE6depgye8ge3t77rjjDr777jsSExNZtmwZAAsWLMDFxYVVq1bRoEEDhBC4ubkxduxYpkyZct22
ipIqvvHGG7z88ssYjUar/y6sUS0Ch6IoPRVFaX6dx1ZFUWIVRTEoimIElgL/LehtamOJoijtFUVp7+3tXan9bR9cOF1lQeBQF05VqfR6hL0DAG8+0JAnujUoMeQujcrREb/35uE7YwZoNCStWEH67ztvoPeSVPX27t1Lbm5uubUqHB0dmT59OjNmzCj1Gn9/fyZMmMCsWbOs7kdubi7Lly83Z+EF2L9/PyEhIcTGxjJjxgyWL1/O8uXL2blzJ5cuXSIjI4PJk00JTIcNG8Zdd93F1KlTSUxMZNy4cXzxxRdY8ztIr9fTq1cvdu3aBZim2gYOHIiqAolQBw0aRFxcHOfOnbP6XmtU+0JOQoi6iqIUlQMbCJysyv4AtA0yBY5DFgQOrb8/ABovL1T2pm9RmoKrI4bwxCxORaXyYIu6pbYhhMDjsUexb3Y76b/+hv6+XjfSfekWdea2m5Mpt+nZM+Vek5CQgJeXFxrN1V9Bd911F6dPnyY3N5eff/7Z/PqTTz7J+++/z48//kijRo2u296rr75Kw4YNOXXqlEV9XL9+Pd9//z06nY7mzZuzefNm83t+fn7mb/YajYY1a9bw/PPPExISAsCcOXNo3rw5y5YtQ6PRsHDhQlq2bMk999xD37596dOnj0V9KM7Pz89cozwhIaFE6dzvvvuOUaNGYTAY6NSpE7/88kuZ7QCVnsq9Wow4yjFPCPGPEOIEcC/wXFV3qLmfKzqNigtxGaRk5ZV5ra6eP/4fLsDvnbcRdqYdU0qOaWE8ISOXAYt2M2XtUXZfSCj3cx3btsX3pWnmkUruxYskrVght+xKNY6npycJCQkl1hD27NlDSkoKnp6eJaZa7OzseP3113n99ddLbc/b25vJkyczc+bMEq+vWbPGvID84IMPml8fOnQoKSkpxMXF8fvvv5eoDR5QrH4OQFRUFEFBQebnQUFBFBQUmAtLubm5MWTIEE6ePFmiauE777xj/uyJEyeW+fcRGRmJR+H5LU9PzxKlc/v160dKSgoLFiwwr/2U1Q5gbquyVPsRh6IopVemryI6jYpW9Vw5GJrM0fAU7r3Np8zrXR54AACVQ+FW2xzTOQ4vZzsGt/Vn6a7LTFh5iDXjO9Im0LK1f6WggMhnnyP3/Hky9x/Ab847qF1db+Cnkmo7S0YCN0unTp2ws7Nj69atDB48uNzrx44dy9y5c9m0aVOp10ybNo2QkBA6dLg6m/3II4/wyCOPWNW3a6eQ/fz8CAsLMz8PDw9Ho9Hg6+sLwLFjx/jqq68YMWIEU6dO5aeffgJg+vTpTJ8+vdzPy8jI4NdffzVPx/Xo0YMtW7Ywa9Ysq6erNm/ejI+PD02aNLHqPmvVhBFHtXR1usryIaF5xJGbY37t1QebMrCNP1l5BsYsO8i5mHTL2tJo8Jo6BZVeT8bvv3N54CCyT5yw4ieQpKrj5ubGrFmzmDRpEhs3biQ9PR2j0cixY8fMu4uK02g0zJ49m7lz55bZ5gsvvMC8efNs2tcRI0awYMECLl++TEZGBtOnT2fYsGFoNBpycnJ49NFHeeedd1i2bBmRkZEsWrTIonZzc3M5fPgwAwYMwN3dnbFjxwLw/PPPk5yczGOPPcbFixdRFIX09HSOHTtWaluxsbF8+umnzJ49mzlz5lRofcQaMnBUUPsg01DQkp1VRcwjjuyrgUOlEsx7uCU9m/qQmp3PY1/uJzwxq7QmSnDp1Yv6mzdh37w5+VFRhD7yqJy6kmqMl156iQ8++IB58+bh6+uLr68vTz75JHPnzuWuu+76z/UjRoygbt3S1wIBnnnmGdRqtU37+fjjj/PYY4/RrVs36tevj729PZ988glgWlsJCAjgqaeews7OjtWrV/Paa69x/vz5UtubN28eer0eT09PRo0aRbt27dizZw9OTk4AeHl5sW/fPuzt7enSpQt6vZ7WrVuTnp7OZ599VqItNzc3nJycaNGiBdu3b2fDhg08/vjjNv35r0eWjq2gxIxc
2r31Kw5aNSfeuA+tuvwYnLB4CfELFuA5YTw+xeZCAXLyDYz+6gD7LycR6OHIT892xVFn2UyiMS+PuPfeJ3nVKgBc+vTB//33rP+hpFpFlo6VSiNLx1YRT2c76ns5kZ1v4Ey0ZXmrinZVFa1xFGevVfPF6Pa0CnBjQrcQi4MGgEqno86M6fh//BEqvR6nOztafK8kSZK1qv3ieHXWLsidywmZHA5LpmU9t3KvLzrHUbSrqriUTZsRWg3fTuyNxoLRy/W43Hcfjm3boi52Wj330iV09etbdGZEkiTJEnLEcQPaWXGeA0ofcRgyMoh+7TWiZ7yGutjv939j03nki30kZvx3hFIajZfX1e26Fy5wefDDRDw9WRaIkiTJZmTguAFFCQ+PhVuWKbe0EUdeaBgYjSh5eRjTr+6qmrn1JLsvJDJy6X6rgkeR/OhohEZj2nU1YCCZ+w+Uf5MkSVI5ZOC4AQ19nHG20xCZkk1cWk6515c24sgLCzX/2VBsZPDxiDY09HHmXGx6hYKHc9euhGzZjEObNhTExhI+ZgxxH34oc11JknRDZOC4AWqVoFWA6dDd0SvljzqunhwvGWTyQkPNfzakXG3HR2/P1xM63lDw0Pr7E7RqJV6TJoEQJH6+mLDHRpEXEWlVO5IkSUVk4LhBrQNMi+JHLZiuunpy/JrAUexU6rVrEbYIHkKjwXvqFAKXL0NTpw45Z86gZFt2VkSSJOlaMnDcoDYBpnWOo+HlLz4L+1JGHMUChyH5vwHo2uCx52Jihfrq1KEDIVs2U+/TT7ErTBanKMp1S9lKkiSVRgaOG9Q60DTiOBGRSoGh7Bz4RWVhDWlpGLOvLpDnhRYLHCklA0f2iRNk7N5tDh4fDW9N31Z+Fe6v2s0N565dzM9Tt27l8oCB5Jw+XeE2JUm6tcjAcYO8nO0I9HAkO9/Audiy80yp3dxACApiY7nQsxfZJ09RkJyMsVjZSMM1U1VXnn6aKxOfwpCRgY/env6t/c3vnY1JIya1/EX50iiKQsrab8i7fJnQYcNJXL4cpZILwEhScdWxdGx5ZVuvp6gfRdlwnZ2d6du3LwDbtm2jTp06JVKdb926FX9/f8LCwkrcI4TAycnJ/HzXrl3mcrPOzs54eHjQq1ev6/bljz/+QAhRZj4vW5GBwwbaF57n2H+p7ISHaldXApYswb5ZMwyJiUS9+CK51xRcKT7iMGZlYYhPgPx8CuLiSlx3IS6DR5buZ9iSvUSmlF2/vDRCCAJXLMdtxHCU/Hzi3p3LlfHjyS9MFy1Jlak6lo61pmzrtT799FMyMjLMj23btgHQt29funfvznPPmSpCpKSk8NRTT/HZZ58RFBRU4h6A48ePm5937doVMOX1ysjIIDIyEn9/f8aNG/efz1+xYgUeHh6sXLnSqr+LipCBwwbuDDGd1N53qfy1B+euXQj6eg12jRqSFxpK9GumGgOisFSmIeXqiKOgWPnJgoSS9To8nXTUdbMnLDGLYYv3ciWpYovdKnt76s6aRb2Fn6J2dydzz14u9etPWmFqaEmqDNW1dGxFyrZa4uOPP+bHH3/k559/5rnnnuPuu++mX79+Vrfj4ODA0KFD/5MpNzMzk40bN7Jw4ULOnz9PZefqk4HDBjo1MAWO/ZeTMBrLTxqpsrPDb+5c0GrJj4gAwL55c6DkrqriowzDNRW93J10rBl/J60D3IhIzmbo4r1cTvhvOmpL6Xv0IOS7rTh164oxNZWEhQvleQ+p0lTX0rE3Ura1LF5eXnz00Uc88sgjfP/993z88ccVaiczM5O1a9fSsGHDEq9v2rQJZ2dnhgwZwv3338+KFSts0e1SyVxVNlDP3QF/NwciU7I5E5NGM7/yCyrZ33479devI3XLVnIvX8J92DAiDh8uMVWVH3s1cBQk/Hc04+qgZdW4Djy+/CAHQ5MZungvayd0pKGPvkI/h8bbm4DFi0n55hscWrdGaLWAaS1E5rqq+YJf+eGmfE7ou73Lvaa6
lo69kbKtU6dO5cUXXzQ/nzJlSokptDvvvJPU1FSGDh1qVU1ygPfff59PP/2UtLQ0goKC2Lp1a4n3V6xYwbBhw1Cr1YwcOZKpU6fywQcfoC38N2xrcsTXW2r5AAAgAElEQVRhA0II83TVXiu2yto3bYrvq68QuGQJ9i1aACW34xYfcRQkXr+0rN5ey4rHO9ApxJP49FyGL9lPek7FRwpCCNxHjMC+WMrl6FenEzf/A5RyylZKkqWqa+lYS8q2Tpw40dzmO++8Y772448/JiUlxfy4dt3liSeeYNSoUWzfvr3EYrwlXnzxRVJSUggNDcXBwYFzxdZGr1y5ws6dO82VDvv3709OTg4//FB5XxTkiMNG7gzx4NsjEey9mMj4riFW369xM23rNaSkmL/hl5iqSix94d1Rp+GrMXfw5OrDPNS8Dnp7233LyD1/ntTvvgOjkYzdf+M/bx521wyTpZrBkpHAzVJdS8daUrb1888/5/PPP7e4TYAvv/ySK1eu8P3339O+fXvGjx/P0aNH0el0VrUTGBjIRx99xOjRo+nTpw8ODg6sWrUKo9Fo3sUFkJOTw4oVKxgwYIBV7VtKjjhspEsjLwD2XEwkJ99g9f1Cp0Pl5AQGgznRYckRR9kjGQedmuVj7mB4h0Dza/nlnCuxhF2jRgStXo22Xj1yT5/h8uCHSVq1Wm7blW5IdS0dW5GyreWJiopi2rRpLF26FDs7OyZOnIinpydvv/12hdrr1asXfn5+LFmyBDBNU82aNYtjx46ZH99++y3bt28nsZzfGxUlA4eN1HV1oLm/C9n5BvZcvP60UnnU7qZtvUVnOUqMOBLKb1OluroOcSY6je7z/+BQqOU10Uvj2LYN9bdswXXwIJTcXGLffpsrE54osQYjSdaqjqVjrSnbeq3JkyeXOJNRNP01adIkhg8fbt5aK4Rg6dKlfPjhhxavyVxr2rRpzJs3jz///JOwsDCefvpp6tSpY37069ePhg0bsnbt2gq1Xx5ZOtaGPvr1PAt+/ZcRHQKZM6iF1fdffngIOSdPErzuGxxateLC/feTHxYOgDYggIY7Sl+Yu9brW06yal8Yjjo1X46+w7zz60al7dhBzOszMaSk4DF6NL6vvmKTdiXbk6VjpdLI0rHVSM/bfQD47UysRdtyr1U04ihITkZRFAriip3jsHLI+Ua/Zgxq409WnoGxyw/w9/mKjYKu5dKrF/W/24rbkIfxnnp1X3tt/AIiSdL1ycBhQ7fXdcHP1Z649Fz+iUwt/4ZrqIstkBszMlCysxGOjgidDiUrC2OW5Yf81CrBe0NaMax9ADn5Rh5fcZCdZ20ztaT18aHum2+a1mQAY2YmYSNGklbOyVpJkmoHGThsSAhB96amUcef/8aXc/V/qd1NgaMgLt68vqH18UHtZZpmsnbUoVYJ5gxqwaN3BpJXYOSJVYf45VSM1f0qT8rGjWQfO0bklKlETnsJQ6r1QVOSpJpDBg4b69bIdLCnIoGjaJtrwqJFpGz8FgCNjw8aT9OOLUMFdkioVII3+zfn8c71KTAqpOUUlH+TldwfewzfGTMQ9vakbdvGpT59ySgjRYQkSTWbDBw21qmBJxqV4NiVFFKzrTuI5zZoEK6DBqHk5JC0bBlQGDg8PADrRxxFhBC83qcpmyd15uF29SrURpntq1R4PPbo1TK18fFceXIiUa+9hqEwcZskSbWHDBw2prfX0jbIHYNRYc8F6xakhUZD3bfexKvYorM2oN7VqarrpB2xuG0hzNUKAU5GprLh0JUy7rCeLjiYoNWr8Jk2DaHTkbrxW7L27bPpZ0jWMcrzNtI1bLGRRQaOSnB3Y9N01V/nrZ+uEioV3pMm0ejvXdR9+208x469OlWVZJvDPMmZeYz66gDTNp7gy78v26TNIkKtxnPc49Tf9C1ekyej79nT/J5SYPtpMql0Tk5OREZGkpeXJ3e9SQDm1PH2hUXlKkqmHKkEdzf25r2fz/HLqVhm9zOi01gf
nzVeXrgNHlT458IRR7xtttS6O+mY0r0hs7ed5s3vT5OZW8CU7g1tmsjQrmFDvCdfTU2Sc/o0EVOfoc7M13Hu1s1mnyOVrl69eiQkJBAWFlYiJ5R0a7O3t6devRubspaBoxI083Phtjp6zsaks+N0LL1bln3atTwaX1O2zvwY2+2IGtu5Pk46Da9sOsEHO/4lI7eAVx+8rdKy4CatXkN+RARXnngS1/798HnlFTSF51akyqFSqfDx8cHHx6equyLVMnKqqhIIIRh+RwAA3xwMv+H2tH6mGuP5UVE33FZxQ+8I4OMRbdCoBEv+usSMLScxVODgoiXq/m+2ae3Dzo7Urd9xqU9f0n76ufwbJUmqdmTgqCQD29TDTqNi1/mEClfnK6L1r5zAAdCnpR9LR7XHTqNi7YFwjl1JLv+mChAaDZ7jHidk6xYc27fHkJhI5LPPEjFl6n+qG0qSVL1Vi8AhhBgihDglhDAKIdpf896rQogLQohzQoj7q6qP1nJ11PJgc9MU03fHb+wXvtrdHWFvjzEtrVK2t957mw/Lx3Zg3uCWtAvysHn7xemCgwlcuYI6s2aicnQkc88eWWlQkmqYahE4gJPAIOCv4i8KIW4HhgPNgAeARUKIiqe+vMn6tDSNFH48GV3OlWUTQqAtzApaGaMOMJ0/GdI+wPz8bEwaWXmVs6AqVCrcR4wg5Ptt+H8w3/yzKQYD+bGxlfKZkiTZjkWBQwhR5tdQIYRaCNG2op1QFOWMoijnrvNWf+AbRVFyFUW5DFwAOlznumqpSyMvnO00nIxMIzzxBqerKmmd43rORKcxbPE+Rn15wOpDjNbQ+vnhfPfd5ufJa77m0oMPkbRyJYrB+pomkiTdHJaOOOKFEOatGUKIo0KI4vu5vICDNu2ZiT9Q/JRaROFr/yGEeEIIcUgIcSg+3vrzE5XBXqumR2Huqu03OOq4mYHDTqPCSafmUFgyw5fsIz49t9I/E0xbdo1ZWcS+M4fQocPIPlmxWgWSJFUuSwPHtXs0GwLX1jwscx+nEOJXIcTJ6zz6W9rZsiiKskRRlPaKorS3thB8ZXqwuWkaZsvRyBs6hKX1M7VTcBMCR4i3M+sndqK+lxNnotMY8vmeG17gt4Tfu3Oot2ghmrp1yTl1itChQ4mdMwfjdSrCSZJUdWy5xlHmb0VFUXoqitL8Oo+tZdwWCQQUe16v8LUa454m3ng523E2Jp2d5yqe1vzqiOPGRi6WqufuyIaJnWjm50JoYhaDP9vDuZj0Sv9cfffuNPh+Gx6jRwOQtGIlF3v3IS/8xrc1S5JkG9Vlcbw03wHDhRB2Qoj6QCPgQBX3ySr2WjUT7w4BTBUCKzrquJlTVUW8nO345ok76Vjfg7j0XB75Yh8ZuZV/Alnl5ITvq68QvGE99s2aofH2Rut/3RlKSZKqgKWBQ6HkiOLa5zdECDFQCBEBdAJ+EEL8DKAoyilgPXAa+Al4WlGUGrdqOrJjIJ5OOo5HpLKrgpX4qiJwgClp44rHO3Df7b7M6N0UZ7ubl2zAoVkzgtevI2DRQkRhHen8mBiSVq2Wi+eSVIUsqjkuhDACl4Gif60NgHCgaMuNGghWFKVabJWtqprjZVm48wLv/XyOe5t4s2ys9RvDlPx8zrZqDYpC4wP7Uev1ldDLMj5fUUqkI0nOzMPd6dplrsoXMWUK6Tt+xe72ptR5/XUc27S56X2QpNrK1jXHZwMrgTWFj/8By4s9X1n4mlSKER0C0WlU7DwXz+UE6xd7hVaLXePGoChc6t+fzL17K6GXZXx+saDxT0Qq3d7byap9YTe1DwCuAwagqVuX3NNnCBsxkqhXp1e4TokkSRVj0YijpqmOIw6AaRuOs+FwBGM7BzOrbzOr78+9eJGol14m55Rpm6rHmDH4THvRPI2TFxGJxtsLlZ2dTft9rWW7LzN722kAnu/V2OaZdctjzMoiYfESkr76CiU/H5Vej/fUqbiPGI7QyLydklRRth5xlPYhXYUQ
g8s7ICiZjL4rGIB1B68Ql55j9f12DRoQvO4bU6EntZqk5cuJnTsXRVFI27GDi716ETO78gd+YzvXZ86gFqgEfLDjX2ZvO42xkpIjXo/K0RGf554lZNt3OHXtijE9ndh33yUvNPSm9UGSbmWWnhyfLIR47ZrXtgJ/AhuA80KI2yqhf7VKc39Xejb1JSvPwIId5wH4NzadZ785SlRKtkVtCI0G70mTCPxiKUKrJXnlKmJmvUHM6zNBUcjY9ddNKdozokMgn45si06tYvmeUF7YcJx8w82tNqcLDiZgyWLqLfwU72eeMddsVxQFQ0rKTe2LJN1KLB1xjMK0GA5A4aG9h4DHgDuA88B0m/euFnrlwdtQqwTrDoZzLiadVzf9w5ZjUSzddQkw/dKbuOowE1YeKjMAOHXqRN1354BKRcr69eZflIb4hJtySBDgoRZ1+WrMHTjq1Gw+Gsnkr4/c9EpzQgj0PXrg9cQE82sZv//OhZ69SFy2HCUv76b2R5JuBZYGjgbA0WLPHwK+VxRljaIoh4EZgCzrZoGGPs6M7BCIUYGxyw5wOMyUxvzPc6Y0KRfjM/npVAw7TseWe2bCtXdv6n+7Eae7u2F3223YN28OQPbx45X7QxTTpZEXX0+4E08nHf1b+9/UtY7SZOzahTEjg7i5c7nYty/pv/4qS6dKkg1ZGjgcgLRiz++kZCbb84AsM2ahaQ80wd/NgajUq+sclxIyCUvMZFexOuVpOeUftrNv2pTAxYsJ2bIZ53vvASD7+Amb97ksrQPc+POle3moxdVKh5VVEMoSdd94g4DFn6MLCSE/LJyIyVMIHzOWnDNnqqxPklSbWBo4IoCWAEIId0xpzovvB/WmZGCRyuBir2XBsNYIAXo7DV0beQHwx7l4/vq3WOCwMjOtQ6vWwM0dcRQpfjDwcFgy93/4FxfibF87xOL+3H03IVu34DtjBmpXV7L27+fyoMGk//57lfVJkmoLSwPHOuBjIcQkYAWmjLXFU3+0B66XFl0qRYf6Hmx4shPrJ3aiXyvTqfCfT8Ww71KS+RprU5o7tGwBFGaZrcK5/YU7L3AhLoOHP9/D4bCk8m+oJEKrxeOxR2nwy894jB6F1t8fp06dzO/L6StJqhhLA8fbmHZQvY0pM+4jiqIU30IzAvjBxn2r9doHe9C0rgt3N/FGCNhzMZHs/KupNKwdcahdXNCFhKDk5ZGycaOtu2uxhSPb0uM2H1Ky8hm5dD+/nIqpsr4AqF1d8X31VUJ++B6VgwMAhowMQocNJ/X7H1CMN3c3mCTVdBYFDkVRchRFGaMoiruiKLcrirLnmvfvURRlXuV0sfbz0dvz1oDmOGhLZmyxZI3jWh5jxwAQ++ZbpGzabIvuWc1Bp2bxY+0Y0SGA3AIjE1cfZs3+m3/K/FrFD0amrN9AzokTRL34IqEPDyFj9+4q7Jkk1SzVPTvuLeORjkH89sLdzBnUgmGFJVwrUn3PfcgQvJ97DhSF6OnTiVvwYZV8o9aoVbwzsAXP9WyMUYEZm0/yyW/nb3o/SuMxehR13vwfGh8fck6f5sq48YSNHUv2PyerumuSVO1ZlJ9BCLHEkusURXnixrpza/Nzc2BEh0CiCw8DWjtVVcTrySdQOTkRO2cOiYsXY8zKwnf6q+atsgXJyWTu3oPLA/dXaooOIQTP9GyEr4sdM787RTN/l0r7LGsJtRr3IUNw7dOHpNWrSVyylKy9+wgdMgTPpybi88wzVd1FSaq2LP2tMR7TAcBLlF7pT6402oiLgxaAtJyK1/v2ePQRdIEBRDw9meRVq1A5OOD93LNgMHChR0+UrCyERoPLA/fbqtulGt4hkHua+FDH1d782rXZdquKysEBrwkTcB8yhISlS0letRrHtu2quluSVK1ZGji2Yjr0dx5YCmxSFKXyK/rcoooCR0Wmqopz7tYN/wUfEPHMsyQuWYIxMxOh06FkmcrA5pw7e1MCB1AiaOy9mMgHO86x6JF2eOsrNyGj
pdRubvhOm4bn2LGoPT3Nr0e/PhOVoyOeT0xAU+x1SbqVWbo4PhAIBv4A3gUihRDzhBCNKq9rty4X+8IRR/aNx2Z9z574L/gAtFqS16whadky83vGtMovBXsto1HhrR9OczA0mQELd/Nv7M3vQ1k0Xl7mkVB+bCwpmzaRtGIFF3r2Im7+fAqSk6u4h5JU9SxeHFcUJVpRlLcxpR8ZXfjff4QQO4UQ9mXfLVnD1QZTVcW53HcfgUuX4tyzB/atWmLXtCkABXEVr4FeUSqVYPnYDrQOcCMyJZvBi/aUOPRYnWh9fam/YT3O996Lkp1N4tIvuNijJ3EffYQhNbWquydJVcbqXVWKyU/A58A+oCsgA4cNuTiYZhArujh+PU53diTg00+pv24ddV43JTrOj4u1WfvW8Nabapn3blGX9NwCxi4/yOoqKAplCfvbbyfgs0UEb1iPU7euGLOySPzscy7edz+G9Oo1WpKkm8WqwCGEqCuEmC6EuASswhQ4miiKInNY29DVqSrbBY7iND6mtGIFcVX3Td9eq+aTEW14+t4GGIwKr205yQc7/q2y/pTHoUULApcsIejrr3G6qxPO99xjLt+rKAoFCRWrJS9JNZGl23H7YNpZ9QCm5IYvA1sURamc32y3uKu7qipn/4HG2xuAgvh4FKMRoaqa4zwqlWDa/bcR7OnEzK2nuLN+9a8H5ti2DYFffVUipUvGn38SOfUZXAcPwnPceHT1/Kuwh5JU+SzdVfUdEIYp5chFQAsMuXY7paIoX9u0d7covZ0GISAjt4ACgxGN2ra/2FV2dqhdXTGkpmJITq7y3UJD2gfQ/TYfPJ2v7rDKLTBgp1GXcVfVUul05j/nnPjHlOZl7TekrN+Aa58+eD75BHYhIVXYQ0mqPNb8RgoCZgOrS3mssnnvblEqlUBfmG02vbJGHebpqpu/QH49xYPGH+fiuPe9PzgaXjN2MHlPnULI99tw6dcXgNStW7nUuw9XJk+ukkzFklTZLN2Oq7LgUX2/HtZAtjgEWJaiwJEfWzUL5GVZvS+MqNQchi3ex4ZDV6q6Oxaxa9gQ/3nzaPDTj7gNG4bQaMj49TcyDxwo/2ZJqmEsXeOwqLqfoih/lX+VZAlXBy0Rydk2OctxPRpfX6D6jDiK++zRdvxv22lW7Qtj2sYTnI5OY8ZDTW0+ZVcZdAEB1J39Bt6TnyZ57Te4Dx1qfi/5m3UYs7NxG/IwamfnKuylJN0YS9c4/sCUUuR6OSKUYv+tvMRHt5iinVU3enq8NBqfwgXyKtxZVRqtWsWbA5pzu58LM7eeZNnuUM7FpLNwZFvcnXTlN1ANaLy98Z46xfzcmJdH/MJPMcQnkLBwIW4PP4z7yBHoAgOrsJeSVDGWfoULAAIL/1v8UR/TSfIcILIyOnirMp/lqOSpquo44igyokMgayfciZezHXsuJjJuxcEaW3xJaDTUnT0bxzvuwJiRQdLy5Vy8/wHCn3iC9D/+QDEYym9EkqoJS9c4Iq99AG2A7cAk4H9A40rs5y3HfHq8kkYc2hoQOMBU7GrblM60D3Jn+kNNq0VixIoQKhX67t0JWrWS4I0bcR0wAKHVkvnXLiImPkXWwYNV3UVJspjVk8ZCiLZCiN+BTcDvQCNFUeYqipJr897dwip/qqpmBA6Auq4ObJjYifbBV895/PVvPAWGmlm5z6F5M/zenUPDP//AZ9qLOHXujGOHDub3k1atJuvo0Ro7upJqP4vXJIQQAcAcTGVitwLNFEWpPpV5apmiXVUplRw48mtA4ABKjDR+OxPLuBWHuDPEg49HtMFHXzMz3mjc3fEcNw7PcePMr+VHRxM7Zw4YjegaNMBt8GBc+/er8rM2klScRSMOIcS7wDlMiQ27KYoySAaNyhXk6QjA6ai0Smlf4+UFKhWGxESU/JqVAMDZToOXsx37LiXR5+O/OXA5qaq7ZDNCq8Xz8bGovbzIu3iRuHnzOH/3PURMmUL6zp0oBbKagVT1hCXD
YSGEEcgG9lBGwSZFUe6zXdcqrn379sqhQ4equhs3JCY1hzvn/IaTTs3xWfdVylbU8127URAfT8Pff0Pr52fz9itTXFoOk9ce5cDlJNQqwcsPNGFC15AauwZyLSU/n4y//iLl201k/PknGAygVtPoj53mlDGSZGtCiMOKorQv7zpLp6pWIiv83VR1XO0J9nQkNDGLU1FptApws/lnaHx9KYiPJz82tsYFDh8Xe74e35H3f/mXz/+8yDvbz3IoNJn3h7Yyrw/VZEKrRd+jB/oePSiIjyd161YK4uPNQUMxGLjy5EScOt2JS+/eaOvUqeIeS7cSiwKHoihjKrMTQoghwBtAU6CDoiiHCl8PBs5gmiYD2KcoysTK7Et1cmeIJ6GJWey7lFhpgYOTJymIrRnrHNfSqFW88uBttAty5/n1xzgRkYrBUPu+32i8vfEcP77Ea1kHD5L5999k/v03ce/Px/GOO3Dp0xuX++9H7epaRT2VbhXV5SjuSWAQpsy717qoKErrwsctEzQAOoaYdhHtr6Q5fK1v0c6q6pd2xBq9bvflhyld+ezRqwcE8w1GDMbaF0SKOLRtS71PP0F///0IrZasAweImTmL8126cmXS07LQlFSpqsVJb0VRzgC1Zn7aVjrWN+2kOXg5CYNRQa2y7d+PxseUdqQ65quyVqCnI4GFGwoA3v/5HMcjUlgwrDV1XR2qsGeVQ6XToe/ZE33PnhjS00nf8Stp328jc99+cs6cQeXiYr42/fffcWjdGo1H9U9bL9UM1SJwlKO+EOIokAa8pijKrqru0M3i5+ZAPXcHIpKzOReTzu1+LuXfZAVzvqoaOlVVmrScfDYdjSQ+PZcHP9rFvMEtua9Z7V0DUOv1uA0aiNuggeTHxZF/5Yr5S1hBQgIRT08GIXC84w709/VC36OHXBORbshNm6oSQvwqhDh5nUf/Mm6LBgIVRWkDPA98LYS47m9PIcQTQohDQohD8fHVL/9SRbUNdAfgSCWkGDdPVdWCEUdxLvZatk/tyt2NvUnJyueJVYeZvvkfMnNr/1ZWrY8Pju3amZ8b0tJw6tIF1Gqy9u8n9s23uHDPvVwaMJC4BR9SkFR7tjJLN89NCxyKovRUFKX5dR5by7gnV1GUxMI/H8ZUROq6qU0URVmiKEp7RVHae9ei7YrtgiovcBSNOKqq9nhl8tbbsWzMHbzWuyk6tYqv94fzwEd/se9SYlV37aayCwkhcOkSGu/+G7+57+LcswfC0ZHcs2dJ/OILhPpqNYTsf/7BkFY554ak2qVaT1UJIbyBJEVRDEKIEKARcKmKu3VTFY04jobbvqx78akqRVFq3RqTSiUY3zWELo28eGH9cU5FpbF8dyh3htx6p7DVLi649u+Pa//+GPPyyDpwkLyLF8w7sBSjkStPTcKQnIxDy5Y4deqEU+e7cGjZEqGt+dubJduqFoFDCDEQ+ATwBn4QQhxTFOV+oBvwPyFEPmAEJiqKckuNrW+rq8deq+JyQiZJmXl42DCtuNrZGZWjI8asLIxpabV2G+dtdVzY8nRnlvx1iWF3BJhfzzcY0daAGh+2ptLpcO7SGbp0Nr9mSEnBLjiYrORkso8eJfvoURIWLULl6Ihjhw54PT0JhxYtqrDXUnVSLf7VKIqyWVGUeoqi2CmK4lsYNFAU5VtFUZoVbsVtqyjKtqru682mVatoWc90hqMySqmap6tq2TrHtbRqFU/f2xCvwhK1+QYjD3++l3e2nyErr/avfZRH4+FB0OpVNN6/j3qLFuL+6KPoQkIwZmWR8ccfYLyaUDLtp59JWrWanLNnZTr4W1S1GHFIZWsb6M6By0kcDkumR1Nfm7at8fUl7/Jl086qxrdOZvyDl5P4JyKF41dS+OFENG8NaM69t/lUdbeqnNrZGX337ui7dwcgPyaGzL37sG/e3HxN8jffkLVvHwAqvR7Htm1xaN8Ox/btcWjWDKGrGcW2pIqrFiMOqWwd65v23+++aPuF3dpy
CNBadzX0YvOkzjTzcyEyJZuxyw/y9JojxKblVHXXqhVtnTq4DRxQYhHdbeAAXPv3Q+vnhzE9nYw//yR+/geEjRhJ9Kw3zNcZMzPJj4yU6eFrITniqAE6hnigVQtORKSQkpWHm6PtvtFpfE37+Wv7VNX1tApwY+vTnVm+J5QPdvzLD/9E89e/8bze93aGtg8ov4FbVNEiO0B+VBRZhw+TdegwWYcO4dC2jfm6jL93E/nMM6i9vHBo0QKHli2wb9EShxbNa+162q1CBo4awFGnoX2QB3svJfL3hQT6tLRdQkJN0Ygj5tYLHGDKdzW+awgPtqjLrK0n+fVMHHkFNbNAVFXQ+vnh6ueHa9++ACVGF8Z004YLQ0ICGTt3krFzp/k9XUgIIdu+M49kCpKSULu717qdfbWVDBw1RLfG3uy9lMhf/8bbNHAUnSCubYcAreXv5sDSUe3ZfSGRuxpc3a7708kYWtRzxd+t9qUtqQzFf/G7PfwwroMHk3/lCtkn/iHnnxOm/54+jVCrzUFDURQuPdQbRVGwb9IE+6a3YdfkNuxva4IuJASVfc0s1FWbycBRQ3Rt5MXcn2DX+QSMRgWVjfJW3cpTVdcSQtClkZf5eXRqNs+tO4aCwhPdGjDx7hAcdfKfjDWEEOgCA9EFBuLapzdgqjVSkHh1vc6QYjqjZExNJevAAbIOHCjeAH5z38W1Xz8A8q5cwZCcjC6kAWpnp5v3g0glyH8FNcTtdV3w0dsRnZrDS9+e4N1BLWxS3Mm8OB4Tc8Nt1TYqIejR1IfvT0Tz8W/nWX/wCi890IT+rf1tnnDyViK02hK5sjTu7jTau4eC2Fhyzp4l9+xZcs6eI/fcOfLCw9HWrWu+NuXbb0n8fLHpvrp1sQsJQRcUhC4oEF1IA5y7drnpP8+tyKIKgDVNbagAeD1/n09gwspDZOcbeLZnI57teePbZxWjkbMtW0FBAU2OH0NlZ2eDntYuB0OT+N+20/wTaUpV3tjXmed7Neb+ZnXknHwlU/LyQKVCaEzfcROXLSd182byLl/+T8lju9tuI68GyyYAABuQSURBVGTLZtN9isKVJ59EW6cuuqBAtIGB6AIC0Pr5oXaxbbLQ2sTSCoAycNQw2/+JZtKaI7QNdGPTpM7l32CBC917kB8VRYNffkYXGEh+dDSoVGh9bXtmpCYzGhU2HY1kwY5/iUzJpktDL1aP71jV3bplKQUF5EdEkHvpMnnhYeSHh6Px9sbrqacAyI+L40K3u697r8rZGb95c81nVbJPnSIvNBStnx9aPz803t4I1a15UsHWpWOlaqJTYZ6lM9HpNqvRofH1JT8qioLYWLR163J56FCERkvD3369Zf8BXUulEjzcrh59W9Vl3cErtC5WkfF8bDpJmXl0vAVzYFUVodGgCw5GFxx83ffVej0BS5eQFxZOXngYeWFh5EdEkh8VhTEjA7Veb742bft2kr786urNWi1ab280Pj7YNW5M3f/NNr+VeeAAGnd3NN7eqFxdb9kRpwwcNYy7kw4/V3uiUnM4E53GD/9E07tFXZr7V3xfvKZOYdqRmFg0UVEY4hNMz8PDS/2Heauy06gZ1Sm4xGvvbD/DznPxtA9y56l7GnBvEx+bbV6QKkbl4IBz167QteTriqJgSElB5XR1Yd2+cWP0991HflQU+VFRGJKSzH9W8vKu3ms0Ev74OCgwpagROh0aHx/Tw9MT95EjcOrUCYC8iEjywkLReHqidvdA4+5Wq07Uy8BRAzXzdyUqNYcZm//heEQqF+IyWDqq3NFlqbS+RVtyY8gLv/pNOufcvzJwlMNoVGgV4MaR8BQOhSUzbsUhmvjqmXhPCH1a+t2SSRSrMyEEGnf3Eq8VP9AIYMzJoSA+noK4OCg2la9kZ+PYvj0FcXEUxMVhzMggPyKC/IgIAPT39TJfm/7rDuLenVvic1R6PWoP02glaNUq82glecMGKChA5eKC2sUVtauL6VpXV9QuLub1neqk+vVI
KlczPxd2nI7leIRpsTYyOfuG2rua6DAO4Xi1/GruuXNw/3031HZtp1IJnu3ZmAldQ1h7IJwvdl3mXGw6z607zvs//8tHw1vTPliWbK1JVPb26AIC0AWUzB6gcnIiaPky83NjVpY5wBQkJuHQ8mr2YI2XN44dOmBITqIgKRlDcjLG9HTTIzOrxBRXwqLPKIiOvm5fPMePw+fFFwHIPnGCuPfeR+VaGGD0zqic/t/enYdHVd4LHP/+Zg0z2SEhQIAEwiKgVXbEKu7ihqViXaql2tt6q3aztXq1V69Wb73tta1Pe6u1KrbaWttal1YtgnV53BCUKjsEUAJkIQmZJJNMMjPv/eOcJJOQYIaQmSTz+zzPPDlz5syZX95Zfue873veNx1HejqOdD9Z553X6UyqP2niGISmj+5cLdXX8ZXcdlVVuLy805hEzdu29mm/qcTvdfGVz07gqgVFPPPBXh54vZSymibG5nYk4vrmVjLSdG6LocLh89ldgccf8ljW+ee1X7cCVjVXpK7OSiDBzgd62UuXEq6qIhIIEA3UEakLEKmvJ1pXhzPm7Kh1336C773XYzwZp52miUP1bHqXucerG1sIhSN4Xc4ennF4sRcBmpjhs0NbNHHEy+NycMmcsVw8q5BN+wOMzLSueo5EDefe/waF2T6uWjCeM6eNPCrX4ajBQRwOq1G9SzUZQN4N1/dqH755cxm34lErsQTqiDY02mcwDUQaGnDENPj3N00cg9CorDRy/R5qGjsa7ioDoU5Ht/Fovwhw/35MU8fRUGtZGZGGBpzp6X0LOAU5HNKpw8L2ynqqG1rYU9PE2zurGZnp5XMnFHLxrDGU5CfuC68GL1dODq7585MdBqDDqg9KIsJt5x3DlxcWcaz947S/7sirq1wFBTiysghXVREqLQXAXVgIQGjb9r4HrJhakMk7/3E6d1wwjQl5fioCIR54rZQz7nudJb98k0odzl0NIpo4BqmlMwu5/YLpFOZYg++V9+GHR5xOMhbZF0tFozizsvDNtnpphbZu6XOsypKZ5mb5wmJWf+cU/nTtAi6dM5Z0r4uqQHP7zIQAb+44QGNIZyVUA5dWVQ1ybXXo5XV961mVfvrp1D37HIA1PEOR1eDXundv3wJUhxAR5hTlMqcol9svmM7u6sb26z4ONIS48uF38bgcLJqcz+JjCzhtar42qqsBRRPHIDcqqy1xhPq0n/SFCxGPB9PSgmfsWJzZ1vUckbpAn2NUPRvmcXLMqI7ODgcaQpwwLod1H9fy0sZyXtpYjsfl4ORJIzht6kguPH406V792qrk0qqqQa6gLXEE+nbG4fD78Z94IgDucbGJo65vAaq4TC3I5C//fiJv33Iad1wwjbnFubRGoqzaXMkPnt1AJNJxQdrOqgZaIzrplEo8PXQZ5Araq6r63rg64rqvE21qIvuii6yBDtHEkSyjsoaxfGExyxcWU1nfzKpNlZTVBsnyWVVW0ajhkgffIdQaYW5xLgsmDmf+hOEcMypTh3xX/U4TxyA3KstuHD8KiWPYsccy/rEVgHVVLHRMsqOSJz8jjcvnjeu0rqohRLbPzY7KEKu3VLJ6SyUAGWku5hXncsNpk/hMzECMSh1NmjgGufxMqzdOZX3oqI2WC+DMsrr56hnHwDQyM41V3zmFfQebeGdntX2r4ZOaIKs2V3L9aZPat/3Dmk/YXtHACeOyOWFcNmOyh6XsqK7q6NDEMciluZ3tFwNWN4TIzzw68zM7srSNYzAYnT2MpTMLWTrTuu6mrDbIuztrmBEzusDfPtzHmzuq4U3rfo7PzTGjMpk2KpOFJSM4dWp+MkJXg5gmjiFgdHYaNY0tbNofOHqJw+8DtxvT1EQ0FNKZAQeJwhwfhbM6jyBw/amTmFc8nA8+qWX9noPUBlt5q7Sat0qrqWtqbU8c+w42cd/L2yjJT2diXjoT8/yMy/Xp0CjqEJo4hoAzjylgw94Af15XxqIpR+foUURwZmUROXCASF0d
jnw9Kh2sFkwczoKJ1iRTxhjKA81s2hdg075Ap2FRPiyr48/ryjo91+0Uxg/3MzHPzw8vOpa8DOsA4mCwhXSvS5NKitLEMQQsm13Iz1ZvY+XGCmoaW8j1H50JY9oTx8GDuDVxDAkiwqisYYzKGsbpx3SeGnj66EzuumgGO6saKK1qpLSygb0Hm9hR2UBpVQM/v/SE9m2/9rt1rPu4llHZaYzL9TE2x8fYXB+FOcOYPjqLknwd32wo08QxBIzOHsYpk/N4dWsVv3ljJzeeNeWoNJK3NZBHtZ0jJYzN9XHl/M5DhAdbwuw60EhZbRNp7pgh98NRwlHDnpom9tQ0AdXtj121YDx3LpkBwMZ9dXz3Tx9SkOllZGYaIzPTKMhKIz/Dy/B0L1MLMjrtVw0OmjiGiMvnjuPVrVX836ulvLKlkqeuXUBmH4ep0J5VyudxMX101iFzwDx73UKaWyOU1TaxpzZIWU2QPbVN7KkJclxhRzfgvbVNbN4fYHP38xTxxk2nto/q/INnNvDBnlpy/V6G+z0M93vITfeQPczDhDw/8+053SNRQ11TKxlpLp1hMUk0cQwRZ04byb2fP5b7Xt7GlvJ6Vm6s4OJZhX3ap149rg4nze2kJD/9sNVSJ5aM4LnrF1Je10xFfYiKumbKA81U1YeoaWxheHpHteq2ino27O1+iJvFMwraE8f+uiZOuvefAPg9TjKHucka5iYzzU3mMBc3njWlfRiXt0oPsGlfAJ/Hhd/rtP56nPi8LjLTXEzI0yq1I6GJY4gQEb4wZxwNoQh3/W0Tb5Ue6HviaDvjOKiJQx2ZdK+L4wqzOa4XH8WfXXo8FYEQNY0hDjS0UNNo3eqCrcwo7DjjaW6NkO1zE2hqpbElQmNLpNO0Al87ZWL78subKnj0zd3dvt6EPD+v3Lio/f7xd64kGjWkuZ2kuZ14XQ572cHVC4tZfOwoANZ9XMtf3i8jzWU95m3/a21/8azC9k4D739SSzAUwe0U3C4HHqcDt9OB2ylkDXMz3B4VORI1tEaiuJ2OQXHlvyaOIeZEu/fM26XVGGP6dKGXM1urqlTitDXaf5qS/AzW/+dZRKOGxpYwgeYwdcFWAs2t1DW1MinmDGj+hOE4RAi2hGkMRWgMhWlsCRNsiTAmu+O1olHDwWArAIHmQ4e0v/D4Me3LOyrr+f27n/QYX+wB253Pb2L9nu5HX7h4ViE/WfYZALaW13Pu/W8A4BBwO60k43IKbqeDR5bPae8B94tXtvPCR+W4nILLIbgcVrJxOYXiEf729qX+NCASh4j8GLgAaAFKgS8bYw7aj90CXANEgG8YY/6RtEAHgSkjM8j1e9hf18zu6iDFI458DuKOMw4ddkQNPA6HkJHmJiPN3SkJxDp7egFnTy/41H2JwNYfnkMwFCEUjtLcGqE5HCHUai2PH97xPZo1Poe7lkxv3659+9YooXCk0xnDZwqz8HudtIYNLZEorZEo4Yh1dpGf0XFtVNQYvC4HLZEoUQOhcJRQuGMAS9MxtiV7DzaxaX/3VXq1wZZu1x9tAyJxAC8DtxhjwiJyL3AL8H0RmQZcCkwHRgOrRGSyMSaSxFgHNIdDmD8hlxc+Kuft0uq+JQ5t41ApQkTwupx4XZ/ew6skP6PX0/3+Vy+P/meMyWLrDxcDHdVWLTFJJsfX0RZ0w2mTuGLeeMJRQyRqbROOWjefJzE91AZE4jDGrIy5+w5wsb28BHjSGBMCdonIDmAu8HaCQxxUFkwcwQsflfP6tqpDBseLh/aqUirxnA7B6XD22E15dPYwRvdwhpUoA7Ev29XAi/byGGBPzGNl9jp1GKdOycPlEFZuKmdbRf0R78ehiUMp1Y2EJQ4RWSUiG7q5LYnZ5lYgDDxxBPv/qoisFZG1VVVVRzP0Qacwx8fl88YRNXDvi0c+Z7izfaBDbeNQSnVIWOIwxpxhjJnRze1ZABFZDpwPXGFMe1PQXmBszG4K7XXd7f/XxpjZxpjZeXl5/fif
DA7fOH0Sfo+T1VsqueTBt1n3cU3c+2jvVaXdcZVSMQZEVZWInAPcBFxojAnGPPQccKmIeEWkGJgErElGjIPNiHQvdy6Zgd/jZM2uGq5esZbqhvjmJXf4/dYIucEg0aa+TU2rlBo6BkTiAH4BZAAvi8h6EXkAwBizEXgK2AS8BFynPap67/OzCnn31jNYWDKcuqZW7n5hc1zPFxE8hVaf9JZPeu63rpRKLQMicRhjSowxY40xx9u3a2Meu9sYM9EYM8UY8+Lh9qMOle51cfdFx+JxOXj6/b1868kP2FMT/PQn2jxFRQC07NrdPwEqpQadAZE4VP8qGuHnB+cdg9MhPLN+H1eveA8Te0XRYXiKiwFo2b2rP0NUSg0imjhSxJULinjte4sYke5he2UDm/f3rpuup8gaZrtllyYOpZRFE0cKKczxcZY9/MKLG3oY57oLr33GEdq9u7/CUkoNMpo4Usy5M6wRPl/cUN6r7WPbOHpbvaWUGto0caSYeRNyyfa52VHZ0KtrO5wjRuBITycaCBCpif9aEKXU0KOJI8W4nQ4uOG40AJc99C5Prd1z2O1FpOOsQ6urlFJo4khJt5w7lcvmjqUlHOW2ZzZQWd982O3be1ZpA7lSCk0cKcnncfHfS4/jrGkjaQlHe5whrU1bz6rQTk0cSilNHCnt2kXWFJuPv/0xgebWHrfzTpoEQGj79oTEpZQa2DRxpLCZ43KYV5xLfSjMVw4zllXa5MkAhLZtS2R4SqkBShNHirtzyQxGZnpZs7uGpb96i/K6Q9s73GPHImlphCsqdBpZpZQmjlQ3pSCD568/iRljMvm4OshlD71DZaBz8hCns726qlnPOpRKeZo4FPmZaTx+zTyOGZXJrgONXPnwGg52mfTeO9lu59im7RxKpTpNHAqAbJ+Hx6+ZS0l+Olsr6rniN++y92DHHBzt7RxbtyYrRKXUAKGJQ7Ubnu7l8WvmMS7Xx8Z9Ac6//w1e3VoJgHfKFEAbyJVSmjhUFwVZaTx73UIWTcmjNtjK8kff496XtsCEEsDqkmui0SRHqZRKJk0c6hA5fg+PfGkO3zt7Cg6BX71aypLHN7J56jyiwSD1K1cmO0SlVBJp4lDdcjiE604t4amvLWBCnp8dlQ18Z+oyfjT7CtY98FtMOJzsEJVSSSJDcajs2bNnm7Vr1yY7jCGjuTXCr14t5YHXSgmFo4iJcnIOXHnhXE6enIfHpccfSg0FIrLOGDP7U7fTxKF6q6w2yE8fXcWz5YawwwVAZpqLy+eN5+bFU5McnVKqr3qbOPRQUfVaYY6PH193Nk+89XO+vPHvTM52E2gO09waSXZoSqkEciU7ADW4OLxeis47k0se+y1fnZlPw7dvJM3tTHZYSqkE0jMOFbfsZcsACDz/PBN8MDbXl+SIlFKJpIlDxc1bUoJvntU1t+bRFckORymVYJo41BHJ+8YNANSsWEG4tjbJ0SilEkkThzoivlmz8J/8WaLBINW/fijZ4SilEkgThzpied/8JrjdENVeVUqlEu1VpY7YsOnTmfTPV3CNGJHsUJRSCaRnHKpPNGkolXo0cSillIqLJg6llFJx0cShlFIqLpo4lFJKxUUTh1JKqbho4lBKKRUXTRxKKaXiMiQnchKRKuDjPuxiBHDgKIVzNGlc8dG44qNxxWcoxjXeGJP3aRsNycTRVyKytjezYCWaxhUfjSs+Gld8UjkurapSSikVF00cSiml4qKJo3u/TnYAPdC44qNxxUfjik/KxqVtHEoppeKiZxxKKaXiookjhoicIyJbRWSHiNycxDjGisg/RWSTiGwUkW/a6+8Qkb0ist6+nZuE2HaLyEf266+11+WKyMsist3+m5PgmKbElMl6EQmIyLeSUV4i8oiIVIrIhph13ZaPWO63P28fisjMBMf1YxHZYr/2X0Uk215fJCJNMeX2QILj6vF9E5Fb7PLaKiJnJziuP8bEtFtE1tvrE1lePf02JPYzZozRm1Vd5wRK
gQmAB/gXMC1JsYwCZtrLGcA2YBpwB/DdJJfTbmBEl3X/A9xsL98M3Jvk97EcGJ+M8gJOBmYCGz6tfIBzgRcBAeYD7yY4rrMAl718b0xcRbHbJaG8un3f7O/AvwAvUGx/X52JiqvL4/8L/GcSyqun34aEfsb0jKPDXGCHMWanMaYFeBJYkoxAjDH7jTHv28v1wGZgTDJi6aUlwGP28mPARUmM5XSg1BjTlwtAj5gx5nWgpsvqnspnCfBbY3kHyBaRUYmKyxiz0hgTtu++AxT2x2vHG9dhLAGeNMaEjDG7gB1Y39uExiUiAlwC/KE/XvtwDvPbkNDPmCaODmOAPTH3yxgAP9YiUgScALxrr7rePuV8JNFVQjYDrBSRdSLyVXvdSGPMfnu5HBiZhLjaXErnL3Syywt6Lp+B9Jm7GuvItE2xiHwgIq+JyGeTEE9379tAKa/PAhXGmO0x6xJeXl1+GxL6GdPEMYCJSDrwF+BbxpgA8CtgInA8sB/rdDnRTjLGzAQWA9eJyMmxDxrr/DgpXfVExANcCPzJXjUQyquTZJZPT0TkViAMPGGv2g+MM8acAHwH+L2IZCYwpAH3vnVxGZ0PThJeXt38NrRLxGdME0eHvcDYmPuF9rqkEBE31gfjCWPM0wDGmApjTMQYEwUeop9O0w/HGLPX/lsJ/NWOoaLt9Nf+W5nouGyLgfeNMRV2jEkvL1tP5ZP0z5yILAfOB66wf3Cwq4Kq7eV1WG0JkxMV02Het4FQXi5gKfDHtnWJLq/ufhtI8GdME0eH94BJIlJsH7leCjyXjEDsOtSHgc3GmPti1sfWTX4O2ND1uf0cl19EMtqWsRpXN2CV05fszb4EPJvIuGJ0OhJMdnnF6Kl8ngOusnu+zAfqYqob+p2InAPcBFxojAnGrM8TEae9PAGYBOxMYFw9vW/PAZeKiFdEiu241iQqLtsZwBZjTFnbikSWV0+/DST6M5aIngCD5YbVA2Eb1hHDrUmM4ySsU80PgfX27Vzgd8BH9vrngFEJjmsCVq+WfwEb28oIGA6sBrYDq4DcJJSZH6gGsmLWJby8sBLXfqAVqz75mp7KB6unyy/tz9tHwOwEx7UDq/677TP2gL3t5+33dz3wPnBBguPq8X0DbrXLayuwOJFx2etXANd22TaR5dXTb0NCP2N65bhSSqm4aFWVUkqpuGjiUEopFRdNHEoppeKiiUMppVRcNHEopZSKiyYOlVJEZIWIrEp2HF2JyKsi8ptkx6FUb2h3XJVSRCQLcBhjau0f6hJjzKIEvv5twFeMMUVd1ucCYdNl+AilBiJXsgNQKpGMMXX9sV8R8RhrVOUjYozp7QixSiWdVlWplNJWVSUid2BdpXyKiBj7ttzeJl1Efi7WZEJBe9TTpTH7KLK3v0JEXhCRRuAue1iHh0SkVKyJfXaKyD0i4rWftxy4Cxgf85p32I91qqoSEbeI/MiOoUWsiXsu7/K/GBH5uoj8TkTqRaRMRG7pss0SO/6giBwUkTUickI/FK1KIXrGoVLVT7DGFCrGGrQOoM4eC+h5rKEavgDswxqf6EkRWWyMWR2zj3uB7wPX2fcFa3C5y4EK4DjgQaxhK27HGhhvKnAFMMd+TkMP8d2DNdT5tVhDvFwMPC4iFV1iuB24DWvyo3OAX4jIGmPMahEpwBop+Db7bxrWMNxhlOoDTRwqJRljGkSkCWgxxpS3rReRRcACrPkN2qq1fm0PEHcD1nhAbR40xjxBZ7fGLO8WkYnA14HbjTFNItIARGJfsysR8QHfAL5tjGkbIv4eEZlj7z82hj8aYx6yl38pItdjJbrVWLPFuYGnjDG77W029/S6SvWWJg6lOpuDNXXwXuvko50HawC5WIeMzCoi/wZ8BWs6UT/WdyzeKuES+/Ve77L+NeCWLuvWd7m/j45JfD4E/gFsEJGXgVeBp40xe1CqDzRxKNWZA6ijoyopVtfG78bYOyKyDGsk0puxfuQDwDLg7qMfZo8xGexE
ZYyJiMhirP/lDKxRXH8kIsuMMX/rx5jUEKeJQ6WyFsDZZd1aIBtIM8bEO3/HycAHpvMcKkW9eM2udgAhe3+xMZxCnHOKGKu//Rr7do+IvAR8GdDEoY6YJg6VynYBy0RkOlZjdj3wCtZ8Bk+LyE1Y1T05wIlAc0x7Qne2AteIyBKsH/jz6Wh4j33NAhFZgFX1FTQxkygBGGOCInI/Vk+tKjoax5cAZ/b2nxORE4HTgZVYc0tMwmqwf7i3+1CqO9odV6Wyh7FmfnwLqAIus4/QLwSeBn4KbAH+DpyHNRnO4TyINQnRo8AHwDys3k6xnsHq4fR3+zVv6mFft2JNm/ozrCT0ReCLXXpUfZo6rIb+Z7GS1CNY84rfFcc+lDqEXjmulFIqLnrGoZRSKi6aOJRSSsVFE4dSSqm4aOJQSikVF00cSiml4qKJQymlVFw0cSillIqLJg6llFJx0cShlFIqLv8PkBO51CBbIuEAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "#plt.rc('text',usetex=True)nn\n", - "#plt.xscale('log')\n", - "#mpl.rcParams['font.sans-serif']=['SimHei']\n", - "long_end = 200\n", - "x_long = [i for i in range(long_end+1)]\n", - "plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:red')\n", - "plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,linestyle='--',color = 'tab:blue' )\n", - "#plt.plot(x_long,origin_NIDS_error[:long_end+1],linewidth=3)\n", - "\n", - "x = [i for i in range(num_layers+1)]\n", - "plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,color = 'tab:red')\n", - "plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,color = 'tab:blue')\n", - "#plt.plot(x,pred_NIDS_error[:num_layers+1],linewidth=3)\n", - "\n", - "plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') \n", - "plt.xlabel('iterations',fontsize= 'x-large')\n", - "plt.ylabel('NMSE',fontsize= 'x-large')\n", - "\n", - "figure_name = \"D\"+str(n)+\"M\"+str(m)+\"NO\"+str(nnz)\n", - "plt.savefig(\"./error_fig/noise3/\"+figure_name+\".eps\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "50" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "num_layers" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.10" - } - }, - "nbformat": 4, - 
"nbformat_minor": 2 -} diff --git a/convergence_vs_namse.py b/convergence_vs_namse.py new file mode 100644 index 0000000..54f4d55 --- /dev/null +++ b/convergence_vs_namse.py @@ -0,0 +1,197 @@ +import numpy as np +import networkx as nx +import copy +import pandas as pd +import xlwt +import torch +from torch import nn +import torch.optim as optim +from torch_geometric.utils import from_networkx +from torch.utils.data import Dataset, DataLoader +from torch_geometric.data import Data, Batch +from torch_geometric.nn.conv import MessagePassing +from torch_sparse import SparseTensor, matmul +import torch.nn.functional as F +import matplotlib.pyplot as plt + +from data_generator import SynDataset,collate +from model import Net_PGEXTRA,Net_Prox_DGD +from baseline import torch_PGEXTRA,torchProx_DGD,opt_distance,hist_nmse + + +train_num = 1000 +test_num = 100 +num_layers = 50 + + +train_data = SynDataset(train_num) +val_data = SynDataset(test_num) +test_data = SynDataset(test_num) +train_loader = DataLoader(train_data, batch_size=32, shuffle=True, collate_fn=collate) +val_loader = DataLoader(val_data, batch_size=100, shuffle=False, collate_fn=collate) +test_loader = DataLoader(test_data, batch_size=100, shuffle=False, collate_fn=collate) + + + +######################################################### +# Trainning Method +######################################################### + +def step_loss(gamma,x, y): + #gamma = 0.75 + n_steps = x.shape[0] + #print(n_steps) + di = torch.ones((n_steps)) * gamma + power = torch.tensor(range(n_steps, 0, -1)) + gamma_a = di ** power + gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) + + y = torch.unsqueeze(y, axis = 0) + ele_loss = gamma_a * (x - y) **2 + #print(ele_loss.shape) + #print(torch.mean(ele_loss, (1,2,3) )) + loss = torch.mean(ele_loss) + return loss + +######################################################### +# LPGEXTRA +######################################################### +print("LPGEXTRA") +model_PGEXTRA = 
Net_PGEXTRA(1e-3, num_layers) +optimizer = optim.Adam(model_PGEXTRA.parameters(), lr=1e-4) +model_PGEXTRA.train() +epoch_losses = [] +for epoch in range(500): + epoch_loss = 0 + for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader): + z, _,_ = model_PGEXTRA(W, A, y, pyg_data,num_layers) + loss = step_loss(0.83,z, x_true) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + epoch_loss += loss.detach().item() + epoch_loss /= (iter + 1) + if(epoch % 10 == 0): + print(epoch_loss, model_PGEXTRA.lam[1], model_PGEXTRA.step_size[1]) + +######################################################### +# LProx-DGD Training +######################################################### +print("LProx-DGD") +model_Prox_DGD = Net_Prox_DGD(1e-3, num_layers) +optimizer = optim.Adam(model_Prox_DGD.parameters(), lr=1e-4) +model_Prox_DGD.train() +epoch_losses = [] +for epoch in range(500): + epoch_loss = 0 + for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader): + z, _,_ = model_Prox_DGD(W, A, y, pyg_data,num_layers) + loss = step_loss(0.93,z, x_true) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + epoch_loss += loss.detach().item() + epoch_loss /= (iter + 1) + if(epoch % 10 == 0): + print(epoch_loss, model_Prox_DGD.lam[1], model_Prox_DGD.step_size[1]) + + + +######################################################### +# PGEXTRA Training +######################################################### +print("PGEXTRA Training") +lams = [5e-4,7e-4,1e-3, 2e-3,5e-3,1e-2] +taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5] +best_error = 100 +best_pgextra_par = {} +for lam in lams: + for tau in taus: + for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader): + original,origin_hist = torch_PGEXTRA(W, A, y, 100, lam, tau) + loss2 = opt_distance(original.detach().numpy(), x_true.numpy()) + loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy()) + + print("lamb\ttau\tlayer_loss\t\tfinal_loss") + print(lam,'\t', tau, '\t',loss1,'\t',loss2) + + if 
loss2 < best_error: + best_pgextra_par['lam'] = lam + best_pgextra_par['tau'] = tau + best_error = loss2 + + + +######################################################### +# Prox-DGD Training +######################################################### +print("Prox-DGD Training") +lams = [5e-4,7e-4,1e-3, 2e-3,5e-3] +taus = [1e-2, 5e-2,1e-1,5e-1, 1, 5] +best_error = 100 +best_dgd_par = {} +for lam in lams: + for tau in taus: + for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader): + original,origin_hist = torchProx_DGD(W, A, y, 100, lam, tau) + loss2 = opt_distance(original.detach().numpy(), x_true.numpy()) + loss1 = opt_distance(origin_hist[num_layers].detach().numpy(),x_true.numpy()) + + print("lamb\ttau\tlayer_loss\t\tfinal_loss") + print(lam,'\t', tau, '\t',loss1,'\t',loss2) + if loss2 < best_error: + best_dgd_par['lam'] = lam + best_dgd_par['tau'] = tau + best_error = loss2 + +print("Best for PGEXTRA:",best_pgextra_par) +print("Best for Prox-DGD:",best_dgd_par) + + + +######################################################### +# Test Part +######################################################### +for iter, (W, A, y, x_true,pyg_data) in enumerate(test_loader): + _,pred_PGEXTRA,pred_PGEXTRA_hist = model_PGEXTRA(W, A, y, pyg_data,num_layers) + _,pred_DGD,pred_DGD_hist = model_Prox_DGD(W, A, y, pyg_data,num_layers) + + original_PGEXTRA,original_PGEXTRA_hist = torch_PGEXTRA(W, A, y, 300,best_pgextra_par['lam'],best_pgextra_par['tau'] ) + original_DGD, original_DGD_hist = torchProx_DGD(W, A, y, 300,best_dgd_par['lam'],best_dgd_par['tau']) + + +origin_PGEXTRA_error = hist_nmse(original_PGEXTRA_hist,x_true) +origin_DGD_error = hist_nmse(original_DGD_hist,x_true) +pred_PGEXTRA_error = hist_nmse(pred_PGEXTRA_hist,x_true) +pred_DGD_error = hist_nmse(pred_DGD_hist,x_true) + +figure_name = "M300"+"NO30" +writer_error=pd.ExcelWriter(figure_name+".xls") +df_error= pd.DataFrame({'PG-EXTRA':origin_PGEXTRA_error,'DGD':origin_DGD_error}) 
+df_error.to_excel(writer_error,sheet_name='Origin') + +df_feasibility= pd.DataFrame({'PG-EXTRA':pred_PGEXTRA_error,'DGD':pred_DGD_error}) +df_feasibility.to_excel(writer_error,sheet_name='GNN') +writer_error.save() + + +######################################################### +# Plot Part +######################################################### +long_end = 200 +x_long = [i for i in range(long_end+1)] +plt.plot(x_long,origin_DGD_error[:long_end+1],linewidth=2,color = 'tab:red') +plt.plot(x_long,origin_PGEXTRA_error[:long_end+1],linewidth=2,color = 'tab:blue' ) + +x = [i for i in range(num_layers+1)] +plt.plot(x,pred_DGD_error[:num_layers+1],linewidth=2,linestyle='--',color = 'tab:red') +plt.plot(x,pred_PGEXTRA_error[:num_layers+1],linewidth=2,linestyle='--',color = 'tab:blue') +plt.legend(['Prox-DGD','PG-EXTRA','GNN-Prox-DGD','GNN-PG-EXTRA'],loc='upper right',fontsize='large') +plt.xlabel('iterations',fontsize= 'x-large') +plt.ylabel('NMSE',fontsize= 'x-large') + +figure_name = "M300"+"NO30" +plt.savefig(figure_name+".eps") +plt.show() diff --git a/data_generator.py b/data_generator.py new file mode 100644 index 0000000..0923dac --- /dev/null +++ b/data_generator.py @@ -0,0 +1,128 @@ +import numpy as np +import networkx as nx +import torch +from torch import nn +import torch.optim as optim +from torch_geometric.utils import from_networkx +from torch.utils.data import Dataset, DataLoader +from torch_geometric.data import Data, Batch +from torch_geometric.nn.conv import MessagePassing +from torch_sparse import SparseTensor, matmul + +num_nodes = 5 +num_edges = 6 +n = 100 +m = 300 +k = 60 +nnz = 30 + +def metropolis(adjacency_matrix): + num_of_nodes = adjacency_matrix.shape[0] + metropolis=np.zeros((num_of_nodes,num_of_nodes)) + for i in range(num_of_nodes): + for j in range(num_of_nodes): + if adjacency_matrix[i,j]==1: + d_i = np.sum(adjacency_matrix[i,:]) + d_j = np.sum(adjacency_matrix[j,:]) + metropolis[i,j]=1/(1+max(d_i,d_j)) + 
metropolis[i,i]=1-sum(metropolis[i,:]) + return metropolis + +class SynDataset(Dataset): + def __init__(self, samples): + self.samples = samples + self.A = []; + self.y = []; + self.x_true = [] + self.pyg_data=[] + self.process() + + + def gen_func(self, num_of_nodes, n, m, k): + A_all = np.random.randn(m, n) + x = np.random.randn(n) + x_norm = 0 + + while(x_norm < 1e-2): + x_mask = np.random.rand(n) + x_mask[x_mask < 1 - nnz/100] = 0 + x_mask[x_mask > 0] = 1 + x_norm = np.linalg.norm(x * x_mask) + + x = x * x_mask + x = x/np.linalg.norm(x) + + SNR_db = 30 + SNR = 10**(SNR_db/10) + + noise = np.random.randn(m) * np.sqrt(1/SNR) + y_all = A_all@x + noise + + A = np.zeros((num_of_nodes, k , n)) + y = np.zeros((num_of_nodes, k)) + for ii in range(num_of_nodes): + start = (k*ii) % m; end = (k*(ii+1) )%m + if(start > end): + A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0) + y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), + np.expand_dims(y_all[:end], axis = 0)), axis = 1) + else: + A[ii,:,:] = A_all[start:end,:] + y[ii,:] = np.expand_dims(y_all[start:end], axis = 0) + + x = np.expand_dims(x, axis = 0) + x = x.repeat(num_of_nodes, axis = 0) + + return A, y, x + + def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True): + G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed) + k = 0 + while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False: + G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed) + k += 1 + # print("Check if connected: ", nx.is_connected(G)) + # nx.draw(G) + + edge_index = from_networkx(G).edge_index + adj = nx.to_numpy_matrix(G) + return G, adj,edge_index + + def process(self): + _, adj,edge_index = self.gen_graph(num_nodes, num_edges) + self.edge_index = edge_index + W = metropolis(adj) + self.W = [torch.tensor(W, dtype = torch.float)] * self.samples + + + for ii in range(self.samples): + A, y, x_true = self.gen_func(num_nodes, 
n, m, k) + self.A.append(torch.tensor(A, dtype = torch.float) ); + self.y.append(torch.tensor(y, dtype = torch.float) ); + self.x_true.append(torch.tensor(x_true, dtype = torch.float) ) + + edge_weight=torch.tensor(W,dtype=torch.float) + self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) + + + + def __getitem__(self, idx): + return self.W[idx], self.A[idx], self.y[idx], self.x_true[idx], self.pyg_data[idx] + + def __len__(self): + """Number of graphs in the dataset""" + return len(self.A) + + +def collate(samples): + # The input `samples` is a list of pairs + # (graph, label). + W, A, y, x_true, pyg_data = map(list, zip(*samples)) + W = torch.stack(W) + A = torch.stack(A) + y = torch.stack(y) + x_true = torch.stack(x_true) + pyg_data = Batch.from_data_list(pyg_data) + return W, A, y, x_true, pyg_data + + diff --git a/model.py b/model.py new file mode 100644 index 0000000..d2c0024 --- /dev/null +++ b/model.py @@ -0,0 +1,143 @@ +import torch +from torch import nn +import torch.optim as optim +from torch_geometric.utils import from_networkx +from torch.utils.data import Dataset, DataLoader +from torch_geometric.data import Data, Batch +from torch_geometric.nn.conv import MessagePassing +from torch_sparse import SparseTensor, matmul +import torch.nn.functional as F + +class MetropolisConv(MessagePassing): + def __init__(self): + super(MetropolisConv, self).__init__(aggr='add') # "Add" aggregation. 
+ + def forward(self, x, pyg_data): + (B, N, D)=x.shape + out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1) + return out.view(B,N,D) + + def message_and_aggregate(self, adj_t, x): + return matmul(adj_t, x, reduce=self.aggr) + + +class Net_PGEXTRA(torch.nn.Module): + def __init__(self, step_size, num_layers): + super(Net_PGEXTRA, self).__init__() + self.step_size = nn.Parameter(torch.ones(num_layers)*step_size) + self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10) + self.num_layers = num_layers + self.conv=MetropolisConv() + def tgrad_qp(self, A, b, x): + # A: nodes * k * n + # X: nodes * n + # Y: nodes * k + '''grad_A = np.zeros(x.shape) + for i in range(x.shape[0]): + grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i]) + return grad_A''' + x_ = torch.unsqueeze(x, axis = -1) + b_ = torch.unsqueeze(b, axis = -1) + + A_t = A.transpose(2,3) + grad_A = A_t @ (A @ x_ - b_) + #print(A.shape, x.shape, b.shape) + #print(grad_A.shape) + grad_A = torch.squeeze(grad_A, axis = -1) + #print(grad_A.shape) + return grad_A + + def act(self, x, ii): + tau = self.lam[ii] #* self.step_size[ii] + return F.relu(x - tau) - F.relu( - x - tau) + + def forward(self, W, A, b,pyg_data, max_iter): + (batch_size, num_of_nodes, _, dim) = A.shape + init_x = torch.zeros((batch_size, num_of_nodes, dim)) + ret_z = [] + + k = 1 + x_0 = init_x + x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0) + x_1 = self.act(x_12, 0) + + x_hist = [init_x,x_1] + while (k < max_iter): + x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \ + self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0)) + x_2 = self.act(x_32, k) + + ret_z.append(x_2) + + x_0 = x_1 + x_1 = x_2 + x_12 = x_32 + + k = k + 1 + x_hist.append(x_2) + + ret_z = torch.stack(ret_z) + return ret_z, x_2,x_hist + +class Net_Prox_DGD(torch.nn.Module): + def __init__(self, step_size, num_layers): + super(Net_Prox_DGD, self).__init__() + self.step_size = 
nn.Parameter(torch.ones(num_layers)*step_size) + self.lam = nn.Parameter(torch.ones(num_layers)*step_size*10) + self.num_layers = num_layers + self.conv=MetropolisConv() + def tgrad_qp(self, A, b, x): + # A: nodes * k * n + # X: nodes * n + # Y: nodes * k + '''grad_A = np.zeros(x.shape) + for i in range(x.shape[0]): + grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i]) + return grad_A''' + x_ = torch.unsqueeze(x, axis = -1) + b_ = torch.unsqueeze(b, axis = -1) + + A_t = A.transpose(2,3) + grad_A = A_t @ (A @ x_ - b_) + #print(A.shape, x.shape, b.shape) + #print(grad_A.shape) + grad_A = torch.squeeze(grad_A, axis = -1) + #print(grad_A.shape) + return grad_A + + def act(self, x, ii): + tau = self.lam[ii] #* self.step_size[ii] + return F.relu(x - tau) - F.relu( - x - tau) + + def forward(self, W, A, b,pyg_data, max_iter): + (batch_size, num_of_nodes, _, dim) = A.shape + init_x = torch.zeros((batch_size, num_of_nodes, dim)) + ret_z = [] + + k = 1 + x_0 = init_x + x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0) + x_1 = self.act(x_12, 0) + + x_hist = [init_x,x_1] + while (k < max_iter): + #x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \ + # self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0)) + x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1) + x_2 = self.act(x_32, k) + + ret_z.append(x_2) + + x_0 = x_1 + x_1 = x_2 + x_12 = x_32 + + k = k + 1 + x_hist.append(x_2) + + ret_z = torch.stack(ret_z) + return ret_z, x_2,x_hist + + + + diff --git a/snr_vs_namse.py b/snr_vs_namse.py index 40f224e..5137112 100644 --- a/snr_vs_namse.py +++ b/snr_vs_namse.py @@ -1,271 +1,89 @@ -import numpy as np -import networkx as nx -import copy -import torch -from torch import nn -import torch.optim as optim -from torch_geometric.utils import from_networkx -from torch.utils.data import Dataset, DataLoader -from torch_geometric.data import Data, Batch -from torch_geometric.nn.conv import 
MessagePassing -from torch_sparse import SparseTensor, matmul -import torch.nn.functional as F -import matplotlib.pyplot as plt - -def metropolis(adjacency_matrix): - num_of_nodes = adjacency_matrix.shape[0] - metropolis=np.zeros((num_of_nodes,num_of_nodes)) - for i in range(num_of_nodes): - for j in range(num_of_nodes): - if adjacency_matrix[i,j]==1: - d_i = np.sum(adjacency_matrix[i,:]) - d_j = np.sum(adjacency_matrix[j,:]) - metropolis[i,j]=1/(1+max(d_i,d_j)) - metropolis[i,i]=1-sum(metropolis[i,:]) - return metropolis - -class SynDataset(Dataset): - def __init__(self, samples): - self.samples = samples - self.A = []; - self.y = []; - self.x_true = [] - self.pyg_data=[] - self.process() - - - def gen_func(self, num_of_nodes, n, m, k): - A_all = np.random.randn(m, n) - x = np.random.randn(n) - x_norm = 0 - - while(x_norm < 1e-2): - x_mask = np.random.rand(n) - x_mask[x_mask < 1 - nnz/100] = 0 - x_mask[x_mask > 0] = 1 - x_norm = np.linalg.norm(x * x_mask) - - x = x * x_mask - x = x/np.linalg.norm(x) - - SNR = 10**(SNR_db/10) - - noise = np.random.randn(m) * np.sqrt(1/SNR) - y_all = A_all@x + noise - - A = np.zeros((num_of_nodes, k , n)) - y = np.zeros((num_of_nodes, k)) - for ii in range(num_of_nodes): - start = (k*ii) % m; end = (k*(ii+1) )%m - if(start > end): - A[ii,:,:] = np.concatenate((A_all[start:,:],A_all[:end,:]), axis = 0) - y[ii,:] = np.concatenate((np.expand_dims(y_all[start:], axis = 0), - np.expand_dims(y_all[:end], axis = 0)), axis = 1) - else: - A[ii,:,:] = A_all[start:end,:] - y[ii,:] = np.expand_dims(y_all[start:end], axis = 0) - - x = np.expand_dims(x, axis = 0) - x = x.repeat(num_of_nodes, axis = 0) - - return A, y, x - - def gen_graph(self, num_of_nodes, num_of_edges, directed=False, add_self_loops=True): - G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed) - k = 0 - while (nx.is_strongly_connected(G) if directed else nx.is_connected(G)) == False: - G = nx.gnm_random_graph(num_of_nodes, num_of_edges, directed=directed) - k 
+= 1 - # print("Check if connected: ", nx.is_connected(G)) - # nx.draw(G) - - edge_index = from_networkx(G).edge_index - adj = nx.to_numpy_matrix(G) - return G, adj,edge_index - - def process(self): - _, adj,edge_index = self.gen_graph(num_nodes, num_edges) - self.edge_index = edge_index - W = metropolis(adj) - self.W = [torch.tensor(W, dtype = torch.float)] * self.samples - - - for ii in range(self.samples): - A, y, x_true = self.gen_func(num_nodes, n, m, k) - self.A.append(torch.tensor(A, dtype = torch.float) ); - self.y.append(torch.tensor(y, dtype = torch.float) ); - self.x_true.append(torch.tensor(x_true, dtype = torch.float) ) - - edge_weight=torch.tensor(W,dtype=torch.float) - self.pyg_data.append(Data(edge_weight=SparseTensor.from_dense(edge_weight))) - - - - def __getitem__(self, idx): - return self.W[idx], self.A[idx], self.y[idx], self.x_true[idx], self.pyg_data[idx] - - def __len__(self): - """Number of graphs in the dataset""" - return len(self.A) - -def collate(samples): - # The input `samples` is a list of pairs - # (graph, label). - W, A, y, x_true, pyg_data = map(list, zip(*samples)) - W = torch.stack(W) - A = torch.stack(A) - y = torch.stack(y) - x_true = torch.stack(x_true) - pyg_data = Batch.from_data_list(pyg_data) - return W, A, y, x_true, pyg_data - - -class MetropolisConv(MessagePassing): - def __init__(self): - super(MetropolisConv, self).__init__(aggr='add') # "Add" aggregation. 
- - def forward(self, x, pyg_data): - (B, N, D)=x.shape - out = self.propagate(x=x.view(-1,D), edge_index=pyg_data.edge_weight, node_dim=-1) - return out.view(B,N,D) - - def message_and_aggregate(self, adj_t, x): - return matmul(adj_t, x, reduce=self.aggr) - -class Net_PGEXTRA(torch.nn.Module): - def __init__(self, step_size, num_layers): - super(Net_PGEXTRA, self).__init__() - self.step_size = nn.Parameter(torch.ones(num_layers)*step_size) - self.lam = nn.Parameter(torch.ones(num_layers)*step_size*5) - self.num_layers = num_layers - self.conv=MetropolisConv() - def tgrad_qp(self, A, b, x): - # A: nodes * k * n - # X: nodes * n - # Y: nodes * k - '''grad_A = np.zeros(x.shape) - for i in range(x.shape[0]): - grad_A[i] = A[i].T @ (A[i] @ x[i] - b[i]) - return grad_A''' - x_ = torch.unsqueeze(x, axis = -1) - b_ = torch.unsqueeze(b, axis = -1) - - A_t = A.transpose(2,3) - grad_A = A_t @ (A @ x_ - b_) - #print(A.shape, x.shape, b.shape) - #print(grad_A.shape) - grad_A = torch.squeeze(grad_A, axis = -1) - #print(grad_A.shape) - return grad_A - - def act(self, x, ii): - tau = self.lam[ii] #* self.step_size[ii] - return F.relu(x - tau) - F.relu( - x - tau) - - def forward(self, W, A, b,pyg_data, max_iter): - (batch_size, num_of_nodes, _, dim) = A.shape - init_x = torch.zeros((batch_size, num_of_nodes, dim)) - ret_z = [] - - k = 1 - x_0 = init_x - x_12 = self.conv(x_0,pyg_data) - self.step_size[0] * self.tgrad_qp(A, b, x_0) - x_1 = self.act(x_12, 0) - - x_hist = [init_x,x_1] - while (k < max_iter): - x_32 = self.conv(x_1,pyg_data) + x_12 - (self.conv(x_0,pyg_data) + x_0)/2 - \ - self.step_size[k] * (self.tgrad_qp(A, b, x_1)-self.tgrad_qp(A, b, x_0)) - #x_32 = self.conv(x_1,pyg_data) - self.step_size[k] * self.tgrad_qp(A, b, x_1) - x_2 = self.act(x_32, k) - - ret_z.append(x_2) - - x_0 = x_1 - x_1 = x_2 - x_12 = x_32 - - k = k + 1 - x_hist.append(x_2) - - ret_z = torch.stack(ret_z) - return ret_z, x_2,x_hist - - -def step_loss(x, y, g): - gamma = g - n_steps = x.shape[0] - 
#print(n_steps) - di = torch.ones((n_steps)) * gamma - power = torch.tensor(range(n_steps, 0, -1)) - gamma_a = di ** power - gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) - - y = torch.unsqueeze(y, axis = 0) - ele_loss = gamma_a * (x - y) **2 - #print(ele_loss.shape) - #print(torch.mean(ele_loss, (1,2,3) )) - loss = torch.mean(ele_loss) - return loss - -def opt_distance(opt,x): - error = 0 - batch_size = x.shape[0] - num_of_nodes = x.shape[1] - error = np.linalg.norm(x-opt)**2 - return error/num_of_nodes/batch_size - -def hist_nmse(x_hist,opt): - error = [] - iteration = len(x_hist) - for k in range(iteration): - error.append(10*np.log10(opt_distance(x_hist[k].detach(),opt))) - return error - -num_nodes = 5 -num_edges = 6 -n = 100 -train_num = 1000 -test_num = 100 -num_epoches = 500 - -gammas = [0.9] -m_array = [300] -layer_array = [10, 30, 50] -SNR_db_array = [0, 5, 10, 15, 20, 25, 30] - -for g in gammas: - for m in m_array: - for num_layers in layer_array: - for SNR_db in SNR_db_array: - k = m // 5 - nnz = m //10 - train_data = SynDataset(train_num) - test_data = SynDataset(test_num) - train_loader = DataLoader(train_data, batch_size=64, shuffle=True, collate_fn=collate) - model = Net_PGEXTRA(1e-3, num_layers) - optimizer = optim.Adam(model.parameters(), lr=2e-5) - model.train() - epoch_losses = [] - for epoch in range(num_epoches): - epoch_loss = 0 - for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader): - z, _,_ = model(W, A, y, pyg_data,num_layers) - loss = step_loss(z, x_true, g) - - optimizer.zero_grad() - loss.backward() - optimizer.step() - epoch_loss += loss.detach().item() - epoch_loss /= (iter + 1) - - val_loader = DataLoader(test_data, batch_size=test_num, shuffle=False, collate_fn=collate) - - for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader): - _,pred,pred_hist = model(W, A, y, pyg_data,num_layers) - pred_error = hist_nmse(pred_hist,x_true) - - print('m', m, 'snr', SNR_db ,'layer', num_layers, 'error', 
pred_error[num_layers]) - - #if(epoch % 10 == 0): - # print(epoch_loss, model.lam[1], model.step_size[1]) +import numpy as np +import networkx as nx +import copy +import pandas as pd +import xlwt +import torch +from torch import nn +import torch.optim as optim +from torch_geometric.utils import from_networkx +from torch.utils.data import Dataset, DataLoader +from torch_geometric.data import Data, Batch +from torch_geometric.nn.conv import MessagePassing +from torch_sparse import SparseTensor, matmul +import torch.nn.functional as F +import matplotlib.pyplot as plt + +from data_generator import SynDataset,collate +from model import Net_PGEXTRA,Net_Prox_DGD +from baseline import torch_PGEXTRA,torchProx_DGD,opt_distance,hist_nmse + +######################################################### +# Training Method +######################################################### +def step_loss(x, y, g): + gamma = g + n_steps = x.shape[0] + #print(n_steps) + di = torch.ones((n_steps)) * gamma + power = torch.tensor(range(n_steps, 0, -1)) + gamma_a = di ** power + gamma_a = gamma_a.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1) + + y = torch.unsqueeze(y, axis = 0) + ele_loss = gamma_a * (x - y) **2 + #print(ele_loss.shape) + #print(torch.mean(ele_loss, (1,2,3) )) + loss = torch.mean(ele_loss) + return loss + +######################################################### +# LPGEXTRA +######################################################### +num_nodes = 5 +num_edges = 6 +n = 100 +train_num = 1000 +test_num = 100 +num_epoches = 500 + +gammas = [0.9] +m_array = [300] +layer_array = [10, 30, 50] +SNR_db_array = [0, 5, 10, 15, 20, 25, 30] + +for g in gammas: + for m in m_array: + for num_layers in layer_array: + for SNR_db in SNR_db_array: + k = m // 5 + nnz = m //10 + train_data = SynDataset(train_num) + test_data = SynDataset(test_num) + train_loader = DataLoader(train_data, batch_size=64, shuffle=True, collate_fn=collate) + model = Net_PGEXTRA(1e-3, num_layers) + optimizer = 
optim.Adam(model.parameters(), lr=2e-5) + model.train() + epoch_losses = [] + for epoch in range(num_epoches): + epoch_loss = 0 + for iter, (W, A, y, x_true,pyg_data) in enumerate(train_loader): + z, _,_ = model(W, A, y, pyg_data,num_layers) + loss = step_loss(z, x_true, g) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + epoch_loss += loss.detach().item() + epoch_loss /= (iter + 1) + + val_loader = DataLoader(test_data, batch_size=test_num, shuffle=False, collate_fn=collate) + + for iter, (W, A, y, x_true,pyg_data) in enumerate(val_loader): + _,pred,pred_hist = model(W, A, y, pyg_data,num_layers) + pred_error = hist_nmse(pred_hist,x_true) + + print('m', m, 'snr', SNR_db ,'layer', num_layers, 'error', pred_error[num_layers]) + + #if(epoch % 10 == 0): + # print(epoch_loss, model.lam[1], model.step_size[1])