
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (39)
Showing changes with 1272 additions and 114 deletions
......@@ -8,4 +8,6 @@
**/leaf/
**.egg-info
2021**
2022**
\ No newline at end of file
2022**
**/massif.out*
*swp
......@@ -2,6 +2,9 @@
decentralizepy
==============
decentralizepy is a framework for running distributed applications (particularly ML) on top of arbitrary topologies (decentralized, federated, parameter server).
It was primarily conceived for assessing scientific ideas on several aspects of distributed learning (communication efficiency, privacy, data heterogeneity, etc.).
-------------------------
Setting up decentralizepy
-------------------------
......@@ -23,10 +26,14 @@ Setting up decentralizepy
pip install --upgrade pip
* On Mac M1, installing ``pyzmq`` fails with ``pip``. Use `conda <https://conda.io>`_.
* Install decentralizepy for development. ::
* Install decentralizepy for development. (zsh) ::
pip3 install --editable .\[dev\]
* Install decentralizepy for development. (bash) ::
pip3 install --editable .[dev]
----------------
Running the code
----------------
......
6
0 1
0 2
0 3
0 4
0 5
1 0
2 0
3 0
4 0
5 0
\ No newline at end of file
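The new file above is a graph in the plain edge-list format used by decentralizepy: the first line gives the number of nodes, and every following line is a directed edge `u v` (here a 6-node star centred on node 0). Later cells load such files with `Graph.read_graph_from_file(path, "edges")`. As a rough, hypothetical illustration of the format only (the `read_edges` helper is not part of the library), it can be parsed like this:

``` python
from collections import defaultdict

def read_edges(path):
    # Hypothetical helper, for illustration only: parse an edge-list file
    # whose first line is the node count and whose remaining lines are "u v" pairs.
    with open(path, "r") as inf:
        n_nodes = int(inf.readline())
        adj = defaultdict(set)
        for line in inf:
            if line.strip():
                u, v = map(int, line.split())
                adj[u].add(v)
    return n_nodes, adj

# For the 6-node star above, adj[0] == {1, 2, 3, 4, 5}.
```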
%% Cell type:code id: tags:
```
``` python
from datasets.Femnist import Femnist
from graphs import SmallWorld
from collections import defaultdict
import os
import json
import numpy as np
```
%% Cell type:code id: tags:
```
``` python
a = Femnist
a
```
%% Cell type:code id: tags:
```
``` python
b = SmallWorld(6, 2, 2, 1)
```
%% Cell type:code id: tags:
```
``` python
b.adj_list
```
%% Cell type:code id: tags:
```
``` python
for i in range(12):
    print(b.neighbors(i))
```
%% Cell type:code id: tags:
```
``` python
clients = []
```
%% Cell type:code id: tags:
```
``` python
num_samples = []
data = defaultdict(lambda : None)
```
%% Cell type:code id: tags:
```
``` python
datadir = "./leaf/data/femnist/data/train"
files = os.listdir(datadir)
total_users=0
users = set()
```
%% Cell type:code id: tags:
```
``` python
files = os.listdir(datadir)[0:1]
```
%% Cell type:code id: tags:
```
``` python
for f in files:
    file_path = os.path.join(datadir, f)
    print(file_path)
    with open(file_path, 'r') as inf:
        client_data = json.load(inf)
    current_users = len(client_data['users'])
    print("Current_Users: ", current_users)
    total_users += current_users
    users.update(client_data['users'])

print("total_users: ", total_users)
print("total_users: ", len(users))
print(client_data['user_data'].keys())
print(np.array(client_data['user_data']['f3408_47']['x']).shape)
print(np.array(client_data['user_data']['f3408_47']['y']).shape)
print(np.array(client_data['user_data']['f3327_11']['x']).shape)
print(np.array(client_data['user_data']['f3327_11']['y']).shape)
print(np.unique(np.array(client_data['user_data']['f3327_11']['y'])))
```
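The cells above define `num_samples` and `data` but never fill them; one plausible continuation, sketched here purely for illustration (not code from the repository), would fold the per-user arrays of the loaded LEAF json into those structures:

``` python
# Hypothetical continuation: gather per-user arrays and sample counts
# from the LEAF-style json loaded above.
for user in client_data['users']:
    x = np.array(client_data['user_data'][user]['x'])  # shape (n_i, n_features)
    y = np.array(client_data['user_data'][user]['y'])  # shape (n_i,)
    data[user] = (x, y)
    num_samples.append(len(y))

print(sum(num_samples), "samples across", len(num_samples), "users")
```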
%% Cell type:code id: tags:
```
``` python
file = 'run.py'
with open(file, 'r') as inf:
    print(inf.readline().strip())
    print(inf.readlines())
```
%% Cell type:code id: tags:
```
``` python
def f(l):
    l[2] = 'c'

a = ['a', 'a', 'a']
print(a)
f(a)
print(a)
```
%% Cell type:code id: tags:
```
``` python
l = ['a', 'b', 'c']
print(l[:-1])
```
%% Cell type:code id: tags:
```
``` python
from localconfig import LocalConfig
def read_ini(file_path):
    config = LocalConfig(file_path)
    for section in config:
        print("Section: ", section)
        for key, value in config.items(section):
            print((key, value))
    print(dict(config.items('DATASET')))
    return config

config = read_ini("config.ini")
for section in config:
    print(section)
# d = dict(config.sections())
```
%% Cell type:code id: tags:
```
``` python
def func(a = 1, b = 2, c = 3):
    print(a + b + c)

l = [3, 5, 7]
func(*l)
```
%% Cell type:code id: tags:
```
``` python
from torch import multiprocessing as mp
# torch.multiprocessing.spawn has no kwargs parameter; it calls fn(i, *args)
# with the process index i as the first argument, so pass positional args as a tuple.
mp.spawn(fn = func, nprocs = 2, args = (5, 6))
```
%% Cell type:code id: tags:
```
``` python
l = '[0.4, 0.2, 0.3, 0.1]'
type(eval(l))
```
%% Cell type:code id: tags:
```
``` python
from decentralizepy.datasets.Femnist import Femnist
f1 = Femnist(0, 1, 'leaf/data/femnist/data/train')
ts = f1.get_trainset(1)
for data, target in ts:
    print(data)
    break
```
%% Cell type:code id: tags:
```
``` python
from decentralizepy.datasets.Femnist import Femnist
from decentralizepy.graphs.SmallWorld import SmallWorld
from decentralizepy.mappings.Linear import Linear
f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])
g = SmallWorld(4, 1, 0.5)
l = Linear(2, 2)
```
%% Cell type:code id: tags:
```
``` python
from decentralizepy.node.Node import Node
from torch import multiprocessing as mp
import logging
n1 = Node(0, l, g, f, "./results", logging.DEBUG)
n2 = Node(1, l, g, f, "./results", logging.DEBUG)
# mp.spawn(fn = Node, nprocs = 2, args=[l,g,f])
```
%% Cell type:code id: tags:
```
``` python
from testing import f
```
%% Cell type:code id: tags:
```
``` python
from torch import multiprocessing as mp
import torch
from decentralizepy.datasets.Celeba import CNN
import numpy as np
m1 = CNN()
o1 = torch.optim.SGD(m1.parameters(), 0.6)
print(m1)
#mp.spawn(fn = f, nprocs = 2, args=[m1, o1])
```
%% Output
CNN(
(conv1): Conv2d(3, 32, kernel_size=(3, 3), stride=(1, 1), padding=same)
(pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(conv2): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=same)
(conv3): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=same)
(conv4): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=same)
(fc1): Linear(in_features=800, out_features=2, bias=True)
)
%% Cell type:code id: tags:
```
``` python
m1.parameters()
```
%% Output
<generator object Module.parameters at 0x111ec1ba0>
%% Cell type:code id: tags:
```
``` python
m1.state_dict()
```
%% Output
OrderedDict([('conv1.weight',
tensor([[[[-0.1261, 0.1833, -0.1406],
[ 0.1324, -0.0685, 0.0938],
[ 0.0432, 0.1814, -0.0541]],
[[-0.1776, -0.1839, -0.0111],
[ 0.0888, 0.0888, -0.1344],
[-0.1838, 0.1737, 0.1584]],
[[ 0.0417, 0.1064, -0.0156],
[ 0.0667, 0.0856, -0.1746],
[ 0.0412, 0.1620, 0.0125]]],
[[[-0.0530, -0.1273, -0.0797],
[ 0.0422, 0.1135, 0.0475],
[-0.0244, 0.1691, -0.1383]],
[[ 0.0822, -0.1317, -0.1692],
[ 0.1373, 0.1388, 0.0103],
[-0.0481, 0.1105, 0.0631]],
[[-0.0352, 0.1259, -0.0530],
[-0.1394, -0.0281, 0.1844],
[ 0.0082, 0.1187, 0.0211]]],
[[[ 0.0987, 0.0788, -0.1126],
[ 0.1769, 0.0763, -0.1767],
[-0.0570, 0.1156, 0.1770]],
[[ 0.0643, -0.0024, -0.0625],
[ 0.0819, 0.0140, -0.1882],
[ 0.1325, -0.0632, -0.0202]],
[[ 0.0053, 0.1042, -0.0058],
[-0.1082, -0.1753, 0.1762],
[-0.0501, 0.1166, 0.0561]]],
[[[ 0.0358, -0.0685, -0.1278],
[ 0.0029, -0.1107, 0.1169],
[-0.1408, 0.1293, 0.1142]],
[[-0.0814, 0.0470, 0.0188],
[ 0.1538, 0.0137, 0.1128],
[-0.1597, 0.1432, 0.1370]],
[[ 0.1425, 0.1769, -0.0037],
[-0.1080, -0.0805, -0.0195],
[-0.1335, -0.1666, 0.1399]]],
[[[ 0.1117, 0.1918, -0.1666],
[-0.1392, 0.0086, 0.0172],
[-0.0721, -0.1711, 0.0344]],
[[ 0.1820, -0.0537, -0.0974],
[ 0.0366, -0.0710, 0.1273],
[ 0.1132, -0.1594, 0.0878]],
[[-0.0874, -0.0401, 0.1827],
[-0.0301, 0.1205, -0.0396],
[-0.1143, -0.1007, 0.1561]]],
[[[ 0.1522, -0.0012, -0.1785],
[-0.1833, -0.1828, -0.1643],
[-0.1765, -0.1757, -0.0608]],
[[-0.0684, 0.0521, 0.1137],
[-0.0028, 0.0616, 0.0758],
[-0.1736, 0.0667, 0.1229]],
[[ 0.1298, -0.1848, -0.1570],
[-0.1052, -0.1172, -0.1223],
[-0.1389, -0.0095, -0.0410]]],
[[[ 0.0213, -0.0975, 0.0964],
[ 0.0535, -0.0775, 0.0790],
[-0.1796, -0.1468, 0.1036]],
[[-0.0403, 0.0646, -0.0932],
[ 0.1779, -0.1616, 0.0644],
[-0.0508, -0.1158, -0.0592]],
[[-0.1644, -0.1327, 0.0817],
[ 0.0320, -0.0213, -0.0946],
[-0.1106, 0.1463, -0.1642]]],
[[[-0.0985, -0.1160, -0.0915],
[ 0.1857, 0.0806, 0.1761],
[-0.0817, 0.1095, 0.0896]],
[[-0.0660, -0.1680, 0.1833],
[ 0.0611, 0.0077, -0.0848],
[-0.1516, 0.1737, 0.0484]],
[[ 0.1434, -0.0732, -0.0904],
[ 0.0962, 0.1783, 0.0192],
[ 0.0915, 0.0006, 0.0334]]],
[[[-0.0047, 0.1807, -0.1798],
[-0.0164, 0.1119, -0.0805],
[ 0.1855, -0.0681, -0.0187]],
[[-0.0069, 0.0491, -0.1868],
[-0.1609, -0.0316, 0.0150],
[-0.1605, 0.1506, -0.0074]],
[[ 0.0851, -0.1732, -0.1777],
[ 0.0539, -0.0500, -0.1231],
[ 0.1654, 0.0342, -0.1904]]],
[[[ 0.0476, 0.0284, 0.1212],
[-0.1603, -0.1924, 0.0144],
[ 0.0076, -0.0928, -0.1645]],
[[ 0.0215, 0.1845, -0.1034],
[ 0.1574, -0.1577, -0.0438],
[-0.1360, -0.0601, -0.1693]],
[[-0.0720, 0.0619, 0.1405],
[ 0.0699, -0.1288, 0.0041],
[-0.0381, -0.1697, -0.1568]]],
[[[-0.1599, 0.1231, -0.1034],
[-0.0314, 0.0105, -0.1449],
[-0.0172, -0.0781, 0.0839]],
[[-0.0676, 0.1185, -0.1559],
[-0.1053, -0.1306, 0.1820],
[ 0.1584, -0.1370, 0.1828]],
[[ 0.0658, 0.1412, -0.0537],
[-0.1230, -0.1411, -0.0011],
[-0.1318, -0.0458, 0.1838]]],
[[[-0.0268, 0.1747, -0.1037],
[ 0.0515, -0.0228, -0.1024],
[-0.1543, -0.0643, -0.0100]],
[[-0.1572, -0.1530, 0.0026],
[ 0.1463, -0.1233, 0.0470],
[-0.1595, -0.1108, -0.0654]],
[[-0.0521, -0.0094, 0.1544],
[-0.0505, -0.0332, 0.0048],
[ 0.0735, 0.1350, 0.0690]]],
[[[ 0.0025, 0.0724, 0.0930],
[-0.1885, 0.0475, 0.1100],
[-0.1622, 0.0087, -0.0030]],
[[ 0.1032, -0.1425, -0.0620],
[ 0.1515, -0.0736, -0.1888],
[-0.1246, 0.1424, -0.0491]],
[[ 0.1759, -0.1616, 0.1198],
[-0.1103, 0.1032, 0.1727],
[-0.0601, 0.1635, 0.0034]]],
[[[ 0.0301, 0.1517, 0.0657],
[-0.1368, -0.1165, 0.1193],
[-0.0962, 0.1451, 0.1099]],
[[ 0.1646, -0.1860, -0.1187],
[-0.1367, -0.0911, 0.1337],
[-0.0926, -0.0524, -0.0672]],
[[-0.1509, -0.1231, -0.0855],
[ 0.1808, -0.0713, 0.0410],
[-0.0621, -0.0506, 0.1871]]],
[[[ 0.0888, -0.0874, -0.0826],
[ 0.0416, -0.0961, 0.0603],
[ 0.1455, 0.0050, 0.0318]],
[[-0.1633, 0.0070, -0.1537],
[-0.0109, 0.1602, -0.0463],
[-0.0423, -0.0147, -0.1045]],
[[ 0.1640, -0.0997, -0.1662],
[-0.1074, 0.1549, -0.1905],
[-0.1708, 0.1624, 0.0219]]],
[[[ 0.0824, -0.1376, 0.1086],
[ 0.0836, 0.0135, 0.0351],
[-0.1518, 0.0784, -0.1708]],
[[-0.1636, -0.1571, 0.1032],
[-0.1152, 0.0274, -0.1022],
[-0.0956, -0.1606, -0.1615]],
[[ 0.1307, 0.0419, 0.1924],
[-0.0599, -0.1296, -0.0448],
[ 0.0363, 0.0377, -0.0460]]],
[[[-0.1685, -0.1277, -0.0465],
[ 0.0922, -0.1011, 0.0742],
[ 0.0053, -0.1456, 0.0135]],
[[ 0.1341, 0.0131, 0.1281],
[-0.1020, 0.1069, -0.0631],
[-0.0439, -0.1189, -0.1822]],
[[ 0.1624, -0.1253, 0.0302],
[ 0.0709, 0.0767, 0.1453],
[ 0.0203, 0.1603, -0.1720]]],
[[[-0.1550, 0.1513, -0.1003],
[ 0.0370, 0.0367, -0.0233],
[ 0.0916, -0.0871, 0.1579]],
[[-0.1900, 0.0314, 0.0865],
[-0.0197, 0.0296, -0.0048],
[ 0.0846, 0.1543, -0.0770]],
[[-0.0016, -0.0978, 0.1826],
[-0.0477, 0.0689, 0.1079],
[ 0.0400, 0.0880, 0.1674]]],
[[[ 0.0145, -0.0447, -0.1742],
[ 0.0394, 0.0127, -0.1172],
[ 0.1330, -0.1207, 0.0326]],
[[-0.0155, -0.1602, 0.0023],
[ 0.0789, 0.1648, 0.1781],
[-0.1468, -0.0481, -0.1260]],
[[-0.0139, 0.0848, -0.0536],
[-0.1581, 0.1130, 0.0717],
[ 0.0275, -0.0006, -0.0049]]],
[[[-0.0199, 0.0032, -0.1246],
[ 0.0479, 0.1418, -0.1295],
[-0.1646, -0.1139, -0.1018]],
[[ 0.1475, 0.1413, -0.0354],
[ 0.0612, -0.1652, 0.0801],
[-0.1306, -0.0165, 0.1733]],
[[ 0.1527, 0.0911, -0.1906],
[-0.1152, 0.1737, 0.0436],
[-0.0213, -0.0314, -0.0319]]],
[[[-0.0003, -0.0546, -0.1255],
[ 0.0914, -0.1414, 0.0542],
[ 0.1139, 0.0132, 0.0815]],
[[-0.0042, 0.0541, 0.1456],
[ 0.0509, -0.0790, 0.0272],
[ 0.1419, 0.0992, -0.1448]],
[[ 0.0496, 0.0013, 0.0838],
[-0.0662, 0.0315, -0.1168],
[-0.0069, -0.1503, 0.0729]]],
[[[ 0.1866, 0.1329, -0.0560],
[ 0.0026, 0.1533, 0.0326],
[-0.1161, -0.0323, 0.0053]],
[[-0.0243, -0.1823, -0.1657],
[-0.0107, -0.0832, 0.0029],
[ 0.0981, 0.1241, -0.1788]],
[[-0.0400, -0.0577, -0.0757],
[-0.0584, 0.0176, -0.1019],
[-0.1828, 0.1589, -0.0312]]],
[[[-0.1083, -0.1236, -0.0904],
[-0.1575, 0.0157, 0.0552],
[-0.0839, 0.1704, -0.1457]],
[[-0.1648, -0.0270, -0.0489],
[-0.1122, -0.0288, -0.0073],
[-0.1443, -0.1712, 0.0100]],
[[-0.1142, -0.1552, 0.1568],
[ 0.0743, -0.1108, -0.0643],
[-0.0394, -0.1345, 0.0992]]],
[[[-0.1591, 0.0942, -0.1035],
[-0.0781, 0.0725, -0.0888],
[ 0.0959, 0.0213, 0.1222]],
[[ 0.1202, -0.0217, -0.0955],
[-0.1748, -0.1133, -0.0704],
[-0.0670, -0.1401, 0.1553]],
[[ 0.0053, -0.0871, -0.0239],
[ 0.0961, -0.0547, 0.1741],
[-0.0570, 0.0477, 0.1853]]],
[[[-0.1115, -0.0183, -0.1302],
[ 0.1435, -0.0238, -0.0048],
[ 0.1862, -0.1837, 0.1711]],
[[ 0.1375, -0.1798, 0.0818],
[-0.0792, 0.0820, 0.1373],
[ 0.1849, 0.0672, -0.1822]],
[[ 0.1868, -0.0356, 0.0726],
[-0.1523, -0.1130, 0.1506],
[-0.1046, 0.0178, 0.0990]]],
[[[ 0.1321, -0.1641, 0.0411],
[ 0.0526, 0.0393, 0.0918],
[-0.1345, 0.0750, 0.0859]],
[[-0.0985, 0.1466, 0.1349],
[-0.1461, -0.1742, 0.0941],
[-0.1502, -0.1813, 0.0864]],
[[-0.1039, 0.1179, 0.1499],
[-0.0366, -0.0120, 0.0951],
[ 0.0087, 0.1212, -0.0183]]],
[[[-0.1375, 0.0765, -0.0072],
[-0.0041, 0.0379, -0.0243],
[-0.1495, 0.1601, 0.1575]],
[[-0.0454, 0.1642, 0.0720],
[-0.0533, 0.0150, 0.0039],
[ 0.0194, 0.0113, -0.1194]],
[[ 0.0527, -0.0886, 0.0359],
[ 0.1595, 0.0526, -0.0048],
[-0.1790, -0.0458, -0.0324]]],
[[[-0.1638, 0.0942, 0.0686],
[-0.1082, -0.0675, 0.1892],
[-0.1347, -0.1247, 0.0739]],
[[ 0.0595, 0.1504, -0.1657],
[ 0.0733, 0.0529, -0.1599],
[ 0.0171, -0.1127, -0.0259]],
[[-0.0092, 0.0193, 0.1176],
[-0.1183, 0.0101, 0.1011],
[ 0.0648, -0.1897, 0.0782]]],
[[[ 0.0098, -0.1161, -0.0802],
[-0.1821, 0.0221, -0.1754],
[-0.1218, 0.0525, -0.0480]],
[[ 0.0770, 0.0477, 0.1514],
[ 0.0374, -0.1075, -0.1026],
[-0.0581, -0.1011, 0.1241]],
[[-0.0567, -0.0163, 0.0374],
[-0.1739, -0.0579, 0.0704],
[ 0.1817, 0.1561, 0.1677]]],
[[[-0.0569, -0.0763, 0.0044],
[-0.1133, 0.0813, 0.1477],
[ 0.0836, 0.0483, -0.1800]],
[[ 0.1343, -0.1590, 0.1177],
[ 0.1071, -0.1647, -0.0646],
[ 0.1578, -0.1261, 0.0243]],
[[-0.0424, -0.0241, -0.0988],
[ 0.0023, 0.0029, -0.0291],
[ 0.0415, -0.0557, 0.1427]]],
[[[-0.1028, 0.1054, 0.1658],
[-0.0357, 0.1579, 0.1237],
[ 0.0368, 0.0532, -0.1043]],
[[-0.0369, 0.0575, -0.1023],
[ 0.0635, 0.1015, 0.1112],
[-0.1235, 0.0467, 0.0908]],
[[ 0.1380, 0.0633, 0.1087],
[-0.1360, 0.0422, -0.1524],
[ 0.0819, 0.0918, -0.1624]]],
[[[ 0.1584, -0.0218, -0.0236],
[ 0.1878, -0.1289, 0.1343],
[ 0.0351, 0.1225, -0.1460]],
[[ 0.0690, -0.1439, 0.0056],
[ 0.0272, -0.0058, 0.0125],
[ 0.0868, -0.0684, -0.0884]],
[[ 0.1045, 0.0583, -0.0870],
[ 0.0600, -0.0732, -0.1695],
[ 0.0953, 0.0246, 0.1245]]]])),
('conv1.bias',
tensor([-1.8698e-01, -7.9379e-06, -1.9277e-02, 5.2182e-02, 7.5716e-02,
-3.3830e-03, -9.6565e-02, 1.0241e-01, -8.2457e-02, -1.6224e-01,
1.2980e-01, -8.2256e-02, -7.4655e-02, -3.7980e-02, 8.3407e-02,
-1.4880e-01, 4.8939e-02, 2.7506e-02, 5.8676e-03, -1.5813e-01,
-6.2464e-04, 1.0359e-02, -1.5525e-01, 7.9100e-02, 1.6850e-02,
-1.3809e-01, -6.3393e-02, -5.3843e-02, -1.5219e-02, -1.7365e-01,
1.7249e-01, -1.1165e-01])),
('conv2.weight',
tensor([[[[ 4.3750e-02, 4.5533e-02, -2.9410e-02],
[-4.1395e-02, 5.0397e-04, -1.3265e-02],
[-4.9851e-02, -1.0518e-02, 5.7710e-02]],
[[-5.6332e-02, -4.7168e-03, -4.4627e-02],
[ 5.3513e-03, -4.0824e-02, 1.8281e-02],
[ 5.0677e-02, -1.5295e-02, -6.1751e-03]],
[[-2.4984e-02, 1.2784e-02, -4.7123e-02],
[-4.3238e-02, 4.7349e-02, -1.5219e-02],
[-3.6073e-02, 4.1506e-02, -3.5337e-02]],
...,
[[ 5.6048e-02, 1.9129e-03, -1.7200e-02],
[ 5.8869e-02, -5.1520e-02, -5.3205e-02],
[-1.3903e-02, 5.1790e-02, 2.2585e-02]],
[[ 1.1835e-02, -4.9313e-02, -3.1838e-02],
[ 7.6813e-03, 4.2715e-02, -5.7404e-02],
[-4.1474e-02, -2.3128e-02, -4.7935e-02]],
[[-2.1860e-02, -2.1817e-02, -3.2578e-02],
[ 3.1317e-02, 3.3435e-02, 3.1837e-02],
[-2.2399e-03, 3.1600e-02, 4.0183e-02]]],
[[[ 1.9610e-04, 5.3780e-02, -4.5810e-02],
[ 4.0340e-02, 1.4904e-02, -1.5597e-02],
[-4.6080e-02, 5.0714e-02, -5.7445e-03]],
[[-3.5281e-02, 3.3011e-02, 4.3343e-02],
[-4.6263e-02, -5.6184e-02, 5.1245e-03],
[ 3.6015e-02, -3.3152e-02, 4.6629e-03]],
[[ 1.7650e-03, -4.2336e-02, 4.3744e-02],
[ 2.1655e-02, 5.3759e-02, 1.3719e-03],
[ 4.2005e-02, 5.3998e-02, 1.9009e-02]],
...,
[[ 8.9786e-03, -1.8645e-02, -1.3587e-02],
[-5.4167e-02, 1.2335e-02, -3.0384e-02],
[-4.8722e-03, -3.7296e-02, -2.6446e-02]],
[[ 1.7580e-02, 3.8462e-02, -5.0269e-02],
[ 2.6601e-03, -1.1462e-02, 4.7459e-02],
[-2.8888e-02, 3.4436e-02, -4.9943e-02]],
[[-5.0206e-02, -5.6025e-02, -3.6346e-02],
[-2.4407e-02, 5.3721e-02, -5.4920e-02],
[ 5.1835e-02, -3.2396e-02, 3.2373e-02]]],
[[[-2.7759e-02, -5.4737e-02, -1.1689e-02],
[ 3.9462e-02, 2.8649e-02, 5.1776e-02],
[ 2.4253e-02, -2.8318e-02, 2.7402e-02]],
[[ 1.3045e-02, -1.0456e-02, 2.0426e-02],
[ 2.1949e-02, 4.6817e-02, -5.6093e-02],
[ 2.7145e-02, -5.5441e-02, -2.0719e-02]],
[[ 4.4704e-02, -2.4099e-02, -4.7185e-02],
[-4.3257e-02, -3.3058e-02, -8.6451e-03],
[-3.7283e-02, -3.4569e-02, -7.1049e-03]],
...,
[[-4.1559e-02, -2.9240e-02, 2.7197e-03],
[ 2.0770e-02, 5.4479e-02, -4.4845e-02],
[-1.1641e-02, -2.9814e-02, -2.4419e-02]],
[[-1.5743e-02, 1.0854e-02, 3.0878e-02],
[ 2.2739e-02, 3.2999e-02, -1.1902e-02],
[-3.4837e-02, 1.5305e-02, -8.7552e-03]],
[[-2.2882e-02, 9.4639e-03, 5.1878e-03],
[-2.6344e-02, 2.9063e-02, -1.9337e-02],
[-3.4314e-02, 1.5313e-02, 4.1524e-02]]],
...,
[[[ 9.7454e-03, -3.2903e-03, 1.0696e-02],
[-4.0918e-02, 1.6352e-02, 1.4646e-02],
[ 1.2516e-02, -2.1804e-02, -2.5489e-02]],
[[-1.6083e-02, 2.5374e-02, 3.1458e-02],
[-3.1497e-02, -1.9513e-02, -2.1223e-02],
[ 6.6286e-03, 1.6538e-02, -4.8944e-02]],
[[ 2.4808e-02, -2.9520e-02, -4.8227e-02],
[ 1.7325e-03, -4.7443e-02, 2.3087e-03],
[-1.0008e-02, -2.0313e-02, 2.9944e-02]],
...,
[[ 5.5781e-02, -2.0898e-02, -3.1487e-03],
[-1.6931e-02, 4.5279e-04, -1.5024e-02],
[-5.5885e-02, 2.7140e-02, -8.5434e-03]],
[[ 1.3970e-02, -3.3131e-02, 4.3112e-02],
[-3.4956e-02, -5.0144e-02, -1.6391e-02],
[-9.1003e-03, -2.0204e-02, -1.0226e-03]],
[[-4.0053e-02, -5.0194e-02, 5.0405e-02],
[ 5.4107e-02, 4.2185e-02, 3.4359e-02],
[ 1.6749e-02, -1.4102e-02, 5.0171e-02]]],
[[[ 2.8229e-02, 5.6586e-02, -3.9617e-03],
[ 2.9538e-02, -1.2507e-02, -2.5516e-02],
[-1.5193e-02, -2.9232e-02, -2.0701e-02]],
[[-5.8773e-02, 3.3015e-02, -9.4146e-03],
[ 2.8957e-02, 5.8666e-02, 2.8679e-02],
[ 1.5249e-02, -1.2246e-03, 1.2230e-03]],
[[ 2.6050e-02, -4.6042e-02, -3.4895e-03],
[ 4.9529e-02, 6.6835e-03, -4.1808e-02],
[-8.6450e-03, -4.8510e-02, -2.4011e-02]],
...,
[[-5.1427e-02, 2.4528e-02, -5.4878e-02],
[-1.8610e-02, 5.4365e-02, 3.5053e-03],
[-3.9922e-02, 4.2510e-02, -5.7261e-02]],
[[ 4.1938e-02, -4.2039e-02, -1.2487e-02],
[-1.4090e-02, -3.7895e-02, 1.4394e-02],
[ 2.2555e-02, -2.7264e-02, 5.6102e-02]],
[[ 1.5770e-02, 5.4672e-02, -2.4056e-02],
[ 5.2089e-02, -2.8859e-02, -2.6499e-03],
[-5.2122e-02, -3.7436e-02, 3.9897e-02]]],
[[[ 2.7888e-02, 2.9241e-02, -1.9488e-02],
[ 2.8928e-02, 5.3312e-02, -2.9810e-02],
[-8.5104e-03, 5.7751e-02, -8.1857e-03]],
[[ 4.2649e-02, 3.3158e-03, 4.2879e-02],
[ 7.7893e-03, -3.2879e-02, 2.7630e-02],
[ 5.4706e-03, 4.8019e-02, 1.2420e-02]],
[[-4.2004e-02, -4.2790e-02, 2.4634e-02],
[-5.4641e-02, 3.4600e-02, 2.9071e-03],
[ 2.6470e-02, 4.6701e-02, 3.7158e-02]],
...,
[[-1.7641e-02, -2.1205e-02, -5.1504e-02],
[-7.4737e-03, 5.5061e-02, -2.6397e-03],
[-4.4653e-02, -3.6719e-02, 3.4420e-06]],
[[ 1.6525e-02, 1.7280e-02, 5.4554e-03],
[ 4.0098e-02, 2.7571e-02, -4.4965e-02],
[ 6.1493e-03, -5.7754e-02, 1.0513e-02]],
[[-5.7615e-02, 3.2921e-02, -1.5900e-02],
[ 2.0081e-02, 5.4590e-02, 1.1296e-02],
[-4.5015e-02, 1.1341e-03, 2.6447e-02]]]])),
('conv2.bias',
tensor([-0.0538, -0.0320, -0.0153, 0.0558, 0.0254, 0.0281, -0.0148, 0.0060,
-0.0283, -0.0062, 0.0437, -0.0064, 0.0341, 0.0233, -0.0201, 0.0391,
0.0243, 0.0071, 0.0125, -0.0138, -0.0377, -0.0169, -0.0475, -0.0004,
-0.0105, -0.0502, 0.0241, 0.0090, 0.0069, -0.0315, -0.0192, 0.0204])),
('conv3.weight',
tensor([[[[-0.0215, -0.0208, -0.0272],
[-0.0493, -0.0117, -0.0285],
[ 0.0515, -0.0041, 0.0126]],
[[ 0.0299, -0.0301, 0.0552],
[ 0.0450, 0.0449, -0.0583],
[-0.0452, -0.0480, -0.0275]],
[[-0.0262, -0.0338, 0.0505],
[ 0.0146, -0.0364, -0.0044],
[-0.0102, -0.0051, 0.0017]],
...,
[[-0.0367, -0.0468, -0.0586],
[ 0.0126, 0.0037, 0.0191],
[-0.0153, 0.0048, -0.0160]],
[[-0.0050, 0.0364, 0.0582],
[ 0.0093, -0.0268, -0.0355],
[-0.0125, 0.0500, 0.0009]],
[[ 0.0237, -0.0211, -0.0130],
[-0.0489, 0.0118, 0.0387],
[-0.0006, 0.0301, 0.0283]]],
[[[-0.0391, -0.0464, -0.0158],
[ 0.0201, -0.0054, 0.0422],
[ 0.0085, -0.0474, -0.0251]],
[[-0.0346, 0.0536, -0.0391],
[ 0.0244, -0.0263, -0.0073],
[ 0.0076, 0.0160, 0.0044]],
[[-0.0128, 0.0146, -0.0381],
[-0.0277, -0.0142, 0.0226],
[ 0.0190, 0.0326, -0.0219]],
...,
[[-0.0217, 0.0129, 0.0558],
[ 0.0164, -0.0292, -0.0467],
[-0.0296, 0.0205, -0.0300]],
[[ 0.0254, -0.0151, -0.0583],
[ 0.0111, -0.0469, -0.0300],
[-0.0462, 0.0293, -0.0351]],
[[ 0.0401, -0.0251, 0.0160],
[-0.0160, -0.0195, -0.0065],
[-0.0519, 0.0351, 0.0357]]],
[[[ 0.0544, -0.0209, -0.0454],
[ 0.0287, -0.0205, -0.0294],
[-0.0195, -0.0235, -0.0378]],
[[-0.0294, -0.0380, 0.0301],
[ 0.0360, 0.0367, 0.0458],
[-0.0189, -0.0017, 0.0145]],
[[-0.0297, 0.0567, 0.0276],
[ 0.0298, 0.0383, 0.0227],
[ 0.0262, 0.0063, 0.0131]],
...,
[[ 0.0002, 0.0432, -0.0247],
[ 0.0068, -0.0298, -0.0484],
[-0.0361, -0.0014, 0.0444]],
[[-0.0184, -0.0201, -0.0163],
[-0.0466, 0.0255, -0.0244],
[ 0.0283, 0.0149, -0.0588]],
[[ 0.0323, 0.0392, -0.0254],
[ 0.0560, 0.0137, -0.0401],
[-0.0236, 0.0589, 0.0448]]],
...,
[[[ 0.0213, 0.0204, 0.0574],
[-0.0276, -0.0196, 0.0117],
[ 0.0569, -0.0158, -0.0502]],
[[ 0.0452, -0.0038, 0.0502],
[ 0.0428, -0.0398, -0.0486],
[ 0.0130, 0.0563, 0.0576]],
[[ 0.0484, -0.0535, 0.0048],
[ 0.0268, -0.0290, -0.0390],
[ 0.0189, -0.0194, -0.0588]],
...,
[[ 0.0163, -0.0113, -0.0520],
[ 0.0288, -0.0547, -0.0544],
[ 0.0442, 0.0376, 0.0566]],
[[-0.0343, 0.0569, 0.0438],
[-0.0403, -0.0372, -0.0532],
[ 0.0322, 0.0126, 0.0423]],
[[ 0.0577, 0.0136, -0.0480],
[-0.0293, -0.0348, 0.0342],
[-0.0510, -0.0078, -0.0042]]],
[[[-0.0243, 0.0406, 0.0537],
[ 0.0209, -0.0059, -0.0487],
[-0.0425, 0.0339, 0.0444]],
[[ 0.0465, -0.0467, 0.0461],
[-0.0389, 0.0144, -0.0502],
[ 0.0274, 0.0552, 0.0356]],
[[-0.0289, 0.0474, -0.0217],
[ 0.0472, -0.0135, 0.0164],
[-0.0165, -0.0049, -0.0475]],
...,
[[-0.0377, 0.0267, 0.0367],
[ 0.0111, 0.0114, -0.0329],
[ 0.0031, -0.0223, -0.0280]],
[[-0.0500, -0.0529, 0.0116],
[ 0.0483, 0.0121, -0.0149],
[ 0.0328, 0.0201, 0.0402]],
[[ 0.0463, 0.0157, -0.0332],
[ 0.0150, 0.0479, 0.0461],
[ 0.0275, 0.0506, -0.0466]]],
[[[-0.0478, 0.0274, 0.0500],
[-0.0394, 0.0032, -0.0496],
[ 0.0381, 0.0391, 0.0330]],
[[ 0.0184, -0.0560, -0.0345],
[-0.0459, -0.0215, 0.0452],
[ 0.0049, 0.0537, 0.0544]],
[[-0.0413, -0.0084, 0.0585],
[ 0.0338, -0.0067, -0.0113],
[-0.0187, -0.0234, -0.0525]],
...,
[[-0.0389, 0.0325, -0.0538],
[ 0.0118, 0.0509, 0.0352],
[-0.0351, -0.0341, -0.0506]],
[[ 0.0136, -0.0349, 0.0082],
[ 0.0358, -0.0211, 0.0537],
[-0.0183, 0.0390, -0.0267]],
[[-0.0219, -0.0145, -0.0351],
[ 0.0556, 0.0033, -0.0030],
[ 0.0075, -0.0425, -0.0365]]]])),
('conv3.bias',
tensor([ 0.0304, 0.0099, -0.0004, 0.0334, 0.0301, 0.0491, 0.0530, -0.0432,
-0.0127, -0.0549, -0.0419, 0.0159, -0.0284, 0.0295, -0.0148, 0.0275,
0.0554, -0.0056, 0.0389, -0.0264, -0.0383, 0.0126, 0.0320, 0.0312,
0.0018, 0.0560, -0.0329, -0.0155, -0.0391, -0.0539, -0.0571, -0.0254])),
('conv4.weight',
tensor([[[[-3.8911e-02, -3.4220e-02, 4.2567e-03],
[-4.5321e-02, -5.2531e-02, -8.1722e-03],
[-2.2638e-02, 4.4213e-02, 5.6989e-02]],
[[ 1.8417e-03, -1.4453e-02, 4.9892e-02],
[ 5.7762e-02, 9.6610e-03, -3.9509e-02],
[ 3.3795e-02, 5.0409e-02, -5.8834e-02]],
[[ 4.6645e-03, -1.6286e-02, 4.3410e-02],
[-3.4043e-02, -2.2207e-02, 4.0967e-02],
[ 5.3004e-02, -2.2756e-02, -6.7993e-03]],
...,
[[ 1.1741e-02, -5.5062e-02, -3.3625e-02],
[-9.2320e-03, -3.3036e-02, 3.3196e-02],
[ 2.3940e-02, 2.0442e-02, 1.4183e-02]],
[[-2.7139e-02, -3.4129e-03, -1.0090e-02],
[ 1.3073e-02, -1.6998e-02, -4.7540e-02],
[-2.5758e-02, -1.9363e-02, 2.1905e-02]],
[[ 5.7593e-02, 1.5013e-02, -5.7894e-02],
[ 5.7964e-02, -2.3412e-02, 2.6955e-02],
[-3.9814e-02, -4.6015e-02, -5.3240e-02]]],
[[[ 5.8211e-02, -4.1118e-02, 2.7704e-02],
[ 5.7198e-02, 8.4165e-03, -5.1708e-02],
[ 3.1423e-02, 1.5026e-02, 3.5922e-02]],
[[ 8.8858e-03, 3.2818e-02, 5.4486e-02],
[-2.6636e-02, 2.2604e-02, 2.9531e-02],
[-1.0327e-03, 2.2348e-03, 2.4103e-02]],
[[ 3.8683e-02, -5.0057e-03, 5.0224e-02],
[ 3.5756e-02, -2.7295e-02, -2.2854e-02],
[-3.2043e-02, -3.2415e-02, 4.1034e-02]],
...,
[[-1.9791e-02, 4.3243e-02, -3.5177e-02],
[ 2.4554e-02, 4.2845e-03, 4.8009e-02],
[ 2.4897e-03, 3.9550e-02, -3.0833e-02]],
[[ 4.5807e-02, 6.5845e-03, 9.3362e-05],
[-1.9411e-02, -2.9161e-02, 5.0828e-02],
[ 1.2028e-03, 2.1260e-02, -4.3710e-03]],
[[-4.8702e-02, -2.0571e-02, -3.5162e-02],
[-2.5856e-02, -2.6619e-02, 8.1867e-03],
[-2.7671e-02, -9.6651e-03, -5.3279e-02]]],
[[[-5.6432e-02, 2.3722e-02, -2.1750e-02],
[-4.8247e-03, -2.1226e-02, -1.0829e-02],
[-1.9523e-02, -1.8187e-02, 2.2772e-03]],
[[-1.0907e-02, 3.3984e-02, -1.2088e-02],
[-1.8657e-02, -4.8297e-02, 2.3614e-02],
[-3.9670e-02, 6.1733e-03, -2.9168e-02]],
[[-4.2112e-02, 2.8203e-02, -1.7385e-03],
[-2.5282e-02, -9.4592e-05, 6.5093e-03],
[-4.1745e-02, 4.3988e-03, -1.1622e-02]],
...,
[[ 4.2991e-02, 1.6912e-02, -4.3689e-02],
[ 5.1871e-02, 4.8566e-02, 3.6205e-02],
[-3.2016e-02, -1.3596e-02, -2.7950e-02]],
[[-2.8307e-02, -4.0278e-02, 1.5087e-02],
[ 4.0443e-02, -3.5727e-02, 3.7196e-02],
[-1.4194e-02, -2.7319e-02, -5.1305e-02]],
[[-2.9962e-02, 2.4693e-02, -4.4912e-02],
[ 5.5890e-03, 4.6671e-02, 3.3599e-02],
[-3.9949e-02, -4.4716e-02, 2.2345e-02]]],
...,
[[[ 3.1920e-02, -4.9932e-02, -1.0871e-02],
[-3.7500e-02, 4.1638e-02, -1.3246e-02],
[ 1.6447e-02, -5.6741e-02, -3.7524e-02]],
[[ 3.3903e-02, -3.1321e-02, -4.4877e-02],
[-2.2473e-02, -2.4225e-02, 4.5838e-02],
[-2.0069e-02, 3.8338e-02, 5.8010e-02]],
[[ 1.7602e-02, -5.2530e-02, 4.9331e-02],
[ 2.4509e-02, 2.3943e-02, -2.1774e-02],
[-5.7154e-02, 5.7090e-02, 3.7531e-02]],
...,
[[ 2.8630e-02, -4.8644e-04, -5.3822e-02],
[-1.1102e-02, 4.8524e-02, -2.7142e-02],
[-5.3463e-02, 3.5607e-02, -1.1110e-02]],
[[ 4.7891e-02, -3.4098e-02, 3.6984e-02],
[ 3.9062e-02, -5.1119e-03, -3.3252e-02],
[ 5.5029e-02, 3.1092e-03, -5.9391e-03]],
[[ 9.6775e-03, 2.2903e-02, -1.5971e-02],
[-2.6969e-02, 8.5069e-04, -2.5744e-02],
[ 2.7311e-03, -2.2119e-02, 2.4367e-03]]],
[[[ 1.5206e-02, -5.3504e-02, -1.8013e-02],
[-3.0064e-02, 3.2887e-02, 1.7612e-02],
[-4.2775e-02, -2.7335e-02, 5.1532e-02]],
[[ 4.3325e-02, 1.2909e-03, 5.6831e-02],
[ 4.8283e-03, -4.4274e-02, 1.7624e-02],
[-7.2574e-03, 1.3743e-02, -5.1502e-02]],
[[-1.4299e-02, -3.0024e-02, 4.9578e-02],
[-4.6143e-02, -3.0686e-02, -5.0727e-02],
[ 5.4766e-02, -1.4012e-02, 3.3267e-02]],
...,
[[ 5.2773e-02, 4.0294e-02, -2.6113e-02],
[-2.5069e-02, 4.3956e-02, -4.7841e-02],
[ 3.0924e-02, 1.5174e-02, -4.8323e-02]],
[[-2.0325e-02, 3.2666e-02, 2.5174e-02],
[ 5.3775e-03, -3.2712e-02, -5.2251e-02],
[-2.7426e-02, -4.5502e-04, -3.2174e-02]],
[[ 8.5780e-03, -5.2099e-02, 5.7285e-02],
[-5.0897e-02, -5.3995e-03, 4.2719e-02],
[-5.2257e-02, -7.9682e-03, 2.1848e-02]]],
[[[-2.8660e-02, 1.2392e-02, 2.9940e-02],
[-1.1170e-02, -1.7499e-02, -5.3951e-02],
[ 4.9738e-02, 2.7240e-02, 2.9588e-03]],
[[ 3.7728e-02, -3.1084e-02, 4.6459e-02],
[-1.4961e-02, 1.6951e-02, -1.3976e-02],
[ 2.5768e-02, -3.8991e-02, -2.8738e-02]],
[[ 1.6084e-02, -7.3174e-03, -4.5839e-02],
[-3.6029e-02, 1.5303e-02, -5.4380e-02],
[ 2.1913e-02, -4.4792e-02, 4.7973e-02]],
...,
[[ 5.4945e-02, -4.5541e-02, -1.5806e-02],
[-4.5216e-02, -2.0338e-02, 3.3373e-02],
[-1.8431e-02, 4.3953e-02, -4.8196e-02]],
[[-3.6129e-02, 5.7705e-02, -1.2229e-02],
[-5.2801e-02, 7.0930e-03, -2.6721e-02],
[ 3.6720e-02, 7.5540e-04, 5.5401e-02]],
[[-4.0386e-02, 1.6714e-02, 2.9246e-02],
[ 4.7033e-02, -8.7923e-03, 5.1267e-02],
[-3.1211e-02, -2.7036e-02, 4.0346e-02]]]])),
('conv4.bias',
tensor([-0.0368, -0.0535, -0.0568, -0.0364, 0.0167, 0.0255, 0.0031, 0.0562,
-0.0575, 0.0470, -0.0128, 0.0159, -0.0387, 0.0517, -0.0508, -0.0104,
0.0133, -0.0265, 0.0290, 0.0147, -0.0405, 0.0074, 0.0434, -0.0436,
-0.0295, 0.0417, 0.0324, 0.0316, 0.0143, -0.0312, 0.0463, 0.0441])),
('fc1.weight',
tensor([[-0.0206, -0.0161, -0.0053, ..., -0.0257, -0.0314, -0.0105],
[-0.0229, 0.0030, 0.0239, ..., 0.0199, -0.0279, 0.0155]])),
('fc1.bias', tensor([-0.0333, -0.0126]))])
%% Cell type:code id: tags:
```
``` python
m2 = m1.state_dict()
m2['fc1.bias'] = torch.Tensor([-0.5, -1.2])
m1.load_state_dict(m2)
m1.state_dict()
```
%% Output
OrderedDict([('conv1.weight',
tensor([[[[-0.1261, 0.1833, -0.1406],
[ 0.1324, -0.0685, 0.0938],
[ 0.0432, 0.1814, -0.0541]],
[[-0.1776, -0.1839, -0.0111],
[ 0.0888, 0.0888, -0.1344],
[-0.1838, 0.1737, 0.1584]],
[[ 0.0417, 0.1064, -0.0156],
[ 0.0667, 0.0856, -0.1746],
[ 0.0412, 0.1620, 0.0125]]],
[[[-0.0530, -0.1273, -0.0797],
[ 0.0422, 0.1135, 0.0475],
[-0.0244, 0.1691, -0.1383]],
[[ 0.0822, -0.1317, -0.1692],
[ 0.1373, 0.1388, 0.0103],
[-0.0481, 0.1105, 0.0631]],
[[-0.0352, 0.1259, -0.0530],
[-0.1394, -0.0281, 0.1844],
[ 0.0082, 0.1187, 0.0211]]],
[[[ 0.0987, 0.0788, -0.1126],
[ 0.1769, 0.0763, -0.1767],
[-0.0570, 0.1156, 0.1770]],
[[ 0.0643, -0.0024, -0.0625],
[ 0.0819, 0.0140, -0.1882],
[ 0.1325, -0.0632, -0.0202]],
[[ 0.0053, 0.1042, -0.0058],
[-0.1082, -0.1753, 0.1762],
[-0.0501, 0.1166, 0.0561]]],
[[[ 0.0358, -0.0685, -0.1278],
[ 0.0029, -0.1107, 0.1169],
[-0.1408, 0.1293, 0.1142]],
[[-0.0814, 0.0470, 0.0188],
[ 0.1538, 0.0137, 0.1128],
[-0.1597, 0.1432, 0.1370]],
[[ 0.1425, 0.1769, -0.0037],
[-0.1080, -0.0805, -0.0195],
[-0.1335, -0.1666, 0.1399]]],
[[[ 0.1117, 0.1918, -0.1666],
[-0.1392, 0.0086, 0.0172],
[-0.0721, -0.1711, 0.0344]],
[[ 0.1820, -0.0537, -0.0974],
[ 0.0366, -0.0710, 0.1273],
[ 0.1132, -0.1594, 0.0878]],
[[-0.0874, -0.0401, 0.1827],
[-0.0301, 0.1205, -0.0396],
[-0.1143, -0.1007, 0.1561]]],
[[[ 0.1522, -0.0012, -0.1785],
[-0.1833, -0.1828, -0.1643],
[-0.1765, -0.1757, -0.0608]],
[[-0.0684, 0.0521, 0.1137],
[-0.0028, 0.0616, 0.0758],
[-0.1736, 0.0667, 0.1229]],
[[ 0.1298, -0.1848, -0.1570],
[-0.1052, -0.1172, -0.1223],
[-0.1389, -0.0095, -0.0410]]],
[[[ 0.0213, -0.0975, 0.0964],
[ 0.0535, -0.0775, 0.0790],
[-0.1796, -0.1468, 0.1036]],
[[-0.0403, 0.0646, -0.0932],
[ 0.1779, -0.1616, 0.0644],
[-0.0508, -0.1158, -0.0592]],
[[-0.1644, -0.1327, 0.0817],
[ 0.0320, -0.0213, -0.0946],
[-0.1106, 0.1463, -0.1642]]],
[[[-0.0985, -0.1160, -0.0915],
[ 0.1857, 0.0806, 0.1761],
[-0.0817, 0.1095, 0.0896]],
[[-0.0660, -0.1680, 0.1833],
[ 0.0611, 0.0077, -0.0848],
[-0.1516, 0.1737, 0.0484]],
[[ 0.1434, -0.0732, -0.0904],
[ 0.0962, 0.1783, 0.0192],
[ 0.0915, 0.0006, 0.0334]]],
[[[-0.0047, 0.1807, -0.1798],
[-0.0164, 0.1119, -0.0805],
[ 0.1855, -0.0681, -0.0187]],
[[-0.0069, 0.0491, -0.1868],
[-0.1609, -0.0316, 0.0150],
[-0.1605, 0.1506, -0.0074]],
[[ 0.0851, -0.1732, -0.1777],
[ 0.0539, -0.0500, -0.1231],
[ 0.1654, 0.0342, -0.1904]]],
[[[ 0.0476, 0.0284, 0.1212],
[-0.1603, -0.1924, 0.0144],
[ 0.0076, -0.0928, -0.1645]],
[[ 0.0215, 0.1845, -0.1034],
[ 0.1574, -0.1577, -0.0438],
[-0.1360, -0.0601, -0.1693]],
[[-0.0720, 0.0619, 0.1405],
[ 0.0699, -0.1288, 0.0041],
[-0.0381, -0.1697, -0.1568]]],
[[[-0.1599, 0.1231, -0.1034],
[-0.0314, 0.0105, -0.1449],
[-0.0172, -0.0781, 0.0839]],
[[-0.0676, 0.1185, -0.1559],
[-0.1053, -0.1306, 0.1820],
[ 0.1584, -0.1370, 0.1828]],
[[ 0.0658, 0.1412, -0.0537],
[-0.1230, -0.1411, -0.0011],
[-0.1318, -0.0458, 0.1838]]],
[[[-0.0268, 0.1747, -0.1037],
[ 0.0515, -0.0228, -0.1024],
[-0.1543, -0.0643, -0.0100]],
[[-0.1572, -0.1530, 0.0026],
[ 0.1463, -0.1233, 0.0470],
[-0.1595, -0.1108, -0.0654]],
[[-0.0521, -0.0094, 0.1544],
[-0.0505, -0.0332, 0.0048],
[ 0.0735, 0.1350, 0.0690]]],
[[[ 0.0025, 0.0724, 0.0930],
[-0.1885, 0.0475, 0.1100],
[-0.1622, 0.0087, -0.0030]],
[[ 0.1032, -0.1425, -0.0620],
[ 0.1515, -0.0736, -0.1888],
[-0.1246, 0.1424, -0.0491]],
[[ 0.1759, -0.1616, 0.1198],
[-0.1103, 0.1032, 0.1727],
[-0.0601, 0.1635, 0.0034]]],
[[[ 0.0301, 0.1517, 0.0657],
[-0.1368, -0.1165, 0.1193],
[-0.0962, 0.1451, 0.1099]],
[[ 0.1646, -0.1860, -0.1187],
[-0.1367, -0.0911, 0.1337],
[-0.0926, -0.0524, -0.0672]],
[[-0.1509, -0.1231, -0.0855],
[ 0.1808, -0.0713, 0.0410],
[-0.0621, -0.0506, 0.1871]]],
[[[ 0.0888, -0.0874, -0.0826],
[ 0.0416, -0.0961, 0.0603],
[ 0.1455, 0.0050, 0.0318]],
[[-0.1633, 0.0070, -0.1537],
[-0.0109, 0.1602, -0.0463],
[-0.0423, -0.0147, -0.1045]],
[[ 0.1640, -0.0997, -0.1662],
[-0.1074, 0.1549, -0.1905],
[-0.1708, 0.1624, 0.0219]]],
[[[ 0.0824, -0.1376, 0.1086],
[ 0.0836, 0.0135, 0.0351],
[-0.1518, 0.0784, -0.1708]],
[[-0.1636, -0.1571, 0.1032],
[-0.1152, 0.0274, -0.1022],
[-0.0956, -0.1606, -0.1615]],
[[ 0.1307, 0.0419, 0.1924],
[-0.0599, -0.1296, -0.0448],
[ 0.0363, 0.0377, -0.0460]]],
[[[-0.1685, -0.1277, -0.0465],
[ 0.0922, -0.1011, 0.0742],
[ 0.0053, -0.1456, 0.0135]],
[[ 0.1341, 0.0131, 0.1281],
[-0.1020, 0.1069, -0.0631],
[-0.0439, -0.1189, -0.1822]],
[[ 0.1624, -0.1253, 0.0302],
[ 0.0709, 0.0767, 0.1453],
[ 0.0203, 0.1603, -0.1720]]],
[[[-0.1550, 0.1513, -0.1003],
[ 0.0370, 0.0367, -0.0233],
[ 0.0916, -0.0871, 0.1579]],
[[-0.1900, 0.0314, 0.0865],
[-0.0197, 0.0296, -0.0048],
[ 0.0846, 0.1543, -0.0770]],
[[-0.0016, -0.0978, 0.1826],
[-0.0477, 0.0689, 0.1079],
[ 0.0400, 0.0880, 0.1674]]],
[[[ 0.0145, -0.0447, -0.1742],
[ 0.0394, 0.0127, -0.1172],
[ 0.1330, -0.1207, 0.0326]],
[[-0.0155, -0.1602, 0.0023],
[ 0.0789, 0.1648, 0.1781],
[-0.1468, -0.0481, -0.1260]],
[[-0.0139, 0.0848, -0.0536],
[-0.1581, 0.1130, 0.0717],
[ 0.0275, -0.0006, -0.0049]]],
[[[-0.0199, 0.0032, -0.1246],
[ 0.0479, 0.1418, -0.1295],
[-0.1646, -0.1139, -0.1018]],
[[ 0.1475, 0.1413, -0.0354],
[ 0.0612, -0.1652, 0.0801],
[-0.1306, -0.0165, 0.1733]],
[[ 0.1527, 0.0911, -0.1906],
[-0.1152, 0.1737, 0.0436],
[-0.0213, -0.0314, -0.0319]]],
[[[-0.0003, -0.0546, -0.1255],
[ 0.0914, -0.1414, 0.0542],
[ 0.1139, 0.0132, 0.0815]],
[[-0.0042, 0.0541, 0.1456],
[ 0.0509, -0.0790, 0.0272],
[ 0.1419, 0.0992, -0.1448]],
[[ 0.0496, 0.0013, 0.0838],
[-0.0662, 0.0315, -0.1168],
[-0.0069, -0.1503, 0.0729]]],
[[[ 0.1866, 0.1329, -0.0560],
[ 0.0026, 0.1533, 0.0326],
[-0.1161, -0.0323, 0.0053]],
[[-0.0243, -0.1823, -0.1657],
[-0.0107, -0.0832, 0.0029],
[ 0.0981, 0.1241, -0.1788]],
[[-0.0400, -0.0577, -0.0757],
[-0.0584, 0.0176, -0.1019],
[-0.1828, 0.1589, -0.0312]]],
[[[-0.1083, -0.1236, -0.0904],
[-0.1575, 0.0157, 0.0552],
[-0.0839, 0.1704, -0.1457]],
[[-0.1648, -0.0270, -0.0489],
[-0.1122, -0.0288, -0.0073],
[-0.1443, -0.1712, 0.0100]],
[[-0.1142, -0.1552, 0.1568],
[ 0.0743, -0.1108, -0.0643],
[-0.0394, -0.1345, 0.0992]]],
[[[-0.1591, 0.0942, -0.1035],
[-0.0781, 0.0725, -0.0888],
[ 0.0959, 0.0213, 0.1222]],
[[ 0.1202, -0.0217, -0.0955],
[-0.1748, -0.1133, -0.0704],
[-0.0670, -0.1401, 0.1553]],
[[ 0.0053, -0.0871, -0.0239],
[ 0.0961, -0.0547, 0.1741],
[-0.0570, 0.0477, 0.1853]]],
[[[-0.1115, -0.0183, -0.1302],
[ 0.1435, -0.0238, -0.0048],
[ 0.1862, -0.1837, 0.1711]],
[[ 0.1375, -0.1798, 0.0818],
[-0.0792, 0.0820, 0.1373],
[ 0.1849, 0.0672, -0.1822]],
[[ 0.1868, -0.0356, 0.0726],
[-0.1523, -0.1130, 0.1506],
[-0.1046, 0.0178, 0.0990]]],
[[[ 0.1321, -0.1641, 0.0411],
[ 0.0526, 0.0393, 0.0918],
[-0.1345, 0.0750, 0.0859]],
[[-0.0985, 0.1466, 0.1349],
[-0.1461, -0.1742, 0.0941],
[-0.1502, -0.1813, 0.0864]],
[[-0.1039, 0.1179, 0.1499],
[-0.0366, -0.0120, 0.0951],
[ 0.0087, 0.1212, -0.0183]]],
[[[-0.1375, 0.0765, -0.0072],
[-0.0041, 0.0379, -0.0243],
[-0.1495, 0.1601, 0.1575]],
[[-0.0454, 0.1642, 0.0720],
[-0.0533, 0.0150, 0.0039],
[ 0.0194, 0.0113, -0.1194]],
[[ 0.0527, -0.0886, 0.0359],
[ 0.1595, 0.0526, -0.0048],
[-0.1790, -0.0458, -0.0324]]],
[[[-0.1638, 0.0942, 0.0686],
[-0.1082, -0.0675, 0.1892],
[-0.1347, -0.1247, 0.0739]],
[[ 0.0595, 0.1504, -0.1657],
[ 0.0733, 0.0529, -0.1599],
[ 0.0171, -0.1127, -0.0259]],
[[-0.0092, 0.0193, 0.1176],
[-0.1183, 0.0101, 0.1011],
[ 0.0648, -0.1897, 0.0782]]],
[[[ 0.0098, -0.1161, -0.0802],
[-0.1821, 0.0221, -0.1754],
[-0.1218, 0.0525, -0.0480]],
[[ 0.0770, 0.0477, 0.1514],
[ 0.0374, -0.1075, -0.1026],
[-0.0581, -0.1011, 0.1241]],
[[-0.0567, -0.0163, 0.0374],
[-0.1739, -0.0579, 0.0704],
[ 0.1817, 0.1561, 0.1677]]],
[[[-0.0569, -0.0763, 0.0044],
[-0.1133, 0.0813, 0.1477],
[ 0.0836, 0.0483, -0.1800]],
[[ 0.1343, -0.1590, 0.1177],
[ 0.1071, -0.1647, -0.0646],
[ 0.1578, -0.1261, 0.0243]],
[[-0.0424, -0.0241, -0.0988],
[ 0.0023, 0.0029, -0.0291],
[ 0.0415, -0.0557, 0.1427]]],
[[[-0.1028, 0.1054, 0.1658],
[-0.0357, 0.1579, 0.1237],
[ 0.0368, 0.0532, -0.1043]],
[[-0.0369, 0.0575, -0.1023],
[ 0.0635, 0.1015, 0.1112],
[-0.1235, 0.0467, 0.0908]],
[[ 0.1380, 0.0633, 0.1087],
[-0.1360, 0.0422, -0.1524],
[ 0.0819, 0.0918, -0.1624]]],
[[[ 0.1584, -0.0218, -0.0236],
[ 0.1878, -0.1289, 0.1343],
[ 0.0351, 0.1225, -0.1460]],
[[ 0.0690, -0.1439, 0.0056],
[ 0.0272, -0.0058, 0.0125],
[ 0.0868, -0.0684, -0.0884]],
[[ 0.1045, 0.0583, -0.0870],
[ 0.0600, -0.0732, -0.1695],
[ 0.0953, 0.0246, 0.1245]]]])),
('conv1.bias',
tensor([-1.8698e-01, -7.9379e-06, -1.9277e-02, 5.2182e-02, 7.5716e-02,
-3.3830e-03, -9.6565e-02, 1.0241e-01, -8.2457e-02, -1.6224e-01,
1.2980e-01, -8.2256e-02, -7.4655e-02, -3.7980e-02, 8.3407e-02,
-1.4880e-01, 4.8939e-02, 2.7506e-02, 5.8676e-03, -1.5813e-01,
-6.2464e-04, 1.0359e-02, -1.5525e-01, 7.9100e-02, 1.6850e-02,
-1.3809e-01, -6.3393e-02, -5.3843e-02, -1.5219e-02, -1.7365e-01,
1.7249e-01, -1.1165e-01])),
('conv2.weight',
tensor([[[[ 4.3750e-02, 4.5533e-02, -2.9410e-02],
[-4.1395e-02, 5.0397e-04, -1.3265e-02],
[-4.9851e-02, -1.0518e-02, 5.7710e-02]],
[[-5.6332e-02, -4.7168e-03, -4.4627e-02],
[ 5.3513e-03, -4.0824e-02, 1.8281e-02],
[ 5.0677e-02, -1.5295e-02, -6.1751e-03]],
[[-2.4984e-02, 1.2784e-02, -4.7123e-02],
[-4.3238e-02, 4.7349e-02, -1.5219e-02],
[-3.6073e-02, 4.1506e-02, -3.5337e-02]],
...,
[[ 5.6048e-02, 1.9129e-03, -1.7200e-02],
[ 5.8869e-02, -5.1520e-02, -5.3205e-02],
[-1.3903e-02, 5.1790e-02, 2.2585e-02]],
[[ 1.1835e-02, -4.9313e-02, -3.1838e-02],
[ 7.6813e-03, 4.2715e-02, -5.7404e-02],
[-4.1474e-02, -2.3128e-02, -4.7935e-02]],
[[-2.1860e-02, -2.1817e-02, -3.2578e-02],
[ 3.1317e-02, 3.3435e-02, 3.1837e-02],
[-2.2399e-03, 3.1600e-02, 4.0183e-02]]],
[[[ 1.9610e-04, 5.3780e-02, -4.5810e-02],
[ 4.0340e-02, 1.4904e-02, -1.5597e-02],
[-4.6080e-02, 5.0714e-02, -5.7445e-03]],
[[-3.5281e-02, 3.3011e-02, 4.3343e-02],
[-4.6263e-02, -5.6184e-02, 5.1245e-03],
[ 3.6015e-02, -3.3152e-02, 4.6629e-03]],
[[ 1.7650e-03, -4.2336e-02, 4.3744e-02],
[ 2.1655e-02, 5.3759e-02, 1.3719e-03],
[ 4.2005e-02, 5.3998e-02, 1.9009e-02]],
...,
[[ 8.9786e-03, -1.8645e-02, -1.3587e-02],
[-5.4167e-02, 1.2335e-02, -3.0384e-02],
[-4.8722e-03, -3.7296e-02, -2.6446e-02]],
[[ 1.7580e-02, 3.8462e-02, -5.0269e-02],
[ 2.6601e-03, -1.1462e-02, 4.7459e-02],
[-2.8888e-02, 3.4436e-02, -4.9943e-02]],
[[-5.0206e-02, -5.6025e-02, -3.6346e-02],
[-2.4407e-02, 5.3721e-02, -5.4920e-02],
[ 5.1835e-02, -3.2396e-02, 3.2373e-02]]],
[[[-2.7759e-02, -5.4737e-02, -1.1689e-02],
[ 3.9462e-02, 2.8649e-02, 5.1776e-02],
[ 2.4253e-02, -2.8318e-02, 2.7402e-02]],
[[ 1.3045e-02, -1.0456e-02, 2.0426e-02],
[ 2.1949e-02, 4.6817e-02, -5.6093e-02],
[ 2.7145e-02, -5.5441e-02, -2.0719e-02]],
[[ 4.4704e-02, -2.4099e-02, -4.7185e-02],
[-4.3257e-02, -3.3058e-02, -8.6451e-03],
[-3.7283e-02, -3.4569e-02, -7.1049e-03]],
...,
[[-4.1559e-02, -2.9240e-02, 2.7197e-03],
[ 2.0770e-02, 5.4479e-02, -4.4845e-02],
[-1.1641e-02, -2.9814e-02, -2.4419e-02]],
[[-1.5743e-02, 1.0854e-02, 3.0878e-02],
[ 2.2739e-02, 3.2999e-02, -1.1902e-02],
[-3.4837e-02, 1.5305e-02, -8.7552e-03]],
[[-2.2882e-02, 9.4639e-03, 5.1878e-03],
[-2.6344e-02, 2.9063e-02, -1.9337e-02],
[-3.4314e-02, 1.5313e-02, 4.1524e-02]]],
...,
[[[ 9.7454e-03, -3.2903e-03, 1.0696e-02],
[-4.0918e-02, 1.6352e-02, 1.4646e-02],
[ 1.2516e-02, -2.1804e-02, -2.5489e-02]],
[[-1.6083e-02, 2.5374e-02, 3.1458e-02],
[-3.1497e-02, -1.9513e-02, -2.1223e-02],
[ 6.6286e-03, 1.6538e-02, -4.8944e-02]],
[[ 2.4808e-02, -2.9520e-02, -4.8227e-02],
[ 1.7325e-03, -4.7443e-02, 2.3087e-03],
[-1.0008e-02, -2.0313e-02, 2.9944e-02]],
...,
[[ 5.5781e-02, -2.0898e-02, -3.1487e-03],
[-1.6931e-02, 4.5279e-04, -1.5024e-02],
[-5.5885e-02, 2.7140e-02, -8.5434e-03]],
[[ 1.3970e-02, -3.3131e-02, 4.3112e-02],
[-3.4956e-02, -5.0144e-02, -1.6391e-02],
[-9.1003e-03, -2.0204e-02, -1.0226e-03]],
[[-4.0053e-02, -5.0194e-02, 5.0405e-02],
[ 5.4107e-02, 4.2185e-02, 3.4359e-02],
[ 1.6749e-02, -1.4102e-02, 5.0171e-02]]],
[[[ 2.8229e-02, 5.6586e-02, -3.9617e-03],
[ 2.9538e-02, -1.2507e-02, -2.5516e-02],
[-1.5193e-02, -2.9232e-02, -2.0701e-02]],
[[-5.8773e-02, 3.3015e-02, -9.4146e-03],
[ 2.8957e-02, 5.8666e-02, 2.8679e-02],
[ 1.5249e-02, -1.2246e-03, 1.2230e-03]],
[[ 2.6050e-02, -4.6042e-02, -3.4895e-03],
[ 4.9529e-02, 6.6835e-03, -4.1808e-02],
[-8.6450e-03, -4.8510e-02, -2.4011e-02]],
...,
[[-5.1427e-02, 2.4528e-02, -5.4878e-02],
[-1.8610e-02, 5.4365e-02, 3.5053e-03],
[-3.9922e-02, 4.2510e-02, -5.7261e-02]],
[[ 4.1938e-02, -4.2039e-02, -1.2487e-02],
[-1.4090e-02, -3.7895e-02, 1.4394e-02],
[ 2.2555e-02, -2.7264e-02, 5.6102e-02]],
[[ 1.5770e-02, 5.4672e-02, -2.4056e-02],
[ 5.2089e-02, -2.8859e-02, -2.6499e-03],
[-5.2122e-02, -3.7436e-02, 3.9897e-02]]],
[[[ 2.7888e-02, 2.9241e-02, -1.9488e-02],
[ 2.8928e-02, 5.3312e-02, -2.9810e-02],
[-8.5104e-03, 5.7751e-02, -8.1857e-03]],
[[ 4.2649e-02, 3.3158e-03, 4.2879e-02],
[ 7.7893e-03, -3.2879e-02, 2.7630e-02],
[ 5.4706e-03, 4.8019e-02, 1.2420e-02]],
[[-4.2004e-02, -4.2790e-02, 2.4634e-02],
[-5.4641e-02, 3.4600e-02, 2.9071e-03],
[ 2.6470e-02, 4.6701e-02, 3.7158e-02]],
...,
[[-1.7641e-02, -2.1205e-02, -5.1504e-02],
[-7.4737e-03, 5.5061e-02, -2.6397e-03],
[-4.4653e-02, -3.6719e-02, 3.4420e-06]],
[[ 1.6525e-02, 1.7280e-02, 5.4554e-03],
[ 4.0098e-02, 2.7571e-02, -4.4965e-02],
[ 6.1493e-03, -5.7754e-02, 1.0513e-02]],
[[-5.7615e-02, 3.2921e-02, -1.5900e-02],
[ 2.0081e-02, 5.4590e-02, 1.1296e-02],
[-4.5015e-02, 1.1341e-03, 2.6447e-02]]]])),
('conv2.bias',
tensor([-0.0538, -0.0320, -0.0153, 0.0558, 0.0254, 0.0281, -0.0148, 0.0060,
-0.0283, -0.0062, 0.0437, -0.0064, 0.0341, 0.0233, -0.0201, 0.0391,
0.0243, 0.0071, 0.0125, -0.0138, -0.0377, -0.0169, -0.0475, -0.0004,
-0.0105, -0.0502, 0.0241, 0.0090, 0.0069, -0.0315, -0.0192, 0.0204])),
('conv3.weight',
tensor([[[[-0.0215, -0.0208, -0.0272],
[-0.0493, -0.0117, -0.0285],
[ 0.0515, -0.0041, 0.0126]],
[[ 0.0299, -0.0301, 0.0552],
[ 0.0450, 0.0449, -0.0583],
[-0.0452, -0.0480, -0.0275]],
[[-0.0262, -0.0338, 0.0505],
[ 0.0146, -0.0364, -0.0044],
[-0.0102, -0.0051, 0.0017]],
...,
[[-0.0367, -0.0468, -0.0586],
[ 0.0126, 0.0037, 0.0191],
[-0.0153, 0.0048, -0.0160]],
[[-0.0050, 0.0364, 0.0582],
[ 0.0093, -0.0268, -0.0355],
[-0.0125, 0.0500, 0.0009]],
[[ 0.0237, -0.0211, -0.0130],
[-0.0489, 0.0118, 0.0387],
[-0.0006, 0.0301, 0.0283]]],
[[[-0.0391, -0.0464, -0.0158],
[ 0.0201, -0.0054, 0.0422],
[ 0.0085, -0.0474, -0.0251]],
[[-0.0346, 0.0536, -0.0391],
[ 0.0244, -0.0263, -0.0073],
[ 0.0076, 0.0160, 0.0044]],
[[-0.0128, 0.0146, -0.0381],
[-0.0277, -0.0142, 0.0226],
[ 0.0190, 0.0326, -0.0219]],
...,
[[-0.0217, 0.0129, 0.0558],
[ 0.0164, -0.0292, -0.0467],
[-0.0296, 0.0205, -0.0300]],
[[ 0.0254, -0.0151, -0.0583],
[ 0.0111, -0.0469, -0.0300],
[-0.0462, 0.0293, -0.0351]],
[[ 0.0401, -0.0251, 0.0160],
[-0.0160, -0.0195, -0.0065],
[-0.0519, 0.0351, 0.0357]]],
[[[ 0.0544, -0.0209, -0.0454],
[ 0.0287, -0.0205, -0.0294],
[-0.0195, -0.0235, -0.0378]],
[[-0.0294, -0.0380, 0.0301],
[ 0.0360, 0.0367, 0.0458],
[-0.0189, -0.0017, 0.0145]],
[[-0.0297, 0.0567, 0.0276],
[ 0.0298, 0.0383, 0.0227],
[ 0.0262, 0.0063, 0.0131]],
...,
[[ 0.0002, 0.0432, -0.0247],
[ 0.0068, -0.0298, -0.0484],
[-0.0361, -0.0014, 0.0444]],
[[-0.0184, -0.0201, -0.0163],
[-0.0466, 0.0255, -0.0244],
[ 0.0283, 0.0149, -0.0588]],
[[ 0.0323, 0.0392, -0.0254],
[ 0.0560, 0.0137, -0.0401],
[-0.0236, 0.0589, 0.0448]]],
...,
[[[ 0.0213, 0.0204, 0.0574],
[-0.0276, -0.0196, 0.0117],
[ 0.0569, -0.0158, -0.0502]],
[[ 0.0452, -0.0038, 0.0502],
[ 0.0428, -0.0398, -0.0486],
[ 0.0130, 0.0563, 0.0576]],
[[ 0.0484, -0.0535, 0.0048],
[ 0.0268, -0.0290, -0.0390],
[ 0.0189, -0.0194, -0.0588]],
...,
[[ 0.0163, -0.0113, -0.0520],
[ 0.0288, -0.0547, -0.0544],
[ 0.0442, 0.0376, 0.0566]],
[[-0.0343, 0.0569, 0.0438],
[-0.0403, -0.0372, -0.0532],
[ 0.0322, 0.0126, 0.0423]],
[[ 0.0577, 0.0136, -0.0480],
[-0.0293, -0.0348, 0.0342],
[-0.0510, -0.0078, -0.0042]]],
[[[-0.0243, 0.0406, 0.0537],
[ 0.0209, -0.0059, -0.0487],
[-0.0425, 0.0339, 0.0444]],
[[ 0.0465, -0.0467, 0.0461],
[-0.0389, 0.0144, -0.0502],
[ 0.0274, 0.0552, 0.0356]],
[[-0.0289, 0.0474, -0.0217],
[ 0.0472, -0.0135, 0.0164],
[-0.0165, -0.0049, -0.0475]],
...,
[[-0.0377, 0.0267, 0.0367],
[ 0.0111, 0.0114, -0.0329],
[ 0.0031, -0.0223, -0.0280]],
[[-0.0500, -0.0529, 0.0116],
[ 0.0483, 0.0121, -0.0149],
[ 0.0328, 0.0201, 0.0402]],
[[ 0.0463, 0.0157, -0.0332],
[ 0.0150, 0.0479, 0.0461],
[ 0.0275, 0.0506, -0.0466]]],
[[[-0.0478, 0.0274, 0.0500],
[-0.0394, 0.0032, -0.0496],
[ 0.0381, 0.0391, 0.0330]],
[[ 0.0184, -0.0560, -0.0345],
[-0.0459, -0.0215, 0.0452],
[ 0.0049, 0.0537, 0.0544]],
[[-0.0413, -0.0084, 0.0585],
[ 0.0338, -0.0067, -0.0113],
[-0.0187, -0.0234, -0.0525]],
...,
[[-0.0389, 0.0325, -0.0538],
[ 0.0118, 0.0509, 0.0352],
[-0.0351, -0.0341, -0.0506]],
[[ 0.0136, -0.0349, 0.0082],
[ 0.0358, -0.0211, 0.0537],
[-0.0183, 0.0390, -0.0267]],
[[-0.0219, -0.0145, -0.0351],
[ 0.0556, 0.0033, -0.0030],
[ 0.0075, -0.0425, -0.0365]]]])),
('conv3.bias',
tensor([ 0.0304, 0.0099, -0.0004, 0.0334, 0.0301, 0.0491, 0.0530, -0.0432,
-0.0127, -0.0549, -0.0419, 0.0159, -0.0284, 0.0295, -0.0148, 0.0275,
0.0554, -0.0056, 0.0389, -0.0264, -0.0383, 0.0126, 0.0320, 0.0312,
0.0018, 0.0560, -0.0329, -0.0155, -0.0391, -0.0539, -0.0571, -0.0254])),
('conv4.weight',
tensor([[[[-3.8911e-02, -3.4220e-02, 4.2567e-03],
[-4.5321e-02, -5.2531e-02, -8.1722e-03],
[-2.2638e-02, 4.4213e-02, 5.6989e-02]],
[[ 1.8417e-03, -1.4453e-02, 4.9892e-02],
[ 5.7762e-02, 9.6610e-03, -3.9509e-02],
[ 3.3795e-02, 5.0409e-02, -5.8834e-02]],
[[ 4.6645e-03, -1.6286e-02, 4.3410e-02],
[-3.4043e-02, -2.2207e-02, 4.0967e-02],
[ 5.3004e-02, -2.2756e-02, -6.7993e-03]],
...,
[[ 1.1741e-02, -5.5062e-02, -3.3625e-02],
[-9.2320e-03, -3.3036e-02, 3.3196e-02],
[ 2.3940e-02, 2.0442e-02, 1.4183e-02]],
[[-2.7139e-02, -3.4129e-03, -1.0090e-02],
[ 1.3073e-02, -1.6998e-02, -4.7540e-02],
[-2.5758e-02, -1.9363e-02, 2.1905e-02]],
[[ 5.7593e-02, 1.5013e-02, -5.7894e-02],
[ 5.7964e-02, -2.3412e-02, 2.6955e-02],
[-3.9814e-02, -4.6015e-02, -5.3240e-02]]],
[[[ 5.8211e-02, -4.1118e-02, 2.7704e-02],
[ 5.7198e-02, 8.4165e-03, -5.1708e-02],
[ 3.1423e-02, 1.5026e-02, 3.5922e-02]],
[[ 8.8858e-03, 3.2818e-02, 5.4486e-02],
[-2.6636e-02, 2.2604e-02, 2.9531e-02],
[-1.0327e-03, 2.2348e-03, 2.4103e-02]],
[[ 3.8683e-02, -5.0057e-03, 5.0224e-02],
[ 3.5756e-02, -2.7295e-02, -2.2854e-02],
[-3.2043e-02, -3.2415e-02, 4.1034e-02]],
...,
[[-1.9791e-02, 4.3243e-02, -3.5177e-02],
[ 2.4554e-02, 4.2845e-03, 4.8009e-02],
[ 2.4897e-03, 3.9550e-02, -3.0833e-02]],
[[ 4.5807e-02, 6.5845e-03, 9.3362e-05],
[-1.9411e-02, -2.9161e-02, 5.0828e-02],
[ 1.2028e-03, 2.1260e-02, -4.3710e-03]],
[[-4.8702e-02, -2.0571e-02, -3.5162e-02],
[-2.5856e-02, -2.6619e-02, 8.1867e-03],
[-2.7671e-02, -9.6651e-03, -5.3279e-02]]],
[[[-5.6432e-02, 2.3722e-02, -2.1750e-02],
[-4.8247e-03, -2.1226e-02, -1.0829e-02],
[-1.9523e-02, -1.8187e-02, 2.2772e-03]],
[[-1.0907e-02, 3.3984e-02, -1.2088e-02],
[-1.8657e-02, -4.8297e-02, 2.3614e-02],
[-3.9670e-02, 6.1733e-03, -2.9168e-02]],
[[-4.2112e-02, 2.8203e-02, -1.7385e-03],
[-2.5282e-02, -9.4592e-05, 6.5093e-03],
[-4.1745e-02, 4.3988e-03, -1.1622e-02]],
...,
[[ 4.2991e-02, 1.6912e-02, -4.3689e-02],
[ 5.1871e-02, 4.8566e-02, 3.6205e-02],
[-3.2016e-02, -1.3596e-02, -2.7950e-02]],
[[-2.8307e-02, -4.0278e-02, 1.5087e-02],
[ 4.0443e-02, -3.5727e-02, 3.7196e-02],
[-1.4194e-02, -2.7319e-02, -5.1305e-02]],
[[-2.9962e-02, 2.4693e-02, -4.4912e-02],
[ 5.5890e-03, 4.6671e-02, 3.3599e-02],
[-3.9949e-02, -4.4716e-02, 2.2345e-02]]],
...,
[[[ 3.1920e-02, -4.9932e-02, -1.0871e-02],
[-3.7500e-02, 4.1638e-02, -1.3246e-02],
[ 1.6447e-02, -5.6741e-02, -3.7524e-02]],
[[ 3.3903e-02, -3.1321e-02, -4.4877e-02],
[-2.2473e-02, -2.4225e-02, 4.5838e-02],
[-2.0069e-02, 3.8338e-02, 5.8010e-02]],
[[ 1.7602e-02, -5.2530e-02, 4.9331e-02],
[ 2.4509e-02, 2.3943e-02, -2.1774e-02],
[-5.7154e-02, 5.7090e-02, 3.7531e-02]],
...,
[[ 2.8630e-02, -4.8644e-04, -5.3822e-02],
[-1.1102e-02, 4.8524e-02, -2.7142e-02],
[-5.3463e-02, 3.5607e-02, -1.1110e-02]],
[[ 4.7891e-02, -3.4098e-02, 3.6984e-02],
[ 3.9062e-02, -5.1119e-03, -3.3252e-02],
[ 5.5029e-02, 3.1092e-03, -5.9391e-03]],
[[ 9.6775e-03, 2.2903e-02, -1.5971e-02],
[-2.6969e-02, 8.5069e-04, -2.5744e-02],
[ 2.7311e-03, -2.2119e-02, 2.4367e-03]]],
[[[ 1.5206e-02, -5.3504e-02, -1.8013e-02],
[-3.0064e-02, 3.2887e-02, 1.7612e-02],
[-4.2775e-02, -2.7335e-02, 5.1532e-02]],
[[ 4.3325e-02, 1.2909e-03, 5.6831e-02],
[ 4.8283e-03, -4.4274e-02, 1.7624e-02],
[-7.2574e-03, 1.3743e-02, -5.1502e-02]],
[[-1.4299e-02, -3.0024e-02, 4.9578e-02],
[-4.6143e-02, -3.0686e-02, -5.0727e-02],
[ 5.4766e-02, -1.4012e-02, 3.3267e-02]],
...,
[[ 5.2773e-02, 4.0294e-02, -2.6113e-02],
[-2.5069e-02, 4.3956e-02, -4.7841e-02],
[ 3.0924e-02, 1.5174e-02, -4.8323e-02]],
[[-2.0325e-02, 3.2666e-02, 2.5174e-02],
[ 5.3775e-03, -3.2712e-02, -5.2251e-02],
[-2.7426e-02, -4.5502e-04, -3.2174e-02]],
[[ 8.5780e-03, -5.2099e-02, 5.7285e-02],
[-5.0897e-02, -5.3995e-03, 4.2719e-02],
[-5.2257e-02, -7.9682e-03, 2.1848e-02]]],
[[[-2.8660e-02, 1.2392e-02, 2.9940e-02],
[-1.1170e-02, -1.7499e-02, -5.3951e-02],
[ 4.9738e-02, 2.7240e-02, 2.9588e-03]],
[[ 3.7728e-02, -3.1084e-02, 4.6459e-02],
[-1.4961e-02, 1.6951e-02, -1.3976e-02],
[ 2.5768e-02, -3.8991e-02, -2.8738e-02]],
[[ 1.6084e-02, -7.3174e-03, -4.5839e-02],
[-3.6029e-02, 1.5303e-02, -5.4380e-02],
[ 2.1913e-02, -4.4792e-02, 4.7973e-02]],
...,
[[ 5.4945e-02, -4.5541e-02, -1.5806e-02],
[-4.5216e-02, -2.0338e-02, 3.3373e-02],
[-1.8431e-02, 4.3953e-02, -4.8196e-02]],
[[-3.6129e-02, 5.7705e-02, -1.2229e-02],
[-5.2801e-02, 7.0930e-03, -2.6721e-02],
[ 3.6720e-02, 7.5540e-04, 5.5401e-02]],
[[-4.0386e-02, 1.6714e-02, 2.9246e-02],
[ 4.7033e-02, -8.7923e-03, 5.1267e-02],
[-3.1211e-02, -2.7036e-02, 4.0346e-02]]]])),
('conv4.bias',
tensor([-0.0368, -0.0535, -0.0568, -0.0364, 0.0167, 0.0255, 0.0031, 0.0562,
-0.0575, 0.0470, -0.0128, 0.0159, -0.0387, 0.0517, -0.0508, -0.0104,
0.0133, -0.0265, 0.0290, 0.0147, -0.0405, 0.0074, 0.0434, -0.0436,
-0.0295, 0.0417, 0.0324, 0.0316, 0.0143, -0.0312, 0.0463, 0.0441])),
('fc1.weight',
tensor([[-0.0206, -0.0161, -0.0053, ..., -0.0257, -0.0314, -0.0105],
[-0.0229, 0.0030, 0.0239, ..., 0.0199, -0.0279, 0.0155]])),
('fc1.bias', tensor([-0.5000, -1.2000]))])
%% Cell type:code id: tags:
```
``` python
o1.state_dict()
```
%% Output
{'state': {},
'param_groups': [{'lr': 0.6,
'momentum': 0,
'dampening': 0,
'weight_decay': 0,
'nesterov': False,
'params': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]}]}
%% Cell type:code id: tags:
```
``` python
with torch.no_grad():
    o1.param_groups[0]["params"][0].copy_(torch.zeros(1,))
```
%% Cell type:code id: tags:
```
``` python
o1.param_groups
```
%% Cell type:code id: tags:
```
``` python
m1.state_dict()
```
%% Cell type:code id: tags:
```
``` python
import torch
loss = getattr(torch.nn.functional, 'nll_loss')
```
%% Cell type:code id: tags:
```
``` python
loss
```
%% Cell type:code id: tags:
```
``` python
%matplotlib inline
from decentralizepy.node.Node import Node
from decentralizepy.graphs.SmallWorld import SmallWorld
from decentralizepy.graphs.Graph import Graph
from decentralizepy.mappings.Linear import Linear
from torch import multiprocessing as mp
import torch
import logging
from localconfig import LocalConfig
def read_ini(file_path):
    config = LocalConfig(file_path)
    for section in config:
        print("Section: ", section)
        for key, value in config.items(section):
            print((key, value))
    print(dict(config.items('DATASET')))
    return config

config = read_ini("config.ini")
my_config = dict()
for section in config:
    my_config[section] = dict(config.items(section))

# f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])
g = Graph()
g.read_graph_from_file("36_nodes.edges", "edges")
l = Linear(1, 36)
# Node(0, 0, l, g, my_config, 20, "results", logging.DEBUG)
mp.spawn(fn = Node, nprocs = g.n_procs, args = [0, l, g, my_config, 20, "results", logging.INFO])
# mp.spawn(fn = Node, args = [l, g, config, 10, "results", logging.DEBUG], nprocs = 2)
```
%% Output
Section: GRAPH
('package', 'decentralizepy.graphs.SmallWorld')
('graph_class', 'SmallWorld')
Section: DATASET
('dataset_package', 'decentralizepy.datasets.Femnist')
('dataset_class', 'Femnist')
('model_class', 'CNN')
('n_procs', 36)
('train_dir', 'leaf/data/femnist/per_user_data/train')
('test_dir', 'leaf/data/femnist/data/test')
('sizes', '')
Section: OPTIMIZER_PARAMS
('optimizer_package', 'torch.optim')
('optimizer_class', 'Adam')
('lr', 0.01)
Section: TRAIN_PARAMS
('training_package', 'decentralizepy.training.Training')
('training_class', 'Training')
('epochs_per_round', 1)
('batch_size', 1024)
('shuffle', True)
('loss_package', 'torch.nn')
('loss_class', 'CrossEntropyLoss')
Section: COMMUNICATION
('comm_package', 'decentralizepy.communication.TCP')
('comm_class', 'TCP')
('addresses_filepath', 'ip_addr.json')
Section: SHARING
('sharing_package', 'decentralizepy.sharing.Sharing')
('sharing_class', 'Sharing')
{'dataset_package': 'decentralizepy.datasets.Femnist', 'dataset_class': 'Femnist', 'model_class': 'CNN', 'n_procs': 36, 'train_dir': 'leaf/data/femnist/per_user_data/train', 'test_dir': 'leaf/data/femnist/data/test', 'sizes': ''}
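The config printed above identifies every component by a `package`/`class` string pair. Presumably these are resolved through ordinary dynamic imports, in the same spirit as the earlier `getattr(torch.nn.functional, 'nll_loss')` cell; the sketch below shows that pattern under that assumption and is not decentralizepy's actual loading code:

``` python
import importlib

def load_class(package_name, class_name):
    # Illustrative only: resolve a "package"/"class" pair from the config
    # into a Python class via a dynamic import.
    module = importlib.import_module(package_name)
    return getattr(module, class_name)

# Using the DATASET section printed above:
dataset_cls = load_class(my_config['DATASET']['dataset_package'],
                         my_config['DATASET']['dataset_class'])
print(dataset_cls)  # -> decentralizepy.datasets.Femnist.Femnist
```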
%% Cell type:code id: tags:
```
``` python
```
%% Cell type:code id: tags:
```
``` python
from decentralizepy.mappings.Linear import Linear
from testing import f
from torch import multiprocessing as mp
l = Linear(1, 2)
mp.spawn(fn = f, nprocs = 2, args = [0, 2, "ip_addr.json", l])
```
%% Cell type:code id: tags:
```
``` python
from decentralizepy.datasets.Celeba import Celeba
f = Celeba(0,0,None)
f.file_per_user('/home/risharma/leaf/data/celeba/data/train','/home/risharma/leaf/data/celeba/per_user_data/train')
```
%% Output
Created File: 3020.json
Created File: 6675.json
Created File: 6725.json
Created File: 2263.json
Created File: 1321.json
... (one "Created File: <user>.json" line per Celeba user) ...
Created File: 3227.json
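%% Cell type:markdown id: tags:
The cell above writes one JSON file per Celeba user. As a quick sanity check (a sketch, not part of the original run), the snippet below opens one of the generated files and prints its top-level structure; the file name `3020.json` is taken from the output above, and the directory is the `per_user_data/train` target passed to `file_per_user`. The exact schema inside each file depends on `file_per_user`, so only the keys are inspected.
%% Cell type:code id: tags:
``` python
import json
import os

# Directory written by file_per_user above; '3020.json' appears in its output
per_user_dir = '/home/risharma/leaf/data/celeba/per_user_data/train'
with open(os.path.join(per_user_dir, '3020.json'), 'r') as inf:
    user_blob = json.load(inf)

# Show only the top-level structure; the per-file schema is not assumed here
print(type(user_blob))
if isinstance(user_blob, dict):
    print(list(user_blob.keys()))
```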
%% Cell type:code id: tags:
```
``` python
from decentralizepy.datasets.Femnist import Femnist

# Same split for FEMNIST: write one JSON file per user
f = Femnist()
f.file_per_user('../leaf/data/femnist/data/train',
                '../leaf/data/femnist/per_user_data/train')
```
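%% Cell type:markdown id: tags:
As a rough check (a sketch; the directory below mirrors the second argument passed above), count how many per-user FEMNIST files were produced.
%% Cell type:code id: tags:
``` python
import os

# Count the JSON files written by file_per_user for FEMNIST
femnist_per_user_dir = '../leaf/data/femnist/per_user_data/train'
n_files = len([f for f in os.listdir(femnist_per_user_dir) if f.endswith('.json')])
print("per-user FEMNIST files:", n_files)
```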
%% Cell type:code id: tags:
```
``` python
a = set()
a.update([2, 3, 4, 5])
```
%% Cell type:code id: tags:
```
``` python
a
```
%% Output
{2, 3, 4, 5}
%% Cell type:code id: tags:
```
``` python
print(*a)
```
%% Output
2 3 4 5
%% Cell type:code id: tags:
```
``` python
from decentralizepy.graphs.FullyConnected import FullyConnected

# A 96-node fully connected topology, written out as an edge-list file
s = FullyConnected(96)
s.write_graph_to_file('96_node_fullyConnected.edges')
```
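%% Cell type:markdown id: tags:
A quick sanity check on the written topology (a sketch, assuming `write_graph_to_file` emits a plain edge list: first line the number of nodes, then one `u v` pair per line; adjust if the on-disk format differs). A fully connected graph over `n` nodes should list `n * (n - 1)` directed edges.
%% Cell type:code id: tags:
``` python
# Parse the edge-list file written above (format assumed: node count, then edges)
with open('96_node_fullyConnected.edges', 'r') as inf:
    lines = inf.read().strip().split('\n')

n_nodes = int(lines[0])
edges = [tuple(map(int, line.split())) for line in lines[1:]]
# For a fully connected graph we expect n_nodes * (n_nodes - 1) directed edges
print(n_nodes, "nodes |", "expected:", n_nodes * (n_nodes - 1), "| found:", len(edges))
```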
%% Cell type:code id: tags:
```
``` python
from decentralizepy.graphs.SmallWorld import SmallWorld

# Two independently sampled 96-node small-world topologies, written to separate files
s1 = SmallWorld(96, 2, 1.0)
s1.write_graph_to_file('96_nodes_random1.edges')
s2 = SmallWorld(96, 2, 1.0)
s2.write_graph_to_file('96_nodes_random2.edges')
```
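%% Cell type:markdown id: tags:
The two small-world graphs are sampled independently, so their edge sets should differ. A quick file-level comparison (a sketch, under the same edge-list format assumption as above):
%% Cell type:code id: tags:
``` python
# Compare the two sampled topologies by their edge sets
def read_edges(path):
    with open(path, 'r') as inf:
        lines = inf.read().strip().split('\n')
    return set(tuple(map(int, line.split())) for line in lines[1:])

e1 = read_edges('96_nodes_random1.edges')
e2 = read_edges('96_nodes_random2.edges')
print("only in graph 1:", len(e1 - e2), "| only in graph 2:", len(e2 - e1))
```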
%% Cell type:code id: tags:
```
``` python
import sys
sys.argv
```
%% Output
['/home/risharma/miniconda3/envs/decpy/lib/python3.9/site-packages/ipykernel_launcher.py',
'--ip=127.0.0.1',
'--stdin=9008',
'--control=9006',
'--hb=9005',
'--Session.signature_scheme="hmac-sha256"',
'--Session.key=b"eac5d2f8-c460-45f1-a268-1e4b46a6efd6"',
'--shell=9007',
'--transport="tcp"',
'--iopub=9009',
'--f=/tmp/tmp-21212479paJaUBJBN84.json']
%% Cell type:code id: tags:
```
``` python
import torch
from decentralizepy.datasets.Femnist import CNN

# Build the FEMNIST CNN and gather its parameter tensors from the state dict
m1 = CNN()
o1 = torch.optim.SGD(m1.parameters(), lr=0.6)
# print("m1_parameters: ", {k: v.data for k, v in zip(m1.state_dict(), m1.parameters())})
# print("m1_state_dict: ", m1.state_dict())
# print("o1_state_dict: ", o1.state_dict())
tensors_to_cat = [v.data for _, v in m1.state_dict().items()]
print("tensors to cat: ", tensors_to_cat)
```
%% Output
tensors to cat:  [parameter tensors of the CNN in state_dict order: two convolutional layers' weights and biases followed by two fully connected layers' weights and biases (random initial values, output truncated)]
%% Cell type:code id: tags:
```
``` python
a = [(3, 2), (2, 5), (2, 6)]
#a.sort(reverse = True)
```
%% Cell type:code id: tags:
```
``` python
b = [(13, 12), (12, 15), (12, 16)]
import torch
a_t = torch.tensor(a)
b_t = torch.tensor(b)
```
%% Cell type:code id: tags:
```
``` python
print(a_t)
print(b_t)
```
%% Output
tensor([[3, 2],
[2, 5],
[2, 6]])
tensor([[13, 12],
[12, 15],
[12, 16]])
%% Cell type:code id: tags:
```
``` python
indices = torch.tensor([2, 3, 5])
torch.flatten(a_t)[indices] = torch.flatten(b_t)[:3]
```
%% Cell type:code id: tags:
```
``` python
c = torch.stack((torch.flatten(a_t), torch.flatten(b_t)), dim=-1)
```
%% Cell type:code id: tags:
```
``` python
print(a_t)
print(b_t)
```
%% Output
tensor([[ 3, 2],
[13, 12],
[ 2, 12]])
tensor([[13, 12],
[12, 15],
[12, 16]])
%% Cell type:code id: tags:
```
``` python
c
```
%% Output
tensor([[ 3, 13],
[ 2, 12],
[ 2, 12],
[ 5, 15],
[ 2, 12],
[ 6, 16]])
%% Cell type:code id: tags:
```
``` python
c[-1] = 26
```
%% Cell type:code id: tags:
```
``` python
c
```
%% Output
tensor([ 3, 2, 2, 5, 2, 6, 13, 12, 12, 15, 12, 26])
%% Cell type:code id: tags:
```
``` python
b
```
%% Output
[(13, 12), (12, 15), (12, 16)]
%% Cell type:code id: tags:
```
``` python
torch.tensor([1,2,3]).shape[0]
```
%% Output
3
%% Cell type:code id: tags:
```
``` python
from decentralizepy.datasets.Partitioner import DataPartitioner
l = [1, 2, 3, 4, 5, 6, 7]
e = len(l) // 3
frac = e / len(l)
sizes = [frac] * 3
sizes[-1] += 1.0 - frac * 3
print("sizes: ", sizes)
for i in range(3):
myPar = DataPartitioner(l, sizes).use(i)
for j in range(len(myPar)):
print(myPar.__getitem__(j), end=' ')
print()
```
%% Output
sizes: [0.2857142857142857, 0.2857142857142857, 0.4285714285714286]
2 3
6 5
1 4 7
%% Cell type:code id: tags:
```
``` python
w = 1
p = 2
i = w/p
print(i)
```
%% Output
0.5
%% Cell type:code id: tags:
```
``` python
from torch import multiprocessing as mp
from decentralizepy.node.PeerSampler import PeerSampler
from decentralizepy.node.Node import Node
from decentralizepy.mappings.Linear import Linear
from decentralizepy.graphs.Regular import Regular
l = Linear(1, 6)
g = Regular(6, 2)
processes = [mp.Process(target = PeerSampler, args=[-1, 0, l, g, None]),
mp.Process(target = Node, args=[1, 0, l, g, None]),
mp.Process(target = Node, args=[2, 0, l, g, None]),
mp.Process(target = Node, args=[3, 0, l, g, None]),
mp.Process(target = Node, args=[4, 0, l, g, None]),
mp.Process(target = Node, args=[5, 0, l, g, None]),
mp.Process(target = Node, args=[6, 0, l, g, None]),
]
for p in processes:
p.start()
for p in processes:
p.join()
```
%% Cell type:code id: tags:
``` python
```
......
import json
import os
import sys
from pathlib import Path
import numpy as np
import pandas as pd
......@@ -25,18 +26,32 @@ def get_stats(l):
return mean_dict, stdev_dict, min_dict, max_dict
def plot(means, stdevs, mins, maxs, title, label, loc):
def plot(means, stdevs, mins, maxs, title, label, loc, xlabel="communication rounds"):
plt.title(title)
plt.xlabel("communication rounds")
x_axis = list(means.keys())
y_axis = list(means.values())
err = list(stdevs.values())
plt.errorbar(x_axis, y_axis, yerr=err, label=label)
plt.xlabel(xlabel)
x_axis = np.array(list(means.keys()))
y_axis = np.array(list(means.values()))
err = np.array(list(stdevs.values()))
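# Draw the mean as a line and shade the band mean +/- one standard deviation across nodes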
plt.plot(x_axis, y_axis, label=label)
plt.fill_between(x_axis, y_axis - err, y_axis + err, alpha=0.4)
plt.legend(loc=loc)
def plot_results(path):
def replace_dict_key(d_org: dict, d_other: dict):
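# Re-keys d_org using d_other as a lookup table; used below to plot metrics against total bytes instead of communication rounds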
result = {}
for x, y in d_org.items():
result[d_other[x]] = y
return result
def plot_results(path, centralized, data_machine="machine0", data_node=0):
folders = os.listdir(path)
if centralized.lower() in ["true", "1", "t", "y", "yes"]:
centralized = True
print("Centralized")
else:
centralized = False
folders.sort()
print("Reading folders from: ", path)
print("Folders: ", folders)
......@@ -44,8 +59,8 @@ def plot_results(path):
meta_means, meta_stdevs = {}, {}
data_means, data_stdevs = {}, {}
for folder in folders:
folder_path = os.path.join(path, folder)
if not os.path.isdir(folder_path):
folder_path = Path(os.path.join(path, folder))
if not folder_path.is_dir() or "weights" == folder_path.name:
continue
results = []
machine_folders = os.listdir(folder_path)
......@@ -59,71 +74,130 @@ def plot_results(path):
filepath = os.path.join(mf_path, f)
with open(filepath, "r") as inf:
results.append(json.load(inf))
if folder.startswith("FL") or folder.startswith("Parameter Server"):
data_node = -1
else:
data_node = 0
with open(folder_path / data_machine / f"{data_node}_results.json", "r") as f:
main_data = json.load(f)
main_data = [main_data]
# Plotting bytes over time
plt.figure(10)
b_means, stdevs, mins, maxs = get_stats([x["total_bytes"] for x in results])
plot(b_means, stdevs, mins, maxs, "Total Bytes", folder, "lower right")
df = pd.DataFrame(
{
"mean": list(b_means.values()),
"std": list(stdevs.values()),
"nr_nodes": [len(results)] * len(b_means),
},
list(b_means.keys()),
columns=["mean", "std", "nr_nodes"],
)
df.to_csv(
os.path.join(path, "total_bytes_" + folder + ".csv"), index_label="rounds"
)
# Plot Training loss
plt.figure(1)
means, stdevs, mins, maxs = get_stats([x["train_loss"] for x in results])
plot(means, stdevs, mins, maxs, "Training Loss", folder, "upper right")
correct_bytes = [b_means[x] for x in means]
df = pd.DataFrame(
{
"mean": list(means.values()),
"std": list(stdevs.values()),
"nr_nodes": [len(results)] * len(means),
"total_bytes": correct_bytes,
},
list(means.keys()),
columns=["mean", "std", "nr_nodes"],
columns=["mean", "std", "nr_nodes", "total_bytes"],
)
plt.figure(11)
means = replace_dict_key(means, b_means)
plot(
means,
stdevs,
mins,
maxs,
"Training Loss",
folder,
"upper right",
"Total Bytes per node",
)
df.to_csv(
os.path.join(path, "train_loss_" + folder + ".csv"), index_label="rounds"
)
# Plot Testing loss
plt.figure(2)
means, stdevs, mins, maxs = get_stats([x["test_loss"] for x in results])
if centralized:
means, stdevs, mins, maxs = get_stats([x["test_loss"] for x in main_data])
else:
means, stdevs, mins, maxs = get_stats([x["test_loss"] for x in results])
plot(means, stdevs, mins, maxs, "Testing Loss", folder, "upper right")
df = pd.DataFrame(
{
"mean": list(means.values()),
"std": list(stdevs.values()),
"nr_nodes": [len(results)] * len(means),
"total_bytes": correct_bytes,
},
list(means.keys()),
columns=["mean", "std", "nr_nodes"],
columns=["mean", "std", "nr_nodes", "total_bytes"],
)
plt.figure(12)
means = replace_dict_key(means, b_means)
plot(
means,
stdevs,
mins,
maxs,
"Testing Loss",
folder,
"upper right",
"Total Bytes per node",
)
df.to_csv(
os.path.join(path, "test_loss_" + folder + ".csv"), index_label="rounds"
)
# Plot Testing Accuracy
plt.figure(3)
means, stdevs, mins, maxs = get_stats([x["test_acc"] for x in results])
if centralized:
means, stdevs, mins, maxs = get_stats([x["test_acc"] for x in main_data])
else:
means, stdevs, mins, maxs = get_stats([x["test_acc"] for x in results])
plot(means, stdevs, mins, maxs, "Testing Accuracy", folder, "lower right")
df = pd.DataFrame(
{
"mean": list(means.values()),
"std": list(stdevs.values()),
"nr_nodes": [len(results)] * len(means),
"total_bytes": correct_bytes,
},
list(means.keys()),
columns=["mean", "std", "nr_nodes"],
)
df.to_csv(
os.path.join(path, "test_acc_" + folder + ".csv"), index_label="rounds"
columns=["mean", "std", "nr_nodes", "total_bytes"],
)
plt.figure(6)
means, stdevs, mins, maxs = get_stats([x["grad_std"] for x in results])
plt.figure(13)
means = replace_dict_key(means, b_means)
plot(
means,
stdevs,
mins,
maxs,
"Gradient Variation over Nodes",
"Testing Accuracy",
folder,
"upper right",
"lower right",
"Total Bytes per node",
)
# Plot Testing loss
plt.figure(7)
means, stdevs, mins, maxs = get_stats([x["grad_mean"] for x in results])
plot(
means, stdevs, mins, maxs, "Gradient Magnitude Mean", folder, "upper right"
df.to_csv(
os.path.join(path, "test_acc_" + folder + ".csv"), index_label="rounds"
)
# Collect total_bytes shared
bytes_list = []
for x in results:
......@@ -152,16 +226,21 @@ def plot_results(path):
data_means[folder] = list(means.values())[0]
data_stdevs[folder] = list(stdevs.values())[0]
plt.figure(10)
plt.savefig(os.path.join(path, "total_bytes.png"), dpi=300)
plt.figure(11)
plt.savefig(os.path.join(path, "bytes_train_loss.png"), dpi=300)
plt.figure(12)
plt.savefig(os.path.join(path, "bytes_test_loss.png"), dpi=300)
plt.figure(13)
plt.savefig(os.path.join(path, "bytes_test_acc.png"), dpi=300)
plt.figure(1)
plt.savefig(os.path.join(path, "train_loss.png"), dpi=300)
plt.figure(2)
plt.savefig(os.path.join(path, "test_loss.png"), dpi=300)
plt.figure(3)
plt.savefig(os.path.join(path, "test_acc.png"), dpi=300)
plt.figure(6)
plt.savefig(os.path.join(path, "grad_std.png"), dpi=300)
plt.figure(7)
plt.savefig(os.path.join(path, "grad_mean.png"), dpi=300)
# Plot total_bytes
plt.figure(4)
plt.title("Data Shared")
......@@ -236,6 +315,10 @@ def plot_parameters(path):
if __name__ == "__main__":
assert len(sys.argv) == 2
plot_results(sys.argv[1])
assert len(sys.argv) == 3
# The args are:
# 1: the folder with the data
# 2: True/False: If True then the evaluation on the test set was centralized
# for federated learning folder name must start with "FL"!
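# Hypothetical invocation (file and path names assumed): python3 plot.py ../eval/data/2022-03-24T17:54 False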
plot_results(sys.argv[1], sys.argv[2])
# plot_parameters(sys.argv[1])
import distutils
import json
import os
import sys
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
def plot(x_axis, means, stdevs, pos, nb_plots, title, label, loc, xlabel):
cmap = plt.get_cmap("gist_rainbow")
plt.title(title)
plt.xlabel(xlabel)
y_axis = list(means)
err = list(stdevs)
print("label:", label)
print("color: ", cmap(1 / nb_plots * pos))
plt.errorbar(
list(x_axis), y_axis, yerr=err, label=label, color=cmap(1 / nb_plots * pos)
)
plt.legend(loc=loc)
def plot_results(path, epochs, global_epochs="True"):
if global_epochs.lower() in ["true", "1", "t", "y", "yes"]:
global_epochs = True
else:
global_epochs = False
epochs = int(epochs)
# rounds = int(rounds)
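# When global_epochs is true, the x-axis below is rescaled from communication rounds to global epochs;
# this assumes the first entry of each csv's "rounds" column equals the number of communication rounds per global epoch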
folders = os.listdir(path)
folders.sort()
print("Reading folders from: ", path)
print("Folders: ", folders)
bytes_means, bytes_stdevs = {}, {}
meta_means, meta_stdevs = {}, {}
data_means, data_stdevs = {}, {}
files = os.listdir(path)
files = [f for f in files if f.endswith(".csv")]
train_loss = sorted([f for f in files if f.startswith("train_loss")])
test_acc = sorted([f for f in files if f.startswith("test_acc")])
test_loss = sorted([f for f in files if f.startswith("test_loss")])
min_losses = []
for i, f in enumerate(train_loss):
filepath = os.path.join(path, f)
with open(filepath, "r") as inf:
results_csv = pd.read_csv(inf)
# Plot Training loss
plt.figure(1)
if global_epochs:
rounds = results_csv["rounds"].iloc[0]
print("Rounds: ", rounds)
results_cr = results_csv[results_csv.rounds <= epochs * rounds]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = (
results_cr["rounds"].to_numpy() / rounds
) # list(np.arange(0, len(means), 1))
x_label = "global epochs"
else:
results_cr = results_csv[results_csv.rounds <= epochs]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = results_cr["rounds"].to_numpy()
x_label = "communication rounds"
min_losses.append(np.min(means))
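# The legend label is the configuration name, obtained by stripping the "train_loss_" prefix and the
# fixed-length timestamp suffix (e.g. ":2022-03-24T17:54.csv") from the csv file name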
plot(
x_axis,
means,
stdevs,
i,
len(train_loss),
"Training Loss",
f[len("train_loss") + 1 : -len(":2022-03-24T17:54.csv")],
"upper right",
x_label,
)
min_tlosses = []
for i, f in enumerate(test_loss):
filepath = os.path.join(path, f)
with open(filepath, "r") as inf:
results_csv = pd.read_csv(inf)
if global_epochs:
rounds = results_csv["rounds"].iloc[0]
print("Rounds: ", rounds)
results_cr = results_csv[results_csv.rounds <= epochs * rounds]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = (
results_cr["rounds"].to_numpy() / rounds
) # list(np.arange(0, len(means), 1))
x_label = "global epochs"
else:
results_cr = results_csv[results_csv.rounds <= epochs]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = results_cr["rounds"].to_numpy()
x_label = "communication rounds"
print("x axis:", x_axis)
min_tlosses.append(np.min(means))
# Plot Testing loss
plt.figure(2)
plot(
x_axis,
means,
stdevs,
i,
len(test_loss),
"Testing Loss",
f[len("test_loss") + 1 : -len(":2022-03-24T17:54.csv")],
"upper right",
x_label,
)
max_taccs = []
for i, f in enumerate(test_acc):
filepath = os.path.join(path, f)
with open(filepath, "r") as inf:
results_csv = pd.read_csv(inf)
if global_epochs:
rounds = results_csv["rounds"].iloc[0]
print("Rounds: ", rounds)
results_cr = results_csv[results_csv.rounds <= epochs * rounds]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = (
results_cr["rounds"].to_numpy() / rounds
) # list(np.arange(0, len(means), 1))
x_label = "global epochs"
else:
results_cr = results_csv[results_csv.rounds <= epochs]
means = results_cr["mean"].to_numpy()
stdevs = results_cr["std"].to_numpy()
x_axis = results_cr["rounds"].to_numpy()
x_label = "communication rounds"
max_taccs.append(np.max(means))
# Plot Testing Accuracy
plt.figure(3)
plot(
x_axis,
means,
stdevs,
i,
len(test_acc),
"Testing Accuracy",
f[len("test_acc") + 1 : -len(":2022-03-24T17:54.csv")],
"lower right",
x_label,
)
names_loss = [
f[len("train_loss") + 1 : -len(":2022-03-24T17:54.csv")] for f in train_loss
]
names_acc = [
f[len("test_acc") + 1 : -len(":2022-03-24T17:54.csv")] for f in test_acc
]
print(names_loss)
print(names_acc)
pf = pd.DataFrame(
{
"test_accuracy": max_taccs,
"test_losses": min_tlosses,
"train_losses": min_losses,
},
names_loss,
)
pf = pf.sort_values(["test_accuracy"], axis=0, ascending=False)
pf.to_csv(os.path.join(path, "best_results.csv"))
plt.figure(1)
plt.savefig(os.path.join(path, "ge_train_loss.png"), dpi=300)
plt.figure(2)
plt.savefig(os.path.join(path, "ge_test_loss.png"), dpi=300)
plt.figure(3)
plt.savefig(os.path.join(path, "ge_test_acc.png"), dpi=300)
if __name__ == "__main__":
assert len(sys.argv) == 4
# The args are:
# 1: the folder with the csv files,
# 2: the number of epochs / comm rounds to plot for,
# 3: True/False with True meaning plot global epochs and False plot communication rounds
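# Hypothetical invocation (file and path names assumed): python3 plot_csv.py ../eval/data 80 True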
print(sys.argv[1], sys.argv[2], sys.argv[3])
plot_results(sys.argv[1], sys.argv[2], sys.argv[3])
#!/bin/bash
script_path=$(realpath $(dirname $0))
decpy_path=~/Gitlab/decentralizepy/eval
# Working directory, where config files are read from and logs are written.
decpy_path=/mnt/nfs/$(whoami)/decpy_workingdir
cd $decpy_path
env_python=~/miniconda3/envs/decpy/bin/python3
graph=96_regular.edges
original_config=epoch_configs/config_celeba.ini
config_file=/tmp/config.ini
procs_per_machine=16
machines=6
iterations=200
test_after=10
eval_file=testing.py
# Python interpreter
env_python=python3
# File regular_16.txt is available in /tutorial
graph=$decpy_path/regular_16.txt
# File config_celeba_sharing.ini is available in /tutorial
# In this config file, change addresses_filepath to correspond to your list of machines (example in /tutorial/ip.json)
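# (the addresses file is assumed to map machine ids to IP addresses, e.g. {"0": "10.0.0.1", "1": "10.0.0.2"})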
original_config=$decpy_path/config_celeba_sharing.ini
# Local config file
config_file=/tmp/$(basename $original_config)
# Python script to be executed
eval_file=$script_path/testingPeerSampler.py
# General parameters
procs_per_machine=8
machines=2
iterations=5
test_after=2
log_level=INFO
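# The next line derives this machine's id by looking up the local IP address (interface ens785)
# in the machine list referenced by addresses_filepath in the config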
m=`cat $(grep addresses_filepath $original_config | awk '{print $3}') | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
echo M is $m
log_dir=$(date '+%Y-%m-%dT%H:%M')/machine$m
mkdir -p $log_dir
# Copy and manipulate the local config file
cp $original_config $config_file
# echo "alpha = 0.10" >> $config_file
$env_python $eval_file -ro 0 -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
\ No newline at end of file
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level -wsd $log_dir
#!/bin/bash
# Documentation
# Note: documentation was not written for this run file, so actual behaviour may differ
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use Adam.
# For SGD these need to be changed manually
# - The script will set '--test_after' and '--train_evaluate_after' to comm_rounds_per_global_epoch, i.e., the evaluation
# on the train set and on the test set is carried out every global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
# relevant for Adams and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the data sets size.
#
# Known issues:
# - If the script is started at the very end of a minute then there is a chance that two folders are created as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=6_star.edges
config_file=~/tmp/config.ini
procs_per_machine=6
machines=1
global_epochs=20
eval_file=testing.py
log_level=INFO
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
# Base configs for which the grid search is done
tests=("step_configs/config_celeba_synchronous.ini")
# Learning rates
lr="0.001"
# Batch size
batchsize="8"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="2000"
# testing every x communication rounds
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
# Celeba has 63741 samples
# Reddit has 70642
# Femnist 734463
# Shakespeares 3678451
dataset_size=63741
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
random_seeds=("90" "91" "92" "93" "94")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down we need to change the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
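# Worked example with the values set above (illustrative only):
#   procs = 6, samples_per_user = 63741 / 6 = 10623, batches_per_epoch = 10623 / 8 = 1327
#   comm_rounds_per_global_epoch (2000) exceeds batches_per_epoch, so batches_per_comm_round = 1,
#   iterations = 20 * 1327 = 26540 and test_after = 26540 / 20 = 1327 (one evaluation per global epoch)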
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir=$nfs_home$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')/machine$m
echo results are stored in: $log_dir
mkdir -p $log_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
\ No newline at end of file
......@@ -18,10 +18,8 @@
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are writen to use Adam.
# For SGD these need to be changed manually
# - The script will set '--test_after' and '--train_evaluate_after' to comm_rounds_per_global_epoch, i.e., the eavaluation
# on the train set and on the test set is carried out every global epoch.
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that it happens at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
# relevant for Adams and other optimizers with internal state)
#
......@@ -39,7 +37,7 @@ decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=192_regular.edges
graph=96_regular.edges
config_file=~/tmp/config.ini
procs_per_machine=16
machines=6
......@@ -62,7 +60,6 @@ batchsize=("8" "16")
comm_rounds_per_global_epoch=("1" "5" "10")
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
# Celeba has 63741 samples
dataset_size=63741
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
......@@ -86,6 +83,8 @@ do
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $r); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $r)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
for lr in "${lrs[@]}"
do
for i in "${tests[@]}"
......@@ -102,7 +101,7 @@ do
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $b
$env_python $eval_file -ro 0 -tea $r -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $r -cf $config_file -ll $log_level
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 1
echo end of sleep
......
#!/bin/bash
# Documentation
# Note: documentation was not written for this run file, so actual behaviour may differ
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use Adam.
# For SGD these need to be changed manually
# - The script will set '--test_after' and '--train_evaluate_after' to comm_rounds_per_global_epoch, i.e., the evaluation
# on the train set and on the test set is carried out every global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
# relevant for Adams and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the data sets size.
#
# Known issues:
# - If the script is started at the very end of a minute then there is a chance that two folders are created as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
config_file=~/tmp/config.ini
procs_per_machine=16
machines=6
global_epochs=150
eval_file=testingPeerSampler.py
log_level=INFO
ip_machines=$nfs_home/$logs_subfolder/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
tests="$nfs_home/$logs_subfolder/config.ini"
#tests=("$nfs_home/$logs_subfolder/config_cifar_sharing.ini" "$nfs_home/$logs_subfolder/config_cifar_partialmodel.ini" "$nfs_home/$logs_subfolder/config_cifar_topkacc.ini" "$nfs_home/$logs_subfolder/config_cifar_topkaccRandomAlpha.ini" "$nfs_home/$logs_subfolder/config_cifar_subsampling.ini" "$nfs_home/$logs_subfolder/config_cifar_wavelet.ini" "$nfs_home/$logs_subfolder/config_cifar_waveletRandomAlpha.ini")
#tests=("$nfs_home/$logs_subfolder/config_cifar_partialmodel.ini" "$nfs_home/$logs_subfolder/config_cifar_topkacc.ini" "$nfs_home/$logs_subfolder/config_cifar_topkaccRandomAlpha.ini" "$nfs_home/$logs_subfolder/config_cifar_subsampling.ini" "$nfs_home/$logs_subfolder/config_cifar_wavelet.ini" "$nfs_home/$logs_subfolder/config_cifar_waveletRandomAlpha.ini")
#tests=("$nfs_home/$logs_subfolder/config_cifar_subsampling.ini" "$nfs_home/$logs_subfolder/config_cifar_sharing.ini" "$nfs_home/$logs_subfolder/config_cifar_waveletRandomAlpha.ini")
#tests=("$nfs_home/$logs_subfolder/config_cifar_waveletRandomAlpha.ini")
# Learning rates
lr="0.01"
# Batch size
batchsize="8"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="20"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
# Celeba has 63741 samples
# Reddit has 70642
# Femnist 734463
# Shakespeares 3678451
dataset_size=50000
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
# random_seeds=("90" "91" "92" "93" "94")
random_seeds=("94")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down we need to change the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
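# Worked example with the values set above (illustrative only): procs = 96, samples_per_user = 520,
# batches_per_epoch = 65, batches_per_comm_round = floor(65 / 20) = 3, so the corrected number of
# iterations is roughly 150 * 65 / 3, about 3250, and test_after is about 21 (one evaluation per global epoch)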
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
#log_dir_base=$nfs_home$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
log_dir_base=$nfs_home/$logs_subfolder/lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
graph=$nfs_home/decentralizepy/eval/96_regular.edges
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file COMMUNICATION offset 10720
# $env_python $eval_file -cte 0 -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
#!/bin/bash
# Documentation
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that it happens at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
# relevant for Adams and other optimizers with internal state)
#
# Addapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the data sets size.
#
# Known issues:
# - If the script is started at the very end of a minute then there is a change that two folders are created as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=96_regular.edges
config_file=~/tmp/config_celeba_sharing.ini
procs_per_machine=16
machines=6
global_epochs=150
eval_file=testingFederated.py
log_level=INFO
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
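# Determine this machine's id from the IP bound to interface 'ens785'. This assumes that
# ip_addr_6Machines.json maps machine ids to IP addresses (e.g. entries like "0": "10.90.41.127"),
# so grepping for the local IP and cutting on double quotes yields the id. Adjust the interface
# name if your cluster uses a different one.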
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
#tests=("step_configs/config_celeba_sharing.ini" "step_configs/config_celeba_partialmodel.ini" "step_configs/config_celeba_topkacc.ini" "step_configs/config_celeba_subsampling.ini" "step_configs/config_celeba_wavelet.ini")
tests=("step_configs/config_celeba_sharing.ini")
# Learning rates
lr="0.001"
# Batch size
batchsize="8"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="10"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
dataset_size=63741
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
#random_seeds=("90" "91" "92" "93" "94")
random_seeds=("90")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
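# Equivalently, iterations = global_epochs * min(batches_per_epoch, comm_rounds_per_global_epoch):
# the number of communication rounds per epoch is effectively capped at one round per batch.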
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down, we need to adjust the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir_base=$nfs_home/$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
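# After the edits above, the relevant part of the copied config should look roughly like this
# (values correspond to the defaults in this script; the remaining keys come from the base
# step_configs file):
#   [OPTIMIZER_PARAMS]
#   lr = 0.001
#   [TRAIN_PARAMS]
#   rounds = <batches_per_comm_round>
#   batch_size = 8
#   [DATASET]
#   random_seed = 90
#   [COMMUNICATION]
#   addresses_filepath = <nfs_home>/configs/ip_addr_6Machines.json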
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
\ No newline at end of file
#!/bin/bash
# Documentation
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that they happen at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
#   relevant for Adam and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the dataset's size.
#
# Known issues:
# - If the script is started at the very end of a minute, there is a chance that two folders are created, as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=96_regular.edges
config_file=~/tmp/config.ini
procs_per_machine=16
machines=6
global_epochs=100
eval_file=testingFederated.py
log_level=INFO
working_rate=0.1
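# working_rate is forwarded to the eval script via '-wr'; it is presumed to be the fraction of the
# 96 processes that actively participate in each federated round (here 0.1, i.e. ~10 clients).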
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
tests=("step_configs/config_cifar_sharing.ini")
# Learning rates
lr="0.01"
# Batch size
batchsize="8"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="20"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
dataset_size=50000
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
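# With the defaults above this is plain integer division: 50000 / 96 = 520 samples per user; the
# remainder is dropped.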
# random_seeds for which to rerun the experiments
random_seeds=("90")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down, we need to adjust the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir_base=$nfs_home/$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level -wr $working_rate
echo $i is done
sleep 200
echo end of sleep
done
done
#
#!/bin/bash
# Documentation
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that they happen at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
#   relevant for Adam and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the dataset's size.
#
# Known issues:
# - If the script is started at the very end of a minute, there is a chance that two folders are created, as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=96_regular.edges
config_file=~/tmp/config_femnist_sharing.ini
procs_per_machine=16
machines=6
global_epochs=80
eval_file=testingFederated.py
log_level=INFO
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
#tests=("step_configs/config_femnist_sharing.ini" "step_configs/config_femnist_partialmodel.ini" "step_configs/config_femnist_topkacc.ini" "step_configs/config_femnist_subsampling.ini" "step_configs/config_femnist_wavelet.ini")
tests=("step_configs/config_femnist_sharing.ini")
# Learning rates
lr="0.01"
# Batch size
batchsize="16"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="10"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
dataset_size=734463
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
#random_seeds=("90" "91" "92" "93" "94")
random_seeds=("90")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
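# With the defaults above: samples_per_user = 734463 / 96 = 7650 and batches_per_epoch = 7650 / 16 = 478
# (both integer divisions), i.e. one global epoch corresponds to 478 mini-batches per process.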
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down, we need to adjust the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir_base=$nfs_home/$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
\ No newline at end of file
#!/bin/bash
# Documentation
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that they happen at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
#   relevant for Adam and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the dataset's size.
#
# Known issues:
# - If the script is started at the very end of a minute, there is a chance that two folders are created, as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=96_regular.edges
config_file=~/tmp/config_reddit_sharing.ini
procs_per_machine=16
machines=6
global_epochs=50
eval_file=testingFederated.py
log_level=INFO
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
#tests=("step_configs/config_reddit_sharing.ini" "step_configs/config_reddit_partialmodel.ini" "step_configs/config_reddit_topkacc.ini" "step_configs/config_reddit_subsampling.ini" "step_configs/config_reddit_wavelet.ini")
tests=("step_configs/config_reddit_sharing.ini")
# Learning rates
lr="1"
# Batch size
batchsize="16"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="10"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
dataset_size=70642
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
#random_seeds=("90" "91" "92" "93" "94")
random_seeds=("90")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down, we need to adjust the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
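# test_after = new_iterations / global_epochs is the number of iterations that make up one corrected
# global epoch; passing it to both '--test_after' and '--train_evaluate_after' makes testing and
# train-set evaluation happen once per global epoch, as stated in the notes above.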
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir_base=$nfs_home/$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
\ No newline at end of file
#!/bin/bash
# Documentation
# This bash file takes three inputs. The first argument (nfs_home) is the path to the nfs home directory.
# The second one (python_bin) is the path to the python bin folder.
# The last argument (logs_subfolder) is the path to the logs folder with respect to the nfs home directory.
#
# The nfs home directory should contain the code of this framework stored in $nfs_home/decentralizepy and a folder
# called configs which contains the file 'ip_addr_6Machines.json'
# The python bin folder needs to include all the dependencies of this project including crudini.
# The results will be stored in $nfs_home/$logs_subfolder
# Each of the experiments will be stored in its own folder inside the logs_subfolder. The folder of the experiment
# starts with the last part of the config name, i.e., for 'config_celeba_topkacc.ini' it will start with topkacc.
# The name further includes the learning rate, rounds and batchsize as well as the exact date at which the experiment
# was run.
# Example: ./run_grid.sh /mnt/nfs/wigger /mnt/nfs/wigger/anaconda3/envs/sacs39/bin /logs/celeba
#
# Additional requirements:
# Each node needs a folder called 'tmp' in the user's home directory
#
# Note:
# - The script does not change the optimizer. All configs are written to use SGD.
# - The script will set '--test_after' and '--train_evaluate_after' such that they happen at the end of a global epoch.
# - The '--reset_optimizer' option is set to 0, i.e., the optimizer is not reset after a communication round (only
#   relevant for Adam and other optimizers with internal state)
#
# Adapting the script to other datasets:
# Change the variable 'dataset_size' to reflect the dataset's size.
#
# Known issues:
# - If the script is started at the very end of a minute, there is a chance that two folders are created, as not all
# machines may start running the script at the exact same moment.
nfs_home=$1
python_bin=$2
logs_subfolder=$3
decpy_path=$nfs_home/decentralizepy/eval
cd $decpy_path
env_python=$python_bin/python3
graph=96_regular.edges
config_file=~/tmp/config_shakespeare_sharing.ini
procs_per_machine=16
machines=6
global_epochs=100
eval_file=testingFederated.py
log_level=INFO
ip_machines=$nfs_home/configs/ip_addr_6Machines.json
m=`cat $ip_machines | grep $(/sbin/ifconfig ens785 | grep 'inet ' | awk '{print $2}') | cut -d'"' -f2`
export PYTHONFAULTHANDLER=1
# Base configs for which the grid search is done
#tests=("step_configs/config_shakespeare_sharing.ini" "step_configs/config_shakespeare_partialmodel.ini" "step_configs/config_shakespeare_topkacc.ini" "step_configs/config_shakespeare_subsampling.ini" "step_configs/config_shakespeare_wavelet.ini")
tests=("step_configs/config_shakespeare_sharing.ini")
# Learning rates
lr="0.5"
# Batch size
batchsize="16"
# The number of communication rounds per global epoch
comm_rounds_per_global_epoch="25"
procs=`expr $procs_per_machine \* $machines`
echo procs: $procs
dataset_size=97545 # sub96, for sub: 678696
# Calculating the number of samples that each user/proc will have on average
samples_per_user=`expr $dataset_size / $procs`
echo samples per user: $samples_per_user
# random_seeds for which to rerun the experiments
#random_seeds=("90" "91" "92" "93" "94")
random_seeds=("90")
# random_seed = 97
echo batchsize: $batchsize
echo communication rounds per global epoch: $comm_rounds_per_global_epoch
# calculating how many batches there are in a global epoch for each user/proc
batches_per_epoch=$(($samples_per_user / $batchsize))
echo batches per global epoch: $batches_per_epoch
# the number of iterations in $global_epochs global epochs
iterations=$($env_python -c "from math import floor; print($batches_per_epoch * $global_epochs) if $comm_rounds_per_global_epoch >= $batches_per_epoch else print($global_epochs * $comm_rounds_per_global_epoch)")
echo iterations: $iterations
# calculating the number of batches each user/proc uses per communication step (The actual number may be a float, which we round down)
batches_per_comm_round=$($env_python -c "from math import floor; x = floor($batches_per_epoch / $comm_rounds_per_global_epoch); print(1 if x==0 else x)")
# since the batches per communication round were rounded down, we need to adjust the number of iterations to reflect that
new_iterations=$($env_python -c "from math import floor; tmp = floor($batches_per_epoch / $comm_rounds_per_global_epoch); x = 1 if tmp == 0 else tmp; y = floor((($batches_per_epoch / $comm_rounds_per_global_epoch)/x)*$iterations); print($iterations if y<$iterations else y)")
echo batches per communication round: $batches_per_comm_round
echo corrected iterations: $new_iterations
test_after=$(($new_iterations / $global_epochs))
echo test after: $test_after
for i in "${tests[@]}"
do
for seed in "${random_seeds[@]}"
do
echo $i
IFS='_' read -ra NAMES <<< $i
IFS='.' read -ra NAME <<< ${NAMES[-1]}
log_dir_base=$nfs_home/$logs_subfolder/${NAME[0]}:lr=$lr:r=$comm_rounds_per_global_epoch:b=$batchsize:$(date '+%Y-%m-%dT%H:%M')
echo results are stored in: $log_dir_base
log_dir=$log_dir_base/machine$m
mkdir -p $log_dir
weight_store_dir=$log_dir_base/weights
mkdir -p $weight_store_dir
cp $i $config_file
# changing the config files to reflect the values of the current grid search state
$python_bin/crudini --set $config_file COMMUNICATION addresses_filepath $ip_machines
$python_bin/crudini --set $config_file OPTIMIZER_PARAMS lr $lr
$python_bin/crudini --set $config_file TRAIN_PARAMS rounds $batches_per_comm_round
$python_bin/crudini --set $config_file TRAIN_PARAMS batch_size $batchsize
$python_bin/crudini --set $config_file DATASET random_seed $seed
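# Flag names below are inferred from the variables passed to them: -ro reset_optimizer,
# -tea train_evaluate_after, -ta test_after, -ld log_dir, -wsd weight_store_dir, -mid machine id,
# -ps processes per machine, -ms number of machines, -is iterations, -gf graph file,
# -cf config file, -ll log level.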
$env_python $eval_file -ro 0 -tea $test_after -ld $log_dir -wsd $weight_store_dir -mid $m -ps $procs_per_machine -ms $machines -is $new_iterations -gf $graph -ta $test_after -cf $config_file -ll $log_level
echo $i is done
sleep 200
echo end of sleep
done
done
#
\ No newline at end of file
......@@ -10,7 +10,7 @@ sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = Adam
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
......
......@@ -10,7 +10,7 @@ sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = Adam
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
......
......@@ -10,7 +10,7 @@ sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = Adam
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
......
......@@ -10,7 +10,7 @@ sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = Adam
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
......@@ -31,3 +31,4 @@ addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.PartialModel
sharing_class = PartialModel
alpha = 0.1
\ No newline at end of file
......@@ -10,7 +10,7 @@ sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = Adam
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
......