Commit 0bc45f9b authored by Rishi Sharma

Reset optimizer state after iterations - fix

parent e27d584b
%% Cell type:code id: tags:
```
from datasets.Femnist import Femnist
from graphs import SmallWorld
from collections import defaultdict
import os
import json
import numpy as np
```
%% Cell type:code id: tags:
```
a = Femnist
a
```
%% Cell type:code id: tags:
```
b = SmallWorld(6, 2, 2, 1)
```
%% Cell type:code id: tags:
```
b.adj_list
```
%% Cell type:code id: tags:
```
for i in range(12):
    print(b.neighbors(i))
```
%% Cell type:code id: tags:
```
clients = []
```
%% Cell type:code id: tags:
```
num_samples = []
data = defaultdict(lambda : None)
```
%% Cell type:code id: tags:
```
datadir = "./leaf/data/femnist/data/train"
files = os.listdir(datadir)
total_users=0
users = set()
```
%% Cell type:code id: tags:
```
files = os.listdir(datadir)[0:1]
```
%% Cell type:code id: tags:
```
for f in files:
    file_path = os.path.join(datadir, f)
    print(file_path)
    with open(file_path, 'r') as inf:
        client_data = json.load(inf)
    current_users = len(client_data['users'])
    print("Current_Users: ", current_users)
    total_users += current_users
    users.update(client_data['users'])
print("total_users: ", total_users)
print("total_users: ", len(users))
print(client_data['user_data'].keys())
print(np.array(client_data['user_data']['f3408_47']['x']).shape)
print(np.array(client_data['user_data']['f3408_47']['y']).shape)
print(np.array(client_data['user_data']['f3327_11']['x']).shape)
print(np.array(client_data['user_data']['f3327_11']['y']).shape)
print(np.unique(np.array(client_data['user_data']['f3327_11']['y'])))
```
%% Cell type:code id: tags:
```
file = 'run.py'
with open(file, 'r') as inf:
    print(inf.readline().strip())
    print(inf.readlines())
```
%% Cell type:code id: tags:
```
def f(l):
    l[2] = 'c'
a = ['a', 'a', 'a']
print(a)
f(a)
print(a)
```
%% Cell type:code id: tags:
```
l = ['a', 'b', 'c']
print(l[:-1])
```
%% Cell type:code id: tags:
```
from localconfig import LocalConfig
def read_ini(file_path):
    config = LocalConfig(file_path)
    for section in config:
        print("Section: ", section)
        for key, value in config.items(section):
            print((key, value))
    print(dict(config.items('DATASET')))
    return config
config = read_ini("config.ini")
for section in config:
    print(section)
#d = dict(config.sections())
```
%% Cell type:code id: tags:
```
def func(a = 1, b = 2, c = 3):
    print(a + b + c)
l = [3, 5, 7]
func(*l)
```
%% Cell type:code id: tags:
```
from torch import multiprocessing as mp
# torch's spawn passes the process index as the first positional argument and has no kwargs parameter
mp.spawn(fn = func, nprocs = 2, args = (5, 6))
```
%% Cell type:code id: tags:
```
l = '[0.4, 0.2, 0.3, 0.1]'
type(eval(l))
```
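%% Cell type:markdown id: tags:
Aside (not part of the original notebook): `ast.literal_eval` parses the same list literal as the `eval` call above without executing arbitrary code, which is safer when the string comes from a config file.
%% Cell type:code id: tags:
```
import ast

l = '[0.4, 0.2, 0.3, 0.1]'
print(type(ast.literal_eval(l)))  # <class 'list'>
```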
%% Cell type:code id: tags:
```
from decentralizepy.datasets.Femnist import Femnist
f1 = Femnist(0, 1, 'leaf/data/femnist/data/train')
ts = f1.get_trainset(1)
for data, target in ts:
    print(data)
    break
```
%% Cell type:code id: tags:
```
from decentralizepy.datasets.Femnist import Femnist
from decentralizepy.graphs.SmallWorld import SmallWorld
from decentralizepy.mappings.Linear import Linear
f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])
g = SmallWorld(4, 1, 0.5)
l = Linear(2, 2)
```
%% Cell type:code id: tags:
```
from decentralizepy.node.Node import Node
from torch import multiprocessing as mp
import logging
n1 = Node(0, l, g, f, "./results", logging.DEBUG)
n2 = Node(1, l, g, f, "./results", logging.DEBUG)
# mp.spawn(fn = Node, nprocs = 2, args=[l,g,f])
```
%% Cell type:code id: tags:
```
from testing import f
```
%% Cell type:code id: tags:
```
from torch import multiprocessing as mp
import torch
m1 = torch.nn.Linear(1,1)
o1 = torch.optim.SGD(m1.parameters(), 0.6)
print(m1)
mp.spawn(fn = f, nprocs = 2, args=[m1, o1])
```
%% Cell type:markdown id: tags:
%% Cell type:code id: tags:
```
o1.param_groups
```
%% Cell type:code id: tags:
```
with torch.no_grad():
    o1.param_groups[0]["params"][0].copy_(torch.zeros(1,))
```
%% Cell type:code id: tags:
```
o1.param_groups
```
%% Cell type:code id: tags:
```
m1.state_dict()
```
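%% Cell type:markdown id: tags:
Aside (not part of the original notebook): the commit message is about resetting optimizer state, and the cells above inspect `param_groups` and `state_dict`. A minimal sketch, assuming plain torch, of why re-instantiating an optimizer over the same parameters acts as a reset: per-parameter buffers such as Adam's moment estimates live in `optimizer.state` and are empty in a freshly constructed optimizer.
%% Cell type:code id: tags:
```
import torch

m = torch.nn.Linear(1, 1)
opt = torch.optim.Adam(m.parameters(), lr=0.01)

# One dummy step populates Adam's per-parameter state (exp_avg, exp_avg_sq).
m(torch.ones(1, 1)).sum().backward()
opt.step()
print(len(opt.state))  # > 0: state buffers now exist

# Re-instantiating the optimizer over the same parameters discards that state.
opt = torch.optim.Adam(m.parameters(), lr=0.01)
print(len(opt.state))  # 0: no momentum/variance history
```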
%% Cell type:code id: tags:
```
import torch
loss = getattr(torch.nn.functional, 'nll_loss')
```
%% Cell type:code id: tags:
```
loss
```
%% Cell type:code id: tags:
```
%matplotlib inline
from decentralizepy.node.Node import Node
from decentralizepy.graphs.SmallWorld import SmallWorld
from decentralizepy.graphs.Graph import Graph
from decentralizepy.mappings.Linear import Linear
from torch import multiprocessing as mp
import torch
import logging
from localconfig import LocalConfig
def read_ini(file_path):
    config = LocalConfig(file_path)
    for section in config:
        print("Section: ", section)
        for key, value in config.items(section):
            print((key, value))
    print(dict(config.items('DATASET')))
    return config
config = read_ini("config.ini")
my_config = dict()
for section in config:
    my_config[section] = dict(config.items(section))
#f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])
g = Graph()
g.read_graph_from_file("36_nodes.edges", "edges")
l = Linear(1, 36)
#Node(0, 0, l, g, my_config, 20, "results", logging.DEBUG)
mp.spawn(fn = Node, nprocs = g.n_procs, args=[0,l,g,my_config,20,"results",logging.INFO])
# mp.spawn(fn = Node, args = [l, g, config, 10, "results", logging.DEBUG], nprocs=2)
```
%% Output
Section: GRAPH
('package', 'decentralizepy.graphs.SmallWorld')
('graph_class', 'SmallWorld')
Section: DATASET
('dataset_package', 'decentralizepy.datasets.Femnist')
('dataset_class', 'Femnist')
('model_class', 'CNN')
('n_procs', 36)
('train_dir', 'leaf/data/femnist/per_user_data/train')
('test_dir', 'leaf/data/femnist/data/test')
('sizes', '')
Section: OPTIMIZER_PARAMS
('optimizer_package', 'torch.optim')
('optimizer_class', 'Adam')
('lr', 0.01)
Section: TRAIN_PARAMS
('training_package', 'decentralizepy.training.Training')
('training_class', 'Training')
('epochs_per_round', 1)
('batch_size', 1024)
('shuffle', True)
('loss_package', 'torch.nn')
('loss_class', 'CrossEntropyLoss')
Section: COMMUNICATION
('comm_package', 'decentralizepy.communication.TCP')
('comm_class', 'TCP')
('addresses_filepath', 'ip_addr.json')
Section: SHARING
('sharing_package', 'decentralizepy.sharing.Sharing')
('sharing_class', 'Sharing')
{'dataset_package': 'decentralizepy.datasets.Femnist', 'dataset_class': 'Femnist', 'model_class': 'CNN', 'n_procs': 36, 'train_dir': 'leaf/data/femnist/per_user_data/train', 'test_dir': 'leaf/data/femnist/data/test', 'sizes': ''}
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
/tmp/ipykernel_2255475/3991202644.py in <module>
27 #f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])
28 g = Graph()
---> 29 g.read_graph_from_file("36_nodes.edges", "edges")
30 l = Linear(1, 36)
31
~/Gitlab/decentralizepy/src/decentralizepy/graphs/Graph.py in read_graph_from_file(self, file, type, force_connect)
70 for line in lines:
71 x, y = map(int, line.strip().split())
---> 72 self.__insert_edge__(x, y)
73 elif type == "adjacency":
74 node_id = 0
~/Gitlab/decentralizepy/src/decentralizepy/graphs/Graph.py in __insert_edge__(self, x, y)
39 The destination vertex
40 """
---> 41 self.adj_list[x].add(y)
42 self.adj_list[y].add(x)
43
IndexError: list index out of range
%% Cell type:code id: tags:
```
```
%% Cell type:code id: tags:
```
from decentralizepy.mappings.Linear import Linear
from testing import f
from torch import multiprocessing as mp
l = Linear(1, 2)
mp.spawn(fn = f, nprocs = 2, args = [0, 2, "ip_addr.json", l])
```
%% Cell type:code id: tags:
```
from decentralizepy.datasets.Femnist import Femnist
f = Femnist()
f.file_per_user('leaf/data/femnist/data/train','leaf/data/femnist/per_user_data/train')
```
%% Cell type:code id: tags:
```
a = set()
a.update([2, 3, 4, 5])
```
%% Cell type:code id: tags:
```
a
```
%% Output
{2, 3, 4, 5}
%% Cell type:code id: tags:
```
print(*a)
```
%% Output
2 3 4 5
%% Cell type:code id: tags:
```
from decentralizepy.graphs.SmallWorld import SmallWorld
s = SmallWorld(36, 2, .5)
s.write_graph_to_file('36_nodes.edges')
```
%% Cell type:code id: tags:
```
import sys
sys.argv
```
%% Output
['/home/risharma/miniconda3/envs/decpy/lib/python3.9/site-packages/ipykernel_launcher.py',
'--ip=127.0.0.1',
'--stdin=9008',
'--control=9006',
'--hb=9005',
'--Session.signature_scheme="hmac-sha256"',
'--Session.key=b"eac5d2f8-c460-45f1-a268-1e4b46a6efd6"',
'--shell=9007',
'--transport="tcp"',
'--iopub=9009',
'--f=/tmp/tmp-21212479paJaUBJBN84.json']
%% Cell type:code id: tags:
```
```
...
@@ -160,6 +160,7 @@ class Node:
             self.sharing.step()
             self.optimizer = optimizer_class(self.model.parameters(), **optimizer_params)  # Reset optimizer state
+            self.trainer.reset_optimizer(self.optimizer)
             rounds_to_test -= 1
...
@@ -43,6 +43,10 @@ class Training:
         plt.imshow(np.transpose(npimg, (1, 2, 0)))
         plt.show()
+    def reset_optimizer(self, optimizer):
+        self.optimizer = optimizer
     def train(self, dataset):
         """
         One training iteration
...
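A minimal sketch of the pattern this commit applies (the round loop, `Trainer`, `optimizer_class`, and `optimizer_params` below are simplified stand-ins for the decentralizepy objects in the diff above, not the real API): after each sharing step the node rebuilds the optimizer over the current model parameters and hands it to the trainer via `reset_optimizer`, so stale per-parameter state from the previous round is dropped.

```
import torch

class Trainer:
    """Simplified stand-in for decentralizepy's Training class."""
    def __init__(self, model, optimizer):
        self.model = model
        self.optimizer = optimizer

    def reset_optimizer(self, optimizer):
        # Mirrors Training.reset_optimizer in the diff: swap in the fresh optimizer.
        self.optimizer = optimizer

model = torch.nn.Linear(1, 1)
optimizer_class, optimizer_params = torch.optim.Adam, {"lr": 0.01}
trainer = Trainer(model, optimizer_class(model.parameters(), **optimizer_params))

for round_ in range(3):
    # ... local training and the sharing step would go here ...
    optimizer = optimizer_class(model.parameters(), **optimizer_params)  # Reset optimizer state
    trainer.reset_optimizer(optimizer)
```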