Compare revisions
Showing 632 additions and 16 deletions
[DATASET]
dataset_package = decentralizepy.datasets.CIFAR10
dataset_class = CIFAR10
model_class = LeNet
train_dir = /mnt/nfs/shared/CIFAR
test_dir = /mnt/nfs/shared/CIFAR
; python list of fractions below
sizes =
random_seed = 99
partition_niid = True
shards = 4
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
training_package = decentralizepy.training.Training
training_class = Training
rounds = 65
full_epochs = False
batch_size = 8
shuffle = True
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
comm_package = decentralizepy.communication.TCP
comm_class = TCP
addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.PartialModel
sharing_class = PartialModel
alpha = 0.5
accumulation = True
accumulate_averaging_changes = True
\ No newline at end of file
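
Every section above follows the same convention: a *_package key naming a Python module and a *_class key naming a class inside it. Below is a minimal sketch of how such an INI file could be turned into live objects using only the standard library; the filename and the load_class helper are illustrative, and decentralizepy's actual loader may differ.

import configparser
import importlib

def load_class(config, section, prefix):
    # Resolve e.g. dataset_package / dataset_class into a Python class.
    module_name = config.get(section, prefix + "_package")
    class_name = config.get(section, prefix + "_class")
    return getattr(importlib.import_module(module_name), class_name)

config = configparser.ConfigParser()
config.read("config_CIFAR_partialmodel.ini")  # hypothetical filename

dataset_cls = load_class(config, "DATASET", "dataset")               # -> CIFAR10
optimizer_cls = load_class(config, "OPTIMIZER_PARAMS", "optimizer")  # -> torch.optim.SGD
lr = config.getfloat("OPTIMIZER_PARAMS", "lr")                       # -> 0.001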
[DATASET]
dataset_package = decentralizepy.datasets.CIFAR10
dataset_class = CIFAR10
model_class = LeNet
train_dir = /mnt/nfs/shared/CIFAR
test_dir = /mnt/nfs/shared/CIFAR
; python list of fractions below
sizes =
random_seed = 99
partition_niid = True
shards = 4
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = SGD
lr = 0.001
[TRAIN_PARAMS]
training_package = decentralizepy.training.Training
training_class = Training
rounds = 65
full_epochs = False
batch_size = 8
shuffle = True
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
comm_package = decentralizepy.communication.TCP
comm_class = TCP
addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.Wavelet
sharing_class = Wavelet
change_based_selection = True
alpha = 0.5
wavelet = sym2
level = 4
accumulation = True
accumulate_averaging_changes = True
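
With change_based_selection = True, alpha = 0.5, wavelet = sym2, and level = 4, this sharing mode plausibly transforms the flattened model into the wavelet domain and shares only the half of the coefficients that changed most since the last round. A hedged sketch with PyWavelets follows; the function name and return convention are illustrative, not decentralizepy's API.

import numpy as np
import pywt  # PyWavelets

def select_wavelet_coeffs(params, prev_flat, alpha=0.5, wavelet="sym2", level=4):
    # Decompose the flattened parameter vector into wavelet coefficients.
    coeffs = pywt.wavedec(params, wavelet, level=level)
    flat, _slices = pywt.coeffs_to_array(coeffs)
    # Rank coefficients by how much they changed since the previous round.
    change = np.abs(flat - prev_flat)
    k = max(1, int(alpha * flat.size))
    top = np.argpartition(change, -k)[-k:]
    # Only the top-alpha coefficients (indices and values) would be shared.
    return top, flat[top], flat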
[DATASET]
dataset_package = decentralizepy.datasets.Femnist
dataset_class = Femnist
random_seed = 97
model_class = CNN
train_dir = /mnt/nfs/shared/leaf/data/femnist/per_user_data/train
test_dir = /mnt/nfs/shared/leaf/data/femnist/data/test
; python list of fractions below
sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = SGD
lr = 0.001
# There are 734463 femnist samples
[TRAIN_PARAMS]
training_package = decentralizepy.training.Training
training_class = Training
rounds = 47
full_epochs = False
batch_size = 16
shuffle = True
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
comm_package = decentralizepy.communication.TCP
comm_class = TCP
addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.FFT
sharing_class = FFT
alpha = 0.1
change_based_selection = True
accumulation = True
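
The FFT variant reads analogously: with change_based_selection = True and alpha = 0.1, a node would share the 10% of frequency-domain coefficients whose values moved most since the last round. An assumed sketch using torch.fft, not the library's actual implementation:

import torch

def select_fft_coeffs(params, prev_spectrum, alpha=0.1):
    spectrum = torch.fft.rfft(params)          # frequency-domain view of the flat model
    change = (spectrum - prev_spectrum).abs()  # magnitude of change per coefficient
    k = max(1, int(alpha * spectrum.numel()))
    top = torch.topk(change, k).indices        # top-alpha most-changed coefficients
    return top, spectrum[top], spectrum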
@@ -9,12 +9,12 @@ sizes =
 [OPTIMIZER_PARAMS]
 optimizer_package = torch.optim
-optimizer_class = Adam
+optimizer_class = SGD
 lr = 0.001
 [TRAIN_PARAMS]
-training_package = decentralizepy.training.GradientAccumulator
-training_class = GradientAccumulator
+training_package = decentralizepy.training.Training
+training_class = Training
 rounds = 20
 full_epochs = False
 batch_size = 64
...
 [DATASET]
 dataset_package = decentralizepy.datasets.Femnist
 dataset_class = Femnist
+random_seed = 97
 model_class = CNN
-train_dir = /home/risharma/leaf/data/femnist/per_user_data/train
-test_dir = /home/risharma/leaf/data/femnist/data/test
+train_dir = /mnt/nfs/shared/leaf/data/femnist/per_user_data/train
+test_dir = /mnt/nfs/shared/leaf/data/femnist/data/test
 ; python list of fractions below
 sizes =
 [OPTIMIZER_PARAMS]
 optimizer_package = torch.optim
-optimizer_class = Adam
+optimizer_class = SGD
 lr = 0.001
 [TRAIN_PARAMS]
-training_package = decentralizepy.training.GradientAccumulator
-training_class = GradientAccumulator
-rounds = 20
+training_package = decentralizepy.training.Training
+training_class = Training
+rounds = 47
 full_epochs = False
-batch_size = 64
+batch_size = 16
 shuffle = True
 loss_package = torch.nn
 loss_class = CrossEntropyLoss
@@ -30,3 +31,4 @@ addresses_filepath = ip_addr_6Machines.json
 [SHARING]
 sharing_package = decentralizepy.sharing.PartialModel
 sharing_class = PartialModel
+alpha=0.1
 [DATASET]
 dataset_package = decentralizepy.datasets.Femnist
 dataset_class = Femnist
+random_seed = 97
 model_class = CNN
-train_dir = /home/risharma/leaf/data/femnist/per_user_data/train
-test_dir = /home/risharma/leaf/data/femnist/data/test
+train_dir = /mnt/nfs/shared/leaf/data/femnist/per_user_data/train
+test_dir = /mnt/nfs/shared/leaf/data/femnist/data/test
 ; python list of fractions below
 sizes =
 [OPTIMIZER_PARAMS]
 optimizer_package = torch.optim
-optimizer_class = Adam
+optimizer_class = SGD
 lr = 0.001
 [TRAIN_PARAMS]
 training_package = decentralizepy.training.Training
 training_class = Training
-rounds = 20
+rounds = 47
 full_epochs = False
-batch_size = 64
+batch_size = 16
 shuffle = True
 loss_package = torch.nn
 loss_class = CrossEntropyLoss
...
[DATASET]
dataset_package = decentralizepy.datasets.Femnist
dataset_class = Femnist
random_seed = 97
model_class = CNN
train_dir = /mnt/nfs/shared/leaf/data/femnist/per_user_data/train
test_dir = /mnt/nfs/shared/leaf/data/femnist/data/test
; python list of fractions below
sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = SGD
lr = 0.001
# There are 734463 femnist samples
[TRAIN_PARAMS]
training_package = decentralizepy.training.Training
training_class = Training
rounds = 47
full_epochs = False
batch_size = 16
shuffle = True
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
comm_package = decentralizepy.communication.TCP
comm_class = TCP
addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.SubSampling
sharing_class = SubSampling
alpha = 0.1
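
SubSampling with alpha = 0.1 suggests each node shares a uniformly random 10% of its parameters per round rather than a change-ranked subset. A minimal illustrative sketch (names assumed):

import torch

def subsample(params, alpha=0.1, generator=None):
    flat = params.reshape(-1)
    k = max(1, int(alpha * flat.numel()))
    idx = torch.randperm(flat.numel(), generator=generator)[:k]
    return idx, flat[idx]  # indices and values to transmit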
[DATASET]
dataset_package = decentralizepy.datasets.Femnist
dataset_class = Femnist
random_seed = 97
model_class = CNN
train_dir = /mnt/nfs/shared/leaf/data/femnist/per_user_data/train
test_dir = /mnt/nfs/shared/leaf/data/femnist/data/test
; python list of fractions below
sizes =
[OPTIMIZER_PARAMS]
optimizer_package = torch.optim
optimizer_class = SGD
lr = 0.001
# There are 734463 femnist samples
[TRAIN_PARAMS]
training_package = decentralizepy.training.Training
training_class = Training
rounds = 47
full_epochs = False
batch_size = 16
shuffle = True
loss_package = torch.nn
loss_class = CrossEntropyLoss
[COMMUNICATION]
comm_package = decentralizepy.communication.TCP
comm_class = TCP
addresses_filepath = ip_addr_6Machines.json
[SHARING]
sharing_package = decentralizepy.sharing.PartialModel
sharing_class = PartialModel
alpha = 0.1
accumulation = True
accumulate_averaging_changes = True
\ No newline at end of file
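
accumulation = True (together with accumulate_averaging_changes = True) suggests that changes falling outside the top-alpha cut are not discarded but carried forward, so slowly drifting parameters are eventually shared. A sketch of that residual-accumulation idea; the class and attribute names are hypothetical, not decentralizepy's:

import torch

class PartialModelSketch:
    def __init__(self, alpha=0.1):
        self.alpha = alpha
        self.residual = None  # change that was computed but never shared

    def select(self, change):
        # 'change' is the flattened parameter delta for this round.
        if self.residual is not None:
            change = change + self.residual  # fold in unshared past change
        k = max(1, int(self.alpha * change.numel()))
        top = torch.topk(change.abs(), k).indices
        shared = torch.zeros_like(change)
        shared[top] = change[top]
        self.residual = change - shared      # accumulate what was not shared
        return top, change[top]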