Skip to content
Snippets Groups Projects
Commit 68e38e9f authored by Rishi Sharma's avatar Rishi Sharma
Browse files

GradientAccumulator migration to steps

parent cef92565
No related branches found
No related tags found
No related merge requests found
......@@ -5,7 +5,14 @@ from decentralizepy.training.Training import Training
class GradientAccumulator(Training):
def __init__(
self, model, optimizer, loss, rounds="", full_epochs="", batch_size="", shuffle=""
self,
model,
optimizer,
loss,
rounds="",
full_epochs="",
batch_size="",
shuffle="",
):
"""
Constructor
......@@ -24,7 +31,9 @@ class GradientAccumulator(Training):
shuffle : bool
True if the dataset should be shuffled before training.
"""
super().__init__(model, optimizer, loss, rounds, full_epochs, batch_size, shuffle)
super().__init__(
model, optimizer, loss, rounds, full_epochs, batch_size, shuffle
)
def trainstep(self, data, target):
"""
......
0% — Loading, or the content failed to load.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment