Commit 0fa9ba10 authored by Jeffrey Wigger

encoding indices as np.int32

parent f77e0f94
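The change is the same across all five sharing classes shown below (FFT, SubSampling, TopK, TopKParams, Wavelet): serialized index arrays are cast to np.int32 before being handed to the communication layer, halving their wire size relative to the int64 indices PyTorch produces, and the receive side rebuilds them with an explicit dtype=torch.long before using them to index tensors (in SubSampling the cast lands on the params payload instead; see the note after that hunk). A minimal sketch of the assumption this encoding makes (illustrative model size, not from the commit):

import numpy as np

# Flat parameter indices are non-negative, so int32 is safe as long as the
# model has fewer than 2**31 (about 2.1 billion) parameters.
num_params = 25_000_000  # hypothetical model size
assert num_params < np.iinfo(np.int32).max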
import base64
import json
import logging
import os
import pickle
from pathlib import Path
from time import time
import torch
import torch.fft as fft
import numpy as np
from decentralizepy.sharing.Sharing import Sharing
@@ -182,7 +181,7 @@ class FFT(Sharing):
m["alpha"] = self.alpha
m["params"] = topk.numpy()
m["indices"] = indices.numpy()
m["indices"] = indices.numpy().astype(np.int32)
self.total_data += len(self.communication.encrypt(m["params"]))
self.total_meta += len(self.communication.encrypt(m["indices"])) + len(
@@ -215,23 +214,12 @@ class FFT(Sharing):
if not self.dict_ordered:
raise NotImplementedError
shapes = []
lens = []
tensors_to_cat = []
for _, v in state_dict.items():
shapes.append(v.shape)
t = v.flatten()
lens.append(t.shape[0])
tensors_to_cat.append(t)
T = torch.cat(tensors_to_cat, dim=0)
indices = m["indices"]
alpha = m["alpha"]
params = m["params"]
params_tensor = torch.tensor(params)
-indices_tensor = torch.tensor(indices)
+indices_tensor = torch.tensor(indices, dtype=torch.long)
ret = dict()
ret["indices"] = indices_tensor
ret["params"] = params_tensor
...
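On the receive side of the FFT class above, the index array that arrived as int32 is upcast back to torch.long, since PyTorch expects long index tensors (older versions reject int32 indices outright). A minimal sketch of the round trip, with illustrative shapes and values:

import numpy as np
import torch

# Wire format after this commit: indices travel as int32 (illustrative data).
wire_indices = np.array([1, 3, 5], dtype=np.int32)
wire_params = np.array([0.1, 0.2, 0.3], dtype=np.float32)

# torch.tensor inherits the numpy dtype, so the explicit dtype=torch.long is
# what makes the result usable as an index tensor again.
idx = torch.tensor(wire_indices, dtype=torch.long)
T = torch.zeros(10)
T[idx] = torch.tensor(wire_params)  # scatter received values into the flat tensor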
import base64
import json
import logging
import os
import pickle
from pathlib import Path
import torch
import numpy as np
from decentralizepy.sharing.Sharing import Sharing
@@ -203,7 +202,7 @@ class SubSampling(Sharing):
m["seed"] = seed
m["alpha"] = alpha
m["params"] = subsample.numpy()
m["params"] = subsample.numpy().astype(np.int32)
# logging.info("Converted dictionary to json")
self.total_data += len(self.communication.encrypt(m["params"]))
...
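Note that in SubSampling the cast lands on m["params"] rather than an index array. If subsample holds floating-point parameter values, astype(np.int32) truncates them toward zero, so this line only preserves the payload if it is integer-valued; it may simply mirror the index casts in the other classes. The numpy behavior in question:

import numpy as np

# astype(np.int32) truncates toward zero and silently drops fractions.
subsample = np.array([0.25, -1.75, 3.0], dtype=np.float32)
print(subsample.astype(np.int32))  # [ 0 -1  3]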
@@ -3,6 +3,7 @@ import logging
import os
from pathlib import Path
+import numpy as np
import torch
from decentralizepy.sharing.Sharing import Sharing
@@ -166,7 +167,7 @@ class TopK(Sharing):
if not self.dict_ordered:
raise NotImplementedError
m["indices"] = G_topk.numpy()
m["indices"] = G_topk.numpy().astype(np.int32)
m["params"] = T_topk.numpy()
assert len(m["indices"]) == len(m["params"])
@@ -214,7 +215,7 @@ class TopK(Sharing):
tensors_to_cat.append(t)
T = torch.cat(tensors_to_cat, dim=0)
index_tensor = torch.tensor(m["indices"])
index_tensor = torch.tensor(m["indices"], dtype=torch.long)
logging.debug("Original tensor: {}".format(T[index_tensor]))
T[index_tensor] = torch.tensor(m["params"])
logging.debug("Final tensor: {}".format(T[index_tensor]))
...
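The TopK send path pairs each retained value with its flat index; only the indices change dtype, while the values stay float. A sketch of that convention under assumed names (T as the flattened parameters, G_topk/T_topk as in the hunk above):

import numpy as np
import torch

T = torch.randn(1000)  # flattened model parameters (illustrative)
k = 10

# topk over magnitudes returns the k largest values and their flat indices.
_, G_topk = torch.topk(T.abs(), k)
T_topk = T[G_topk]

m = {}
m["indices"] = G_topk.numpy().astype(np.int32)  # compact int32 on the wire
m["params"] = T_topk.numpy()                    # values remain float32
assert len(m["indices"]) == len(m["params"])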
@@ -3,6 +3,7 @@ import logging
import os
from pathlib import Path
+import numpy as np
import torch
from decentralizepy.sharing.Sharing import Sharing
@@ -157,7 +158,7 @@ class TopKParams(Sharing):
if not self.dict_ordered:
raise NotImplementedError
m["indices"] = index.numpy()
m["indices"] = index.numpy().astype(np.int32)
m["params"] = values.numpy()
m["offsets"] = offsets
@@ -206,7 +207,7 @@ class TopKParams(Sharing):
tensors_to_cat = []
offsets = m["offsets"]
params = torch.tensor(m["params"])
indices = torch.tensor(m["indices"])
indices = torch.tensor(m["indices"], dtype=torch.long)
for i, (_, v) in enumerate(state_dict.items()):
shapes.append(v.shape)
...
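The TopKParams receive side shows why the explicit dtype matters: torch.tensor inherits the numpy dtype, so without dtype=torch.long the index tensor would come back as torch.int32. The propagation in isolation:

import numpy as np
import torch

arr = np.arange(5, dtype=np.int32)
print(torch.tensor(arr).dtype)                    # torch.int32 (inherited)
print(torch.tensor(arr, dtype=torch.long).dtype)  # torch.int64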
import base64
import json
import logging
import os
import pickle
from pathlib import Path
from time import time
import numpy as np
import pywt
import torch
@@ -206,7 +205,7 @@ class Wavelet(Sharing):
m["params"] = topk.numpy()
m["indices"] = indices.numpy()
m["indices"] = indices.numpy().astype(np.int32)
self.total_data += len(self.communication.encrypt(m["params"]))
self.total_meta += len(self.communication.encrypt(m["indices"])) + len(
@@ -255,7 +254,7 @@ class Wavelet(Sharing):
params = m["params"]
params_tensor = torch.tensor(params)
-indices_tensor = torch.tensor(indices)
+indices_tensor = torch.tensor(indices, dtype=torch.long)
ret = dict()
ret["indices"] = indices_tensor
ret["params"] = params_tensor
...
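Because total_data and total_meta are counted as the length of the serialized payload, the int32 cast directly halves the metadata attributed to indices in Wavelet (and the other classes). A rough sketch, with pickle standing in for the communication layer's encrypt step (an assumption for illustration, not its actual implementation):

import pickle
import numpy as np

indices = np.arange(100_000, dtype=np.int64)  # as produced by torch index tensors

meta_int64 = len(pickle.dumps(indices))
meta_int32 = len(pickle.dumps(indices.astype(np.int32)))
print(meta_int64, meta_int32)  # the int32 encoding is roughly half the bytes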