{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from datasets.Femnist import Femnist\n", "from graphs import SmallWorld\n", "from collections import defaultdict\n", "import os\n", "import json\n", "import numpy as np\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "a = FEMNIST\n", "a" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "b = SmallWorld(6, 2, 2, 1)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "b.adj_list" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "for i in range(12):\n", " print(b.neighbors(i))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "clients = []" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "num_samples = []\n", "data = defaultdict(lambda : None)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "datadir = \"./leaf/data/femnist/data/train\"\n", "files = os.listdir(datadir)\n", "total_users=0\n", "users = set()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "files = os.listdir(datadir)[0:1]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "for f in files:\n", " file_path = os.path.join(datadir, f)\n", " print(file_path)\n", " with open(file_path, 'r') as inf:\n", " client_data = json.load(inf)\n", " current_users = len(client_data['users'])\n", " print(\"Current_Users: \", current_users)\n", " total_users += current_users\n", " users.update(client_data['users'])\n", "\n", "print(\"total_users: \", total_users)\n", "print(\"total_users: \", len(users))\n", "print(client_data['user_data'].keys())\n", "print(np.array(client_data['user_data']['f3408_47']['x']).shape)\n", "print(np.array(client_data['user_data']['f3408_47']['y']).shape)\n", "print(np.array(client_data['user_data']['f3327_11']['x']).shape)\n", "print(np.array(client_data['user_data']['f3327_11']['y']).shape)\n", "print(np.unique(np.array(client_data['user_data']['f3327_11']['y'])))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "file = 'run.py'\n", "with open(file, 'r') as inf:\n", " print(inf.readline().strip())\n", " print(inf.readlines())" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def f(l):\n", " l[2] = 'c'\n", "\n", "a = ['a', 'a', 'a']\n", "print(a)\n", "f(a)\n", "print(a)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "l = ['a', 'b', 'c']\n", "print(l[:-1])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from localconfig import LocalConfig\n", "\n", "def read_ini(file_path):\n", " config = LocalConfig(file_path)\n", " for section in config:\n", " print(\"Section: \", section)\n", " for key, value in config.items(section):\n", " print((key, value))\n", " print(dict(config.items('DATASET')))\n", " \n", "read_ini(\"config.ini\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def func(a = 1, b = 2, c = 3):\n", " print(a + b + c)\n", "\n", "l = [3, 5, 7]\n", "\n", "func(*l)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], 
"source": [ "from torch import multiprocessing as mp\n", "\n", "mp.spawn(fn = func, nprocs = 2, args = [], kwargs = {'a': 4, 'b': 5, 'c': 6})" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "l = '[0.4, 0.2, 0.3, 0.1]'\n", "type(eval(l))\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "f1 = Femnist(1, 'leaf/data/femnist/data/train')\n", "f1.instantiate_dataset()\n", "f1.train_x.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from decentralizepy.datasets.Femnist import Femnist\n", "from decentralizepy.graphs.SmallWorld import SmallWorld\n", "from decentralizepy.mappings.Linear import Linear\n", "\n", "f = Femnist(2, 'leaf/data/femnist/data/train', sizes=[0.6, 0.4])\n", "g = SmallWorld(4, 1, 0.5)\n", "l = Linear(2, 2)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from decentralizepy.node.Node import Node\n", "from torch import multiprocessing as mp\n", "import logging\n", "n1 = Node(0, l, g, f, \"./results\", logging.DEBUG)\n", "n2 = Node(1, l, g, f, \"./results\", logging.DEBUG)\n", "# mp.spawn(fn = Node, nprocs = 2, args=[l,g,f])" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from testing import f" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Linear(in_features=1, out_features=1, bias=True)\n", "1 OrderedDict([('weight', tensor([[0.9654]])), ('bias', tensor([-0.2141]))])\n", "1 [{'params': [Parameter containing:\n", "tensor([[0.9654]], requires_grad=True), Parameter containing:\n", "tensor([-0.2141], requires_grad=True)], 'lr': 0.6, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n", "1 OrderedDict([('weight', tensor([[0.]])), ('bias', tensor([-0.2141]))])\n", "1 [{'params': [Parameter containing:\n", "tensor([[0.]], requires_grad=True), Parameter containing:\n", "tensor([-0.2141], requires_grad=True)], 'lr': 0.6, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n", "0 OrderedDict([('weight', tensor([[0.]])), ('bias', tensor([-0.2141]))])\n", "0 [{'params': [Parameter containing:\n", "tensor([[0.]], requires_grad=True), Parameter containing:\n", "tensor([-0.2141], requires_grad=True)], 'lr': 0.6, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n", "0 OrderedDict([('weight', tensor([[0.]])), ('bias', tensor([-0.2141]))])\n", "0 [{'params': [Parameter containing:\n", "tensor([[0.]], requires_grad=True), Parameter containing:\n", "tensor([-0.2141], requires_grad=True)], 'lr': 0.6, 'momentum': 0, 'dampening': 0, 'weight_decay': 0, 'nesterov': False}]\n" ] } ], "source": [ "from torch import multiprocessing as mp\n", "import torch\n", "m1 = torch.nn.Linear(1,1)\n", "o1 = torch.optim.SGD(m1.parameters(), 0.6)\n", "print(m1)\n", "\n", "\n", "mp.spawn(fn = f, nprocs = 2, args=[m1, o1])\n", "\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "o1.param_groups" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with torch.no_grad():\n", " o1.param_groups[0][\"params\"][0].copy_(torch.zeros(1,))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "o1.param_groups" ] }, { "cell_type": "code", 
"execution_count": null, "metadata": {}, "outputs": [], "source": [ "m1.state_dict()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "interpreter": { "hash": "996934296aa9d79be6c3d800a38d8fdb7dfa8fe7bb07df178f1397cde2cb8742" }, "kernelspec": { "display_name": "Python 3.9.7 64-bit ('tff': conda)", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.7" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }