viceroy

git clone git://git.codymlewis.com/viceroy.git

commit 3015c5ccf62fb988601bc9d8de5c183cd7b48ec2
parent aa99436619df2f1131c53cc45e9de0593e262f11
Author: Cody Lewis <cody@codymlewis.com>
Date:   Mon, 19 Oct 2020 13:53:42 +1100

Started decoupling reusable functions into a utilities module
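
The new utils module holds load_data() and get_dims(). As a rough
illustration of how the callers in the diff below are meant to use it
(sketch only, not part of this commit):

    import utils
    import SoftMaxModel

    # Fetch an MNIST split as flat feature vectors with integer targets
    X, Y = utils.load_data("mnist", train=True)

    # Infer layer dimensions from the tensor shapes, as Client.py and
    # Server.py now do, then build the model from them
    dims = utils.get_dims(X.shape, Y.shape)
    net = SoftMaxModel.SoftMaxModel(dims['x'], dims['y'])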

Diffstat:
M .gitignore      |  2 ++
M Client.py       | 37 ++++++++++++++++++-------------------
M Server.py       | 36 ++++++++++++++++++++----------------
M SoftMaxModel.py | 14 ++++++++++++--
A utils.py        | 39 +++++++++++++++++++++++++++++++++++++++
5 files changed, 91 insertions(+), 37 deletions(-)

diff --git a/.gitignore b/.gitignore
@@ -3,3 +3,5 @@ __pycache__/*
 *.pyc
 Session.vim
 tags
+data
+data/*
diff --git a/Client.py b/Client.py
@@ -11,11 +11,13 @@ import torch
 import torch.nn as nn
 
 import SoftMaxModel
+import utils
 
 
 class Client:
     def __init__(self, x, y):
-        self.net = SoftMaxModel.SoftMaxModel(len(x[0]), len(y[0]))
+        dims = utils.get_dims(x.shape, y.shape)
+        self.net = SoftMaxModel.SoftMaxModel(dims['x'], dims['y'])
         self.x = x
         self.y = y
         self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -24,14 +26,12 @@ class Client:
     def connect(self, host, port):
         """Connect to the host:port federated learning server"""
         self.socket.connect((host, port))
-        self.net.copy_params(pickle.loads(self.socket.recv(1024)))
 
     def fit(self):
-        if self.socket.recv(1024) != b'OK':
-            pass
         criterion = nn.BCELoss()
         e = 0
-        while True:
+        while (msg := self.socket.recv(4096)) != b'DONE':
+            self.net.copy_params(pickle.loads(msg))
             e += 1
             history, grads = self.net.fit(self.x, self.y, 1, verbose=False)
             self.socket.sendall(pickle.dumps(grads))
@@ -39,26 +39,25 @@ class Client:
                 f"Epoch: {e}, Loss: {criterion(self.net(X), Y)}",
                 end="\r"
             )
-            if self.socket.recv(1024) != b'OK':
-                break
         # An improvement would be to save grads as a backlog and concurrent
         # send them when the server is ready
         print()
 
 
 if __name__ == '__main__':
-    X = torch.tensor([
-        [0, 0],
-        [0, 1],
-        [1, 0],
-        [1, 1]
-    ], dtype=torch.float32)
-    Y = torch.tensor([
-        [1, 0],
-        [1, 0],
-        [1, 0],
-        [0, 1]
-    ], dtype=torch.float32)
+    # X = torch.tensor([
+    #     [0, 0],
+    #     [0, 1],
+    #     [1, 0],
+    #     [1, 1]
+    # ], dtype=torch.float32)
+    # Y = torch.tensor([
+    #     [1, 0],
+    #     [1, 0],
+    #     [1, 0],
+    #     [0, 1]
+    # ], dtype=torch.float32)
+    X, Y = utils.load_data("mnist")
     client = Client(X, Y)
     HOST, PORT = '127.0.0.1', 5000
     print(f"Connecting to {HOST}:{PORT}")
diff --git a/Server.py b/Server.py
@@ -6,10 +6,10 @@ Author: Cody Lewis
 import socket
 import pickle
 
-import torch
 import torch.nn as nn
 
 import GlobalModel
+import utils
 
 
 class Server:
@@ -46,9 +46,10 @@ class Server:
         for e in range(epochs):
            grads = dict()
            for i, (c, _) in enumerate(self.clients):
-                c.send(b'OK')
                grads[i] = pickle.loads(c.recv(4096))
            self.net.fit(1, grads)
+            for c, _ in self.clients:
+                c.send(pickle.dumps(self.net.get_params()))
            print(
                f"Epoch: {e + 1}/{epochs}, Loss: {criterion(server.net.predict(X), Y)}",
                end="\r"
@@ -57,6 +58,7 @@
     def close(self):
         for c, _ in self.clients:
+            c.send(b'DONE')
             c.close()
         self.clients = []
 
@@ -64,20 +66,22 @@
 
 if __name__ == '__main__':
     PORT = 5000
-    server = Server(2, 2, PORT)
+    X, Y = utils.load_data("mnist", train=False)
+    dims = utils.get_dims(X.shape, Y.shape)
+    server = Server(dims['x'], dims['y'], PORT)
     print(f"Starting server on port {PORT}")
-    X = torch.tensor([
-        [0, 0],
-        [0, 1],
-        [1, 0],
-        [1, 1]
-    ], dtype=torch.float32)
-    Y = torch.tensor([
-        [1, 0],
-        [1, 0],
-        [1, 0],
-        [0, 1]
-    ], dtype=torch.float32)
-    server.accept_clients(2)
+    # X = torch.tensor([
+    #     [0, 0],
+    #     [0, 1],
+    #     [1, 0],
+    #     [1, 1]
+    # ], dtype=torch.float32)
+    # Y = torch.tensor([
+    #     [1, 0],
+    #     [1, 0],
+    #     [1, 0],
+    #     [0, 1]
+    # ], dtype=torch.float32)
+    server.accept_clients(1)
     server.fit(X, Y, 5000)
     server.close()
diff --git a/SoftMaxModel.py b/SoftMaxModel.py
@@ -4,10 +4,11 @@ Pytorch implementation of a softmax perceptron
 Author: Cody Lewis
 """
 
-import torch
 import torch.nn as nn
 import torch.optim as optim
 
+import utils
+
 
 class SoftMaxModel(nn.Module):
     """The softmax perceptron class"""
@@ -37,8 +38,10 @@ class SoftMaxModel(nn.Module):
         verbose -- output training stats if True
         """
         optimizer = optim.SGD(self.parameters(), lr=self.lr, momentum=0.9)
-        criterion = nn.BCELoss()
+        # criterion = nn.MSELoss()
+        criterion = nn.CrossEntropyLoss()
         history = {'loss': []}
+        # TODO: take a random batch of the data
         for i in range(epochs):
             optimizer.zero_grad()
             output = self(x)
@@ -65,3 +68,10 @@ class SoftMaxModel(nn.Module):
         """Copy input parameters into self"""
         for p, t in zip(params, self.parameters()):
             t.data.copy_(p)
+
+
+if __name__ == '__main__':
+    X, Y = utils.load_data("mnist")
+    dims = utils.get_dims(X.shape, Y.shape)
+    net = SoftMaxModel(dims['x'], dims['y'])
+    net.fit(X, Y, 500)
diff --git a/utils.py b/utils.py
@@ -0,0 +1,39 @@
+"""
+Utility functions for use on other classes in this project
+
+Author: Cody
+"""
+
+import torch
+from torch import nn
+import torchvision
+
+
+def load_data(ds_name, train=True):
+    """
+    Load the specified dataset in a form suitable for the Softmax model
+
+    Keyword arguments:
+    ds_name -- name of the dataset
+    train -- load the training dataset if true otherwise load the validation
+    """
+    datasets = {
+        "mnist": torchvision.datasets.MNIST,
+    }
+    if (chosen_set := datasets.get(ds_name)) is None:
+        return torch.tensor(), torch.tensor()
+    data = chosen_set(f"./data/{ds_name}", train=train, download=True)
+    X = data.data
+    if len(X.shape) == 3:
+        X = X.reshape(X.shape[0], X.shape[1] * X.shape[2])
+    # return X.float(), nn.functional.one_hot(data.targets)
+    return X.float(), data.targets.long().unsqueeze(dim=0)
+
+
+def get_dims(x_shape, y_shape):
+    """Get the dimensions for a dataset based on its shapes"""
+    return {
+        "x": x_shape[-1] if len(x_shape) > 1 else 1,
+        "y": y_shape[-1] if len(y_shape) > 1 else 1,
+    }
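
For reference, the socket exchange this commit moves to (server pushes
pickled parameters each round, client answers with its gradients, and
b'DONE' ends the loop) can be reduced to the standalone sketch below. It is
illustrative only, not repository code; the port, buffer size, and toy_*
names are made up here.

    import pickle
    import socket
    import threading
    import time


    def toy_server(port=5001, rounds=3):
        """Push state each round, read the reply, then signal DONE."""
        srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        srv.bind(('127.0.0.1', port))
        srv.listen(1)
        conn, _ = srv.accept()
        state = 0
        for _ in range(rounds):
            conn.sendall(pickle.dumps(state))       # broadcast the current "params"
            update = pickle.loads(conn.recv(4096))  # wait for the client's "grads"
            state += update                         # stand-in for aggregation
        conn.sendall(b'DONE')                       # tell the client to stop
        conn.close()
        srv.close()


    def toy_client(port=5001):
        """Loop until the server says DONE, mirroring Client.fit above."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect(('127.0.0.1', port))
        while (msg := sock.recv(4096)) != b'DONE':
            state = pickle.loads(msg)               # copy the global state
            sock.sendall(pickle.dumps(state + 1))   # send a dummy local update
        sock.close()


    if __name__ == '__main__':
        t = threading.Thread(target=toy_server)
        t.start()
        time.sleep(0.5)  # crude: give the server a moment to start listening
        toy_client()
        t.join()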