tf-encrypted / tf-encrypted

A Framework for Encrypted Machine Learning in TensorFlow

Home Page: https://tf-encrypted.io/


Training a CNN on MNIST data with TFE fails. Who can help me?

carlzhangweiwen opened this issue · comments

I want to train on MNIST data with TFE, but it fails.

I referenced the example code from mnist and notebook.

Who can help me? My tensorflow version is 1.15.3 and my tf_encrypted version is 0.5.9.

I have changed the activation 'relu' to None, because relu is not supported.

import tf_encrypted as tfe
import tensorflow as tf
from tf_encrypted.keras import backend as KE
from convert import decode
from tf_encrypted.keras.losses import BinaryCrossentropy
from tf_encrypted.keras.optimizers import SGD

from tf_encrypted.keras.models import Sequential
from tf_encrypted.keras.layers import Dense,Flatten,MaxPooling2D,BatchNormalization,Activation,ReLU
from tf_encrypted.keras.layers.convolutional import Conv2D




class DataOwner:
    """Contains code meant to be executed by a data owner Player."""

    IMG_ROWS = 28
    IMG_COLS = 28
    FLATTENED_DIM = IMG_ROWS * IMG_COLS

    def __init__(
        self, player_name, batch_size
    ):
        self.player_name = player_name
        self.batch_size = batch_size
        self.train_initializer = None
        self.test_initializer = None

    @property
    def initializer(self):
        return tf.group(self.train_initializer)

    def _build_data_pipeline(self):
        """Build a reproducible tf.data iterator."""

        def flatten(image, label):
            image = tf.reshape(image, shape=[-1, 28, 28, 1])
            # label = to_categorical(label, num_classes=10)
            return image, label

        dataset = tf.data.TFRecordDataset(["/home/carl/Documents/github/tf-encrypted/data/train.tfrecord"])
        dataset = dataset.map(decode)
        dataset = dataset.map(flatten)
        dataset = dataset.repeat()
        # dataset = dataset.batch(self.batch_size)

        iterator = tf.data.make_initializable_iterator(dataset)
        return iterator

    @tfe.local_computation
    def provide_training_data(self):
        """Preprocess training dataset
    Return single batch of training dataset
    """
        train_set_iterator = self._build_data_pipeline()
        self.train_initializer = train_set_iterator.initializer

        x, y = train_set_iterator.get_next()
        print("shape x:", x)
        print("shape y:", y)
        # x = tf.reshape(x, [self.batch_size, 28, 28, 1])
        # y = tf.reshape(y, [self.batch_size])
        return x, y



training_set_size = 60000
batch_size = 32
steps_per_epoch = (training_set_size // batch_size)
epochs = 20

data_owner = DataOwner('data-owner',
                       batch_size)

x_train, y_train = data_owner.provide_training_data()

BATCH_NORM = False
with tfe.protocol.SecureNN():
    model = Sequential()
    print("X_train.shape:", x_train.shape)
    model.add(Conv2D(32, (3, 3), activation=None, input_shape=x_train.shape[1:]))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation=None))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(64, (3, 3), activation=None))
    model.add(Flatten())
    model.add(Dense(64, activation=None))
    model.add(Dense(10))
    model.compile(optimizer=SGD(lr=0.01), loss=BinaryCrossentropy())


sess = KE.get_session()
sess.run([data_owner.initializer])


# Train
model.fit(x_train,
          y_train,
          epochs=epochs,
          steps_per_epoch=steps_per_epoch)

Running this fails with the following traceback:

Traceback (most recent call last):
File "lenet3.py", line 86, in
model.add(Conv2D(32, (3, 3), activation=None, input_shape=x_train.shape[1:]))
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/keras/models/sequential.py", line 62, in add
y = layer(x)
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/keras/engine/base_layer.py", line 102, in call
outputs = self.call(inputs, *args, **kargs)
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/keras/layers/convolutional.py", line 164, in call
inputs = self.prot.transpose(inputs, perm=[0, 3, 1, 2])
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/protocol/pond/pond.py", line 1015, in transpose
x_t = _transpose_private(self, x, perm=perm)
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/protocol/pond/pond.py", line 3622, in _transpose_private
x0_t = x0.transpose(perm=perm)
File "/home/carl/.local/lib/python3.6/site-packages/tf_encrypted/tensor/native.py", line 286, in transpose
return DenseTensor(tf.transpose(self.value, perm))
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/ops/array_ops.py", line 1859, in transpose
ret = transpose_fn(a, perm, name=name)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/ops/gen_array_ops.py", line 11453, in transpose
"Transpose", x=x, perm=perm, name=name)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/op_def_library.py", line 794, in _apply_op_helper
op_def=op_def)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/util/deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py", line 3357, in create_op
attrs, op_def, compute_device)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py", line 3426, in _create_op_internal
op_def=op_def)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py", line 1770, in init
control_input_ops)
File "/home/carl/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py", line 1610, in _create_c_op
raise ValueError(str(e))
ValueError: Dimension must be 3 but is 4 for 'transpose/transpose' (op: 'Transpose') with input shapes: [28,28,1], [4].

Please check the examples of ML inference and training on secret-shared images: https://github.com/tf-encrypted/tf-encrypted/tree/master/examples/benchmark
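For reference, the ValueError above suggests that the tensor reaching the first Conv2D layer has shape [28, 28, 1] (3-D) rather than a batched 4-D tensor, most likely because the dataset is never batched (the dataset.batch call is commented out in _build_data_pipeline). Below is a minimal, untested sketch of how the pipeline could be batched; it reuses the decode helper and TFRecord path from the code above and has not been verified against tf_encrypted 0.5.9.

    def _build_data_pipeline(self):
        """Build a reproducible tf.data iterator that yields batched images."""

        def reshape(image, label):
            # decode (from convert.py above) is assumed to yield a flat image;
            # reshape it to a single 28x28x1 image with no batch dimension.
            image = tf.reshape(image, shape=[28, 28, 1])
            return image, label

        dataset = tf.data.TFRecordDataset(["/home/carl/Documents/github/tf-encrypted/data/train.tfrecord"])
        dataset = dataset.map(decode)
        dataset = dataset.map(reshape)
        dataset = dataset.repeat()
        # Batching adds the leading dimension, so get_next() yields tensors of
        # shape [batch_size, 28, 28, 1], which is what Conv2D expects.
        dataset = dataset.batch(self.batch_size, drop_remainder=True)

        iterator = tf.data.make_initializable_iterator(dataset)
        return iterator

With a batched pipeline, x_train.shape[1:] becomes [28, 28, 1], matching the input_shape passed to the first Conv2D layer, and the internal transpose to NCHW then receives the 4-D input it expects.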