KiLJ4EdeN / TensorFlow_AddOns

TensorFlow Algorithm and Layer Implementations.

TensorFlow_AddOns

Usage:

Install dependencies with pip:

pip install tensorflow
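
These examples use the tf.keras API, so a TensorFlow 2.x install is assumed (inferred from the code rather than pinned by the repo). A quick check:

import tensorflow as tf
print(tf.__version__)  # expect a 2.x release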

Import the blocks module and use the predefined layers.

from blocks import __conv_block, __dense_block, __classification_block, __parallel_block
from blocks import __depthwise_block, __identity_block, __residual_block

Examples:

ResNet-Like Architecture.

import tensorflow as tf
from blocks import __identity_block, __residual_block, __dense_block, __classification_block

inputs = tf.keras.layers.Input(shape=(32, 32, 3))

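# 16-filter stage: one residual block followed by two identity blocks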
x = __residual_block(inputs, filter_start=16, kernel_size=(3, 3),
                     use_bn=True, use_constraint=True,
                     use_dropout=True, constraint_rate=1,
                     dropout_rate=0.25, activation='relu')

x = __identity_block(x, filter_start=16, kernel_size=(3, 3),
                     use_bn=True, activation='relu')
x = __identity_block(x, filter_start=16, kernel_size=(3, 3),
                     use_bn=True, activation='relu')

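# 32-filter stage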
x = __residual_block(x, filter_start=32, kernel_size=(3, 3),
                     use_bn=True, use_constraint=True,
                     use_dropout=True, constraint_rate=1,
                     dropout_rate=0.25, activation='relu')
x = __identity_block(x, filter_start=32, kernel_size=(3, 3),
                     use_bn=True, activation='relu')
x = __identity_block(x, filter_start=32, kernel_size=(3, 3),
                     use_bn=True, activation='relu')


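# 64-filter stage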
x = __residual_block(x, filter_start=64, kernel_size=(3, 3),
                     use_bn=True, use_constraint=True,
                     use_dropout=True, constraint_rate=1,
                     dropout_rate=0.25, activation='relu')
x = __identity_block(x, filter_start=64, kernel_size=(3, 3),
                     use_bn=True, activation='relu')
x = __identity_block(x, filter_start=64, kernel_size=(3, 3),
                     use_bn=True, activation='relu')

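# 128-filter stage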
x = __residual_block(x, filter_start=128, kernel_size=(3, 3),
                     use_bn=True, use_constraint=True,
                     use_dropout=True, constraint_rate=1,
                     dropout_rate=0.25, activation='relu')
x = __identity_block(x, filter_start=128, kernel_size=(3, 3),
                     use_bn=True, activation='relu')
x = __identity_block(x, filter_start=128, kernel_size=(3, 3),
                     use_bn=True, activation='relu')

x = __dense_block(x, unit_start=512, num_blocks=2,
                  flatten=True, use_constraint=True,
                  use_dropout=True, constraint_rate=1,
                  dropout_rate=0.25, activation='relu')

x = __classification_block(x, num_classes=100)

model = tf.keras.models.Model(inputs=inputs, outputs=x)
print(model.summary())
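
To verify the assembled graph end to end, compile and train as usual. The sketch below assumes CIFAR-100 (chosen to match the 32x32x3 input and num_classes=100 above) and a softmax head with integer labels; neither is stated by the repo.

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar100.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0  # scale pixels to [0, 1]

model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',  # integer class labels
              metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=64, epochs=10,
          validation_data=(x_test, y_test))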

Customized MobileNet.

import tensorflow as tf
from blocks import __depthwise_block, __dense_block, __classification_block

inputs = tf.keras.layers.Input(shape=(32, 32, 3))

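# depthwise-separable stack: filters double each block while strides alternate between 1 and 2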
x = __depthwise_block(inputs, filters=8, strides=(1, 1), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=16, strides=(2, 2), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=32, strides=(1, 1), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=64, strides=(2, 2), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=128, strides=(1, 1), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=256, strides=(2, 2), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=512, strides=(1, 1), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __depthwise_block(x, filters=1024, strides=(2, 2), alpha=1.0,
                      use_bn=True, use_dropout=True, 
                      dropout_rate=0.25, activation='relu')

x = __dense_block(x, unit_start=512, num_blocks=1,
                  flatten=True, use_constraint=True,
                  use_dropout=True, constraint_rate=1,
                  dropout_rate=0.5, activation='relu')

x = __classification_block(x, num_classes=100)

model = tf.keras.models.Model(inputs=inputs, outputs=x)
print(model.summary())
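
Here alpha is presumably the MobileNet width multiplier, scaling the channel count of each depthwise block as in tf.keras.applications.MobileNet; values below 1.0 would shrink the network. This reading is inferred from the parameter name, not documented by the repo.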

Parallel Feature Extraction.

import tensorflow as tf
from blocks import __parallel_block, __dense_block, __classification_block

inputs = tf.keras.layers.Input(shape=(32, 32, 3))

x = __parallel_block(inputs, width=3, filter_start=64,
                     num_blocks=2,
                     use_bn=True, use_constraint=True,
                     use_dropout=True, constraint_rate=2,
                     dropout_rate=0.2, activation='relu')

x = __dense_block(x, unit_start=64, num_blocks=1,
                  flatten=False, use_constraint=True,
                  use_dropout=True, constraint_rate=2,
                  dropout_rate=0.2, activation='relu')

x = __classification_block(x, num_classes=100)
model = tf.keras.models.Model(inputs=inputs, outputs=x)
print(model.summary())

Simple CNN.

import tensorflow as tf
from blocks import __conv_block, __dense_block, __classification_block

# basic net.

inputs = tf.keras.layers.Input(shape=(32, 32, 3))

x = __conv_block(inputs, filter_start=64, kernel_size=(2, 2),
                 num_blocks=2,
                 use_bn=True, use_constraint=True,
                 use_dropout=True, constraint_rate=1,
                 dropout_rate=0.3, activation='relu')

x = __dense_block(x, unit_start=128, num_blocks=2,
                  flatten=True, use_constraint=True,
                  use_dropout=True, constraint_rate=1,
                  dropout_rate=0.5, activation='relu')

x = __classification_block(x, num_classes=100)

model = tf.keras.models.Model(inputs=inputs, outputs=x)
print(model.summary())

Transfer Learning Inference.

from transfer import Transfer_Learn

# note: setting included_layers to -1 makes all layers of the model trainable.
model = Transfer_Learn(input_shape=(224, 224, 3), classes=1, included_layers=1, model='MobileNet')
print(model.summary())
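
A minimal single-image inference sketch. The file name is hypothetical, and both the MobileNet-style preprocessing and the sigmoid head (suggested by classes=1) are assumptions:

import numpy as np
import tensorflow as tf

img = tf.keras.preprocessing.image.load_img('sample.jpg', target_size=(224, 224))  # hypothetical file
x = tf.keras.applications.mobilenet.preprocess_input(
    tf.keras.preprocessing.image.img_to_array(img))
prob = model.predict(np.expand_dims(x, axis=0))[0][0]  # single sigmoid unit
print(f'positive-class probability: {prob:.3f}')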

About

TensorFlow Algorithm and Layer Implementations.

License: MIT

