TensorFlow 2: dynamically viewing a simple model's training process, with TensorBoard displaying the relevant parameters

Time: 2020-06-18

Code

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from datetime import datetime
from packaging import version

import tensorflow as tf 
from tensorflow import keras

import numpy  as np 
import matplotlib.pyplot as plt 


print("Tensorflow version:", tf.__version__)
assert(version.parse(tf.__version__).release[0] >=2)


# Data: a straight line y = 0.5x + 2 plus Gaussian noise
def realModel():
    x_data = np.linspace(-1, 1, 1000)
    # np.random.shuffle(x_data)
    y_data = 0.5 * x_data + 2 + np.random.normal(0, 0.05, 1000)
    return x_data, y_data

# Data: a cubic curve y = x^3 + x^2 - 0.05 plus Gaussian noise
def realModel2():
    x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
    noise = np.random.normal(0, 0.05, x_data.shape)
    y_data = np.power(x_data, 3) + np.square(x_data) - 0.05 + noise
    return x_data, y_data



# Modeling: a two-layer dense network trained with SGD
def buildModel():
    model = keras.models.Sequential([
        keras.layers.Dense(16, input_dim=1),
        keras.layers.Dense(1)
    ])

    model.compile(
        loss='mse',
        optimizer=keras.optimizers.SGD(learning_rate=0.2)  # lr= is deprecated
    )

    return model

# Modeling: a deeper network with dropout, trained with Adam
def buildModel2():
    model = tf.keras.Sequential([
        tf.keras.layers.Flatten(input_shape=(1,)),       # flatten the input to one dimension
        tf.keras.layers.Dense(30, activation='relu'),    # hidden layer, 30 units; relu zeroes out negatives, sparsifying the activations
        # tf.keras.layers.Dense(10, activation='relu'),  # commented out; add more layers here if necessary
        tf.keras.layers.Dropout(0.2),                    # randomly drop 20% of units to prevent overfitting
        tf.keras.layers.Dense(1)
    ])

    optimizer = tf.keras.optimizers.Adam(0.001)  
    model.compile(optimizer=optimizer, loss="mse")

    return model


if __name__ == "__main__":
    # x_train, y_train= realModel()
    x_train, y_train= realModel2()
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(x_train, y_train)
    plt.ion()
    plt.show()
    
    # model = buildModel()
    model = buildModel2()

    logdir = "logs/scalars/" + datetime.now().strftime("%Y%m%d-%H%M%S")
    tensorboard_callbacks = keras.callbacks.TensorBoard(
        log_dir=logdir,
        histogram_freq=1,   # log weight histograms every epoch (must be an int, not True)
        write_graph=True
        # write_grads is not accepted by the TF2 TensorBoard callback, so it is omitted
    )


    for i in range(100):
        model.fit(
            x_train,
            y_train,
            batch_size=150,
            verbose=0,
            epochs=5,
            callbacks=[tensorboard_callbacks]
        )

        if i % 2 == 0:
            y_pred = model.predict(x_train)
            try:
                # Remove the previously drawn prediction curve (lines is undefined on the first pass)
                ax.lines.remove(lines[0])
            except Exception:
                pass
            lines = ax.plot(x_train, y_pred)
            plt.pause(1)

    plt.pause(0)
    model.save(logdir)
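
Because model.save(logdir) writes a TF2 SavedModel into the run directory, the trained network can be restored later without rebuilding it. A minimal sketch, assuming a hypothetical timestamped path of the kind logdir produces:

from tensorflow import keras
import numpy as np

# The path is illustrative; substitute the actual timestamped run directory
restored = keras.models.load_model("logs/scalars/20200618-093000")
print(restored.predict(np.array([[0.5]])))  # predicted y for x = 0.5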

Training dynamic chart

[Figure: animated plot of the model's predictions converging to the training data]

Start TensorBoard:

tensorboard --logdir logs/scalars

Open in a browser: http://localhost:6006/
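
TensorBoard can also be started from Python instead of the command line, using the tensorboard.program API (the logdir below matches the one used above; everything else is standard):

from tensorboard import program

tb = program.TensorBoard()
tb.configure(argv=[None, "--logdir", "logs/scalars"])
url = tb.launch()  # starts TensorBoard on a background thread, default port 6006
print("TensorBoard listening at", url)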

[Figures: TensorBoard screenshots of the run's scalars, graph, and histograms]
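
The Keras callback logs the loss and weight histograms automatically; additional quantities can be written to the same log tree with the tf.summary API so they appear alongside it. A minimal sketch, where the metric name and values are purely illustrative:

import tensorflow as tf

writer = tf.summary.create_file_writer("logs/scalars/custom")
with writer.as_default():
    for step in range(100):
        # "my_metric" is a hypothetical scalar; log whatever quantity you track
        tf.summary.scalar("my_metric", 0.5 * step, step=step)
writer.flush()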