Perceptron as a binary classifier#

Importing libraries and packages#

# System
import os

# Suppress TensorFlow C++ info/warning messages
# (must be set before TensorFlow is imported to take effect)
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"

# Mathematical operations and data manipulation
import pandas as pd

# Modelling
import tensorflow as tf

# Statistics
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score

# Plotting
import matplotlib.pyplot as plt

%matplotlib inline

Set paths#

# Path to datasets directory
data_path = "./datasets"
# Path to assets directory (for saving results to)
assets_path = "./assets"

Loading dataset#

dataset = pd.read_csv(f"{data_path}/data.csv")
dataset.head()
   label      x1      x2
0      1  2.6487  4.5192
1      1  1.5438  2.4443
2      1  1.8990  4.2409
3      1  2.4711  5.8097
4      1  3.3590  6.4423

The dataset has three columns: x1 and x2 are the input features, and the label column holds the class label, either 0 or 1.
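
A quick sanity check that both classes are present and that the feature ranges look reasonable; a short sketch using the dataset loaded above:

# Count how many samples fall into each class
print(dataset["label"].value_counts())
# Summary statistics of the two features
print(dataset[["x1", "x2"]].describe())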

Exploring dataset#

# Plot class 0 with star markers
plt.scatter(
    dataset[dataset["label"] == 0]["x1"],
    dataset[dataset["label"] == 0]["x2"],
    marker="*",
)
# Plot class 1 with triangle markers
plt.scatter(
    dataset[dataset["label"] == 1]["x1"],
    dataset[dataset["label"] == 1]["x2"],
    marker="<",
)
Scatter plot of the two classes: label 0 plotted with star markers, label 1 with triangle markers.
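
The two classes form visibly separate clusters, so a single perceptron with a linear decision boundary should be able to separate them. The model trained below computes a weighted sum of the inputs plus a bias and passes it through the sigmoid activation:

$$
\hat{y} = \sigma(x_1 w_1 + x_2 w_2 + b), \qquad \sigma(z) = \frac{1}{1 + e^{-z}}
$$

Outputs above 0.5 are rounded to class 1 and outputs below 0.5 to class 0.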

Training of the perceptron#

# Split into features and labels and convert to NumPy arrays
x_input = dataset[["x1", "x2"]].values
y_label = dataset[["label"]].values
# Create TensorFlow variables for the features and labels,
# cast to float32
x = tf.Variable(x_input, dtype=tf.float32)
y = tf.Variable(y_label, dtype=tf.float32)
# Network size and learning rate
Number_of_features = 2
Number_of_units = 1
learning_rate = 0.01

# Weights and bias, initialised to zero
weight = tf.Variable(tf.zeros([Number_of_features, Number_of_units]))
bias = tf.Variable(tf.zeros([Number_of_units]))

# Stochastic gradient descent optimizer
optimizer = tf.optimizers.SGD(learning_rate)
def perceptron(xx):
    # Weighted sum of the inputs plus bias, then the sigmoid activation
    z = tf.add(tf.matmul(xx, weight), bias)
    output = tf.sigmoid(z)
    return output


def train(epochs):
    for _ in range(epochs):
        # sigmoid_cross_entropy_with_logits expects the raw
        # pre-activation value (the logit), not the sigmoid output
        loss = lambda: tf.reduce_mean(
            tf.nn.sigmoid_cross_entropy_with_logits(
                labels=y, logits=tf.add(tf.matmul(x, weight), bias)
            )
        )
        optimizer.minimize(loss, [weight, bias])


# Train the perceptron for 1000 iterations
train(1000)
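
For reference, the quantity being minimised is the mean sigmoid cross-entropy between the labels and the logits:

$$
L = -\frac{1}{N} \sum_{i=1}^{N} \Big[ y_i \log \sigma(z_i) + (1 - y_i) \log\big(1 - \sigma(z_i)\big) \Big],
\qquad z_i = x_{i1} w_1 + x_{i2} w_2 + b
$$

Each call to optimizer.minimize takes one SGD step on the weights and bias in the direction that reduces this loss, $w \leftarrow w - \eta \nabla_w L$, with learning rate $\eta = 0.01$.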

Statistics#

tf.print(weight)
[[-0.844034076]
 [0.673354685]]
tf.print(bias)
[0.0593947768]
# Passing the input data to check whether the perceptron
# classifies it correctly
ypred = perceptron(x)
# Rounding off the output to convert it into binary format
ypred = tf.round(ypred)
# Measuring the accuracy
acc = accuracy_score(y.numpy(), ypred.numpy())
print(acc)
1.0
# Performance measurement of the model
cnf_matrix = confusion_matrix(y.numpy(), ypred.numpy())
print(cnf_matrix)
[[12  0]
 [ 0  9]]
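
All 12 samples of class 0 and all 9 samples of class 1 are classified correctly, which matches the accuracy of 1.0. As a final visual check, the learned decision boundary (the line where the logit x1*w1 + x2*w2 + b equals 0) can be drawn over the scatter plot and saved to the assets directory; a sketch, assuming the variables defined above (the file name is illustrative):

# Extract the learned parameters
w1, w2 = weight.numpy().flatten()
b = bias.numpy()[0]

# Replot the two classes
plt.scatter(
    dataset[dataset["label"] == 0]["x1"],
    dataset[dataset["label"] == 0]["x2"],
    marker="*",
)
plt.scatter(
    dataset[dataset["label"] == 1]["x1"],
    dataset[dataset["label"] == 1]["x2"],
    marker="<",
)

# Decision boundary: x2 = -(w1 * x1 + b) / w2 (valid while w2 != 0)
x1_min, x1_max = dataset["x1"].min(), dataset["x1"].max()
plt.plot(
    [x1_min, x1_max],
    [-(w1 * x1_min + b) / w2, -(w1 * x1_max + b) / w2],
    color="black",
)
plt.xlabel("x1")
plt.ylabel("x2")
plt.savefig(f"{assets_path}/decision_boundary.png")  # illustrative file name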