Clustering

Clustering#

Clustering seeks to group data into clusters based on their properties and then allow us to predict which cluster a new member belongs to.

import numpy as np
import matplotlib.pyplot as plt

We’ll use a dataset generator that is part of scikit-learn called make_moons. This generates data that falls into 2 different sets with a shape that looks like half-moons.

from sklearn import datasets
def generate_data():
    """Generate the two-moons dataset: 200 noisy 2-d samples in 2 clusters.

    Returns
    -------
    x : ndarray of shape (200, 2)
        The (x, y) coordinates of each sample.
    v : ndarray of shape (200,)
        The cluster label (0 or 1) of each sample.
    """
    # make_moons already returns numpy arrays of the right shape, so the
    # element-by-element copy loop in earlier versions was a no-op (and its
    # "encode the output to be 2 elements" comment was misleading: the
    # labels are returned as scalars, not one-hot encoded).
    x, v = datasets.make_moons(200, noise=0.2)
    return x, v
x, v = generate_data()

Let’s look at a point and its value

# show one sample: its 2-d coordinates and its 0/1 cluster label
print(f"x = {x[0]}, value = {v[0]}")
x = [1.81301572 0.18800229], value = 1

Now let’s plot the data

def plot_data(x, v):
    """Scatter-plot the 2-d points in x, colored by their cluster value v.

    Returns the matplotlib figure so the caller can save or show it.
    """
    fig, ax = plt.subplots()
    # unpack the (N, 2) point array into x- and y-coordinate sequences
    xcoords, ycoords = zip(*x)
    ax.scatter(xcoords, ycoords, s=40, c=v, cmap="viridis")
    ax.set_aspect("equal")
    return fig
fig = plot_data(x, v)
../_images/3727d58e6b23cbfb401d8dc5dda8dba4267ef736284c213a397fb9c60f54a70f.png

We want to partition this domain into 2 regions, such that when we come in with a new point, we know which group it belongs to.

First we setup and train our network

from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Input
from keras.optimizers import RMSprop
# Build the classifier: 2 inputs -> two hidden ReLU layers -> a single
# sigmoid output giving the probability of belonging to cluster 1.
model = Sequential([
    Input(shape=(2,)),
    Dense(50, activation="relu"),
    Dense(20, activation="relu"),
    Dense(1, activation="sigmoid"),
])
rms = RMSprop()
# binary cross-entropy matches the single sigmoid output for 2-class labels
model.compile(loss='binary_crossentropy',
              optimizer=rms, metrics=['accuracy'])
model.summary()
Model: "sequential"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                     Output Shape                  Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense (Dense)                   │ (None, 50)             │           150 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_1 (Dense)                 │ (None, 20)             │         1,020 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_2 (Dense)                 │ (None, 1)              │            21 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 1,191 (4.65 KB)
 Trainable params: 1,191 (4.65 KB)
 Non-trainable params: 0 (0.00 B)

We seem to need a lot of epochs here to get a good result

# train on the full dataset; ~200 epochs are needed for the loss to plateau
epochs = 200
results = model.fit(x, v, batch_size=50, epochs=epochs, verbose=2)
Epoch 1/200
4/4 - 0s - 5ms/step - accuracy: 0.7000 - loss: 0.6287
Epoch 2/200
4/4 - 0s - 5ms/step - accuracy: 0.7400 - loss: 0.5998
Epoch 3/200
4/4 - 0s - 5ms/step - accuracy: 0.7600 - loss: 0.5805
Epoch 4/200
4/4 - 0s - 5ms/step - accuracy: 0.7700 - loss: 0.5632
Epoch 5/200
4/4 - 0s - 5ms/step - accuracy: 0.7750 - loss: 0.5474
Epoch 6/200
4/4 - 0s - 5ms/step - accuracy: 0.7850 - loss: 0.5324
Epoch 7/200
4/4 - 0s - 5ms/step - accuracy: 0.8000 - loss: 0.5179
Epoch 8/200
4/4 - 0s - 5ms/step - accuracy: 0.8050 - loss: 0.5027
Epoch 9/200
4/4 - 0s - 5ms/step - accuracy: 0.8100 - loss: 0.4868
Epoch 10/200
4/4 - 0s - 5ms/step - accuracy: 0.8150 - loss: 0.4723
Epoch 11/200
4/4 - 0s - 5ms/step - accuracy: 0.8250 - loss: 0.4580
Epoch 12/200
4/4 - 0s - 5ms/step - accuracy: 0.8300 - loss: 0.4449
Epoch 13/200
4/4 - 0s - 26ms/step - accuracy: 0.8350 - loss: 0.4308
Epoch 14/200
4/4 - 0s - 4ms/step - accuracy: 0.8400 - loss: 0.4189
Epoch 15/200
4/4 - 0s - 5ms/step - accuracy: 0.8400 - loss: 0.4058
Epoch 16/200
4/4 - 0s - 5ms/step - accuracy: 0.8400 - loss: 0.3960
Epoch 17/200
4/4 - 0s - 4ms/step - accuracy: 0.8450 - loss: 0.3844
Epoch 18/200
4/4 - 0s - 4ms/step - accuracy: 0.8450 - loss: 0.3743
Epoch 19/200
4/4 - 0s - 5ms/step - accuracy: 0.8500 - loss: 0.3646
Epoch 20/200
4/4 - 0s - 9ms/step - accuracy: 0.8550 - loss: 0.3547
Epoch 21/200
4/4 - 0s - 4ms/step - accuracy: 0.8600 - loss: 0.3460
Epoch 22/200
4/4 - 0s - 6ms/step - accuracy: 0.8750 - loss: 0.3377
Epoch 23/200
4/4 - 0s - 5ms/step - accuracy: 0.8700 - loss: 0.3295
Epoch 24/200
4/4 - 0s - 6ms/step - accuracy: 0.8700 - loss: 0.3231
Epoch 25/200
4/4 - 0s - 21ms/step - accuracy: 0.8700 - loss: 0.3166
Epoch 26/200
4/4 - 0s - 5ms/step - accuracy: 0.8800 - loss: 0.3098
Epoch 27/200
4/4 - 0s - 5ms/step - accuracy: 0.8800 - loss: 0.3042
Epoch 28/200
4/4 - 0s - 4ms/step - accuracy: 0.8750 - loss: 0.2996
Epoch 29/200
4/4 - 0s - 4ms/step - accuracy: 0.8800 - loss: 0.2960
Epoch 30/200
4/4 - 0s - 4ms/step - accuracy: 0.8800 - loss: 0.2901
Epoch 31/200
4/4 - 0s - 4ms/step - accuracy: 0.8850 - loss: 0.2872
Epoch 32/200
4/4 - 0s - 4ms/step - accuracy: 0.8850 - loss: 0.2832
Epoch 33/200
4/4 - 0s - 5ms/step - accuracy: 0.8850 - loss: 0.2803
Epoch 34/200
4/4 - 0s - 4ms/step - accuracy: 0.8900 - loss: 0.2785
Epoch 35/200
4/4 - 0s - 5ms/step - accuracy: 0.8900 - loss: 0.2748
Epoch 36/200
4/4 - 0s - 5ms/step - accuracy: 0.8900 - loss: 0.2720
Epoch 37/200
4/4 - 0s - 5ms/step - accuracy: 0.8900 - loss: 0.2696
Epoch 38/200
4/4 - 0s - 4ms/step - accuracy: 0.8950 - loss: 0.2682
Epoch 39/200
4/4 - 0s - 5ms/step - accuracy: 0.8900 - loss: 0.2661
Epoch 40/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2637
Epoch 41/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2616
Epoch 42/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2607
Epoch 43/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2584
Epoch 44/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2562
Epoch 45/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2562
Epoch 46/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2541
Epoch 47/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2531
Epoch 48/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2498
Epoch 49/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2488
Epoch 50/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2472
Epoch 51/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2458
Epoch 52/200
4/4 - 0s - 5ms/step - accuracy: 0.8950 - loss: 0.2459
Epoch 53/200
4/4 - 0s - 5ms/step - accuracy: 0.9050 - loss: 0.2429
Epoch 54/200
4/4 - 0s - 42ms/step - accuracy: 0.8950 - loss: 0.2428
Epoch 55/200
4/4 - 0s - 5ms/step - accuracy: 0.9000 - loss: 0.2409
Epoch 56/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2408
Epoch 57/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2380
Epoch 58/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2374
Epoch 59/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2374
Epoch 60/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2339
Epoch 61/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2325
Epoch 62/200
4/4 - 0s - 4ms/step - accuracy: 0.9100 - loss: 0.2329
Epoch 63/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2296
Epoch 64/200
4/4 - 0s - 11ms/step - accuracy: 0.9100 - loss: 0.2305
Epoch 65/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2281
Epoch 66/200
4/4 - 0s - 4ms/step - accuracy: 0.9000 - loss: 0.2273
Epoch 67/200
4/4 - 0s - 6ms/step - accuracy: 0.8950 - loss: 0.2286
Epoch 68/200
4/4 - 0s - 5ms/step - accuracy: 0.9050 - loss: 0.2233
Epoch 69/200
4/4 - 0s - 6ms/step - accuracy: 0.9050 - loss: 0.2223
Epoch 70/200
4/4 - 0s - 6ms/step - accuracy: 0.9050 - loss: 0.2213
Epoch 71/200
4/4 - 0s - 23ms/step - accuracy: 0.9050 - loss: 0.2213
Epoch 72/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2196
Epoch 73/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2174
Epoch 74/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2164
Epoch 75/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2155
Epoch 76/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2140
Epoch 77/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2138
Epoch 78/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2119
Epoch 79/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2102
Epoch 80/200
4/4 - 0s - 5ms/step - accuracy: 0.9050 - loss: 0.2110
Epoch 81/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2075
Epoch 82/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2077
Epoch 83/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2054
Epoch 84/200
4/4 - 0s - 5ms/step - accuracy: 0.9050 - loss: 0.2050
Epoch 85/200
4/4 - 0s - 5ms/step - accuracy: 0.9050 - loss: 0.2018
Epoch 86/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2026
Epoch 87/200
4/4 - 0s - 4ms/step - accuracy: 0.9050 - loss: 0.2009
Epoch 88/200
4/4 - 0s - 4ms/step - accuracy: 0.9100 - loss: 0.2015
Epoch 89/200
4/4 - 0s - 4ms/step - accuracy: 0.9100 - loss: 0.1971
Epoch 90/200
4/4 - 0s - 4ms/step - accuracy: 0.9100 - loss: 0.1983
Epoch 91/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1961
Epoch 92/200
4/4 - 0s - 4ms/step - accuracy: 0.9150 - loss: 0.1948
Epoch 93/200
4/4 - 0s - 4ms/step - accuracy: 0.9150 - loss: 0.1931
Epoch 94/200
4/4 - 0s - 4ms/step - accuracy: 0.9150 - loss: 0.1916
Epoch 95/200
4/4 - 0s - 4ms/step - accuracy: 0.9150 - loss: 0.1910
Epoch 96/200
4/4 - 0s - 5ms/step - accuracy: 0.9150 - loss: 0.1913
Epoch 97/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1883
Epoch 98/200
4/4 - 0s - 5ms/step - accuracy: 0.9150 - loss: 0.1867
Epoch 99/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1878
Epoch 100/200
4/4 - 0s - 46ms/step - accuracy: 0.9200 - loss: 0.1848
Epoch 101/200
4/4 - 0s - 5ms/step - accuracy: 0.9150 - loss: 0.1832
Epoch 102/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1841
Epoch 103/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1810
Epoch 104/200
4/4 - 0s - 5ms/step - accuracy: 0.9250 - loss: 0.1806
Epoch 105/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1797
Epoch 106/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1783
Epoch 107/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1780
Epoch 108/200
4/4 - 0s - 5ms/step - accuracy: 0.9200 - loss: 0.1749
Epoch 109/200
4/4 - 0s - 5ms/step - accuracy: 0.9250 - loss: 0.1736
Epoch 110/200
4/4 - 0s - 12ms/step - accuracy: 0.9250 - loss: 0.1723
Epoch 111/200
4/4 - 0s - 5ms/step - accuracy: 0.9250 - loss: 0.1722
Epoch 112/200
4/4 - 0s - 5ms/step - accuracy: 0.9300 - loss: 0.1702
Epoch 113/200
4/4 - 0s - 7ms/step - accuracy: 0.9300 - loss: 0.1685
Epoch 114/200
4/4 - 0s - 6ms/step - accuracy: 0.9250 - loss: 0.1694
Epoch 115/200
4/4 - 0s - 7ms/step - accuracy: 0.9300 - loss: 0.1678
Epoch 116/200
4/4 - 0s - 6ms/step - accuracy: 0.9300 - loss: 0.1653
Epoch 117/200
4/4 - 0s - 22ms/step - accuracy: 0.9300 - loss: 0.1633
Epoch 118/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1644
Epoch 119/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1613
Epoch 120/200
4/4 - 0s - 4ms/step - accuracy: 0.9200 - loss: 0.1617
Epoch 121/200
4/4 - 0s - 4ms/step - accuracy: 0.9300 - loss: 0.1589
Epoch 122/200
4/4 - 0s - 4ms/step - accuracy: 0.9400 - loss: 0.1577
Epoch 123/200
4/4 - 0s - 4ms/step - accuracy: 0.9350 - loss: 0.1566
Epoch 124/200
4/4 - 0s - 4ms/step - accuracy: 0.9350 - loss: 0.1564
Epoch 125/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1550
Epoch 126/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1527
Epoch 127/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1530
Epoch 128/200
4/4 - 0s - 4ms/step - accuracy: 0.9300 - loss: 0.1528
Epoch 129/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1509
Epoch 130/200
4/4 - 0s - 4ms/step - accuracy: 0.9350 - loss: 0.1491
Epoch 131/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1478
Epoch 132/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1476
Epoch 133/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1454
Epoch 134/200
4/4 - 0s - 5ms/step - accuracy: 0.9350 - loss: 0.1464
Epoch 135/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1430
Epoch 136/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1445
Epoch 137/200
4/4 - 0s - 5ms/step - accuracy: 0.9450 - loss: 0.1410
Epoch 138/200
4/4 - 0s - 5ms/step - accuracy: 0.9450 - loss: 0.1399
Epoch 139/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1396
Epoch 140/200
4/4 - 0s - 5ms/step - accuracy: 0.9450 - loss: 0.1379
Epoch 141/200
4/4 - 0s - 5ms/step - accuracy: 0.9450 - loss: 0.1385
Epoch 142/200
4/4 - 0s - 4ms/step - accuracy: 0.9500 - loss: 0.1367
Epoch 143/200
4/4 - 0s - 4ms/step - accuracy: 0.9450 - loss: 0.1346
Epoch 144/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1350
Epoch 145/200
4/4 - 0s - 5ms/step - accuracy: 0.9400 - loss: 0.1352
Epoch 146/200
4/4 - 0s - 50ms/step - accuracy: 0.9500 - loss: 0.1315
Epoch 147/200
4/4 - 0s - 4ms/step - accuracy: 0.9450 - loss: 0.1320
Epoch 148/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1318
Epoch 149/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1304
Epoch 150/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1290
Epoch 151/200
4/4 - 0s - 5ms/step - accuracy: 0.9500 - loss: 0.1277
Epoch 152/200
4/4 - 0s - 5ms/step - accuracy: 0.9450 - loss: 0.1270
Epoch 153/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1253
Epoch 154/200
4/4 - 0s - 4ms/step - accuracy: 0.9500 - loss: 0.1251
Epoch 155/200
4/4 - 0s - 12ms/step - accuracy: 0.9450 - loss: 0.1252
Epoch 156/200
4/4 - 0s - 4ms/step - accuracy: 0.9550 - loss: 0.1234
Epoch 157/200
4/4 - 0s - 4ms/step - accuracy: 0.9500 - loss: 0.1226
Epoch 158/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1218
Epoch 159/200
4/4 - 0s - 7ms/step - accuracy: 0.9450 - loss: 0.1247
Epoch 160/200
4/4 - 0s - 6ms/step - accuracy: 0.9550 - loss: 0.1194
Epoch 161/200
4/4 - 0s - 6ms/step - accuracy: 0.9500 - loss: 0.1202
Epoch 162/200
4/4 - 0s - 7ms/step - accuracy: 0.9550 - loss: 0.1186
Epoch 163/200
4/4 - 0s - 19ms/step - accuracy: 0.9550 - loss: 0.1176
Epoch 164/200
4/4 - 0s - 4ms/step - accuracy: 0.9550 - loss: 0.1170
Epoch 165/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1161
Epoch 166/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1148
Epoch 167/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1159
Epoch 168/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1135
Epoch 169/200
4/4 - 0s - 4ms/step - accuracy: 0.9550 - loss: 0.1144
Epoch 170/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1117
Epoch 171/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1140
Epoch 172/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1097
Epoch 173/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1115
Epoch 174/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1109
Epoch 175/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1092
Epoch 176/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1079
Epoch 177/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1064
Epoch 178/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1066
Epoch 179/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1072
Epoch 180/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1066
Epoch 181/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1043
Epoch 182/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1043
Epoch 183/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1027
Epoch 184/200
4/4 - 0s - 5ms/step - accuracy: 0.9550 - loss: 0.1046
Epoch 185/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1016
Epoch 186/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1008
Epoch 187/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1020
Epoch 188/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.1040
Epoch 189/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0995
Epoch 190/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0986
Epoch 191/200
4/4 - 0s - 53ms/step - accuracy: 0.9600 - loss: 0.0989
Epoch 192/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0977
Epoch 193/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0978
Epoch 194/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.0964
Epoch 195/200
4/4 - 0s - 5ms/step - accuracy: 0.9600 - loss: 0.0978
Epoch 196/200
4/4 - 0s - 5ms/step - accuracy: 0.9650 - loss: 0.0964
Epoch 197/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0951
Epoch 198/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0967
Epoch 199/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0952
Epoch 200/200
4/4 - 0s - 4ms/step - accuracy: 0.9600 - loss: 0.0946
# evaluate on the training data (this example has no separate test set);
# score[0] is the loss, score[1] the accuracy metric
score = model.evaluate(x, v, verbose=0)
print(f"score = {score[0]}")
print(f"accuracy = {score[1]}")
score = 0.09179027378559113
accuracy = 0.9599999785423279

Let’s look at a prediction. We need to feed in a single point as an array of shape (N, 2), where N is the number of points

# predict for a single point; the input must still be 2-d, shape (N, 2)
res = model.predict(np.array([[-2, 2]]))
res
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step

1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step
array([[1.338889e-14]], dtype=float32)

We see that we get a floating point number. We will need to convert this to 0 or 1 by rounding.

Let’s plot the partitioning

# number of grid points in the x (M) and y (N) directions
M = 128
N = 128

# extent of the plotting domain, chosen to cover the data shown above
xmin = -1.75
xmax = 2.5
ymin = -1.25
ymax = 1.75

# coordinates of the grid lines
xpt = np.linspace(xmin, xmax, M)
ypt = np.linspace(ymin, ymax, N)

To make the prediction go faster, we want to feed in all of the grid points at once — every combination of an x and a y coordinate — as an array of (x, y) pairs of the form:

[[xpt[0], ypt[0]],
 [xpt[1], ypt[1]],
 ...
]

We can see that this packs them into the vector

# all (x, y) grid combinations as an array of shape (M*N, 2);
# row i*N + j holds (xpt[i], ypt[j])
pairs = np.array(np.meshgrid(xpt, ypt)).T.reshape(-1, 2)
pairs[0]
array([-1.75, -1.25])

Now we do the prediction. We will get a vector out, which we reshape to match the original domain.

res = model.predict(pairs, verbose=0)
# reshape in place so res[i, j] is the prediction at (xpt[i], ypt[j])
res.shape = (M, N)

Finally, round to 0 or 1

# threshold the sigmoid output at 0.5 to assign each grid point to cluster 0 or 1
domain = np.where(res > 0.5, 1, 0)

and we can plot the data

fig, ax = plt.subplots()
# show the predicted partitioning as a translucent background image;
# domain.T puts the y index on the vertical axis to match the scatter plot
ax.imshow(domain.T, origin="lower",
          extent=[xmin, xmax, ymin, ymax], alpha=0.25)
# overlay the original data points, colored by their true labels
xpt = [q[0] for q in x]
ypt = [q[1] for q in x]

ax.scatter(xpt, ypt, s=40, c=v, cmap="viridis")
<matplotlib.collections.PathCollection at 0x7f3e64db6d50>
../_images/18a2431ca355b19a900b6298b01e1f22fb68884ecd9b24cfef2634bf9db1b7fb.png