-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpca_with_autoencoders.py
More file actions
43 lines (35 loc) · 1.27 KB
/
pca_with_autoencoders.py
File metadata and controls
43 lines (35 loc) · 1.27 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import tensorflow as tf
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn.datasets import make_blobs
from sklearn.preprocessing import MinMaxScaler
from tensorflow.contrib.layers import fully_connected
# Build a synthetic 3-feature dataset with two well-separated blobs, then
# rescale every feature into [0, 1] so all inputs share a comparable range.
data = make_blobs(n_samples=100, n_features=3, centers=2, random_state=101)
scaled_data = MinMaxScaler().fit_transform(data[0])

# Unpack the three feature columns for plotting.
data_x, data_y, data_z = scaled_data.T

# Show the scaled points in 3-D, coloured by their blob label (data[1]).
fig = plt.figure()
axes = fig.add_subplot(111, projection='3d')
axes.scatter(data_x, data_y, data_z, c=data[1])
plt.show()
# Autoencoder hyperparameters: a 3 -> 2 -> 3 linear network.  With no
# activation function, the 2-unit bottleneck learns a linear projection of
# the data, which is equivalent to PCA onto a 2-D subspace.
num_inputs = 3
num_hidden = 2
num_outputs = num_inputs  # autoencoder: reconstruct the input
num_steps = 1000
learning_rate = 0.01
# Placeholder for a batch of 3-D points (batch size left open).
x = tf.placeholder(tf.float32, shape=[None, num_inputs])
# activation_fn=None keeps both layers purely linear (required for the
# PCA equivalence); tf.contrib.layers is TensorFlow 1.x-only API.
hidden = fully_connected(x, num_hidden, activation_fn=None)
outputs = fully_connected(hidden, num_outputs, activation_fn=None)
# Reconstruction loss: mean squared error between output and input.
loss = tf.reduce_mean(tf.square(outputs - x))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train = optimizer.minimize(loss)
init = tf.global_variables_initializer()
# Train the autoencoder, then read out the 2-D bottleneck representation.
# NOTE: the body of the `with` block must be indented (the original paste had
# lost its indentation and was not valid Python), and hidden.eval() has to
# run while the session is still open so the trained variables are available.
with tf.Session() as sess:
    sess.run(init)
    # Full-batch training: every step feeds the whole scaled dataset.
    for i in range(num_steps):
        sess.run(train, feed_dict={x: scaled_data})
    # Activations of the 2-unit hidden layer = the learned 2-D projection.
    output_2d = hidden.eval(feed_dict={x: scaled_data})

# Plot the 2-D embedding, coloured by the original blob labels.
plt.scatter(output_2d[:, 0], output_2d[:, 1], c=data[1])
plt.show()