Martin Karlsson / deep_learning_study_circle_2016

Commit d331a533, authored Oct 12, 2016 by GIngesson
Commit message: Upload new file
Parent: ca49b651

Showing 1 changed file with 256 additions and 0 deletions:
hw2_autoencoders/Gabriel_Ingesson/autoencoder_eval_3dim_4lay.py (new file, mode 100644)
# -*- coding: utf-8 -*-
# Usage: cd DL/Tensorflow_examples/examples/3_NeuralNetworks, then in a
# Python 2 shell: execfile("autoencoder_eval_3dim_4lay.py")
"""
Autoencoder example: a four-layer autoencoder on MNIST handwritten digits,
with a 3-dimensional bottleneck that is visualized as a 3D scatter plot.

References:
    Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. "Gradient-based
    learning applied to document recognition." Proceedings of the IEEE,
    86(11):2278-2324, November 1998.
Links:
    [MNIST Dataset] http://yann.lecun.com/exdb/mnist/
"""
from __future__ import division, print_function, absolute_import

from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

plt.ion()  # interactive mode, so figures update during training
# Import MNIST data (downloaded to /tmp/data/ on first run)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
# Parameters
training_epochs = 150
batch_size = 256
display_step = 1       # log/plot every display_step epochs
examples_to_show = 10  # number of test digits to reconstruct at the end
total_batch = int(mnist.train.num_examples / batch_size)
learning_rate = 0.01
# Network Parameters
n_hidden_1 = 64  # 1st encoder layer num features
n_hidden_2 = 64  # 2nd encoder layer num features
n_hidden_3 = 36  # 3rd encoder layer num features
n_hidden_4 = 3   # 4th layer: 3-dimensional bottleneck
n_input = 784    # MNIST data input (img shape: 28*28)
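# The encoder thus maps 784 -> 64 -> 64 -> 36 -> 3 and the decoder mirrors
# it back: 3 -> 36 -> 64 -> 64 -> 784. The 3-dimensional bottleneck is what
# gets plotted in the 3D scatter figures below.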
# Buffers for the 3D latent coordinates and digit labels of the first
# 1000 test images (filled in during and after training for plotting)
x1 = np.zeros(1000)
x2 = np.zeros(1000)
x3 = np.zeros(1000)
color = np.zeros(1000)
# tf Graph input (only pictures)
X = tf.placeholder("float", [None, n_input])
weights = {
    'encoder_h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'encoder_h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'encoder_h3': tf.Variable(tf.random_normal([n_hidden_2, n_hidden_3])),
    'encoder_h4': tf.Variable(tf.random_normal([n_hidden_3, n_hidden_4])),
    'decoder_h1': tf.Variable(tf.random_normal([n_hidden_4, n_hidden_3])),
    'decoder_h2': tf.Variable(tf.random_normal([n_hidden_3, n_hidden_2])),
    'decoder_h3': tf.Variable(tf.random_normal([n_hidden_2, n_hidden_1])),
    'decoder_h4': tf.Variable(tf.random_normal([n_hidden_1, n_input])),
}
biases = {
    'encoder_b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'encoder_b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'encoder_b3': tf.Variable(tf.random_normal([n_hidden_3])),
    'encoder_b4': tf.Variable(tf.random_normal([n_hidden_4])),
    'decoder_b1': tf.Variable(tf.random_normal([n_hidden_3])),
    'decoder_b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'decoder_b3': tf.Variable(tf.random_normal([n_hidden_1])),
    'decoder_b4': tf.Variable(tf.random_normal([n_input])),
}
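# Note: tf.random_normal defaults to stddev=1.0, a fairly large initial
# scale for sigmoid units that can slow early training; a smaller stddev
# (e.g. 0.1) is a common alternative, though this script keeps the default.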
# Building the encoder: four dense layers with sigmoid activations,
# each computing sigmoid(x @ W + b)
def encoder(x):
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['encoder_h1']),
                                   biases['encoder_b1']))
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['encoder_h2']),
                                   biases['encoder_b2']))
    layer_3 = tf.nn.sigmoid(tf.add(tf.matmul(layer_2, weights['encoder_h3']),
                                   biases['encoder_b3']))
    layer_4 = tf.nn.sigmoid(tf.add(tf.matmul(layer_3, weights['encoder_h4']),
                                   biases['encoder_b4']))
    return layer_4
# Building the decoder: mirrors the encoder back up to 784 pixels
def decoder(x):
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['decoder_h1']),
                                   biases['decoder_b1']))
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['decoder_h2']),
                                   biases['decoder_b2']))
    layer_3 = tf.nn.sigmoid(tf.add(tf.matmul(layer_2, weights['decoder_h3']),
                                   biases['decoder_b3']))
    layer_4 = tf.nn.sigmoid(tf.add(tf.matmul(layer_3, weights['decoder_h4']),
                                   biases['decoder_b4']))
    return layer_4
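# Shape sketch (illustration, not part of the original file): for a batch of
# B flattened images, encoder maps [B, 784] -> [B, 3] and decoder maps
# [B, 3] -> [B, 784], so decoder(encoder(X)) matches the shape of X.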
# Construct model
encoder_op = encoder(X)
decoder_op = decoder(encoder_op)

# Prediction
y_pred = decoder_op
# Targets (labels) are the input data itself
y_true = X

# Define loss and optimizer: minimize the mean squared reconstruction error
cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(cost)
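# Written out (illustration, not in the original file): for a batch of B
# images the cost is
#   cost = (1 / (B * 784)) * sum_{i,j} (y_true[i, j] - y_pred[i, j])**2
# because tf.reduce_mean averages over all elements of the [B, 784] tensor.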
# Initializing the variables
init = tf.initialize_all_variables()
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    total_batch = int(mnist.train.num_examples / batch_size)
    # Training cycle
    for epoch in range(training_epochs):
        # Loop over all batches
        for i in range(total_batch):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([optimizer, cost], feed_dict={X: batch_xs})
        # Display logs (and refresh the latent-space plot) every display_step epochs
        if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c))
            # Encode the test images with the already-built encoder_op
            # (re-calling encoder() on a NumPy array, as the original did,
            # adds new nodes to the graph on every epoch)
            example_pic = mnist.test.images
            hidden_layer_output = sess.run(encoder_op, feed_dict={X: example_pic})
            # Record the 3D code and digit class of the first 1000 test images
            for i in range(1000):
                x1[i] = hidden_layer_output[i][0]
                x2[i] = hidden_layer_output[i][1]
                x3[i] = hidden_layer_output[i][2]
                # labels are one-hot, so the index of the nonzero entry is the digit
                color[i] = np.where(mnist.test.labels[i] > 0)[0][0]
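            # Worked example of the one-hot decoding above (illustration only,
            # not in the original file): for a label [0,0,0,1,0,0,0,0,0,0],
            # np.where(label > 0)[0] is array([3]), so color[i] becomes 3.0;
            # np.argmax(label) would be the more common idiom.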
            # 3D scatter of the bottleneck activations, one color per digit
            fig = plt.figure(100)
            ax = fig.add_subplot(111, projection='3d')
            cm = plt.get_cmap('gist_rainbow')
            # One scatter call per digit class (the original wrote out ten
            # copies with masks np.logical_or(color == d, 0), i.e. color == d)
            for digit in range(10):
                mask = color == digit
                ax.scatter(x1[mask], x2[mask], x3[mask], marker='o',
                           c=cm(0.1 * digit), label=str(digit))
            ax.set_xlabel('f_1')
            ax.set_ylabel('f_2')
            ax.set_zlabel('f_3')
            plt.legend(loc='upper left', numpoints=1, ncol=3,
                       fontsize=8, bbox_to_anchor=(0, 0))
            plt.pause(0.05)  # give the interactive figure time to redraw
    # After training: final encoding of the test set and the learned weights
    example_pic = mnist.test.images
    hidden_layer_output = sess.run(encoder_op, feed_dict={X: example_pic})
    w1 = sess.run(weights['encoder_h1'])
    w2 = sess.run(weights['encoder_h2'])
    w3 = sess.run(weights['encoder_h3'])
    print(hidden_layer_output)
    print("Optimization Finished!")
    # Apply encode and decode over the test set
    encode_decode = sess.run(
        y_pred, feed_dict={X: mnist.test.images[:examples_to_show]})
    # Compare original images (top row) with their reconstructions (bottom row)
    f, a = plt.subplots(2, 10, figsize=(10, 2))
    for i in range(examples_to_show):
        a[0][i].imshow(np.reshape(mnist.test.images[i], (28, 28)), cmap='Greys_r')
        a[1][i].imshow(np.reshape(encode_decode[i], (28, 28)), cmap='Greys_r')
    f.show()
    # Refresh the 3D coordinates and labels from the final encoding
    for i in range(1000):
        x1[i] = hidden_layer_output[i][0]
        x2[i] = hidden_layer_output[i][1]
        x3[i] = hidden_layer_output[i][2]
        color[i] = np.where(mnist.test.labels[i] > 0)[0][0]
    # Visualize first-layer encoder weights: one 28x28 image per hidden unit
    plt.figure(3)
    w1_t = np.transpose(w1)  # shape (64, 784)
    for i in range(64):
        plt.subplot(8, 8, i + 1)
        plt.imshow(np.reshape(w1_t[i], (28, 28)), cmap='Greys_r')
    # Visualize second-layer encoder weights: one 8x8 image per hidden unit
    plt.figure(4)
    w2_t = np.transpose(w2)  # shape (64, 64)
    for i in range(64):
        plt.subplot(8, 8, i + 1)
        plt.imshow(np.reshape(w2_t[i], (8, 8)), cmap='Greys_r')
    plt.show()
    # Final 3D scatter of the learned 3-dimensional codes, colored by digit
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(x1, x2, x3, c=color, marker='o')
    ax.set_xlabel('X Label')
    ax.set_ylabel('Y Label')
    ax.set_zlabel('Z Label')
    plt.show()
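Compatibility note: the script targets the pre-2.x TensorFlow graph API.
tensorflow.examples.tutorials.mnist, tf.placeholder, and tf.Session were
removed in TensorFlow 2.x, and tf.initialize_all_variables was superseded by
tf.global_variables_initializer from TF 0.12 onward. A minimal sketch of the
one-line swap on newer 1.x releases (the rest of the script is unchanged):

    # init = tf.initialize_all_variables()    # pre-0.12 spelling used above
    init = tf.global_variables_initializer()  # drop-in replacement

Running under TensorFlow 2.x would additionally need the tf.compat.v1 shims
plus tf.compat.v1.disable_eager_execution(), or a rewrite against Keras.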