Kaan Güney Keklikçi / beta-vae-normalizing-flows / Commits

Commit 49b12917, authored Aug 08, 2021 by Kaan Güney Keklikçi

implemented inverse autoregressive flow, adding optimizer test for maf

parent dd081af7

Showing 3 changed files with 71 additions and 10 deletions
scripts/flows/maf/maf.py                          +36 -4
scripts/flows/maf/maf_execute.py                  +16 -3
scripts/flows/maf/maf_optimizer_experiment.py     +19 -3
scripts/flows/maf/maf.py

@@ -10,8 +10,16 @@ plt.style.use('seaborn')
 tfd = tfp.distributions
 tfb = tfp.bijectors

-class MAF:
-    def __init__(self, dtype, tf_version, batch_size, params, hidden_units, base_dist, dims):
+class MAF(object):
+    def __init__(self, dtype, tf_version, batch_size, params, hidden_units, base_dist, dims,
+                 activation, conditional, hidden_degrees, conditional_event_shape,
+                 conditional_input_layers, event_shape):
         self.tf_version = tf_version
         self.dtype = dtype
         self.base_dist = base_dist
@@ -19,6 +27,12 @@ class MAF:
         self.params = params
         self.hidden_units = hidden_units
         self.batch_size = batch_size
+        self.activation = activation
+        self.conditional = conditional
+        self.conditional_event_shape = conditional_event_shape
+        self.hidden_degrees = hidden_degrees
+        self.conditional_input_layers = conditional_input_layers
+        self.event_shape = event_shape

     def get_tf_version(self):
         return self.tf_version
@@ -39,8 +53,16 @@ class MAF:
         samples = data_iterator.get_next()
         return samples

-    def get_shift_scale_func(self, data):
-        func = tfb.AutoregressiveNetwork(params=self.params, hidden_units=self.hidden_units)
+    def get_shift_scale_func(self):
+        func = tfb.AutoregressiveNetwork(params=self.params,
+                                         hidden_units=self.hidden_units,
+                                         activation=self.activation,
+                                         conditional=self.conditional,
+                                         conditional_event_shape=self.conditional_event_shape,
+                                         event_shape=self.event_shape,
+                                         conditional_input_layers=self.conditional_input_layers,
+                                         hidden_degrees=self.hidden_degrees)
         return func

     def make_maf(self, data):
@@ -51,3 +73,13 @@ class MAF:
         maf = tfd.TransformedDistribution(tfd.Sample(distribution, sample_shape), bijector)
         return maf
+
+class IAF(MAF):
+    def make_maf(self, data):
+        distribution = self.base_dist
+        sample_shape = self.get_dims(data)
+        shift_scale_function = self.get_shift_scale_func()
+        bijector = tfb.Invert(tfb.MaskedAutoregressiveFlow(shift_scale_function))
+        maf = tfd.TransformedDistribution(tfd.Sample(distribution, sample_shape), bijector)
+        return maf
\ No newline at end of file
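For context on the change above: an IAF is just a MAF with the bijector inverted, which swaps the fast and slow directions (MAF evaluates densities in one network pass but samples sequentially; IAF samples in one pass but evaluates densities sequentially), so the new IAF class differs from MAF only in wrapping the bijector with tfb.Invert. Below is a minimal standalone sketch of the same construction, assuming TF2 eager execution; the dims, hidden_units, and batch sizes are illustrative, not the repo's settings.

import tensorflow as tf
import tensorflow_probability as tfp

tfd = tfp.distributions
tfb = tfp.bijectors

dims = 2  # illustrative event size, not the repo's data dimensionality

# Conditional MADE producing shift and log-scale, as in get_shift_scale_func
made = tfb.AutoregressiveNetwork(
    params=2,
    hidden_units=[32, 32],  # illustrative
    activation='relu',
    event_shape=(dims,),
    conditional=True,
    conditional_event_shape=(dims,),
    conditional_input_layers='first_layer',
    hidden_degrees='random')

maf_bijector = tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=made)
base = tfd.Sample(tfd.Normal(loc=0., scale=1.), sample_shape=[dims])

# MAF: density evaluation is one network pass, sampling is sequential.
maf = tfd.TransformedDistribution(base, bijector=maf_bijector)

# IAF: inverting the same bijector makes sampling the single-pass direction.
iaf = tfd.TransformedDistribution(base, bijector=tfb.Invert(maf_bijector))

x = tf.random.normal([4, dims])
print(maf.log_prob(x, bijector_kwargs={'conditional_input': x}))
y = iaf.sample(4, bijector_kwargs={'conditional_input': x})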
scripts/flows/maf/maf_execute.py

@@ -8,6 +8,7 @@ tf.compat.v1.disable_eager_execution()
 import tensorflow_probability as tfp
 import matplotlib.pyplot as plt
+plt.style.use('seaborn')

 from data_loader import load_data
 from data_preprocesser import preprocess_data
@@ -28,7 +29,6 @@ def train(session, loss, optimizer, steps=int(1e5)):
             print('Iteration {iteration}: {loss}'.format(iteration=i, loss=loss_per_iteration))
     return recorded_losses

 def plot_results(recorded_losses):
     """ plot loss """
@@ -81,10 +81,23 @@ def main():
     base_dist = tfp.distributions.Normal(loc=0., scale=1.)
     dims = data.shape[1]
     learning_rate = 1e-4
+    activation = 'relu'
+    hidden_degrees = 'random'
+    conditional = True
+    conditional_event_shape = (dims,)
+    event_shape = conditional_event_shape
+    conditional_input_layers = 'first_layer'

     """ initialize samples """
-    maf = MAF(dtype, tf_version, batch_size, params, hidden_units, base_dist, dims)
+    maf = MAF(dtype, tf_version, batch_size, params, hidden_units, base_dist, dims,
+              activation, conditional, hidden_degrees, conditional_event_shape,
+              conditional_input_layers, event_shape)
     dims = maf.get_dims(data)
     samples = maf.create_tensor(data)
@@ -99,7 +112,7 @@ def main():
     """ initialize loss and optimizer """
-    loss = -tf.reduce_mean(maf.log_prob(samples))
+    loss = -tf.reduce_mean(maf.log_prob(samples, bijector_kwargs={'conditional_input': samples}))
     optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate).minimize(loss)
     session = tf.compat.v1.Session()
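A side note on the loss change above: once the AutoregressiveNetwork is built with conditional=True, every log_prob call must supply the conditional input through bijector_kwargs, which is why the loss line changes in lockstep with the constructor. Below is a minimal self-contained sketch of that training step in the same tf.compat.v1 graph style; feeding the samples as their own conditional input mirrors the diff, and the shapes, hidden_units, and synthetic batch are illustrative assumptions.

import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

tf.compat.v1.disable_eager_execution()
tfd = tfp.distributions
tfb = tfp.bijectors

dims = 2  # illustrative, not the repo's data dimensionality
samples = tf.compat.v1.placeholder(tf.float32, shape=[None, dims])

made = tfb.AutoregressiveNetwork(
    params=2, hidden_units=[32, 32], activation='relu',
    event_shape=(dims,), conditional=True,
    conditional_event_shape=(dims,))
maf = tfd.TransformedDistribution(
    tfd.Sample(tfd.Normal(loc=0., scale=1.), sample_shape=[dims]),
    bijector=tfb.MaskedAutoregressiveFlow(made))

# Without bijector_kwargs the conditional MADE has no conditional_input
# to consume, so log_prob would fail at evaluation time.
loss = -tf.reduce_mean(maf.log_prob(samples, bijector_kwargs={'conditional_input': samples}))
train_op = tf.compat.v1.train.AdamOptimizer(1e-4).minimize(loss)

with tf.compat.v1.Session() as sess:
    sess.run(tf.compat.v1.global_variables_initializer())
    batch = np.random.randn(64, dims).astype(np.float32)  # synthetic stand-in data
    for _ in range(10):
        _, loss_value = sess.run([train_op, loss], feed_dict={samples: batch})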
scripts/flows/maf/maf_optimizer_experiment.py

@@ -92,10 +92,23 @@ def main():
     dims = data.shape[1]
     learning_rate = 1e-4
     steps = 1e4
+    activation = 'relu'
+    hidden_degrees = 'random'
+    conditional = True
+    conditional_event_shape = (dims,)
+    event_shape = conditional_event_shape
+    conditional_input_layers = 'first_layer'

     """ initialize samples """
-    maf = MAF(dtype, tf_version, batch_size, params, hidden_units, base_dist, dims)
+    maf = MAF(dtype, tf_version, batch_size, params, hidden_units, base_dist, dims,
+              activation, conditional, hidden_degrees, conditional_event_shape,
+              conditional_input_layers, event_shape)
     dims = maf.get_dims(data)
     samples = maf.create_tensor(data)
@@ -110,7 +123,7 @@ def main():
     """ initialize loss and optimizer """
-    loss = -tf.reduce_mean(maf.log_prob(samples))
+    loss = -tf.reduce_mean(maf.log_prob(samples, bijector_kwargs={'conditional_input': samples}))
     optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate).minimize(loss)
     experiment = Experiment(optimizer, learning_rate, loss, steps)
@@ -138,3 +151,6 @@ def main():

 if __name__ == "__main__":
     main()
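The commit message mentions an optimizer test for MAF, but the Experiment class used above is not shown in this diff. The sketch below is therefore only a hypothetical reading of what such a comparison could look like in the same tf.compat.v1 style; the run_experiment helper, the optimizer list, and the loop structure are assumptions, not the repo's code.

import tensorflow as tf

# Hypothetical optimizer comparison; the repo's Experiment(optimizer,
# learning_rate, loss, steps) presumably wraps a loop like this (assumption).
OPTIMIZERS = {
    'adam': tf.compat.v1.train.AdamOptimizer,
    'rmsprop': tf.compat.v1.train.RMSPropOptimizer,
    'sgd': tf.compat.v1.train.GradientDescentOptimizer,
}

def run_experiment(loss, feed_dict, learning_rate=1e-4, steps=int(1e4)):
    """Train the same loss under each optimizer and record the final loss."""
    results = {}
    for name, opt_cls in OPTIMIZERS.items():
        train_op = opt_cls(learning_rate).minimize(loss)
        with tf.compat.v1.Session() as sess:
            sess.run(tf.compat.v1.global_variables_initializer())
            for _ in range(steps):
                _, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)
            results[name] = loss_value
    return results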