Skip to content
This repository was archived by the owner on Jan 1, 2021. It is now read-only.

Commit 6223b07

Browse files
committed
code for lecture 5
1 parent 33d9678 commit 6223b07

File tree

5 files changed

+119
-2
lines changed

5 files changed

+119
-2
lines changed

.gitignore

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,3 +4,7 @@
44
*.SUNet
55
*.pyc
66
.env/*
7+
examples/data/*
8+
examples/graphs/*
9+
examples/checkpoints/*
10+
examples/visualization/*

examples/04_word2vec_visualize.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ def visualize(self, visual_fld, num_visualize):
158158
embedding.tensor_name = embedding_var.name
159159

160160
# link this tensor to its metadata file, in this case the first NUM_VISUALIZE words of vocab
161-
embedding.metadata_path = os.path.join(visual_fld, 'vocab_' + str(num_visualize) + '.tsv')
161+
embedding.metadata_path = 'vocab_' + str(num_visualize) + '.tsv'
162162

163163
# saves a configuration file that TensorBoard will read during startup.
164164
projector.visualize_embeddings(summary_writer, config)

examples/05_randomization.py

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
""" Examples to demonstrate ops level randomization
2+
CS 20: "TensorFlow for Deep Learning Research"
3+
cs20.stanford.edu
4+
Chip Huyen (chiphuyen@cs.stanford.edu)
5+
Lecture 05
6+
"""
7+
import os
8+
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
9+
10+
import tensorflow as tf
11+
12+
# Example 1: session keeps track of the random state
13+
c = tf.random_uniform([], -10, 10, seed=2)
14+
15+
with tf.Session() as sess:
16+
print(sess.run(c)) # >> 3.574932
17+
print(sess.run(c)) # >> -5.9731865
18+
19+
# Example 2: each new session will start the random state all over again.
20+
c = tf.random_uniform([], -10, 10, seed=2)
21+
22+
with tf.Session() as sess:
23+
print(sess.run(c)) # >> 3.574932
24+
25+
with tf.Session() as sess:
26+
print(sess.run(c)) # >> 3.574932
27+
28+
# Example 3: with operation level random seed, each op keeps its own seed.
29+
c = tf.random_uniform([], -10, 10, seed=2)
30+
d = tf.random_uniform([], -10, 10, seed=2)
31+
32+
with tf.Session() as sess:
33+
print(sess.run(c)) # >> 3.574932
34+
print(sess.run(d)) # >> 3.574932
35+
36+
# Example 4: graph level random seed
37+
tf.set_random_seed(2)
38+
c = tf.random_uniform([], -10, 10)
39+
d = tf.random_uniform([], -10, 10)
40+
41+
with tf.Session() as sess:
42+
print(sess.run(c)) # >> 9.123926
43+
print(sess.run(d)) # >> -4.5340395
44+

examples/05_variable_sharing.py

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
""" Examples to demonstrate variable sharing
2+
CS 20: 'TensorFlow for Deep Learning Research'
3+
cs20.stanford.edu
4+
Chip Huyen (chiphuyen@cs.stanford.edu)
5+
Lecture 05
6+
"""
7+
import os
8+
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
9+
10+
import tensorflow as tf
11+
12+
# Two (200, 100) batches of truncated-normal noise, fed to every network
# variant defined below.
x1 = tf.truncated_normal([200, 100], name='x1')
x2 = tf.truncated_normal([200, 100], name='x2')
14+
15+
def two_hidden_layers(x):
    """Two fully connected layers (100 -> 50 -> 10) built with tf.Variable.

    Every call creates a brand-new set of variables, so calling it twice
    duplicates the weights — the motivating problem for variable sharing
    in this lecture.

    Args:
        x: float tensor of shape [200, 100].

    Returns:
        Logits tensor of shape [200, 10].
    """
    assert x.shape.as_list() == [200, 100]
    w1 = tf.Variable(tf.random_normal([100, 50]), name='h1_weights')
    b1 = tf.Variable(tf.zeros([50]), name='h1_biases')
    h1 = tf.matmul(x, w1) + b1
    assert h1.shape.as_list() == [200, 50]
    w2 = tf.Variable(tf.random_normal([50, 10]), name='h2_weights')
    # Fix: bias was misnamed '2_biases'; 'h2_biases' matches the
    # h1_weights / h1_biases / h2_weights naming convention.
    b2 = tf.Variable(tf.zeros([10]), name='h2_biases')
    logits = tf.matmul(h1, w2) + b2
    return logits
25+
26+
def two_hidden_layers_2(x):
    """Same two-layer network, declared through tf.get_variable.

    Unlike the tf.Variable version, these variables can be shared by
    wrapping the call in a variable scope with reuse enabled.

    Args:
        x: float tensor of shape [200, 100].

    Returns:
        Logits tensor of shape [200, 10].
    """
    assert x.shape.as_list() == [200, 100]

    weights_1 = tf.get_variable('h1_weights', [100, 50], initializer=tf.random_normal_initializer())
    biases_1 = tf.get_variable('h1_biases', [50], initializer=tf.constant_initializer(0.0))
    hidden = tf.matmul(x, weights_1) + biases_1
    assert hidden.shape.as_list() == [200, 50]

    weights_2 = tf.get_variable('h2_weights', [50, 10], initializer=tf.random_normal_initializer())
    biases_2 = tf.get_variable('h2_biases', [10], initializer=tf.constant_initializer(0.0))
    return tf.matmul(hidden, weights_2) + biases_2
36+
37+
# logits1 = two_hidden_layers(x1)
38+
# logits2 = two_hidden_layers(x2)
39+
40+
# logits1 = two_hidden_layers_2(x1)
41+
# logits2 = two_hidden_layers_2(x2)
42+
43+
# with tf.variable_scope('two_layers') as scope:
44+
# logits1 = two_hidden_layers_2(x1)
45+
# scope.reuse_variables()
46+
# logits2 = two_hidden_layers_2(x2)
47+
48+
# with tf.variable_scope('two_layers') as scope:
49+
# logits1 = two_hidden_layers_2(x1)
50+
# scope.reuse_variables()
51+
# logits2 = two_hidden_layers_2(x2)
52+
53+
def fully_connected(x, output_dim, scope):
    """One dense layer (x @ w + b) whose variables live in `scope`.

    tf.AUTO_REUSE makes the first call with a given scope create the
    'weights'/'biases' variables and every later call reuse them.

    Args:
        x: 2-D input tensor; its second dimension sizes the weight matrix.
        output_dim: number of output units.
        scope: variable-scope name that keys the shared variables.

    Returns:
        The layer output, shape [x.shape[0], output_dim].
    """
    with tf.variable_scope(scope, reuse=tf.AUTO_REUSE) as scope:
        input_dim = x.shape[1]
        w = tf.get_variable('weights', [input_dim, output_dim], initializer=tf.random_normal_initializer())
        b = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0))
        return tf.matmul(x, w) + b
58+
59+
def two_hidden_layers(x):
    """Two stacked dense layers (input -> 50 -> 10) built from the
    AUTO_REUSE `fully_connected` helper, so repeated calls share weights.

    Args:
        x: 2-D float tensor with 100 columns (e.g. shape [200, 100]).

    Returns:
        The [batch, 10] output of the second layer.
    """
    h1 = fully_connected(x, 50, 'h1')
    h2 = fully_connected(h1, 10, 'h2')
    # Fix: the original forgot to return h2, so the logits1/logits2
    # assignments below were silently bound to None.
    return h2
62+
63+
# Build the network twice on different inputs inside one scope. Because
# `fully_connected` opens its scopes with tf.AUTO_REUSE, both calls share
# the same variables — no explicit scope.reuse_variables() is required.
with tf.variable_scope('two_layers') as scope:
    logits1 = two_hidden_layers(x1)
    # scope.reuse_variables()
    logits2 = two_hidden_layers(x2)

# Dump the final graph so it can be inspected in TensorBoard.
writer = tf.summary.FileWriter('./graphs/cool_variables', tf.get_default_graph())
writer.close()

examples/word2vec_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ def most_common_words(visual_fld, num_visualize):
6161
"""
6262
words = open(os.path.join(visual_fld, 'vocab.tsv'), 'r').readlines()[:num_visualize]
6363
words = [word for word in words]
64-
file = open(os.path.join(visual_fld, 'vocab_' + str(num_visualize) + '.tsv'), "w")
64+
file = open(os.path.join(visual_fld, 'vocab_' + str(num_visualize) + '.tsv'), 'w')
6565
for word in words:
6666
file.write(word)
6767
file.close()

0 commit comments

Comments
 (0)