Resource Description
Uses Python and a generative adversarial network (GAN) to generate one-dimensional data: the generator is trained to produce samples approximating a Gaussian data distribution (mean 4, standard deviation 0.5), while the discriminator learns to tell real samples from generated ones.
Code Snippet and File Information
import argparse
import numpy as np
from scipy.stats import norm
import tensorflow as tf
import matplotlib.pyplot as plt
from matplotlib import animation
import seaborn as sns
sns.set(color_codes=True)
seed = 42
np.random.seed(seed)
tf.set_random_seed(seed)
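# Real data distribution: a Gaussian with mean 4 and standard deviation 0.5; samples are returned sorted.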
class DataDistribution(object):
    def __init__(self):
        self.mu = 4
        self.sigma = 0.5
    def sample(self, N):
        samples = np.random.normal(self.mu, self.sigma, N)
        samples.sort()
        return samples
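# Generator input (noise) distribution: evenly spaced points over [-range, range] plus a small random perturbation (stratified sampling).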
class GeneratorDistribution(object):
    def __init__(self, range):
        self.range = range
    def sample(self, N):
        return np.linspace(-self.range, self.range, N) + \
            np.random.random(N) * 0.01
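# linear: a single fully connected layer (matmul plus bias) with Gaussian-initialized weights and zero-initialized bias.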
def linear(input, output_dim, scope=None, stddev=1.0):
    norm = tf.random_normal_initializer(stddev=stddev)
    const = tf.constant_initializer(0.0)
    with tf.variable_scope(scope or 'linear'):
        w = tf.get_variable('w', [input.get_shape()[1], output_dim], initializer=norm)
        b = tf.get_variable('b', [output_dim], initializer=const)
        return tf.matmul(input, w) + b
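# Generator network: a small MLP with one softplus hidden layer and a linear scalar output.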
def generator(input, h_dim):
    h0 = tf.nn.softplus(linear(input, h_dim, 'g0'))
    h1 = linear(h0, 1, 'g1')
    return h1
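# Discriminator network: three tanh hidden layers of width 2*h_dim and a sigmoid output in (0, 1).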
def discriminator(input, h_dim):
    h0 = tf.tanh(linear(input, h_dim * 2, 'd0'))
    h1 = tf.tanh(linear(h0, h_dim * 2, 'd1'))
    h2 = tf.tanh(linear(h1, h_dim * 2, scope='d2'))
    h3 = tf.sigmoid(linear(h2, 1, scope='d3'))
    return h3
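# Plain gradient descent with an exponentially decaying learning rate (decay 0.95 every 150 steps, staircase).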
def optimizer(loss, var_list, initial_learning_rate):
    decay = 0.95
    num_decay_steps = 150
    batch = tf.Variable(0)
    learning_rate = tf.train.exponential_decay(
        initial_learning_rate,
        batch,
        num_decay_steps,
        decay,
        staircase=True
    )
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(
        loss,
        global_step=batch,
        var_list=var_list
    )
    return optimizer
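# GAN model: stores the data and noise distributions plus training hyperparameters; _create_model builds the pretraining discriminator (D_pre, fit with a mean-squared-error loss) and the generator graph.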
class GAN(object):
    def __init__(self, data, gen, num_steps, batch_size, log_every):
        self.data = data
        self.gen = gen
        self.num_steps = num_steps
        self.batch_size = batch_size
        self.log_every = log_every
        self.mlp_hidden_size = 4

        self.learning_rate = 0.03
        self._create_model()
    def _create_model(self):
        with tf.variable_scope('D_pre'):
            self.pre_input = tf.placeholder(tf.float32, shape=(self.batch_size, 1))
            self.pre_labels = tf.placeholder(tf.float32, shape=(self.batch_size, 1))
            D_pre = discriminator(self.pre_input, self.mlp_hidden_size)
            self.pre_loss = tf.reduce_mean(tf.square(D_pre - self.pre_labels))
            self.pre_opt = optimizer(self.pre_loss, None, self.learning_rate)
        # This defines the generator network - it takes samples from a noise
        # distribution as input and passes them through an MLP.
        with tf.variable_scope('Gen'):
            self.z = tf.placeholder(tf.float32, shape=(self.batch_size, 1))
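
The snippet above is cut off inside GAN._create_model, before the remaining graph definitions and the training loop. As a rough sketch only, assuming TensorFlow 1.x as in the listing, the fully shown helpers (DataDistribution, GeneratorDistribution, generator, discriminator, optimizer) could be wired into a minimal 1D GAN training loop along the following lines; the function name train_sketch, the loss expressions, and the hyperparameter values are illustrative assumptions, not part of the original file.

def train_sketch(num_steps=1200, batch_size=12, hidden_size=4, learning_rate=0.03):
    # Real data and generator input (noise) distributions from the listing above.
    data = DataDistribution()
    gen = GeneratorDistribution(8)

    # Generator: maps noise z to generated samples G.
    with tf.variable_scope('G'):
        z = tf.placeholder(tf.float32, shape=(batch_size, 1))
        G = generator(z, hidden_size)

    # Discriminator: applied to real samples (D1) and generated samples (D2),
    # sharing the same weights via variable reuse.
    with tf.variable_scope('D') as scope:
        x = tf.placeholder(tf.float32, shape=(batch_size, 1))
        D1 = discriminator(x, hidden_size)
        scope.reuse_variables()
        D2 = discriminator(G, hidden_size)

    # Standard GAN losses: D maximizes log D(x) + log(1 - D(G(z))),
    # G maximizes log D(G(z)).
    loss_d = tf.reduce_mean(-tf.log(D1) - tf.log(1 - D2))
    loss_g = tf.reduce_mean(-tf.log(D2))

    vars_d = [v for v in tf.trainable_variables() if v.name.startswith('D')]
    vars_g = [v for v in tf.trainable_variables() if v.name.startswith('G')]
    opt_d = optimizer(loss_d, vars_d, learning_rate)
    opt_g = optimizer(loss_g, vars_g, learning_rate)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for step in range(num_steps):
            # Update the discriminator on a batch of real and generated samples.
            x_batch = np.reshape(data.sample(batch_size), (batch_size, 1))
            z_batch = np.reshape(gen.sample(batch_size), (batch_size, 1))
            ld, _ = sess.run([loss_d, opt_d], feed_dict={x: x_batch, z: z_batch})
            # Update the generator.
            z_batch = np.reshape(gen.sample(batch_size), (batch_size, 1))
            lg, _ = sess.run([loss_g, opt_g], feed_dict={z: z_batch})
            if step % 100 == 0:
                print('{}: d_loss={:.4f}  g_loss={:.4f}'.format(step, ld, lg))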