# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:09:10 2018
@author: 32641
"""
import tensorflow as tf
import xlrd
import numpy as np
# --- Load the two raw series from the Excel workbook ---
# Column 1 -> data0 (first target series), column 2 -> data2 (second target
# series).  NOTE(review): presumably exchange rate and stock index, per the
# workbook name -- confirm against the spreadsheet.
data0 = []
data2 = []
data = xlrd.open_workbook('C:/Users/32641/Desktop/论文2/汇率上证.xls')
table = data.sheet_by_name('Sheet1')
nrows = table.nrows
# Start at row 1: row 0 is the header.
for rownum in range(1, nrows):
    row = table.row_values(rownum)
    data0.append(row[1])
    data2.append(row[2])
# --- Min-max scale each series to [0, 1] ---
# BUG FIX: the original normalised data0 twice (timeseries2 was a copy of
# timeseries1 instead of the scaled data2).  min()/max() are also hoisted out
# of the loop: they were recomputed per element, making the pass O(n^2).
def _min_max_scale(series):
    """Return *series* scaled to [0, 1] as a 1-D numpy array."""
    lo = min(series)
    hi = max(series)
    span = hi - lo
    return np.array([(v - lo) / span for v in series])

timeseries1 = _min_max_scale(data0)
timeseries2 = _min_max_scale(data2)
lag=1  # window size: number of past time steps used to predict the next value
def cut(timeseries, lag):
    """Turn a 1-D series into supervised-learning pairs.

    Row i of X is timeseries[i:i+lag]; row i of y is timeseries[i+lag].
    Returns arrays of shape (n-lag, lag) and (n-lag, 1).

    The original built the arrays with one np.vstack per sample (O(n^2))
    and needed dummy zero rows that were sliced off at the end; building
    lists and converting once is linear and needs no dummy rows.
    """
    n_samples = len(timeseries) - lag
    x = np.array([timeseries[i:i + lag] for i in range(n_samples)]).reshape(-1, lag)
    y = np.array([timeseries[i + lag] for i in range(n_samples)]).reshape(-1, 1)
    return x, y
# Build supervised pairs for both series and split 80/20 chronologically.
x1, y1 = cut(timeseries1, lag)
x2, y2 = cut(timeseries2, lag)
split_boundary = int(x1.shape[0] * 0.8)
train_x1 = x1[:split_boundary]
test_x1 = x1[split_boundary:]
# BUG FIX: the targets previously sliced x1/x2 again instead of y1/y2,
# so the model was being trained to predict its own input.
train_y1 = y1[:split_boundary]
test_y1 = y1[split_boundary:]
train_x2 = x2[:split_boundary]
test_x2 = x2[split_boundary:]
train_y2 = y2[:split_boundary]
test_y2 = y2[split_boundary:]
# Define placeholders: shared input X and one target per task.
# (These bare text lines were previously not commented -> SyntaxError.)
X = tf.placeholder("float", name="X")
Y1 = tf.placeholder("float", name="Y1")
Y2 = tf.placeholder("float", name="Y2")

# Define weights: one shared hidden layer (1 -> 2) feeding two task heads
# (2 -> 1).  NOTE(review): the input width is hard-coded to 1 and must match
# `lag`; changing lag without changing this shape will break tf.matmul.
initial_shared_layer_weights = np.random.rand(1, 2)
initial_Y1_layer_weights = np.random.rand(2, 1)
initial_Y2_layer_weights = np.random.rand(2, 1)
shared_layer_weights = tf.Variable(initial_shared_layer_weights, name="share_W", dtype="float32")
Y1_layer_weights = tf.Variable(initial_Y1_layer_weights, name="share_Y1", dtype="float32")
Y2_layer_weights = tf.Variable(initial_Y2_layer_weights, name="share_Y2", dtype="float32")

# Build the layers.  The original comment said "relu" but the code uses
# sigmoid activations throughout; the code is kept as-is.
shared_layer = tf.nn.sigmoid(tf.matmul(X, shared_layer_weights))
Y1_layer = tf.nn.sigmoid(tf.matmul(shared_layer, Y1_layer_weights))
Y2_layer = tf.nn.sigmoid(tf.matmul(shared_layer, Y2_layer_weights))

# Per-task L2 losses.
Y1_Loss = tf.nn.l2_loss(Y1 - Y1_layer)
Y2_Loss = tf.nn.l2_loss(Y2 - Y2_layer)

# One Adam optimizer per task, each minimising only its own loss
# (both still update the shared layer through backprop).
Y1_op = tf.train.AdamOptimizer().minimize(Y1_Loss)
Y2_op = tf.train.AdamOptimizer().minimize(Y2_Loss)
# ======================================================================
# Calculation (Session) Code
# ======================================================================
# Open the session and train for 3000 iterations, picking one of the two
# tasks uniformly at random each step (simple multi-task schedule).
# (The header lines above were previously bare text -> SyntaxError, and
# the loop body had lost its indentation.)
with tf.Session() as session:
    session.run(tf.initialize_all_variables())
    for iters in range(3000):
        if np.random.rand() < 0.5:
            # Train task 1 on its own inputs; both targets are fed because
            # the graph's feed dict lists all placeholders.
            _, Y1_loss, Y1_Layer = session.run(
                [Y1_op, Y1_Loss, Y1_layer],
                {
                    X: train_x1,
                    Y1: train_y1,
                    Y2: train_y2,
                })
            print(Y1_Layer)
            print(Y1_loss)
        else:
            # Train task 2.
            _, Y2_loss = session.run(
                [Y2_op, Y2_Loss],
                {
                    X: train_x2,
                    Y1: train_y1,
                    Y2: train_y2,
                })
            print(Y2_loss)
    # No explicit session.close(): the with-block already closes the session.