Tags: tensorflow
import tensorflow as tf
import math

input_data = tf.Variable([[0.2, 0.1, 0.9], [0.2, 0.1, 0.9], [0.7, 0.4, 0.1]], dtype=tf.float32)
labels1 = [[0, 0, 1], [0, 1, 0], [1, 0, 0]]

# Dense (one-hot) labels vs. sparse (class-index) labels
output = tf.nn.softmax_cross_entropy_with_logits_v2(logits=input_data, labels=labels1)
labels2 = tf.argmax(labels1, 1)
output1 = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=input_data, labels=labels2)
sm = tf.nn.softmax(input_data)

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    our = sess.run(output)
    print('softmax_cross_entropy_with_logits_v2:\n', our)
    our1 = sess.run(output1)
    print('sparse_softmax_cross_entropy_with_logits:\n', our1)
    inp = sess.run(input_data)
    smv = sess.run(sm)
    print('softmax output:\n', smv)
    l2 = sess.run(labels2)
    print('argmax output(sparse labels)', l2)
    # Recompute the cross entropy by hand: -sum_i label_i * log(softmax_i)
    for k in range(len(inp)):
        x = 0.0
        for i in range(len(inp[0])):
            x += math.log(smv[k][i]) * labels1[k][i]
        x = -x
        print('softmax_cross_entropy_with_logits by my computing for input ', k, ':', x)
        # print('tensorflow == myComputing?', math.fabs(x - our[k]) < 0.001)
Output:
softmax_cross_entropy_with_logits_v2:
[0.6657319 1.4657319 0.8283902]
sparse_softmax_cross_entropy_with_logits:
[0.6657319 1.4657319 0.8283902]
softmax output:
[[0.25519383 0.23090893 0.51389724]
[0.25519383 0.23090893 0.51389724]
[0.4367518 0.3235537 0.23969449]]
argmax output(sparse labels) [2 1 0]
softmax_cross_entropy_with_logits by my computing for input 0 : 0.6657319553780278
softmax_cross_entropy_with_logits by my computing for input 1 : 1.4657318873121323
softmax_cross_entropy_with_logits by my computing for input 2 : 0.8283901795553891
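Since the labels are one-hot, only the true-class term of the sum survives, so each row's loss is simply -log of the softmax probability at the labeled index. A quick check against the printed values above (not part of the original script):

import math
# row 0: one-hot label is [0, 0, 1], and the softmax output at index 2 is 0.51389724
print(-math.log(0.51389724))   # ~0.6657319, matches the first loss above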
There are four ways to compute the cross entropy here. Under normal conditions they all give the same value, but once extreme values are involved they no longer agree:
# 4 methods to calculate cross entropy. First: no clipping applied to ynew.
# Second: ynew clipped manually.
# Third/Fourth: any clipping happens inside the ops themselves, but the clipping range is not clear.
cross_entropy1 = -tf.reduce_mean(tf.reduce_sum(y_ * tf.log(ynew), 1))
ynew1 = tf.clip_by_value(ynew, 1e-36, 1.0)
cross_entropy2 = -tf.reduce_mean(tf.reduce_sum(y_ * tf.log(ynew1), 1))
cross_entropy3 = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=y3n, labels=y_))
cross_entropy4 = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
    logits=y3n, labels=tf.argmax(y_, 1)))
cross_entropy1 overflows (it comes out as nan). cross_entropy3, which uses softmax_cross_entropy_with_logits_v2, also gives an extremely large cross entropy in this extreme case (6.72853e+30). With cross_entropy2, however, where the input probabilities are clipped to the range (1e-36, 1.0), the resulting cross entropy is a value that training can still continue from (82.89307).
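As a sanity check on where those two numbers come from (my own back-of-the-envelope calculation using the constants from the run below, not part of the original script): when the true-class probability is clipped to 1e-36, each row's loss is -log(1e-36); and with one-hot labels the fused op computes logsumexp(logits) - true-class logit, and with logits this large the logsumexp is dominated by the largest logit.

import math

# crs2: both rows have their true-class probability clipped to 1e-36
print(-math.log(1e-36))                      # ~82.8931, matches crs2

# crs3/crs4: per-row loss ~= max(logits) - logits[true class]
loss_row0 = 1.5914237e+31 - 1.5664488e+31    # true class of row 0 is index 3
loss_row1 = 1.3207314e+31 - 2.9850567e+24    # true class of row 1 is index 4
print((loss_row0 + loss_row1) / 2)           # ~6.72853e+30, matches crs3/crs4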
The full test script follows:
# file crossEntroyTest.py
import tensorflow as tf
import math

print('exp(lar)', math.exp(-6.72853e+30))   # exp of a huge negative number underflows to 0.0
print(1.2e+3)

def testCrossEbtropyNormalData():
    y_ = tf.Variable([[0., 0., 0., 0., 1., 0., 0., 0., 0., 0.],
                      [0., 0., 1., 0., 0., 0., 0., 0., 0., 0.]])
    # ynew = tf.nn.softmax(y3n), calculated by englishHndUsingCnn.py during NN training
    ynew = tf.Variable([[0.09775153, 0.10184532, 0.10208829, 0.0985443,
                         0.09175218, 0.10402172,
                         0.10551486, 0.09878179, 0.10147586, 0.09822416],
                        [0.09767619, 0.10240721, 0.09688035, 0.09931276,
                         0.09407537, 0.10758881,
                         0.10283026, 0.09836568, 0.10210507, 0.09875835]])
    y3n = tf.Variable([[-0.02758069, 0.01344561, 0.01582842,
                        -0.0195034, -0.09091826, 0.03459012,
                        0.04884226, -0.01709634, 0.00981142, -0.02275741],
                       [-0.02703298, 0.02026634, -0.03521407, -0.0104167,
                        -0.06459457, 0.06962582,
                        0.02438892, -0.01999882, 0.01731157, -0.01601485]])
    cross_entropy = -tf.reduce_mean(tf.reduce_sum(y_ * tf.log(ynew), 1))
    cross_entropy1 = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=y3n, labels=y_))
    cross_entropy2 = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=y3n, labels=tf.argmax(y_, 1)))
    ynews = tf.nn.softmax(y3n)
    sum1 = tf.reduce_mean(tf.reduce_sum(ynews, 1))
    # count how many elements of softmax(y3n) match the stored ynew within 1e-6
    equalBool = tf.subtract(ynews, ynew)
    equalBool = tf.abs(equalBool)
    equalBool = tf.less(equalBool, 0.000001)
    equalFloat = tf.cast(equalBool, 'float32')
    equNum = tf.reduce_sum(equalFloat)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        r = sess.run(cross_entropy)
        print('crs', r)
        print('crs1', sess.run(cross_entropy1))
        print('crs2', sess.run(cross_entropy2))
        ynewsR = sess.run(ynews)
        print('ynews:', ynewsR)
        print('SoftMax', sess.run(sum1))
        print('soft equal ?', sess.run(equNum))
'''
crs 2.3614712
crs1 2.3614712
crs2 2.3614712
ynews: [[0.09775153 0.10184532 0.10208829 0.0985443 0.09175218 0.10402172
0.10551486 0.09878179 0.10147586 0.09822416]
[0.09767619 0.10240721 0.09688035 0.09931276 0.09407537 0.10758881
0.10283026 0.09836569 0.10210507 0.09875835]]
SoftMax 1.0
soft equal ? 20.0
'''
def testCrossEbtropyAbnormalData():
    y_ = tf.Variable([[0., 0., 0., 1., 0., 0., 0., 0., 0., 0.],
                      [0., 0., 0., 0., 1., 0., 0., 0., 0., 0.]])
    ynew = tf.Variable([[0., 0., 0., 0., 0., 1., 0., 0., 0., 0.],
                        [0., 0., 0., 0., 0., 1., 0., 0., 0., 0.]])
    y3n = tf.Variable([[-2.4375510e+23, 2.0621505e+22, -7.2626525e+23, 1.5664488e+31,
                        3.5969707e+24, 1.5914237e+31, -3.2679661e+22, 2.5958722e+24,
                        5.2547755e+23, -3.1578729e+31],
                       [-2.0226255e+23, 1.7113962e+22, -6.0270867e+23, 1.3000047e+31,
                        2.9850567e+24, 1.3207314e+31, -2.7118364e+22, 2.1541442e+24,
                        4.3611144e+23, -2.6207365e+31]])
    # 4 methods to calculate cross entropy. First: no clipping applied to ynew.
    # Second: ynew clipped manually.
    # Third/Fourth: any clipping happens inside the ops themselves, but the clipping range is not clear.
    cross_entropy1 = -tf.reduce_mean(tf.reduce_sum(y_ * tf.log(ynew), 1))
    ynew1 = tf.clip_by_value(ynew, 1e-36, 1.0)
    cross_entropy2 = -tf.reduce_mean(tf.reduce_sum(y_ * tf.log(ynew1), 1))
    cross_entropy3 = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=y3n, labels=y_))
    cross_entropy4 = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=y3n, labels=tf.argmax(y_, 1)))
    ynews = tf.nn.softmax(y3n)
    sum1 = tf.reduce_mean(tf.reduce_sum(ynews, 1))
    # count how many elements of softmax(y3n) match the stored ynew within 1e-6
    equalBool = tf.subtract(ynews, ynew)
    equalBool = tf.abs(equalBool)
    equalBool = tf.less(equalBool, 0.000001)
    equalFloat = tf.cast(equalBool, 'float32')
    equNum = tf.reduce_sum(equalFloat)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print('crs1', sess.run(cross_entropy1))
        print('crs2', sess.run(cross_entropy2))
        print('crs3', sess.run(cross_entropy3))
        print('crs4', sess.run(cross_entropy4))
        ynewsR = sess.run(ynews)
        print('ynews:', ynewsR)
        print('SoftMax', sess.run(sum1))
        print('soft equal ?', sess.run(equNum))
'''
crs1 nan
crs2 82.89307
crs3 6.72853e+30
crs4 6.72853e+30
ynews: [[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]]
SoftMax 1.0
soft equal ? 20.0
'''
testCrossEbtropyNormalData()
testCrossEbtropyAbnormalData()
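For reference, the manual-clipping pattern that keeps training alive (cross_entropy2) can be wrapped in a small helper. This is only a sketch against the same TF 1.x API used above, and clipped_cross_entropy is a hypothetical name, not something from the original code:

def clipped_cross_entropy(y_true, y_prob, eps=1e-36):
    # hypothetical helper: avoid log(0) by clipping probabilities into [eps, 1.0]
    y_prob = tf.clip_by_value(y_prob, eps, 1.0)
    return -tf.reduce_mean(tf.reduce_sum(y_true * tf.log(y_prob), 1))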