
Tensorflow.js tf.initializers.Initializer Class

Tensorflow.js is an open-source library developed by Google for running machine learning models and deep learning neural networks in the browser or in a Node.js environment. The tf.initializers.Initializer() class extends the serialization.Serializable class and serves as the base class for all initializers.

The tf.initializers.Initializer class comes with 15 built-in functions, listed below:

  • tf.initializers.Initializer class .constant() function
  • tf.initializers.Initializer class .glorotNormal() function
  • tf.initializers.Initializer class .glorotUniform() function
  • tf.initializers.Initializer class .heNormal() function
  • tf.initializers.Initializer class .heUniform() function
  • tf.initializers.Initializer class .identity() function
  • tf.initializers.Initializer class .leCunNormal() function
  • tf.initializers.Initializer class .leCunUniform() function
  • tf.initializers.Initializer class .ones() function
  • tf.initializers.Initializer class .orthogonal() function
  • tf.initializers.Initializer class .randomNormal() function
  • tf.initializers.Initializer class .randomUniform() function
  • tf.initializers.Initializer class .truncatedNormal() function
  • tf.initializers.Initializer class .varianceScaling() function
  • tf.initializers.Initializer class .zeros() function

1. tf.initializers.Initializer class .constant() function: It generates values initialized to a given constant.

Example:

Javascript
// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")
 
// Use tf.initializers.constant() function
var initializer = tf.initializers.constant({ value: 7, })
 
// Print the value of constant
console.log(initializer);


Output:

Constant { value: 7 }
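
The snippet above only prints the initializer's configuration. To see the constant actually applied, the initializer can be passed to a layer, for instance as its kernelInitializer. Below is a minimal sketch; the layer shape and the useBias flag are illustrative choices, not part of the original example:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Build a dense layer whose kernel is filled by the
// constant initializer (illustrative shape: 3 inputs, 2 units)
const layer = tf.layers.dense({
    units: 2,
    inputShape: [3],
    kernelInitializer: tf.initializers.constant({ value: 7 }),
    useBias: false
});

// The weights are created the first time the layer is applied
layer.apply(tf.ones([1, 3]));

// Every entry of the kernel should be 7
layer.getWeights()[0].print();

The same pattern, passing the initializer as kernelInitializer or biasInitializer, works for every initializer listed below.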

2. tf.initializers.Initializer class .glorotNormal() function: It draws samples from a truncated normal distribution centered on 0 with stddev = sqrt(2 / (fan_in + fan_out)). Note that fan_in is the number of input units in the weight tensor and fan_out is the number of output units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.glorotNormal() function
console.log(tf.initializers.glorotNormal(9));
 
// Printing individual values
console.log('\nIndividual values:\n');
console.log(tf.initializers.glorotNormal(9).scale);
console.log(tf.initializers.glorotNormal(9).mode);
console.log(tf.initializers.glorotNormal(9).distribution);

Output:

{
    "scale": 1,
    "mode": "fanAvg",
    "distribution": "normal"
}

Individual values:

1
fanAvg
normal
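
To check the stddev = sqrt(2 / (fan_in + fan_out)) claim empirically, the initializer can be attached to a layer and the spread of the generated kernel measured. A rough sketch, assuming fanIn = 64 and fanOut = 32 (arbitrary illustrative sizes); the measured value should be in the neighbourhood of sqrt(2 / 96) ≈ 0.14, slightly smaller because the distribution is truncated:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Illustrative fan-in / fan-out sizes
const fanIn = 64, fanOut = 32;

// Dense layer whose kernel is drawn with glorotNormal
const layer = tf.layers.dense({
    units: fanOut,
    inputShape: [fanIn],
    kernelInitializer: tf.initializers.glorotNormal({}),
    useBias: false
});
layer.apply(tf.zeros([1, fanIn]));  // builds the weights

// Empirical standard deviation of the kernel entries
const kernel = layer.getWeights()[0];
tf.moments(kernel).variance.sqrt().print();  // roughly 0.12-0.14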

3. tf.initializers.Initializer class .glorotUniform() function: It draws samples from a uniform distribution within [-limit, limit], where limit is sqrt(6 / (fan_in + fan_out)), fan_in is the number of input units in the weight tensor, and fan_out is the number of output units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.Js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.glorotUniform() function
const geek = tf.initializers.glorotUniform(7)
 
// Printing gain value
console.log(geek);
 
// Printing individual values from gain
console.log('\nIndividual values:\n');
console.log(geek.scale);
console.log(geek.mode);
console.log(geek.distribution);

Output:

{
  "scale": 1,
  "mode": "fanAvg",
  "distribution": "uniform"
}

Individual values:

1
fanAvg
uniform

4. tf.initializers.Initializer class .heNormal() function: It draws samples from a truncated normal distribution centered on zero with stddev = sqrt(2 / fanIn). Note that fanIn is the number of input units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.heNormal()
// function
const geek = tf.initializers.heNormal(7)
 
// Printing gain
console.log(geek);
console.log('\nIndividual values:\n');
console.log(geek.scale);
console.log(geek.mode);
console.log(geek.distribution);

Output:

{
  "scale": 2,
  "mode": "fanIn",
  "distribution": "normal"
}

Individual values:

2
fanIn
normal

5. tf.initializers.Initializer class .heUniform() function: It draws samples from a uniform distribution within [-limit, limit], where limit is sqrt(6 / fanIn). Note that fanIn is the number of input units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.heUniform() function
const geek = tf.initializers.heUniform(7)
 
// Printing gain
console.log(geek);
console.log('\nIndividual values:\n');
console.log(geek.scale);
console.log(geek.mode);
console.log(geek.distribution);

Output:

{
    "scale": 2,
    "mode": "fanIn",
    "distribution": "uniform"
}

Individual values:

2
fanIn
uniform

6. tf.initializers.Initializer class .identity() function: It returns a new tensor object containing an identity matrix (scaled by the gain). It can only be used for 2D matrices.

Example:

Javascript

// Importing the tensorflow.Js library
import * as tf from "@tensorflow/tfjs"
 
// Generates the identity matrix
const value=tf.initializers.identity(1.0)
 
// Print gain
console.log(value)

Output:

{
 "gain": 1
}
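
Again, only the configuration is printed above. A minimal sketch of the initializer producing an actual identity kernel, assuming a square 3×3 weight matrix (an illustrative size) and a gain of 1:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Square dense layer so that the kernel can be an identity matrix
const layer = tf.layers.dense({
    units: 3,
    inputShape: [3],
    kernelInitializer: tf.initializers.identity({ gain: 1 }),
    useBias: false
});
layer.apply(tf.zeros([1, 3]));  // builds the weights

// Prints a 3x3 identity matrix
layer.getWeights()[0].print();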

7. tf.initializers.Initializer class .leCunNormal() function: It draws samples from a truncated normal distribution centered on zero with stddev = sqrt(1 / fanIn). Note that fanIn is the number of input units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.Js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.leCunNormal() function
const geek = tf.initializers.leCunNormal(3)
 
// Printing gain
console.log(geek);
console.log('\nIndividual values:\n');
console.log(geek.scale);
console.log(geek.mode);
console.log(geek.distribution);

Output:

{
    "scale": 1,
    "mode": "fanIn",
    "distribution": "normal"
}

Individual values:

1
fanIn
normal

8. tf.initializers.Initializer class .leCunUniform() function: It draws samples from a uniform distribution in the interval [-limit, limit], where limit = sqrt(3 / fanIn). Note that fanIn is the number of input units in the weight tensor.

Example:

Javascript

// Importing the tensorflow.Js library
import * as tf from "@tensorflow/tfjs"
 
// Initialising the .initializers.leCunUniform() function
console.log(tf.initializers.leCunUniform(4));
 
// Printing individual values from the gain
console.log("\nIndividual Values\n");
console.log(tf.initializers.leCunUniform(4).scale);
console.log(tf.initializers.leCunUniform(4).mode);
console.log(tf.initializers.leCunUniform(4).distribution);

Output:

{
  "scale": 1,
  "mode": "fanIn",
  "distribution": "uniform"
}

Individual Values

1
fanIn
uniform

9. tf.initializers.Initializer class .ones() function: It creates a tensor with all elements set to 1, i.e., an initializer that generates tensors initialized to one.

Example:

Javascript

//import tensorflow.js
const tf=require("@tensorflow/tfjs")
 
//use tf.ones()
var GFG=tf.ones([3, 4]);
 
//print tensor
GFG.print()

Output:

Tensor
   [[1, 1, 1, 1],
    [1, 1, 1, 1],
    [1, 1, 1, 1]]
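
Note that the example above uses tf.ones() directly rather than the ones initializer itself. Below is a sketch of the initializer used through a layer's biasInitializer; the layer shape is an illustrative choice:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Dense layer whose bias vector is filled with ones
const layer = tf.layers.dense({
    units: 4,
    inputShape: [2],
    biasInitializer: tf.initializers.ones()
});
layer.apply(tf.zeros([1, 2]));  // builds the weights

// getWeights()[1] is the bias: [1, 1, 1, 1]
layer.getWeights()[1].print();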

10. tf.initializers.Initializer class .orthogonal() function: It produces a random orthogonal matrix.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.orthogonal() function
let geek = tf.initializers.orthogonal(2)
 
// Printing gain value
console.log(geek);
 
// Printing individual gain value
console.log('\nIndividual values:\n');
console.log(geek.DEFAULT_GAIN);
console.log(geek.gain);

Output:

{
  "DEFAULT_GAIN": 1,
  "gain": 1
}

Individual values:

1
1
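
A matrix K is orthogonal when transpose(K) · K equals the identity matrix. The sketch below initializes a square kernel with the orthogonal initializer and checks this property; the 3×3 size and gain of 1 are illustrative:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Square dense layer with an orthogonally initialized kernel
const layer = tf.layers.dense({
    units: 3,
    inputShape: [3],
    kernelInitializer: tf.initializers.orthogonal({ gain: 1 }),
    useBias: false
});
layer.apply(tf.zeros([1, 3]));  // builds the weights

// transpose(K) * K should be (numerically close to) the identity
const k = layer.getWeights()[0];
k.transpose().matMul(k).print();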

11. tf.initializers.Initializer class .randomNormal() function: It generates random values initialized to a normal distribution.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.randomNormal() function
let geek = tf.initializers.randomNormal(3)
 
// Printing gain value
console.log(geek);
 
// Printing individual gain value.
console.log('\nIndividual values:\n');
console.log(geek.DEFAULT_MEAN);
console.log(geek.DEFAULT_STDDEV);
console.log(geek.mean);
console.log(geek.stddev);

Output:

{
  "DEFAULT_MEAN": 0,
  "DEFAULT_STDDEV": 0.05,
  "mean": 0,
  "stddev": 0.05
}

Individual values:

0
0.05
0
0.05

12. tf.initializers.Initializer class .randomUniform() function: It generates random values initialized to a uniform distribution. The values are evenly distributed between the configured minval and maxval.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.randomUniform() function
let geek = tf.initializers.randomUniform(5)
 
// Printing gain value
console.log(geek);
 
// Printing individual gain value.
console.log('\nIndividual values:\n');
console.log(geek.DEFAULT_MINVAL);
console.log(geek.DEFAULT_MAXVAL);
console.log(geek.minval);
console.log(geek.maxval);

Output:

{
  "DEFAULT_MINVAL": -0.05,
  "DEFAULT_MAXVAL": 0.05,
  "minval": -0.05,
  "maxval": 0.05
}

Individual values:

-0.05
0.05
-0.05
0.05
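
Since the default range is [-0.05, 0.05], every generated weight should fall inside it. A quick check, sketched with an arbitrary 8×4 layer:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Dense layer whose kernel is drawn uniformly from [-0.05, 0.05]
const layer = tf.layers.dense({
    units: 4,
    inputShape: [8],
    kernelInitializer: tf.initializers.randomUniform({}),
    useBias: false
});
layer.apply(tf.zeros([1, 8]));  // builds the weights

// Smallest and largest generated values stay inside the range
const kernel = layer.getWeights()[0];
kernel.min().print();  // >= -0.05
kernel.max().print();  // <=  0.05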

13. tf.initializers.Initializer class .truncatedNormal() function: It produces random values initialized to a truncated normal distribution.

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.truncatedNormal()
// function
let geek = tf.initializers.truncatedNormal(13)
 
// Printing gain value
console.log(geek);
 
// Printing individual gain value
console.log('\nIndividual values:\n');
console.log(geek.DEFAULT_MEAN);
console.log(geek.DEFAULT_STDDEV);
console.log(geek.mean);
console.log(geek.stddev);

Output:

{
  "DEFAULT_MEAN": 0,
  "DEFAULT_STDDEV": 0.05,
  "mean": 0,
  "stddev": 0.05
}

Individual values:

0
0.05
0
0.05

14. tf.initializers.Initializer class .varianceScaling() function: It is an initializer that adapts its scale to the shape of the weights. With distribution = 'normal', samples are drawn from a truncated normal distribution centered on 0 with stddev = sqrt(scale / n), where n depends on the mode (the number of input units, output units, or their average).

Example:

Javascript

// Importing the tensorflow.js library
import * as tf from "@tensorflow/tfjs"
 
// Initializing the .initializers.varianceScaling()
// function
let geek = tf.initializers.varianceScaling(33)
 
// Printing gain value
console.log(geek);
 
// Printing individual gain value.
console.log('\nIndividual values:\n');
console.log(geek.scale);
console.log(geek.mode);
console.log(geek.distribution);

Output:

{
  "scale": 1,
  "mode": "fanIn",
  "distribution": "normal"
}

Individual values:

1
fanIn
normal
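
The example above passes a bare number, which is ignored; the function actually takes a configuration object. Below is a sketch with an explicit, illustrative configuration; note that the scale/mode/distribution values shown in the earlier outputs indicate that glorotNormal, heNormal and leCunNormal are just particular settings of varianceScaling:

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Explicit configuration object (values chosen for illustration);
// scale: 2, mode: 'fanIn', distribution: 'normal' matches heNormal
const init = tf.initializers.varianceScaling({
    scale: 2,
    mode: 'fanIn',
    distribution: 'normal'
});

// Printing the configured values
console.log(init.scale);         // 2
console.log(init.mode);          // fanIn
console.log(init.distribution);  // normal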

15. tf.initializers.Initializer class .zeros() function: It is an initializer that generates tensors initialized to zero.

Example:

Javascript

// Importing the tensorflow.Js library
import * as tf from "@tensorflow/tfjs"
 
// Calling tf.initializers.zeros() function
const initializer = tf.initializers.zeros();
 
// Printing output
console.log(JSON.stringify(+initializer));

Output:

null
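
JSON.stringify(+initializer) coerces the initializer object to NaN, which stringifies to null, so the output above is not very informative. A sketch that shows the zeros initializer actually producing a zero-filled kernel (the 2×3 layer shape is illustrative):

Javascript

// Importing the tensorflow.js library
const tf = require("@tensorflow/tfjs")

// Dense layer whose kernel is filled with zeros
const layer = tf.layers.dense({
    units: 3,
    inputShape: [2],
    kernelInitializer: tf.initializers.zeros(),
    useBias: false
});
layer.apply(tf.zeros([1, 2]));  // builds the weights

// Prints a 2x3 matrix of zeros
layer.getWeights()[0].print();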

Reference: https://js.tensorflow.org/api/latest/#class:initializers.Initializer