% file: terms.tex (forked from zhanggyb/nndl)
% file: terms.tex
% epoch, epochs
%
% In neural-network terminology:
%
% one epoch = one forward pass and one backward pass of all the training
% examples
%
% batch size = the number of training examples in one forward/backward pass. The
% higher the batch size, the more memory space you'll need.
%
% number of iterations = number of passes, each pass using [batch size] number
% of examples. To be clear, one pass = one forward pass + one backward pass (we
% do not count the forward pass and backward pass as two different passes).
%
% Example: if you have 1000 training examples, and your batch size is 500, then
% it will take 2 iterations to complete 1 epoch.
%
% and: http://stackoverflow.com/questions/25887205/what-is-an-epoch-in-anns-and-how-does-it-translate-into-code-in-matlab
% \epoch / \epochs: typeset the translated term for "epoch(s)".
% The single optional argument defaults to the Chinese translation 迭代期;
% a call site may override the rendered text, e.g. \epoch[epoch].
% Starred form (\newcommand*) since the argument never contains \par.
\newcommand*{\epoch}[1][迭代期]{#1}
\newcommand*{\epochs}[1][迭代期]{#1}
% \minibatch: typeset the translated term for "mini-batch".
% Optional argument defaults to the Chinese translation 小批量数据;
% override per call site, e.g. \minibatch[mini-batch].
\newcommand*{\minibatch}[1][小批量数据]{#1}
% \learningrate: typeset the translated term for "learning rate".
% Optional argument defaults to the Chinese translation 学习速率;
% override per call site, e.g. \learningrate[learning rate].
\newcommand*{\learningrate}[1][学习速率]{#1}