
Commit a2d9ea0
Replace "weight" with "parameter" (#193)
Co-authored-by: Jarrett Ye <[email protected]>
Expertium and L-M-Sherlock authored May 22, 2024
1 parent 2c8c951 commit a2d9ea0
Showing 4 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -9,11 +9,11 @@ mod error;
 mod inference;
 mod model;
 mod optimal_retention;
+mod parameter_clipper;
 mod pre_training;
 #[cfg(test)]
 mod test_helpers;
 mod training;
-mod weight_clipper;
 
 pub use dataset::{FSRSItem, FSRSReview};
 pub use error::{FSRSError, Result};
4 changes: 2 additions & 2 deletions src/model.rs
@@ -1,6 +1,6 @@
 use crate::error::{FSRSError, Result};
 use crate::inference::{Parameters, DECAY, FACTOR, S_MIN};
-use crate::weight_clipper::clip_parameters;
+use crate::parameter_clipper::clip_parameters;
 use crate::DEFAULT_PARAMETERS;
 use burn::backend::ndarray::NdArrayDevice;
 use burn::backend::NdArray;
@@ -196,7 +196,7 @@ impl ModelConfig {
 }
 
 /// This is the main structure provided by this crate. It can be used
-/// for both weight training, and for reviews.
+/// for both parameter training, and for reviews.
 #[derive(Debug, Clone)]
 pub struct FSRS<B: Backend = NdArray> {
     model: Option<Model<B>>,
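Note: the doc comment changed above marks `FSRS` as the crate's entry point for both parameter training and reviews. A minimal usage sketch, assuming a constructor of the form `FSRS::new(Option<&Parameters>)` (the constructor signature does not appear in this diff and is an assumption; `FSRS`, `DEFAULT_PARAMETERS`, and `Result` are taken from the diff's re-exports):

    use fsrs::{FSRS, DEFAULT_PARAMETERS, Result};

    fn demo() -> Result<()> {
        // Assumed constructor: builds a handle from a parameter slice;
        // passing None would presumably fall back to the defaults.
        let fsrs: FSRS = FSRS::new(Some(&DEFAULT_PARAMETERS))?;
        let _ = fsrs; // usable for both parameter training and reviews
        Ok(())
    }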
6 changes: 3 additions & 3 deletions src/weight_clipper.rs → src/parameter_clipper.rs
@@ -4,7 +4,7 @@ use crate::{
 };
 use burn::tensor::{backend::Backend, Data, Tensor};
 
-pub(crate) fn weight_clipper<B: Backend>(parameters: Tensor<B, 1>) -> Tensor<B, 1> {
+pub(crate) fn parameter_clipper<B: Backend>(parameters: Tensor<B, 1>) -> Tensor<B, 1> {
     let val = clip_parameters(&parameters.to_data().convert().value);
     Tensor::from_data(
         Data::new(val, parameters.shape()).convert(),
@@ -49,14 +49,14 @@ mod tests {
     use burn::backend::ndarray::NdArrayDevice;
 
     #[test]
-    fn weight_clipper_works() {
+    fn parameter_clipper_works() {
         let device = NdArrayDevice::Cpu;
         let tensor = Tensor::from_floats(
             [0.0, -1000.0, 1000.0, 0.0, 1000.0, -1000.0, 1.0, 0.25, -0.1],
             &device,
         );
 
-        let param: Tensor<1> = weight_clipper(tensor);
+        let param: Tensor<1> = parameter_clipper(tensor);
        let values = &param.to_data().value;
 
        assert_eq!(
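Note: the renamed test feeds out-of-range values (±1000.0, -0.1) through `parameter_clipper` and asserts they come back inside their legal ranges. The underlying `clip_parameters` is not shown in this diff; a minimal sketch of the per-index clamping idea, with hypothetical bounds (the crate defines its own ranges):

    /// Clamp each value into its per-index (lo, hi) range.
    /// `bounds` here is illustrative; the real ranges live in `clip_parameters`.
    fn clip_parameters_sketch(values: &[f32], bounds: &[(f32, f32)]) -> Vec<f32> {
        values
            .iter()
            .zip(bounds)
            .map(|(v, (lo, hi))| v.clamp(*lo, *hi))
            .collect()
    }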
6 changes: 3 additions & 3 deletions src/training.rs
@@ -3,8 +3,8 @@ use crate::cosine_annealing::CosineAnnealingLR;
 use crate::dataset::{split_filter_data, FSRSBatcher, FSRSDataset, FSRSItem};
 use crate::error::Result;
 use crate::model::{Model, ModelConfig};
+use crate::parameter_clipper::parameter_clipper;
 use crate::pre_training::pretrain;
-use crate::weight_clipper::weight_clipper;
 use crate::{FSRSError, DEFAULT_PARAMETERS, FSRS};
 use burn::backend::Autodiff;
 
@@ -267,7 +267,7 @@ impl<B: Backend> FSRS<B> {
 
         if optimized_parameters
             .iter()
-            .any(|weight: &f32| weight.is_infinite())
+            .any(|parameter: &f32| parameter.is_infinite())
         {
             return Err(FSRSError::InvalidInput);
         }
@@ -358,7 +358,7 @@ fn train<B: AutodiffBackend>(
             }
             let grads = GradientsParams::from_grads(gradients, &model);
             model = optim.step(lr, model, grads);
-            model.w = Param::from_tensor(weight_clipper(model.w.val()));
+            model.w = Param::from_tensor(parameter_clipper(model.w.val()));
             // info!("epoch: {:?} iteration: {:?} lr: {:?}", epoch, iteration, lr);
             renderer.render_train(TrainingProgress {
                 progress,
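Note: the last hunk shows where the renamed clipper runs during training: immediately after every optimizer step, the parameter tensor is projected back into its valid ranges, so gradient updates can never leave the model with out-of-range values. The same projected-step pattern on plain floats (a toy sketch, independent of burn's API):

    /// One projected gradient step: update, then clamp back into bounds.
    fn projected_step(w: &mut [f32], grad: &[f32], lr: f32, bounds: &[(f32, f32)]) {
        for ((wi, gi), (lo, hi)) in w.iter_mut().zip(grad).zip(bounds) {
            *wi = (*wi - lr * gi).clamp(*lo, *hi);
        }
    }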
