diff --git a/app/controllers/evaluations_controller.rb b/app/controllers/evaluations_controller.rb
index 96336e78..e6fea7fd 100644
--- a/app/controllers/evaluations_controller.rb
+++ b/app/controllers/evaluations_controller.rb
@@ -1,7 +1,167 @@
# frozen_string_literal: true
# Controller for evaluations CRUD actions.
+# TODO: Simplify and shorten this controller to satisfy RuboCop
class EvaluationsController < ApplicationController
before_action -> { authorize_user('evaluator') }
+ before_action :set_evaluation_and_submission_assignment, only: %i[save_draft mark_complete]
+
def index; end
+
+ def show; end
+
+ def new
+ @evaluator_submission_assignment = find_evaluator_submission_assignment
+
+ if @evaluator_submission_assignment.nil? || !can_access_evaluation?
+ return redirect_to evaluations_path, alert: I18n.t("evaluations.alerts.evaluator_submission_assignment_not_found")
+ end
+
+ @evaluation_form = find_evaluation_form
+
+ if @evaluation_form.nil?
+ return redirect_to evaluations_path, alert: I18n.t("evaluations.alerts.evaluation_form_not_found")
+ end
+
+ build_evaluation
+
+ render :new
+ end
+
+ def edit
+ @evaluation = Evaluation.find_by(id: params[:id])
+ return unauthorized_redirect unless can_access_evaluation?
+
+ render :edit
+ end
+
+ def save_draft
+ @evaluator_submission_assignment = @evaluation.evaluator_submission_assignment
+
+ begin
+ @evaluation.completed_at = nil
+ @evaluation.save(validate: false)
+ handle_save_draft_success
+ rescue ActiveRecord::RecordInvalid, ActiveRecord::NotNullViolation
+ handle_save_draft_failure
+ end
+ end
+
+ def mark_complete
+ @evaluator_submission_assignment = @evaluation.evaluator_submission_assignment
+
+ # TODO: Set total_score here when the evaluation is marked complete
+
+ @evaluation.completed_at = Time.current
+
+ if @evaluation.save
+ handle_mark_complete_success
+ else
+ @evaluation.completed_at = nil
+ handle_mark_complete_failure
+ end
+ end
+
+ private
+
+ def set_evaluation_and_submission_assignment
+ @evaluation = find_or_initialize_evaluation
+ @evaluation.assign_attributes(evaluation_params)
+
+ @evaluator_submission_assignment = find_evaluator_submission_assignment
+
+ unauthorized_redirect unless can_access_evaluation?
+ end
+
+ def find_or_initialize_evaluation
+ if params[:id]
+ Evaluation.includes([:evaluation_criteria]).find(params[:id])
+ else
+ Evaluation.new
+ end
+ end
+
+ def find_evaluator_submission_assignment
+ return @evaluation.evaluator_submission_assignment if @evaluation&.evaluator_submission_assignment.present?
+
+ EvaluatorSubmissionAssignment.find_by(id: params[:evaluator_submission_assignment_id])
+ end
+
+ def can_access_evaluation?
+ (@evaluator_submission_assignment && @evaluator_submission_assignment.user_id == current_user.id) ||
+ (@evaluation && @evaluation.user_id == current_user.id)
+ end
+
+ def find_evaluation_form
+ @phase = @evaluator_submission_assignment.phase
+ EvaluationForm.find_by(phase: @phase)
+ end
+
+ def build_evaluation
+ @submission = @evaluator_submission_assignment.submission
+ @evaluation = Evaluation.new(
+ user: current_user,
+ evaluation_form: @evaluation_form,
+ submission: @submission,
+ evaluator_submission_assignment: @evaluator_submission_assignment
+ )
+
+ @evaluation_form.evaluation_criteria.each do |criterion|
+ @evaluation.evaluation_scores.build(evaluation_criterion: criterion)
+ end
+ end
+
+ # Redirect Helpers
+ def unauthorized_redirect
+ redirect_to evaluations_path, alert: I18n.t("evaluations.alerts.unauthorized")
+ end
+
+ def handle_save_draft_success
+ flash[:notice] = I18n.t("evaluations.notices.saved_draft")
+ redirect_to evaluations_path
+ end
+
+ def handle_save_draft_failure
+ flash.now[:alert] =
+ I18n.t("evaluations.alerts.save_draft_error", errors: @evaluation.errors.full_messages.to_sentence)
+
+ if @evaluation.new_record?
+ render :new, status: :unprocessable_entity
+ else
+ render :edit, status: :unprocessable_entity
+ end
+ end
+
+ def handle_mark_complete_success
+ flash[:notice] = I18n.t("evaluations.notices.marked_complete")
+ redirect_to evaluations_path
+ end
+
+ def handle_mark_complete_failure
+ flash.now[:alert] =
+ I18n.t("evaluations.alerts.mark_complete_error", errors: @evaluation.errors.full_messages.to_sentence)
+
+ if @evaluation.new_record?
+ render :new, status: :unprocessable_entity
+ else
+ render :edit, status: :unprocessable_entity
+ end
+ end
+
+ # Params
+ def evaluation_params
+ params.require(:evaluation).permit(
+ :user_id,
+ :evaluator_submission_assignment_id,
+ :submission_id,
+ :evaluation_form_id,
+ :additional_comments,
+ :revision_comments,
+ evaluation_scores_attributes: %i[
+ id evaluation_criterion_id
+ score score_override
+ comment comment_override
+ ]
+ )
+ end
end
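
A note on the TODO in mark_complete above: one way to populate total_score at completion time is sketched below. This is only a sketch under assumptions the diff does not confirm — that Evaluation has a total_score column and that score_override, when present, should win over score; compute_total_score is a hypothetical helper name.

    # Hypothetical controller helper (not part of this change): sums the
    # effective score (override first, then score) across the nested scores.
    def compute_total_score
      @evaluation.evaluation_scores.sum do |evaluation_score|
        evaluation_score.score_override || evaluation_score.score || 0
      end
    end

    # mark_complete could then set it just before saving:
    #   @evaluation.total_score = compute_total_score
    #   @evaluation.completed_at = Time.current
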
diff --git a/app/models/evaluation.rb b/app/models/evaluation.rb
index 81c0c1ed..7868b0a5 100644
--- a/app/models/evaluation.rb
+++ b/app/models/evaluation.rb
@@ -22,6 +22,8 @@ class Evaluation < ApplicationRecord
belongs_to :submission
belongs_to :evaluator_submission_assignment
has_many :evaluation_scores, dependent: :destroy
+ has_many :evaluation_criteria, through: :evaluation_form
+ accepts_nested_attributes_for :evaluation_scores
validates :user_id,
uniqueness: { scope: [:evaluation_form_id, :submission_id],
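
For context on the model additions above: accepts_nested_attributes_for :evaluation_scores is what lets the controller's evaluation_scores_attributes params build or update score rows through the parent evaluation, and the has_many :evaluation_criteria, through: :evaluation_form association supports the eager loading used in find_or_initialize_evaluation. A minimal sketch of the nested-attributes shape (form, criterion, and the literal values are placeholders, not taken from this diff):

    # Including an :id inside evaluation_scores_attributes updates an existing
    # score row; omitting it builds a new one in memory, saved with the parent.
    evaluation = Evaluation.new(
      user: current_user,
      evaluation_form: form,
      evaluation_scores_attributes: [
        { evaluation_criterion_id: criterion.id, score: 3, comment: "Clear and complete" }
      ]
    )
    evaluation.evaluation_scores.size # => 1
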
diff --git a/app/models/evaluation_score.rb b/app/models/evaluation_score.rb
index c77f8535..4a3a2f32 100644
--- a/app/models/evaluation_score.rb
+++ b/app/models/evaluation_score.rb
@@ -23,7 +23,8 @@ class EvaluationScore < ApplicationRecord
message: I18n.t("evaluation_scores.unique_evaluation_for_evaluation_criterion_error")
}
- validates :score, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, presence: true
+ validates :score, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, allow_nil: true
+ validates :score, presence: true, if: -> { evaluation.completed_at.present? }
validates :score_override, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, allow_nil: true
validates :comment, presence: true, if: -> { evaluation.evaluation_form.comments_required? }
validates :comment, length: { maximum: 3000 }, allow_nil: true
@@ -58,11 +59,16 @@ def score_within_criterion_limits
def validate_numeric_score
max_score = evaluation_criterion.points_or_weight
+ validate_score_presence
+
if score && score > max_score
errors.add(:score, "must be less than or equal to #{max_score}")
- elsif score_override && score_override > max_score
- errors.add(:score_override, "must be less than or equal to #{max_score}")
end
+
+    # Guard clause (rather than elsif) to satisfy RuboCop
+ return unless score_override && score_override > max_score
+
+ errors.add(:score_override, "must be less than or equal to #{max_score}")
end
def validate_range_score
@@ -70,10 +76,19 @@ def validate_range_score
range_end = evaluation_criterion.option_range_end
valid_range = (range_start..range_end)
+ validate_score_presence
+
if score && valid_range.exclude?(score)
errors.add(:score, "must be within the range #{range_start} to #{range_end}")
- elsif score_override && valid_range.exclude?(score_override)
- errors.add(:score_override, "must be within the range #{range_start} to #{range_end}")
end
+
+    # Guard clause (rather than elsif) to satisfy RuboCop
+ return unless score_override && valid_range.exclude?(score_override)
+
+ errors.add(:score_override, "must be within the range #{range_start} to #{range_end}")
+ end
+
+ def validate_score_presence
+ errors.add(:score, "cannot be blank") if score.nil?
end
end
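
Tying the score changes above to the controller flow: save_draft persists with save(validate: false), so nil scores are allowed once the column stops being NOT NULL, while mark_complete runs the full validations, which is when the conditional presence check on score applies. A rough sketch, assuming an evaluation with nested scores from this app:

    # Draft path (EvaluationsController#save_draft): validations are bypassed,
    # so nil scores persist thanks to the relaxed column and allow_nil change.
    evaluation.completed_at = nil
    evaluation.save(validate: false)

    # Completion path (EvaluationsController#mark_complete): validations run,
    # and a nil score now fails the presence check because completed_at is set.
    evaluation.completed_at = Time.current
    evaluation.save # => false while any score is still nil
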
diff --git a/app/views/evaluations/_form.html.erb b/app/views/evaluations/_form.html.erb
new file mode 100644
index 00000000..a05332ab
--- /dev/null
+++ b/app/views/evaluations/_form.html.erb
@@ -0,0 +1,7 @@
+<%= form_with(model: @evaluation, method: form_method, data: { controller: "evaluation modal" }) do |f| %>
+ <%= hidden_field_tag :authenticity_token, form_authenticity_token %>
+
+  Placeholder form in app/views/evaluations/_form.html.erb
+ <%= f.submit "Save Draft", formaction: draft_action %>
+ <%= f.submit "Mark Complete", formaction: complete_action %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/evaluations/edit.html.erb b/app/views/evaluations/edit.html.erb
new file mode 100644
index 00000000..b5925311
--- /dev/null
+++ b/app/views/evaluations/edit.html.erb
@@ -0,0 +1,9 @@
+<div>
+  <h1>
+    Evaluations Edit
+  </h1>
+  <p>
+    Find me in app/views/evaluations/edit.html.erb
+  </p>
+  <%= render "form", form_method: :patch, draft_action: save_draft_evaluation_path, complete_action: mark_complete_evaluation_path %>
+</div>
diff --git a/app/views/evaluations/new.html.erb b/app/views/evaluations/new.html.erb
new file mode 100644
index 00000000..32281824
--- /dev/null
+++ b/app/views/evaluations/new.html.erb
@@ -0,0 +1,9 @@
+<div>
+  <h1>
+    Evaluations New
+  </h1>
+  <p>
+    Find me in app/views/evaluations/new.html.erb
+  </p>
+  <%= render "form", form_method: :post, draft_action: save_draft_evaluations_path, complete_action: mark_complete_evaluations_path %>
+</div>
diff --git a/config/locales/en.yml b/config/locales/en.yml
index 89b314dd..20fd113c 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -87,6 +87,14 @@ en:
evaluations:
unique_user_for_evaluation_form_and_submission_error: "already has an evaluation for this form and submission"
unique_evaluator_submission_assignment: "already has an evaluation"
+ alerts:
+ evaluator_submission_assignment_not_found: "No assignment found for this submission"
+ evaluation_form_not_found: "No evaluation form found for this submission"
+      save_draft_error: "Failed to save evaluation as draft: %{errors}."
+ mark_complete_error: "Failed to mark evaluation as complete: %{errors}."
+ notices:
+ saved_draft: "Evaluation saved as draft"
+ marked_complete: "Evaluation complete"
evaluation_scores:
unique_evaluation_for_evaluation_criterion_error: "already has a score for this evaluation criterion"
alerts:
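
Rails i18n interpolation uses %{...} placeholders; Ruby's #{...} is never evaluated inside YAML strings, which is why both error messages above use %{errors}. For example, with the key defined above:

    I18n.t("evaluations.alerts.mark_complete_error", errors: "Score can't be blank")
    # => "Failed to mark evaluation as complete: Score can't be blank."
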
diff --git a/config/routes.rb b/config/routes.rb
index 43a0f0a4..194d9645 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -10,7 +10,21 @@
get '/dashboard', to: "dashboard#index"
- resources :evaluations, only: [:index]
+ resources :evaluations, only: [:index, :show, :edit] do
+ member do
+ patch 'save_draft'
+ patch 'mark_complete'
+ end
+ collection do
+ post 'save_draft'
+ post 'mark_complete'
+ end
+ end
+
+ resources :evaluator_submission_assignments, only: [] do
+ resources :evaluations, only: [:new]
+ end
+
resources :evaluation_forms do
member do
get 'confirmation'
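
For reference, the member and collection routes above generate the helpers exercised by the request specs further down (standard Rails routing behavior):

    # POST  /evaluations/save_draft         -> save_draft_evaluations_path        (new, unsaved evaluations)
    # POST  /evaluations/mark_complete      -> mark_complete_evaluations_path
    # PATCH /evaluations/:id/save_draft     -> save_draft_evaluation_path(evaluation)
    # PATCH /evaluations/:id/mark_complete  -> mark_complete_evaluation_path(evaluation)
    # GET   /evaluator_submission_assignments/:evaluator_submission_assignment_id/evaluations/new
    #       -> new_evaluator_submission_assignment_evaluation_path(evaluator_submission_assignment_id: ...)
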
diff --git a/db/migrate/20241223190634_allow_evaluation_score_null.rb b/db/migrate/20241223190634_allow_evaluation_score_null.rb
new file mode 100644
index 00000000..979c08df
--- /dev/null
+++ b/db/migrate/20241223190634_allow_evaluation_score_null.rb
@@ -0,0 +1,5 @@
+class AllowEvaluationScoreNull < ActiveRecord::Migration[7.2]
+ def change
+ change_column_null :evaluation_scores, :score, true
+ end
+end
diff --git a/db/structure.sql b/db/structure.sql
index fb9cf75d..dab60730 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -438,7 +438,7 @@ CREATE TABLE public.evaluation_scores (
id bigint NOT NULL,
evaluation_id bigint NOT NULL,
evaluation_criterion_id bigint NOT NULL,
- score integer NOT NULL,
+ score integer,
score_override integer,
comment text,
comment_override text,
@@ -2465,6 +2465,7 @@ ALTER TABLE ONLY public.winners
SET search_path TO "$user", public;
INSERT INTO "schema_migrations" (version) VALUES
+(20241223190634),
(20241217164258),
(20241125060011),
(20241120024946),
diff --git a/spec/factories/evaluation_score.rb b/spec/factories/evaluation_score.rb
index f1b12622..0a5871db 100644
--- a/spec/factories/evaluation_score.rb
+++ b/spec/factories/evaluation_score.rb
@@ -9,16 +9,20 @@
# Evaluator is a FactoryBot param containing attributes from the factory record
after(:build) do |evaluation_score, _evaluator|
- criterion = evaluation_score.evaluation_criterion
-
- case criterion.scoring_type
- when "numeric"
- evaluation_score.score = rand(0..criterion.points_or_weight)
- when "rating", "binary"
- evaluation_score.score = rand(criterion.option_range_start..criterion.option_range_end)
- else
- raise ArgumentError, "Invalid scoring type '#{criterion.scoring_type}' for evaluation criterion"
- end
+ valid_score_for_criterion(evaluation_score)
end
end
end
+
+def valid_score_for_criterion(evaluation_score)
+ criterion = evaluation_score.evaluation_criterion
+
+ case criterion.scoring_type
+ when "numeric"
+ evaluation_score.score = rand(0..criterion.points_or_weight)
+ when "rating", "binary"
+ evaluation_score.score = rand(criterion.option_range_start..criterion.option_range_end)
+ else
+ raise ArgumentError, "Invalid scoring type '#{criterion.scoring_type}' for evaluation criterion"
+ end
+end
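
One subtlety the request specs below rely on: valid_score_for_criterion both assigns the score on the record it receives and returns that value, because the case expression evaluates to the result of the assignment. A quick illustration using the factory defined above:

    score_record = build(:evaluation_score)           # after(:build) already assigns a valid score
    chosen = valid_score_for_criterion(score_record)  # reassigns and returns the chosen value
    chosen == score_record.score                      # => true
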
diff --git a/spec/requests/evaluations_spec.rb b/spec/requests/evaluations_spec.rb
index c608f6a5..01c2b7c4 100644
--- a/spec/requests/evaluations_spec.rb
+++ b/spec/requests/evaluations_spec.rb
@@ -1,6 +1,14 @@
require 'rails_helper'
RSpec.describe "Evaluations" do
+ before(:all) do
+ Bullet.enable = false
+ end
+
+ after(:all) do
+ Bullet.enable = true
+ end
+
describe "GET /index" do
context "when logged in as an super admin" do
before do
@@ -63,4 +71,500 @@
end
end
end
+
+ # evaluation_path
+ describe "GET /evaluations/:id" do
+ context "when logged in as an evaluator" do
+ it "allows me to view a draft evaluation I created"
+
+ it "allows me to view a completed evaluation I created"
+
+ it "does not allow me to view an evaluation I didn't create"
+ end
+ end
+
+ # new_evaluator_submission_assignment_evaluation_path
+ describe "GET /evaluator_submission_assignments/:evaluator_submission_assignment_id/evaluations/new" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+ let(:evaluator_submission_assignment) { create(:evaluator_submission_assignment, user_id: current_user.id) }
+
+ before { log_in_user(current_user) }
+
+ it "takes me to the new evaluation page if I was assigned to the submission" do
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ get new_evaluator_submission_assignment_evaluation_path(
+ evaluator_submission_assignment_id: evaluator_submission_assignment.id
+ )
+
+ expect(response).to have_http_status(:success)
+
+ evaluation = assigns(:evaluation)
+ expect(evaluation.user_id).to eq(current_user.id)
+ expect(evaluation.evaluation_form_id).to eq(evaluation_form.id)
+ expect(evaluation.submission_id).to eq(evaluator_submission_assignment.submission_id)
+ expect(evaluation.evaluator_submission_assignment_id).to eq(evaluator_submission_assignment.id)
+
+ criteria_ids = evaluation.evaluation_form.evaluation_criteria.pluck(:id)
+ score_criteria_ids = evaluation.evaluation_scores.map(&:evaluation_criterion_id)
+
+ expect(score_criteria_ids).to match_array(criteria_ids)
+ end
+
+ it "redirects me to my evaluations if I was not assigned to the submission" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ get new_evaluator_submission_assignment_evaluation_path(
+ evaluator_submission_assignment_id: evaluator_submission_assignment.id
+ )
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.evaluator_submission_assignment_not_found"))
+ end
+
+ it "redirects me to my evaluations if the submission assignment was not found" do
+ get new_evaluator_submission_assignment_evaluation_path(evaluator_submission_assignment_id: "missing")
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.evaluator_submission_assignment_not_found"))
+ end
+
+ it "redirects me to my evaluations if the submission assignment has no evaluation form" do
+ # No evaluation form is currently created for the phase by default in the factory
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+
+ get new_evaluator_submission_assignment_evaluation_path(
+ evaluator_submission_assignment_id: evaluator_submission_assignment.id
+ )
+ expect(response).to redirect_to(evaluations_path)
+ end
+ end
+ end
+
+ # save_draft_evaluations_path
+ describe "POST /evaluations/save_draft" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+
+ before { log_in_user(current_user) }
+
+      it "allows me to save a draft of my new evaluation, skipping validations and leaving completed_at unset" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+        # Nullify scores to verify that saving a draft skips validations
+ evaluation_params[:evaluation_scores_attributes].each do |value|
+ value[:score] = nil
+ value[:comment] = nil
+ end
+
+ expect do
+ post save_draft_evaluations_path, params: { evaluation: evaluation_params }
+ end.to change { Evaluation.count }
+
+ saved_evaluation = Evaluation.last
+
+ expect(saved_evaluation.completed_at).to be_nil
+ expect(saved_evaluation.evaluation_scores.count).to eq(evaluation_form.evaluation_criteria.count)
+ saved_evaluation.evaluation_scores.each do |score|
+ expect(score.score).to be_nil
+ expect(score.comment).to be_nil
+ end
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:notice]).to eq(I18n.t('evaluations.notices.saved_draft'))
+ end
+
+ it "does not allow me to save a draft evaluation for a submission I'm not assigned to" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+        # Nullify scores to verify that saving a draft skips validations
+ evaluation_params[:evaluation_scores_attributes].each do |value|
+ value[:score] = nil
+ value[:comment] = nil
+ end
+
+ expect do
+ post save_draft_evaluations_path, params: { evaluation: evaluation_params }
+ end.not_to change { Evaluation.count }
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.unauthorized"))
+ end
+
+      it "renders the new template if an association is missing" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+ post save_draft_evaluations_path, params: { evaluation: evaluation_params.merge({ evaluation_form_id: nil }) }
+
+ expect(response).to render_template(:new)
+ expect(response).to have_http_status(:unprocessable_entity)
+        expect(flash[:alert]).to match(I18n.t("evaluations.alerts.save_draft_error",
+                                               errors: assigns(:evaluation).errors.full_messages.to_sentence))
+ end
+ end
+ end
+
+ # mark_complete_evaluations_path
+ describe "POST /evaluations/mark_complete" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+
+ before { log_in_user(current_user) }
+
+ it "allows me to mark my new evaluation as complete" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+ expect do
+ post mark_complete_evaluations_path, params: { evaluation: evaluation_params }
+ end.to change { Evaluation.count }
+
+ saved_evaluation = Evaluation.last
+
+ expect(saved_evaluation.completed_at).not_to be_nil
+ expect(saved_evaluation.evaluation_scores.count).to eq(evaluation_form.evaluation_criteria.count)
+ saved_evaluation.evaluation_scores.each do |score|
+ expect(score.score).not_to be_nil
+ end
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:notice]).to eq(I18n.t("evaluations.notices.marked_complete"))
+ end
+
+ it "does not allow me to mark my new evaluation as complete if it fails validations" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+        # Nullify scores to verify that marking as complete fails validations
+ evaluation_params[:evaluation_scores_attributes].each do |value|
+ value[:score] = nil
+ value[:comment] = nil
+ end
+
+ expect do
+ post mark_complete_evaluations_path, params: { evaluation: evaluation_params }
+ end.not_to change { Evaluation.count }
+
+ failed_evaluation = assigns(:evaluation)
+
+ expect(response).to render_template(:new)
+ expect(response).to have_http_status(:unprocessable_entity)
+ expect(flash[:alert]).to match(I18n.t("evaluations.alerts.mark_complete_error",
+ errors: failed_evaluation.errors.full_messages.to_sentence))
+ end
+
+ it "does not allow me to mark my new evaluation as complete for a submission I'm not assigned to" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = build_evaluation(evaluator_submission_assignment)
+ evaluation_params = build_evaluation_params(evaluation)
+
+ expect do
+ post mark_complete_evaluations_path, params: { evaluation: evaluation_params }
+ end.not_to change { Evaluation.count }
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.unauthorized"))
+ end
+ end
+ end
+
+ # edit_evaluation_path
+ describe "GET /evaluations/:id/edit" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+
+ before { log_in_user(current_user) }
+
+ it "allows me to view an existing draft evaluation I created" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: current_user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ get edit_evaluation_path(evaluation)
+
+ evaluation = assigns(:evaluation)
+
+ expect(evaluation.user_id).to eq(current_user.id)
+ expect(evaluation.evaluation_form_id).to eq(evaluation_form.id)
+ expect(evaluation.submission_id).to eq(evaluator_submission_assignment.submission_id)
+ expect(evaluation.evaluator_submission_assignment_id).to eq(evaluator_submission_assignment.id)
+
+ criteria_ids = evaluation.evaluation_form.evaluation_criteria.pluck(:id)
+ score_criteria_ids = evaluation.evaluation_scores.map(&:evaluation_criterion_id)
+
+ expect(score_criteria_ids).to match_array(criteria_ids)
+ end
+
+ it "allows me to view an existing complete evaluation I created" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: current_user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment,
+ completed_at: Time.current)
+
+ get edit_evaluation_path(evaluation)
+
+ evaluation = assigns(:evaluation)
+
+ expect(evaluation.user_id).to eq(current_user.id)
+ expect(evaluation.evaluation_form_id).to eq(evaluation_form.id)
+ expect(evaluation.submission_id).to eq(evaluator_submission_assignment.submission_id)
+ expect(evaluation.evaluator_submission_assignment_id).to eq(evaluator_submission_assignment.id)
+
+ criteria_ids = evaluation.evaluation_form.evaluation_criteria.pluck(:id)
+ score_criteria_ids = evaluation.evaluation_scores.map(&:evaluation_criterion_id)
+
+ expect(score_criteria_ids).to match_array(criteria_ids)
+ end
+
+ it "redirects me if I try to view an evaluation I did not create" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ get edit_evaluation_path(evaluation)
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.unauthorized"))
+ end
+ end
+ end
+
+ # save_draft_evaluation_path
+ describe "PATCH /evaluations/:id/save_draft" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+
+ before { log_in_user(current_user) }
+
+      it "allows me to save a draft of my existing evaluation, skipping validations and leaving completed_at unset" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: current_user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ evaluation_params = build_patch_evaluation_params(evaluation)
+
+ evaluation_params = evaluation_params.merge(additional_comments: "Test")
+
+        # Nullify scores to verify that saving a draft skips validations
+ evaluation_params[:evaluation_scores_attributes].each do |value|
+ value[:score] = nil
+ value[:comment] = nil
+ end
+
+ patch save_draft_evaluation_path(evaluation, params: { evaluation: evaluation_params })
+
+ updated_evaluation = assigns(:evaluation)
+
+ expect(updated_evaluation).to be_persisted
+ expect(updated_evaluation.additional_comments).to eq("Test")
+
+ expect(updated_evaluation.errors).to be_empty
+ expect(updated_evaluation.completed_at).to be_nil
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:notice]).to include(I18n.t("evaluations.notices.saved_draft"))
+ end
+
+ it "does not allow me to save a draft of an evaluation I did not create" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ evaluation_params = { additional_comments: "Test" }
+
+ patch save_draft_evaluation_path(evaluation, params: { evaluation: evaluation_params })
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.unauthorized"))
+ end
+ end
+ end
+
+ # mark_complete_evaluation_path
+ describe "PATCH /evaluations/:id/mark_complete" do
+ context "when logged in as an evaluator" do
+ let(:current_user) { create_user(role: "evaluator") }
+
+ before { log_in_user(current_user) }
+
+ it "allows me to mark my existing evaluation as complete" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: current_user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ evaluation_params = { additional_comments: "Test" }
+
+ patch mark_complete_evaluation_path(evaluation, params: { evaluation: evaluation_params })
+
+ updated_evaluation = assigns(:evaluation)
+
+ expect(updated_evaluation).to be_persisted
+ expect(updated_evaluation.additional_comments).to eq("Test")
+
+ expect(updated_evaluation.errors).to be_empty
+ expect(updated_evaluation.completed_at).not_to be_nil
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:notice]).to include(I18n.t("evaluations.notices.marked_complete"))
+ end
+
+ # TODO: Needs fix
+ it "does not allow me to mark my existing evaluation as complete if it fails validations" do
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: current_user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: current_user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ evaluation_params = build_patch_evaluation_params(evaluation)
+
+        # Nullify scores to verify that marking as complete fails validations
+ evaluation_params[:evaluation_scores_attributes].each do |value|
+ value[:score] = nil
+ value[:comment] = nil
+ end
+
+ patch mark_complete_evaluation_path(evaluation, params: { evaluation: evaluation_params })
+
+ failed_evaluation = assigns(:evaluation)
+ evaluation_record = Evaluation.find_by(id: failed_evaluation.id)
+
+ expect(failed_evaluation.completed_at).to be_nil
+ expect(evaluation_record.completed_at).to be_nil
+
+ expect(response).to render_template(:edit)
+ expect(response).to have_http_status(:unprocessable_entity)
+ expect(flash[:alert]).to match(I18n.t("evaluations.alerts.mark_complete_error",
+ errors: failed_evaluation.errors.full_messages.to_sentence))
+ end
+
+ it "does not allow me to mark an evaluation I did not create as complete" do
+ user = create(:user, :evaluator)
+ evaluator_submission_assignment = create(:evaluator_submission_assignment, user_id: user.id)
+ evaluation_form = create(:evaluation_form, phase: evaluator_submission_assignment.phase)
+
+ evaluation = create(:evaluation,
+ user: user,
+ evaluation_form: evaluation_form,
+ submission: evaluator_submission_assignment.submission,
+ evaluator_submission_assignment: evaluator_submission_assignment)
+
+ evaluation_params = { additional_comments: "Test" }
+
+ patch mark_complete_evaluation_path(evaluation, params: { evaluation: evaluation_params })
+
+ expect(response).to redirect_to(evaluations_path)
+ expect(flash[:alert]).to eq(I18n.t("evaluations.alerts.unauthorized"))
+ end
+ end
+ end
+
+ def build_evaluation(evaluator_submission_assignment)
+ user = evaluator_submission_assignment.evaluator
+ evaluation_form = evaluator_submission_assignment.phase.evaluation_form
+ submission = evaluator_submission_assignment.submission
+
+ evaluation = Evaluation.new(
+ user: user,
+ evaluation_form: evaluation_form,
+ submission: submission,
+ evaluator_submission_assignment: evaluator_submission_assignment
+ )
+
+ evaluation_form.evaluation_criteria.each do |criterion|
+ evaluation.evaluation_scores.build(evaluation_criterion: criterion)
+ end
+
+ evaluation
+ end
+
+ def build_evaluation_params(evaluation)
+ {
+ user_id: evaluation.user_id,
+ evaluator_submission_assignment_id: evaluation.evaluator_submission_assignment_id,
+ submission_id: evaluation.evaluator_submission_assignment.submission_id,
+ evaluation_form_id: evaluation.evaluation_form_id,
+ evaluation_scores_attributes: evaluation.evaluation_scores.map do |score|
+ {
+ evaluation_criterion_id: score.evaluation_criterion_id,
+ score: valid_score_for_criterion(score),
+ comment: Faker::Lorem.sentence
+ }
+ end
+ }
+ end
+
+ def build_patch_evaluation_params(evaluation)
+ {
+ additional_comments: evaluation.additional_comments,
+ revision_comments: evaluation.revision_comments,
+ evaluation_scores_attributes: evaluation.evaluation_scores.map do |score|
+ {
+ id: score.id,
+ evaluation_criterion_id: score.evaluation_criterion_id,
+ score: valid_score_for_criterion(score),
+ comment: Faker::Lorem.sentence
+ }
+ end
+ }
+ end
end