From b6bc578aea9d31ba078230e05e6230a2263e7744 Mon Sep 17 00:00:00 2001
From: Felix Auringer <48409110+felixauringer@users.noreply.github.com>
Date: Fri, 11 Jun 2021 12:01:16 +0200
Subject: [PATCH] Move submission scoring from controller concern to
 submission model

Localization and markdown formatting are now done in a controller
concern in order to bring this logic away from the data and towards the
view.
---
 .../concerns/scoring_result_formatting.rb     | 11 +++
 .../concerns/submission_scoring.rb            | 93 -------------------
 .../remote_evaluation_controller.rb           |  3 +-
 app/controllers/submissions_controller.rb     |  3 +-
 app/models/submission.rb                      | 90 ++++++++++++++++--
 .../scoring_result_formatting_spec.rb         | 54 +++++++++++
 spec/concerns/submission_scoring_spec.rb      | 38 --------
 spec/controllers/exercises_controller_spec.rb |  4 +-
 spec/features/editor_spec.rb                  |  4 +-
 spec/models/submission_spec.rb                | 31 +++++++
 10 files changed, 188 insertions(+), 143 deletions(-)
 create mode 100644 app/controllers/concerns/scoring_result_formatting.rb
 delete mode 100644 app/controllers/concerns/submission_scoring.rb
 create mode 100644 spec/concerns/scoring_result_formatting_spec.rb
 delete mode 100644 spec/concerns/submission_scoring_spec.rb

diff --git a/app/controllers/concerns/scoring_result_formatting.rb b/app/controllers/concerns/scoring_result_formatting.rb
new file mode 100644
index 00000000..27096413
--- /dev/null
+++ b/app/controllers/concerns/scoring_result_formatting.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module ScoringResultFormatting
+  def format_scoring_results(outputs)
+    outputs.map do |output|
+      output[:message] = t(output[:message], default: render_markdown(output[:message]))
+      output[:filename] = t(output[:filename], default: output[:filename])
+      output
+    end
+  end
+end
diff --git a/app/controllers/concerns/submission_scoring.rb b/app/controllers/concerns/submission_scoring.rb
deleted file mode 100644
index ebcfb3d2..00000000
--- a/app/controllers/concerns/submission_scoring.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-module SubmissionScoring
-  def test_result(output, file)
-    submission = self
-    # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: submission.id } do
-    #   Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
-    assessor = Assessor.new(execution_environment: submission.execution_environment)
-    assessment = assessor.assess(output)
-    passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
-    testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
-    if testrun_output.present?
-      submission.exercise.execution_environment.error_templates.each do |template|
-        pattern = Regexp.new(template.signature).freeze
-        StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
-      end
-    end
-    testrun = Testrun.create(
-      submission: submission,
-      cause: 'assess', # Required to differ run and assess for RfC show
-      file: file, # Test file that was executed
-      passed: passed,
-      output: testrun_output,
-      container_execution_time: output[:container_execution_time],
-      waiting_for_container_time: output[:waiting_for_container_time]
-    )
-
-    filename = file.name_with_extension
-
-    if file.teacher_defined_linter?
-      LinterCheckRun.create_from(testrun, assessment)
-      assessment = assessor.translate_linter(assessment, I18n.locale)
-
-      # replace file name with hint if linter is not used for grading. Refactor!
-      filename = t('exercises.implement.not_graded', locale: :de) if file.weight.zero?
-    end
-
-    output.merge!(assessment)
-    output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
-  end
-
-  # TODO: make this a controller concern again to bring the locales nearer to the view
-  def feedback_message(file, output)
-    if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
-      I18n.t('exercises.implement.default_test_feedback')
-    elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
-      I18n.t('exercises.implement.default_linter_feedback')
-    else
-      # render_markdown(file.feedback_message)
-      file.feedback_message
-    end
-  end
-
-  def score_submission(outputs)
-    submission = self
-    score = 0.0
-    if outputs.present?
-      outputs.each do |output|
-        score += output[:score] * output[:weight] unless output.nil?
-
-        if output.present? && output[:status] == :timeout
-          output[:stderr] += "\n\n#{t('exercises.editor.timeout',
-            permitted_execution_time: submission.exercise.execution_environment.permitted_execution_time.to_s)}"
-        end
-      end
-    end
-    submission.update(score: score)
-    if submission.normalized_score.to_d == 1.0.to_d
-      Thread.new do
-        RequestForComment.where(exercise_id: submission.exercise_id, user_id: submission.user_id,
-          user_type: submission.user_type).each do |rfc|
-          rfc.full_score_reached = true
-          rfc.save
-        end
-      ensure
-        ActiveRecord::Base.connection_pool.release_connection
-      end
-    end
-    if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
-      outputs.each do |output|
-        output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
-      end
-    end
-
-    # Return all test results except for those of a linter if not allowed
-    show_linter = Python20CourseWeek.show_linter? submission.exercise
-    outputs&.reject do |output|
-      next if show_linter || output.blank?
-
-      output[:file_role] == 'teacher_defined_linter'
-    end
-  end
-end
diff --git a/app/controllers/remote_evaluation_controller.rb b/app/controllers/remote_evaluation_controller.rb
index 96db6c8b..2f23eacb 100644
--- a/app/controllers/remote_evaluation_controller.rb
+++ b/app/controllers/remote_evaluation_controller.rb
@@ -3,6 +3,7 @@
 class RemoteEvaluationController < ApplicationController
   include RemoteEvaluationParameters
   include Lti
+  include ScoringResultFormatting
 
   skip_after_action :verify_authorized
   skip_before_action :verify_authenticity_token
@@ -62,7 +63,7 @@ status: 202}
     validation_token = remote_evaluation_params[:validation_token]
     if (remote_evaluation_mapping = RemoteEvaluationMapping.find_by(validation_token: validation_token))
       @submission = Submission.create(build_submission_params(cause, remote_evaluation_mapping))
-      @submission.calculate_score
+      format_scoring_results(@submission.calculate_score)
     else
       # TODO: better output
       # TODO: check token expired?
diff --git a/app/controllers/submissions_controller.rb b/app/controllers/submissions_controller.rb
index 05e86e4d..06bb8876 100644
--- a/app/controllers/submissions_controller.rb
+++ b/app/controllers/submissions_controller.rb
@@ -5,6 +5,7 @@ class SubmissionsController < ApplicationController
   include CommonBehavior
   include Lti
   include SubmissionParameters
+  include ScoringResultFormatting
   include Tubesock::Hijack
 
   before_action :set_submission,
@@ -240,7 +241,7 @@ class SubmissionsController < ApplicationController
     hijack do |tubesock|
       return kill_socket(tubesock) if @embed_options[:disable_run]
 
-      tubesock.send_data(@submission.calculate_score)
+      tubesock.send_data(JSON.dump(format_scoring_results(@submission.calculate_score)))
       # To enable hints when scoring a submission, uncomment the next line:
       # send_hints(tubesock, StructuredError.where(submission: @submission))
     rescue Runner::Error::ExecutionTimeout => e
diff --git a/app/models/submission.rb b/app/models/submission.rb
index c2fd3121..7810cbb8 100644
--- a/app/models/submission.rb
+++ b/app/models/submission.rb
@@ -4,7 +4,6 @@ class Submission < ApplicationRecord
   include Context
   include Creation
   include ActionCableHelper
-  include SubmissionScoring
 
   require 'concurrent/future'
 
@@ -140,9 +139,9 @@ class Submission < ApplicationRecord
   end
 
   def calculate_score
-    score = nil
+    file_scores = nil
     prepared_runner do |runner, waiting_duration|
-      scores = collect_files.select(&:teacher_defined_assessment?).map do |file|
+      file_scores = collect_files.select(&:teacher_defined_assessment?).map do |file|
         score_command = command_for execution_environment.test_command, file.name_with_extension
         stdout = +''
         stderr = +''
@@ -167,11 +166,10 @@ class Submission < ApplicationRecord
           stdout: stdout,
           stderr: stderr,
         }
-        test_result(output, file)
+        score_file(output, file)
       end
-      score = score_submission(scores)
     end
-    JSON.dump(score)
+    combine_file_scores(file_scores)
   end
 
   def run(file, &block)
@@ -214,4 +212,84 @@ class Submission < ApplicationRecord
       module_name: File.basename(filename, File.extname(filename)).underscore,
     }
   end
+
+  def score_file(output, file)
+    # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: id } do
+    #   Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: id } do
+    assessor = Assessor.new(execution_environment: execution_environment)
+    assessment = assessor.assess(output)
+    passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
+    testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
+    if testrun_output.present?
+      execution_environment.error_templates.each do |template|
+        pattern = Regexp.new(template.signature).freeze
+        StructuredError.create_from_template(template, testrun_output, self) if pattern.match(testrun_output)
+      end
+    end
+    testrun = Testrun.create(
+      submission: self,
+      cause: 'assess', # Required to differ run and assess for RfC show
+      file: file, # Test file that was executed
+      passed: passed,
+      output: testrun_output,
+      container_execution_time: output[:container_execution_time],
+      waiting_for_container_time: output[:waiting_for_container_time]
+    )
+
+    filename = file.name_with_extension
+
+    if file.teacher_defined_linter?
+      LinterCheckRun.create_from(testrun, assessment)
+      assessment = assessor.translate_linter(assessment, I18n.locale)
+
+      # replace file name with hint if linter is not used for grading. Refactor!
+      filename = 'exercises.implement.not_graded' if file.weight.zero?
+    end
+
+    output.merge!(assessment)
+    output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
+  end
+
+  def feedback_message(file, output)
+    if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
+      'exercises.implement.default_test_feedback'
+    elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
+      'exercises.implement.default_linter_feedback'
+    else
+      file.feedback_message
+    end
+  end
+
+  def combine_file_scores(outputs)
+    score = 0.0
+    if outputs.present?
+      outputs.each do |output|
+        score += output[:score] * output[:weight] unless output.nil?
+      end
+    end
+    update(score: score)
+    if normalized_score.to_d == 1.0.to_d
+      Thread.new do
+        RequestForComment.where(exercise_id: exercise_id, user_id: user_id, user_type: user_type).find_each do |rfc|
+          rfc.full_score_reached = true
+          rfc.save
+        end
+      ensure
+        ActiveRecord::Base.connection_pool.release_connection
+      end
+    end
+    if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
+      outputs.each do |output|
+        output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
+      end
+    end
+
+    # Return all test results except for those of a linter if not allowed
+    show_linter = Python20CourseWeek.show_linter? exercise
+    outputs&.reject do |output|
+      next if show_linter || output.blank?
+
+      output[:file_role] == 'teacher_defined_linter'
+    end
+  end
 end
diff --git a/spec/concerns/scoring_result_formatting_spec.rb b/spec/concerns/scoring_result_formatting_spec.rb
new file mode 100644
index 00000000..d66f9e3d
--- /dev/null
+++ b/spec/concerns/scoring_result_formatting_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+class Controller < AnonymousController
+  include ScoringResultFormatting
+end
+
+describe ScoringResultFormatting do
+  let(:controller) { Controller.new }
+  let(:filename) { 'exercise.py' }
+  let(:feedback_message) { '**good work**' }
+  let(:outputs) { [{filename: filename, message: feedback_message}] }
+
+  describe 'feedback message' do
+    let(:new_feedback_message) { controller.format_scoring_results(outputs).first[:message] }
+
+    context 'when the feedback message is not a path to a locale' do
+      let(:feedback_message) { '**good work**' }
+
+      it 'renders the feedback message as markdown' do
+        expect(new_feedback_message).to match('<strong>good work</strong>')
+      end
+    end
+
+    context 'when the feedback message is a valid path to a locale' do
+      let(:feedback_message) { 'exercises.implement.default_test_feedback' }
+
+      it 'replaces the feedback message with the locale' do
+        expect(new_feedback_message).to eq(I18n.t(feedback_message))
+      end
+    end
+  end
+
+  describe 'filename' do
+    let(:new_filename) { controller.format_scoring_results(outputs).first[:filename] }
+
+    context 'when the filename is not a path to a locale' do
+      let(:filename) { 'exercise.py' }
+
+      it 'does not alter the filename' do
+        expect(new_filename).to eq(filename)
+      end
+    end
+
+    context 'when the filename is a valid path to a locale' do
+      let(:filename) { 'exercises.implement.not_graded' }
+
+      it 'replaces the filename with the locale' do
+        expect(new_filename).to eq(I18n.t(filename))
+      end
+    end
+  end
+end
diff --git a/spec/concerns/submission_scoring_spec.rb b/spec/concerns/submission_scoring_spec.rb
deleted file mode 100644
index 8eb1506f..00000000
--- a/spec/concerns/submission_scoring_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails_helper'
-
-describe SubmissionScoring do
-  let(:submission) { FactoryBot.create(:submission, cause: 'submit') }
-
-  describe '#collect_test_results' do
-    let(:runner) { FactoryBot.create :runner }
-
-    before do
-      allow(Runner).to receive(:for).and_return(runner)
-      allow(runner).to receive(:copy_files)
-      allow(runner).to receive(:attach_to_execution).and_return(1.0)
-    end
-
-    after { submission.calculate_score }
-
-    it 'executes every teacher-defined test file' do
-      allow(submission).to receive(:score_submission)
-      submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
-        allow(submission).to receive(:test_result).with(any_args, file).and_return({})
-      end
-    end
-
-    it 'scores the submission' do
-      allow(submission).to receive(:score_submission).and_return([])
-    end
-  end
-
-  describe '#score_submission', cleaning_strategy: :truncation do
-    after { submission.score_submission([]) }
-
-    it 'assigns a score to the submissions' do
-      expect(submission).to receive(:update).with(score: anything)
-    end
-  end
-end
diff --git a/spec/controllers/exercises_controller_spec.rb b/spec/controllers/exercises_controller_spec.rb
index fa0c7628..683f0d40 100644
--- a/spec/controllers/exercises_controller_spec.rb
+++ b/spec/controllers/exercises_controller_spec.rb
@@ -242,7 +242,7 @@ describe ExercisesController do
     let(:user) { FactoryBot.create(:external_user) }
     let(:scoring_response) do
       [{
-        status: 'ok',
+        status: :ok,
         stdout: '',
         stderr: '',
         waiting_for_container_time: 0,
@@ -263,7 +263,7 @@ describe ExercisesController do
       FactoryBot.create(:lti_parameter, external_user: user, exercise: exercise)
       submission = FactoryBot.build(:submission, exercise: exercise, user: user)
       allow(submission).to receive(:normalized_score).and_return(1)
-      allow(submission).to receive(:calculate_score).and_return(JSON.dump(scoring_response))
+      allow(submission).to receive(:calculate_score).and_return(scoring_response)
       allow(Submission).to receive(:create).and_return(submission)
     end
 
diff --git a/spec/features/editor_spec.rb b/spec/features/editor_spec.rb
index 2f2b1126..06919ea7 100644
--- a/spec/features/editor_spec.rb
+++ b/spec/features/editor_spec.rb
@@ -6,7 +6,7 @@ describe 'Editor', js: true do
   let(:exercise) { FactoryBot.create(:audio_video, description: Forgery(:lorem_ipsum).sentence) }
   let(:scoring_response) do
     [{
-      status: 'ok',
+      status: :ok,
       stdout: '',
       stderr: '',
       waiting_for_container_time: 0,
@@ -95,7 +95,7 @@ describe 'Editor', js: true do
 
   it 'contains a button for submitting the exercise' do
     submission = FactoryBot.build(:submission, user: user, exercise: exercise)
-    allow(submission).to receive(:calculate_score).and_return(JSON.dump(scoring_response))
+    allow(submission).to receive(:calculate_score).and_return(scoring_response)
     allow(Submission).to receive(:find).and_return(submission)
     click_button(I18n.t('exercises.editor.score'))
     expect(page).not_to have_css('#submit_outdated')
diff --git a/spec/models/submission_spec.rb b/spec/models/submission_spec.rb
index b4d4eb7b..60a86923 100644
--- a/spec/models/submission_spec.rb
+++ b/spec/models/submission_spec.rb
@@ -141,4 +141,35 @@ describe Submission do
       end
     end
   end
+
+  describe '#calculate_score' do
+    let(:runner) { FactoryBot.create :runner }
+
+    before do
+      allow(Runner).to receive(:for).and_return(runner)
+      allow(runner).to receive(:copy_files)
+      allow(runner).to receive(:attach_to_execution).and_return(1.0)
+    end
+
+    after { submission.calculate_score }
+
+    it 'executes every teacher-defined test file' do
+      allow(submission).to receive(:combine_file_scores)
+      submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
+        expect(submission).to receive(:score_file).with(any_args, file)
+      end
+    end
+
+    it 'scores the submission' do
+      expect(submission).to receive(:combine_file_scores)
+    end
+  end
+
+  describe '#combine_file_scores' do
+    after { submission.send(:combine_file_scores, []) }
+
+    it 'assigns a score to the submission' do
+      expect(submission).to receive(:update).with(score: anything)
+    end
+  end
 end
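
A note on the refactored contract: Submission#calculate_score now returns an
array of result hashes instead of a JSON string, and the :message and
:filename values may be either plain strings or I18n keys. Each caller is
expected to localize and render the results through the new
ScoringResultFormatting concern. A minimal usage sketch, assuming a host
controller that provides `t` and `render_markdown` as in this codebase; the
`outputs` array below is hypothetical example data:

    # Hypothetical shape of the data produced by Submission#calculate_score:
    outputs = [
      {filename: 'exercise_spec.py', message: 'exercises.implement.default_test_feedback', score: 1.0, weight: 1.0},
      {filename: 'exercises.implement.not_graded', message: '**well done**', score: 1.0, weight: 0.0},
    ]

    formatted = format_scoring_results(outputs)
    # The first :message becomes the localized default test feedback, the
    # second is rendered as markdown (no matching I18n key exists), and the
    # linter placeholder filename is replaced by its localized hint.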
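The concern leans on I18n's :default option: I18n.t returns the translation
when the key exists and the given default otherwise, so a single call handles
both locale-key and plain-string inputs. Note that render_markdown is
evaluated eagerly either way, because it is passed as a value rather than a
proc. Illustrative calls with key names from this patch (actual return values
depend on the configured locale files):

    I18n.t('exercises.implement.not_graded', default: 'exercise.py')
    # => the localized "not graded" hint, since this key is defined
    I18n.t('exercise.py', default: 'exercise.py')
    # => "exercise.py", since no translation exists for that key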