Move submission scoring from controller concern to submission model

Localization and markdown formatting are now done in a controller
concern in order to move this logic away from the data and towards
the view.
Felix Auringer
2021-06-11 12:01:16 +02:00
committed by Sebastian Serth
parent b847daf823
commit b6bc578aea
10 changed files with 188 additions and 143 deletions
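
At a glance, the caller-facing change (a minimal sketch pieced together from the hunks below; variable names are illustrative):

# Before: the model scored, localized, and serialized in one step.
#   tubesock.send_data(@submission.calculate_score)   # JSON string
# After: the model returns plain result hashes; the controller localizes
# and serializes them.
#   outputs = @submission.calculate_score              # Array of Hashes
#   tubesock.send_data(JSON.dump(format_scoring_results(outputs)))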

View File

@@ -0,0 +1,11 @@
# frozen_string_literal: true

module ScoringResultFormatting
  def format_scoring_results(outputs)
    outputs.map do |output|
      output[:message] = t(output[:message], default: render_markdown(output[:message]))
      output[:filename] = t(output[:filename], default: output[:filename])
      output
    end
  end
end
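
A hedged usage sketch of this concern (the controller and action are hypothetical; t and render_markdown are assumed to be provided by the including controller, as in the controllers below):

# Hypothetical controller, not part of this commit.
class ScoringDemoController < ApplicationController
  include ScoringResultFormatting

  def score
    submission = Submission.find(params[:id])
    # calculate_score returns an array of hashes whose :message and
    # :filename may be I18n keys; format_scoring_results translates the
    # keys and renders non-key messages as markdown.
    render json: format_scoring_results(submission.calculate_score)
  end
end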

View File

@@ -1,93 +0,0 @@
# frozen_string_literal: true

module SubmissionScoring
  def test_result(output, file)
    submission = self
    # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: submission.id } do
    # Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
    assessor = Assessor.new(execution_environment: submission.execution_environment)
    assessment = assessor.assess(output)
    passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
    testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
    if testrun_output.present?
      submission.exercise.execution_environment.error_templates.each do |template|
        pattern = Regexp.new(template.signature).freeze
        StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
      end
    end
    testrun = Testrun.create(
      submission: submission,
      cause: 'assess', # Required to differ run and assess for RfC show
      file: file, # Test file that was executed
      passed: passed,
      output: testrun_output,
      container_execution_time: output[:container_execution_time],
      waiting_for_container_time: output[:waiting_for_container_time]
    )
    filename = file.name_with_extension
    if file.teacher_defined_linter?
      LinterCheckRun.create_from(testrun, assessment)
      assessment = assessor.translate_linter(assessment, I18n.locale)
      # replace file name with hint if linter is not used for grading. Refactor!
      filename = t('exercises.implement.not_graded', locale: :de) if file.weight.zero?
    end
    output.merge!(assessment)
    output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
  end

  # TODO: make this a controller concern again to bring the locales nearer to the view
  def feedback_message(file, output)
    if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
      I18n.t('exercises.implement.default_test_feedback')
    elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
      I18n.t('exercises.implement.default_linter_feedback')
    else
      # render_markdown(file.feedback_message)
      file.feedback_message
    end
  end

  def score_submission(outputs)
    submission = self
    score = 0.0
    if outputs.present?
      outputs.each do |output|
        score += output[:score] * output[:weight] unless output.nil?
        if output.present? && output[:status] == :timeout
          output[:stderr] += "\n\n#{t('exercises.editor.timeout',
            permitted_execution_time: submission.exercise.execution_environment.permitted_execution_time.to_s)}"
        end
      end
    end
    submission.update(score: score)
    if submission.normalized_score.to_d == 1.0.to_d
      Thread.new do
        RequestForComment.where(exercise_id: submission.exercise_id, user_id: submission.user_id,
          user_type: submission.user_type).each do |rfc|
          rfc.full_score_reached = true
          rfc.save
        end
      ensure
        ActiveRecord::Base.connection_pool.release_connection
      end
    end
    if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
      outputs.each do |output|
        output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
      end
    end
    # Return all test results except for those of a linter if not allowed
    show_linter = Python20CourseWeek.show_linter? submission.exercise
    outputs&.reject do |output|
      next if show_linter || output.blank?

      output[:file_role] == 'teacher_defined_linter'
    end
  end
end

View File

@@ -3,6 +3,7 @@
class RemoteEvaluationController < ApplicationController
  include RemoteEvaluationParameters
  include Lti
  include ScoringResultFormatting

  skip_after_action :verify_authorized
  skip_before_action :verify_authenticity_token
@@ -62,7 +63,7 @@ status: 202}
    validation_token = remote_evaluation_params[:validation_token]
    if (remote_evaluation_mapping = RemoteEvaluationMapping.find_by(validation_token: validation_token))
      @submission = Submission.create(build_submission_params(cause, remote_evaluation_mapping))
      @submission.calculate_score
      format_scoring_results(@submission.calculate_score)
    else
      # TODO: better output
      # TODO: check token expired?

View File

@@ -5,6 +5,7 @@ class SubmissionsController < ApplicationController
  include CommonBehavior
  include Lti
  include SubmissionParameters
  include ScoringResultFormatting
  include Tubesock::Hijack

  before_action :set_submission,
@@ -240,7 +241,7 @@ class SubmissionsController < ApplicationController
    hijack do |tubesock|
      return kill_socket(tubesock) if @embed_options[:disable_run]

      tubesock.send_data(@submission.calculate_score)
      tubesock.send_data(JSON.dump(format_scoring_results(@submission.calculate_score)))
      # To enable hints when scoring a submission, uncomment the next line:
      # send_hints(tubesock, StructuredError.where(submission: @submission))
    rescue Runner::Error::ExecutionTimeout => e

View File

@@ -4,7 +4,6 @@ class Submission < ApplicationRecord
  include Context
  include Creation
  include ActionCableHelper
  include SubmissionScoring

  require 'concurrent/future'
@@ -140,9 +139,9 @@ class Submission < ApplicationRecord
  end

  def calculate_score
    score = nil
    file_scores = nil
    prepared_runner do |runner, waiting_duration|
      scores = collect_files.select(&:teacher_defined_assessment?).map do |file|
      file_scores = collect_files.select(&:teacher_defined_assessment?).map do |file|
        score_command = command_for execution_environment.test_command, file.name_with_extension
        stdout = +''
        stderr = +''
@@ -167,11 +166,10 @@ class Submission < ApplicationRecord
          stdout: stdout,
          stderr: stderr,
        }
        test_result(output, file)
        score_file(output, file)
      end
      score = score_submission(scores)
    end
    JSON.dump(score)
    combine_file_scores(file_scores)
  end

  def run(file, &block)
@@ -214,4 +212,84 @@
      module_name: File.basename(filename, File.extname(filename)).underscore,
    }
  end

  private

  def score_file(output, file)
    # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: id } do
    # Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: id } do
    assessor = Assessor.new(execution_environment: execution_environment)
    assessment = assessor.assess(output)
    passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
    testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
    if testrun_output.present?
      execution_environment.error_templates.each do |template|
        pattern = Regexp.new(template.signature).freeze
        StructuredError.create_from_template(template, testrun_output, self) if pattern.match(testrun_output)
      end
    end
    testrun = Testrun.create(
      submission: self,
      cause: 'assess', # Required to distinguish between run and assess for the RfC show page
      file: file, # Test file that was executed
      passed: passed,
      output: testrun_output,
      container_execution_time: output[:container_execution_time],
      waiting_for_container_time: output[:waiting_for_container_time]
    )
    filename = file.name_with_extension
    if file.teacher_defined_linter?
      LinterCheckRun.create_from(testrun, assessment)
      assessment = assessor.translate_linter(assessment, I18n.locale)
      # Replace the file name with a hint if the linter is not used for grading. Refactor!
      filename = 'exercises.implement.not_graded' if file.weight.zero?
    end
    output.merge!(assessment)
    output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
  end

  def feedback_message(file, output)
    if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
      'exercises.implement.default_test_feedback'
    elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
      'exercises.implement.default_linter_feedback'
    else
      file.feedback_message
    end
  end

  def combine_file_scores(outputs)
    score = 0.0
    if outputs.present?
      outputs.each do |output|
        score += output[:score] * output[:weight] unless output.nil?
      end
    end
    update(score: score)
    if normalized_score.to_d == 1.0.to_d
      Thread.new do
        RequestForComment.where(exercise_id: exercise_id, user_id: user_id, user_type: user_type).find_each do |rfc|
          rfc.full_score_reached = true
          rfc.save
        end
      ensure
        ActiveRecord::Base.connection_pool.release_connection
      end
    end
    if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
      outputs.each do |output|
        output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
      end
    end
    # Return all test results except for those of a linter if not allowed
    show_linter = Python20CourseWeek.show_linter? exercise
    outputs&.reject do |output|
      next if show_linter || output.blank?

      output[:file_role] == 'teacher_defined_linter'
    end
  end
end
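
For orientation, a hedged sketch of the data flowing through these methods (field names follow score_file above; the concrete values are made up):

# One element of the outputs array produced by score_file, roughly:
#   {
#     passed: 2, count: 2, score: 1.0, weight: 1.0,
#     filename: 'exercise_spec.rb',
#     message: 'exercises.implement.default_test_feedback', # I18n key or markdown
#     status: :ok, stdout: '', stderr: '',
#   }
# combine_file_scores sums score * weight over all files, persists the
# total via update(score: score), and returns the outputs (minus linter
# results when these are hidden) for the controller to format.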

View File

@@ -0,0 +1,54 @@
# frozen_string_literal: true

require 'rails_helper'

class Controller < AnonymousController
  include ScoringResultFormatting
end

describe ScoringResultFormatting do
  let(:controller) { Controller.new }
  let(:filename) { 'exercise.py' }
  let(:feedback_message) { '**good work**' }
  let(:outputs) { [{filename: filename, message: feedback_message}] }

  describe 'feedback message' do
    let(:new_feedback_message) { controller.format_scoring_results(outputs).first[:message] }

    context 'when the feedback message is not a path to a locale' do
      let(:feedback_message) { '**good work**' }

      it 'renders the feedback message as markdown' do
        expect(new_feedback_message).to match('<p><strong>good work</strong></p>')
      end
    end

    context 'when the feedback message is a valid path to a locale' do
      let(:feedback_message) { 'exercises.implement.default_test_feedback' }

      it 'replaces the feedback message with the locale' do
        expect(new_feedback_message).to eq(I18n.t(feedback_message))
      end
    end
  end

  describe 'filename' do
    let(:new_filename) { controller.format_scoring_results(outputs).first[:filename] }

    context 'when the filename is not a path to a locale' do
      let(:filename) { 'exercise.py' }

      it 'does not alter the filename' do
        expect(new_filename).to eq(filename)
      end
    end

    context 'when the filename is a valid path to a locale' do
      let(:filename) { 'exercises.implement.not_graded' }

      it 'replaces the filename with the locale' do
        expect(new_filename).to eq(I18n.t(filename))
      end
    end
  end
end

View File

@@ -1,38 +0,0 @@
# frozen_string_literal: true

require 'rails_helper'

describe SubmissionScoring do
  let(:submission) { FactoryBot.create(:submission, cause: 'submit') }

  describe '#collect_test_results' do
    let(:runner) { FactoryBot.create :runner }

    before do
      allow(Runner).to receive(:for).and_return(runner)
      allow(runner).to receive(:copy_files)
      allow(runner).to receive(:attach_to_execution).and_return(1.0)
    end

    after { submission.calculate_score }

    it 'executes every teacher-defined test file' do
      allow(submission).to receive(:score_submission)
      submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
        allow(submission).to receive(:test_result).with(any_args, file).and_return({})
      end
    end

    it 'scores the submission' do
      allow(submission).to receive(:score_submission).and_return([])
    end
  end

  describe '#score_submission', cleaning_strategy: :truncation do
    after { submission.score_submission([]) }

    it 'assigns a score to the submissions' do
      expect(submission).to receive(:update).with(score: anything)
    end
  end
end

View File

@@ -242,7 +242,7 @@ describe ExercisesController do
    let(:user) { FactoryBot.create(:external_user) }
    let(:scoring_response) do
      [{
        status: 'ok',
        status: :ok,
        stdout: '',
        stderr: '',
        waiting_for_container_time: 0,
@@ -263,7 +263,7 @@ describe ExercisesController do
      FactoryBot.create(:lti_parameter, external_user: user, exercise: exercise)
      submission = FactoryBot.build(:submission, exercise: exercise, user: user)
      allow(submission).to receive(:normalized_score).and_return(1)
      allow(submission).to receive(:calculate_score).and_return(JSON.dump(scoring_response))
      allow(submission).to receive(:calculate_score).and_return(scoring_response)
      allow(Submission).to receive(:create).and_return(submission)

View File

@@ -6,7 +6,7 @@ describe 'Editor', js: true do
  let(:exercise) { FactoryBot.create(:audio_video, description: Forgery(:lorem_ipsum).sentence) }
  let(:scoring_response) do
    [{
      status: 'ok',
      status: :ok,
      stdout: '',
      stderr: '',
      waiting_for_container_time: 0,
@@ -95,7 +95,7 @@ describe 'Editor', js: true do

  it 'contains a button for submitting the exercise' do
    submission = FactoryBot.build(:submission, user: user, exercise: exercise)
    allow(submission).to receive(:calculate_score).and_return(JSON.dump(scoring_response))
    allow(submission).to receive(:calculate_score).and_return(scoring_response)
    allow(Submission).to receive(:find).and_return(submission)
    click_button(I18n.t('exercises.editor.score'))
    expect(page).not_to have_css('#submit_outdated')

View File

@@ -141,4 +141,35 @@ describe Submission do
      end
    end
  end

  describe '#calculate_score' do
    let(:runner) { FactoryBot.create :runner }

    before do
      allow(Runner).to receive(:for).and_return(runner)
      allow(runner).to receive(:copy_files)
      allow(runner).to receive(:attach_to_execution).and_return(1.0)
    end

    after { submission.calculate_score }

    it 'executes every teacher-defined test file' do
      allow(submission).to receive(:combine_file_scores)
      submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
        expect(submission).to receive(:score_file).with(any_args, file)
      end
    end

    it 'scores the submission' do
      expect(submission).to receive(:combine_file_scores)
    end
  end

  describe '#combine_file_scores' do
    after { submission.send(:combine_file_scores, []) }

    it 'assigns a score to the submissions' do
      expect(submission).to receive(:update).with(score: anything)
    end
  end
end