Reimplement scoring and create connection abstraction
Co-authored-by: Felix Auringer <felix.auringer@student.hpi.uni-potsdam.de>

committed by Sebastian Serth
parent 1546f70818
commit 92b249e7b3
@@ -1,57 +1,48 @@
 # frozen_string_literal: true
 
-require 'concurrent/future'
-
 module SubmissionScoring
-  def collect_test_results(submission)
-    # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: submission.id } do
-    futures = submission.collect_files.select(&:teacher_defined_assessment?).map do |file|
-      Concurrent::Future.execute do
-        # Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
-        assessor = Assessor.new(execution_environment: submission.execution_environment)
-        output = execute_test_file(file, submission)
-        assessment = assessor.assess(output)
-        passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
-        testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
-        if testrun_output.present?
-          submission.exercise.execution_environment.error_templates.each do |template|
-            pattern = Regexp.new(template.signature).freeze
-            StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
-          end
-        end
-        testrun = Testrun.create(
-          submission: submission,
-          cause: 'assess', # Required to differ run and assess for RfC show
-          file: file, # Test file that was executed
-          passed: passed,
-          output: testrun_output,
-          container_execution_time: output[:container_execution_time],
-          waiting_for_container_time: output[:waiting_for_container_time]
-        )
-
-        filename = file.name_with_extension
-
-        if file.teacher_defined_linter?
-          LinterCheckRun.create_from(testrun, assessment)
-          switch_locale do
-            assessment = assessor.translate_linter(assessment, I18n.locale)
-
-            # replace file name with hint if linter is not used for grading. Refactor!
-            filename = t('exercises.implement.not_graded') if file.weight.zero?
-          end
-        end
-
-        output.merge!(assessment)
-        output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
-        # end
-      end
-    end
-    futures.map(&:value!)
-  end
-  private :collect_test_results
+  def test_result(output, file)
+    submission = self
+    # Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
+    assessor = Assessor.new(execution_environment: submission.execution_environment)
+    assessment = assessor.assess(output)
+    passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
+    testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
+    if testrun_output.present?
+      submission.exercise.execution_environment.error_templates.each do |template|
+        pattern = Regexp.new(template.signature).freeze
+        StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
+      end
+    end
+    testrun = Testrun.create(
+      submission: submission,
+      cause: 'assess', # Required to differ run and assess for RfC show
+      file: file, # Test file that was executed
+      passed: passed,
+      output: testrun_output,
+      container_execution_time: output[:container_execution_time],
+      waiting_for_container_time: output[:waiting_for_container_time]
+    )
+
+    filename = file.name_with_extension
+
+    if file.teacher_defined_linter?
+      LinterCheckRun.create_from(testrun, assessment)
+      assessment = assessor.translate_linter(assessment, I18n.locale)
+
+      # replace file name with hint if linter is not used for grading. Refactor!
+      filename = t('exercises.implement.not_graded') if file.weight.zero?
+    end
+
+    output.merge!(assessment)
+    output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
+  end
 
   def execute_test_file(file, submission)
     # TODO: replace DockerClient here
     DockerClient.new(execution_environment: file.context.execution_environment).execute_test_command(submission,
                                                                                                      file.name_with_extension)
   end
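Note: this hunk inverts the control flow of scoring. The old `collect_test_results(submission)` ran every assessment file itself, fanning out one `Concurrent::Future` per file and calling `execute_test_file` internally, which is why `require 'concurrent/future'` and the future fan-out disappear. The new `test_result(output, file)` only assesses an `output` hash the caller already obtained from the runner, and `submission = self` suggests the module is now mixed into the Submission model. A minimal sketch of how a caller might drive the per-file API, assuming a hypothetical `run_test` helper on top of the new connection abstraction (neither the helper nor the surrounding loop appears in this diff):

    # Sketch only: `collect_outputs` and `run_test` are illustrative names,
    # not part of this commit.
    def collect_outputs(submission)
      submission.collect_files.select(&:teacher_defined_assessment?).map do |file|
        output = run_test(file, submission)  # runner result: status, stdout, stderr, timings
        submission.test_result(output, file) # assess, persist a Testrun, return the merged hash
      end
    end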
@@ -59,19 +50,19 @@ module SubmissionScoring
   private :execute_test_file
 
   def feedback_message(file, output)
-    switch_locale do
-      if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
-        I18n.t('exercises.implement.default_test_feedback')
-      elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
-        I18n.t('exercises.implement.default_linter_feedback')
-      else
-        render_markdown(file.feedback_message)
-      end
-    end
+    # set_locale
+    if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
+      I18n.t('exercises.implement.default_test_feedback')
+    elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
+      I18n.t('exercises.implement.default_linter_feedback')
+    else
+      # render_markdown(file.feedback_message)
+    end
   end
 
-  def score_submission(submission)
-    outputs = collect_test_results(submission)
+  def score_submission(outputs)
+    # outputs = collect_test_results(submission)
+    submission = self
     score = 0.0
     if outputs.present?
       outputs.each do |output|
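Note: `score_submission` follows the same inversion: it now receives the already collected `outputs` and takes the submission from `self` instead of calling `collect_test_results` on its own. The aggregation loop after `outputs.each` lies beyond this hunk, so only the presumed call sequence is sketched here, continuing the example above:

    # Sketch only: builds on the hypothetical collect_outputs helper above.
    outputs = collect_outputs(submission)
    submission.score_submission(outputs) # starts from score = 0.0; empty outputs leave it unchanged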