diff --git a/Gemfile b/Gemfile
index 073f6d8a..c75764ec 100644
--- a/Gemfile
+++ b/Gemfile
@@ -16,8 +16,8 @@ gem 'highline'
 gem 'i18n-js'
 gem 'ims-lti', '< 2.0.0'
 gem 'jbuilder'
-gem 'js-routes'
 gem 'json_schemer'
+gem 'js-routes'
 gem 'kramdown'
 gem 'mimemagic'
 gem 'nokogiri'
diff --git a/app/controllers/concerns/scoring_result_formatting.rb b/app/controllers/concerns/scoring_result_formatting.rb
deleted file mode 100644
index 27096413..00000000
--- a/app/controllers/concerns/scoring_result_formatting.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-module ScoringResultFormatting
-  def format_scoring_results(outputs)
-    outputs.map do |output|
-      output[:message] = t(output[:message], default: render_markdown(output[:message]))
-      output[:filename] = t(output[:filename], default: output[:filename])
-      output
-    end
-  end
-end
diff --git a/app/controllers/remote_evaluation_controller.rb b/app/controllers/remote_evaluation_controller.rb
index 2f23eacb..96db6c8b 100644
--- a/app/controllers/remote_evaluation_controller.rb
+++ b/app/controllers/remote_evaluation_controller.rb
@@ -3,7 +3,6 @@
 class RemoteEvaluationController < ApplicationController
   include RemoteEvaluationParameters
   include Lti
-  include ScoringResultFormatting
 
   skip_after_action :verify_authorized
   skip_before_action :verify_authenticity_token
@@ -63,7 +62,7 @@ status: 202}
     validation_token = remote_evaluation_params[:validation_token]
    if (remote_evaluation_mapping = RemoteEvaluationMapping.find_by(validation_token: validation_token))
       @submission = Submission.create(build_submission_params(cause, remote_evaluation_mapping))
-      format_scoring_results(@submission.calculate_score)
+      @submission.calculate_score
     else
       # TODO: better output
       # TODO: check token expired?
diff --git a/app/controllers/request_for_comments_controller.rb b/app/controllers/request_for_comments_controller.rb
index 2fd4046e..4592d57d 100644
--- a/app/controllers/request_for_comments_controller.rb
+++ b/app/controllers/request_for_comments_controller.rb
@@ -119,7 +119,7 @@ class RequestForCommentsController < ApplicationController
     if @request_for_comment.save
       # create thread here and execute tests. A run is triggered from the frontend and does not need to be handled here.
       Thread.new do
-        @request_for_comment.submission.calculate_score
+        switch_locale { @request_for_comment.submission.calculate_score }
       ensure
         ActiveRecord::Base.connection_pool.release_connection
       end
diff --git a/app/controllers/submissions_controller.rb b/app/controllers/submissions_controller.rb
index cf82f546..97f9cd3c 100644
--- a/app/controllers/submissions_controller.rb
+++ b/app/controllers/submissions_controller.rb
@@ -5,7 +5,6 @@ class SubmissionsController < ApplicationController
   include CommonBehavior
   include Lti
   include SubmissionParameters
-  include ScoringResultFormatting
   include Tubesock::Hijack
 
   before_action :set_submission,
@@ -35,15 +34,6 @@ class SubmissionsController < ApplicationController
     create_and_respond(object: @submission)
   end
 
-  def command_substitutions(filename)
-    {
-      class_name: File.basename(filename, File.extname(filename)).upcase_first,
-      filename: filename,
-      module_name: File.basename(filename, File.extname(filename)).underscore,
-    }
-  end
-  private :command_substitutions
-
   def copy_comments
     # copy each annotation and set the target_file.id
     params[:annotations_arr]&.each do |annotation|
@@ -247,7 +237,7 @@ class SubmissionsController < ApplicationController
     hijack do |tubesock|
       return kill_socket(tubesock) if @embed_options[:disable_run]
 
-      tubesock.send_data(JSON.dump(format_scoring_results(@submission.calculate_score)))
+      tubesock.send_data(JSON.dump(@submission.calculate_score))
       # To enable hints when scoring a submission, uncomment the next line:
       # send_hints(tubesock, StructuredError.where(submission: @submission))
     rescue Runner::Error => e
diff --git a/app/models/submission.rb b/app/models/submission.rb
index 4916bc49..532a0c0c 100644
--- a/app/models/submission.rb
+++ b/app/models/submission.rb
@@ -163,10 +163,10 @@ class Submission < ApplicationRecord
     end
     output.merge!(container_execution_time: execution_time, status: exit_code.zero? ? :ok : :failed)
   rescue Runner::Error::ExecutionTimeout => e
-    Rails.logger.debug("Running tests in #{file.name_with_extension} for submission #{id} timed out: #{e.message}")
+    Rails.logger.debug { "Running tests in #{file.name_with_extension} for submission #{id} timed out: #{e.message}" }
     output.merge!(status: :timeout, container_execution_time: e.execution_duration)
   rescue Runner::Error => e
-    Rails.logger.debug("Running tests in #{file.name_with_extension} for submission #{id} failed: #{e.message}")
+    Rails.logger.debug { "Running tests in #{file.name_with_extension} for submission #{id} failed: #{e.message}" }
     output.merge!(status: :failed, container_execution_time: e.execution_duration)
   ensure
     output.merge!(stdout: stdout, stderr: stderr)
@@ -212,7 +212,7 @@ class Submission < ApplicationRecord
 
   def command_substitutions(filename)
     {
-      class_name: File.basename(filename, File.extname(filename)).camelize,
+      class_name: File.basename(filename, File.extname(filename)).upcase_first,
       filename: filename,
       module_name: File.basename(filename, File.extname(filename)).underscore,
     }
@@ -245,10 +245,10 @@ class Submission < ApplicationRecord
 
     if file.teacher_defined_linter?
       LinterCheckRun.create_from(testrun, assessment)
-      assessment = assessor.translate_linter(assessment, session[:locale])
+      assessment = assessor.translate_linter(assessment, I18n.locale)
 
       # replace file name with hint if linter is not used for grading. Refactor!
-      filename = 'exercises.implement.not_graded' if file.weight.zero?
+      filename = I18n.t('exercises.implement.not_graded') if file.weight.zero?
     end
 
     output.merge!(assessment)
@@ -257,11 +257,16 @@ class Submission < ApplicationRecord
 
   def feedback_message(file, output)
     if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
-      'exercises.implement.default_test_feedback'
+      I18n.t('exercises.implement.default_test_feedback')
     elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
-      'exercises.implement.default_linter_feedback'
+      I18n.t('exercises.implement.default_linter_feedback')
     else
-      file.feedback_message
+      # The render_markdown method from application_helper.rb is not available in model classes.
+      ActionController::Base.helpers.sanitize(
+        Kramdown::Document.new(file.feedback_message).to_html,
+        tags: %w[strong],
+        attributes: []
+      )
     end
   end
 
diff --git a/lib/runner/connection.rb b/lib/runner/connection.rb
index 5c205be9..bc5d55e8 100644
--- a/lib/runner/connection.rb
+++ b/lib/runner/connection.rb
@@ -37,7 +37,7 @@ class Runner::Connection
 
   def send(raw_data)
     encoded_message = encode(raw_data)
-    Rails.logger.debug("#{Time.zone.now.getutc}: Sending to #{@socket.url}: #{encoded_message.inspect}")
+    Rails.logger.debug { "#{Time.zone.now.getutc}: Sending to #{@socket.url}: #{encoded_message.inspect}" }
     @socket.send(encoded_message)
   end
 
@@ -52,7 +52,7 @@ class Runner::Connection
   end
 
   def on_message(raw_event)
-    Rails.logger.debug("#{Time.zone.now.getutc}: Receiving from #{@socket.url}: #{raw_event.data.inspect}")
+    Rails.logger.debug { "#{Time.zone.now.getutc}: Receiving from #{@socket.url}: #{raw_event.data.inspect}" }
     event = decode(raw_event)
     return unless BACKEND_OUTPUT_SCHEMA.valid?(event)
 
@@ -72,7 +72,7 @@ class Runner::Connection
   def on_error(_event); end
 
   def on_close(_event)
-    Rails.logger.debug("#{Time.zone.now.getutc}: Closing connection to #{@socket.url} with status: #{@status}")
+    Rails.logger.debug { "#{Time.zone.now.getutc}: Closing connection to #{@socket.url} with status: #{@status}" }
     case @status
     when :timeout
       raise Runner::Error::ExecutionTimeout.new('Execution exceeded its time limit')
diff --git a/spec/concerns/scoring_result_formatting_spec.rb b/spec/concerns/scoring_result_formatting_spec.rb
deleted file mode 100644
index d66f9e3d..00000000
--- a/spec/concerns/scoring_result_formatting_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails_helper'
-
-class Controller < AnonymousController
-  include ScoringResultFormatting
-end
-
-describe ScoringResultFormatting do
-  let(:controller) { Controller.new }
-  let(:filename) { 'exercise.py' }
-  let(:feedback_message) { '**good work**' }
-  let(:outputs) { [{filename: filename, message: feedback_message}] }
-
-  describe 'feedback message' do
-    let(:new_feedback_message) { controller.format_scoring_results(outputs).first[:message] }
-
-    context 'when the feedback message is not a path to a locale' do
-      let(:feedback_message) { '**good work**' }
-
-      it 'renders the feedback message as markdown' do
-        expect(new_feedback_message).to match('<strong>good work</strong>')
-      end
-    end
-
-    context 'when the feedback message is a valid path to a locale' do
-      let(:feedback_message) { 'exercises.implement.default_test_feedback' }
-
-      it 'replaces the feedback message with the locale' do
-        expect(new_feedback_message).to eq(I18n.t(feedback_message))
-      end
-    end
-  end
-
-  describe 'filename' do
-    let(:new_filename) { controller.format_scoring_results(outputs).first[:filename] }
-
-    context 'when the filename is not a path to a locale' do
-      let(:filename) { 'exercise.py' }
-
-      it 'does not alter the filename' do
-        expect(new_filename).to eq(filename)
-      end
-    end
-
-    context 'when the filename is a valid path to a locale' do
-      let(:filename) { 'exercises.implement.not_graded' }
-
-      it 'replaces the filename with the locale' do
-        expect(new_filename).to eq(I18n.t(filename))
-      end
-    end
-  end
-end
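Note on the RequestForCommentsController change above: I18n keeps its runtime configuration per thread, so the scoring thread spawned in #create would otherwise fall back to the default locale instead of the request locale; wrapping the call in the controller's switch_locale helper applies the locale explicitly. A minimal sketch of that behaviour using only the i18n gem (the locales are example values, and I18n.with_locale stands in for whatever switch_locale delegates to in this app):

require 'i18n'

# I18n stores its config in Thread.current, so a freshly spawned thread
# starts with the default locale rather than the one set for the request.
I18n.available_locales = %i[en de]
I18n.default_locale = :en
I18n.locale = :de # pretend this is the locale of the current request

Thread.new { puts I18n.locale }.join                           # => en (request locale is lost)
Thread.new { I18n.with_locale(:de) { puts I18n.locale } }.join # => de (locale applied explicitly)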
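Note on the Submission#feedback_message change above: the render_markdown helper used by the removed ScoringResultFormatting concern is only available to controllers and views, so the Markdown feedback is now rendered and sanitized directly in the model via Kramdown and the Rails sanitizer. A standalone sketch of that step, assuming the kramdown gem and a full Rails (actionpack/actionview) install as in this app's Gemfile; the feedback string is only an example:

require 'kramdown'
require 'action_controller'

feedback = '**good work**' # example feedback message, not taken from a real exercise

# Kramdown renders the Markdown to HTML ("<p><strong>good work</strong></p>");
# the sanitizer then strips every tag except <strong> and all attributes.
safe_html = ActionController::Base.helpers.sanitize(
  Kramdown::Document.new(feedback).to_html,
  tags: %w[strong],
  attributes: []
)

puts safe_html # prints something like "<strong>good work</strong>"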