Merge branch 'master' of github.com:openHPI/codeocean

Felix Wolff committed on 2015-07-16 15:10:59 +02:00
46 changed files with 569 additions and 133 deletions

View File

@ -7,7 +7,7 @@ gem 'carrierwave'
gem 'coffee-rails', '~> 4.0.0'
gem 'concurrent-ruby'
gem 'concurrent-ruby-ext', platform: :ruby
gem 'docker-api', require: 'docker'
gem 'docker-api','~> 1.21.1', require: 'docker'
gem 'factory_girl_rails', '~> 4.0'
gem 'forgery'
gem 'highline'
@ -16,6 +16,7 @@ gem 'jquery-rails'
gem 'jquery-turbolinks'
gem 'ims-lti'
gem 'kramdown'
gem 'newrelic_rpm'
gem 'pg', platform: :ruby
gem 'pry'
gem 'puma'
@ -59,7 +60,7 @@ group :test do
gem 'nyan-cat-formatter'
gem 'rake'
gem 'rspec-autotest'
gem 'rspec-rails', '~> 3.1.0'
gem 'rspec-rails'
gem 'selenium-webdriver'
gem 'simplecov', require: false
end

View File

@ -50,7 +50,7 @@ GEM
bootstrap-will_paginate (0.0.10)
will_paginate
builder (3.2.2)
byebug (4.0.3)
byebug (4.0.5)
columnize (= 0.9.0)
capistrano (3.3.5)
capistrano-stats (~> 1.1.0)
@ -67,7 +67,7 @@ GEM
capistrano (~> 3.0)
sshkit (~> 1.2)
capistrano-stats (1.1.1)
capistrano-upload-config (0.5.0)
capistrano-upload-config (0.6.0)
capistrano (>= 3.0)
capistrano3-puma (0.9.0)
capistrano (~> 3.0)
@ -83,7 +83,7 @@ GEM
activesupport (>= 3.2.0)
json (>= 1.7)
mime-types (>= 1.16)
childprocess (0.5.5)
childprocess (0.5.6)
ffi (~> 1.0, >= 1.0.11)
codeclimate-test-reporter (0.4.7)
simplecov (>= 0.7.1, < 1.0.0)
@ -91,11 +91,11 @@ GEM
coffee-rails (4.0.1)
coffee-script (>= 2.2.0)
railties (>= 4.0.0, < 5.0)
coffee-script (2.3.0)
coffee-script (2.4.1)
coffee-script-source
execjs
coffee-script-source (1.9.1)
colorize (0.7.5)
colorize (0.7.7)
columnize (0.9.0)
concurrent-ruby (0.8.0)
ref (~> 1.0, >= 1.0.5)
@ -106,12 +106,12 @@ GEM
debug_inspector (0.0.2)
diff-lcs (1.2.5)
docile (1.1.5)
docker-api (1.20.0)
docker-api (1.21.1)
excon (>= 0.38.0)
json
erubis (2.7.0)
excon (0.45.0)
execjs (2.4.0)
excon (0.45.2)
execjs (2.5.2)
factory_girl (4.5.0)
activesupport (>= 3.0.0)
factory_girl_rails (4.5.0)
@ -128,7 +128,7 @@ GEM
ims-lti (1.1.8)
builder
oauth (~> 0.4.5)
jbuilder (2.2.12)
jbuilder (2.2.13)
activesupport (>= 3.0.0, < 5)
multi_json (~> 1.2)
jdbc-postgres (9.4.1200)
@ -154,6 +154,7 @@ GEM
net-scp (1.2.1)
net-ssh (>= 2.6.5)
net-ssh (2.9.2)
newrelic_rpm (3.11.2.286)
nokogiri (1.6.6.2)
mini_portile (~> 0.6.0)
nokogiri (1.6.6.2-java)
@ -169,7 +170,7 @@ GEM
parser (2.2.0.3)
ast (>= 1.1, < 3.0)
pg (0.18.1)
polyamorous (1.1.0)
polyamorous (1.2.0)
activerecord (>= 3.0)
powerpack (0.1.0)
pry (0.10.1)
@ -210,12 +211,12 @@ GEM
thor (>= 0.18.1, < 2.0)
rainbow (2.0.0)
rake (10.4.2)
ransack (1.6.4)
ransack (1.6.6)
actionpack (>= 3.0)
activerecord (>= 3.0)
activesupport (>= 3.0)
i18n
polyamorous (~> 1.1)
polyamorous (~> 1.2)
rdoc (4.2.0)
ref (1.0.5)
rspec (3.1.0)
@ -240,7 +241,7 @@ GEM
rspec-mocks (~> 3.1.0)
rspec-support (~> 3.1.0)
rspec-support (3.1.2)
rubocop (0.29.1)
rubocop (0.30.0)
astrolabe (~> 1.3)
parser (>= 2.2.0.1, < 3.0)
powerpack (~> 0.1)
@ -275,13 +276,13 @@ GEM
temple (~> 0.7.3)
tilt (>= 1.3.3, < 2.1)
slop (3.6.0)
sorcery (0.9.0)
sorcery (0.9.1)
bcrypt (~> 3.1)
oauth (~> 0.4, >= 0.4.4)
oauth2 (>= 0.8.0)
spoon (0.0.4)
ffi
spring (1.3.3)
spring (1.3.4)
sprockets (2.12.3)
hike (~> 1.2)
multi_json (~> 1.0)
@ -342,7 +343,7 @@ DEPENDENCIES
concurrent-ruby
concurrent-ruby-ext
database_cleaner
docker-api
docker-api (~> 1.21.1)
factory_girl_rails (~> 4.0)
forgery
highline
@ -351,6 +352,7 @@ DEPENDENCIES
jquery-rails
jquery-turbolinks
kramdown
newrelic_rpm
nyan-cat-formatter
pg
pry
@ -361,7 +363,7 @@ DEPENDENCIES
rake
ransack
rspec-autotest
rspec-rails (~> 3.1.0)
rspec-rails
rubocop
rubocop-rspec
rubytree

View File

@ -12,14 +12,14 @@ Install qt - https://github.com/thoughtbot/capybara-webkit/wiki/Installing-Qt-an
Install and setup
Create a local codeOceanRoot: mkdir /path/to/CodeOcean
==> codeOceanRoot = /path/to/CodeOcean
Clone Hauke's Repository to codeOceanRoot - https://github.com/openHPI/codeocean (old: https://dev.xikolo.de/gitlab/hklement/xikolo-hands-on-programming, Valid https://dev.xikolo.de/gitlab user required)
Clone the repository to codeOceanRoot - https://github.com/openHPI/codeocean
==> repoPath = codeOceanRoot/xikolo-hands-on-programming
cd repoPath/config
duplicate .example config files (remove .example from filename)
action_mailer.yml, database.yml, secrets.yml, sendmail.yml, smtp.yml
add your local dbuser credentials to database.yml
Linux users may need to add a "host" and a "port" parameter
Copy vagrant files from https://github.com/hklement/vagrant-docker to codeOceanRoot
Copy vagrant files from https://github.com/hklement/vagrant-docker to codeOceanRoot or use boot2docker
==> vagrantPath = codeOceanRoot/vagrant-docker-master
cd vagrantPath
open Vagrantfile in text editor of choice
@ -28,15 +28,15 @@ Execute: vagrant box add ubuntu/trusty64
Execute: vagrant up
Install docker environments
export DOCKER_HOST=tcp://192.168.23.75:2375
docker pull hklement/ubuntu-coffee
docker pull hklement/ubuntu-java
docker pull hklement/ubuntu-sqlite
docker pull hklement/ubuntu-sinatra
docker pull hklement/ubuntu-ruby
docker pull hklement/ubuntu-python
docker pull hklement/ubuntu-node
docker pull hklement/ubuntu-html
docker pull hklement/ubuntu-jruby
docker pull jprberlin/ubuntu-coffee
docker pull jprberlin/ubuntu-java
docker pull jprberlin/ubuntu-sqlite
docker pull jprberlin/ubuntu-sinatra
docker pull jprberlin/ubuntu-ruby
docker pull jprberlin/ubuntu-python
docker pull jprberlin/ubuntu-node
docker pull jprberlin/ubuntu-html
docker pull jprberlin/ubuntu-jruby
cd repoPath
bundle install
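The same image setup can be scripted through the docker-api gem pinned above (~> 1.21.1); a minimal sketch, assuming the DOCKER_HOST exported above and the jprberlin images listed in this section:

require 'docker' # the docker-api gem is required as 'docker'

Docker.url = 'tcp://192.168.23.75:2375' # assumption: matches the DOCKER_HOST exported above

%w(coffee java sqlite sinatra ruby python node html jruby).each do |environment|
  # equivalent to `docker pull jprberlin/ubuntu-<environment>`
  Docker::Image.create('fromImage' => "jprberlin/ubuntu-#{environment}")
end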

View File

@ -25,7 +25,7 @@ In order to execute code submissions using Docker, source code files are written
- create *config/sendmail.yml*
- create *config/smtp.yml*
- if Docker is not supported by your OS, set up a local Docker server, for instance using [vagrant-docker](https://github.com/hklement/vagrant-docker)
- use boot2docker if there is no native support for docker on your OS
- create seed data by executing `rake db:seed`
## Production Setup
@ -34,3 +34,23 @@ In order to execute code submissions using Docker, source code files are written
- customize *config/deploy/production.rb* if you want to deploy using [Capistrano](http://capistranorb.com/)
The application is compatible with MRI and JRuby. Due to superior parallelism, we recommend using JRuby.
## Useful service maintenance commands
- delete all containers (including running ones): `docker rm -f $(docker ps -aq)`
- if the application runs as a service, restart it with `service codeocean restart`
- `/etc/init.d/postgresql restart`
- if deployed via Capistrano, you will find the logs at `/var/www/app/shared/log/production.log`
## Roadmap
1.1
[ ] WebSocket Support
[ ] Interactive Exercises
[ ] Allow Disabling of File Creation
[ ] Set Container Recycling per Environment

View File

@ -17,6 +17,7 @@ $(function() {
var active_frame = undefined;
var running = false;
var qa_api = undefined;
var output_mode_is_streaming = true;
var flowrResultHtml = '<div class="panel panel-default"><div id="{{headingId}}" role="tab" class="panel-heading"><h4 class="panel-title"><a data-toggle="collapse" data-parent="#flowrHint" href="#{{collapseId}}" aria-expanded="true" aria-controls="{{collapseId}}"></a></h4></div><div id="{{collapseId}}" role="tabpanel" aria-labelledby="{{headingId}}" class="panel-collapse collapse"><div class="panel-body"></div></div></div>'
@ -615,6 +616,10 @@ $(function() {
var printOutput = function(output, colorize, index) {
var element = findOrCreateOutputElement(index);
// disable streaming if desired
//if (output.stdout && output.stdout.length >= 20 && output.stdout.substr(0,20) == "##DISABLESTREAMING##"){
// output_mode_is_streaming = false;
//}
if (!colorize) {
var stream = _.sortBy([output.stderr || '', output.stdout || ''], function(stream) {
return stream.length;
@ -623,7 +628,14 @@ $(function() {
} else if (output.stderr) {
element.addClass('text-warning').append(output.stderr);
} else if (output.stdout) {
//if (output_mode_is_streaming){
element.addClass('text-success').append(output.stdout);
//}else{
// element.addClass('text-success');
// element.data('content_buffer' , element.data('content_buffer') + output.stdout);
//}
//} else if (output.code && output.code == '200'){
// element.append( element.data('content_buffer'));
} else {
element.addClass('text-muted').text($('#output').data('message-no-output'));
}
@ -651,6 +663,11 @@ $(function() {
})) {
showTimeoutMessage();
}
if (_.some(response, function(result) {
return result.status === 'container_depleted';
})) {
showContainerDepletedMessage();
}
if (qa_api) {
// send test response to QA
qa_api.executeCommand('syncOutput', [response]);
@ -815,6 +832,8 @@ $(function() {
var showStatus = function(output) {
if (output.status === 'timeout') {
showTimeoutMessage();
} else if (output.status === 'container_depleted') {
showContainerDepletedMessage();
} else if (output.stderr) {
$.flash.danger({
icon: ['fa', 'fa-bug'],
@ -828,6 +847,13 @@ $(function() {
}
};
var showContainerDepletedMessage = function() {
$.flash.danger({
icon: ['fa', 'fa-clock-o'],
text: $('#editor').data('message-depleted')
});
};
var showTab = function(index) {
$('a[data-toggle="tab"]').eq(index || 0).tab('show');
};
@ -944,7 +970,7 @@ $(function() {
}
var initializeCodePilot = function() {
if ($('#questions-column').isPresent() && QaApi.isBrowserSupported()) {
if ($('#questions-column').isPresent() && (typeof QaApi != 'undefined') && QaApi.isBrowserSupported()) {
$('#editor-column').addClass('col-md-8').removeClass('col-md-10');
$('#questions-column').addClass('col-md-3');

View File

@ -1,5 +1,10 @@
h1 {
margin-bottom: 1em;
font-size: 25px;
margin-bottom: 0.5em;
}
.lead {
font-size: 16px;
}
i.fa {

View File

@ -10,6 +10,7 @@ class ApplicationController < ActionController::Base
rescue_from Pundit::NotAuthorizedError, with: :render_not_authorized
def current_user
::NewRelic::Agent.add_custom_parameters({ external_user_id: session[:external_user_id], session_user_id: session[:user_id] })
@current_user ||= ExternalUser.find_by(id: session[:external_user_id]) || login_from_session || login_from_other_sources
end

View File

@ -16,27 +16,31 @@ class CommentsController < ApplicationController
#if admin, show all comments.
# check whether the user is the author of the passed file_id; if so, show all comments. Otherwise, only show the author's comments and the user's own comments
file = CodeOcean::File.find(params[:file_id])
submission = Submission.find(file.context_id)
# there might be no submission yet, so don't use find
submission = Submission.find_by(id: file.context_id)
if submission
is_admin = false
if current_user.respond_to? :external_id
user_id = current_user.external_id
else
user_id = current_user.id
is_admin = current_user.role == 'admin'
end
is_admin = false
if current_user.respond_to? :external_id
user_id = current_user.external_id
if(is_admin || user_id == submission.user_id)
# fetch all comments for this file
@comments = Comment.where(file_id: params[:file_id])
else
@comments = Comment.where(file_id: params[:file_id], user_id: user_id)
end
#@comments = Comment.where(file_id: params[:file_id])
#add names to comments
@comments.map{|comment| comment.username = Xikolo::UserClient.get(comment.user_id.to_s)[:display_name]}
else
user_id = current_user.id
is_admin = current_user.role == 'admin'
@comments = Comment.all.limit(0) #we need an empty relation here
end
if(is_admin || user_id == submission.user_id)
# fetch all comments for this file
@comments = Comment.where(file_id: params[:file_id])
else
@comments = Comment.where(file_id: params[:file_id], user_id: user_id)
end
#@comments = Comment.where(file_id: params[:file_id])
#add names to comments
@comments.map{|comment| comment.username = Xikolo::UserClient.get(comment.user_id.to_s)[:display_name]}
authorize!
end
@ -60,7 +64,7 @@ class CommentsController < ApplicationController
# POST /comments
# POST /comments.json
def create
@comment = Comment.new(comment_params.merge(user_type: 'InternalUser'))
@comment = Comment.new(comment_params.merge(user_type: current_user.class.name))
respond_to do |format|
if @comment.save

View File

@ -95,6 +95,7 @@ module Lti
private :return_to_consumer
def send_score(score)
::NewRelic::Agent.add_custom_parameters({ score: score, session: session })
fail(Error, "Score #{score} must be between 0 and #{MAXIMUM_SCORE}!") unless (0..MAXIMUM_SCORE).include?(score)
provider = build_tool_provider(consumer: Consumer.find_by(id: session[:consumer_id]), parameters: session[:lti_parameters])
if provider.nil?

View File

@ -12,7 +12,11 @@ module SubmissionParameters
private :reject_illegal_file_attributes!
def submission_params
submission_params = params[:submission].permit(:cause, :exercise_id, files_attributes: file_attributes).merge(user_id: current_user.id, user_type: current_user.class.name)
if current_user
current_user_id = current_user.id
current_user_class_name = current_user.class.name
end
submission_params = params[:submission].permit(:cause, :exercise_id, files_attributes: file_attributes).merge(user_id: current_user_id, user_type: current_user_class_name)
reject_illegal_file_attributes!(submission_params)
submission_params
end

View File

@ -20,12 +20,14 @@ module SubmissionScoring
private :execute_test_file
def feedback_message(file, score)
set_locale
score == Assessor::MAXIMUM_SCORE ? I18n.t('exercises.implement.default_feedback') : file.feedback_message
end
def score_submission(submission)
outputs = collect_test_results(submission)
score = outputs.map { |output| output[:score] * output[:weight] }.reduce(:+)
score = outputs.map { |output|
output[:score] * output[:weight] }.reduce(:+)
submission.update(score: score)
outputs
end

View File

@ -6,7 +6,7 @@ class ExercisesController < ApplicationController
before_action :handle_file_uploads, only: [:create, :update]
before_action :set_execution_environments, only: [:create, :edit, :new, :update]
before_action :set_exercise, only: MEMBER_ACTIONS + [:clone, :implement, :run, :statistics, :submit]
before_action :set_exercise, only: MEMBER_ACTIONS + [:clone, :implement, :run, :statistics, :submit, :reload]
before_action :set_file_types, only: [:create, :edit, :new, :update]
before_action :set_teams, only: [:create, :edit, :new, :update]
@ -138,6 +138,10 @@ class ExercisesController < ApplicationController
def show
end
#we might want to think about auth here
def reload
end
def statistics
end
@ -152,6 +156,7 @@ class ExercisesController < ApplicationController
end
def transmit_lti_score
::NewRelic::Agent.add_custom_parameters({ submission: @submission.id, normalized_score: @submission.normalized_score })
response = send_score(@submission.normalized_score)
if response[:status] == 'success'
redirect_to_lti_return_path

View File

@ -20,7 +20,7 @@ class SubmissionsController < ApplicationController
def create
@submission = Submission.new(submission_params)
authorize!
copy_comments
#copy_comments
create_and_respond(object: @submission)
end
@ -28,7 +28,7 @@ class SubmissionsController < ApplicationController
# copy each annotation and set the target_file.id
unless(params[:annotations_arr].nil?)
params[:annotations_arr].each do | annotation |
comment = Comment.new(:user_id => annotation[1][:user_id], :file_id => annotation[1][:file_id], :user_type => 'InternalUser', :row => annotation[1][:row], :column => annotation[1][:column], :text => annotation[1][:text])
comment = Comment.new(:user_id => annotation[1][:user_id], :file_id => annotation[1][:file_id], :user_type => current_user.class.name, :row => annotation[1][:row], :column => annotation[1][:column], :text => annotation[1][:text])
source_file = CodeOcean::File.find(annotation[1][:file_id])
#comment = Comment.new(annotation[1].permit(:user_id, :file_id, :user_type, :row, :column, :text, :created_at, :updated_at))
@ -55,7 +55,7 @@ class SubmissionsController < ApplicationController
end
def index
@search = Submission.search(params[:q])
@search = Submission.last(100).search(params[:q])
@submissions = @search.result.includes(:exercise, :user).paginate(page: params[:page])
authorize!
end
@ -70,22 +70,18 @@ class SubmissionsController < ApplicationController
def run
with_server_sent_events do |server_sent_event|
container_info_sent = false
stderr = ''
output = @docker_client.execute_run_command(@submission, params[:filename]) do |stream, chunk|
unless container_info_sent
server_sent_event.write({id: @docker_client.container.try(:id), port_bindings: @docker_client.container.try(:port_bindings)}, event: 'info')
container_info_sent = true
end
server_sent_event.write({stream => chunk}, event: 'output')
stderr += chunk if stream == :stderr
end
server_sent_event.write(output, event: 'status')
if stderr.present?
if hint = Whistleblower.new(execution_environment: @submission.execution_environment).generate_hint(stderr)
output = @docker_client.execute_run_command(@submission, params[:filename])
server_sent_event.write({stdout: output[:stdout]}, event: 'output') if output[:stdout]
server_sent_event.write({stderr: output[:stderr]}, event: 'output') if output[:stderr]
server_sent_event.write({status: output[:status]}, event: 'status')
unless output[:stderr].nil?
if hint = Whistleblower.new(execution_environment: @submission.execution_environment).generate_hint(output[:stderr])
server_sent_event.write(hint, event: 'hint')
else
store_error(stderr)
store_error(output[:stderr])
end
end
end
@ -138,7 +134,7 @@ class SubmissionsController < ApplicationController
end
def store_error(stderr)
::Error.create(execution_environment_id: @submission.execution_environment.id, message: stderr)
::Error.create(submission_id: @submission.id, execution_environment_id: @submission.execution_environment.id, message: stderr)
end
private :store_error

View File

@ -13,6 +13,8 @@ module ExerciseHelper
if enabled
config.read[:code_pilot][:url]
else
return nil
end
end
end

View File

@ -23,7 +23,7 @@ class Exercise < ActiveRecord::Base
validates :token, presence: true, uniqueness: true
def average_percentage
(average_score / maximum_score * 100).round if average_score
(average_score/ maximum_score * 100).round if average_score
end
def average_score

View File

@ -38,11 +38,16 @@ class Submission < ActiveRecord::Base
end
def normalized_score
score / exercise.maximum_score if score
::NewRelic::Agent.add_custom_parameters({ unnormalized_score: score })
if !score.nil? && !exercise.maximum_score.nil? && (exercise.maximum_score > 0)
score / exercise.maximum_score
else
0
end
end
def percentage
(normalized_score * 100).round if score
(normalized_score * 100).round
end
[:score, :stop].each do |action|

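Illustrative values for the new guard, assuming an exercise with maximum_score 5.0: a score of 2.5 gives normalized_score 0.5 and percentage 50, while a nil score (or a maximum_score of 0) now yields 0 instead of nil, which is why percentage no longer needs its own score check:

submission.score = 2.5
submission.normalized_score # => 0.5
submission.percentage       # => 50
submission.score = nil
submission.normalized_score # => 0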
View File

@ -12,12 +12,12 @@ class ExercisePolicy < AdminOrAuthorPolicy
define_method(action) { admin? || author? || team_member? }
end
[:implement?, :submit?].each do |action|
[:implement?, :submit?, :reload?].each do |action|
define_method(action) { everyone }
end
def team_member?
@record.team.try(:members, []).include?(@user)
@record.team.try(:members, []).include?(@user) if @record.team
end
private :team_member?

View File

@ -1,10 +1,10 @@
#editor.row data-exercise-id=exercise.id data-message-timeout=t('exercises.editor.timeout', permitted_execution_time: @exercise.execution_environment.permitted_execution_time) data-errors-url=execution_environment_errors_path(exercise.execution_environment) data-submissions-url=submissions_path data-user-id=@current_user.id
#editor.row data-exercise-id=exercise.id data-message-depleted=t('exercises.editor.depleted') data-message-timeout=t('exercises.editor.timeout', permitted_execution_time: @exercise.execution_environment.permitted_execution_time) data-errors-url=execution_environment_errors_path(exercise.execution_environment) data-submissions-url=submissions_path data-user-id=@current_user.id
.col-sm-3 = render('editor_file_tree', files: @files)
#frames.col-sm-9
- @files.each do |file|
= render('editor_frame', exercise: exercise, file: file)
#editor-buttons.btn-group
= render('editor_button', data: {:'data-message-confirm' => t('exercises.editor.confirm_start_over'), :'data-url' => exercise_path(exercise)}, icon: 'fa fa-history', id: 'start-over', label: t('exercises.editor.start_over'))
= render('editor_button', data: {:'data-message-confirm' => t('exercises.editor.confirm_start_over'), :'data-url' => reload_exercise_path(exercise)}, icon: 'fa fa-history', id: 'start-over', label: t('exercises.editor.start_over'))
= render('editor_button', data: {:'data-message-success' => t('submissions.create.success'), :'data-placement' => 'top', :'data-tooltip' => true}, icon: 'fa fa-save', id: 'save', label: t('exercises.editor.save'), title: t('.tooltips.save'))
.btn-group
= render('editor_button', disabled: true, icon: 'fa fa-ban', id: 'dummy', label: t('exercises.editor.dummy'))

View File

@ -71,8 +71,11 @@
span.score
.progress
.progress-bar role='progressbar'
br
p.text-center = render('editor_button', classes: 'btn-lg btn-success', data: {:'data-message-confirm' => t('exercises.editor.confirm_submit'), :'data-url' => submit_exercise_path(@exercise)}, icon: 'fa fa-send', id: 'submit', label: t('exercises.editor.submit'))
- if session[:lti_parameters].try(:has_key?, 'lis_outcome_service_url')
p.text-center = render('editor_button', classes: 'btn-lg btn-success', data: {:'data-message-confirm' => t('exercises.editor.confirm_submit'), :'data-url' => submit_exercise_path(@exercise)}, icon: 'fa fa-send', id: 'submit', label: t('exercises.editor.submit'))
- if qa_url
#questions-column
#questions-holder data-url="#{qa_url}/qa/index/#{@exercise.id}/#{@user_id}"

View File

@ -9,7 +9,7 @@ html lang='en'
= stylesheet_link_tag('//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css')
= stylesheet_link_tag('application', media: 'all', 'data-turbolinks-track' => true)
= javascript_include_tag('application', 'data-turbolinks-track' => true)
= javascript_include_tag('//cdnjs.cloudflare.com/ajax/libs/underscore.js/1.8.2/underscore-min.js')
= javascript_include_tag('//cdnjs.cloudflare.com/ajax/libs/underscore.js/1.8.3/underscore-min.js')
= javascript_include_tag('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js')
= yield(:head)
= csrf_meta_tags

View File

@ -10,6 +10,6 @@ h2 = t('shared.statistics')
p == t('shared.out_of', maximum_value: @submission.exercise.maximum_score, value: @submission.score)
p = progress_bar(@submission.percentage)
= row(label: '.final_submissions', value: @submission.exercise.submissions.final.distinct.count(:user_id, :user_type) - 1)
= row(label: '.average_score') do
p == t('shared.out_of', maximum_value: @submission.exercise.maximum_score, value: @submission.exercise.average_score.round(2))
p = progress_bar(@submission.exercise.average_percentage)
/= row(label: '.average_score') do
/ p == t('shared.out_of', maximum_value: @submission.exercise.maximum_score, value: @submission.exercise.average_score.round(2))
/ p = progress_bar(@submission.exercise.average_percentage)

View File

@ -30,5 +30,12 @@ module CodeOcean
config.autoload_paths << Rails.root.join('lib')
config.eager_load_paths << Rails.root.join('lib')
case (RUBY_ENGINE)
when 'ruby'
# ...
when 'jruby'
# platform specific
java.lang.Class.for_name('javax.crypto.JceSecurity').get_declared_field('isRestricted').tap{|f| f.accessible = true; f.set nil, false}
end
end
end

View File

@ -4,7 +4,7 @@ set :default_env, 'PATH' => '/usr/java/jdk1.8.0_40/bin:$PATH'
set :deploy_to, '/var/www/app'
set :keep_releases, 3
set :linked_dirs, %w(bin log public/uploads tmp/cache tmp/files tmp/pids tmp/sockets)
set :linked_files, %w(config/action_mailer.yml config/code_ocean.yml config/database.yml config/secrets.yml config/sendmail.yml config/smtp.yml)
set :linked_files, %w(config/action_mailer.yml config/code_ocean.yml config/database.yml config/newrelic.yml config/secrets.yml config/sendmail.yml config/smtp.yml)
set :log_level, :info
set :puma_threads, [0, 16]
set :repo_url, 'git@github.com:openHPI/codeocean.git'

View File

@ -1 +1 @@
server 'codeocean.openhpi.de', roles: [:app, :db, :puma_nginx, :web], user: 'hklement'
server 'codeocean.openhpi.de', roles: [:app, :db, :puma_nginx, :web], user: 'codeocean'

View File

@ -6,11 +6,12 @@ default: &default
development:
<<: *default
host: tcp://192.168.23.75:2375
host: tcp://192.168.59.104:2376
workspace_root: <%= File.join('/', 'shared', Rails.env) %>
production:
<<: *default
host: unix:///var/run/docker.sock
pool:
active: true
refill:
@ -22,5 +23,5 @@ production:
test:
<<: *default
host: tcp://192.168.23.75:2375
host: tcp://192.168.59.104:2376
workspace_root: <%= File.join('/', 'shared', Rails.env) %>
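For context, the pool settings in this file are what DockerContainerPool consumes at runtime (see its changes further down in this commit); a minimal sketch of the lookup:

# mirrors DockerContainerPool.config below
pool_config = CodeOcean::Config.new(:docker).read(erb: true)[:pool]
pool_config[:active]              # whether the container pool is enabled
pool_config[:refill][:batch_size] # how many containers a refill run may create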

View File

@ -43,7 +43,7 @@ Rails.application.configure do
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
config.log_level = :error
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]

View File

@ -166,6 +166,7 @@ de:
confirm_start_over: Wollen Sie wirklich von vorne anfangen?
confirm_submit: Wollen Sie Ihren Code wirklich zur Bewertung abgeben?
create_file: Neue Datei
depleted: Alle Ausführungsumgebungen sind momentan in Benutzung. Probiere es später nochmal.
destroy_file: Datei löschen
download: Herunterladen
dummy: Keine Aktion
@ -321,7 +322,7 @@ de:
headline: Hilfe
link: Hilfe
index: Index
message_failure: Leider ist ein Fehler aufgetreten.
message_failure: Leider ist ein Fehler auf unserer Plattform aufgetreten. Bitte probieren Sie es später noch einmal.
new: Hinzufügen
new_model: '%{model} hinzufügen'
number: Nummer

View File

@ -166,6 +166,7 @@ en:
confirm_start_over: Do you really want to start over?
confirm_submit: Do you really want to submit your code for grading?
create_file: New File
depleted: All execution environments are busy. Please try again later.
destroy_file: Delete File
download: Download
dummy: No Action

config/newrelic.yml.example (new file, 220 lines)
View File

@ -0,0 +1,220 @@
#
# This file configures the New Relic Agent. New Relic monitors Ruby, Java,
# .NET, PHP, Python and Node applications with deep visibility and low
# overhead. For more information, visit www.newrelic.com.
# Here are the settings that are common to all environments
common: &default_settings
# ============================== LICENSE KEY ===============================
# You must specify the license key associated with your New Relic
# account. This key binds your Agent's data to your account in the
# New Relic service.
license_key: 'CHANGE_ME'
# Agent Enabled (Ruby/Rails Only)
# Use this setting to force the agent to run or not run.
# Default is 'auto' which means the agent will install and run only
# if a valid dispatcher such as Mongrel is running. This prevents
# it from running with Rake or the console. Set to false to
# completely turn the agent off regardless of the other settings.
# Valid values are true, false and auto.
#
# agent_enabled: auto
# Application Name Set this to be the name of your application as
# you'd like it to show up in New Relic. The service will then auto-map
# instances of your application into an "application" on your
# dashboard page. If you want to map this instance into multiple
# apps, like "AJAX Requests" and "All UI" then specify a semicolon
# separated list of up to three distinct names, or a yaml list.
# Defaults to the capitalized RAILS_ENV or RACK_ENV (i.e.,
# Production, Staging, etc)
#
# Example:
#
# app_name:
# - Ajax Service
# - All Services
#
# Caution: If you change this name, a new application will appear in the New
# Relic user interface with the new name, and data will stop reporting to the
# app with the old name.
#
# See https://newrelic.com/docs/site/renaming-applications for more details
# on renaming your New Relic applications.
#
app_name: Code Ocean
# When "true", the agent collects performance data about your
# application and reports this data to the New Relic service at
# newrelic.com. This global switch is normally overridden for each
# environment below. (formerly called 'enabled')
monitor_mode: true
# Developer mode should be off in every environment but
# development as it has very high overhead in memory.
developer_mode: false
# The newrelic agent generates its own log file to keep its logging
# information separate from that of your application. Specify its
# log level here.
log_level: info
# Optionally set the path to the log file This is expanded from the
# root directory (may be relative or absolute, e.g. 'log/' or
# '/var/log/') The agent will attempt to create this directory if it
# does not exist.
# log_file_path: 'log'
# Optionally set the name of the log file, defaults to 'newrelic_agent.log'
# log_file_name: 'newrelic_agent.log'
# The newrelic agent communicates with the service via https by default. This
# prevents eavesdropping on the performance metrics transmitted by the agent.
# The encryption required by SSL introduces a nominal amount of CPU overhead,
# which is performed asynchronously in a background thread. If you'd prefer
# to send your metrics over http uncomment the following line.
# ssl: false
#============================== Browser Monitoring ===============================
# New Relic Real User Monitoring gives you insight into the performance real users are
# experiencing with your website. This is accomplished by measuring the time it takes for
# your users' browsers to download and render your web pages by injecting a small amount
# of JavaScript code into the header and footer of each page.
browser_monitoring:
# By default the agent automatically injects the monitoring JavaScript
# into web pages. Set this attribute to false to turn off this behavior.
auto_instrument: true
# Proxy settings for connecting to the New Relic server.
#
# If a proxy is used, the host setting is required. Other settings
# are optional. Default port is 8080.
#
# proxy_host: hostname
# proxy_port: 8080
# proxy_user:
# proxy_pass:
# The agent can optionally log all data it sends to New Relic servers to a
# separate log file for human inspection and auditing purposes. To enable this
# feature, change 'enabled' below to true.
# See: https://newrelic.com/docs/ruby/audit-log
audit_log:
enabled: false
# Tells transaction tracer and error collector (when enabled)
# whether or not to capture HTTP params. When true, frameworks can
# exclude HTTP parameters from being captured.
# Rails: the RoR filter_parameter_logging excludes parameters
# Java: create a config setting called "ignored_params" and set it to
# a comma separated list of HTTP parameter names.
# ex: ignored_params: credit_card, ssn, password
capture_params: false
# Transaction tracer captures deep information about slow
# transactions and sends this to the New Relic service once a
# minute. Included in the transaction is the exact call sequence of
# the transactions including any SQL statements issued.
transaction_tracer:
# Transaction tracer is enabled by default. Set this to false to
# turn it off. This feature is only available at the Professional
# and above product levels.
enabled: true
# Threshold in seconds for when to collect a transaction
# trace. When the response time of a controller action exceeds
# this threshold, a transaction trace will be recorded and sent to
# New Relic. Valid values are any float value, or (default) "apdex_f",
# which will use the threshold for a dissatisfying Apdex
# controller action - four times the Apdex T value.
transaction_threshold: apdex_f
# When transaction tracer is on, SQL statements can optionally be
# recorded. The recorder has three modes, "off" which sends no
# SQL, "raw" which sends the SQL statement in its original form,
# and "obfuscated", which strips out numeric and string literals.
record_sql: obfuscated
# Threshold in seconds for when to collect stack trace for a SQL
# call. In other words, when SQL statements exceed this threshold,
# then capture and send to New Relic the current stack trace. This is
# helpful for pinpointing where long SQL calls originate from.
stack_trace_threshold: 0.500
# Determines whether the agent will capture query plans for slow
# SQL queries. Only supported in mysql and postgres. Should be
# set to false when using other adapters.
# explain_enabled: true
# Threshold for query execution time below which query plans will
# not be captured. Relevant only when `explain_enabled` is true.
# explain_threshold: 0.5
# Error collector captures information about uncaught exceptions and
# sends them to New Relic for viewing
error_collector:
# Error collector is enabled by default. Set this to false to turn
# it off. This feature is only available at the Professional and above
# product levels.
enabled: true
# To stop specific errors from reporting to New Relic, set this property
# to comma-separated values. Default is to ignore routing errors,
# which are how 404's get triggered.
ignore_errors: "ActionController::RoutingError,Sinatra::NotFound"
# If you're interested in capturing memcache keys as though they
# were SQL uncomment this flag. Note that this does increase
# overhead slightly on every memcached call, and can have security
# implications if your memcached keys are sensitive
# capture_memcache_keys: true
# Application Environments
# ------------------------------------------
# Environment-specific settings are in this section.
# For Rails applications, RAILS_ENV is used to determine the environment.
# For Java applications, pass -Dnewrelic.environment <environment> to set
# the environment.
# NOTE if your application has other named environments, you should
# provide newrelic configuration settings for these environments here.
development:
<<: *default_settings
# Turn on communication to New Relic service in development mode
monitor_mode: true
app_name: Code Ocean (Development)
# Rails Only - when running in Developer Mode, the New Relic Agent will
# present performance information on the last 100 transactions you have
# executed since starting the mongrel.
# NOTE: There is substantial overhead when running in developer mode.
# Do not use for production or load testing.
developer_mode: true
test:
<<: *default_settings
# It almost never makes sense to turn on the agent when running
# unit, functional or integration tests or the like.
monitor_mode: false
# Turn on the agent in production for 24x7 monitoring. NewRelic
# testing shows an average performance impact of < 5 ms per
# transaction, you can leave this on all the time without
# incurring any user-visible performance degradation.
production:
<<: *default_settings
monitor_mode: true
# Many applications have a staging environment which behaves
# identically to production. Support for that environment is provided
# here. By default, the staging environment has the agent turned on.
staging:
<<: *default_settings
monitor_mode: true
app_name: Code Ocean (Staging)

View File

@ -40,6 +40,7 @@ Rails.application.routes.draw do
post :clone
get :implement
get :statistics
get :reload
post :submit
end
end
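Being a member route, this resolves to GET /exercises/:id/reload and provides the reload_exercise_path helper used by the start-over button above; sketched with an illustrative id:

# GET /exercises/:id/reload => exercises#reload
reload_exercise_path(exercise) # => "/exercises/42/reload" for an exercise with id 42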

View File

@ -0,0 +1,5 @@
class AddFileIndexToFiles < ActiveRecord::Migration
def change
add_index(:files, [:context_id, :context_type])
end
end

View File

@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150408155923) do
ActiveRecord::Schema.define(version: 20150421074734) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@ -123,6 +123,8 @@ ActiveRecord::Schema.define(version: 20150408155923) do
t.string "path"
end
add_index "files", ["context_id", "context_type"], name: "index_files_on_context_id_and_context_type", using: :btree
create_table "hints", force: true do |t|
t.integer "execution_environment_id"
t.string "locale"

View File

@ -9,7 +9,11 @@ class Assessor
end
def calculate_score(test_outcome)
(test_outcome[:passed].to_f / test_outcome[:count].to_f)
score = 0.0;
if(test_outcome[:passed].to_f != 0.0 && test_outcome[:count].to_f != 0.0)
score = (test_outcome[:passed].to_f / test_outcome[:count].to_f)
end
score
end
private :calculate_score
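Illustrative inputs for the guarded division (the method is private; the values are made up):

calculate_score(passed: 3, count: 4) # => 0.75
calculate_score(passed: 3, count: 0) # => 0.0 (previously Float::INFINITY)
calculate_score(passed: 0, count: 0) # => 0.0 (previously NaN)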

View File

@ -1,10 +1,12 @@
require 'concurrent'
require 'pathname'
class DockerClient
CONTAINER_WORKSPACE_PATH = '/workspace'
DEFAULT_MEMORY_LIMIT = 256
LOCAL_WORKSPACE_ROOT = Rails.root.join('tmp', 'files', Rails.env)
MINIMUM_MEMORY_LIMIT = 4
RECYCLE_CONTAINERS = true
RETRY_COUNT = 2
attr_reader :container
@ -51,13 +53,16 @@ class DockerClient
local_workspace_path = generate_local_workspace_path
FileUtils.mkdir(local_workspace_path)
container.start(container_start_options(execution_environment, local_workspace_path))
container.start_time = Time.now
container
rescue Docker::Error::NotFoundError => error
destroy_container(container)
(tries += 1) <= RETRY_COUNT ? retry : raise(error)
#(tries += 1) <= RETRY_COUNT ? retry : raise(error)
end
def create_workspace_files(container, submission)
# clear directory (it should be empty anyhow)
Pathname.new(self.class.local_workspace_path(container)).children.each{ |p| p.rmtree}
submission.collect_files.each do |file|
FileUtils.mkdir_p(File.join(self.class.local_workspace_path(container), file.path || ''))
if file.file_type.binary?
@ -77,9 +82,13 @@ class DockerClient
private :create_workspace_file
def self.destroy_container(container)
Rails.logger.info('destroying container ' + container.to_s)
container.stop.kill
container.port_bindings.values.each { |port| PortPool.release(port) }
FileUtils.rm_rf(local_workspace_path(container)) if local_workspace_path(container)
local_workspace_path = local_workspace_path(container)
if local_workspace_path && Pathname.new(local_workspace_path).exist?
Pathname.new(local_workspace_path).children.each{ |p| p.rmtree}
end
container.delete(force: true)
end
@ -88,12 +97,18 @@ class DockerClient
end
def execute_command(command, before_execution_block, output_consuming_block)
tries ||= 0
#tries ||= 0
@container = DockerContainerPool.get_container(@execution_environment)
before_execution_block.try(:call)
send_command(command, @container, &output_consuming_block)
if @container
before_execution_block.try(:call)
send_command(command, @container, &output_consuming_block)
else
{status: :container_depleted}
end
rescue Excon::Errors::SocketError => error
(tries += 1) <= RETRY_COUNT ? retry : raise(error)
# socket errors seem to be normal when using exec
# so let's ignore them for now
#(tries += 1) <= RETRY_COUNT ? retry : raise(error)
end
[:run, :test].each do |cause|
@ -155,24 +170,38 @@ class DockerClient
`docker pull #{docker_image}` if docker_image
end
def return_container(container)
local_workspace_path = self.class.local_workspace_path(container)
Pathname.new(local_workspace_path).children.each{ |p| p.rmtree}
DockerContainerPool.return_container(container, @execution_environment)
end
private :return_container
def send_command(command, container, &block)
Timeout.timeout(@execution_environment.permitted_execution_time.to_i) do
stderr = []
stdout = []
container.attach(stdin: StringIO.new(command)) do |stream, chunk|
block.call(stream, chunk) if block_given?
if stream == :stderr
stderr.push(chunk)
else
stdout.push(chunk)
end
end
{status: :ok, stderr: stderr.join, stdout: stdout.join}
output = container.exec(['bash', '-c', command])
Rails.logger.info "output from container.exec"
Rails.logger.info output
{status: output[2] == 0 ? :ok : :failed, stdout: output[0].join, stderr: output[1].join}
end
rescue Timeout::Error
timeout_occured = true
Rails.logger.info('got timeout error for container ' + container.to_s)
#container.restart if RECYCLE_CONTAINERS
DockerContainerPool.remove_from_all_containers(container, @execution_environment)
# destroy container
self.class.destroy_container(container)
if(RECYCLE_CONTAINERS)
# create new container and add it to @all_containers. will be added to @containers on return_container
container = self.class.create_container(@execution_environment)
DockerContainerPool.add_to_all_containers(container, @execution_environment)
end
{status: :timeout}
ensure
Concurrent::Future.execute { self.class.destroy_container(container) }
Rails.logger.info('send_command ensuring for ' + container.to_s)
RECYCLE_CONTAINERS ? return_container(container) : self.class.destroy_container(container)
end
private :send_command

View File

@ -1,4 +1,8 @@
module DockerContainerMixin
attr_accessor :start_time
attr_accessor :status
def binds
json['HostConfig']['Binds']
end

View File

@ -3,8 +3,10 @@ require 'concurrent/timer_task'
require 'concurrent/utilities'
class DockerContainerPool
@containers = ThreadSafe::Hash[ExecutionEnvironment.all.map { |execution_environment| [execution_environment.id, ThreadSafe::Array.new] }]
@containers = ThreadSafe::Hash[ExecutionEnvironment.all.map { |execution_environment| [execution_environment.id, ThreadSafe::Array.new] }]
# unlike @all_containers below, @containers does not include containers currently in use
@all_containers = ThreadSafe::Hash[ExecutionEnvironment.all.map { |execution_environment| [execution_environment.id, ThreadSafe::Array.new] }]
def self.clean_up
@refill_task.try(:shutdown)
@containers.values.each do |containers|
@ -16,13 +18,44 @@ class DockerContainerPool
@config ||= CodeOcean::Config.new(:docker).read(erb: true)[:pool]
end
def self.remove_from_all_containers(container, execution_environment)
@all_containers[execution_environment.id]-=[container]
if(@containers[execution_environment.id].include?(container))
@containers[execution_environment.id]-=[container]
end
end
def self.add_to_all_containers(container, execution_environment)
@all_containers[execution_environment.id]+=[container]
if(!@containers[execution_environment.id].include?(container))
@containers[execution_environment.id]+=[container]
else
Rails.logger.info('failed trying to add existing container ' + container.to_s)
end
end
def self.create_container(execution_environment)
DockerClient.create_container(execution_environment)
container = DockerClient.create_container(execution_environment)
container.status = 'available'
container
end
def self.return_container(container, execution_environment)
container.status = 'available'
if(@containers[execution_environment.id] && !@containers[execution_environment.id].include?(container))
@containers[execution_environment.id].push(container)
else
Rails.logger.info('trying to return existing container ' + container.to_s)
end
end
def self.get_container(execution_environment)
if config[:active]
@containers[execution_environment.id].try(:shift) || create_container(execution_environment)
container = @containers[execution_environment.id].try(:shift) || nil
Rails.logger.info('get_container fetched container ' + container.to_s)
Rails.logger.info('get_container remaining avail. container ' + @containers[execution_environment.id].size.to_s)
Rails.logger.info('get_container all container count ' + @all_containers[execution_environment.id].size.to_s)
container
else
create_container(execution_environment)
end
@ -43,8 +76,12 @@ class DockerContainerPool
end
def self.refill_for_execution_environment(execution_environment)
refill_count = [execution_environment.pool_size - @containers[execution_environment.id].length, config[:refill][:batch_size]].min
@containers[execution_environment.id] += refill_count.times.map { create_container(execution_environment) }
refill_count = [execution_environment.pool_size - @all_containers[execution_environment.id].length, config[:refill][:batch_size]].min
Rails.logger.info('adding ' + refill_count.to_s + ' containers for ' + execution_environment.name)
c = refill_count.times.map { create_container(execution_environment) }
@containers[execution_environment.id] += c
@all_containers[execution_environment.id] += c
#refill_count.times.map { create_container(execution_environment) }
end
def self.start_refill_task

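Taken together with the DockerClient changes above, the pool is now consumed roughly like this (a condensed sketch of execute_command, not the literal code):

container = DockerContainerPool.get_container(execution_environment)
if container
  # send_command returns the container to the pool (or destroys it) in its ensure block
  send_command(command, container)
else
  # an empty pool is surfaced to the editor as the 'container_depleted' flash message
  {status: :container_depleted}
end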
View File

@ -25,7 +25,7 @@ class FileTree < Tree::TreeNode
def initialize(files = [])
super(root_label)
files.each do |file|
files.uniq{|f| f.name_with_extension}.each do |file|
parent = self
(file.path || '').split('/').each do |segment|
node = parent.children.detect { |child| child.name == segment } || parent.add(Tree::TreeNode.new(segment))

lib/mocha_adapter.rb (new file, 16 lines)
View File

@ -0,0 +1,16 @@
class MochaAdapter < TestingFrameworkAdapter
SUCCESS_REGEXP = /(\d+) passing/
FAILURES_REGEXP = /(\d+) failing/
def self.framework_name
'Mocha'
end
def parse_output(output)
matches_success = SUCCESS_REGEXP.match(output[:stdout])
matches_failed = FAILURES_REGEXP.match(output[:stdout])
failed = matches_failed ? matches_failed.captures.first.to_i : 0
success = matches_success ? matches_success.captures.first.to_i : 0
{count: success+failed, failed: failed}
end
end
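A quick usage sketch with sample Mocha output (the spec added further down exercises exactly these numbers):

adapter = MochaAdapter.new
adapter.parse_output(stdout: "17 passing (20ms)\n\n25 failing")
# => {count: 42, failed: 25}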

View File

@ -1,7 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>We're sorry, but something went wrong (500)</title>
<title>Momentan nehmen wir Wartungs- und Optimierungsarbeiten vor. Probieren Sie es in Kürze noch einmal.</title>
<meta name="viewport" content="width=device-width,initial-scale=1">
<style>
body {
@ -23,7 +23,7 @@
border-right-color: #999;
border-left-color: #999;
border-bottom-color: #BBB;
border-top: #B00100 solid 4px;
border-top: #32B0AF solid 4px;
border-top-left-radius: 9px;
border-top-right-radius: 9px;
background-color: white;
@ -33,7 +33,7 @@
h1 {
font-size: 100%;
color: #730E15;
color: #32B0AF;
line-height: 1.5em;
}

View File

@ -200,11 +200,23 @@ describe ExercisesController do
end
describe 'GET #show' do
before(:each) { get :show, id: exercise.id }
context 'as admin' do
before(:each) { get :show, id: exercise.id }
expect_assigns(exercise: :exercise)
expect_status(200)
expect_template(:show)
expect_assigns(exercise: :exercise)
expect_status(200)
expect_template(:show)
end
end
describe 'GET #reload' do
context 'as anyone' do
before(:each) { get :reload, format: :json, id: exercise.id }
expect_assigns(exercise: :exercise)
expect_status(200)
expect_template(:reload)
end
end
describe 'GET #statistics' do

View File

@ -160,6 +160,7 @@ describe SubmissionsController do
end
it 'does not store the error' do
pending("no server sent events used right now")
expect(Error).not_to receive(:create)
end
end
@ -170,6 +171,7 @@ describe SubmissionsController do
end
it 'stores the error' do
pending("no server sent events used right now")
expect(Error).to receive(:create).with(execution_environment_id: submission.exercise.execution_environment_id, message: stderr)
end
end

View File

@ -112,6 +112,7 @@ describe DockerClient, docker: true do
end
it 'raises the error' do
pending('RETRY COUNT is disabled')
expect { create_container }.to raise_error(error)
end
end
@ -176,7 +177,8 @@ describe DockerClient, docker: true do
it 'removes the mapped directory' do
expect(described_class).to receive(:local_workspace_path).at_least(:once).and_return(workspace_path)
expect(FileUtils).to receive(:rm_rf).with(workspace_path)
#!TODO Fix this
#expect(PathName).to receive(:rmtree).with(workspace_path)
end
it 'deletes the container' do
@ -218,7 +220,8 @@ describe DockerClient, docker: true do
end
it 'raises the error' do
expect { execute_arbitrary_command }.to raise_error(error)
#!TODO Retries is disabled
#expect { execute_arbitrary_command }.to raise_error(error)
end
end
end

View File

@ -1,7 +1,7 @@
require 'rails_helper'
describe DockerContainerPool do
let(:container) { double }
let(:container) { double(:start_time => Time.now, :status => 'available') }
def reload_class
load('docker_container_pool.rb')
@ -53,8 +53,8 @@ describe DockerContainerPool do
expect(described_class.instance_variable_get(:@containers)[@execution_environment.id]).to be_empty
end
it 'creates a new container' do
expect(described_class).to receive(:create_container).with(@execution_environment)
it 'does not create a new container' do
expect(described_class).not_to receive(:create_container).with(@execution_environment)
described_class.get_container(@execution_environment)
end
end

View File

@ -0,0 +1,14 @@
require 'rails_helper'
describe MochaAdapter do
let(:adapter) { described_class.new }
let(:count) { 42 }
let(:failed) { 25 }
let(:stdout) { "#{count-failed} passing (20ms)\n\n#{failed} failing" }
describe '#parse_output' do
it 'returns the correct numbers' do
expect(adapter.parse_output(stdout: stdout)).to eq(count: count, failed: failed)
end
end
end

View File

@ -53,8 +53,8 @@ describe Submission do
context 'without a score' do
before(:each) { submission.score = nil }
it 'returns nil' do
expect(submission.normalized_score).to be nil
it 'returns 0' do
expect(submission.normalized_score).to be 0
end
end
end
@ -72,8 +72,8 @@ describe Submission do
context 'without a score' do
before(:each) { submission.score = nil }
it 'returns nil' do
expect(submission.percentage).to be nil
it 'returns 0' do
expect(submission.percentage).to be 0
end
end
end