Update Study Group Dashboard and Statistics

Sebastian Serth
2022-08-19 23:19:44 +02:00
parent d762f976a8
commit 6927b57170
5 changed files with 20 additions and 22 deletions

View File

@@ -44,7 +44,7 @@ class ExecutionEnvironmentsController < ApplicationController
 FROM
 (SELECT user_id,
 exercise_id,
-CASE WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0' ELSE working_time END AS working_time_new
+CASE WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0' ELSE working_time END AS working_time_new
 FROM
 (SELECT user_id,
 exercise_id,

View File

@@ -32,7 +32,7 @@ class ExternalUsersController < ApplicationController
 score,
 id,
 CASE
-WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0'
+WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0'
 ELSE working_time
 END AS working_time_new
 FROM

View File

@@ -2,7 +2,9 @@
 module StatisticsHelper
   WORKING_TIME_DELTA_IN_SECONDS = 5.minutes
-  WORKING_TIME_DELTA_IN_SQL_INTERVAL = ActiveRecord::Base.sanitize_sql("'0:05:00'") # yes, a string with quotes
+  def self.working_time_larger_delta
+    @working_time_larger_delta ||= ActiveRecord::Base.sanitize_sql(['working_time >= ?', '0:05:00'])
+  end

   def statistics_data
     [
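
For context, a minimal sketch of what the new helper evaluates to, assuming Rails' standard behavior of sanitize_sql with an array (template plus bind values), which quotes each value and substitutes it for its placeholder:

# Hypothetical console session, not part of the commit.
StatisticsHelper.working_time_larger_delta
# => "working_time >= '0:05:00'"
# The callers in this commit interpolate this pre-sanitized fragment into their
# raw SQL, replacing the former WORKING_TIME_DELTA_IN_SQL_INTERVAL string constant.
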
@@ -174,14 +176,12 @@ module StatisticsHelper
   end

   def ranged_rfc_data(interval = 'year', from = DateTime.new(0), to = DateTime.now)
-    interval = ActiveRecord::Base.sanitize_sql(interval)
     [
       {
         key: 'rfcs',
         name: t('activerecord.models.request_for_comment.other'),
         data: RequestForComment.in_range(from, to)
-          .select("date_trunc('#{interval}', created_at) AS \"key\", count(id) AS \"value\"")
+          .select(RequestForComment.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', interval]))
           .group('key').order('key'),
       },
       {
@@ -189,7 +189,7 @@ module StatisticsHelper
         name: t('statistics.entries.request_for_comments.percent_solved'),
         data: RequestForComment.in_range(from, to)
           .where(solved: true)
-          .select("date_trunc('#{interval}', created_at) AS \"key\", count(id) AS \"value\"")
+          .select(RequestForComment.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', interval]))
           .group('key').order('key'),
       },
       {
@@ -197,36 +197,34 @@ module StatisticsHelper
         name: t('statistics.entries.request_for_comments.percent_soft_solved'),
         data: RequestForComment.in_range(from, to).unsolved
           .where(full_score_reached: true)
-          .select("date_trunc('#{interval}', created_at) AS \"key\", count(id) AS \"value\"")
+          .select(RequestForComment.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', interval]))
           .group('key').order('key'),
       },
       {
         key: 'rfcs_unsolved',
         name: t('statistics.entries.request_for_comments.percent_unsolved'),
         data: RequestForComment.in_range(from, to).unsolved
-          .select("date_trunc('#{interval}', created_at) AS \"key\", count(id) AS \"value\"")
+          .select(RequestForComment.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', interval]))
           .group('key').order('key'),
       },
     ]
   end

   def ranged_user_data(interval = 'year', from = DateTime.new(0), to = DateTime.now)
-    interval = ActiveRecord::Base.sanitize_sql(interval)
     [
       {
         key: 'active',
         name: t('statistics.entries.users.active'),
         data: ExternalUser.joins(:submissions)
           .where(submissions: {created_at: from..to})
-          .select("date_trunc('#{interval}', submissions.created_at) AS \"key\", count(distinct external_users.id) AS \"value\"")
+          .select(ExternalUser.sanitize_sql(['date_trunc(?, submissions.created_at) AS "key", count(distinct external_users.id) AS "value"', interval]))
           .group('key').order('key'),
       },
       {
         key: 'submissions',
         name: t('statistics.entries.exercises.submissions'),
         data: Submission.where(created_at: from..to)
-          .select("date_trunc('#{interval}', created_at) AS \"key\", count(id) AS \"value\"")
+          .select(Submission.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', interval]))
           .group('key').order('key'),
         axis: 'right',
       },
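
A hedged sketch of the pattern adopted in these select calls, assuming Rails' array form of sanitize_sql and using 'month' as a made-up interval for illustration:

# Hypothetical console session, not part of the commit.
RequestForComment.sanitize_sql(['date_trunc(?, created_at) AS "key", count(id) AS "value"', 'month'])
# => "date_trunc('month', created_at) AS \"key\", count(id) AS \"value\""
# The interval is bound as a quoted literal instead of being interpolated into
# the string, so an unexpected value cannot change the statement structure.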

View File

@@ -94,7 +94,7 @@ class Exercise < ApplicationRecord
 (SELECT user_id,
 user_type,
 score,
-CASE WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0' ELSE working_time END AS working_time_new
+CASE WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0' ELSE working_time END AS working_time_new
 FROM
 (SELECT user_id,
 user_type,
@@ -103,7 +103,7 @@ class Exercise < ApplicationRecord
 (created_at - lag(created_at) over (PARTITION BY user_id, exercise_id
 ORDER BY created_at)) AS working_time
 FROM submissions
-WHERE exercise_id=#{self.class.sanitize_sql(id)}) AS foo) AS bar
+WHERE #{self.class.sanitize_sql(['exercise_id = ?', id])}) AS foo) AS bar
 GROUP BY user_id, user_type
 "
 end
@@ -118,7 +118,7 @@ class Exercise < ApplicationRecord
 (created_at - lag(created_at) over (PARTITION BY submissions.user_type, submissions.user_id, exercise_id
 ORDER BY created_at)) AS working_time
 FROM submissions
-WHERE exercise_id = #{self.class.sanitize_sql(exercise_id)} AND study_group_id = #{self.class.sanitize_sql(study_group_id)} #{self.class.sanitize_sql(additional_filter)}),
+WHERE #{self.class.sanitize_sql(['exercise_id = ? and study_group_id = ?', exercise_id, study_group_id])} #{self.class.sanitize_sql(additional_filter)}),
 working_time_with_deltas_ignored AS (
 SELECT user_id,
 user_type,
@@ -126,7 +126,7 @@ class Exercise < ApplicationRecord
 sum(CASE WHEN score IS NOT NULL THEN 1 ELSE 0 END)
 over (ORDER BY user_type, user_id, created_at ASC) AS change_in_score,
 created_at,
-CASE WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0' ELSE working_time END AS working_time_filtered
+CASE WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0' ELSE working_time END AS working_time_filtered
 FROM working_time_between_submissions
 ),
 working_times_with_score_expanded AS (
@@ -263,7 +263,7 @@ class Exercise < ApplicationRecord
 Max(score) AS max_score,
 (created_at - Lag(created_at) OVER (partition BY user_id, exercise_id ORDER BY created_at)) AS working_time
 FROM submissions
-WHERE exercise_id = #{self.class.sanitize_sql(id)}
+WHERE #{self.class.sanitize_sql(['exercise_id = ?', id])}
 AND user_type = 'ExternalUser'
 GROUP BY user_id,
 id,
@@ -273,7 +273,7 @@ class Exercise < ApplicationRecord
 Sum(weight) AS max_points
 FROM files
 WHERE context_type = 'Exercise'
-AND context_id = #{self.class.sanitize_sql(id)}
+AND #{self.class.sanitize_sql(['context_id = ?', id])}
 AND role IN ('teacher_defined_test', 'teacher_defined_linter')
 GROUP BY context_id),
 -- filter for rows containing max points
@@ -342,7 +342,7 @@ class Exercise < ApplicationRecord
 exercise_id,
 max_score,
 CASE
-WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0'
+WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0'
 ELSE working_time
 END AS working_time_new
 FROM all_working_times_until_max ), result AS
@@ -445,7 +445,7 @@ class Exercise < ApplicationRecord
 FILTERED_TIMES_UNTIL_MAX AS
 (
-SELECT user_id,exercise_id, max_score, CASE WHEN working_time >= #{StatisticsHelper::WORKING_TIME_DELTA_IN_SQL_INTERVAL} THEN '0' ELSE working_time END AS working_time_new
+SELECT user_id,exercise_id, max_score, CASE WHEN #{StatisticsHelper.working_time_larger_delta} THEN '0' ELSE working_time END AS working_time_new
 FROM ALL_WORKING_TIMES_UNTIL_MAX
 )
 SELECT e.external_id AS external_user_id, f.user_id, exercise_id, MAX(max_score) AS max_score, sum(working_time_new) AS working_time
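
To illustrate the difference between the two sanitization styles used in these heredoc queries, a hedged sketch (42 and 7 are made-up ids, and the exact return values assume Rails' documented sanitize_sql behavior):

# Hypothetical console session, not part of the commit.
# Old style: sanitize the bare value and splice it into the clause by hand;
# sanitize_sql should pass a non-array argument through, so quoting is left to the caller.
"exercise_id = #{Exercise.sanitize_sql(42)}"
# => "exercise_id = 42"
# New style: bind the value(s) through the array form, which quotes them while
# building the whole predicate.
Exercise.sanitize_sql(['exercise_id = ? and study_group_id = ?', 42, 7])
# => "exercise_id = 42 and study_group_id = 7"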

View File

@@ -63,7 +63,7 @@ namespace :detect_exercise_anomalies do
 from exercises e
 join submissions s on s.exercise_id = e.id
 group by e.id
-having count(s.user_id) > #{ExerciseCollection.sanitize_sql(number_of_solutions)}
+having #{ExerciseCollection.sanitize_sql(['count(s.user_id) > ?', number_of_solutions])}
 ) as exercises_with_submissions on exercises_with_submissions.id = eci.exercise_id")
 .group('exercise_collections.id')
 .having('count(exercises_with_submissions.id) > ?', number_of_exercises)