NDOJ/judge/utils/problems.py

from collections import defaultdict
from math import e
import os, zipfile
from datetime import datetime
import random

from django.conf import settings
from django.core.cache import cache
from django.db.models import Case, Count, ExpressionWrapper, F, Max, Q, When
from django.db.models.fields import FloatField
from django.utils import timezone
from django.utils.translation import gettext as _, gettext_noop
from judge.models import Problem, Submission
from judge.ml.collab_filter import CollabFilter

__all__ = [
"contest_completed_ids",
"get_result_data",
"user_completed_ids",
"user_editable_ids",
"user_tester_ids",
]


def user_tester_ids(profile):
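    """Return the set of problem ids the given profile is assigned to as a tester."""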
return set(
Problem.testers.through.objects.filter(profile=profile).values_list(
"problem_id", flat=True
)
    )


def user_editable_ids(profile):
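    """Return the set of problem ids the profile can edit as an author or curator."""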
result = set(
(
Problem.objects.filter(authors=profile)
| Problem.objects.filter(curators=profile)
).values_list("id", flat=True)
)
    return result


def contest_completed_ids(participation):
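    """Return the set of problem ids fully solved (AC with full points) in this
    contest participation. Cached for a day."""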
key = "contest_complete:%d" % participation.id
result = cache.get(key)
if result is None:
result = set(
participation.submissions.filter(
submission__result="AC", points=F("problem__points")
)
.values_list("problem__problem__id", flat=True)
.distinct()
)
cache.set(key, result, 86400)
    return result


def user_completed_ids(profile):
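    """Return the set of problem ids the profile has fully solved (AC with full
    points). Cached for a day."""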
key = "user_complete:%d" % profile.id
result = cache.get(key)
if result is None:
result = set(
Submission.objects.filter(
user=profile, result="AC", points=F("problem__points")
)
.values_list("problem_id", flat=True)
.distinct()
)
cache.set(key, result, 86400)
    return result


def contest_attempted_ids(participation):
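    """Map problem id to achieved/max points for contest problems the participation
    has attempted but not fully solved. Cached for a day."""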
key = "contest_attempted:%s" % participation.id
result = cache.get(key)
if result is None:
result = {
id: {"achieved_points": points, "max_points": max_points}
for id, max_points, points in (
participation.submissions.values_list(
"problem__problem__id", "problem__points"
)
.annotate(points=Max("points"))
.filter(points__lt=F("problem__points"))
)
}
cache.set(key, result, 86400)
    return result


def user_attempted_ids(profile):
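    """Map problem id to attempt details (points, last submission, code, name) for
    problems the profile has attempted but not fully solved. Cached for a day."""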
key = "user_attempted:%s" % profile.id
result = cache.get(key)
if result is None:
result = {
id: {
"achieved_points": points,
"max_points": max_points,
"last_submission": last_submission,
"code": problem_code,
"name": problem_name,
}
for id, max_points, problem_code, problem_name, points, last_submission in (
Submission.objects.filter(user=profile)
.values_list(
"problem__id", "problem__points", "problem__code", "problem__name"
)
.annotate(points=Max("points"), last_submission=Max("id"))
.filter(points__lt=F("problem__points"))
)
}
cache.set(key, result, 86400)
    return result


def _get_result_data(results):
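    """Fold raw per-verdict counts into the categories shown in result statistics."""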
return {
"categories": [
            # Using gettext_noop here since the names are stored in the cache, so they must be language neutral.
            # The caller, SubmissionList.get_result_data, will run gettext on each name.
{"code": "AC", "name": gettext_noop("Accepted"), "count": results["AC"]},
{"code": "WA", "name": gettext_noop("Wrong"), "count": results["WA"]},
{
"code": "CE",
"name": gettext_noop("Compile Error"),
"count": results["CE"],
},
{"code": "TLE", "name": gettext_noop("Timeout"), "count": results["TLE"]},
{
"code": "ERR",
"name": gettext_noop("Error"),
"count": results["MLE"]
+ results["OLE"]
+ results["IR"]
+ results["RTE"]
+ results["AB"]
+ results["IE"],
},
],
"total": sum(results.values()),
    }


def get_result_data(*args, **kwargs):
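    """Return verdict statistics for a submission queryset passed positionally,
    or for Submission.objects filtered by the given keyword arguments."""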
if args:
submissions = args[0]
if kwargs:
raise ValueError(_("Can't pass both queryset and keyword filters"))
else:
submissions = (
Submission.objects.filter(**kwargs)
            if kwargs
else Submission.objects
)
raw = (
submissions.values("result")
.annotate(count=Count("result"))
.values_list("result", "count")
)
    return _get_result_data(defaultdict(int, raw))


def editable_problems(user, profile=None):
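    """Return a queryset of problems the user may edit, honoring the
    edit_all_problem and edit_public_problem permissions."""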
subquery = Problem.objects.all()
if profile is None:
profile = user.profile
if not user.has_perm("judge.edit_all_problem"):
subfilter = Q(authors__id=profile.id) | Q(curators__id=profile.id)
if user.has_perm("judge.edit_public_problem"):
subfilter |= Q(is_public=True)
subquery = subquery.filter(subfilter)
    return subquery


def hot_problems(duration, limit):
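    """Return up to `limit` public problems with submissions in the last `duration`,
    ranked by a blend of points, AC ratio and unique users. Cached for 15 minutes."""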
cache_key = "hot_problems:%d:%d" % (duration.total_seconds(), limit)
qs = cache.get(cache_key)
if qs is None:
qs = Problem.get_public_problems().filter(
submission__date__gt=timezone.now() - duration
)
qs0 = (
qs.annotate(k=Count("submission__user", distinct=True))
.order_by("-k")
.values_list("k", flat=True)
)
if not qs0:
return []
# make this an aggregate
mx = float(qs0[0])
qs = qs.annotate(unique_user_count=Count("submission__user", distinct=True))
        # Count submission volume while excluding compile errors (CE).
qs = qs.annotate(
submission_volume=Count(
Case(
When(submission__result="AC", then=1),
When(submission__result="WA", then=1),
When(submission__result="IR", then=1),
When(submission__result="RTE", then=1),
When(submission__result="TLE", then=1),
When(submission__result="OLE", then=1),
output_field=FloatField(),
)
)
)
qs = qs.annotate(
ac_volume=Count(
Case(
When(submission__result="AC", then=1),
output_field=FloatField(),
)
)
)
qs = qs.filter(unique_user_count__gt=max(mx / 3.0, 1))
qs = (
qs.annotate(
ordering=ExpressionWrapper(
0.02
* F("points")
* (
0.4 * F("ac_volume") / F("submission_volume")
+ 0.6 * F("ac_rate")
)
+ 100 * e ** (F("unique_user_count") / mx),
output_field=FloatField(),
)
)
.order_by("-ordering")
.defer("description")[:limit]
)
cache.set(cache_key, qs, 900)
    return qs


def get_related_problems(profile, problem, limit=8):
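    """Return up to `limit` unsolved problems related to `problem` for `profile`,
    picked by the collaborative filtering model. Shuffled daily, cached for 6 hours."""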
if not profile or not settings.ML_OUTPUT_PATH:
return None
cache_key = "related_problems:%d:%d" % (profile.id, problem.id)
qs = cache.get(cache_key)
if qs is not None:
return qs
problemset = Problem.get_visible_problems(profile.user).values_list("id", flat=True)
problemset = problemset.exclude(id__in=user_completed_ids(profile))
problemset = problemset.exclude(id=problem.id)
cf_model = CollabFilter("collab_filter")
results = cf_model.problem_neighbors(
problem, problemset, CollabFilter.DOT, limit
) + cf_model.problem_neighbors(problem, problemset, CollabFilter.COSINE, limit)
results = list(set([i[1] for i in results]))
seed = datetime.now().strftime("%d%m%Y")
random.Random(seed).shuffle(results)
results = results[:limit]
results = [Problem.objects.get(id=i) for i in results]
cache.set(cache_key, results, 21600)
return results