|
17 | 17 | from users.admin_mixins import ConferencePermissionMixin |
18 | 18 |
|
19 | 19 |
|
def get_accepted_submissions(conference):
    """Return the queryset of accepted submissions for *conference*.

    A submission counts as accepted when its pending status is "accepted",
    or when it has no pending status (NULL or empty string) and its current
    status is "accepted". Related speaker/type/audience_level rows are
    select_related and languages prefetched; ordered by id so the result
    is deterministic (e.g. for cache-key computation).
    """
    accepted = Submission.STATUS.accepted
    # Pending status wins; fall back to the current status only when no
    # pending status is set (NULL or "").
    is_accepted = (
        Q(pending_status=accepted)
        | Q(pending_status__isnull=True, status=accepted)
        | Q(pending_status="", status=accepted)
    )
    qs = Submission.objects.filter(conference=conference).filter(is_accepted)
    qs = qs.select_related("speaker", "type", "audience_level")
    qs = qs.prefetch_related("languages")
    return qs.order_by("id")
| 33 | + |
20 | 34 | class AvailableScoreOptionInline(admin.TabularInline): |
21 | 35 | model = AvailableScoreOption |
22 | 36 |
|
@@ -366,16 +380,7 @@ def review_shortlist_view(self, request, review_session_id): |
366 | 380 | return TemplateResponse(request, adapter.shortlist_template, context) |
367 | 381 |
|
368 | 382 | def _get_accepted_submissions(self, conference): |
369 | | - return ( |
370 | | - Submission.objects.filter(conference=conference) |
371 | | - .filter( |
372 | | - Q(pending_status=Submission.STATUS.accepted) |
373 | | - | Q(pending_status__isnull=True, status=Submission.STATUS.accepted) |
374 | | - | Q(pending_status="", status=Submission.STATUS.accepted) |
375 | | - ) |
376 | | - .select_related("speaker", "type", "audience_level") |
377 | | - .prefetch_related("languages") |
378 | | - ) |
| 383 | + return get_accepted_submissions(conference) |
379 | 384 |
|
380 | 385 | def review_recap_view(self, request, review_session_id): |
381 | 386 | review_session = ReviewSession.objects.get(id=review_session_id) |
@@ -448,49 +453,51 @@ def review_recap_compute_analysis_view(self, request, review_session_id): |
448 | 453 | raise PermissionDenied() |
449 | 454 |
|
450 | 455 | conference = review_session.conference |
451 | | - accepted_submissions = self._get_accepted_submissions(conference) |
| 456 | + accepted_submissions = list(self._get_accepted_submissions(conference)) |
452 | 457 | force_recompute = request.GET.get("recompute") == "1" |
453 | 458 |
|
454 | | - from reviews.similar_talks import compute_similar_talks, compute_topic_clusters |
| 459 | + from django.core.cache import cache |
455 | 460 |
|
456 | | - similar_talks = compute_similar_talks( |
457 | | - accepted_submissions, |
458 | | - top_n=5, |
459 | | - conference_id=conference.id, |
460 | | - force_recompute=force_recompute, |
461 | | - ) |
| 461 | + from pycon.tasks import check_pending_heavy_processing_work |
| 462 | + from reviews.cache_keys import get_cache_key |
| 463 | + from reviews.tasks import compute_recap_analysis |
462 | 464 |
|
463 | | - topic_clusters = compute_topic_clusters( |
464 | | - accepted_submissions, |
465 | | - min_topic_size=3, |
466 | | - conference_id=conference.id, |
467 | | - force_recompute=force_recompute, |
| 465 | + combined_cache_key = get_cache_key( |
| 466 | + "recap_analysis", conference.id, accepted_submissions |
468 | 467 | ) |
469 | 468 |
|
470 | | - # Build submissions list with similar talks, sorted by highest similarity |
471 | | - submissions_list = sorted( |
472 | | - [ |
473 | | - { |
474 | | - "id": s.id, |
475 | | - "title": str(s.title), |
476 | | - "type": s.type.name, |
477 | | - "speaker": s.speaker.display_name if s.speaker else "Unknown", |
478 | | - "similar": similar_talks.get(s.id, []), |
479 | | - } |
480 | | - for s in accepted_submissions |
481 | | - ], |
482 | | - key=lambda x: max( |
483 | | - (item["similarity"] for item in x["similar"]), default=0 |
484 | | - ), |
485 | | - reverse=True, |
486 | | - ) |
| 469 | + if not force_recompute: |
| 470 | + cached_result = cache.get(combined_cache_key) |
| 471 | + if cached_result is not None: |
| 472 | + return JsonResponse(cached_result) |
487 | 473 |
|
488 | | - return JsonResponse( |
489 | | - { |
490 | | - "submissions_list": submissions_list, |
491 | | - "topic_clusters": topic_clusters, |
492 | | - } |
493 | | - ) |
| 474 | + # Use cache.add as a lock to prevent duplicate task dispatch. |
| 475 | + # Short TTL so lock auto-expires if the worker is killed before cleanup. |
| 476 | + computing_key = f"{combined_cache_key}:computing" |
| 477 | + |
| 478 | + # Check for stale lock from a crashed/finished task |
| 479 | + existing_task_id = cache.get(computing_key) |
| 480 | + if existing_task_id: |
| 481 | + from celery.result import AsyncResult |
| 482 | + |
| 483 | + if AsyncResult(existing_task_id).state in ( |
| 484 | + "SUCCESS", |
| 485 | + "FAILURE", |
| 486 | + "REVOKED", |
| 487 | + ): |
| 488 | + cache.delete(computing_key) |
| 489 | + |
| 490 | + if cache.add(computing_key, "pending", timeout=300): |
| 491 | + result = compute_recap_analysis.apply_async( |
| 492 | + args=[conference.id, combined_cache_key], |
| 493 | + kwargs={"force_recompute": force_recompute}, |
| 494 | + queue="heavy_processing", |
| 495 | + ) |
| 496 | + # Store task ID so subsequent requests can detect stale locks |
| 497 | + cache.set(computing_key, result.id, timeout=300) |
| 498 | + check_pending_heavy_processing_work.delay() |
| 499 | + |
| 500 | + return JsonResponse({"status": "processing"}) |
494 | 501 |
|
495 | 502 | def review_view(self, request, review_session_id, review_item_id): |
496 | 503 | review_session = ReviewSession.objects.get(id=review_session_id) |
|
0 commit comments